Dataset schema (column — type — range of values):

- branch_name — string (149 distinct values)
- text — string (length 23 to 89.3M)
- directory_id — string (length 40)
- languages — list (1 to 19 items)
- num_files — int64 (1 to 11.8k)
- repo_language — string (38 distinct values)
- repo_name — string (length 6 to 114)
- revision_id — string (length 40)
- snapshot_id — string (length 40)
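The features above describe one record per repository snapshot: the concatenated source files live in `text`, and the remaining columns carry per-repo metadata. A minimal sketch of how a corpus with this schema might be streamed and inspected with the 🤗 `datasets` library — the dataset id and the `train` split below are placeholders, not the real location of this corpus:

```python
from datasets import load_dataset

# Placeholder id — substitute the actual Hub path of this corpus.
ds = load_dataset("some-org/some-code-corpus", split="train", streaming=True)

row = next(iter(ds))
print(row["repo_name"], row["repo_language"], row["num_files"])
print(row["text"][:200])  # first 200 characters of the concatenated files
```

Streaming keeps memory bounded, which matters here: a single `text` value can run to tens of megabytes (the 89.3M upper bound above).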
Sample rows:

Row 1
branch_name: refs/heads/main
text:
<repo_name>floresmatias0/Lyracons<file_sep>/js/index.js document.addEventListener('DOMContentLoaded', (event) => { //ITEM1 let item1 = document.getElementById("item1"); let itemV1 = document.getElementById("itemV1"); var item = true; const dropDown = () => { let submenu = document.getElementById("submenu") if(item === true){ submenu.className = "submenuV2 animate__animated animate__fadeInDown animate__faster" item = false; }else if(item === false){ submenu.className = "hidden" item = true; } } item1.onclick = dropDown; itemV1.onclick = dropDown; //ITEM2 let item2 = document.getElementById("item2") let itemV2 = document.getElementById("itemV2") var prod = true; const deleteProduct = () => { if(prod === true){ rigth.innerHTML = ` <div class="rigth"> <h1 class="title">Sorry no product please reload page</h1> </div>` prod = false; }else if(prod === false){ location.reload(); prod = true; } } item2.onclick = deleteProduct; itemV2.onclick = deleteProduct; //ITEM3 var item3 = document.getElementById("item3"); var backgroundV2 = document.querySelector(".backgroundV2"); var background = document.getElementById("change") var aux = true; item3.addEventListener("mouseover", () => { if(aux === false){ background.className = "background"; aux = true; }else if(aux === true){ background.className = "backgroundV2"; aux = false; } }) //ITEM4 let item4 = document.getElementById("item4"); var auxSaludo = true; item4.addEventListener("click", () => { if(auxSaludo === true){ let text = document.createElement("p") text.setAttribute("class", "parrafo") text.innerText = "<NAME>!" let change = document.getElementById("change"); change.append(text); auxSaludo = false; } }) //MENU MOBILE let menu = document.querySelector(".menu") var submenuRes = document.getElementById("submenuRes") var sub = true; menu.addEventListener("click", () => { if(sub === true){ submenuRes.className = "resNoHidden animate__animated animate__fadeInLeft animate__faster"; sub = false; }else if(sub === false){ submenuRes.className = "resHidden"; sub = true; } }) //CARGA DE PRODUCTOS var product = { image:"../assets/images/sony.jpg", title:"Titulo del producto", marca:"Marca", price:"$12.345,00", } var products = []; for(let i = 0; i < 8; i++){ products.push(product) } let rigth = document.querySelector(".rigth"); var product; if(products && products.length > 0){ products.forEach((point,i)=> { product = document.createElement("div"); product.className = "card" rigth.append(product) return ( product.innerHTML = ` <div class="contentImage"> <img id="imageCard" src=${point.image} alt="photo"/> </div> <div> <h2>${point.title}</h2> <p>${point.marca}</p> <p>${point.price}</p> <button class="buy">COMPRAR</button> </div> ` ) }) } var buy = document.querySelectorAll(".buy"); buy.forEach((item, i) => { item.addEventListener("click", ()=>{ Swal.fire({ title: 'Genial!', text: 'Gracias por su compra', icon: 'success', confirmButtonText: 'Cool' }) }) }); });
directory_id: 12b4fbf530070e8a9e20ecfa5d678fdd5345726c
languages: [ "JavaScript" ]
num_files: 1
repo_language: JavaScript
repo_name: floresmatias0/Lyracons
revision_id: 9a9834c36d978d7e102bafe0eb004e385f0114d8
snapshot_id: 399276c835449578dc575027e631ab37f5f9c5a5
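As the record above shows, each `text` value packs an entire repository into one string: an optional `<repo_name>owner/repo` prefix, then files joined by `<file_sep>` markers, each marker sometimes followed by the file's repository-relative path. Below is a hedged sketch of splitting such a value back into per-file chunks, assuming exactly the marker layout visible in these preview rows (the helper name is mine):

```python
import re

def split_record(text: str):
    """Split one `text` value into (path, source) pairs.

    Assumes the layout visible in the preview rows: an optional
    '<repo_name>owner/repo' prefix, files separated by '<file_sep>',
    and each chunk optionally starting with its repo-relative path.
    """
    # Drop the leading repo-name tag, if present.
    text = re.sub(r"^<repo_name>[^<]*", "", text)
    files = []
    for chunk in text.split("<file_sep>"):
        chunk = chunk.strip()
        if not chunk:
            continue
        path = None
        parts = chunk.split(maxsplit=1)
        # A chunk like "/js/index.js document.addEventListener(..." carries
        # its path first; a bare chunk is source with no recorded path.
        if len(parts) == 2 and parts[0].startswith("/"):
            path, chunk = parts
        files.append((path, chunk))
    return files
```

Applied to the first row, `split_record(row["text"])` would yield a single entry with path `/js/index.js` and the JavaScript source as its body.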
Row 2
branch_name: refs/heads/master
text:
<repo_name>kendisk/v2ex<file_sep>/v2ex/babel/l10n/messages/zhHans.py # coding=utf-8 # Messages on top bar home = '首页' images = '图片上传' mentions = '提到我的' notes = '记事本' nearby = '附近' settings = '设置' backstage = '后台' signin = '登入' signup = '注册' signout = '登出' # Messages on / # Messages on /t/ # Messages on /go/ # Messages on /settings
directory_id: c05a763523e50f367348612f053eb3152968bb92
languages: [ "Python" ]
num_files: 1
repo_language: Python
repo_name: kendisk/v2ex
revision_id: cb0a7ded26ca72d57567e58300b79ee1b5677248
snapshot_id: c7760d6fbbe47cf39cb23c3b4c34c8d354c067bc
Row 3
branch_name: refs/heads/master
text:
<file_sep># coding = utf-8 import pygatt import logging import binascii import time # Many devices, e.g. Fitbit, use random addressing - this is required to connect. ADDRESS_TYPE = pygatt.BLEAddressType.random DEVICE_ADDRESS = "E6:DC:5B:F9:C5:F3" def indication_callBack(handle,value): print("indication,handle %d:%s" %(handle,value)) def pytest(address=DEVICE_ADDRESS,type=pygatt.BLEAddressType.public): try: adapter = pygatt.BGAPIBackend() adapter.start() print("===== adapter.scan() =====") devices = adapter.scan() for dev in devices: print("address : %s,name : %s " %(dev['address'],dev['name'])) print("===== adapter.connect() =====") devices = adapter.connect(address,address_type=type) print("address: " + str(devices._address)) print("handle :" + str(devices._handle)) print("rssi :" + str(devices.get_rssi())) print("====== device.discover_characteristics() =====") for uuid in devices.discover_characteristics().keys(): try: print("Read UUID %s (handle %d): %s" %(uuid,devices.get_handle(uuid),binascii.hexlify(devices.char_read(uuid)))) except: print("Read UUID %s (handle %d): %s" % (uuid,devices.get_handle(uuid),"!deny!")) while (True): time.sleep(0.1) finally: adapter.stop() if __name__ == "__main__": pytest()<file_sep># coding:utf-8 import flask from flask import request from flask import jsonify import tools ''' flask: web框架,可以通过flask提供的装饰器@server.route()将普通函数转换为服务 登录接口,需要传url、username、passwd ''' #创建一个服务,把当前这个python文件当做一个服务 server = flask.Flask(__name__) @server.route('/getProducts',methods=['get']) def getProducts(): # 获取通过url请求传参的数据 # username = request.values.get('name') # 获取url请求传的密码,明文 # pwd = request.values.get('pwd') d = { "name":"鱼香肉丝", "url":"http://pic37.nipic.com/20140113/8800276_184927469000_2.png", "price":"¥23.0" }; return jsonify([d]) if __name__ == '__main__': server.run(debug=True,port=8080,host='0.0.0.0') <file_sep># coding = utf-8 # 读取pcap文件,解析相应的信息,为了在记事本中显示的方便,把二进制的信息 import struct fpcap = open('ref_baidu.pcap', 'rb') ftxt = open('result.txt', 'w') string_data = fpcap.read() # pcap文件包头解析 pcap_header = {} pcap_header['magic_number'] = string_data[0:4] pcap_header['version_major'] = string_data[4:6] pcap_header['version_minor'] = string_data[6:8] pcap_header['thiszone'] = string_data[8:12] pcap_header['sigfigs'] = string_data[12:16] pcap_header['snaplen'] = string_data[16:20] pcap_header['linktype'] = string_data[20:24] # 把pacp文件头信息写入result.txt ftxt.write("Pcap文件的包头内容如下: \n") for key in ['magic_number', 'version_major', 'version_minor', 'thiszone', 'sigfigs', 'snaplen', 'linktype']: ftxt.write(key + " : " + repr(pcap_header[key]) + '\n') # pcap文件的数据包解析 step = 0 packet_num = 0 packet_data = [] pcap_packet_header = {} i = 24 while (i < len(string_data)): # 数据包头各个字段 pcap_packet_header['GMTtime'] = string_data[i:i + 4] pcap_packet_header['MicroTime'] = string_data[i + 4:i + 8] pcap_packet_header['caplen'] = string_data[i + 8:i + 12] pcap_packet_header['len'] = string_data[i + 12:i + 16] # 求出此包的包长len packet_len = struct.unpack('I', pcap_packet_header['len'])[0] # 写入此包数据 packet_data.append(string_data[i + 16:i + 16 + packet_len]) i = i + packet_len + 16 packet_num += 1 # 把pacp文件里的数据包信息写入result.txt for i in range(packet_num): # 先写每一包的包头 ftxt.write("这是第" + str(i) + "包数据的包头和数据:" + '\n') for key in ['GMTtime', 'MicroTime', 'caplen', 'len']: ftxt.write(key + ' : ' + repr(pcap_packet_header[key]) + '\n') # 再写数据部分 ftxt.write('此包的数据内容' + repr(packet_data[i]) + '\n') ftxt.write('一共有' + str(packet_num) + "包数据" + '\n') ftxt.close() fpcap.close() <file_sep># with 
open("android_ip.txt","r") as f: # for line in f.readlines(): # ipList = line.split('.') # if len(ipList) == 4: # try : # int(ipList[0]) # with open('result.txt', 'a+') as r: # result.txt里面存储的是批量解析后的结果 # r.write(line + "\n") # 显示有ip绑定的域名,用空格隔开 # except: # pass # file_list = [] #创建一个空列表 # # with open("result.txt","r") as f: # file_2 = f.readlines() # for file in file_2: # file_list.append(file) # out_file1 = set(file_list) #set()函数可以自动过滤掉重复元素 # last_out_file = list(out_file1) # for out in last_out_file: # ipList = out.split('.') # # with open('android_ip.txt', 'a+') as r: # result.txt里面存储的是批量解析后的结果 # r.write(out) # 显示有ip绑定的域名,用空格隔开 # # file = "testip1.txt" #打开需要去重的文件 # with open(file, "r", encoding="utf-8") as f: # file_2 = f.readlines() # for file in file_2: # file_list.append(file) # out_file1 = set(file_list) #set()函数可以自动过滤掉重复元素 # last_out_file = list(out_file1) # for out in last_out_file: # ipList = out.split('.') # if len(ipList) == 4: # with open('result.txt', 'a+') as r: # result.txt里面存储的是批量解析后的结果 # r.write(out) # 显示有ip绑定的域名,用空格隔开 # from _dbm import open with open("/Users/app005synergy/Documents/Python/域名解析/result.txt","r") as f: # words = "" file = f.readlines() for line in file: line = line.replace("\n","") + "," with open("/Users/app005synergy/Documents/Python/域名解析/testip1.txt", "a+") as r: r.write(line) print(line) print(file) # <file_sep> name = "I'm jacky" result = name.find('Im') print(result) # 列表推导式 n_list = [x ** 2 for x in range(10) if x%2 == 0] print(n_list) # 错误处理 import datetime as dt def read_date_from_file(filename): try: with open(filename) as file: in_date = file.read() in_date = in_date.strip() date = dt.datetime.strptime(in_date,'%Y-%m-%d') return date except ValueError as e: print('处理ValueError异常') except OSError as e: print('处理OSError异常') date = read_date_from_file('readme.txt') print('日期 = {0}'.format(date)) # 显示抛出异常 raise class MyException(Exception): def __init__(self,message): super().__init__(message) def read_date_from_file1(filename): try: file = open(filename) in_date = file.read() in_date = in_date.strip() date = dt.datetime.strptime(in_date,'%Y-%m-%d') return date except ValueError as e: raise MyException('不是有效日期') except FileNotFoundError as e: raise MyException('文件找不到') except OSError as e: raise MyException('文件无法打开或找不到') date1 = read_date_from_file1("readme.txt") print('日期{0}'.format(date)) <file_sep># coding:utf-8 import os fileName = "Ios_cz.txt" #input("输入文件路径:") isExist = os.path.exists(fileName) if isExist == True: print("文件存在") outPutString = "" with open(fileName,"r") as f: lines = f.readlines() # print(lines) for line in lines: # 去除尾部的换行符 line = line.rstrip("\n") # 去除分号 line = line.replace(";","") # 按等号 分割成两个数组 numberList = line.split("=") if len(numberList) != 2: continue # 得到数组后去除首尾的引号 key = numberList[0].strip().strip("\"").strip("'") value = numberList[1].strip().strip("\"").strip("'") value = value.replace('\\N','\\n') #将大写的转换成小写 value = value.replace("\"","\\\"") value = value.replace("</string>", "") value = value.replace("<string>", "") # 如果value值中间存在\n 或者存在" 就加转换符 print("%s , %s \n" %(key,value)) #输出文件 if len(value) > 0: outPutString += "\"%s\" = \"%s\";\n" %(key,value) f.close() # 输出文件 with open("Localizable.strings","a") as a: a.write(outPutString) a.close() <file_sep>import xlrd import os from pip._vendor.distlib.compat import raw_input from openpyxl import load_workbook,Workbook wb = Workbook() def Excel(): fileName = raw_input("请输入文件夹路径:") # 获取文件夹下所有文件 _getAllFileOfPath(fileName) def _getAllFileOfPath(fileName): ws = 
createExcel() index = 1 for root, dirs, files in os.walk(fileName): # print("当前目录路径",root) #当前目录路径 # print("当前路径下的所有子目录",dirs) # 当前路径下的所有子目录 # print("当前路径下所有非目录子文件",files) # 当前路径下所有非目录子文件 # 判断files 是否是excel for name in files: if name.endswith(".xls") or name.endswith(".xlsx"): print("是excel: %s ,文件路径为 %s" %(name,root)) try: restr = dealWithExcelFile(root + "/" + name,name) index += 1 print(restr) writeDataToExecl(ws, index, str(root[len(fileName):]),restr[0],restr[1] ) except: print("打开文件出错 excel: %s ,文件路径为 %s" %(name,root)) # Useopenpyxl(root + "/" + name) wb.save("./抓取结果.xlsx") def createExcel(): ws = wb.active ws.insert_rows(1) ws["A1"] = "供应商" ws["B1"] = "文件名" ws["C1"] = "总金额" return ws def writeDataToExecl(ws,index,supplier,fileName,totalPrice): ws.cell(index, 1).value = supplier ws.cell(index, 2).value = fileName ws.cell(index, 3).value = totalPrice def dealWithExcelFile(excelFIle,fileName): # 打开文件 data = xlrd.open_workbook(excelFIle) # 取出第一个表 table = data.sheet_by_index(0) # 记录金额对应的列 priceCol = 0 totalPrice = "0" for rowNum in range(table.nrows): # 读取行的值 rowValue = table.row_values(rowNum) for colNum in range(table.ncols): priceRow = str(rowValue[colNum]) # print(priceRow) if (priceRow.startswith("金额") and (not priceRow.startswith("金额大写"))) or (priceRow.startswith("含税金额")): priceCol = colNum if priceRow.startswith("合计") or priceRow.startswith("总计") or priceRow.lower().startswith("total"): # print("金额---- :",str(rowValue[priceCol])) if totalPrice == "0": totalPrice = str(rowValue[priceCol]) return (fileName,totalPrice) def Useopenpyxl(fileName): # 加载文件 wb = load_workbook(fileName) sheet_names = wb.sheetnames ws = wb[sheet_names[0]] # 最大行数 maxRow = ws.max_row # 最大列数 maxColumn = ws.max_column # 记录金额对应的列 priceCol = "" for rowNum in range(1,maxRow+1): #读取行的值 rowValue = ws[rowNum] # print(rowValue) for tumple in rowValue: # print(tumple) priceRow = tumple.value print(priceRow) if str(priceRow).startswith('金额'): if len(priceCol) == 0: priceCol = tumple.column_letter if (str(priceRow).startswith("合计") or str(priceRow).startswith("总计")): # print("金额 ---",rowValue[priceCol].value) priceCol += str(rowNum) break lastPrice = ws[priceCol] print("金额 ---" ,lastPrice.value) if __name__ == "__main__": Excel()<file_sep># coding: utf-8 # import socket # import csv # import requests # import threading # import queue # q = queue.Queue() # threading_num = 50 # # # with open("AndroidLink.txt", "r") as f: # filedata = f.readlines() # q.put(filedata) # f.close() # # def run(): # while not q.empty(): # filedata = q.get() # for i in filedata: # url = str(i).replace("\n",'') # print(url) # try: # myaddr = socket.getaddrinfo(url, 'http') # #print(str(myaddr[0][4][0])[:3]) # if str(myaddr[0][4][0])[:3] != str(172): # #print(url+" "+str(myaddr[0][4][0])) # #value_array.append(url).append(str(myaddr[0][4][0])) # with open("public_domain.csv","a") as fw: # writer = csv.writer(fw) # writer.writerow([url, str(myaddr[0][4][0])]) # except: # #print('can't open') # pass # #f.close() # # if __name__ =="__main__": # print('begin') # for i in range(threading_num): # t = threading.Thread(target=run) # t.start() # t.join() # print('over') import socket from urllib.parse import urlparse with open("AndroidLink.txt","r") as f: for line in f.readlines(): url = line.strip('\n') try: hostname = urlparse(url).netloc ip = socket.gethostbyname(hostname) print(ip) pass except Exception as e: with open('error.txt', 'a+') as ERR: # error.txt为没有IP绑定的域名 ERR.write(line.strip() + '\n') else: with open('result.txt', 'a+') as r: # 
result.txt里面存储的是批量解析后的结果 r.write(hostname + "\n") # 显示有ip绑定的域名,用空格隔开 r.write(ip+"\n") r.write('\n') <file_sep># encoding=utf-8 import os coding = ['收到数据','#OP code Error','run','executer','prn_threshold','offset','max_object_size','OTA','index','写数据','#cmd'] fileName = "Ios_cz.txt" #input("输入文件路径:") isExist = os.path.exists(fileName) with open(fileName, "r") as f: lines = f.readlines() outPutstring = "" for line in lines: # print(line) for cod in coding: if line.startswith(cod): outPutstring += (line + "\n") # print(outPutstring) with open("OutPut", "a") as a: a.write(outPutstring) a.close()<file_sep># coding = utf-8 import xlrd ''' #打开文件 data = xlrd.open_workbook('/Users/app005synergy/Desktop/ios用户评论汇总0225.xlsx') #查看工作表 sheetNames = data.sheet_names() print("sheets : " + str(sheetNames)) #通过文件名获取工作表,获取 Sheet1 table = data.sheet_by_name('Sheet1') # 或者使用第一个表 # table = data.sheet_by_index(0) #获取行数和列数 # 行 table.nrows # 列 table.ncols print("总行数 :" + str(table.nrows)) print("总列数 :" + str(table.ncols)) # 获取整行的值和整列的值 # 整行值 rowValues = table.row_values(0) # 整列值 colValues = table.col_values(1) print('整行值' + str(rowValues)) print('整列值' + str(colValues)) # 获取某一个单元格的值 cel_B2 = table.cell(1,1).value print("第2行B列的值" + cel_B2) ''' def read_xlrd(excelFIle): # 打开文件 data = xlrd.open_workbook(excelFIle) # 取出第一个表 table = data.sheet_by_index(0) for rowNum in range(table.nrows): # 读取行的值 rowValue = table.row_values(rowNum) for colNum in range(table.ncols): print(rowValue[colNum]) print("---------") if __name__ == '__main__': read_xlrd("/Users/app005synergy/Desktop/ios用户评论汇总0225.xlsx")<file_sep># coding=utf8 class Persion: name = [] p1 = Persion() p2 = Persion() p1.name.append("小美") print(p1.name) print(p2.name) print(Persion.name) # 单例模式 class Singleton(object): """ 单例模式 """ class _A(object): """ 真正干活类,对外隐藏 """ def __init__(self): pass def display(self): """返回当前实例的 ID,是全局唯一的""" return id(self) # 类变量,用于存储 _A的实例 _instance = None def __init__(self): if Singleton._instance is None: Singleton._instance = Singleton._A() def __getattr__(self, item): """所有的属性都应该从 Singleton._instance 获取""" return getattr(self._instance,item) if __name__ == '__main__': s1 = Singleton() s2 = Singleton() print(id(s1),s1.display()) print(id(s2),s2.display())
directory_id: 117db2482cbd3c70dfca105d582b5eb38fca670d
languages: [ "Python" ]
num_files: 11
repo_language: Python
repo_name: ZjcsHub/PythonProject
revision_id: 26371234b338bed73aa7fecb90b613a4144bb6a9
snapshot_id: af1a119cb5f0149ae17e349552a5f21742c3bc45
Row 4
branch_name: refs/heads/master
text:
<file_sep>using System; using System.Linq; using System.Windows; using ScjnUtilities; namespace AsistenciaLibreria { /// <summary> /// Interaction logic for Retardo.xaml /// </summary> public partial class Retardo : Window { Usuarios usuario; public Retardo(Usuarios usuario) { InitializeComponent(); this.usuario = usuario; } private void Window_Loaded(object sender, RoutedEventArgs e) { } private void BtnCancelar_Click(object sender, RoutedEventArgs e) { DialogResult = false; this.Close(); } private void BtnAceptar_Click(object sender, RoutedEventArgs e) { TxtObservacion.Text = VerificationUtilities.TextBoxStringValidation(TxtObservacion.Text); if (TxtObservacion.Text.Length < 10) { MessageBox.Show("Debes ingresar alguna observación de lo contrario oprime el botón cancelar"); return; } AsistenciaModel model = new AsistenciaModel(); int idAsistencia = model.DoUserCheckInToday(usuario); bool complete = model.SetObservacionEntrada(usuario, idAsistencia, TxtObservacion.Text); if (complete) { DialogResult = true; this.Close(); } else { MessageBox.Show("No se pudo completar el registro de las observaciones, favor de volverlo a intentar"); } } } } <file_sep>using System; using System.Configuration; using System.Data; using System.Data.OleDb; using System.Linq; using ScjnUtilities; namespace AsistenciaLibreria { public class AsistenciaModel { private readonly string connectionString = ConfigurationManager.ConnectionStrings["Base"].ConnectionString; public Usuarios GetCurrentUsuario(string username) { OleDbCommand cmd; OleDbDataReader reader; OleDbConnection connection = new OleDbConnection(connectionString); Usuarios usuario = null; try { connection.Open(); string sSql = "SELECT P.IdPersonal, P.NombreCompleto, L.IdLibreria, L.Nombre " + "FROM C_Personal P INNER JOIN C_Libreria L ON P.IdLiberia = L.IdLibreria " + "WHERE Usuario = @Usuario"; cmd = new OleDbCommand(sSql, connection); cmd.Parameters.AddWithValue("@Usuario", username); reader = cmd.ExecuteReader(); while (reader.Read()) { usuario = new Usuarios(); usuario.IdUsuario = Convert.ToInt32(reader["IdPersonal"]); usuario.Usuario = reader["NombreCompleto"].ToString(); usuario.IdLibreria = Convert.ToInt32(reader["IdLibreria"]); usuario.Libreria = reader["Nombre"].ToString(); } } catch (OleDbException ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AccesoModel", "BusquedaLatinos"); } catch (Exception ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AccesoModel", "BusquedaLatinos"); } finally { connection.Close(); } return usuario; } /// <summary> /// Verifica si el usuario registro su entrada previamente /// </summary> /// <param name="usuario"></param> /// <returns></returns> public int DoUserCheckInToday(Usuarios usuario) { int doCheckIn = -3; OleDbCommand cmd; OleDbDataReader reader; OleDbConnection connection = new OleDbConnection(connectionString); try { connection.Open(); string sSql = "SELECT * FROM Asistencia WHERE FechaInt = @Fecha AND IdPersonal = @Id"; cmd = new OleDbCommand(sSql, connection); cmd.Parameters.AddWithValue("@Fecha", DateTimeUtilities.DateToInt(DateTime.Now)); cmd.Parameters.AddWithValue("@Id", usuario.IdUsuario); reader = cmd.ExecuteReader(); while (reader.Read()) { doCheckIn = Convert.ToInt32(reader["IdAsistencia"]); } } catch (OleDbException ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; 
ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } catch (Exception ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } finally { connection.Close(); } return doCheckIn; } public bool SetCheckIn(Usuarios usuario) { bool checkInComplete = false; OleDbConnection connection = new OleDbConnection(connectionString); OleDbDataAdapter dataAdapter; DataSet dataSet = new DataSet(); DataRow dr; try { int id = DataBaseUtilities.GetNextIdForUse("Asistencia", "IdAsistencia", connection); if (id != 0) { dataAdapter = new OleDbDataAdapter(); dataAdapter.SelectCommand = new OleDbCommand("SELECT * FROM Asistencia WHERE IdAsistencia = 0", connection); dataAdapter.Fill(dataSet, "Asistencia"); dr = dataSet.Tables["Asistencia"].NewRow(); dr["IdAsistencia"] = id; dr["IdPersonal"] = usuario.IdUsuario; dr["IdLibreria"] = usuario.IdLibreria; dr["Fecha"] = DateTime.Now; dr["FechaInt"] = DateTimeUtilities.DateToInt(DateTime.Now); dataSet.Tables["Asistencia"].Rows.Add(dr); dataAdapter.InsertCommand = connection.CreateCommand(); dataAdapter.InsertCommand.CommandText = "INSERT INTO Asistencia(IdAsistencia,IdPersonal,IdLibreria,Fecha,HoraEntrada,FechaInt) " + "VALUES (@IdAsistencia,@IdPersonal,@IdLibreria,@Fecha,TIME(),@FechaInt)"; dataAdapter.InsertCommand.Parameters.Add("@IdAsistencia", OleDbType.Numeric, 0, "IdAsistencia"); dataAdapter.InsertCommand.Parameters.Add("@IdPersonal", OleDbType.Numeric, 0, "IdPersonal"); dataAdapter.InsertCommand.Parameters.Add("@IdLibreria", OleDbType.Numeric, 0, "IdLibreria"); dataAdapter.InsertCommand.Parameters.Add("@Fecha", OleDbType.Date, 0, "Fecha"); dataAdapter.InsertCommand.Parameters.Add("@FechaInt", OleDbType.Numeric, 0, "FechaInt"); dataAdapter.Update(dataSet, "Asistencia"); dataSet.Dispose(); dataAdapter.Dispose(); connection.Close(); checkInComplete = true; } } catch (OleDbException ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } catch (Exception ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } finally { connection.Close(); } return checkInComplete; } /// <summary> /// Actualiza la hora de entrada de un usuario /// </summary> /// <param name="usuario"></param> /// <param name="idAsistencia"></param> /// <returns></returns> public bool UpdateCheckIn(Usuarios usuario, int idAsistencia) { bool updateComplete = false; OleDbConnection connection = new OleDbConnection(connectionString); OleDbDataAdapter dataAdapter = new OleDbDataAdapter(); DataSet dataSet = new DataSet(); DataRow dr; try { string sqlCadena = "SELECT * FROM Asistencia WHERE IdAsistencia = @Asist"; dataAdapter.SelectCommand = new OleDbCommand(sqlCadena, connection); dataAdapter.SelectCommand.Parameters.AddWithValue("@Asist", idAsistencia); dataAdapter.Fill(dataSet, "Asistencia"); dr = dataSet.Tables["Asistencia"].Rows[0]; dr.BeginEdit(); dr["IdAsistencia"] = idAsistencia; dr.EndEdit(); dataAdapter.UpdateCommand = connection.CreateCommand(); dataAdapter.UpdateCommand.CommandText = "UPDATE Asistencia SET HoraEntrada = TIME() WHERE IdAsistencia = @IdAsistencia"; dataAdapter.UpdateCommand.Parameters.Add("@IdAsistencia", OleDbType.Numeric, 0, "IdAsistencia"); 
dataAdapter.Update(dataSet, "Asistencia"); dataSet.Dispose(); dataAdapter.Dispose(); connection.Close(); updateComplete = true; } catch (OleDbException ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } catch (Exception ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } finally { connection.Close(); } return updateComplete; } public bool SetObservacionEntrada(Usuarios usuario, int idAsistencia, string observacion) { bool updateComplete = false; OleDbConnection connection = new OleDbConnection(connectionString); OleDbDataAdapter dataAdapter = new OleDbDataAdapter(); DataSet dataSet = new DataSet(); DataRow dr; try { string sqlCadena = "SELECT * FROM Asistencia WHERE IdAsistencia = @Asist"; dataAdapter.SelectCommand = new OleDbCommand(sqlCadena, connection); dataAdapter.SelectCommand.Parameters.AddWithValue("@Asist", idAsistencia); dataAdapter.Fill(dataSet, "Asistencia"); dr = dataSet.Tables["Asistencia"].Rows[0]; dr.BeginEdit(); dr["ObservacionesUser"] = observacion; dr.EndEdit(); dataAdapter.UpdateCommand = connection.CreateCommand(); dataAdapter.UpdateCommand.CommandText = "UPDATE Asistencia SET ObservacionesUser = @ObservacionesUser WHERE IdAsistencia = @IdAsistencia"; dataAdapter.UpdateCommand.Parameters.Add("@ObservacionesUser", OleDbType.VarChar, 0, "ObservacionesUser"); dataAdapter.UpdateCommand.Parameters.Add("@IdAsistencia", OleDbType.Numeric, 0, "IdAsistencia"); dataAdapter.Update(dataSet, "Asistencia"); dataSet.Dispose(); dataAdapter.Dispose(); connection.Close(); updateComplete = true; } catch (OleDbException ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } catch (Exception ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } finally { connection.Close(); } return updateComplete; } public bool SetCheckOut(Usuarios usuario) { bool updateComplete = false; OleDbConnection connection = new OleDbConnection(connectionString); OleDbDataAdapter dataAdapter = new OleDbDataAdapter(); DataSet dataSet = new DataSet(); DataRow dr; try { string sqlCadena = "SELECT * FROM Asistencia WHERE FechaInt = @Fecha AND IdPersonal = @IdPersonal"; dataAdapter.SelectCommand = new OleDbCommand(sqlCadena, connection); dataAdapter.SelectCommand.Parameters.AddWithValue("@Fecha", DateTimeUtilities.DateToInt(DateTime.Now)); dataAdapter.SelectCommand.Parameters.AddWithValue("@IdPersonal", usuario.IdUsuario); dataAdapter.Fill(dataSet, "Asistencia"); dr = dataSet.Tables["Asistencia"].Rows[0]; dr.BeginEdit(); dr["IdPersonal"] = usuario.IdUsuario; dr.EndEdit(); dataAdapter.UpdateCommand = connection.CreateCommand(); dataAdapter.UpdateCommand.CommandText = "UPDATE Asistencia SET HoraSalida = TIME() WHERE FechaInt = @Fecha AND IdPersonal = @IdPersonal"; dataAdapter.UpdateCommand.Parameters.Add("@Fecha", OleDbType.Numeric, 0, "FechaInt"); dataAdapter.UpdateCommand.Parameters.Add("@IdPersonal", OleDbType.Numeric, 0, "IdPersonal"); dataAdapter.Update(dataSet, "Asistencia"); dataSet.Dispose(); dataAdapter.Dispose(); connection.Close(); updateComplete = true; } catch (OleDbException ex) { 
string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } catch (Exception ex) { string methodName = System.Reflection.MethodBase.GetCurrentMethod().Name; ErrorUtilities.SetNewErrorMessage(ex, methodName + " Exception,AsistenciaModel", "AsistenciaLibreria"); } finally { connection.Close(); } return updateComplete; } } }<file_sep>using System; using System.Linq; using System.Windows; namespace AsistenciaLibreria { /// <summary> /// Interaction logic for MainWindow.xaml /// </summary> public partial class MainWindow : Window { private string username; private Usuarios usuario; TimeSpan tolerancia = new TimeSpan(9, 16, 0); public MainWindow() { InitializeComponent(); } private void Window_Loaded(object sender, RoutedEventArgs e) { username = Environment.UserName; usuario = new AsistenciaModel().GetCurrentUsuario(username); if (usuario != null) { LblLibreria.Content = usuario.Libreria; LblUsuario.Content = usuario.Usuario; } else { MessageBox.Show("No estas registrado en el sistema, favor de comunicarte con tu superior jerárquico"); return; } } private void Button_Click(object sender, RoutedEventArgs e) { DateTime hora = DateTime.Now; AsistenciaModel model = new AsistenciaModel(); int idAsistencia = model.DoUserCheckInToday(usuario); if (idAsistencia != -3) { MessageBoxResult result = MessageBox.Show("Ya habías registrado la hora de entrada del día de hoy. ¿Deseas sustituirla por la hora actual?", "Atención", MessageBoxButton.YesNo, MessageBoxImage.Question); if (result == MessageBoxResult.Yes) { model.UpdateCheckIn(usuario, idAsistencia); } } else { bool chekInComplete = model.SetCheckIn(usuario); if (!chekInComplete) { MessageBox.Show("No se pudo completar el registro, favor de volver a intentarlo"); return; } } if (hora.TimeOfDay > tolerancia) { Retardo retardo = new Retardo(usuario) { Owner = this }; retardo.ShowDialog(); } MessageBox.Show(String.Format("Hora de entrada registrada: {0}", hora)); this.Close(); } private void BtnSalida_Click(object sender, RoutedEventArgs e) { AsistenciaModel model = new AsistenciaModel(); int idAsistencia = model.DoUserCheckInToday(usuario); if (idAsistencia == -3) { MessageBox.Show("Para poder registrar la salida primero debes de registrar la entrada"); return; } model.SetCheckOut(usuario); MessageBox.Show(String.Format("Hora de salida registrada: {0}", DateTime.Now)); this.Close(); } } } <file_sep>using System; using System.Linq; namespace AsistenciaLibreria { public class Usuarios { private int idUsuario; private string usuario; private int idLibreria; private string libreria; public int IdUsuario { get { return this.idUsuario; } set { this.idUsuario = value; } } public string Usuario { get { return this.usuario; } set { this.usuario = value; } } public int IdLibreria { get { return this.idLibreria; } set { this.idLibreria = value; } } public string Libreria { get { return this.libreria; } set { this.libreria = value; } } } }
directory_id: c74152d9154a5426cbc4d78849799d170418e75f
languages: [ "C#" ]
num_files: 4
repo_language: C#
repo_name: danilozacyac/AsistenciaLibreria
revision_id: 87cd3074efb68a368af0df6af6da00de5d8098e2
snapshot_id: bbe2741dbcb9b6d07e17671ddc3d56d841cafb30
Row 5
branch_name: refs/heads/master
text:
<repo_name>ukritTKK/Newsworm<file_sep>/README.md # Newsworm A Medium-like web application but focuses about News using React, Redux, NodeJs, Express, MongoDB <file_sep>/app/routes/index.js const newsRoutes = require('./news_routes') module.exports = function(app, db) { newsRoutes(app, db) } <file_sep>/app/routes/news_routes.js let ObjectID = require('mongodb').ObjectID module.exports = function(app, db) { app.get('/news/:id', (req, res) => { const id = req.params.id const newsItem = { '_id' : new ObjectID(id) } db.collection('news').findOne(newsItem, (err, item) => { if (err) { res.send({ 'error' : 'An error has occurred.'}) } else { res.send(item) } }) }) app.post('/news', (req, res) => { const news = { text: req.body.body, title: req.body.title } db.collection('news').insert(news, (err, result) => { if (err) { res.send({ 'error' : 'An error has occurred.' }) } else { res.send(result.ops[0]) } }) }) app.delete('/news/:id', (req, res) => { const id = req.params.id const newsItem = { '_id' : new ObjectID(id) } db.collection('news').remove(newsItem, (err, item) => { if (err) { res.send({ 'error' : 'An error has occurred.'}) } else { res.send('News ' + id + ' has been removed.') } }) }) app.put('/news/:id', (req, res) => { const id = req.params.id const newsItem = { '_id' : new ObjectID(id) } const updatedContent = { text: req.body.body, title: req.body.title } db.collection('news').update(newsItem, updatedContent, (err, result) => { if (err) { res.send({ 'error' : 'An error has occurred.'}) } else { res.send(updatedContent) console.log('News ' + id + ' has been edited.') } }) }) } <file_sep>/client/src/components/home/index.js import React from 'react' import './style.css' const Home = props => { return ( <div> <div className='header-flex-container'> <div className='header-name'> <p className='header-logo' onClick={() => props.goToHome()}>Newsworm</p> </div> <div className='signup-login'> <button className='header-signup' onClick={() => props.goToSignUp()}>Sign Up</button> <button className='header-login' onClick={() => props.goToLogIn()}>Log In</button> </div> </div> <div className='body-flex-container'>heyyyyyyyyyyyyyyyyyy</div> </div> ) } export default Home
directory_id: 34c07acc50fdfe7299df4374769292032894143b
languages: [ "Markdown", "JavaScript" ]
num_files: 4
repo_language: Markdown
repo_name: ukritTKK/Newsworm
revision_id: ec293a34d6ca065452b5d568de6cb14768a86b9a
snapshot_id: 0f0360b14478f5b981e40613290226249ce595f7
Row 6
branch_name: refs/heads/master
text:
<repo_name>itshorsey/player<file_sep>/App.js import React from "react"; import { StyleSheet, Text, View, WebView } from "react-native"; import { createStackNavigator, createAppContainer } from "react-navigation"; import Homefeed from "./Screens/Homefeed"; import CollectionScreen from "./Screens/CollectionScreen"; import styled from "styled-components"; class App extends React.Component { render() { return ( <View> <createAppContainer /> </View> ); } } const AppNavigator = createStackNavigator( { Home: Homefeed, Details: CollectionScreen }, { initialRouteName: "Home" } ); export default createAppContainer(AppNavigator); const MainView = styled.View` z-index: -1; `; const PlayerView = styled.View` z-index: 5; `; <file_sep>/Components/Collection.js import React from "react"; import styled from "styled-components"; const Collection = props => ( <Container> <Cover> <Title>{props.title}</Title> <Count>{props.count}</Count> </Cover> </Container> ); export default Collection; const Container = styled.View` background: #eee; height: 350px; width: 100%; flex: 1; margin-bottom: 2px; `; const Cover = styled.View` margin-top: 50px; `; const Title = styled.Text``; const Count = styled.Text``; <file_sep>/data/Data.js const Data = [ { title: "Walking in the woods", count: "6 Mixes", image: "", mixes: [] }, { title: "Yes I did Tapes", count: "4 Mixes", image: "", mixes: [] }, { title: "Drawing Alone at Night", count: "6 Mixes", image: "", mixes: [] }, { title: "Cooking on Sunday", count: "2 Mixes", image: "", mixes: [] } ]; export default Data;
directory_id: 8dece2d277c1f93ebfe55ec4f07173ba70c98457
languages: [ "JavaScript" ]
num_files: 3
repo_language: JavaScript
repo_name: itshorsey/player
revision_id: 6a636d3fb853ecad4cde383bd35d29ba8afa7235
snapshot_id: 7970d3f158a44c541ae0bf192a56e496b09e8f18
Row 7
branch_name: refs/heads/master
text:
<repo_name>TomHawkins11/Networking-Lobby<file_sep>/Client/Client - Lobby Connection/Assets/LanGamesPanel.cs using System.Collections; using System.Collections.Generic; using UnityEngine; public class LanGamesPanel : MonoBehaviour { public List<GameObject> buttonList; public GameObject GameButton; // Start is called before the first frame update void Start() { } // Update is called once per frame void Update() { } public void AddButton(string ipAddress, string gameName) { GameObject newButton = Instantiate(GameButton); newButton.transform.SetParent(gameObject.transform, false); newButton.GetComponent<GameButton>().gameIP = ipAddress; newButton.GetComponent<GameButton>().gameInfo = gameName; } } <file_sep>/Client/Client - Lobby Connection/Assets/UIManager.cs using System.Collections; using System.Collections.Generic; using UnityEngine; public class UIManager : MonoBehaviour { // Start is called before the first frame update public List<GameObject> gameButtonList; public GameObject GameButton; public GameObject lanGamesPanel; // Start is called before the first frame update void Start() { } // Update is called once per frame void Update() { } public void AddLanGameButton(string ipAddress, string gameName) { GameObject newButton = Instantiate(GameButton); newButton.transform.SetParent(lanGamesPanel.transform, false); newButton.GetComponent<GameButton>().gameIP = ipAddress; newButton.GetComponent<GameButton>().gameInfo = gameName; } public void RemoveLanGames() { foreach (GameObject button in GameObject.FindGameObjectsWithTag("LanGameButton")) { Destroy(button); } } public void DisablePanel(GameObject panel) { panel.SetActive(false); } public void EnablePanel(GameObject panel) { panel.SetActive(true); } } <file_sep>/Client/Client - Lobby Connection/Assets/GameButton.cs using System.Net.Mime; using System.Collections; using System.Collections.Generic; using UnityEngine; using UnityEngine.UI; public class GameButton: MonoBehaviour { public string gameIP = ""; public string gameInfo = ""; public GameObject textIP; public GameObject textInfo; CustomNetworkManager cnm; // Start is called before the first frame update void Start() { textIP = gameObject.transform.Find("GameIP").gameObject; textInfo = gameObject.transform.Find("GameName").gameObject; textInfo.GetComponent<Text>().text = gameInfo; textIP.GetComponent<Text>().text = gameIP; cnm = GameObject.Find("Client").GetComponent<CustomNetworkManager>(); } public void ConnectToGame() { cnm.JoinGame(gameIP); } // Update is called once per frame } <file_sep>/Server/Server - Connection Lobby/Assets/Scripts/NewCustomNetworkDiscovery.cs using System; using System.Collections; using System.Collections.Generic; using UnityEngine; using UnityEngine.Networking; public class NewCustomNetworkDiscovery : NetworkDiscovery { public bool client = false; public bool server = true; // Start is called before the first frame update void Start() { base.Initialize(); if (server) { LaunchAsServer(); } else if (client) { LaunchAsClient(); } else { LaunchAsServer(); } } private void LaunchAsClient() { base.StartAsClient(); } private void LaunchAsServer() { base.StartAsServer(); } // Update is called once per frame } <file_sep>/Client/Client - Lobby Connection/Assets/CustomNetworkDiscovery.cs using System; using System.Collections; using System.Collections.Generic; using UnityEngine; using UnityEngine.Networking; using UnityEngine.UI; public class CustomNetworkDiscovery : NetworkDiscovery { // Start is called before the first frame update public bool client; public 
GameObject fromText; public GameObject dataText; public GameObject uiManager; public void Start() { fromText = GameObject.Find("GameName"); dataText = GameObject.Find("GameIP"); uiManager = GameObject.Find("UIManager"); } public void StartHost() { StopBroadcast(); Initialize(); StartAsServer(); } // Update is called once per frame public void StartClient() { StopBroadcast(); Initialize(); StartAsClient(); RefreshAvailableGames(); } public void RefreshAvailableGames() { uiManager.GetComponent<UIManager>().RemoveLanGames(); foreach (var value in broadcastsReceived.Values) { string ip = value.serverAddress.Substring(7); string name = System.Text.Encoding.Unicode.GetString(value.broadcastData); name = name.Substring(name.IndexOf(":") + 1, name.IndexOf(":7") - name.IndexOf(":") - 1); Debug.Log(value.serverAddress); Debug.Log(System.Text.Encoding.Unicode.GetString(value.broadcastData)); uiManager.GetComponent<UIManager>().AddLanGameButton(ip, name); } } } <file_sep>/Server/Server - Connection Lobby/Assets/Scripts/testScript.cs using System; using System.Collections; using System.Collections.Generic; using UnityEngine; using UnityEngine.Networking; public class testScript : MonoBehaviour { // Start is called before the first frame update NewCustomNetworkDiscovery nd; void Start() { nd = gameObject.GetComponent<NewCustomNetworkDiscovery>(); StartCoroutine(OutputData()); } // Update is called once per frame void Update() { } IEnumerator OutputData() { while (true) { Debug.Log(nd.broadcastData); Debug.Log(nd.broadcastKey); Debug.Log(nd.broadcastPort); yield return new WaitForSeconds(5f); } } }
directory_id: ef5dc75977d464c3d649cd69690820888fe1fe1d
languages: [ "C#" ]
num_files: 6
repo_language: C#
repo_name: TomHawkins11/Networking-Lobby
revision_id: 7ee26960846066ab812d4ace93121a8491c91ee6
snapshot_id: 2427b65816f6b7db147bc76a23595ff7c904c9a3
Row 8
branch_name: refs/heads/master
text:
<repo_name>OscarDR89/Utest_Screenplay<file_sep>/src/main/java/model/UtestData.java package model; public class UtestData { private String Nombre; private String Apellidos; private String Correoelectronico; private String Dia; private String Mes; private String Año; private String Ciudad; private String CodigoPostal; private String Pais; private String Contraseña; private String ConfirmarContraseña; private String NombresUsuario; private String ContraseñaInicio; public String getNombre() { return Nombre; } public void setNombre(String nombre) { Nombre = nombre; } public String getApellidos() { return Apellidos; } public void setApellidos(String apellidos) { Apellidos = apellidos; } public String getCorreoelectronico() { return Correoelectronico; } public void setCorreoelectronico(String correoelectronico) { Correoelectronico = correoelectronico; } public String getDia() { return Dia; } public void setDia(String dia) { Dia = dia; } public String getMes() { return Mes; } public void setMes(String mes) { Mes = mes; } public String getAño() { return Año; } public void setAño(String año) { Año = año; } public String getCiudad() { return Ciudad; } public void setCiudad(String ciudad) { Ciudad = ciudad; } public String getCodigoPostal() { return CodigoPostal; } public void setCodigoPostal(String codigoPostal) { CodigoPostal = codigoPostal; } public String getPais() { return Pais; } public void setPais(String pais) { Pais = pais; } public String getContraseña() { return Contraseña; } public void setContraseña(String contraseña) { Contraseña = contraseña; } public String getConfirmarContraseña() { return ConfirmarContraseña; } public void setConfirmarContraseña(String confirmarContraseña) { ConfirmarContraseña = confirmarContraseña; } public String getNombresUsuario() { return NombresUsuario; } public void setNombresUsuario(String nombresUsuario) { NombresUsuario = nombresUsuario; } public String getContraseñaInicio() { return ContraseñaInicio; } public void setContraseñaInicio(String contraseñaInicio) { ContraseñaInicio = contraseñaInicio; } } <file_sep>/settings.gradle rootProject.name = 'com.utest.prueba1'
directory_id: 5769c529894885710c31b4231729fbfd40116d25
languages: [ "Java", "Gradle" ]
num_files: 2
repo_language: Java
repo_name: OscarDR89/Utest_Screenplay
revision_id: ce01c92308f0b7c906cd4282c87f5ef6109d81d9
snapshot_id: 6b4223b9f21c967e09b1d284261fa8555e348e84
Row 9
branch_name: refs/heads/master
text:
<repo_name>MattGenaro/NASA_Space_Apps_Challenge_2020<file_sep>/Aurora_AlachuaRegression.py # -*- coding: utf-8 -*- import pandas as pd import numpy as np import matplotlib.pyplot as plt df = pd.read_csv('/Assets/us-counties.csv') df0 = df[(df["county"] == 'Alachua') & (df["state"] == 'Florida')] import datetime as dt df0['date'] = pd.to_datetime(df0['date']) df0['date'] = df0['date'].map(dt.datetime.toordinal) X = df0['date'].values.reshape(-1, 1) y = df0['cases'].values #Regressão linear from sklearn import linear_model regressor1 = LinearRegression() regressor1.fit(X, y) score1 = regressor1.score(X, y) fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(X, y, color='purple') plt.plot(X, regressor1.predict(X), color = 'red') ax.set(xlabel="Date", ylabel="Cases", title="Linear Regression for Daily Total Cases in Alachua County") plt.show() #Regressão polinomal from sklearn.preprocessing import PolynomialFeatures poly = PolynomialFeatures(degree = 3) X_poly = poly.fit_transform(X) regressor2 = LinearRegression() regressor2.fit(X_poly, y) score2 = regressor2.score(X_poly, y) import matplotlib.ticker as ticker fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(X, y, color='purple') plt.plot(X, regressor2.predict(poly.fit_transform(X)), color = 'red') ax.set(xlabel="Date", ylabel="Cases", title="Polinomial Regression for Daily Total Cases in Alachua County") plt.show() <file_sep>/Aurora_MiamidadeRegression.py # -*- coding: utf-8 -*- import pandas as pd import numpy as np import matplotlib.pyplot as plt df = pd.read_csv('/Assets/us-counties.csv', engine='python') df4 = df[(df["county"] == 'Miami-Dade') & (df["state"] == 'Florida')] import datetime as dt df4['date'] = pd.to_datetime(df4['date']) df4['date'] = df4['date'].map(dt.datetime.toordinal) X = df4['date'].values.reshape(-1, 1) y = df4['cases'].values #Regressão linear from sklearn import linear_model regressor1 = LinearRegression() regressor1.fit(X, y) score1 = regressor1.score(X, y) fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(X, y, color='purple') plt.plot(X, regressor1.predict(X), color = 'red') ax.set(xlabel="Date", ylabel="Cases", title="Linear Regression for Daily Total Cases in Miami-Dade County") plt.show() #Regressão polinomal from sklearn.preprocessing import PolynomialFeatures poly = PolynomialFeatures(degree = 3) X_poly = poly.fit_transform(X) regressor2 = LinearRegression() regressor2.fit(X_poly, y) score2 = regressor2.score(X_poly, y) fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(X, y, color='purple') plt.plot(X, regressor2.predict(poly.fit_transform(X)), color = 'red') ax.set(xlabel="Date", ylabel="Cases", title="Polinomial Regression for Daily Total Cases in Miami-Dade County") plt.show() <file_sep>/README.md Criado no hackathon Space Apps, pela NASA, em Maio de 2020, o Projeto Aurora é uma plataforma de dados que ajuda as organizações governamentais a reduzir o impacto da velocidade de disseminação do COVID-19, o qual, hoje, afeta mais de 6 milhões de pessoas em todo o mundo. Utilizando dados de credibilidade/confiáveis, conseguimos construir modelos preditivos que alcançam uma acurácia de até 98% na previsão de casos por COVID-19 confirmados. Ainda, utilizando recursos oferecidos pela NASA, o Projeto Aurora busca encontrar os fatores ambientais, socioeconômicos e humanos mais relevantes que poderiam explicar como se dá a evolução do COVID-19 em determinada região. 
Desta forma, podendo auxiliar as tomadas de decisões governamentais, fornecendo as informações mais importantes da situação e auxiliando com medidas de controle, impactando diretamente na economia de tempo e dinheiro, com o objetivo final de salvar vidas. <file_sep>/Aurora_CasesByCounties.py # -*- coding: utf-8 -*- import pandas as pd import numpy as np import seaborn as sns import matplotlib.pyplot as plt df = pd.read_csv('/Assets/us-counties.csv') df0 = df[(df["county"] == 'Alachua') & (df["state"] == 'Florida')] df00 = df0[['date','cases']] df00.to_csv('/Assets/Cases/alachuacases.csv', index = False) df1 = df[(df["county"] == 'Broward') & (df["state"] == 'Florida')] df01 = df1[['date','cases']] df01.to_csv('/Assets/Cases/browardcases.csv', index = False) df2 = df[(df["county"] == 'Hillsborough') & (df["state"] == 'Florida')] df02 = df2[['date','cases']] df02.to_csv('/Assets/Cases/hillsboroughcases.csv', index = False) df3 = df[(df["county"] == 'Leon') & (df["state"] == 'Florida')] df03 = df3[['date','cases']] df03.to_csv('/Assets/Cases/leoncases.csv', index = False) df4 = df[(df["county"] == 'Miami-Dade') & (df["state"] == 'Florida')] df04 = df4[['date','cases']] df04.to_csv('/Assets/Cases/miamidadecases.csv', index = False) df6 = df[(df["county"] == 'Monroe') & (df["state"] == 'Florida')] df05 = df6[['date','cases']] df05.to_csv('/Assets/Cases/monroecases.csv', index = False) df7 = df[(df["county"] == 'Orange') & (df["state"] == 'Florida')] df06 = df7[['date','cases']] df06.to_csv('/Assets/Cases/orangecases.csv', index = False) df8 = df[(df["county"] == 'Osceola') & (df["state"] == 'Florida')] df07 = df8[['date','cases']] df07.to_csv('/Assets/Cases/osceolacases.csv', index = False) df9 = df[(df["county"] == 'Palm Beach') & (df["state"] == 'Florida')] df08 = df9[['date','cases']] df08.to_csv('/Assets/Cases/palmbeachcases.csv', index = False) df10 = df[(df["county"] == 'Pinellas') & (df["state"] == 'Florida')] df09 = df10[['date','cases']] df09.to_csv('C/Assets/Cases/pinellascases.csv', index = False) frames = [df0, df1, df2, df3, df4, df5, df6, df7, df8, df9, df10] data = pd.concat(frames) #ALACHUA COUNTY fig, ax = plt.subplots(figsize=(10, 10)) # Add x-axis and y-axis ax.scatter(df0.index.values, df0['cases'], color='purple') # Set title and labels for axes ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Alachua County from March to May 2020") # Rotate tick marks on x-axis plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('alachuacases.png') plt.show() #BROWARD COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df1.index.values, df1['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Broward County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('browardcases.png') plt.show() #HILLSBOROUGH COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df2.index.values, df2['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Hillsborough County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('hillsboroughcases.png') plt.show() #LEON COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df3.index.values, df3['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Leon County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('leoncases.png') plt.show() #MIAMI-DADE COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df4.index.values, df4['cases'], 
color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Miami-Dade County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('miamidadecases.png') plt.show() #MONROE COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df6.index.values, df6['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Monroe County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('monroecases.png') plt.show() #ORANGE COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df7.index.values, df7['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Orange County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('orangecases.png') plt.show() #OSCEOLA COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df8.index.values, df8['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Osceola County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('osceolacases.png') plt.show() #PALM BEACH COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df9.index.values, df9['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Palm Beach County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('palmbeachcases.png') plt.show() #PINELLAS COUNTY fig, ax = plt.subplots(figsize=(10, 10)) ax.scatter(df10.index.values, df10['cases'], color='purple') ax.set(xlabel="Date", ylabel="Cases", title="Daily Total Cases, Pinellas County from March to May 2020") plt.setp(ax.get_xticklabels(), rotation=45) plt.savefig('pinellascases.png') plt.show()
directory_id: 00b1fefe18c90adfbf23f58b14199514b4d147b0
languages: [ "Markdown", "Python" ]
num_files: 4
repo_language: Python
repo_name: MattGenaro/NASA_Space_Apps_Challenge_2020
revision_id: 5c0103e37c7f774c8a8195fe27c8990800e00383
snapshot_id: 84bfc40dfa542706b6b129ed39da474138b1ea09
Row 10
branch_name: refs/heads/master
text:
<repo_name>OpenMageModuleFostering/fianet_sceau<file_sep>/app/code/community/Fianet/Sceau/Model/Observer.php <?php class Fianet_Sceau_Model_Observer extends Varien_Event_Observer { CONST GRID_NAME_IN_LAYOUT = 'sales_order.grid'; CONST MASSACTION_BLOCK_NAME_IN_LAYOUT = 'sales_order.grid.child'; CONST MASSACTION_BLOCK_CLASS = 'Mage_Adminhtml_Block_Widget_Grid_Massaction'; public function updateSalesOrderGrid($observer) { $event = $observer->getEvent(); $block = $event->getData('block'); if ($block->getNameInLayout() == self::GRID_NAME_IN_LAYOUT) { $block->addColumnAfter('fianet_sceau', array( 'header' => 'FIA-NET SCEAU', 'sortable' => false, 'type' => 'fianet', 'align' => 'center', 'width' => '20', 'renderer' => 'Fianet_Sceau_Block_Widget_grid_column_renderer_fianet', 'filter' => 'Fianet_Sceau_Block_Widget_grid_column_filter_fianet' ), 'action'); } if (preg_match('/' . self::MASSACTION_BLOCK_NAME_IN_LAYOUT . '[0-9]+/', $block->getNameInLayout()) && (get_class($block) == self::MASSACTION_BLOCK_CLASS || get_parent_class($block) == self::MASSACTION_BLOCK_CLASS)) { $block->addItem('fianet_sceau', array( 'label' => Mage::helper('fianet_sceau')->__('Envoyer à FIA-NET sceau'), 'url' => Mage::getUrl('sceau/index/mass') )); } } public function sendToFianet($observer) { //gestion de l'évènement magento $event = $observer->getEvent(); $order = $event->getData('order'); //$myorder = (Fianet_Sceau_Model_Order)$order; //Zend_Debug::dump($myorder); //die; if ($this->_canSendOrder($order)) { Mage::Helper('fianet_sceau/Data')->processOrderToFianet($order); } } protected function _canSendOrder(Mage_Sales_Model_Order $order) { if (!Mage::Helper('fianet_sceau/Data')->isModuleActive($order)) {//si le module est désactivé return false; } else if (Mage::Helper('fianet_sceau/Data')->isOrderAlreadySent($order)) {//Si la commande a déjà été envoyée return false; } elseif (Mage::Helper('fianet_sceau/Data')->checkCurrentOrderStatus($order)) {//si la status de la commande est l'un des status pour lequel l'envoi est requis return true; } elseif ($order->getState() == Mage_Sales_Model_Order::STATE_NEW && $order->getIsInProcess()) {//si la commande a été payée return true; } return false; } }<file_sep>/app/code/community/Fianet/Sceau/Block/Sales/Order.php <?php class Fianet_Sceau_Block_Sales_Order extends Mage_Adminhtml_Block_Sales_Order { protected $_blockGroup = 'fianet_sceau'; }<file_sep>/app/code/community/Fianet/Sceau/Model/Source/Status.php <?php /* * To change this template, choose Tools | Templates * and open the template in the editor. */ class Fianet_Sceau_Model_Source_Status { public static function toOptionArray() { $statuses = Mage::getModel('sales/order_status')->getCollection()->load(); $data = array(); foreach ($statuses as $status) { $data[] = array('label' => $status->getLabel(), 'value' => $status->getStatus()); } return $data; } }<file_sep>/app/code/community/Fianet/Sceau/Model/Source/Mode.php <?php /** * 2000-2012 FIA-NET * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) that is available * through the world-wide-web at this URL: http://www.opensource.org/licenses/OSL-3.0 * If you are unable to obtain it through the world-wide-web, please contact us * via http://www.fia-net-group.com/formulaire.php so we can send you a copy immediately. 
* * @author <NAME> <<EMAIL>> * @copyright 2000-2012 FIA-NET * @version Release: $Revision: 0.2.0 $ * @license http://www.opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0) */ class Fianet_Sceau_Model_Source_Mode { const MODE_PROD = 'prod'; const MODE_PREPROD = 'test'; public function toOptionArray() { return array( array('value' => self::MODE_PREPROD, 'label' => Mage::helper('adminhtml')->__('Test')), array('value' => self::MODE_PROD, 'label' => Mage::helper('adminhtml')->__('Production')), ); } } <file_sep>/app/code/community/Fianet/Sceau/Block/Sales/Order/Grid.php <?php class Fianet_Sceau_Block_Sales_Order_Grid extends Mage_Adminhtml_Block_Sales_Order_Grid { protected function _prepareColumns() { parent::_prepareColumns(); $params = array('grid' => $this); Mage::dispatchEvent('fianet_prepare_sales_order_grid_columns', $params); $this->sortColumnsByOrder(); return $this; } protected function _prepareMassaction() { parent::_prepareMassaction(); $params = array('grid' => $this); Mage::dispatchEvent('fianet_prepare_sales_order_grid_massaction', $params); return $this; } } <file_sep>/app/code/community/Fianet/Sceau/Block/Widget/Grid/Column/Renderer/Fianet.php <?php /** * 2000-2012 FIA-NET * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) that is available * through the world-wide-web at this URL: http://www.opensource.org/licenses/OSL-3.0 * If you are unable to obtain it through the world-wide-web, please contact us * via http://www.fia-net-group.com/formulaire.php so we can send you a copy immediately. * * @author <NAME> <<EMAIL>> * @copyright 2000-2012 FIA-NET * @version Release: $Revision: 0.0.13 $ * @license http://www.opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0) */ class Fianet_Sceau_Block_Widget_grid_column_renderer_fianet extends Mage_Adminhtml_Block_Widget_Grid_Column_Renderer_Abstract { const PICTO_KO = "ko.PNG"; const PICTO_OK = "ok.PNG"; public function render(Varien_Object $row) { $sent_prod = $row->getData('fianet_sceau_order_sent_prod'); $sent_preprod = $row->getData('fianet_sceau_order_sent_preprod'); $sent_error = $row->getData('fianet_sceau_order_sent_error'); if (!Mage::Helper('fianet_sceau/Data')->isModuleActive($row)) {//si le module est désactivé sur le front de la commande return 'Module désactivé'; } $text_pprod = ''; $icon = self::PICTO_KO; /*if($sent_preprod == 0 && $sent_prod==0){}*/ if($sent_preprod == 1 && $sent_prod==0) { $icon = self::PICTO_OK; $text_pprod = " [Test]"; } elseif($sent_preprod == 0 && $sent_prod==1) { $icon = self::PICTO_OK; } elseif($sent_preprod == 1 && $sent_prod==1) { $icon = self::PICTO_OK; } $html = "<img src=" . $this->getSkinUrl('images/sceau/' . $icon)." WIDTH=20 >".$text_pprod; //$html .= $row->getData('fianet_sceau_order_sent'); $order= Mage::getModel('sales/order')->load($row->getId()); /* Zend_Debug::dump($row->getData('fianet_sceau_order_sent_prod')); Zend_Debug::dump($row->getData('fianet_sceau_order_sent_preprod')); Zend_Debug::dump($order->getData('fianet_sceau_order_sent_prod'), 'order->fianet_sceau_order_sent_prod'); Zend_Debug::dump($order->getData('fianet_sceau_order_sent_preprod'), 'order->fianet_sceau_order_sent_preprod'); */ if($sent_error==1) $html .= "<img src=" . $this->getSkinUrl('images/sceau/warning.gif')." 
WIDTH=20 >"; return ($html); } }<file_sep>/app/code/community/Fianet/Sceau/sql/fianet_sceau_setup/mysql4-install-1.0.0.php <?php $installer = $this; $installer->startSetup(); $installer->addAttribute('order', 'fianet_sceau_order_sent_prod', array('type' => 'int', 'visible' => false, 'required' => true, 'default_value' => 0)); $installer->getConnection()->addColumn($installer->getTable('sales_flat_order_grid'), 'fianet_sceau_order_sent_prod', 'varchar(255) default 0'); $installer->addAttribute('order', 'fianet_sceau_order_sent_preprod', array('type' => 'int', 'visible' => false, 'required' => true, 'default_value' => 0)); $installer->getConnection()->addColumn($installer->getTable('sales_flat_order_grid'), 'fianet_sceau_order_sent_preprod', 'varchar(255) default 0'); $installer->addAttribute('order', 'fianet_sceau_order_sent_error', array('type' => 'int', 'visible' => false, 'required' => true, 'default_value' => 0)); $installer->getConnection()->addColumn($installer->getTable('sales_flat_order_grid'), 'fianet_sceau_order_sent_error', 'varchar(255) default 0'); $installer->endSetup(); <file_sep>/app/code/community/Fianet/Sceau/controllers/IndexController.php <?php class Fianet_Sceau_IndexController extends Mage_Adminhtml_Controller_Action { public function massAction() { $params = Mage::app()->getRequest()->getParams(); $orderIds = $params[$params['massaction_prepare_key']]; $successMsg = ''; foreach ($orderIds as $orderId) { $order = Mage::getModel('sales/order')->load($orderId); //$order->setData('fianet_sceau_order_sent', '0'); if ( $this->_canSendOrder($order) && Mage::Helper('fianet_sceau/Data')->processOrderToFianet($order)) { //Zend_Debug::dump($order->getData('fianet_sceau_order_sent_prod'), 'order->fianet_sceau_order_sent_prod'); //Zend_Debug::dump($order->getData('fianet_sceau_order_sent_preprod'), 'order->fianet_sceau_order_sent_preprod'); //die; $successMsg .= "\n<br />- Commande n° " . $order->getIncrementId(); } } if ($successMsg != '') { $successMsg = 'Commande envoyées à FIA-NET :' . $successMsg; Mage::getSingleton('adminhtml/session')->addSuccess($successMsg); } $order->save(); $this->_redirect('adminhtml/sales_order/index'); } protected function _canSendOrder(Mage_Sales_Model_Order $order) { if (!Mage::Helper('fianet_sceau/Data')->isModuleActive($order)) {//si le module est désactivé return false; } else if (Mage::Helper('fianet_sceau/Data')->isOrderAlreadySent($order)) {//Si la commande a déjà été envoyée return false; } return true; } }<file_sep>/app/code/community/Fianet/Sceau/Model/Order.php <?php /* * To change this template, choose Tools | Templates * and open the template in the editor. */ /** * Description of Order * * @author ahoury */ class Fianet_Sceau_Model_Order { static function processOrderToFianet($order) { if (Fianet_Sceau_Helper_Data::sendOrderToFianet($this)) { $attribut_sceau = Fianet_Sceau_Helper_Data::ORDER_ATTR_SCEAU_SENT_PPROD; if (Fianet_Sceau_Helper_Data::sendingMode($this) == Fianet_Sceau_Model_Source_Mode::MODE_PROD) { $attribut_sceau = Fianet_Sceau_Helper_Data::ORDER_ATTR_SCEAU_SENT_PROD; } $this->setData($attribut_sceau, '1'); return true; } return false; } } <file_sep>/app/code/community/Fianet/Sceau/Block/Widget/Left.php <?php class Fianet_Sceau_Block_Widget_Left extends Fianet_Sceau_Block_Widget_Abstract { CONST BLOCK_CACHE_KEY_PREFIX = 'fianet_sceau_widget_block_frontend_left_'; CONST CONFIG_PATH_BLOCK_ACTIVE = 'sceau/widgetconf/left_position'; function _construct() { parent::_construct(); $this->setCacheKey(self::BLOCK_CACHE_KEY_PREFIX . 
Mage::app()->getStore()->getCode()); $this->setCacheTags(array(Mage_Core_Block_Abstract::CACHE_GROUP)); $this->setCacheLifetime(60*60*24); } public function canDisplay() { return ($this->isModuleActive() && $this->isActive() && $this->getSiteId()); } public function isActive() { if (Mage::getStoreConfig(self::CONFIG_PATH_BLOCK_ACTIVE) == '1') { return true; } return false; } }<file_sep>/app/code/community/Fianet/Sceau/Block/Logo/Abstract.php <?php abstract class Fianet_Sceau_Block_Logo_Abstract extends Mage_Core_Block_Template { abstract public function canDisplay(); abstract public function isActive(); public function isModuleActive() { if (Mage::getStoreConfig('sceau/sceauconfg/active') == '1') { return true; } return false; } }<file_sep>/app/code/community/Fianet/Sceau/Helper/Data.php <?php class Fianet_Sceau_Helper_Data extends Mage_Core_Helper_Abstract { const ORDER_ATTR_SCEAU_SENT_PROD = 'fianet_sceau_order_sent_prod'; const ORDER_ATTR_SCEAU_SENT_PPROD = 'fianet_sceau_order_sent_preprod'; const ORDER_ATTR_SCEAU_SENT_ERROR= 'fianet_sceau_order_sent_error'; static function Generate_Sceau_xml(Mage_Sales_Model_Order $order) { //récupération des informations $email = $order->customer_email; $timestamp = Mage::getModel('core/date')->date(null,strtotime($order->created_at)); $refid = $order->increment_id; $privatekey = Mage::getStoreConfig('sceau/sceauconfg/private_key', $order->getStoreId()); $crypt = md5($privatekey . "_" . $refid . "+" . $timestamp . "=" . $email); $siteid = self::getSiteID($order); //Si l'IP de l'internaute n'est pas présente dans Magento (en cas de création de commande depuis le BO) alors on récupère l'IP de la boutique $ip = (!$order->getRemoteIp()) ? $_SERVER['REMOTE_ADDR'] : $order->getRemoteIp(); //Zend_Debug::dump($order);die; return("<?xml version='1.0' encoding='UTF-8' ?><control><utilisateur><nom titre='$order->customer_gender'><![CDATA[$order->customer_lastname]]></nom><prenom><![CDATA[$order->customer_firstname]]></prenom><email><![CDATA[$email]]></email></utilisateur><infocommande><siteid><![CDATA[$siteid]]></siteid><refid><![CDATA[$refid]]></refid><montant devise='$order->base_currency_code'><![CDATA[$order->base_grand_total]]></montant><ip timestamp='$timestamp'><![CDATA[$ip]]></ip></infocommande><crypt><![CDATA[$crypt]]></crypt></control>"); } static public function clean_xml($xml) { $xml = str_replace("\\'", "'", $xml); $xml = str_replace("\\\"", "\"", $xml); $xml = str_replace("\\\\", "\\", $xml); $xml = str_replace("\t", "", $xml); $xml = str_replace("\n", "", $xml); $xml = str_replace("\r", "", $xml); $xml = trim($xml); return ($xml); } static public function clean_invalid_char($var) { //supprimes les balises html $var = strip_tags($var); //$var = str_replace("&", "&&amp;", $var); $var = str_replace('&', '', $var); $var = str_replace("<", "&lt;", $var); $var = str_replace(">", "&gt;", $var); $var = trim($var); return ($var); } static function processOrderToFianet(Mage_Sales_Model_Order $order) { if (self::sendOrderToFianet($order)) { $attribut_sceau = self::ORDER_ATTR_SCEAU_SENT_PPROD; if (self::sendingMode($order) == Fianet_Sceau_Model_Source_Mode::MODE_PROD) { $attribut_sceau = self::ORDER_ATTR_SCEAU_SENT_PROD; } $order->setData($attribut_sceau, '1'); //Mage::getSingleton('adminhtml/session')->addError('processOrderToFianet() : ' . $attribut_sceau . ' = ' . 
$order->getData($attribut_sceau)); return true; } return false; } static function getStatusesConfig() { return explode(',', Mage::getStoreConfig('sceau/sceauconfg/orderstatuses')); } static function checkCurrentOrderStatus(Mage_Sales_Model_Order $order) { return in_array($order->getData('status'), self::getStatusesConfig()); } static function sendOrderToFianet(Mage_Sales_Model_Order $order) { $flux = self::Generate_Sceau_xml($order); //Zend_Debug::dump($flux, 'flux'); $url = self::getFianetUrl($order); $config = array('maxredirects' => 0, 'timeout' => 5); $params = array('SiteID' => self::getSiteID($order), 'CheckSum' => md5($flux), 'XMLInfo' => $flux); Mage::dispatchEvent('Fianet_Sceau_Before_Send_Order', array('order' => $order, 'url' => $url, 'flux' => $flux)); //Zend_Debug::dump($config, 'config'); //Zend_Debug::dump($params, 'params'); try { //Zend_Debug::dump($url, 'flux'); $client = new Zend_Http_Client($url, $config); $client->setMethod(Zend_Http_Client::POST); $client->setParameterPost($params); //Zend_Debug::dump($client, 'client'); $response = $client->request(); //Mage::getSingleton('adminhtml/session')->addError('sendOrderToFianet() : '.htmlentities($flux)); //Mage::getSingleton('adminhtml/session')->addError('sendOrderToFianet() : '.$response->getBody()); Mage::dispatchEvent('Fianet_Sceau_After_Send_Order', array('order' => $order, 'response' => $response)); //Zend_Debug::dump($response); return self::parseFianetResponse($response, $order); } catch (Exception $e) { Mage::getSingleton('adminhtml/session')->addError('FIA-NET Sceau sending error : ' . $e->getMessage()); } } static function parseFianetResponse($response, Mage_Sales_Model_Order $order) { $attribut_sceau_error = self::ORDER_ATTR_SCEAU_SENT_ERROR; if ($response->isSuccessful()) { $xml = $response->getBody(); $simplexml = new Varien_Simplexml_Config($xml); if ($simplexml->getNode()->getAttribute('type') == 'OK') { //Zend_Debug::dump('OK'); $order->setData($attribut_sceau_error, '0'); return true; } } if ($simplexml->getNode()->detail == '') { $order->setData($attribut_sceau_error, '1'); $ret = "erreur de flux"; } else $ret = $simplexml->getNode()->detail; Mage::getSingleton('adminhtml/session')->addError('FIA-NET Sceau sending error : ' . $ret); return false; } static function getFianetUrl(Mage_Sales_Model_Order $order) { $url = Mage::getStoreConfig('sceau/sceaulinks/test_send_url'); if (self::sendingMode($order) == Fianet_Sceau_Model_Source_Mode::MODE_PROD) { $url = Mage::getStoreConfig('sceau/sceaulinks/prod_send_url'); } return Mage::getStoreConfig('sceau/sceaulinks/fianet_url') . '/' . 
$url; } static function getSiteID(Mage_Sales_Model_Order $order=null) { if (isset($order)) { return Mage::getStoreConfig('sceau/sceauconfg/siteid', $order->getStoreId()); } return Mage::getStoreConfig('sceau/sceauconfg/siteid'); } static function getLogin() { return Mage::getStoreConfig('sceau/sceauconfg/login'); } static function activateWidgetComments() { return Mage::getStoreConfig('sceau/widgetconf/commentaires'); } static function getWidgetTransparent() { if (Mage::getStoreConfig('sceau/widgetconf/widgettransparent') == TRUE) { return "transparent"; } return "blanc"; } static public function isOrderAlreadySent(Mage_Sales_Model_Order $order) { $attribute_name = self::ORDER_ATTR_SCEAU_SENT_PPROD; if (self::sendingMode($order) == Fianet_Sceau_Model_Source_Mode::MODE_PROD) { $attribute_name = self::ORDER_ATTR_SCEAU_SENT_PROD; } if ($order->getData($attribute_name) == '1') { return true; } return false; } static public function getMagentoVersion() { $version = Mage::getVersion(); $version = substr($version, 0, 5); $version = str_replace('.', '', $version); while (strlen($version) < 3) { $version .= "0"; } return (int) $version; } static function isModuleActive($order) { if (Mage::getStoreConfig('sceau/sceauconfg/active', $order->getStoreId()) == '1') { return true; } return false; } static function sendingMode(Mage_Sales_Model_Order $order) { return Mage::getStoreConfig('sceau/sceauconfg/mode', $order->getStoreId()); } }
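/* Sending flow summary: processOrderToFianet() delegates to sendOrderToFianet(), which builds the XML flux with Generate_Sceau_xml(), POSTs the SiteID, CheckSum (md5 of the flux) and XMLInfo parameters to the URL resolved by getFianetUrl(), and hands the response to parseFianetResponse(), which checks the "type" attribute of the XML reply and records a sending error on the order when it is not "OK". Both the Observer and the mass-action controller trigger it with: Mage::Helper('fianet_sceau/Data')->processOrderToFianet($order) */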
fd4dd869ca5871beb7a8adccae50fa2a1f7d6be2
[ "PHP" ]
12
PHP
OpenMageModuleFostering/fianet_sceau
fcef79dd8baf8630c41b81d3fd4b911662dbc0bc
e2d78f07a56e450185701a1ef3edf1348c19813b
refs/heads/main
<file_sep>CREATE TABLE `tblBoard` ( `num` int(11) NOT NULL auto_increment , `name` varchar(20) , `subject` varchar(50) , `content` text , `pos` smallint(7) unsigned , `ref` smallint(7) , `depth` smallint(7) unsigned , `regdate` date , `pass` varchar(15) Not NULL , `ip` varchar(15) , `count` smallint(7) unsigned , `filename` varchar(30) , `filesize` int(11) , PRIMARY KEY ( `num` ) ) default character set utf8 collate utf8_general_ci; <file_sep>function inputCheck(){ if(document.regFrm.id.value==""){ alert("ID를 입력하지 않았습니다."); document.regFrm.id.focus(); return; } if(document.regFrm.pass.value==""){ alert("비밀번호를 입력하지 않았습니다."); document.regFrm.pass.focus(); return; } if(document.regFrm.repass.value==""){ alert("비밀번호 재확인을 입력하지 않았습니다."); document.regFrm.repass.focus(); return; } if(document.regFrm.pass.value != document.regFrm.repass.value){ alert("입력한 비밀번호와 비밀번호 재확인이 일치하지 않습니다."); document.regFrm.repass.value=""; document.regFrm.repass.focus(); return; } document.regFrm.submit(); } function win_close(){ self.close(); } <file_sep>package www; public class MemberBean { private String id; private String pass; private String group; private String gidNumber; private String email; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getPass() { return pass; } public void setPass(String pass) { this.pass = pass; } public String getGroup() { return group; } public void setGroup(String group) { this.group = group; setGidNumber(group); } public String getGidNumber() { return gidNumber; } public void setGidNumber(String group) { /* compare string content with equals(), not the == reference check */ if("users".equals(group)) { this.gidNumber = "501"; } } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } }
b807dd54bac4b2f10bd0ac2818039313e0b9bb45
[ "JavaScript", "SQL", "Java" ]
3
SQL
dlgdlg/www
ecdad46ee1508c5ee3a77f2f7fc9cd348475671c
ae6681d3f185a4a9227c97ed8e0d939848efecba
refs/heads/master
<file_sep>## Python 3 parte 1 Introdução à nova versão da linguagem. ## Resumo Implementar um jogo para começar a programar com Python. É um jogo de adivinhação, onde o computador escolhe um número aleatório e o usuário precisa adivinhá-lo. Aprender a trabalhar com variáveis de diversos tipos, como gerar um número aleatório e como tomar decisões através de if else, pois o computador dará uma dica para nós, se o chute foi menor ou maior que o número. O jogo tem várias rodadas, então existe a necessidade de repetir uma parte do código. É preciso ler a entrada do usuário e mostrar o resultado para o jogador. ## Conclusão Este curso abordou os seguintes tópicos: * Introdução e Instalação do Python 3 * Instalação do Python 3; * Python online [repl.it](https://repl.it/languages/python3); * Função print e variáveis; * Imprimindo datas; * Tipagem do Python; * Padrão Snake_Case. * Lidando com a entrada do usuário * Instalando e conhecendo o PyCharm; * Comparando variáveis; * Função input() e função int(); * Convertendo uma string para número inteiro; * Diferenças entre o Python 2 e o Python 3. * Testando valores * A condição elif; * else com condição de entrada; * Melhorando a legibilidade do código; * Para saber mais: if sem ou com parênteses? * A sequência do jogo * O laço com while; * Formatação de strings (interpolação de strings); * Iterando de maneira diferente * O laço com for; * Encerrando a interação e o loop; * Formatação de strings, inteiros e floats. * Gerando números aleatórios * Gerando e arredondando um número aleatório; * Definindo um intervalo para a geração de números aleatórios. * Nível e Pontuação * Adicionando níveis ao jogo; * Definindo uma pontuação para o usuário; * Funções built-in; * Arredondar no Python e divisão de float e integer. * Organizando ainda melhor o nosso código * Importando arquivos dentro de outros; * Criando funções para os nossos jogos; * Diferenciando um arquivo executado de um importado; * Declarando e executando funções; * Importação de módulo. * Comparando Python com C * Python vs C; * Interpretado vs Compilado. <file_sep># Carreira Desenvolvedor Python Web Cursos da [Carreira Desenvolvedor Python Web](https://www.alura.com.br/carreira-desenvolvimento-python-web), disponível na plataforma da Alura. ## Resumo A linguagem Python está entre as 5 linguagens mais populares, segundo pesquisa da RedMonk. Essa popularidade se dá principalmente por sua característica de exigir poucas linhas de código e permitir uma leitura fácil do código, além de ter várias bibliotecas do mundo de Data Science. Na Carreira Desenvolvedor Python Web, você vai começar com os fundamentos de HTML e CSS, para avançar e trabalhar com o framework web Flask. Após isso, vai aprender Python na versão 3 na prática, aplicando Orientação a Objetos em seu código e boas práticas de programação. 
## Lista de cursos #### Python 3 parte 1 - [X] Introdução à nova versão da linguagem #### Python 3 parte 2 - [X] Avançando na linguagem #### Python 3 parte 3 - [X] Introdução a Orientação a objetos #### Python 3 parte 4 - [X] Avançando na orientação a objetos #### Testes automatizados - [ ] TDD com Python #### HTML5 e CSS3 I - [ ] Suas primeiras páginas da Web #### HTTP - [ ] Entendendo a web por baixo dos panos #### Flask parte 1 - [ ] Crie uma webapp com Python 3 #### Flask parte 2 - [ ] Avançando no desenvolvimento web <file_sep>from flask import Flask, render_template, request, redirect, session, flash, url_for # Cria instância do Flask app = Flask(__name__) # secret_key será usado para encriptar os dados da sessão app.secret_key = 'alura' class Jogo: def __init__(self, nome, categoria, console): self.nome = nome self.categoria = categoria self.console = console class Usuario: def __init__(self, id, login, senha): self.id = id self.login = login self.senha = senha jogo1 = Jogo('Super Mario', 'Ação', 'SNES') jogo2 = Jogo('Pokemon', 'RPG', 'GBA') jogo3 = Jogo('Mortal Kombat', 'Luta', 'SNES') lista = [jogo1, jogo2, jogo3] usuario1 = Usuario('lucas', 'lucas', '123') usuario2 = Usuario('andre', 'andre', '456') usuario3 = Usuario('joao', 'joao', '789') usuarios = {usuario1.id : usuario1, usuario2.id : usuario2, usuario3.id : usuario3} @app.route('/index') def index(): return render_template('lista.html', titulo='jogos', jogos=lista) @app.route('/novo') def novo(): if 'usuario_logado' not in session or session['usuario_logado'] is None: return redirect(url_for('login', proxima=url_for('novo'))) return render_template('novo.html',titulo='Novo jogo') @app.route('/criar', methods=['POST']) def criar(): nome = request.form['nome'] categoria = request.form['categoria'] console = request.form['console'] jogo = Jogo(nome, categoria, console) lista.append(jogo) return redirect(url_for('index')) @app.route('/login') def login(): proxima_pagina = request.args.get('proxima') return render_template('login.html', proxima=proxima_pagina) @app.route('/autenticar', methods=['POST']) def autenticar(): if(request.form['usuario'] in usuarios): usuario = usuarios[request.form['usuario']] if(usuario.senha == request.form['senha']): session['usuario_logado'] = usuario.id flash('Usuario {} logado com sucesso!'.format(usuario.login)) proxima_pagina = request.form['proxima'] return redirect(proxima_pagina) else: flash('Usuario ou senha inválidos! Tente novamente!') return redirect(url_for('login')) @app.route('/logout') def logout(): session['usuario_logado'] = None flash('Nenhum usuário logado!') return redirect(url_for('index')) app.run(debug=True)<file_sep>## Flask parte 2 <file_sep>class Funcionario: def __init__(self, nome): self.nome = nome def registra_horas(self, horas): print('Horas registradas...') def mostra_tarefas(self): print('Fez muita coisa...') class Caelum(Funcionario): def mostra_tarefas(self): print("Fez muita coisa Caelum") def busca_cursos_do_mes(self, mes=None): print(f'Mostrando cursos - {mes}' if mes else 'Mostrando curso desse mês') class Alura(Funcionario): # def mostra_tarefas(self): # print('Fez muita coisa Alura') def busca_perguntas_sem_respostas(self): print('Mostrando perguntas não respondidas no fórum') class Hipster: def __str__(self): return f'Hipster, {self.nome}' class Junior(Alura): pass class Pleno(Alura, Caelum): pass class Senior(Alura, Caelum, Hipster): pass ryan = Senior('Ryan') print(ryan) <file_sep>## Python 3 parte 3 Introdução à Orientação a objetos. 
## Resumo Apresentar os fundamentos do paradigma Orientação a Objetos. Ver como definir classes e quais são os membros delas como atributos, métodos e construtores. Aprender que as classes servem como planta para criar objetos e os métodos encapsulam a implementação. ## Conclusão Este curso abordou os seguintes tópicos: * O problema do paradigma procedural * Dicionário; * Funções; * Encapsulamento de código. * Classes e Objetos * Classes; * Objetos; * Função construtora; * Endereço e referência de objetos; * Atributos de classe; * Acesso aos atributos através do objeto. * Implementando Métodos * Métodos, que definem o comportamento de uma classe; * Criação de métodos; * Como chamar métodos através do objeto; * Acesso aos atributos através do self; * Garbage Collector; * O tipo None. * Encapsulamento * Atributos privados; * Encapsulamento de código; * Encapsulamento da manipulação dos atributos nos métodos; * Coesão do código. * Usando Propriedades * Métodos de leitura dos atributos, os getters; * Métodos de modifição dos atributos, os setters; * Propriedades. * Métodos privados e estáticos * Métodos privados; * Métodos da classe e métodos estáticos. <file_sep>class Programa: def __init__(self, nome, ano): self._nome = nome.title() self.ano = ano self._likes = 0 @property def likes(self): return self._likes def dar_likes(self): self._likes += 1 @property def nome(self): return self._nome @nome.setter def nome(self, nome): self._nome = nome def __str__(self): return "Nome: {} ano: {} likes: {}".format(self._nome, self.ano, self._likes) class Playlist: def __init__(self, nome, programas): self._nome = nome self._programas = programas def __len__(self): return len(self._programas) def __getitem__(self, item): return self._programas[item] class Filme(Programa): def __init__(self, nome, ano, duracao): super().__init__(nome, ano) self.duracao = duracao def __str__(self): return "Nome: {} ano: {} likes: {} duracao: {} min".format(self._nome, self.ano, self._likes, self.duracao) class Serie(Programa): def __init__(self, nome, ano, temporadas): super().__init__(nome, ano) self.temporadas = temporadas def __str__(self): return "Nome: {} ano: {} likes: {} temporadas: {}".format(self._nome, self.ano, self._likes, self.temporadas) # Filmes vingadores = Filme('vingadores - guerra infinita', 2018, 160) vingadores.dar_likes() vingadores.dar_likes() click = Filme('click',2012,120) click.dar_likes() click.dar_likes() click.dar_likes() # Séries atlanta = Serie('atlanta', 2018, 2) atlanta.dar_likes() #print(f'Nome: {atlanta.nome} - Ano: {atlanta.ano}') #print('Nome: {} - Ano: {}'.format(atlanta.nome,atlanta.ano)) got = Serie('game of thrones', 2010, 8) got.dar_likes() got.dar_likes() got.dar_likes() got.dar_likes() got.dar_likes() filmes_e_series = [vingadores, atlanta, click, got] playlist_fim_de_semana = Playlist('fim de semana', filmes_e_series) for programa in playlist_fim_de_semana: print(programa) <file_sep>## Python 3 parte 4 Avançando na orientação a objetos. ## Resumo Continuação do curso de Orientação a Objetos com Python. O objetivo é reforçar o conhecimento sobre esse assunto, falar sobre alguns aspectos idiomáticos da linguagem, que nos auxiliarão a atuar com Python. Além disso, tratar aspectos específicos da Orientação a Objeto e como eles são aplicados nesta linguagem. Por exemplo, herança, polimorfismo e Duck Typing. Falar também sobre herança múltipla, um aspecto que pode assustar algumas pessoas, mas apresentar de forma sucinta, deixando claro o conceito. 
Abordar o assunto mixin, falar de forma a garantir maior segurança quando trabalhar com um código Python. Outra situação que pode encontrar será o código no qual seja exigida uma interface mais ligada aos aspectos desta linguagem. Tentar introduzir esse conhecimento para a linguagem Pyhton ligado a O.O., um tema que este será o foco nesta parte, mas que provavelmente será abordado em outros cursos. ## Conclusão Este curso abordou os seguintes tópicos: * Relembrando classes e objetos * Criação da classe; * Definição de métodos assessores; * @property; * name. * Removendo duplicação com herança * Herança; * Generalização/especialização; * Método super(). * Reduzindo ifs com polimorfismo * Polimorfismo; * Relacionamento é um; * Representação textual de um objeto. * Quando não usar herança * Herança de um tipo built-in (nativo); * Vantagens da herança de um iterável; * Desvantagem de fazer herança. * Duck Typing e um modelo de dados * Duck typing; * Python data (object) model; * Dunder methods; * Uso de ABCs. * Herança múltipla * Herança múltipla; * Resolução da ordem de chamada de métodos; * Mixins. <file_sep>## Python 3 parte 2 Avançando na linguagem. ## Resumo Implementar mais um jogo, mas dessa vez um Jogo da Forca, que é mais desafiador do que o jogo de adivinhação do treinamento anterior. Aprender novas estruturas de dados, listas e tuplas, entre outras sequências. Ver leitura e escrita de arquivos, além de organizar melhor o código através de funções. ## Conclusão Este curso abordou os seguintes tópicos: * Preparando o jogo da forca * Ajustando a infraestrutura; * Definindo a palavra secreta; * O tipo bool. * Manipulando strings * Buscando um caracter em uma string; * Iterando em uma palavra; * Funções importantes da String; * O tipo str é impossível alterá-lo (imutabilidade). * Conhecendo e trabalhando com listas * Estrutura de dados: List; * Guardando as letras acertadas; * Funções min(), max(), len(), count() e index(). * Conhecendo e trabalhando com tuplas * O que são tuplas? * Diferenças entre sequências; * Listas e tuplas juntas; * Set e Dictionary. * Implementando o encerramento do jogo * Estipulando tentativas de erros; * Calculando o total de caracteres em um loop; * Compreensão de lista (List Comprehension). * Escrita e leitura de arquivos * Escrevendo em um arquivo; * Lendo um arquivo; * Escolhendo uma palavra; * Lendo a primeira linha; * Lendo um arquivo por completo. * Melhorando o código e a apresentação * Organizando o código em funções; * Criando mais funções; * Melhorando a apresentação da forca.
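A minimal sketch of the word-loading step described above (the file name `palavras.txt` and the function name are illustrative assumptions, not the course's exact code):

```python
import random

def carrega_palavra_secreta(nome_arquivo='palavras.txt'):
    # assumed file name: one candidate word per line
    with open(nome_arquivo) as arquivo:
        palavras = [linha.strip() for linha in arquivo]
    # pick the secret word for this round at random
    return random.choice(palavras)
```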
956198e3d6a3fefaa24241e6a1acc6630b2e12fa
[ "Markdown", "Python" ]
9
Markdown
ghmiyoshi/alura-python
b68e133e8d26d5507f60db7136be98503ef3b4e8
ab9e01fbc48b3b3e260aab7d393f40720808a657
refs/heads/main
<repo_name>BGSU-LITS/framework-mail<file_sep>/src/settings.php <?php declare(strict_types=1); use Lits\Config\MailConfig; use Lits\Framework; return function (Framework $framework): void { $framework->addConfig('mail', new MailConfig()); }; <file_sep>/src/Lits/Config/MailConfig.php <?php declare(strict_types=1); namespace Lits\Config; use Lits\Config; final class MailConfig extends Config { public string $dsn = ''; public ?string $from = null; } <file_sep>/src/definitions.php <?php declare(strict_types=1); use Lits\Config\MailConfig; use Lits\Framework; use Lits\Mail; use Lits\Settings; use Symfony\Bridge\Twig\Mime\BodyRenderer; use Symfony\Component\EventDispatcher\EventDispatcherInterface as Dispatcher; use Symfony\Component\Mailer\EventListener\MessageListener; use Symfony\Component\Mailer\Mailer; use Symfony\Component\Mailer\Transport as MailerTransport; use Twig\Environment; return function (Framework $framework): void { $framework->addDefinition( Mail::class, function ( Dispatcher $dispatcher, Environment $environment, Settings $settings ): Mail { $dispatcher->addSubscriber( new MessageListener(null, new BodyRenderer($environment)) ); assert($settings['mail'] instanceof MailConfig); /** @psalm-suppress TooManyArguments */ $transport = MailerTransport::fromDsn( $settings['mail']->dsn, $dispatcher ); $mailer = new Mailer( $transport, null, $dispatcher ); return new Mail($settings['mail'], $mailer); } ); }; <file_sep>/src/Lits/Mail.php <?php declare(strict_types=1); namespace Lits; use Lits\Config\MailConfig; use Lits\Exception\FailedSendingException; use Symfony\Bridge\Twig\Mime\TemplatedEmail; use Symfony\Component\Mailer\MailerInterface as Mailer; use Symfony\Component\Mime\Message; final class Mail { public MailConfig $config; public Mailer $mailer; public function __construct(MailConfig $config, Mailer $mailer) { $this->config = $config; $this->mailer = $mailer; } public function message(): TemplatedEmail { $message = new TemplatedEmail(); if (\is_string($this->config->from)) { $message->from($this->config->from); } return $message; } /** @throws FailedSendingException */ public function send(Message $message): void { try { $this->mailer->send($message); } catch (\Throwable $exception) { throw new FailedSendingException( 'Could not send message', 0, $exception ); } } }
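// Usage sketch (assumed caller code, not part of this package): with a Mail instance
// resolved from the container, a message is built via message(), which pre-fills the
// configured "from" address, and delivered via send(), which wraps MailerInterface::send()
// and rethrows any failure as FailedSendingException. A hypothetical call site:
//   $message = $mail->message()->to('user@example.com')->subject('Hello');
//   $mail->send($message);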
f6c7d3719ffd0a07967b5561ad4c0439670e107c
[ "PHP" ]
4
PHP
BGSU-LITS/framework-mail
0ac0ebe7798cd2bf750f2ecd940df841d3f12d67
01cd4e8cbe2fe8be57c99b98870dfbf1c53e6838
refs/heads/main
<file_sep>export interface Result { reversed: number; items: ResultItem[]; } interface ResultItem { reversed: number; x: number; } export const defaultResult: Result = { items: [], reversed: 0 }; export function reverse(x: number): Result { const items: ResultItem[] = []; const max = Math.pow(2, 31) - 1; const min = Math.pow(-2, 31); let reversed = 0; while (x !== 0) { if (reversed > ~~(max / 10) || reversed < ~~(min / 10)) { return { items, reversed }; } const digit = x % 10; reversed = reversed * 10 + digit; x = ~~(x / 10); items.push({ reversed, x }) } return { reversed, items }; }; <file_sep>export const title = "Binary Tree Paths"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ // preorder function binaryTreePaths(root: TreeNode | null): string[] { const paths: string[] = []; function dfs(node: TreeNode | null, parents: number[]) { if (node === null) { return; } const path = [...parents, node.val]; if (!node.left && !node.right) { paths.push(path.join("->")); return; } dfs(node.left, path); dfs(node.right, path); } dfs(root, []); return paths; };`; export const description = ` Given the **root** of a binary tree, return all root-to-leaf paths in ***any order***. A **leaf** is a node with no children. `; export const solution = ``; export const usecases = ''; export const example = ` --- #### Example - Input: root = [1,2,3,null,5] - Output: ["1->2->5", "1->3"] `; <file_sep>export const title = "Lowest Common Ancestor of a Binary Search Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ // preorder function lowestCommonAncestor(root: TreeNode | null, p: TreeNode | null, q: TreeNode | null): TreeNode | null { if (root === null || root === p || root === q) { return root; } if (root.val > Math.min(p.val, q.val) && root.val < Math.max(p.val, q.val)) { return root; } return lowestCommonAncestor(root.left, p, q) || lowestCommonAncestor(root.right, p, q); };`; export const description = ` Given a binary search tree (BST), find the lowest common ancestor (LCA) node of two given nodes in the BST. According to the ***definition of LCA on Wikipedia***: “The lowest common ancestor is defined between two nodes **p** and **q** as the lowest node in **T** that has both **p** and **q** as descendants (where we allow ***a node to be a descendant of itself***).” `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Construct Binary Tree from Preorder and Inorder Traversal"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? 
null : right) * } * } */ function buildTree(preorder: number[], inorder: number[]): TreeNode | null { const inorderIndexMap = new Map<number, number>(); inorder.forEach((num, i) => inorderIndexMap.set(num, i)); const buildMyTree = (preorderLeft: number, preorderRight: number, inorderLeft: number, inorderRight: number): TreeNode | null => { if (preorderLeft > preorderRight) { return null; } const inorderRootIndex = inorderIndexMap.get(preorder[preorderLeft]) const leftTreeLength = inorderRootIndex - inorderLeft; const root = new TreeNode(preorder[preorderLeft]); root.left = buildMyTree(preorderLeft + 1, preorderLeft + leftTreeLength, inorderLeft, inorderRootIndex - 1); root.right = buildMyTree(preorderLeft + leftTreeLength + 1, preorderRight, inorderRootIndex + 1, inorderRight); return root; } return buildMyTree(0, preorder.length - 1, 0, inorder.length - 1) };`; export const description = ` Given two integer arrays **preorder** and **inorder** where **preorder** is the preorder traversal of a binary tree and **inorder** is the inorder traversal of the same tree, construct and return the ***binary tree***. `; export const usecases = ''; export const example = ''; <file_sep>export default interface Displayer { show: () => void; hide: () => void; } <file_sep>import * as THREE from "three"; import Position from "../_commons/params/position.interface"; import { GraphEdge, DirectedGraphEdge as IDirectedGraphEdge, UndirectedGraphEdge as IUndirectedGraphEdge } from "./edge.interface"; import { GraphNode } from "./node.interface"; import Displayer from "../_commons/params/displayer.interface"; import DisplayerImpl from "../_commons/three/displayer.class"; import Color from "../_commons/params/color.interface"; import ColorImpl from "../_commons/three/color.class"; const threePosition = <T>(node: GraphNode<T>) => { const { x, y, z } = node.skin; return new THREE.Vector3(x, y, z); } abstract class Base<T> implements GraphEdge<T> { readonly source: GraphNode<T>; readonly target: GraphNode<T>; private displayer: Displayer; constructor( source: GraphNode<T>, target: GraphNode<T>, scene: THREE.Scene, object3D: THREE.Object3D, ) { this.source = source; this.target = target; this.displayer = new DisplayerImpl(scene, object3D); } abstract setColor(color: string): Promise<void>; abstract get color(): string abstract refresh(): void show() { this.displayer.show(); } hide() { this.displayer.hide(); } } export class DirectedGraphEdge<T> extends Base<T> implements IDirectedGraphEdge<T> { private arrow: THREE.ArrowHelper; private readonly headLength: number; private readonly headWidth: number; private arrowColor: string; constructor( source: GraphNode<T>, target: GraphNode<T>, scene: THREE.Scene, color: THREE.Color | string | number, headLength: number, headWidth: number ) { const origin = threePosition(source); const dest = threePosition(target); const direction = dest.clone().sub(origin); const arrow = new THREE.ArrowHelper( direction.clone().normalize(), origin, direction.length(), color, headLength, headWidth ); super(source, target, scene, arrow) this.arrow = arrow; this.headLength = headLength; this.headWidth = headWidth; this.arrowColor = color + ""; } setColor(color: string): Promise<void> { this.arrowColor = color; return Promise.resolve(this.arrow.setColor(color)); } get color(): string { return this.arrowColor; } refresh(): void { const origin = threePosition(this.source); const dest = threePosition(this.target); const direction = dest.clone().sub(origin); this.arrow.position.copy(origin); 
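// re-aim the arrow and stretch it to the target's current position; head length/width stay fixed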
this.arrow.setDirection(direction.clone().normalize()); this.arrow.setLength(direction.length(), this.headLength, this.headWidth); } } export class UndirectedGraphEdge<T> extends Base<T> implements IUndirectedGraphEdge<T> { private line: THREE.Line; private readonly colorProxy: Color; constructor( source: GraphNode<T>, target: GraphNode<T>, scene: THREE.Scene, material: THREE.Material, ) { const geometry = new THREE.BufferGeometry().setFromPoints([threePosition(source), threePosition(target)]); const line = new THREE.Line(geometry, material); super(source, target, scene, line) this.line = line; this.colorProxy = new ColorImpl(material); } setColor(color: string): Promise<void> { return this.colorProxy.setColor(color); } get color(): string { return this.colorProxy.color; } refresh(): void { this.refreshSource(); this.refreshTarget(); } private refreshSource(): void { this.update(this.source.skin, 0, 1, 2); } private refreshTarget(): void { this.update(this.target.skin, 3, 4, 5); } private update(position: Position, xIndex: number, yIndex: number, zIndex: number) { this.line.geometry.attributes.position.needsUpdate = true; const positions = this.line.geometry.attributes.position.array; const { x, y, z } = position; (positions[xIndex] as any) = x; (positions[yIndex] as any) = y; (positions[zIndex] as any) = z; } } export class SimpleDirectedGraphEdge<T> extends DirectedGraphEdge<T> { constructor( source: GraphNode<T>, target: GraphNode<T>, scene: THREE.Scene, color: THREE.Color | string | number ) { const headLength = 1.2; const headWidth = 0.5; super(source, target, scene, color, headLength, headWidth); } } export class SimpleUndirectedGraphEdge<T> extends UndirectedGraphEdge<T> { constructor( source: GraphNode<T>, target: GraphNode<T>, scene: THREE.Scene, color: THREE.Color | string | number ) { const material = new THREE.LineBasicMaterial({ color }); super(source, target, scene, material); } } <file_sep>import * as THREE from "three"; import { TextGeometry } from "three/examples/jsm/geometries/TextGeometry"; import BaseNode, { PlaneParameters, TextParameters } from "../commons/node"; import { ArrowStyles } from "../commons/styles"; import { BackwardArrow, ForwardArrow } from "./arrow"; import { toMemoryAddress } from "../commons/helpers"; export default class Node<T> extends BaseNode<T> { private _previous?: Node<T>; private _next?: Node<T>; private _forwardArrow?: ForwardArrow; private _backwardArrow?: BackwardArrow; private arrowStyles: ArrowStyles; address: THREE.Mesh; readonly key: number; constructor( key: number, data: T, display: string, address: number, scene: THREE.Scene, planeParameters: PlaneParameters, textParameters: TextParameters, arrowStyles: ArrowStyles, addressColor: THREE.Color | string | number, ) { super(data, display, scene, planeParameters, textParameters) this.key = key; this.arrowStyles = arrowStyles; this.address = this.createAddress(address, textParameters, planeParameters, addressColor); scene.add(this.address); } private createAddress( address: number, { textGeometryParameters }: TextParameters, { height, position }: PlaneParameters, color: THREE.Color | string | number, ): THREE.Mesh { const text = toMemoryAddress(address); const material = new THREE.MeshBasicMaterial({ color }); const addressGeometry = new TextGeometry(text, textGeometryParameters); const addressMesh = new THREE.Mesh(addressGeometry, material); const addressPostion = position.clone().set(position.x - 3, position.y - height, position.z); addressMesh.position.copy(addressPostion); return 
addressMesh; } set previous(node: Node<T> | undefined) { if (node) { if (this.backwardArrow) { this.backwardArrow.from = this; this.backwardArrow.to = node; } else { const { color, headLength, headWidth } = this.arrowStyles; this.backwardArrow = new BackwardArrow(this, node, color, headLength, headWidth); } } this._previous = node; } get previous() { return this._previous; } set next(node: Node<T> | undefined) { if (node) { if (this.forwardArrow) { this.forwardArrow.from = this; this.forwardArrow.to = node; } else { const { color, headLength, headWidth } = this.arrowStyles; this.forwardArrow = new ForwardArrow(this, node, color, headLength, headWidth); } } this._next = node; } get next() { return this._next; } append(node: Node<T>): void { node.previous = this; node.next = this.next; if (this.next) { this.next.previous = node; } this.next = node; } delete(): void { if (this.previous) { this.previous.next = this.next; } if (this.next) { this.next.previous = this.previous; } this.removeFromScene(); this.removeArrowsFromScene(); this.scene.remove(this.address); this.forwardArrow = undefined; this.backwardArrow = undefined; } update(): void { this.updateBackwardArrow(); this.updateForwardArrow(); } prepend(node: Node<T>): void { node.next = this; node.previous = this.previous; if (this.previous) { this.previous.next = node; } this.previous = node; } addToScene(): void { super.addToScene(); if (this.address) { this.scene.add(this.address); } } private removeArrowsFromScene(): void { if (this.forwardArrow) { this.scene.remove(this.forwardArrow); } if (this.backwardArrow) { this.scene.remove(this.backwardArrow); } } private set forwardArrow(arrow: ForwardArrow | undefined) { this._forwardArrow = arrow; if (this._forwardArrow) { this.scene.add(this._forwardArrow); } } private get forwardArrow() { return this._forwardArrow; } private set backwardArrow(arrow: BackwardArrow | undefined) { this._backwardArrow = arrow; if (this._backwardArrow) { this.scene.add(this._backwardArrow); } } private get backwardArrow() { return this._backwardArrow; } private updateBackwardArrow(): void { if (this.backwardArrow) { this.backwardArrow.update(); } } private updateForwardArrow(): void { if (this.forwardArrow) { this.forwardArrow.update(); } } get leftUpperConnectPosition(): THREE.Vector3 { const { width, height } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x - width / 2, y + height / 5, z); } get leftLowerConnectPosition(): THREE.Vector3 { const { width, height } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x - width / 2, y - height / 5, z); } get rightUpperConnectPosition(): THREE.Vector3 { const { width, height } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x + width / 2, y + height / 5, z); } get rightLowerConnectPosition(): THREE.Vector3 { const { width, height } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x + width / 2, y - height / 5, z); } } <file_sep>export interface LinePosition { x1: number; x2: number; y1: number; y2: number; } export class NodePosition { cx: number; cy: number; r: number; constructor(cx: number, cy: number, r: number) { this.cx = cx; this.cy = cy; this.r = r; } get topX() { return this.cx; } get topY() { return this.cy - this.r; } get bottomX() { return this.cx; } get bottomY() { return this.cy + this.r; } get topLeftX() { return this.cx - this.r * Math.cos(Math.PI / 4); 
} get topLeftY() { return this.cy - this.r * Math.sin(Math.PI / 4); } get topRightX() { return this.cx + this.r * Math.cos(Math.PI / 4); } get topRightY() { return this.cy - this.r * Math.sin(Math.PI / 4); } get bottomLeftX() { return this.cx - this.r * Math.cos(Math.PI / 4); } get bottomLeftY() { return this.cy + this.r * Math.sin(Math.PI / 4); } get bottomRightX() { return this.cx + this.r * Math.cos(Math.PI / 4); } get bottomRightY() { return this.cy + this.r * Math.sin(Math.PI / 4); } } export const calculateLeftLinePosition = (node: NodePosition, left: NodePosition): LinePosition => ({ x1: node.bottomLeftX, y1: node.bottomLeftY, x2: left.topX, y2: left.topY }); export const calculateRightLinePosition = (node: NodePosition, right: NodePosition): LinePosition => ({ x1: node.bottomRightX, y1: node.bottomRightY, x2: right.topX, y2: right.topY }); <file_sep>export const title = 'First Bad Version'; export const formula = `/** * The knows API is defined in the parent class Relation. * isBadVersion(version: number): boolean { * ... * }; */ var solution = function (isBadVersion: any) { return function (n: number): number { let [left, right] = [1, n]; while (left < right) { const mid = left + Math.floor((right - left) / 2); if (isBadVersion(mid)) { right = mid; } else { left = mid + 1; } } return left; }; };`; export const description = ` You are a product manager and currently leading a team to develop a new product. Unfortunately, the latest version of your product fails the quality check. Since each version is developed based on the previous version, all the versions after a bad version are also bad. Suppose you have n versions [1, 2, ..., n] and you want to find out the first bad one, which causes all the following ones to be bad. You are given an API bool isBadVersion(version) which returns whether version is bad. Implement a function to find the first bad version. You should minimize the number of calls to the API. 
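For example, suppose n = 5 and version 4 is the first bad one: isBadVersion(3) returns false, so the answer must lie in [4, 5]; isBadVersion(4) returns true, so the range narrows to [4, 4] and the function returns 4 after only two API calls.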
`; export const usecases = ''; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { index: number; node?: TreeNode<string>; direction?: Direction; } export function buildSteps(k: number, root?: TreeNode<string>): Step[] { const steps: Step[] = []; let index: number = 0; function kthSmallest(node?: TreeNode<string>, direction?: Direction) { if (node === undefined) { return; } kthSmallest(node.left, Direction.Left); index = index + 1; if (index > k) { return; } steps.push({ node, direction, index }); kthSmallest(node.right, Direction.Right); } kthSmallest(root); return steps; } <file_sep>export const helperColorOne = "#faca48"; export const helperColorTwo = "#fced88"; export const helperColorThree = "green"; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; interface Node { index: number; left?: Node; right?: Node; } export enum Direction { Left, Right, Back } export interface Step { node: Node; } const buildTree = (node?: TreeNode<string>): Node | undefined => { if (!node) { return undefined; } const root: Node = { index: node.index }; const left = buildTree(node.left); const right = buildTree(node.right); root.left = left; root.right = right; return root; } export function buildTreeNodeMap(root?: TreeNode<string>): Map<number, TreeNode<string>> { const map: Map<number, TreeNode<string>> = new Map(); function dfs(node?: TreeNode<string>) { if (!node) { return; } map.set(node.index, node); dfs(node.left); dfs(node.right); } dfs(root); return map; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; function invertTree(node?: Node) { if (!node) { return; } steps.push({ node }); const temp = node.left; node.left = node.right; node.right = temp; invertTree(node.left); invertTree(node.right); }; const node = buildTree(root); invertTree(node); return steps; } <file_sep>import * as THREE from 'three'; import { Cube } from '../../../../data-structures/_commons/cube/three/cube'; import { nodeSize, shellMterial } from "../styles"; class QueueShellBuilder { private _scene: THREE.Scene; private _material: THREE.Material = shellMterial; private _geometry: THREE.BoxGeometry = new THREE.BoxGeometry(nodeSize.width, nodeSize.height, nodeSize.depth); private _position: THREE.Vector3 = new THREE.Vector3(0, 0, 0); private _show: boolean = true; constructor(scene: THREE.Scene, show: boolean) { this._show = show; this._scene = scene; } position(x: number, y: number, z: number): QueueShellBuilder { this._position = new THREE.Vector3(x, y, z); return this; } material(material: THREE.Material): QueueShellBuilder { this._material = material; return this; } geometry(width: number, height: number, depth: number): QueueShellBuilder { this._geometry = new THREE.BoxGeometry(width, height, depth); return this; } build(): Cube { const item = new Cube(this._geometry, this._material, this._scene); item.width = nodeSize.width; item.height = nodeSize.height; item.depth = nodeSize.depth; this.setPosition(item); if (this._show) { item.show(); } return item; } private setPosition(item: Cube): void { item.position.x = this._position.x; item.position.y = this._position.y; item.position.z = this._position.z; } } export default QueueShellBuilder; <file_sep>export const title = "Coin Change (How Many Ways)" export const formula = ` if (coin > col) { table[row][col] = table[row - 1][col]; } else { table[row][col] = table[row][col - coin] + table[row - 1][col]; } `; export const 
description = ` You are given coins of different denominations and a total amount of money. Write a function to compute the number of combinations that make up that amount. You may assume that you have infinite number of each kind of coin. `; export const usecases = ''; export const example = ''; <file_sep>import { DisjointSet } from "./unionFind"; export interface ResultNumberOfProvinces { roots: number[]; } export interface Step { row: number; col: number; } export function buildSteps(isConnected: number[][]): Step[] { const steps: Step[] = []; for (let row = 0; row < isConnected.length; row++) { for (let col = row; col < isConnected[row].length; col++) { if (isConnected[row][col] === 1) { steps.push({ row, col }); } } } return steps; } export function buildAdjacencyList(isConnected: number[][]): number[][] { const adjacency: number[][] = []; const disjointSet = new DisjointSet(); for (let row = 0; row < isConnected.length; row++) { for (let col = row; col < isConnected[row].length; col++) { if (isConnected[row][col] === 1) { disjointSet.union(row, col); } } } disjointSet.compress(); Array.from(disjointSet.map.values()).forEach(node => { adjacency.push([node.value, node.parent.value]); }); return adjacency; } export function getRoots(isConnected: number[][]): number[] { const disjointSet = new DisjointSet(); for (let row = 0; row < isConnected.length; row++) { for (let col = row; col < isConnected[row].length; col++) { if (isConnected[row][col] === 1) { disjointSet.union(row, col); } } } return disjointSet.getRoots() } <file_sep>export enum State { Typing, Play, Computing } <file_sep>const initTable = (sentence: string): boolean[][] => { const length = sentence.length; return Array(length).fill(false).map(() => Array(length).fill(false)); }; const createDPTable = (sentence: string, dictionary: string[]): boolean[][] => { const table = initTable(sentence); for (let len = 1; len <= sentence.length; len++) { for (let start = 0, end = start + len - 1; end < sentence.length; start++, end++) { const sub = sentence.substring(start, end + 1); if (dictionary.includes(sub)) { table[start][end] = true; continue; } for (let i = start; i < end; i++) { if (table[start][i] && table[i + 1][end]) { table[start][end] = true; break; } } } } return table; }; export default createDPTable; <file_sep>export const title = "Valid Parentheses"; export const formula = `function isValid(s: string): boolean { const map: Map<string, string> = new Map([ [")", "("], ["]", "["], ["}", "{"] ]); const stack:string[] = []; for(let ch of s) { if(!map.has(ch)){ stack.push(ch); } else if(stack.pop() !== map.get(ch)){ return false; } } return stack.length === 0; };`; export const description = ` Given a string containing '(', ')', '{', '}', '[' and ']', check whether if it is valid. The brackets must close in the correct order. `; export const solution = ` ### Solution --- We are going to use **stack** and **hash-table** to solve this problem. #### Steps: 1. Add **)** **]** **}** to hash-table. 2. Create an empty stack. 3. For-each all the characters of the input string. - If a character is not in hash-table, push this character into stack. - ElseIf a character is in hash-table, compare the result between stack.pop() and map.get(character). + return false if stack.pop() != map.get(character). 4. The stack should be empty after the for-loop, otherwise return false. 
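For example, with s = "([])": '(' and '[' are pushed onto the stack; ']' maps to '[', which matches the popped element; ')' maps to '(', which also matches; the stack is empty at the end, so the string is valid.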
` export const usecases = ''; export const example = ` Example 1: - Input: s = "()" - Output: true Example 2: - Input: s = "()[]{}" - Output: true Example 3: - Input: s = "(]" - Output: false `; <file_sep>export const title = "Find K Pairs with Smallest Sums"; export const minHeap = `interface HeapItem { val: number; x: number; // index in nums1 y: number; // index in nums2 a: number; // value in nums1 b: number; // value in nums2 } class MinHeap { private items: HeapItem[]; constructor() { this.items = []; } push(item: HeapItem) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last) { this.items.unshift(last); this.bubbleDown(0); } return root; } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.items[index].val < this.items[parentIndex].val) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if ( this.items[target] && this.items[leftChildIndex] && this.items[target].val > this.items[leftChildIndex].val ) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if ( this.items[target] && this.items[rightChildIndex] && this.items[target].val > this.items[rightChildIndex].val ) { target = rightChildIndex; } if (target !== index) { this.swap(target, index); this.bubbleDown(target); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } };`; export const kSmallestPairs = `function kSmallestPairs(nums1: number[], nums2: number[], k: number): number[][] { const set: Set<string> = new Set(); const result: number[][] = []; const minHeap = new MinHeap(); const buildSetKey = (x: number, y: number): string => { return x + "," + y; } const pushToMinHeap = (x: number, y: number) => { const a = nums1[x]; const b = nums2[y]; minHeap.push({ val: a + b, a, b, x, y }); } pushToMinHeap(0, 0); for (let i = 0; i < k; i++) { const root = minHeap.pop(); if (root) { const { x, y, a, b } = root; result.push([a, b]); if (x + 1 < nums1.length) { const key = buildSetKey(x + 1, y); if (!set.has(key)) { pushToMinHeap(x + 1, y); set.add(key); } } if (y + 1 < nums2.length) { const key = buildSetKey(x, y + 1); if (!set.has(key)) { pushToMinHeap(x, y + 1); set.add(key); } } } } return result; };`; export const description = ` You are given two integer arrays **nums1** and **nums2** sorted in **ascending order** and an integer **k**. Define a pair **(u, v)** which consists of one element from the first array and one element from the second array. Return the **k** pairs **(u1, v1), (u2, v2), ..., (uk, vk)** with the smallest sums. `; export const solution = ``; export const usecases = ''; export const example = ` `; <file_sep>export enum State { Typing, Count, AddToHeap, Computing, Finished } <file_sep>import { StackVis as Stack } from './stack-vis'; export default Stack; <file_sep>export const title = "Lowest Common Ancestor of a Binary Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? 
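For example, with nums1 = [1,7,11], nums2 = [2,4,6] and k = 3, the answer is [[1,2],[1,4],[1,6]], whose sums 3, 5 and 7 are the smallest among all possible pairs.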
null : right) * } * } */ // postorder function lowestCommonAncestor(root: TreeNode | null, p: TreeNode | null, q: TreeNode | null): TreeNode | null { if (root === null || root === p || root === q) { return root; } const left = lowestCommonAncestor(root.left, p, q); const right = lowestCommonAncestor(root.right, p, q); if (left === null) { return right; } if (right === null) { return left; } return root; };`; export const description = ` Given a binary tree, find the lowest common ancestor (LCA) of two given nodes in the tree. According to the ***definition of LCA on Wikipedia***: “The lowest common ancestor is defined between two nodes **p** and **q** as the lowest node in **T** that has both **p** and **q** as descendants (where we allow ***a node to be a descendant of itself***).” `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>const initTable = (array: number[]) => array.map(() => Array(array.length).fill(1)); const createDPTable = (array: number[]): number[][] => { const table = initTable(array); for (let col = 1; col < array.length; col++) { for (let row = 1; row <= col; row++) { const toCompare = array[row - 1]; const target = array[col]; if (toCompare < target) { table[row][col] = Math.max(table[row - 1][col], table[row - 1][row - 1] + 1); } else { table[row][col] = table[row - 1][col]; } } } return table; }; export default createDPTable; <file_sep>export interface Step { adjacency: Map<number, number[]>; current?: number; hasCycle: boolean; stack: number[]; visited: number[]; numCourses: number; } const calculateNumCourses = (prerequisites: number[][]): number => { let numCourses: number = prerequisites .map(array => Math.max(...array)) .reduce((prev, curr) => Math.max(prev, curr), 0); numCourses += 1; return numCourses; } const buildAdjacency = (numCourses: number, prerequisites: number[][]): Map<number, number[]> => { const adjacency: Map<number, number[]> = new Map(); for (let i = 0; i < numCourses; i++) { adjacency.set(i, []); } prerequisites.forEach(prerequisite => { const [a, b] = prerequisite; adjacency.get(b)?.push(a); }); return adjacency; } export function findOrder(prerequisites: number[][]): Step[] { const steps: Step[] = []; const numCourses: number = calculateNumCourses(prerequisites); const adjacency: Map<number, number[]> = buildAdjacency(numCourses, prerequisites); const stack: number[] = []; let hasCycle = false; const visited: Set<number> = new Set(); const dfs = (current: number) => { const returnEarly: boolean = visited.has(current) || stack.indexOf(current) >= 0; if (returnEarly) { // only add new step if return-early to avoid add same node multple times. 
steps.push({ current, adjacency, hasCycle, stack: [...stack], visited: Array.from(visited), numCourses }); } if (visited.has(current)) { hasCycle = true; return; } if (stack.indexOf(current) >= 0) { return; } visited.add(current); steps.push({ current, adjacency, hasCycle, stack: [...stack], visited: Array.from(visited), numCourses }); const children = adjacency.get(current) || []; for (let i = 0; i < children.length; i++) { dfs(children[i]); if (hasCycle) { return; } } visited.delete(current); stack.push(current); steps.push({ current, adjacency, hasCycle, stack: [...stack], visited: Array.from(visited), numCourses }); } for (let i = 0; i < numCourses; i++) { dfs(i); if (hasCycle) { steps.push({ current: i, adjacency, hasCycle, stack: [], visited: Array.from(visited), numCourses }); return steps; } } steps.push({ adjacency, hasCycle, stack: [...stack], visited: Array.from(visited), numCourses }); return steps; }; <file_sep>import { Point } from '../../commons/point'; import { helperStyle } from '../../dp/_commons/styles'; import createDPTable, { getIndices } from './algo'; const startPoint: Point = { row: 1, col: 1, }; interface TableSize { rows: number; cols: number; } const getTableSize = (nums: number[]): TableSize => ({ rows: 2, cols: nums.length + 1 }); const createTableMatrix = (nums: number[]): (number | string)[][] => { const { rows, cols } = getTableSize(nums); const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); table[0][0] = "index" table[1][0] = "value" for (let col = 1; col < cols; col++) { table[0][col] = col - 1; } for (let col = 1; col < cols; col++) { table[1][col] = nums[col - 1]; } return table; }; const createComparedTable = (nums: number[]): (number | string)[][] => createDPTable(nums); const addHelperStyles = (styles: React.CSSProperties[][], point: Point): void => { styles[0][0] = { backgroundColor: "lightgray" }; styles[1][0] = { backgroundColor: "lightgray" }; styles[1][point.col] = helperStyle; }; const createTableStyles = (nums: number[]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(nums); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint); return table; }; const createButtons = (): string[] => { return ["Add To HashTable", "Gotcha"]; }; const createButtonsStyles = (nums: number[]): (React.CSSProperties)[] => { return createButtons().map(() => ({ color: 'back', paddingLeft: "6px", paddingRight: "6px" })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, getIndices, }; <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; export const enabledSphereColor = "lightgreen"; export const normalSphereColor = "yellow"; export const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); export const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } export const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); export const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; export const lineMaterial = new THREE.LineBasicMaterial({ color: "gold" }); export const center = { x: -9, y: 7, z: 0 }; export const depthTreeCenter = { x: 9, y: 7, z: 0 }; export const yDistance = 3; export const duration = 
0; export const xAxisAplha = 2; <file_sep>export const title = "Path Sum"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function hasPathSum(root: TreeNode | null, targetSum: number): boolean { function dfs(node: TreeNode | null, num: number): boolean { if (node === null) { return false; } if (isLeaf(node) && node.val + num === targetSum) { return true; } return dfs(node.left, node.val + num) || dfs(node.right, node.val + num); } return dfs(root, 0); }; function isLeaf(node: TreeNode) { return !node.left && !node.right; };`; export const description = ` Given the **root** of a binary tree and an integer **targetSum**, return **true** if the tree has a ***root-to-leaf*** path such that adding up all the values along the path equals **targetSum**. A **leaf** is a node with no children. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Convert Sorted List to Binary Search Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function sortedListToBST(head: ListNode | null): TreeNode | null { function preorder(left: ListNode | null, right: ListNode | null): TreeNode | null { if (left === right) { return null; } const mid = findMedian(left, right); const node = new TreeNode(mid.val); node.left = preorder(left, mid); node.right = preorder(mid.next, right); return node; } function findMedian(left: ListNode | null, right: ListNode | null): ListNode | null { let slow = left; let fast = left; while (fast !== right && fast.next !== right) { fast = fast.next; fast = fast.next; slow = slow.next; } return slow; } return preorder(head, null); };`; export const description = ` Given the **head** of a singly linked list where elements are sorted in **ascending order**, convert it to a **height-balanced** binary search tree. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Wildcard Matching"; export const formula = ` if (p === '?' || p === t) { table[row][col] = table[row - 1][col - 1]; } else if (p === '*') { table[row][col] = table[row][col - 1] || table[row - 1][col]; } else { table[row][col] = false; }`; export const description = ` Given an input string (s) and a pattern (p), implement wildcard pattern matching with support for '?' and '*'. - '?' Matches any single character. - '*' Matches any sequence of characters (including the empty sequence). The matching should cover the ***entire*** input string (not partial). `; export const usecases = ''; export const example = ` Empty string: | STRING | PATTERN | MATCH | |:------:|:-------:|:-----:| | "" | "*" | True | | "" | "?" | False | One Character: | STRING | PATTERN | MATCH | |:------:|:-------:|:-----:| | "a" | "?" | True | | "a" | "*" | True | Two Characters: | STRING | PATTERN | MATCH | |:------:|:-------:|:-----:| | "ab" | "*" | True | | "ab" | "?" 
| False | | "ab" | "??" | True | | "ab" | "**" | True | | "ab" | "?a" | False | | "ab" | "?b" | True | | "ab" | "???" | False | | "ab" | "***" | True | | "ab" | "a*b" | True | | "ab" | "a?b" | False | `; <file_sep>import createDpTable from "../algo"; const getLastCell = (table: boolean[][]): boolean => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("Wildcard-matching 1", () => { const pattern = "?"; const text = "a"; const table = createDpTable(pattern, text); const distance = getLastCell(table); expect(distance).toBe(true); }); test("Wildcard-matching 2", () => { const pattern = "a?"; const text = "ab"; const table = createDpTable(pattern, text); const distance = getLastCell(table); expect(distance).toBe(true); }); test("Wildcard-matching 3", () => { const pattern = "a*b"; const text = "ab"; const table = createDpTable(pattern, text); const distance = getLastCell(table); expect(distance).toBe(true); }); test("Wildcard-matching 4", () => { const pattern = "a?b"; const text = "ab"; const table = createDpTable(pattern, text); const distance = getLastCell(table); expect(distance).toBe(false); }); <file_sep>export const title = "String to Integer (atoi)"; export const formula = `function myAtoi(s: string): number { function isNumeric(str: string) { const num = parseInt(str); return !isNaN(num); } const max = Math.pow(2, 31) - 1; const min = Math.pow(-2, 31); let index = 0; while (s.charAt(index) === ' ') { index++; } let sign = 1; if (s.charAt(index) === "+" || s.charAt(index) === "-") { if (s.charAt(index) === "-") sign = -1; index++; } let num = 0; for (; index < s.length; index++) { if (!isNumeric(s.charAt(index))) return num * sign; if (num > ~~(max / 10)) return sign === 1 ? max : min; const digit: number = +s.charAt(index); if (num === ~~(max / 10)) { if (sign === 1 && digit > (max % 10)) { return max; } else if (sign === -1 && (digit * -1) < (min % 10)) { return min; } } num = num * 10 + digit; } return sign * num; };`; export const description = `#### Description Implement the **myAtoi(string s)** function, which converts a string to a 32-bit signed integer (similar to C/C++'s **atoi** function). The algorithm for **myAtoi(string s)** is as follows: Read in and ignore any leading whitespace. Check if the next character (if not already at the end of the string) is **'-'** or **'+'**. Read this character in if it is either. This determines if the final result is negative or positive respectively. Assume the result is positive if neither is present. Convert these digits into an integer (i.e. **"123" -> 123**, **"0032" -> 32**). If no digits were read, then the integer is **0**. Change the sign as necessary (from step 2). If the integer is out of the 32-bit signed integer range **[-2^31, 2^31 - 1]**, then clamp the integer so that it remains in the range. Specifically, integers less than **-2^31** should be clamped to **-2^31**, and integers greater than **2^31 - 1** should be clamped to **2^31 - 1**. Return the integer as the final result. #### Note: - Only the space character ' ' is considered a whitespace character. - Do not ignore any characters other than the leading whitespace or the rest of the string after the digits. 
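
#### Worked example

Tracing the algorithm on an illustrative input, **"   -42"** (chosen here only for the walkthrough):

1. Skip the three leading whitespace characters.
2. Read the **'-'**, so the sign is negative.
3. Read **'4'** (num = 4), then **'2'** (num = 42); the string ends.
4. Return sign * num = **-42**.

The 32-bit clamp range is **[-2147483648, 2147483647]**. Since Math.floor(2147483647 / 10) = 214748364, the overflow guard clamps as soon as num is greater than 214748364, or when num equals 214748364 and the next digit is greater than 7 (greater than 8 when the sign is negative).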
`;

export const solution = ``;

export const usecases = '';

export const examples = `
---

#### Example 1:

- Input: x = "121"
- Output: 121

#### Example 2:

- Input: x = " -121"
- Output: -121

#### Example 3:

- Input: x = " 123 alchemist"
- Output: 123
`;
<file_sep>export const title = "Two Sum";

export const formula = `
public int[] twoSum(int[] nums, int target) {
    Map<Integer, Integer> map = new HashMap<Integer, Integer>();
    for (int i=0; i <nums.length; i++) {
        if (map.containsKey(target - nums[i])) {
            return new int[]{ map.get(target - nums[i]), i };
        }
        map.put(nums[i], i);
    }
    return new int[0];
}
`;

export const description = `
Given an array of integers nums and an integer target, return indices of the two numbers such that they add up to target.

You may assume that each input would have exactly one solution, and you may not use the same element twice.

You can return the answer in any order.

\\- From Leetcode.
`;

export const usecases = '';

export const example = `
Example 1:

- Input: nums = [1,3,7,5], target = 12
- Output: [2,3]
- Explanation: Because nums[2] + nums[3] == 12, we return [2, 3].

Example 2:

- Input: nums = [3,6,1], target = 4
- Output: [0,2]

Example 3:

- Input: nums = [5,5,1], target = 10
- Output: [0,1]
`;
<file_sep>class DisjointSetNode {
    value: number;
    rank: number;
    parent: DisjointSetNode;

    constructor(value: number) {
        this.value = value;
        this.rank = 0;
        this.parent = this;
    }
}

export class DisjointSet {
    readonly map: Map<number, DisjointSetNode>;

    constructor() {
        this.map = new Map();
    }

    getRoots(): number[] {
        const set: Set<DisjointSetNode> = new Set();
        Array
            .from(this.map.values())
            .map(node => this.findRootByNode(node))
            .forEach(node => set.add(node));
        return Array.from(set).map(node => node.value);
    }

    countRoots(): number {
        const set: Set<DisjointSetNode> = new Set();
        Array
            .from(this.map.values())
            .map(node => this.findRootByNode(node))
            .forEach(node => set.add(node));
        return set.size;
    }

    compress() {
        Array.from(this.map.values()).forEach(node => this.findRootByNode(node))
    }

    union(a: number, b: number) {
        const rootA = this.findRootByValue(a);
        const rootB = this.findRootByValue(b);
        if (rootA === rootB) {
            return;
        }
        if (rootA.rank === rootB.rank) {
            rootA.rank += 1;
            rootB.parent = rootA;
        } else if (rootA.rank > rootB.rank) {
            rootB.parent = rootA;
        } else {
            rootA.parent = rootB;
        }
    }

    findRootByValue(value: number): DisjointSetNode {
        const node = this.getNode(value);
        return this.findRootByNode(node);
    }

    private findRootByNode(node: DisjointSetNode): DisjointSetNode {
        if (node.parent === node) {
            return node;
        }
        node.parent = this.findRootByNode(node.parent);
        return node.parent;
    }

    private getNode(value: number): DisjointSetNode {
        if (!this.map.has(value)) {
            this.map.set(value, new DisjointSetNode(value));
        }
        return this.map.get(value)!;
    }
}
<file_sep>export const formula = `
function levelOrder(root: TreeNode | null): number[][] {

    const result: number[][] = [];

    function dfs(node: TreeNode | null, level: number) {
        if (node === null) {
            return;
        }
        if (result.length === level) {
            result.push([]);
        }
        result[level].push(node.val);
        dfs(node.left, level + 1);
        dfs(node.right, level + 1);
    }

    dfs(root, 0);
    return result;
};`
<file_sep>export const title = 'Max Area of Island';

export const formula = `function maxAreaOfIsland(grid: number[][]): number {

    const inArea = (row: number, col: number): boolean => {
        return row >= 0 && row < grid.length && col >= 0 && col < grid[row].length;
    }

    const dfs = (row: number, col: number): number => {
        if (!inArea(row, col)) {
            return 0;
        }
        if (grid[row][col] !== 1) {
            return 
0; } grid[row][col] = 2; return 1 + dfs(row - 1, col) + dfs(row, col + 1) + dfs(row + 1, col) + dfs(row, col - 1); } let max = 0; for (let row = 0; row < grid.length; row++) { for (let col = 0; col < grid[row].length; col++) { max = Math.max(max, dfs(row, col)); } } return max; };`; export const description = ` You are given an **m x n** binary matrix **grid**. An island is a group of 1's (representing land) connected 4-directionally (horizontal or vertical.) You may assume all four edges of the grid are surrounded by water. The **area** of an island is the number of cells with a value **1** in the island. Return the **maximum** area of an island in grid. If there is no island, return 0. `; export const usecases = ''; export const example = ` `; <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; const nodeSize = { width: 1, height: 1, depth: 1 }; const nodeTextGeometryParameters: TextGeometryParameters = { font, size: 0.6, height: 0.1 }; export const nodeParams = { textMaterial: new THREE.MeshBasicMaterial({ color: "blue" }), textGeometryParameters: nodeTextGeometryParameters, cubeMaterial: new THREE.MeshBasicMaterial({ color: "white", opacity: 0, transparent: true }), cubeGeometry: new THREE.BoxGeometry(nodeSize.width, nodeSize.height, nodeSize.depth), initPosition: new THREE.Vector3(10, 2, -4), }; export const stackShellParams = { material: new THREE.MeshBasicMaterial({ color: "lightgrey", opacity: 0.2, transparent: true }), geometry: new THREE.BoxGeometry(nodeSize.width, nodeSize.height, nodeSize.depth), }; export const queuePosition = new THREE.Vector3(-7, 3, -4); export const stackPosition = new THREE.Vector3(-7, 0, -4); <file_sep>export const title = "Longest Substring Without Repeating Characters"; export const formula = `function lengthOfLongestSubstring(s: string): number { if (s.length === 0) return 0; let max = 1; let left = 0; const map: Map<string, number> = new Map(); for (let i = 0; i < s.length; i++) { const character = s.charAt(i); if (map.has(character)) { left = Math.max(left, map.get(character) + 1); } map.set(character, i); max = Math.max(max, i - left + 1); } return max; };`; export const description = ` Given a string **s**, find the length of the longest substring without repeating characters. \\- From Leetcode. `; export const usecases = ''; export const example = ` Example 1: - Input: s = "abcabcbb" - Output: 3 - Explanation: The answer is "abc", with the length of 3. Example 2: - Input: s = "bbbbb" - Output: 1 - Explanation: The answer is "b", with the length of 1. Example 3: - Input: s = "pwwkew" - Output: 3 - Explanation: The answer is "wke", with the length of 3. - Notice that the answer must be a substring, "pwke" is a subsequence and not a substring. `; <file_sep>export const title = "Minimum Depth of Binary Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? 
null : right) * } * } */ function minDepth(root: TreeNode | null): number { if(root === null) { return 0; } const left = minDepth(root.left); const right = minDepth(root.right); if(root.left === null) { return right + 1; } if(root.right === null) { return left + 1; } return Math.min(left, right) + 1; };`; export const description = ` Given a binary tree, find its minimum depth. The minimum depth is the number of nodes along the shortest path from the root node down to the nearest leaf node. **Note**: A leaf is a node with ***no*** children. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Palindrome Number"; export const formula = `function isPalindrome(x: number): boolean { if (x < 0 || (x % 10 === 0 && x !== 0)) { return false; } let reverted: number = 0; while (x > reverted) { reverted = reverted * 10 + x % 10; x = Math.floor(x / 10); } return x === reverted || x === Math.floor(reverted / 10); };`; export const description = `#### Description Given an integer x, return ***true*** if x is a palindrome, and ***false*** otherwise. `; export const solution = ``; export const usecases = ''; export const examples = ` --- #### Example 1: - Input: x = 121 - Output: true #### Example 2: - Input: x = -121 - Output: false #### Example 3: - Input: x = 20 - Output: false `; <file_sep>import { green } from '@mui/material/colors'; import { createTheme } from "@mui/material"; const theme = createTheme({ palette: { primary: { main: green[500], } }, typography: { fontFamily: [ "Circular", "-apple-system", "BlinkMacSystemFont", "Roboto", "Helvetica Neue", "sans-serif", "monospace" ].join(",") }, }); export default theme; <file_sep>export enum Action { RemoveSpace, AssignSign, NonDigitCharacter, BiggerThanMax, LessThanMin, Accumulate, NumMultiplySign } interface Data { index: number; sign: number; num: number; linesToHighlight: number[] } export interface Item { action: Action; data: Data; } export function myAtoi(s: string): Item[] { const items: Item[] = []; const max = Math.pow(2, 31) - 1; const min = Math.pow(-2, 31); function isNumeric(str: string) { const num = parseInt(str); return !isNaN(num); } let index = 0; let sign = 1; let num = 0; while (s.charAt(index) === ' ') { items.push({ data: { index, sign, num, linesToHighlight: [10] }, action: Action.RemoveSpace }); index++; } if (s.charAt(index) === "+" || s.charAt(index) === "-") { if (s.charAt(index) === "-") { sign = -1; } items.push({ data: { index, sign, num, linesToHighlight: [15] }, action: Action.AssignSign }); index++; } for (; index < s.length; index++) { if (!isNumeric(s.charAt(index))) { items.push({ data: { index, sign, num: num * sign, linesToHighlight: [23] }, action: Action.NonDigitCharacter }); return items; } const digit: number = +s.charAt(index); if (num > ~~(max / 10)) { const value = (sign === 1) ? max : min; const action = (sign === 1) ? 
Action.BiggerThanMax : Action.LessThanMin; items.push({ data: { index, sign, num: value, linesToHighlight: [25] }, action }); return items; } if (num === ~~(max / 10)) { if (sign === 1) { if (digit > (max % 10)) { items.push({ data: { index, sign, num: max, linesToHighlight: [30] }, action: Action.BiggerThanMax }); return items; } } else if (sign === -1) { if (digit * -1 < (min % 10)) { items.push({ data: { index, sign, num: min, linesToHighlight: [32] }, action: Action.LessThanMin }); return items; } } } num = num * 10 + digit; items.push({ data: { index, sign, num, linesToHighlight: [37] }, action: Action.Accumulate }); } items.push({ data: { index, sign, num: num * sign, linesToHighlight: [40] }, action: Action.NumMultiplySign }); return items; }; <file_sep>import { Collection } from '../_commons/collection'; export interface IQueue<T> extends Collection { enqueue(t: T): Promise<number>; dequeue(): Promise<T | undefined>; peek(): Promise<T | undefined>; } <file_sep>import { Point } from "../_commons/point"; export const getLastCell = (table: (string | boolean)[][]): Point => { const row = table.length - 1; const col = table[row].length - 1; return { row, col }; }; const booleanToString = (value: boolean | string): string => { if (value === "?") return value; return (value ? 'T' : 'F'); }; export const isMatch = ({ row, col }: Point, r: number, c: number) => (row === r && col === c); export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] => table.map(row => row.map(() => ({}))); export const updateTable = (table: (string | boolean)[][], point: Point, value: boolean | string): (string | boolean)[][] => table.map((row, rowIndex) => { return row.map((cell, colIndex) => isMatch(point, rowIndex, colIndex) ? booleanToString(value) : cell); }); export const nonCorrect = (comparedTable: (string | boolean)[][], { row, col }: Point, value: boolean): boolean => (comparedTable[row][col] !== value); export const isLastCell = (table: (string | boolean)[][], point: Point): boolean => { const { row, col } = getLastCell(table); return isMatch(point, row, col); }; export const getNextPoint = (table: (string | boolean)[][], { row, col }: Point): Point => (col === table[row].length - 1) ? 
{ row: row + 1, col: 2 } : { row, col: col + 1 }; export const isLastRow = (table: (string | boolean)[][], point: Point): boolean => { return point.row === table.length - 1; }; export const isSuccessCell = (table: (string | boolean)[][], point: Point): boolean => { if (!isLastRow(table, point)) { return false; } return table[point.row][point.col] === true || table[point.row][point.col] === 'T'; }; <file_sep>export const title = "Kth Largest Element in a Stream"; export const minHeap = `class MinHeap { private items: number[]; constructor() { this.items = []; } push(item: number) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last !== undefined) { this.items.unshift(last); this.bubbleDown(0) } return root; } size() { return this.items.length; } peek() { return this.items[0]; } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.items[index] < this.items[parentIndex]) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(target, index); this.bubbleDown(target); } } private shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.items[index]; const child = this.items[childIndex]; if (current === undefined || child === undefined) { return false; } return current > child; } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } }`; export const KthLargest = `class KthLargest { private k: number; private minHeap: MinHeap; constructor(k: number, nums: number[]) { this.k = k; this.minHeap = new MinHeap(); nums.forEach(num => { this.add(num); }); } add(val: number): number { this.minHeap.push(val); if (this.minHeap.size() > this.k) { this.minHeap.pop(); } return this.minHeap.peek(); } }`; export const description = ` Design a class to find the **k^th** largest element in a stream. Note that it is the **k^th** largest element in the sorted order, not the **k^th** distinct element. Implement **KthLargest** class: - **KthLargest(int k, int[] nums)** Initializes the object with the integer **k** and the stream of integers **nums**. - **int add(int val)** Appends the integer **val** to the stream and returns the element representing the **k^th** largest element in the stream. 
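
A usage sketch of the class implemented on this page (the stream values below are chosen for illustration):

    const kthLargest = new KthLargest(3, [4, 5, 8, 2]); // the 3rd largest of [4, 5, 8, 2] is 4
    kthLargest.add(3);  // returns 4
    kthLargest.add(5);  // returns 5
    kthLargest.add(10); // returns 5
    kthLargest.add(9);  // returns 8
    kthLargest.add(4);  // returns 8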
`; export const solution = ``; export const usecases = ''; export const example = ``; /** * Your KthLargest object will be instantiated and called as such: * var obj = new KthLargest(k, nums) * var param_1 = obj.add(val) */<file_sep>export enum Action { Left, Right } export interface Item { action: Action; left: number; right: number; area: number; max: number; } export function maxArea(height: number[]): Item[] { const items: Item[] = []; let left = 0; let right = height.length - 1; let max = 0; while (left < right) { const area = Math.min(height[left], height[right]) * (right - left); max = Math.max(max, area); if (height[left] < height[right]) { items.push({ left, right, area, max, action: Action.Left }); left++; } else { items.push({ left, right, area, max, action: Action.Right }); right-- } } items.push({ left, right, area: 0, max, action: Action.Right }); return items; }; <file_sep>const max = Number.MAX_SAFE_INTEGER; const initTable = (array: number[]): number[][] => { const table: number[][] = []; for (let row = 0; row < array.length; row++) { table.push(Array(array.length).fill(max)); } table[0][0] = 0; return table; }; const createDPTable = (array: number[]): number[][] => { const table = initTable(array); for (let i = 0; i < table.length; i++) { for (let j = 0; j < i; j++) { const prev = j === 0 ? max : table[j - 1][i]; let last = (j === 0 ? 0 : table[j - 1][j]) + 1; if (j + array[j] < i) { last = max; } table[j][i] = Math.min(last, prev); } } return table; }; export default createDPTable; <file_sep>import { buildGraphology } from '../../../data-structures/graph/utils'; import { Graph } from "../../../data-structures/graph"; import forceAtlas2 from 'graphology-layout-forceatlas2'; export const layoutCalculator = <T>(graph: Graph<T>) => { const graphology = buildGraphology(graph); graphology.forEachNode((_, attributes) => { attributes.x = Math.random() * 15 - 4; // Set initial x position attributes.y = Math.random() * 10 - 2; // Set initial y position }); const sensibleSettings = forceAtlas2.inferSettings(graphology); return forceAtlas2(graphology, { iterations: 50, settings: sensibleSettings }); } <file_sep>import * as THREE from 'three'; import { Cube } from '../../../data-structures/_commons/cube/three/cube'; import { node, shell } from "./stackStyles"; class StackShellBuilder { private _scene: THREE.Scene; private _material: THREE.Material = shell.material; private _geometry: THREE.BoxGeometry = new THREE.BoxGeometry(node.size.width, node.size.height, node.size.depth); private _position: THREE.Vector3 = new THREE.Vector3(0, 0, 0); private _show: boolean = true; constructor(scene: THREE.Scene, show: boolean) { this._show = show; this._scene = scene; } position(x: number, y: number, z: number): StackShellBuilder { this._position = new THREE.Vector3(x, y, z); return this; } material(material: THREE.Material): StackShellBuilder { this._material = material; return this; } geometry(width: number, height: number, depth: number): StackShellBuilder { this._geometry = new THREE.BoxGeometry(width, height, depth); return this; } build(): Cube { const item = new Cube(this._geometry, this._material, this._scene); this.setPosition(item); if (this._show) { item.show(); } return item; } private setPosition(item: Cube): void { item.position.x = this._position.x; item.position.y = this._position.y; item.position.z = this._position.z; } } export default StackShellBuilder; <file_sep>import { buildPerfectBinaryTree } from "../../../data-structures/tree/nodes/utils/perfect-binary-tree"; import { 
xAxisAlpha } from "./styles"; export enum Direction { Left, Right, Back } interface Node { index: number; value: number; left?: Node; right?: Node; } export interface InputOutput { input: number[]; steps: Step[]; xAxis: number[]; } export interface Step { node?: Node; mid?: number; slow?: number; fast?: number; direction?: Direction; } const calDepth = (root?: Node): number => { if (root === undefined) return 0; const left = calDepth(root.left); const right = calDepth(root.right); return Math.max(right, left) + 1; } export function buildSteps(input: number[]): InputOutput { const steps: Step[] = []; function sortedListToBST(): Node | undefined { function preorder(left: number, right: number, index: number, direction?: Direction): Node | undefined { if (left === right) { return undefined; } const mid = findMedian(left, right); const node: Node = { index, value: input[mid] }; steps.push({ node, mid, direction }); node.left = preorder(left, mid, 2 * index + 1, Direction.Left); node.right = preorder(mid + 1, right, 2 * index + 2, Direction.Right); return node; } function findMedian(left: number, right: number): number { let slow = left; let fast = left; steps.push({ slow, fast, }); while (fast !== right && (fast + 1) !== right) { fast = fast + 1; fast = fast + 1; slow = slow + 1; steps.push({ slow, fast, }); } return slow; } return preorder(0, input.length, 0); }; const root = sortedListToBST(); const depth: number = calDepth(root); const xAxis: number[] = buildPerfectBinaryTree(depth, xAxisAlpha, 2).map(node => node.x); return { input, steps, xAxis }; } <file_sep>const cloneTable = (table: number[][]): number[][] => table.map(row => row.map(item => item)); const createDPTable = (table: number[][]): number[][] => { const result: number[][] = cloneTable(table); for (let col = 1; col < result[0].length; col++) { result[0][col] = result[0][col - 1] + result[0][col]; } for (let row = 1; row < result.length; row++) { result[row][0] = result[row - 1][0] + result[row][0]; } for (let row = 1; row < result.length; row++) { for (let col = 1; col < result[row].length; col++) { result[row][col] = result[row][col] + Math.min(result[row - 1][col], result[row][col - 1]); } } return result; }; export default createDPTable; <file_sep>export const title = "Implement Stack Using Queue"; export const formula = `class MyStack { private queue: number[]; constructor() { this.queue = []; } push(x: number): void { this.queue.push(x); const length = this.queue.length; for (let i = 0; i < length - 1; i++) { const item = this.queue.shift(); this.queue.push(item); } } pop(): number | undefined { return this.queue.shift(); } top(): number { return this.queue[0]; } empty(): boolean { return this.queue.length === 0; } }`; export const description = `Implement a last in first out (LIFO) stack using only one queue. The implemented stack should support all the functions of a normal stack. 
The functions are ***push***, ***pop***, ***top***, and ***empty***.`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import * as THREE from 'three'; import { Cube } from '../_commons/cube/three/cube'; import { TextCube } from '../_commons/cube/three/text-cube'; import { wait } from '../_commons/utils'; import { IStack } from './stack'; import { StackAlgo } from './stack-algo'; export class StackVis<T> implements IStack<TextCube<T>> { public duration: number; private shells: Cube[]; private stack: StackAlgo<TextCube<T>>; private position: THREE.Vector3; constructor( position: THREE.Vector3, duration?: number ) { this.duration = duration ? duration : 0; this.position = position; this.shells = []; this.stack = new StackAlgo(); } increaseShells(shell: Cube) { const { x, y, z } = this.position; shell.position.x = x + this.getShellsWidth(); shell.position.y = y; shell.position.z = z; this.shells.push(shell); } decreaseShells(): Cube | undefined { return this.shells.pop(); } emptyShells() { let item = this.shells.pop(); while (item) { item.hide(); item = this.shells.pop(); } } get shellsLength(): number { return this.shells.length; } async empty() { let item = await this.pop(); while (item) { item.hide(); item = await this.pop(); } } private getShellsWidth(): number { return this.shells.reduce((accumulator, current) => accumulator + current.width, 0) } async push(item: TextCube<T>): Promise<number> { await this.playPush(item); return this.stack.push(item); } private async playPush(item: TextCube<T>): Promise<void> { this.shiftNodesForPush(); const position = new THREE.Vector3(this.position.x, this.position.y, this.position.z); item.move(position, this.duration); await wait(this.duration); } async pop(): Promise<TextCube<T> | undefined> { await this.playPop(); return this.stack.pop();; } async peek(): Promise<TextCube<T> | undefined> { const item: TextCube<T> | undefined = await this.stack.peek(); await this.playPeek(item); return item; } async isEmpty(): Promise<boolean> { await this.playIsEmpty(); return this.stack.isEmpty(); } async size(): Promise<number> { await this.playSize(); return this.stack.size(); } private async shiftNodesForPush(): Promise<void> { const iterator = this.stack.iterator(); while (iterator.hasNext()) { const current = iterator.next(); const position = new THREE.Vector3(current.position.x + current.width, current.position.y, current.position.z); current.move(position, this.duration); } } private async playPop(): Promise<void> { const iterator = this.stack.iterator(); while (iterator.hasNext()) { const current = iterator.next(); const position = new THREE.Vector3(current.position.x - current.width, current.position.y, current.position.z); current.move(position, this.duration); } await wait(this.duration); } private playPeek(item: TextCube<T> | undefined): Promise<void> { return Promise.resolve(); } private playIsEmpty(): Promise<void> { return Promise.resolve(); } private playSize(): Promise<void> { return Promise.resolve(); } } <file_sep>export const title = "The Skyline Problem"; export const MaxHeap = `class MaxHeap { private heights: number[]; private map: Map<number, number[]>; constructor() { this.heights = []; this.map = new Map(); } peek() { return this.heights[0]; } push(height: number) { this.heights.push(height); this.addIndex(height, this.heights.length - 1); this.bubbleUp(this.heights.length - 1); } delete(height: number) { // check whether height in the heap const indices = this.map.get(height) || []; if 
(indices.length === 0) { return; } // find the height index const index = indices.pop(); // remove last from heap const lastIndex = this.heights.length - 1; const last = this.heights.pop(); this.deleteIndex(last, lastIndex); // check whether height is the last element in heap if (index === lastIndex) { return; } // delete height from index this.heights.splice(index, 1)[0]; this.deleteIndex(height, index); // add last to index this.heights.splice(index, 0, last); this.addIndex(last, index); // heapify this.heapify(); } heapify() { for (let i = Math.floor(this.heights.length / 2) - 1; i >= 0; i--) { this.bubbleDown(i); } } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.heights[index] > this.heights[parentIndex]) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.heights[index]; const child = this.heights[childIndex]; return (current === undefined || child === undefined) ? false : current < child; } private addIndex(height: number, index: number) { if (!this.map.has(height)) { this.map.set(height, []); } this.map.get(height).push(index); } private deleteIndex(height: number | undefined, index: number) { if (height === undefined) { return; } const indices = this.map.get(height) || []; const target = indices.indexOf(index); if (target > -1) { indices.splice(target, 1); } } private updateIndex(item: number, from: number, to: number) { this.deleteIndex(item, from); this.addIndex(item, to); } private swap(i: number, j: number) { this.updateIndex(this.heights[i], i, j); this.updateIndex(this.heights[j], j, i); [this.heights[i], this.heights[j]] = [this.heights[j], this.heights[i]]; } }`; export const getSkyline = `enum Edge { Start, End } type Item = { x: number; height: number; edge: Edge; } function getSkyline(buildings: number[][]): number[][] { const items: Item[] = []; buildings.forEach(building => { const [start, end, height] = building; items.push({ x: start, height, edge: Edge.Start }); items.push({ x: end, height, edge: Edge.End }); }); const compareFn = (a: Item, b: Item): number => { if (a.x !== b.x) { return a.x - b.x; } // if two starts are compared then higher building should be picked first if (a.edge === Edge.Start && b.edge === Edge.Start) { return b.height - a.height; } // if two ends are compared then lower building should be picked first if (a.edge === Edge.End && b.edge === Edge.End) { return a.height - b.height; } // if one start and one end are compared then start should be picked first return (a.edge === Edge.Start) ? 
-1 : 1; }; items.sort(compareFn); let prevMaxHeight = 0; const result: number[][] = []; const heap = new MaxHeap(); items.forEach(item => { const { x, height, edge } = item; if (edge === Edge.Start) { heap.push(height); } else { heap.delete(height); } const peek = heap.peek() || 0; if (prevMaxHeight !== peek) { result.push([x, peek]); prevMaxHeight = peek; } }); return result; };`; export const description = ` A city's **skyline** is the outer contour of the silhouette formed by all the buildings in that city when viewed from a distance. Given the locations and heights of all the buildings, return the ***skyline*** formed by these buildings collectively. The geometric information of each building is given in the array **buildings** where ***buildings[i] = [left, right, height]***: - left is the x coordinate of the left edge of the i^th building. - right is the x coordinate of the right edge of the i^th building. - height is the height of the i^th building. You may assume all buildings are perfect rectangles grounded on an absolutely flat surface at height 0. The **kyline** should be represented as a list of "key points" **sorted by their x-coordinate** in the form [[x1,y1],[x2,y2],...]. Each key point is the left endpoint of some horizontal segment in the skyline except the last point in the list, which always has a y-coordinate 0 and is used to mark the skyline's termination where the rightmost building ends. Any ground between the leftmost and rightmost buildings should be part of the skyline's contour. Note: There must be no consecutive horizontal lines of equal height in the output skyline. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>const createDPTable = (houses: number[]): number[] => { const table = houses.map(house => house); if (table.length < 2) { return table; } table[1] = Math.max(table[0], table[1]); for (let i = 2; i < table.length; i++) { table[i] = Math.max(table[i] + table[i - 2], table[i - 1]); } return table; }; export default createDPTable; <file_sep>import IDisplayer from "../params/displayer.interface"; export default class Displayer implements IDisplayer { private scene: THREE.Scene; private mesh: THREE.Object3D; constructor( scene: THREE.Scene, mesh: THREE.Object3D ) { this.scene = scene; this.mesh = mesh; } show(): void { this.scene.add(this.mesh); } hide(): void { this.scene.remove(this.mesh); } } <file_sep>import InOrderActions from "./inOrder"; import PreOrderActions from "./preOrder"; import PostOrderActions from "./postOrder"; export { InOrderActions, PreOrderActions, PostOrderActions }; <file_sep>export const title = "Container With Most Water"; export const formula = `function maxArea(heights: number[]): number { let left = 0; let right = heights.length - 1; let max = 0; while (left < right) { const height = Math.min(heights[left], heights[right]); const width = right - left; max = Math.max(max, height * width); if (heights[left] < heights[right]) { left++; } else { right-- } } return max; };`; export const description = ` You are given an integer array **height** of length **n**. There are **n** vertical lines drawn such that the two endpoints of the **i^th** line are **(i, 0)** and **(i, height[i])**. Find two lines that together with the x-axis form a container, such that the container contains the most water. *Return the maximum amount of water a container can store*. **Notice that:** you may ***not*** slant the container. 
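
For instance, with **height = [1,8,6,2,5,4,8,3,7]** (the first example on this page), the best container uses the lines at indices 1 and 8: its area is min(8, 7) * (8 - 1) = 7 * 7 = 49, and no other pair of lines does better.

The two-pointer approach shown on this page starts with the widest container and repeatedly moves the pointer at the shorter line inward: shrinking the width can only pay off if the limiting (shorter) height has a chance to increase.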
`; export const usecases = ''; export const example = ` Example 1: - Input: height = [1,8,6,2,5,4,8,3,7] - Output: 49 Example 2: - Input: height = [1,1] - Output: 1 `; <file_sep>export enum Status { INITIAL, ACTIVATED, PRINTED, FINISHED, } export default interface Props { index: number; status: Status; circleCx: number; circleCy: number; circleR: number; content: string; textX: number; textY: number; parent?: Props; left?: Props; right?: Props; goLeftEnabled: boolean; goRightEnabled: boolean; printValEnabled: boolean; returnToParentEnabled: boolean; } <file_sep>import { Point } from "../_commons/point"; export const nonCorrect = (currentPoint: Point, leftMax: number[], rightMax: number[], water: number[], value: number): boolean => { const { row, col } = currentPoint; if (row === 0) { return leftMax[col] !== value; } else if (row === 1) { return rightMax[col] !== value; } else { return water[col] !== value; } }; export const isLastCell = (currentPoint: Point, water: number[]): boolean => { const { row, col } = currentPoint; return row === 2 && col === water.length - 1; }; export const getNextPoint = ({ row, col }: Point, cols: number): Point => { if (row === 0) { if (col >= cols - 1) { return { row: 1, col: cols - 1 }; } else { return { row: 0, col: col + 1 }; } } else if (row === 1) { if (col === 0) { return { row: 2, col: 0 }; } else { return { row: 1, col: col - 1 }; } } else { return { row, col: col + 1 }; } }; <file_sep>import { green } from '@mui/material/colors'; import { createTheme } from "@mui/material"; const theme = createTheme({ palette: { primary: { main: green[500], } }, typography: { fontFamily: [ "Circular", "-apple-system", "BlinkMacSystemFont", "Roboto", "Helvetica Neue", "sans-serif", "monospace" ].join(",") }, components: { MuiTable: { styleOverrides: { root: { width: '80%', } } }, MuiTableCell: { styleOverrides: { root: { textAlign: 'center', borderBottom: 'none', '&:last-child': { // paddingRight: 0, }, }, body: { borderWidth: 1, borderStyle: 'solid', borderColor: "lightgray", color: "black", lineHeight: "5px", }, } } } }); export default theme; <file_sep>export const title = "Integer to Roman"; export const GreedySolution = `function intToRoman(num: number): string { const valueSymbols = [ [1000, "M"], [900, "CM"], [500, "D"], [400, "CD"], [100, "C"], [90, "XC"], [50, "L"], [40, "XL"], [10, "X"], [9, "IX"], [5, "V"], [4, "IV"], [1, "I"] ]; let roman = ""; for (const [value, symbol] of valueSymbols) { while (num >= value) { roman += symbol; num -= value; } } return roman; };`; export const HardcodeSolution = `function intToRoman(num: number): string { const thousands = ["", "M", "MM", "MMM"]; const hundreds = ["", "C", "CC", "CCC", "CD", "D", "DC", "DCC", "DCCC", "CM"]; const tens = ["", "X", "XX", "XXX", "XL", "L", "LX", "LXX", "LXXX", "XC"]; const ones = ["", "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX"]; const thousand = thousands[~~(num / 1000)]; const hundred = hundreds[~~(num % 1000 / 100)]; const ten = tens[~~(num % 100 / 10)]; const one = ones[num % 10]; return thousand + hundred + ten + one; };` export const description = `#### Description Roman numerals are represented by seven different symbols: **I**, **V**, **X**, **L**, **C**, **D** and **M**. | Symbol | Value | |:------:|------:| | I | 1 | | V | 5 | | X | 10 | | L | 50 | | C | 100 | | D | 500 | | M | 1000 | For example, **2** is written as **II** in Roman numeral, just two one's added together. **12** is written as **XII**, which is simply **X + II**. 
The number **27** is written as **XXVII**, which is **XX + V + II**.

Roman numerals are usually written largest to smallest from left to right. However, the numeral for four is not **IIII**. Instead, the number four is written as **IV**. Because the one is before the five, we subtract it, making four. The same principle applies to the number nine, which is written as **IX**.

There are six instances where subtraction is used:

- **I** can be placed before **V** (5) and **X** (10) to make 4 and 9.
- **X** can be placed before **L** (50) and **C** (100) to make 40 and 90.
- **C** can be placed before **D** (500) and **M** (1000) to make 400 and 900.

Given an integer, convert it to a roman numeral.

**Constraints:**

- 1 <= num <= 3999
`;

export const solution = ``;

export const examples = `
---

#### Example 1:

- Input: num = 3
- Output: "III"
- Explanation: 3 is represented as 3 ones.

#### Example 2:

- Input: num = 58
- Output: "LVIII"
- Explanation: L = 50, V = 5, III = 3.

#### Example 3:

- Input: num = 1994
- Output: "MCMXCIV"
- Explanation: M = 1000, CM = 900, XC = 90 and IV = 4.
`;
<file_sep>export const title = "Sliding Window Median";

export const description = `
The **median** is the middle value in an ordered integer list. If the size of the list is even, there is no middle value, so the median is the mean of the two middle values.

- For example, if arr = [3,4,5], the median is 4.
- For example, if arr = [2,3,4,5], the median is (3 + 4) / 2 = 3.5.

You are given an integer array **nums** and an integer **k**. There is a sliding window of size **k** which is moving from the very left of the array to the very right. You can only see the **k** numbers in the window. Each time the sliding window moves right by one position.

Return the median array for each window in the original array.
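
For illustration, with **nums = [1,3,-1,-3,5,3,6,7]** and **k = 3**, the expected output is **[1, -1, -1, 3, 5, 6]**:

| Window | Sorted window | Median |
|:------------:|:-------------:|:------:|
| [1, 3, -1] | [-1, 1, 3] | 1 |
| [3, -1, -3] | [-3, -1, 3] | -1 |
| [-1, -3, 5] | [-3, -1, 5] | -1 |
| [-3, 5, 3] | [-3, 3, 5] | 3 |
| [5, 3, 6] | [3, 5, 6] | 5 |
| [3, 6, 7] | [3, 6, 7] | 6 |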
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export interface RodCuttingItem { price: number; length: number; } <file_sep>import { Point } from "../_commons/point"; export const getLastCell = (table: (string | number)[][]): Point => { const row = table.length - 1; const col = table[row].length - 1; return { row, col }; }; export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] => table.map(row => row.map(() => ({}))); export const updateTable = (table: (string | number)[][], point: Point, value: number): (string | number)[][] => { const cloned = [...table]; cloned[1][point.col] = value; return cloned; }; export const nonCorrect = (comparedTable: (string | number)[][], { row, col }: Point, value: number): boolean => { return (comparedTable[row - 1][col - 1] !== value); } export const isLastCell = (table: (string | number)[][], point: Point): boolean => { const { row, col } = getLastCell(table); return row === 1 && point.col === col; }; export const getNextPoint = ({ row, col }: Point, inputArray: number[]): Point => { const length = inputArray[row - 1]; const lastCol = row - 1 + length + 1; if (col === inputArray.length) { return { row: row + 1, col: row + 2 }; } if (col === lastCol) { return { row: row + 1, col: row + 2 }; } else { return { row, col: col + 1 }; } }; <file_sep>const createDPTable = (nums: number[]): number[][] => { const rows = 2; const cols = nums.length; const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); for (let col = 0; col < cols; col++) { table[0][col] = nums[col]; table[1][col] = col; } return table; }; export const getIndices = (nums: number[], target: number): number[] => { const map = new Map(); for (let i = 0; i < nums.length; i++) { if (map.has(target - nums[i])) { return [map.get(target - nums[i]), i] } map.set(nums[i], i); } return []; } export default createDPTable; <file_sep>import helperColor from '@mui/material/colors/green'; import helperColorSecondary from '@mui/material/colors/blue'; import helperColorThird from '@mui/material/colors/yellow'; import React from 'react'; export const helperStyle: React.CSSProperties = { backgroundColor: helperColor[100] }; export const helperStyleSecondary: React.CSSProperties = { backgroundColor: helperColorSecondary[100] }; export const helperStyleThird: React.CSSProperties = { backgroundColor: helperColorThird[200] }; export const errorStyle: React.CSSProperties = { background: "red" } <file_sep>import { buildPerfectBinaryTree } from "../../../data-structures/tree/nodes/utils/perfect-binary-tree"; import { xAxisAlpha } from "./styles"; export enum Direction { Left, Right, Back } interface Node { index: number; value: string; left?: Node; right?: Node; } export interface InputOutput { input: string[]; steps: Step[]; xAxis: number[]; } export interface Step { node: Node; left: number; mid: number; right: number; direction?: Direction; } const calDepth = (root?: Node): number => { if (root === undefined) return 0; const left = calDepth(root.left); const right = calDepth(root.right); return Math.max(right, left) + 1; } export function buildSteps(input: string[]): InputOutput { const steps: Step[] = []; function buildTree(values: string[], left: number, right: number, index: number, direction?: Direction): Node | undefined { if (left > right) { return undefined; } const mid = ~~((left + right) / 2); const node: Node = { index, value: values[mid] }; steps.push({ node, left, mid, right, direction }); node.left = 
buildTree(values, left, mid - 1, 2 * index + 1, Direction.Left); node.right = buildTree(values, mid + 1, right, 2 * index + 2, Direction.Right); return node; }; const root = buildTree(input, 0, input.length - 1, 0); const depth: number = calDepth(root); const xAxis: number[] = buildPerfectBinaryTree(depth, xAxisAlpha, 2).map(node => node.x); return { input, steps, xAxis }; } <file_sep>export const title = "Trapping Rain Water"; export const formula = ` const createLeftMax = (heights: number[]): number[] => { const result: number[] = new Array(heights.length).fill(0); let max = 0; for (let i = 1; i < heights.length; i++) { max = Math.max(max, heights[i - 1]); result[i] = max; } return result; } const createRightMax = (heights: number[]): number[] => { const result: number[] = new Array(heights.length).fill(0); let max = 0; for (let i = heights.length - 2; i >= 0; i--) { max = Math.max(max, heights[i + 1]); result[i] = max; } return result; } const trap = (heights: number[]): number[] => { const leftMax = createLeftMax(heights); const rightMax = createRightMax(heights); let waters = 0; for (let i = 0; i < heights.length; i++) { const waterLevel = Math.min(leftMax[i], rightMax[i]); const water = (waterLevel - heights[i]); if (water > 0) { waters += water; } } return waters; } `; export const description = ` Given n non-negative integers representing an elevation map where the width of each bar is 1, compute how much water it is able to trap after raining. `; export const usecases = ''; export const example = ` Example 1: - Input: arr[] = { 3, 0, 2 } - Output: 2 Example 2: - Input: arr[] = { 0, 1, 0, 2, 1 } - Output: 2 `; <file_sep># Sliding Window <file_sep>import { AbstractArray } from '../_commons/abstract-array'; import { IQueue } from './queue'; export default class Queue<T> extends AbstractArray<T> implements IQueue<T> { constructor() { super([]); } enqueue(element: T): Promise<number> { return Promise.resolve(this.elements.push(element)); } dequeue(): Promise<T | undefined> { return Promise.resolve(this.elements.shift()); } peek(): Promise<T | undefined> { return Promise.resolve(this.elements[0]); } } <file_sep>export enum Action { Add, Subtract } interface Roman { value: number; symbol: string; } interface Sum { previous: number; current: number; } export interface Item { action: Action; prev: Roman; current?: Roman; sum: Sum; } export function romanToInt(s: string): Item[] { const getValue = (ch: string): number => { switch (ch) { case 'I': return 1; case 'V': return 5; case 'X': return 10; case 'L': return 50; case 'C': return 100; case 'D': return 500; case 'M': return 1000; default: return 0; } } const items: Item[] = []; let sum = 0; let prev = getValue(s.charAt(0)); let prevRoman: Roman = { value: getValue(s.charAt(0)), symbol: s.charAt(0) } for (let i = 1; i < s.length; i++) { const symbol = s.charAt(i); const value = getValue(symbol); let action = Action.Add; const previousSum = sum; if (prev < value) { sum -= prev; action = Action.Subtract; } else { sum += prev; } const currentRoman: Roman = { value, symbol } items.push({ prev: prevRoman, current: currentRoman, sum: { previous: previousSum, current: sum }, action }); prev = value; prevRoman = currentRoman; } const previousSum = sum; sum += prev; items.push({ prev: prevRoman, sum: { previous: previousSum, current: sum }, action: Action.Add }); return items; }; <file_sep>export const title = "Course Schedule"; export const formula = `function canFinish(numCourses: number, prerequisites: number[][]): boolean { const adjacency: 
Map<number, number[]> = new Map(); for (let i = 0; i < numCourses; i++) { adjacency.set(i, []); } prerequisites.forEach(prerequisite => { const [a, b] = prerequisite; adjacency.get(b).push(a); }); const visited: Set<number> = new Set(); const dfs = (current: number): boolean => { if (visited.has(current)) { return false; } visited.add(current); const children = adjacency.get(current); for (let i = 0; i < children.length; i++) { const child = children[i]; if (!dfs(child)) { return false; } } visited.delete(current); adjacency.set(current, []); return true; } for (let i = 0; i < numCourses; i++) { if (!dfs(i)) return false; } return true; };`; export const description = ` There are a total of numCourses courses you have to take, labeled from 0 to numCourses - 1. You are given an array prerequisites where prerequisites[i] = [a_i, b_i] indicates that you must take course b_i first if you want to take course a_i. > For example, the pair [0, 1], indicates that to take course 0 you have to first take course 1. Return **true** if you can finish all courses. Otherwise, return **false**. `; export const tips = `This is a **Detect Cycle in a Directed Graph** problem. We could use **hashSet** to find whehter there is a cycle in a graph.`; export const usecases = ''; export const example = ``; <file_sep>export const title = "Range Sum Query - Mutable"; export const formula = `interface SegmentTreeNode { lower: number; upper: number; sum: number; left?: SegmentTreeNode; right?: SegmentTreeNode; } class NumArray { private root: SegmentTreeNode; constructor(nums: number[]) { this.root = this.buildTree(0, nums.length - 1, nums); } private buildTree(lower: number, upper: number, nums: number[]): SegmentTreeNode { if (lower === upper) { return { lower, upper, sum: nums[lower] }; } const mid = Math.floor((lower + upper) / 2); const left = this.buildTree(lower, mid, nums); const right = this.buildTree(mid + 1, upper, nums); return { lower, upper, sum: left.sum + right.sum, left, right }; } update(index: number, val: number): void { this.updateTree(this.root, index, val); } private updateTree(node: SegmentTreeNode, index: number, val: number) { if (node.lower === index && node.upper === index) { node.sum = val; return; } const mid = Math.floor((node.lower + node.upper) / 2); if (index <= mid) { this.updateTree(node.left, index, val); } else { this.updateTree(node.right, index, val); } node.sum = node.left.sum + node.right.sum; } sumRange(left: number, right: number): number { return this.querySum(this.root, left, right); } private querySum(node: SegmentTreeNode, left: number, right: number): number { if (node.lower === left && node.upper === right) { return node.sum; } const mid = Math.floor((node.lower + node.upper) / 2); if (right <= mid) { return this.querySum(node.left, left, right); } else if (left > mid) { return this.querySum(node.right, left, right); } else { return this.querySum(node.left, left, mid) + this.querySum(node.right, mid + 1, right); } } } /** * Your NumArray object will be instantiated and called as such: * var obj = new NumArray(nums) * obj.update(index,val) * var param_2 = obj.sumRange(left,right) */` export const description = ` Given an integer array **nums**, handle multiple queries of the following types: **Update** the value of an element in **nums**. Calculate the **sum** of the elements of **nums** between indices **left** and **right** inclusive where **left <= right**. Implement the NumArray class: - **NumArray(int[] nums)** Initializes the object with the integer array **nums**. 
- **void update(int index, int val)** Updates the value of nums[index] to be val. - **int sumRange(int left, int right)** Returns the sum of the elements of nums between indices left and right inclusive (i.e. nums[left] + nums[left + 1] + ... + nums[right]). `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>interface TableSize { rows: number; cols: number; } const getTableSize = (sequence: string): TableSize => { const rows = sequence.length; const cols = sequence.length; return { rows, cols }; }; export const createPalindromeBooleanTable = (sequence: string): boolean[][] => { const { rows, cols } = getTableSize(sequence); const table = Array(rows).fill(false).map(() => Array(cols).fill(false)); // length === 1; for (let i = 0; i < table.length; i += 1) { table[i][i] = true; } // length === 2; for (let i = 0; i < table.length - 1; i += 1) { if (sequence.charAt(i) === sequence.charAt(i + 1)) { table[i][i + 1] = true; } } // length > 2; for (let len = 3; len <= table.length; len += 1) { for (let i = 0; i + len <= table.length; i += 1) { const front = sequence.charAt(i); const end = sequence.charAt(i + len - 1); if (front === end) { table[i][i + len - 1] = table[i + 1][i + len - 2]; } } } return table; }; const createDPTable = (sequence: string): number[][] => { const booleanTable = createPalindromeBooleanTable(sequence); const { rows, cols } = getTableSize(sequence); const table = Array(rows).fill(0).map(() => Array(cols).fill(0)); // length === 1; for (let i = 0; i < table.length; i += 1) { table[i][i] = 0; } // length === 2; for (let i = 0; i < table.length - 1; i += 1) { if (sequence.charAt(i) !== sequence.charAt(i + 1)) { table[i][i + 1] = 1; } } // length > 2; for (let len = 3; len <= table.length; len += 1) { for (let row = 0; row + len <= table.length; row += 1) { const col = row + len - 1; if (!booleanTable[row][col]) { let min = sequence.length; for (let k = row; k < col; k += 1) { min = Math.min(min, table[row][k] + table[k + 1][col]); } table[row][col] = min + 1; } } } return table; }; export default createDPTable; <file_sep>import { Game } from "./commons/game"; import bubbleSort from "./sorting/bubble-sort/info"; import insertionSort from "./sorting/insertion-sort/info"; import selectionSort from "./sorting/selection-sort/info"; import editDistance from "./dp/edit-distance/info"; import coinChangeFewestNumber from "./dp/coin-change-fewest-number/info"; import coinChangeHowManyWays from "./dp/coin-change-how-many-ways/info"; import longestCommonSubsequence from "./dp/longest-common-subsequence/info"; import longestCommonSubstring from "./dp/longest-common-substring/info"; import isSubsequence from "./dp/is-subsequence/info"; import isSubstring from "./dp/is-substring/info"; import minimumPathSum from "./dp/minimum-path-sum/info"; import rodCuttingProblem from "./dp/rod-cutting-problem/info"; import wildcardMatching from "./dp/wildcard-matching/info"; import regularExpression from "./dp/regular-expression/info"; import wordBreak from './dp/word-break/info'; import knapsackProblem from "./dp/knapsack-problem/info"; import subsetSumProblem from "./dp/subset-sum-problem/info"; import minimumNumberOfJumpsToReachEnd from "./dp/minimum-number-of-jumps-to-reach-end/info"; import minimumNumberOfJumpsToReachEnd2 from "./dp/minimum-number-of-jumps-to-reach-end-ii/info"; import longestIncreasingSubsequence from "./dp/longest-increasing-subsequence/info"; import maximumSubarrayProblem from "./dp/maximum-subarray-problem/info"; import 
longestPalindromicSubsequence from "./dp/longest-palindromic-subsequence/info"; import longestPalindromicSubstring from "./dp/longest-palindromic-substring/info"; import palindromePartitioning from "./dp/palindrome-partitioning/info"; import houseRobber from "./dp/house-robber/info"; import eggDroppingProblem from "./dp/egg-dropping-problem/info"; import trappingRainWater from "./dp/trapping-rain-water/info"; import trappingRainWaterII from "./two-pointer/trapping-rain-water-ii/info"; import twoSum from "./hash-table/two-sum/info"; import binaryTreeInorderTraversal from "./tree/binary-tree-inorder-traversal/info"; import binaryTreePostorderTraversal from "./tree/binary-tree-postorder-traversal/info"; import binaryTreePreorderTraversal from "./tree/binary-tree-preorder-traversal/info"; import twoThreeTreeRedBlackTree from "./tree/two-three-tree_vs-red-black-tree/info"; import LRU from "./hash-table/lru-cache/info"; import validParentheses from "./stack/valid-parentheses/info"; import ImplementQueueUsingStacks from "./queue/implement-queue-using-stack/info"; import ImplementStackUsingQueues from "./stack/implement-stack-using-queues/info"; import ImplementStackUsingQueue from "./stack/implement-stack-using-queue/info"; import BasicCalculator from "./stack/basic-calculator/info"; import BasicCalculatorII from "./stack/basic-calculator-ii/info"; import LongestSubstringWithoutRepeatingCharacters from "./sliding-window/longest-substring-without-repeating-characters/info"; import ZigzagConversion from "./flip-direction/zigzag-conversion/info"; import PalindromeNumber from "./math/palindrome-number/info"; import ReverseInteger from "./math/reverse-integer/info"; import StringToIntegerAtoi from "./math/string-to-integer-atoi/info"; import ContainerWithMostWater from "./two-pointer/container-with-most-water/info"; import IntegerToRoman from "./greedy/integer-to-roman/info"; import RomanToInteger from "./math/roman-to-integer/info"; import MaximumDepthOfBinaryTree from "./tree/maximum-depth-of-binary-tree/info"; import MinimumDepthOfBinaryTree from "./tree/minimum-depth-of-binary-tree/info"; import RecoverBinarySearchTree from "./tree/recover-binary-search-tree/info"; import BalancedBinaryTree from "./tree/balanced-binary-tree/info"; import ValidateBinarySearchTree from "./tree/validate-binary-search-tree/info"; import SameTree from "./tree/same-tree/info"; import SymmetricTree from "./tree/symmetric-tree/info"; import PathSum from "./tree/path-sum/info"; import PathSumII from "./tree/path-sum-ii/info"; import BinaryTreeRightSideView from "./tree/binary-tree-right-side-view/info"; import SumRootToLeafNumbers from "./tree/sum-root-to-leaf-numbers/info"; import ConvertSortedArrayToBinarySearchTree from "./tree/convert-sorted-array-to-binary-search-tree/info"; import ConvertSortedListToBinarySearchTree from "./tree/convert-sorted-list-to-binary-search-tree/info"; import ConstructBinaryTreeFromPreorderAndInorderTraversal from "./tree/construct-binary-tree-from-preorder-and-inorder-traversal/info"; import ConstructBinaryTreeFromInorderAndPostorderTraversal from "./tree/construct-binary-tree-from-inorder-and-postorder-traversal/info"; import PopulatingNextRightPointersInEachNode from "./tree/populating-next-right-pointers-in-each-node/info"; import PopulatingNextRightPointersInEachNodeII from "./tree/populating-next-right-pointers-in-each-node-ii/info"; import LowestCommonAncestorOfaBinarySearchTree from "./tree/lowest-common-ancestor-of-a-binary-search-tree/info"; import LowestCommonAncestorOfaBinaryTree 
from "./tree/lowest-common-ancestor-of-a-binary-tree/info"; import KthSmallestElementInBST from "./tree/kth-smallest-element-in-a-bst/info"; import SumOfLeftLeaves from "./tree/sum-of-left-leaves/info"; import CountCompleteTreeNodes from "./tree/count-complete-tree-nodes/info"; import BinaryTreePaths from "./tree/binary-tree-paths/info"; import BinaryTreeLevelOrderTraversal from "./tree/binary-tree-level-order-traversal/info"; import BinaryTreeZigzagLevelOrderTraversal from "./tree/binary-tree-zigzag-level-order-traversal/info"; import BinaryTreeLevelOrderTraversalII from "./tree/binary-tree-level-order-traversal-ii/info"; import InvertBinaryTree from "./tree/invert-binary-tree/info"; import FlattenBinaryTreeToLinkedList from "./tree/flatten-binary-tree-to-linked-list/info"; import BinarySearchTreeIterator from "./tree/binary-search-tree-iterator/info"; import VerifyPreorderSerializationOfABinaryTree from "./tree/verify-preorder-serialization-of-a-binary-tree/info"; import SerializeAndDeserializeBinaryTree from "./tree/serialize-and-deserialize-binary-tree/info"; import KthLargestElementInAnArray from "./tree/kth-largest-element-in-an-array/info"; import TopKFrequentElements from "./tree/top-k-frequent-elements/info"; import SlidingWindowMaximum from "./queue/sliding-window-maximum/info"; import FindMedianFromDataStream from "./tree/find-median-from-data-stream/info"; import MergeKSortedLists from "./tree/merge-k-sorted-lists/info"; import KthSmallestElementInASortedMatrix from "./tree/kth-smallest-element-in-a-sorted-matrix/info"; import FindKPairsWithSmallestSums from "./tree/find-k-pairs-with-smallest-sums/info"; import UglyNumberII from "./tree/ugly-number-ii/info"; import TopKFrequentWords from "./tree/top-k-frequent-words/info"; import KthLargestElementInAStream from "./tree/kth-largest-element-in-a-stream/info"; import SlidingWindowMedian from "./tree/sliding-window-median/info"; import TheSkylineProblem from "./tree/the-skyline-problem/info"; import RangeSumQueryMutable from "./segment-tree/range-sum-query-mutable/info"; import CourseSchedule from "./graph/course-schedule/info"; import CourseScheduleII from "./graph/course-schedule-ii/info"; import NumberOfIslands from "./graph/number-of-islands/info"; import SurroundedRegions from "./graph/surrounded-regions/info"; import NumberOfProvinces from "./graph/number-of-provinces/info"; import MaxAreaOfIsland from "./graph/max-area-of-island/info"; import RedundantConnection from "./graph/redundant-connection/info"; import Sqrtx from "./binary-search/sqrtx/info"; import SearchInsertPosition from "./binary-search/search-insert-position/info"; import FirstBadVersion from "./binary-search/first-bad-version/info"; export const games: Game[] = [ editDistance, coinChangeFewestNumber, coinChangeHowManyWays, longestCommonSubsequence, longestCommonSubstring, isSubsequence, isSubstring, minimumPathSum, rodCuttingProblem, wildcardMatching, regularExpression, wordBreak, knapsackProblem, subsetSumProblem, minimumNumberOfJumpsToReachEnd, minimumNumberOfJumpsToReachEnd2, longestIncreasingSubsequence, maximumSubarrayProblem, longestPalindromicSubsequence, longestPalindromicSubstring, palindromePartitioning, houseRobber, eggDroppingProblem, trappingRainWater, trappingRainWaterII, twoSum, bubbleSort, selectionSort, insertionSort, LRU, validParentheses, ImplementQueueUsingStacks, ImplementStackUsingQueues, ImplementStackUsingQueue, BasicCalculator, BasicCalculatorII, LongestSubstringWithoutRepeatingCharacters, ZigzagConversion, PalindromeNumber, 
ReverseInteger, StringToIntegerAtoi, ContainerWithMostWater, IntegerToRoman, RomanToInteger, SlidingWindowMaximum, binaryTreeInorderTraversal, binaryTreePostorderTraversal, binaryTreePreorderTraversal, twoThreeTreeRedBlackTree, MaximumDepthOfBinaryTree, MinimumDepthOfBinaryTree, RecoverBinarySearchTree, BalancedBinaryTree, ValidateBinarySearchTree, ConvertSortedArrayToBinarySearchTree, ConvertSortedListToBinarySearchTree, SameTree, SymmetricTree, PathSum, PathSumII, BinaryTreeRightSideView, SumRootToLeafNumbers, PopulatingNextRightPointersInEachNode, PopulatingNextRightPointersInEachNodeII, ConstructBinaryTreeFromPreorderAndInorderTraversal, ConstructBinaryTreeFromInorderAndPostorderTraversal, LowestCommonAncestorOfaBinarySearchTree, LowestCommonAncestorOfaBinaryTree, KthSmallestElementInBST, SumOfLeftLeaves, CountCompleteTreeNodes, BinaryTreePaths, BinaryTreeLevelOrderTraversal, BinaryTreeZigzagLevelOrderTraversal, BinaryTreeLevelOrderTraversalII, InvertBinaryTree, FlattenBinaryTreeToLinkedList, BinarySearchTreeIterator, VerifyPreorderSerializationOfABinaryTree, SerializeAndDeserializeBinaryTree, KthLargestElementInAnArray, TopKFrequentElements, FindMedianFromDataStream, MergeKSortedLists, KthSmallestElementInASortedMatrix, FindKPairsWithSmallestSums, UglyNumberII, TopKFrequentWords, KthLargestElementInAStream, SlidingWindowMedian, TheSkylineProblem, RangeSumQueryMutable, CourseSchedule, CourseScheduleII, NumberOfIslands, SurroundedRegions, NumberOfProvinces, MaxAreaOfIsland, RedundantConnection, Sqrtx, SearchInsertPosition, FirstBadVersion, ]; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<any>; depth?: number; direction?: Direction; } export function buildSteps<T>(root?: TreeNode<T>): Step[] { const steps: Step[] = []; function maxDepth<T>(node?: TreeNode<T>, direction?: Direction, depth?: number): number { if (node === undefined) { return 0; } steps.push({ node, direction, depth }); const left = maxDepth(node.left, Direction.Left, depth); const right = maxDepth(node.right, Direction.Right, depth); const max = Math.max(left, right) + 1; steps.push({ node, direction: Direction.Back, depth: max }); return max; }; maxDepth(root); return steps; } <file_sep>export const title = "Longest Palindromic Substring"; export const formula = ` // length === 1; for (let i = 0; i < table.length; i++) { table[i][i] = true; } // length === 2; for (let i = 0; i < table.length - 1; i++) { if (sequence.charAt(i) === sequence.charAt(i + 1)) { table[i][i + 1] = true; } else { table[i][i + 1] = false; } } // length > 2; for (let len = 3; len <= table.length; len++) { for (let i = 0; i + len <= table.length; i++) { const front = sequence.charAt(i); const end = sequence.charAt(i + len - 1); if (front === end) { table[i][i + len - 1] = table[i + 1][i + len - 2]; } else { table[i][i + len - 1] = false; } } }`; export const description = `Given a string, find the length of longest palindromic substring.`; export const usecases = ''; export const example = ` ***Input***: "dbcdcd" ***Output***: 3 ***Note***: "cdc", "dcd" with length 3 `; <file_sep>import * as THREE from 'three'; export const text = { color: "green", size: 0.6, height: 0.1 }; export const node = { size: { width: 1, height: 1, depth: 1 }, } export const shell = { material: new THREE.MeshBasicMaterial({ color: "lightgrey", opacity: 0.2, transparent: true }), }; export const stackPosition = { name: new THREE.Vector3(-5.2, 7.0, -6), stack: new 
THREE.Vector3(-3, 7.1, -6), } export const StackNameStyles = { color: "orange", size: 0.4, height: 0.1 }; <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary } from '../_commons/styles'; const startPoint: Point = { row: 1, col: 2, }; interface TableSize { rows: number; cols: number; } const getTableSize = (array: number[]): TableSize => { const rows = 2; const cols = array.length + 1; return { rows, cols }; }; const createTableMatrix = (array: number[]): (number | string)[][] => { const { cols } = getTableSize(array); const fistRow: (string | number)[] = ['INPUT', ...array]; const secondRow: (string | number)[] = ['JUMPS', 0, ...new Array(cols - 2).fill("MAX")]; return [fistRow, secondRow]; }; const createComparedTable = (array: number[]): number[][] => createDPTable(array); const addHelperStyles = (styles: React.CSSProperties[][], point: Point, table: (string | number)[][]): void => { const length = Number(table[0][point.row]) styles[0][point.row] = helperStyleSecondary; styles[1][point.row] = helperStyleSecondary; for (let i = 1; i <= length; i++) { if (point.row + i < styles[0].length) { styles[0][point.row + i] = helperStyle; } } }; const createTableStyles = (array: number[], table: (string | number)[][]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(array); const styles = new Array(rows).fill(0).map(() => new Array(cols).fill({})); styles[startPoint.row][startPoint.col] = helperStyle; addHelperStyles(styles, startPoint, table); return styles; }; const max = Number.MAX_SAFE_INTEGER; const createButtons = (array: number[]): (string | number)[] => { const dpTable = createDPTable(array); const set = new Set<string | number>(); for (let row = 0; row < dpTable.length; row++) { for (let col = 0; col < dpTable[row].length; col++) { const value = dpTable[row][col]; set.add(value); } } set.delete(0); set.delete(max); return Array.from(set).sort(); }; const createButtonsStyles = (array: number[]): (React.CSSProperties)[] => { return createButtons(array).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, max }; <file_sep>export const createLeftMax = (heights: number[]): number[] => { const result: number[] = new Array(heights.length).fill(0); let max = 0; for (let i = 1; i < heights.length; i++) { max = Math.max(max, heights[i - 1]); result[i] = max; } return result; }; export const createRightMax = (heights: number[]): number[] => { const result: number[] = new Array(heights.length).fill(0); let max = 0; for (let i = heights.length - 2; i >= 0; i--) { max = Math.max(max, heights[i + 1]); result[i] = max; } return result; }; export class Coordinate { left: number; right: number; maxLeft: number; maxRight: number; constructor(left: number, right: number, maxLeft: number, maxRight: number) { this.left = left; this.right = right; this.maxLeft = maxLeft; this.maxRight = maxRight; } } export interface Guiders { directions: number[]; coordinates: Coordinate[]; waters: number[]; } export const createGuiders = (heights: number[]): Guiders => { const directions: number[] = []; const coordinates: Coordinate[] = []; const waters: number[] = []; let maxLeft = heights[0]; let maxRight = heights[heights.length - 1]; let left = 1; let right = heights.length - 2; let total = 0; coordinates.push(new Coordinate(left, right, maxLeft, maxRight)); waters.push(0); while (left <= right) { if (maxLeft > 
maxRight) { const min = maxRight; const water = min - heights[right]; if (water > 0) { total += water; } maxRight = Math.max(maxRight, heights[right]); right--; directions.push(1); } else { const min = maxLeft; const water = min - heights[left]; if (water > 0) { total += water; } maxLeft = Math.max(maxLeft, heights[left]); left++; directions.push(0); } coordinates.push(new Coordinate(left, right, maxLeft, maxRight)); waters.push(total); } return { directions, coordinates, waters }; }; const createDPTable = (heights: number[]): number[] => { const leftMax = createLeftMax(heights); const rightMax = createRightMax(heights); const waters: number[] = new Array(heights.length).fill(0); for (let i = 0; i < heights.length; i++) { const waterLevel = Math.min(leftMax[i], rightMax[i]); const water = (waterLevel - heights[i]); if (water > 0) { waters[i] = water; } } return waters; }; export default createDPTable; <file_sep>import * as THREE from "three"; import { OrbitControls } from "three/examples/jsm/controls/OrbitControls"; import { ArrowStyles, NodeStyles } from "./commons/styles"; import DoubleLinkedList from "./doubly-linked-list"; import DoubleLinkedListNode from "./doubly-linked-list/node"; import HashMap from "./hashmap"; import { toMemoryAddress } from "./commons/helpers"; import { Font, FontLoader } from "three/examples/jsm/loaders/FontLoader"; let memoryAddress = 2; const FONT_LOCATION = '/fonts/Roboto_Regular.json'; export default class LRUCache<T> { private DURATION: number = 1; private NODE_MARGIN: number = 8; private DOUBLY_LINKED_LIST_START_POSITION = new THREE.Vector3(-70, 43, 0); private DOUBLY_LINKED_LIST_ARROW_STYLES: ArrowStyles = { color: "#4caf50", headLength: 3, headWidth: 1.5 }; private DOUBLY_LINKED_LIST_NODE_STYLES = { color: "#4caf50", width: 10, height: 6, textGeometryParameters: { size: 2, height: 0.02 }, textColor: 'white' }; private HASH_MAP_BUCKETS = 3; private HASH_MAP_START_POSITION = new THREE.Vector3(-70, 25, 0); private HASH_MAP_ARROW_STYLES: ArrowStyles = { color: "rgb(217, 200, 159)", headLength: 3, headWidth: 1.5 }; private HASH_MAP_NODE_STYLES = { color: "rgb(217, 200, 159)", width: 10, height: 6, textGeometryParameters: { size: 2, height: 0.02 }, textColor: 'white' }; private CONTROLS_ROTATE_SPEEND = 0.5 private CONTROLS_MIN_DISTANCE = 100; private CONTROLS_MAX_DISTANCE = 300; private readonly capacity: number; private _currentSize: number; private scene: THREE.Scene; private camera: THREE.PerspectiveCamera; private renderer: THREE.WebGLRenderer; private doublyLinkedList?: DoubleLinkedList<T>; private hashMap?: HashMap<DoubleLinkedListNode<T>>; constructor(capacity: number) { this.capacity = capacity; this._currentSize = 0; this.renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true }); this.scene = this.createScene(); this.camera = this.createCamera(this.scene); this.registeGrid(this.scene); this.registeControls(this.camera, this.renderer); this.afterInit(this.scene); this.get = this.get.bind(this); this.render = this.render.bind(this); } private createScene(): THREE.Scene { const scene = new THREE.Scene(); scene.background = new THREE.Color("#f5f4ed"); return scene; } private createCamera(scene: THREE.Scene): THREE.PerspectiveCamera { const camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 10000); camera.position.set(0, 0, 95); scene.add(camera); return camera; } private registeGrid(scene: THREE.Scene) { const gridHelper = new THREE.GridHelper(2000, 100, "white", "white"); const material = gridHelper.material; 
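        // Note: in three.js a GridHelper inherits its material field from LineSegments, where it is
        // typed as Material | Material[]; the instanceof check below narrows it before touching opacity.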
if (material instanceof THREE.Material) { material.opacity = 0.95; material.transparent = true; } gridHelper.position.y = - 199; scene.add(gridHelper); } private afterInit(scene: THREE.Scene) { const self = this; new FontLoader().load(FONT_LOCATION, function (font) { self.doublyLinkedList = self.createDoublyLinkedList(font, scene); self.hashMap = self.createHashMap(font, scene); self.render(); }); } private createDoublyLinkedList(font: Font, scene: THREE.Scene) { return new DoubleLinkedList<T>( scene, this.NODE_MARGIN, this.DOUBLY_LINKED_LIST_START_POSITION, this.DOUBLY_LINKED_LIST_ARROW_STYLES, this.createDoublyLinkedListNodeStyles(font), this.render, this.DURATION ); } private createDoublyLinkedListNodeStyles(font: Font): NodeStyles { const textGeometryParameters = { ...this.DOUBLY_LINKED_LIST_NODE_STYLES.textGeometryParameters, font }; return { ...this.DOUBLY_LINKED_LIST_NODE_STYLES, textGeometryParameters }; } private createHashMap(font: Font, scene: THREE.Scene) { return new HashMap<DoubleLinkedListNode<T>>( scene, this.render, this.HASH_MAP_START_POSITION, this.DURATION, this.HASH_MAP_BUCKETS, this.HASH_MAP_ARROW_STYLES, this.createHashMapNodeStyles(font), ); } private createHashMapNodeStyles(font: Font): NodeStyles { const textGeometryParameters = { ...this.HASH_MAP_NODE_STYLES.textGeometryParameters, font }; return { ...this.HASH_MAP_NODE_STYLES, textGeometryParameters }; } render() { this.renderer.render(this.scene, this.camera); }; private registeControls(camera: THREE.Camera, renderer: THREE.Renderer): void { const controls = new OrbitControls(camera, renderer.domElement); controls.rotateSpeed = this.CONTROLS_ROTATE_SPEEND; controls.minDistance = this.CONTROLS_MIN_DISTANCE; controls.maxDistance = this.CONTROLS_MAX_DISTANCE; controls.update(); controls.addEventListener('change', () => this.render()); } async get(key: number): Promise<T | undefined> { if (!this.hashMap) { return undefined; } if (!this.doublyLinkedList) { return undefined; } const current = this.hashMap.get(key); if (current) { const node = await this.getDoubleLinkedListNode(key); if (node) { await this.doublyLinkedList.moveToHead(node); } return new Promise(resolve => resolve(current.data));; } } private async getDoubleLinkedListNode(key: number): Promise<DoubleLinkedListNode<T> | undefined> { if (!this.hashMap) { return undefined; } if (!this.doublyLinkedList) { return undefined; } return new Promise((resolve, reject) => { if (this.hashMap) { resolve(this.hashMap.get(key)); } else { reject() } }); } async put(key: number, data: T, display: string): Promise<number> { if (!this.hashMap) { return this.currentSize; } if (!this.doublyLinkedList) { return this.currentSize; } const current = this.hashMap.get(key); if (current) { const node = await this.getDoubleLinkedListNode(key); if (node) { await this.doublyLinkedList.moveToHead(node); } return this.currentSize; } if (this.currentSize === this.capacity) { const deleted = await (this.doublyLinkedList.deleteLast()); if (deleted) { await this.hashMap.delete(deleted.key); this._currentSize -= 1; } } this._currentSize += 1; const hashMapDisplay = toMemoryAddress(memoryAddress); const node = await this.doublyLinkedList.insertFirst(data, display, memoryAddress, key); this.hashMap.push(key, node, hashMapDisplay); memoryAddress += 1; return this.currentSize; } public resize(div: HTMLDivElement): void { this.resizeWithWidthAndHeight(div.clientWidth, window.innerHeight); }; private resizeWithWidthAndHeight(width: number, height: number): void { this.camera.aspect = width / 
height; this.camera.updateProjectionMatrix(); this.renderer.setSize(width, height); }; get currentSize(): number { return this._currentSize; } get domElement() { return this.renderer.domElement; } } <file_sep>import * as THREE from 'three'; import { Cube } from '../_commons/cube/three/cube'; import { TextCube } from '../_commons/cube/three/text-cube'; import { wait } from '../_commons/utils'; import { IQueue } from './queue'; import QueueAlgo from './queue-algo'; export default class QueueVis<T> implements IQueue<TextCube<T>> { public duration: number; private shells: Cube[]; private queue: QueueAlgo<TextCube<T>>; private position: THREE.Vector3; constructor( position: THREE.Vector3, duration?: number ) { this.duration = duration ? duration : 0; this.position = position; this.shells = []; this.queue = new QueueAlgo(); } increaseShells(shell: Cube) { const { x, y, z } = this.position; shell.position.x = x + this.getShellsLength(); shell.position.y = y; shell.position.z = z; this.shells.push(shell); } decreaseShells(): Cube | undefined { return this.shells.pop(); } emptyShells() { let item = this.shells.pop(); while (item) { item.hide(); item = this.shells.pop(); } } get shellsLength(): number { return this.shells.length; } async empty() { let item = await this.dequeue(); while (item) { item.hide(); item = await this.dequeue(); } } private getShellsLength(): number { return this.shells.reduce((accumulator, current) => accumulator + current.width, 0) } async enqueue(item: TextCube<T>): Promise<number> { await this.playEnqueue(item); return this.queue.enqueue(item); } async dequeue(): Promise<TextCube<T> | undefined> { await this.playDequeue(); return this.queue.dequeue(); } async peek(): Promise<TextCube<T> | undefined> { const item = await this.queue.peek(); if (item) { await this.playPeek(item); } return item; } async isEmpty(): Promise<boolean> { await this.playIsEmpty(); return this.queue.isEmpty(); } async size(): Promise<number> { await this.playSize(); return this.queue.size(); } private async playEnqueue(item: TextCube<T>): Promise<void> { const width = this.sumQueueWidth(this.queue); const nodeEndPosition = this.position.clone().setX(this.position.x + width); item.move(nodeEndPosition, this.duration); await wait(this.duration); } private sumQueueWidth(queue: QueueAlgo<Cube>): number { let result = 0; const iterator = queue.iterator(); while (iterator.hasNext()) { result += iterator.next().width; } return result; } private async playDequeue(): Promise<void> { const iterator = this.queue.iterator(); while (iterator.hasNext()) { const current = iterator.next(); const position = new THREE.Vector3(current.position.x - current.width, current.position.y, current.position.z); current.move(position, this.duration); } await wait(this.duration); } private playPeek(item: TextCube<T>): Promise<void> { return Promise.resolve(); } private playIsEmpty(): Promise<void> { return Promise.resolve(); } private playSize(): Promise<void> { return Promise.resolve(); } } <file_sep>export const title = "Kth Largest Element in an Array"; export const formula = `function findKthLargest(nums: number[], k: number): number { const heapify = () => { for (let i = Math.floor(nums.length / 2) - 1; i >= 0; i--) { bubbleDown(i); } } const bubbleDown = (index: number) => { let biggest = index; const leftIndex = 2 * index + 1; const rightIndex = 2 * index + 2; if (leftIndex < nums.length && nums[leftIndex] > nums[biggest]) { biggest = leftIndex; } if (rightIndex < nums.length && nums[rightIndex] > nums[biggest]) { biggest 
= rightIndex;
        }
        if (biggest === index) {
            return;
        }
        swap(biggest, index);
        bubbleDown(biggest);
    }

    const swap = (i: number, j: number) => {
        [nums[i], nums[j]] = [nums[j], nums[i]];
    }

    const pop = () => {
        const root = nums.shift();
        const last = nums.pop();
        if (last !== undefined) {
            nums.unshift(last);
            bubbleDown(0);
        }
        return root;
    }

    heapify();
    for (let i = 0; i < k - 1; i++) {
        pop();
    }
    return pop();
};`;

export const description = `
Given an integer array **nums** and an integer **k**, return the **kth** largest element in the array.

Note that it is the **kth** largest element in the sorted order, **not** the **kth** distinct element.

You must solve it in **O(n)** time complexity.
`;

export const solution = ``;

export const usecases = '';

export const example = `
---

**Example 1**:

- Input: nums = [1,2,3,6,5,4], k = 3
- Output: 4

**Example 2**:

- Input: nums = [7,4,1,1,2,2,2,3,4,5], k = 5
- Output: 3
`;
<file_sep>import { title } from "./contents";
import { Game } from "../../commons/game";
import Category from "../../commons/segments/category";
import Difficulty from "../../commons/segments/difficulty";

const info: Game = {
    name: title,
    path: "/algorithms/sliding-window-maximum",
    categories: [Category.Queue, Category.Deque],
    companies: [],
    difficulty: Difficulty.Hard,
    img: "/img/sliding-window-maximum.png",
    leetcodeId: 239
}

export default info;
<file_sep>import { Point } from '../../commons/point';

export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] =>
    table.map(row => row.map(() => ({})));

export const isCorrect = ({ col }: Point, results: number[], value: string): boolean => {
    if (col === Math.max(...results) + 1 && value.toLowerCase() === "gotcha") {
        return true;
    }
    if (col < Math.max(...results) + 1 && value.toLowerCase().includes("add")) {
        return true;
    }
    return false;
};

export const isLastCell = ({ col }: Point, results: number[]): boolean => {
    return col === Math.max(...results) + 1;
};
<file_sep>import { KnapSackItem } from './KnapsackItem';

const createDPTable = (items: KnapSackItem[], totalWeight: number): number[][] => {
    const rows = items.length + 1;
    const cols = totalWeight + 1;

    const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0));

    for (let row = 1; row < rows; row++) {
        const item = items[row - 1];
        for (let col = 1; col < cols; col++) {
            if (item.weight > col) {
                table[row][col] = table[row - 1][col];
            } else {
                table[row][col] = Math.max(
                    table[row - 1][col],
                    table[row - 1][col - item.weight] + item.value
                );
            }
        }
    }

    return table;
};

export default createDPTable;
<file_sep>export const title = 'Number of Islands';

export const formula = `function numIslands(grid: string[][]): number {

    const land = "1";
    const visited = "2";

    const inArea = (row: number, col: number): boolean => {
        return row >= 0 && row < grid.length && col >= 0 && col < grid[row].length;
    }

    const dfs = (row: number, col: number) => {
        if (!inArea(row, col)) {
            return;
        }
        if (grid[row][col] !== land) {
            return
        }
        grid[row][col] = visited;
        dfs(row - 1, col);
        dfs(row, col + 1);
        dfs(row + 1, col);
        dfs(row, col - 1);
    }

    let result = 0;
    for (let row = 0; row < grid.length; row++) {
        for (let col = 0; col < grid[row].length; col++) {
            if (grid[row][col] === land) {
                result += 1;
                dfs(row, col);
            }
        }
    }

    return result;
};`;

export const description = `
Given an m x n 2D binary grid **grid** which represents a map of '1's (land) and '0's (water), return the number of islands.

An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically.
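For example, the grid [["1","1","0"], ["0","1","0"], ["0","0","1"]] contains 2 islands: the connected group of '1's in the top-left corner and the single '1' in the bottom-right corner.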
You may assume all four edges of the grid are all surrounded by water. `; export const usecases = ''; export const example = ` `; <file_sep>export const title = "Palindrome Partitioning"; export const formula = ` const createPalindromeBooleanTable = (sequence: string): boolean[][] => { const rows = sequence.length; const cols = sequence.length; const table = Array(rows).fill(false).map(() => Array(cols).fill(false)); // length === 1; for (let i = 0; i < table.length; i += 1) { table[i][i] = true; } // length === 2; for (let i = 0; i < table.length - 1; i += 1) { if (sequence.charAt(i) === sequence.charAt(i + 1)) { table[i][i + 1] = true; } } // length > 2; for (let len = 3; len <= table.length; len += 1) { for (let i = 0; i + len <= table.length; i += 1) { const front = sequence.charAt(i); const end = sequence.charAt(i + len - 1); if (front === end) { table[i][i + len - 1] = table[i + 1][i + len - 2]; } } } return table; }; const createDPTable = (sequence: string): number[][] => { const booleanTable = createPalindromeBooleanTable(sequence); const rows = sequence.length; const cols = sequence.length; const table = Array(rows).fill(0).map(() => Array(cols).fill(0)); // length === 1; for (let i = 0; i < table.length; i += 1) { table[i][i] = 0; } // length === 2; for (let i = 0; i < table.length - 1; i += 1) { if (sequence.charAt(i) !== sequence.charAt(i + 1)) { table[i][i + 1] = 1; } } // length > 2; for (let len = 3; len <= table.length; len += 1) { for (let row = 0; row + len <= table.length; row += 1) { const col = row + len - 1; if (!booleanTable[row][col]) { let min = sequence.length; for (let k = row; k < col; k += 1) { min = Math.min(min, table[row][k] + table[k + 1][col]); if (min === 0) { break; // optimize } } table[row][col] = min + 1; } } } return table; }; function minCut(s: string): number { const table = createDPTable(s); return table[0][s.length - 1]; }; `; export const description = `Given a string s partition s such that every substring of the partition is a palindrome. Return the minimum cuts needed for a palindrome partitioning of s.`; export const usecases = ''; export const example = ` Example 1: - Input: "mammal" - Output: 2 - Explanation: The palindrome partitioning \\["m", "amma", "l"] could be produced using 2 cut. Example 2: - Input: "m" - Output: 0 `; <file_sep>import * as THREE from 'three'; import { buildPerfectBinaryTree } from "../../../data-structures/tree/nodes/utils/perfect-binary-tree"; import TreeNode from "../../../data-structures/tree/nodes/v1/node"; import { buildTreeNode, lineMaterial, yDistance, xCenter } from "./styles"; import Position from '../../../data-structures/_commons/params/position.interface'; import { calDepth } from './depth'; export enum Direction { Left, Right } export interface Step { preorderLeft: number; preorderRight: number; inorderLeft: number; inorderRight: number; inorderRootIndex: number; leftTreeLength: number; parent?: TreeNode<number>; direction?: Direction; node: TreeNode<number>; } export interface InputOutput { preorder: number[]; inorder: number[]; steps: Step[]; xAxis: number[]; tree: TreeNode<number>[]; } const startPosition: THREE.Vector3 = new THREE.Vector3(xCenter, 11, 0); export function buildTree(preorder: number[], inorder: number[], scene: THREE.Scene): InputOutput { const depth: number = calDepth(preorder, inorder); const xAxisAlpha = 2; // expend the tree size in xAxis. const xAxis: number[] = buildPerfectBinaryTree(depth, xAxisAlpha, 2).map(node => node.x); const xAlpha = (xAxis.length === 0) ? 
0 : xCenter - xAxis[0]; const steps: Step[] = []; const tree: TreeNode<number>[] = new Array(preorder.length); const inorderIndexMap = new Map<number, number>(); inorder.forEach((value, index) => inorderIndexMap.set(value, index)); const buildMyTree = ( preorderLeft: number, preorderRight: number, inorderLeft: number, inorderRight: number, center: Position, index: number, parent?: TreeNode<number>, direction?: Direction, ): TreeNode<number> | undefined => { if (preorderLeft > preorderRight) { return undefined; } const inorderRootIndex: number = inorderIndexMap.get(preorder[preorderLeft])!; const leftTreeLength = inorderRootIndex - inorderLeft; const root = buildTreeNode(preorder[preorderLeft], scene, center); root.index = index; root.show(); tree[preorderLeft] = root; const { y, z } = root.val.center; steps.push({ preorderLeft, preorderRight, inorderLeft, inorderRight, inorderRootIndex, leftTreeLength, node: root, parent, direction }); const leftPosition = { x: xAxis[root.leftChildIndex] + xAlpha, y: y + yDistance, z }; const left = buildMyTree(preorderLeft + 1, preorderLeft + leftTreeLength, inorderLeft, inorderRootIndex - 1, leftPosition, root.leftChildIndex, root, Direction.Left); if (left) { root.setLeft(left, leftPosition, lineMaterial, 0, scene); } const rightPosition = { x: xAxis[root.rightChildIndex] + xAlpha, y: y + yDistance, z }; const right = buildMyTree(preorderLeft + leftTreeLength + 1, preorderRight, inorderRootIndex + 1, inorderRight, rightPosition, root.rightChildIndex, root, Direction.Right); if (right) { root.setRight(right, rightPosition, lineMaterial, 0, scene); } return root; } buildMyTree(0, preorder.length - 1, 0, inorder.length - 1, startPosition, 0); updateTreeColor(tree, steps[0]); return { preorder, inorder, steps, xAxis, tree }; }; export function updateTreeColor(tree: TreeNode<any>[], step?: Step) { tree.map((node, i) => updateTreeNodeColor(node, i, step)); } function updateTreeNodeColor(node: TreeNode<any>, i: number, step?: Step) { if (!step) { return; } const { preorderLeft, preorderRight, leftTreeLength } = step; if (i === preorderLeft) { node.sphereColor = "lightgreen"; } else if (i > preorderLeft && i <= preorderLeft + leftTreeLength) { node.sphereColor = "yellow"; } else if (i > preorderLeft + leftTreeLength && i <= preorderRight) { node.sphereColor = "lightblue"; } else { node.sphereColor = "lightgray"; } } <file_sep>import { Point } from "../_commons/point"; export const max = Number.MAX_SAFE_INTEGER; export const isMatch = ({ col }: Point, r: number, c: number) => { return (r === 2 && col + 1 === c); }; const getValue = (value: number) => (value === max) ? 'x' : value; export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] => table.map(row => row.map(() => ({}))); export const updateTable = (table: (string | number)[][], point: Point, value: number): (string | number)[][] => table.map((row, rowIndex) => { return row.map((cell, colIndex) => isMatch(point, rowIndex, colIndex) ? 
getValue(value) : cell); }); export const nonCorrect = (comparedTable: (string | number)[][], { row, col }: Point, value: number): boolean => (comparedTable[row][col] !== value); export const isLastCell = (table: (string | number)[][], { row, col }: Point): boolean => { const colLength = table[1].length; return row === colLength - 3 && col === colLength - 2; }; export const getNextPoint = ({ row, col }: Point): Point => { if (row + 1 === col) { return { row: 0, col: col + 1 }; } else { return { row: row + 1, col }; } }; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<any>; depth?: number; direction?: Direction; } export function buildSteps<T>(root?: TreeNode<T>): Step[] { const steps: Step[] = []; function height<T>(node?: TreeNode<T>, direction?: Direction, depth?: number): number { if (node === undefined) { return 0; } steps.push({ node, direction, depth }); const left = height(node.left, Direction.Left, depth); if (left === -1) { steps.push({ node, direction: Direction.Back, depth: -1 }); return -1; } const right = height(node.right, Direction.Right, depth); if (right === -1) { steps.push({ node, direction: Direction.Back, depth: -1 }); return -1; } const different = Math.abs(right - left); if (different > 1) { steps.push({ node, direction: Direction.Back, depth: -1 }); return -1; } const max = Math.max(left, right) + 1; steps.push({ node, direction: Direction.Back, depth: max }); return max; }; height(root); return steps; } <file_sep>export interface Range { left: number; right: number; } export interface LeftProps { value: number; show: boolean; } export interface MaxProps { value: number; show: boolean; } export interface Compared { chars: string[]; indices: number[]; lefts: LeftProps[]; maxs: MaxProps[]; ranges: Range[]; maps: (Map<string, number>)[]; } export const emptyCompared: Compared = { chars: [], indices: [], lefts: [], maxs: [], ranges: [], maps: [] } export const buildCompared = (input: string): Compared => { const chars: string[] = []; const indices: number[] = []; const lefts: LeftProps[] = []; const maxs: MaxProps[] = []; const ranges: Range[] = []; const maps: (Map<string, number>)[] = []; let max = 1; let left = 0; const map: Map<string, number> = new Map(); for (let i = 0; i < input.length; i++) { indices.push(i); const character = input.charAt(i); chars.push(character); const mapValue: number | undefined = map.get(character); if (mapValue !== undefined) { left = Math.max(left, mapValue + 1); } lefts.push({ value: left, show: false }); map.set(character, i); maps.push(new Map(map)); const newMax = i - left + 1; if (newMax > max) { max = newMax; ranges.push({ left, right: i }); } else { if (ranges.length === 0) { ranges.push({ left: 0, right: 0 }); } else { const previous = ranges[ranges.length - 1]; ranges.push(previous); } } max = Math.max(max, i - left + 1); maxs.push({ value: max, show: false }); } return { chars, indices, lefts, maxs, ranges, maps }; } <file_sep>import * as THREE from 'three'; import TreeNode from "./node"; import TextSphere from '../../../_commons/sphere/three/text-sphere'; import Position from '../../../_commons/params/position.interface'; import { TreeNodeProps } from '../../heap/props'; const calTextX = <T>(value: T, x: number): number => { const length: number = (value as any).toString().length; switch (length) { case 0: return x; case 1: return x - 0.3; case 2: return x - 0.6; case 3: return x - 0.8; default: return x - 1; } } 
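// calTextX keeps the text label visually centered on its sphere by shifting it left as the
// rendered value gets longer; for example (illustrative values only), calTextX(42, 5) yields
// 4.4, because a two-character value is shifted left by 0.6.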
export const buildNode = <T>( treeNodeProps: TreeNodeProps, value: T, scene: THREE.Scene, center: Position, index?: number ) => { const { sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters } = treeNodeProps; const textSphere = new TextSphere<T>( value, sphereGeometry, sphereMaterial(), textMaterial, textGeometryParameters, scene ); const { x, y, z } = center; textSphere.center.x = x; textSphere.center.y = y; textSphere.center.z = z; textSphere.textPosition.x = calTextX(value, x); textSphere.textPosition.y = y - 0.4; textSphere.textPosition.z = z; const node = new TreeNode<T>(textSphere, index); return node; } <file_sep>export interface Comparable { compareTo(other: Comparable): number; } interface Heap<T extends Comparable | string | number> { /** * This method is used to insert a new element into the heap. * The new element is inserted at the bottom of the heap and * is then moved upwards to its appropriate position to maintain the heap property. */ push(item: T): Promise<void>; /** * This method is used to remove the root element from the heap. * After the root is removed, the last element in the heap is moved to the root position, * and it is then moved downwards to its appropriate position to maintain the heap property. */ pop(): Promise<T | undefined>; /** * This method is used to return the root element of the heap without removing it. * It is often used to check the highest-priority element in the heap. */ peek(): Promise<T | undefined>; /** * This method is used to return the number of elements in the heap. */ size(): Promise<number>; /** * This method is used to check if the heap is empty or not. */ isEmpty(): Promise<boolean>; /** * This method is used to create a heap from a given list of elements. * It takes O(n) time, where n is the number of elements in the list. */ buildHeap(items: T[]): Promise<void>; /** * This method is used to remove all items from the heap, effectively emptying the heap. * It sets the internal array of items to an empty array, thus removing all references to the items that were previously stored in the heap. * This can be useful when you want to reuse a heap for a different set of items or when you want to release memory used by the heap. */ clear(): Promise<void>; /** * Lazy deleted items */ deleted: Map<T, number>; items(): T[]; } export default Heap; <file_sep>import { Point } from "../_commons/point"; const isMatch = ({ row, col }: Point, r: number, c: number) => (row === r && col === c); export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] => table.map(row => row.map(() => ({}))); export const updateTable = (table: (string | number)[][], point: Point, value: number | string): (string | number)[][] => table.map((row, rowIndex) => { return row.map((cell, colIndex) => isMatch(point, rowIndex, colIndex) ? value : cell); }); export const nonCorrect = (comparedTable: (string | number)[][], { row, col }: Point, value: number): boolean => (comparedTable[row][col] !== value); export const isLastCell = (table: (string | number)[][], { row, col }: Point): boolean => { return row === 2 && col === table[row].length - 1; }; export const getNext = (table: (string | number)[][], { row, col }: Point, length: number) => { const isNextLen = row + length === table.length; const nextLength = isNextLen ? length + 1 : length; const nextRow = isNextLen ? 2 : row + 1; const nextCol = isNextLen ? 
nextRow + nextLength - 1 : col + 1; return { row: nextRow, col: nextCol, length: nextLength }; }; <file_sep>import Position from "./position.interface"; export default interface Mover { move: (position: Position, duration: number, onUpdate?: () => void) => Promise<void>; } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; interface Node { index: number; left?: Node; right?: Node; } export enum Action { FindRight, Flatten, Display } export enum Direction { Left, Right, Back } export interface Step { node: Node; next?: Node; action: Action; } const buildTree = (node?: TreeNode<string>): Node | undefined => { if (!node) { return undefined; } const root: Node = { index: node.index }; root.left = buildTree(node.left); root.right = buildTree(node.right); return root; } export function buildTreeNodeMap(root?: TreeNode<string>): Map<number, TreeNode<string>> { const map: Map<number, TreeNode<string>> = new Map(); function dfs(node?: TreeNode<string>) { if (!node) { return; } map.set(node.index, node); dfs(node.left); dfs(node.right); } dfs(root); return map; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; function flatten(node?: Node): void { if (!node) { return; } steps.push({ node, action: Action.Display }); if (node.left) { const next = findRight(node, node.left); steps.push({ node, next, action: Action.Flatten }); next.right = node.right; node.right = node.left; node.left = undefined; } flatten(node.right); }; function findRight(root: Node, node: Node): Node { steps.push({ node: root, next: node, action: Action.FindRight }); if (!node.right) { return node; } return findRight(root, node.right); }; const node = buildTree(root); flatten(node); return steps; } <file_sep>export enum State { Input, Playing, Finished } <file_sep>import createDPTable, { createLeftMax, createRightMax, createGuiders } from './algo'; import { Point } from "../../dp/_commons/point"; import { helperStyle, helperStyleSecondary } from '../../dp/_commons/styles'; const startPoint: Point = { row: 0, col: 0, }; interface TableSize { rows: number; cols: number; } const getTableSize = (nums: number[]): TableSize => { return { rows: Math.max(...nums), cols: nums.length + 1 }; }; const createTableMatrix = (nums: number[]): (number | string)[][] => { const { rows, cols } = getTableSize(nums); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); for (let row = 0; row < rows; row++) { table[row][0] = rows - row; } return table; }; const addHelperStyles = (styles: React.CSSProperties[][], point: Point): void => { for (let col = 0; col < styles[0].length && col <= point.col; col++) { styles[0][col] = helperStyle; } for (let row = 0; row < styles.length && row <= point.row; row++) { styles[row][0] = helperStyle; } }; const createTableStyles = (nums: number[]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(nums); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); const dpTable = createDPTable(nums); for (let row = 0; row < rows; row++) { table[row][0] = { borderWidth: 0, border: 0 } } for (let col = 1; col < cols; col++) { const elevations = nums[col - 1]; for (let i = 0; i < elevations; i++) { table[rows - 1 - i][col] = { backgroundColor: "rgb(23,13,1)" }; } } for (let col = 1; col < cols; col++) { const elevations = nums[col - 1]; const wasters = dpTable[col - 1]; for (let i = 0; i < wasters; i++) { table[rows - 1 - elevations - i][col] = helperStyleSecondary; } } return table; }; const createButtons 
= (nums: number[]): string[] => { return ["--->", "<---"]; }; const createButtonsStyles = (nums: number[]): (React.CSSProperties)[] => { return createButtons(nums).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createTableStyles, createButtons, createButtonsStyles, createDPTable, createLeftMax, createRightMax, createGuiders, startPoint, }; <file_sep>export class Node { readonly index: number; // start from 1 readonly level: number; // start from 0 left?: Node; right?: Node; private _cx: number; constructor(index: number) { this.left = undefined; this.right = undefined; this.index = index; this.level = ~~Math.log2(index); this._cx = -1; } setLeft(): Node { this.left = new Node(2 * this.index); return this.left; } setRight(): Node { this.right = new Node(2 * this.index + 1); return this.right; } set cx(cx: number) { this._cx = cx; } get cx(): number { if (this.left && this.right) { return (this.left.cx + this.right.cx) / 2; } return this._cx; } } // Using Full_Binary_Tree to compute the cx of tree nodes. class Calculator { public readonly nodes: Map<number, Node>; private readonly root: Node; private readonly treeDepth: number; private readonly width: number; constructor(depth: number, width: number) { this.treeDepth = depth; this.width = width; this.nodes = new Map(); this.root = new Node(1); this.helper(this.root); } private helper(node: Node) { this.nodes.set(node.index, node); if (this.isLastLevel(node)) { this.setCXForNodeInLastRow(node); return; } this.helper(node.setLeft()); this.helper(node.setRight()); } private isLastLevel(node: Node): boolean { return node.level + 1 === this.treeDepth; } private setCXForNodeInLastRow(node: Node) { const countLastLevelNodes = Math.pow(2, node.level); const unit = this.width / (countLastLevelNodes + 1); node.cx = (node.index - countLastLevelNodes + 1) * unit; } } const builder = (depth: number, width: number): Map<number, Node> => { return new Calculator(depth, width).nodes; }; export default builder; <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import MaxHeap from '../../../data-structures/tree/heap/max-heap'; import MinHeap from '../../../data-structures/tree/heap/min-heap'; import { Props, TreeNodeProps } from '../../../data-structures/tree/heap/props'; import Position from '../../../data-structures/_commons/params/position.interface'; const enabledColor = "gold"; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; const cubeGeometry: THREE.BoxGeometry = new THREE.BoxGeometry(2, 1.5, 1.5); interface HeapPosition { arrayPosition: Position; treePosition: Position; } const buildHeapProps = ( initPosition: HeapPosition, heapPosition: HeapPosition, sphereMaterial: () => THREE.Material, cubeMaterial: () => THREE.Material, textMaterial: THREE.Material, lineMaterial: THREE.LineBasicMaterial, scene: THREE.Scene ): Props => { const treeInitDepth = 4; const treeNodeDistance = { x: 2.5, y: 2.5 }; const arrayNodeProps = { textMaterial, textGeometryParameters, cubeMaterial, cubeGeometry, initPosition: initPosition.arrayPosition, }; const treeNodeProps: TreeNodeProps = { sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters, enabledTreeNodeColor: enabledColor, initPosition: initPosition.treePosition, } const treeLineProps = { material: lineMaterial } const props: Props = 
{ arrayPosition: heapPosition.arrayPosition, arrayNodeProps, treePosition: heapPosition.treePosition, treeNodeProps, treeLineProps, treeNodeDistance, treeInitDepth, scene, duration: 1 } return props; } export const smallerHeapColor = "orange"; export const greaterHeapColor = "green"; export const buildSmaller = (scene: THREE.Scene): MaxHeap<number> => { return new MaxHeap(buildHeapProps( { arrayPosition: { x: 0, y: 0, z: 0 }, treePosition: { x: 0, y: 0, z: 0 } }, { arrayPosition: { x: -5, y: 10, z: 0 }, treePosition: { x: -10, y: -5, z: 0 } }, () => new THREE.MeshBasicMaterial({ color: smallerHeapColor, opacity: 0.6, transparent: true }), () => new THREE.MeshBasicMaterial({ color: smallerHeapColor, opacity: 0.5, transparent: true }), new THREE.MeshBasicMaterial({ color: "#000" }), new THREE.LineBasicMaterial({ color: smallerHeapColor }), scene )); } export const buildGreater = (scene: THREE.Scene): MinHeap<number> => { return new MinHeap(buildHeapProps( { arrayPosition: { x: 0, y: 0, z: 0 }, treePosition: { x: 0, y: 0, z: 0 } }, { arrayPosition: { x: 15, y: 10, z: 0 }, treePosition: { x: 10, y: -5, z: 0 } }, () => new THREE.MeshBasicMaterial({ color: greaterHeapColor, opacity: 0.6, transparent: true }), () => new THREE.MeshBasicMaterial({ color: greaterHeapColor, opacity: 0.5, transparent: true }), new THREE.MeshBasicMaterial({ color: "#000" }), new THREE.LineBasicMaterial({ color: greaterHeapColor }), scene )); } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<number>; sum: number; isLeaf: boolean; total: number; leaves: number[]; } function isLeaf(node: TreeNode<number>) { return !node.left && !node.right; } export function buildSteps(root?: TreeNode<number>): Step[] { const steps: Step[] = []; let total = 0; const leaves: number[] = []; function sumNumbers(node: TreeNode<number> | undefined, num: number) { if (!node) { return; } const sum = num * 10 + node.val.value; if (isLeaf(node)) { total += sum; leaves.push(sum); steps.push({ node, sum, total, isLeaf: true, leaves: [...leaves] }); return; } steps.push({ node, sum, total, isLeaf: false, leaves: [...leaves] }); sumNumbers(node.left, sum) sumNumbers(node.right, sum); } sumNumbers(root, 0); return steps; } <file_sep>import Position from '../../../_commons/params/position.interface'; import { TextSphere } from '../../../_commons/sphere/text-sphere.interface'; export default class TreeNode<T> { value: TextSphere<T>; index?: number; left?: TreeNode<T>; right?: TreeNode<T>; parent?: TreeNode<T>; constructor(value: TextSphere<T>, index?: number) { this.value = value; this.index = index; } set sphereColor(color: string) { this.value.sphereColor.setColor(color); } set textColor(color: string) { this.value.textColor.setColor(color); } move(distance: Position, duration: number, onUpdate?: () => void): Promise<void> { const x = this.value.center.x + distance.x; const y = this.value.center.y + distance.y; const z = this.value.center.z + distance.z; return this.moveTo({ x, y, z }, duration, onUpdate); } moveTo(dest: Position, duration: number, onUpdate?: () => void): Promise<void> { return this.value.move(dest, duration, onUpdate); } show() { this.value.show(); return this; } hide() { this.value.hide(); return this; } } <file_sep>import createDpTable from "../algo"; const getLastCell = (table: boolean[][]): boolean => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("subset-sum-problem one", () => { 
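    // A subset of [3, 34, 4, 12, 5, 2] sums to 9 (for example 4 + 5), so the last cell of the DP table should be true.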
const total = 9;
    const array = [3, 34, 4, 12, 5, 2];
    const table = createDpTable(total, array);
    const distance = getLastCell(table);
    expect(distance).toBe(true);
});

test("subset-sum-problem two", () => {
    const total = 35;
    const array = [3, 34, 4, 12, 5, 2];
    const table = createDpTable(total, array);
    const distance = getLastCell(table);
    expect(distance).toBe(false);
});
<file_sep>import Mover from "../params/mover.interface";
import Display from "../params/displayer.interface";
import Position from "../params/position.interface";
import Color from "../params/color.interface";

export interface Sphere extends Mover, Display {
    radius: number;
    center: Position;
    sphereColor: Color;
}
<file_sep>import Container from "../_commons/container";
import { Action, Step } from "./step";

export const sort = (arrays: Container[]): Step[] => {
    const steps: Step[] = [];

    for (let i = 1; i < arrays.length; i++) {
        const temp = arrays[i];
        steps.push({ a: temp, action: Action.Leave });
        let j = i - 1;
        while (j >= 0 && arrays[j].payload > temp.payload) {
            steps.push({ a: arrays[j], action: Action.Override, index: j + 1 });
            arrays[j + 1] = arrays[j];
            j--;
        }
        steps.push({ a: temp, action: Action.Insert, index: j + 1 });
        arrays[j + 1] = temp;
    }

    return steps;
}
<file_sep>export const title = "Egg Dropping Problem";

export const formula = `
if (eggs > floors) {
    table[eggs][floors] = table[eggs - 1][floors];
} else {
    let min = floors;
    for (let floor = 1; floor <= floors; floor += 1) {
        const max = Math.max(
            table[eggs - 1][floor - 1], // egg breaks
            table[eggs][floors - floor] // egg didn't break
        );
        min = Math.min(min, max);
    }
    table[eggs][floors] = 1 + min;
}`;

export const description = `Objective: There are ***n*** eggs and a building which has ***k*** floors. Write an algorithm to find the minimum number of drops required to know the floor from which, if an egg is dropped, it will break.

Notes: One trial is dropping an egg once from a particular floor.

- If an egg does not break after dropping, it will be used again.
- If an egg breaks when dropped from some floor, then it will break if dropped from any higher floor.
- If an egg does not break when dropped from some floor, then it will not break if dropped from any lower floor.
`;

export const usecases = '';

export const example = `
Example 1:

- Input: Eggs: 1, Floors: K.
- Output: K.

Example 2:

- Input: Eggs: Infinity, Floors: K.
- Output: lg(K).
`;
<file_sep>
import { styled } from '@mui/material/styles';

const Body = styled('main')(({ theme }) => ({
    border: 0,
    width: "100%",
    height: "100%",
    overflow: "hidden",
    flexGrow: 1,
    padding: theme.spacing(0),
}));

export default Body;
<file_sep>export enum State {
    BuildingHeap,
    Typing,
    Playing,
    Computing,
    Finished
}
<file_sep>import { Point } from "../../dp/_commons/point";
import { Guiders } from "./algo";

export const isSuccess = (value: string, currentPoint: Point, guiders: Guiders): boolean => {
    const { col } = currentPoint;
    const { directions } = guiders;
    return (value.trim() === "--->" && directions[col] === 0) || (value.trim() === "<---" && directions[col] === 1);
};
<file_sep>export const title = 'Longest Common Subsequence';

export const formula = `
if (a === b) {
    table[row][col] = table[row - 1][col - 1] + 1;
} else {
    table[row][col] = Math.max(
        table[row][col - 1],
        table[row - 1][col]
    );
}
`;

export const description = `
Given two sequences, find the length of the longest subsequence present in both of them.

A subsequence is a sequence that appears in the same relative order, but not necessarily contiguous.
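The DP table is filled cell by cell: when the two characters match, the cell extends the diagonal value by one; otherwise it takes the larger of the cell above and the cell to the left, which is exactly what the formula expresses.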
`; export const usecases = ''; export const example = ` ***Input*** - string1 = algorithms - string2 = alchemist ***Output*** - 5 ***Explanation*** - The longest common subsequence are ***"alhms"*** and its length is ****5****. `; <file_sep>export const title = 'Edit Distance'; export const formula = ` if (char1 === char2) { table[row][col] = table[row - 1][col - 1]; } else { const min = Math.min( table[row - 1][col - 1], table[row - 1][col], table[row][col - 1] ); table[row][col] = min + 1; }`; export const description = ` In computational linguistics and computer science, edit distance is a way of quantifying how dissimilar two strings (e.g., words) are to one another by counting the minimum number of operations required to transform one string into the other. \\- From Wikipedia. `; export const usecases = ` 1. In natural language processing, where automatic spelling correction can determine candidate corrections for a misspelled word by selecting words from a dictionary that have a low distance to the word in question. 2. In bioinformatics, it can be used to quantify the similarity of DNA sequences, which can be viewed as strings of the letters A, C, G and T. \\- From Wikipedia. `; export const example = ` The distance between kitten and sitting is 3. A minimal edit script that transforms the former into the latter is: 1. kitten → sitten (substitution of 's' for 'k') 2. sitten → sittin (substitution of 'i' for 'e') 3. sittin → sitting (insertion of 'g' at the end). \\-From Wikipedia. `; <file_sep>export const title = "Symmetric Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function isSymmetric(root: TreeNode | null): boolean { function dfs(left: TreeNode | null, right: TreeNode | null) { if (left === null && right === null) { return true; } if (left === null || right === null) { return false; } if (left.val !== right.val) { return false; } return dfs(left.left, right.right) && dfs(left.right, right.left); } return dfs(root.left, root.right); };`; export const description = ` Given the **root** of a binary tree, ***check whether it is a mirror of itself*** (i.e., symmetric around its center). 
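
For example, the tree given in level order as [1, 2, 2, 3, 4, 4, 3] is symmetric, while [1, 2, 2, null, 3, null, 3] is not, because mirroring it would require the second 3 to appear as a left child.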
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import createDPTable, { createPalindromeBooleanTable } from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary, helperStyleThird } from '../_commons/styles'; const startPoint: Point = { row: 2, col: 2, }; interface TableSize { rows: number; cols: number; } const getTableSize = (sequence: string): TableSize => { const rows = sequence.length + 2; const cols = sequence.length + 2; return { rows, cols }; }; const createTableMatrix = (sequence: string): (number | string)[][] => { const { rows, cols } = getTableSize(sequence); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); for (let col = 2; col < cols; col++) { table[0][col] = col - 2; table[1][col] = sequence.charAt(col - 2); } for (let row = 2; row < rows; row++) { table[row][0] = row - 2; table[row][1] = sequence.charAt(row - 2); } table[startPoint.row][startPoint.col] = '?'; return table; }; const createPalindromeTable = (sequence: string): string[][] => { const { rows, cols } = getTableSize(sequence); const palindromeTable = createPalindromeBooleanTable(sequence); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); for (let col = 2; col < cols; col++) { table[0][col] = col - 2; table[1][col] = sequence.charAt(col - 2); } for (let row = 2; row < rows; row++) { table[row][0] = row - 2; table[row][1] = sequence.charAt(row - 2); } for (let row = 2; row < table.length; row++) { for (let col = row; col < table[row].length; col++) { table[row][col] = palindromeTable[row - 2][col - 2] ? "T" : "F"; } } return table; }; const createPalindromeTableStyles = (sequence: string): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(sequence); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStylesToPalindromeTable(table, startPoint, 1, []); return table; }; const addHelperStylesToPalindromeTable = ( styles: React.CSSProperties[][], { row, col }: Point, nextLength: number, table: (string | number)[][] ): void => { for (let r = 0; r <= row; r++) { styles[r][col] = helperStyle; } for (let c = 0; c <= col; c++) { styles[row][c] = helperStyle; } styles[row][col] = { ...helperStyleSecondary, "fontWeight": "bolder" }; }; const createComparedTable = (sequence: string): (number | string)[][] => { const { rows, cols } = getTableSize(sequence); const dpTable = createDPTable(sequence); const tableMatrix = createTableMatrix(sequence); for (let row = 2; row < rows; row++) { for (let col = 2; col < cols; col++) { tableMatrix[row][col] = dpTable[row - 2][col - 2]; } } return tableMatrix; }; const addHelperStyles = ( styles: React.CSSProperties[][], { row, col }: Point, nextLength: number, table: (string | number)[][] ): void => { for (let i = row; i <= col; i++) { styles[0][i] = helperStyleSecondary; styles[1][i] = helperStyle; styles[i][0] = helperStyleSecondary; styles[i][1] = helperStyle; } if (nextLength === 2 && table[1][row] === table[1][row + nextLength - 1]) { styles[1][col] = helperStyleThird; styles[1][row] = helperStyleThird; styles[col][1] = helperStyleThird; styles[row][1] = helperStyleThird; } }; const createTableStyles = (sequence: string): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(sequence); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint, 1, []); return table; }; const createButtons = (sequence: string): number[] => { const dpTable 
= createDPTable(sequence); const set = new Set<number>(); for (let row = 0; row < dpTable.length; row++) { for (let col = 0; col < dpTable[row].length; col++) { set.add(dpTable[row][col]); } } return Array.from(set).sort(); }; const createButtonsStyles = (sequence: string): (React.CSSProperties)[] => { return createButtons(sequence).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, createPalindromeTable, createPalindromeTableStyles, addHelperStylesToPalindromeTable, startPoint, }; <file_sep>export const title = 'Is Subsequence'; export const formula = ` if (ch1 === ch2 && table[row - 1][col - 1]) { table[row][col] = true; } else { table[row][col] = table[row][col - 1]; } OR if (table[row][col - 1]) { table[row][col] = true; } else if (ch1 === ch2 && table[row - 1][col - 1]) { table[row][col] = true; } else { table[row][col] = false; } `; export const description = `Given two strings s1 and s2, find if s1 is a subsequence of s2.`; export const usecases = ''; export const example = ` Example 1: - Input: s1 = che, s2 = alchemist - Output: ***true*** Example 2: - Input: s1 = chm, s2 = alchemist - Output: ***true*** Example 3: - Input: s1 = mch, s2 = alchemist - Output: ***false*** `; <file_sep>export const title = "Balanced Binary Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function isBalanced(root: TreeNode | null): boolean { function height(node: TreeNode | null): number { if (node === null) { return 0; } const left = height(node.left); if (left === -1) { return -1; } const right = height(node.right); if (right === -1) { return -1; } const different = Math.abs(right - left); if (different > 1) { return -1; } return Math.max(left, right) + 1; } return height(root) !== -1; };`; export const description = ` Given a binary tree, determine if it is ***height-balanced***. `; export const solution = `/** * If a node is balanced, return the height of the node. * If a node is unbalanced, return -1 as the height of the node. */ function isBalanced(root: TreeNode | null): boolean { function height(node: TreeNode | null): number { if (node === null) { return 0; } const left = height(node.left); if (left === -1) { return -1; } const right = height(node.right); if (right === -1) { return -1; } const different = Math.abs(right - left); if (different > 1) { return -1; } return Math.max(left, right) + 1; } return height(root) !== -1; };`; export const usecases = ''; export const example = ``; <file_sep>interface Result { maxCols: number; converted: string; } function convert(s: string, numRows: number): Result { if (numRows === 1) return { maxCols: s.length, converted: s }; const rows: string[] = []; for (let i = 0; i < numRows; i++) { rows[i] = ""; } let rowIndex = 0; let flag = -1; for (let i = 0; i < s.length; i++) { rows[rowIndex] = rows[rowIndex] + s.charAt(i); if (rowIndex === 0 || rowIndex === numRows - 1) { flag = -1 * flag; } rowIndex += flag; } const maxCols: number = rows.reduce((a, b) => (a.length > b.length) ? 
a : b).length; const converted: string = rows.reduce((a, b) => a + b); return { maxCols, converted } }; export { convert }; <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import { Props, TreeNodeProps } from '../../../data-structures/tree/heap/props'; import MaxHeap from '../../../data-structures/tree/heap/max-heap'; const lineColor = "gold"; const normalSphereColor = "yellow"; const cubeColor = "yellow"; const enabledColor = "orange"; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; const lineMaterial = new THREE.LineBasicMaterial({ color: lineColor }); export const buildHeap = (scene: THREE.Scene, k: number): MaxHeap<number> => { const cubeMaterial = () => new THREE.MeshBasicMaterial({ color: cubeColor, opacity: 0.5, transparent: true }); const cubeWidth = 2; const cubeGeometry: THREE.BoxGeometry = new THREE.BoxGeometry(cubeWidth, 2, 2); const arrayX = (3 / 2) * cubeWidth - cubeWidth / 2; const treeInitDepth = Math.floor(Math.log2(k)) + 1; const arrayNodeProps = { textMaterial, textGeometryParameters, cubeMaterial, cubeGeometry, initPosition: { x: 15, y: 11, z: 0 }, }; const treeNodeProps: TreeNodeProps = { sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters, enabledTreeNodeColor: enabledColor, initPosition: { x: 15, y: 11, z: 0 }, } const treeLineProps = { material: lineMaterial } const props: Props = { arrayPosition: { x: arrayX + 5, y: 9, z: 0 }, arrayNodeProps, treePosition: { x: 0 + 5, y: -Math.log2(k), z: 0 }, treeNodeProps, treeLineProps, treeNodeDistance: { x: 2.5, y: 2.5 }, treeInitDepth, scene, duration: 1 } return new MaxHeap(props); } <file_sep>export const title = "Binary Search Tree Iterator"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ class BSTIterator { private stack: TreeNode[]; constructor(root: TreeNode | null) { this.stack = []; this.pushToStack(root); } next(): number { const node = this.stack.pop(); this.pushToStack(node.right); return node.val; } hasNext(): boolean { return this.stack.length > 0; } private pushToStack(root?: TreeNode) { if (!root) { return; } this.stack.push(root); this.pushToStack(root.left); } }`; export const description = ` Implement the **BSTIterator** class that represents an iterator over the **in-order traversal** of a binary search tree (BST) with methods: - constructor() - hasNext(): boolean - next(): number Notice that by initializing the pointer to a non-existent smallest number, the first call to **next()** will return the smallest element in the BST. You may assume that **next()** calls will always be valid. That is, there will be at least a next number in the in-order traversal when **next()** is called. 
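
For example, for the BST built from [7, 3, 15, null, null, 9, 20], successive calls to **next()** return 3, 7, 9, 15 and 20, after which **hasNext()** returns false. With the stack-based approach shown above, **next()** runs in amortized O(1) time and the stack never holds more than h nodes, where h is the height of the tree.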
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import createDPTable from './algo'; import { helperStyle, helperStyleSecondary, helperStyleThird } from '../_commons/styles'; import { Point } from "../_commons/point"; const addHelperStyles = (styles: React.CSSProperties[][], point: Point, table: (number | string)[][]): void => { styles[0][point.col] = helperStyle; styles[point.row][0] = helperStyle; styles[point.row - 1][point.col] = helperStyleSecondary; const coin = Number(table[point.row][0]); if (point.col - coin - 1 >= 0) { styles[point.row][point.col - coin] = helperStyleThird; } }; const startPoint: Point = { row: 2, col: 2, }; interface TableSize { rows: number; cols: number; } const getTableSize = (coins: number[], total: number): TableSize => { const rows = coins.length + 2; const cols = total + 2; return { rows, cols }; }; const createTableMatrix = (coins: number[], total: number): (number | string)[][] => { const { rows, cols } = getTableSize(coins, total); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); for (let col = 2; col < cols; col++) { table[0][col] = col - 1; table[1][col] = Infinity; } for (let row = 2; row < rows; row++) { table[row][0] = coins[row - 2]; table[row][1] = 0; } table[startPoint.row][startPoint.col] = '?'; return table; }; const createComparedTable = (coins: number[], total: number): (number | string)[][] => { const { rows, cols } = getTableSize(coins, total); const dpTable = createDPTable(coins, total); const tableMatrix = createTableMatrix(coins, total); for (let row = 1; row < rows; row++) { for (let col = 1; col < cols; col++) { tableMatrix[row][col] = dpTable[row - 1][col - 1]; } } return tableMatrix; }; const createTableStyles = (coins: number[], total: number): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(coins, total); const dpTable = createTableMatrix(coins, total); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint, dpTable); return table; }; const createButtons = (coins: number[], total: number): number[] => { const dpTable = createDPTable(coins, total); const set = new Set<number>(); for (let row = 1; row < dpTable.length; row++) { for (let col = 1; col < dpTable[row].length; col++) { const value = dpTable[row][col]; if (value === Infinity) { set.add(Infinity); } else { set.add(value); } } } return Array.from(set).sort(); }; const createButtonsStyles = (coins: number[], total: number): (React.CSSProperties)[] => { return createButtons(coins, total).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, }; <file_sep>const createDPTable = (total: number, array: number[]): boolean[][] => { const rows = array.length + 1; const cols = total + 1; const table = new Array(rows).fill(false).map(() => new Array(cols).fill(false)); for (let row = 0; row < rows; row++) { table[row][0] = true; } for (let row = 1; row < rows; row++) { const element = array[row - 1]; for (let col = 1; col < cols; col++) { if (element > col) { table[row][col] = table[row - 1][col]; } else { table[row][col] = table[row - 1][col] || table[row - 1][col - element]; } } } return table; }; export default createDPTable; <file_sep>import { getLeftChildIndex, getRightChildIndex } from "./tree-node-utils"; export class TreeNode { x: number; y: number; index: number; left?: TreeNode; right?: TreeNode; private readonly 
xAlpha: number; private readonly yDistance: number; constructor(index: number, xAlpha: number, yDistance: number) { this.x = 0; this.y = 0; this.index = index; this.xAlpha = xAlpha; this.yDistance = yDistance; } setX(): number { this.x = (this.setLeftX() + this.setRightX()) / 2; return this.x; } private setLeftX(): number { if (this.left) { return this.left.setX(); } else { return this.index * this.xAlpha; } } private setRightX(): number { if (this.right) { return this.right.setX(); } else { return this.index * this.xAlpha; } } setY(): number { this.y = Math.max(this.setLeftY(), this.setRightY()) + this.yDistance; return this.y; } private setLeftY(): number { if (this.left) { return this.left.setY(); } else { return 0; } } private setRightY(): number { if (this.right) { return this.right.setY(); } else { return 0; } } } export const buildPerfectBinaryTree = (level: number, alpha: number, yDistance: number): TreeNode[] => { const size = Math.pow(2, level) - 1; const array: TreeNode[] = []; for (let i = 0; i < size; i++) { const node = new TreeNode(i, alpha, yDistance); array.push(node); } for (let i = 0; i < size; i++) { const node = array[i]; node.left = array[getLeftChildIndex(i)]; node.right = array[getRightChildIndex(i)]; } const root = array[0]; root.setX(); root.setY(); return array; } <file_sep>import { Styles } from "./TreeButton"; export const DISABLED: Styles = { rect: { fill: "#fff", stroke: "lightgray", fillOpacity: 0, }, text: { fill: "rgba(0, 0, 0, 0.26)", fontSize: "13" } }; export const ENABLED: Styles = { rect: { fill: "#fff", stroke: "rgba(25, 118, 210, 0.5)", fillOpacity: 0, }, text: { fill: "#1976d2", fontSize: "13" } }; <file_sep>import Queue from './queue-vis'; export default Queue; <file_sep>class TreeNode { val: number; left?: TreeNode; right?: TreeNode; constructor(val: number) { this.val = val; } } const getDepth = (node?: TreeNode): number => { if (node === undefined) { return 0; } const leftDepth: number = getDepth(node.left); const rightDepth: number = getDepth(node.right); return Math.max(leftDepth, rightDepth) + 1; } export function calDepth(preorder: number[], inorder: number[]): number { const inorderIndexMap = new Map<number, number>(); inorder.forEach((value, index) => inorderIndexMap.set(value, index)); const buildMyTree = (preorderLeft: number, preorderRight: number, inorderLeft: number, inorderRight: number): TreeNode | undefined => { if (preorderLeft > preorderRight) { return undefined; } const inorderRootIndex = inorderIndexMap.get(preorder[preorderLeft])!; const leftTreeLength = inorderRootIndex - inorderLeft; const root = new TreeNode(preorder[preorderLeft]); root.left = buildMyTree(preorderLeft + 1, preorderLeft + leftTreeLength, inorderLeft, inorderRootIndex - 1); root.right = buildMyTree(preorderLeft + leftTreeLength + 1, preorderRight, inorderRootIndex + 1, inorderRight); return root; } const root: TreeNode | undefined = buildMyTree(0, preorder.length - 1, 0, inorder.length - 1) return getDepth(root); }; <file_sep>export const title = 'Longest Common Substring'; export const formula = ` if (a === b) { table[row][col] = table[row - 1][col - 1] + 1; } else { table[row][col] = 0; } `; export const description = ` Given two strings ‘string1’ and ‘string2’, find the length of the longest common substring. `; export const usecases = ''; export const example = ` Example 1: - Input : string1 = "algorithms", string2 = "alchemist" - Output : 2 - Explanation: The longest common substring is "al" and is of length 2. 
Example 2: - Input : string1 = "abcdxyz", string2 = "xyzabcd" - Output : 4 - Explanation: The longest common substring is "abcd" and is of length 4. `; <file_sep>import { Game } from "../../commons/game"; import Category from "../../commons/segments/category"; import Difficulty from "../../commons/segments/difficulty"; const insertionSort: Game = { name: "Insertion Sort", path: "/algorithms/insertion-sort", categories: [Category.Sorting], companies: [], difficulty: Difficulty.Easy, img: "img/insertion_sort.png" } export default insertionSort; <file_sep>import Container from "./container" export const swap = (arrays: Container[], a: number, b: number): void => { const temp = arrays[a]; arrays[a] = arrays[b]; arrays[b] = temp; } export function waitSeconds(seconds: number) { return new Promise(resolve => { setTimeout(() => resolve(1), seconds * 1000); }); } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from 'three/examples/jsm/geometries/TextGeometry'; import gsap from 'gsap'; import { font } from '../../../../commons/three'; import { queueNameStyles } from "../styles"; class QueueName { private mesh: THREE.Mesh; constructor(name: string, position: THREE.Vector3, scene: THREE.Scene) { const { color, size, height } = queueNameStyles; this.mesh = this.buildMesh(name, color, size, height); this.position = position; this.show(scene); } private buildMesh( name: string, color: string, size: number, height: number ) { const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color }); const textGeometryParameters: TextGeometryParameters = { font, size, height }; const textGeometry = new TextGeometry(name, textGeometryParameters); return new THREE.Mesh(textGeometry, textMaterial); } get position(): THREE.Vector3 { return this.mesh.position; } private set position(position: THREE.Vector3) { const { x, y, z } = position; this.mesh.position.setX(x); this.mesh.position.setY(y); this.mesh.position.setZ(z); } move(position: THREE.Vector3, duration: number) { gsap.to(this.mesh.position, { ...position, duration }); } private show(scene: THREE.Scene) { scene.add(this.mesh); } } export default QueueName; <file_sep>import DataNode from "../dataNode"; import cxCalculator from "./treeNodeCxCalculator"; import TreeNode, { Status } from "../treeNode"; import { Props as DisplayNode, NodeStyles as DisplayNodeStyles } from "../display"; import { INITIALED_NODE_ATTRIBUTES, ACTIVATED_NODE_ATTRIBUTES, ACTIVATED_LINE_ATTRIBUTES, PRINTED_NODE_ATTRIBUTES, FINISHED_NODE_ATTRIBUTES, FINISHED_LINE_ATTRIBUTES, INITIALED_LINE_ATTRIBUTES, } from "../display/nodeStyles"; const createDisplayNodeStyles = (status: Status): DisplayNodeStyles => { switch (status) { case Status.INITIAL: return INITIALED_NODE_ATTRIBUTES; case Status.ACTIVATED: return ACTIVATED_NODE_ATTRIBUTES; case Status.PRINTED: return PRINTED_NODE_ATTRIBUTES; case Status.FINISHED: return FINISHED_NODE_ATTRIBUTES; default: return INITIALED_NODE_ATTRIBUTES; } }; const createDisplayNode = ({ status, circleCx, circleCy, circleR, content, textX, textY }: TreeNode): DisplayNode => ({ nodeStyles: createDisplayNodeStyles(status), circleCx, circleCy, circleR, content, textX, textY }); export const createDisplayNodes = (propsList: TreeNode[]): DisplayNode[] => { const results: Map<number, DisplayNode> = new Map(); propsList.forEach(props => { results.set(props.index, createDisplayNode(props)) }); // connect children propsList.forEach(props => { const treeNodeProps = results.get(props.index)! 
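        // Wire each display node to its children and give every edge its initial
        // (inactive) line style; the pass below upgrades the styles per node status.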
if (props.left) { treeNodeProps.left = results.get(props.left.index); treeNodeProps.leftLineStyles = INITIALED_LINE_ATTRIBUTES; } if (props.right) { treeNodeProps.right = results.get(props.right.index); treeNodeProps.rightLineStyles = INITIALED_LINE_ATTRIBUTES; } }); // update line style propsList.forEach(props => { if (props.parent) { const parentTreeNodeProps: DisplayNode = results.get(props.parent.index)!; switch (props.status) { case Status.ACTIVATED: { if (props === props.parent.left) { parentTreeNodeProps.leftLineStyles = ACTIVATED_LINE_ATTRIBUTES; } else if (props === props.parent.right) { parentTreeNodeProps.rightLineStyles = ACTIVATED_LINE_ATTRIBUTES; } break; } case Status.PRINTED: { if (props === props.parent.left) { parentTreeNodeProps.leftLineStyles = ACTIVATED_LINE_ATTRIBUTES; } else if (props === props.parent.right) { parentTreeNodeProps.rightLineStyles = ACTIVATED_LINE_ATTRIBUTES; } break; } case Status.FINISHED: { if (props === props.parent.left) { parentTreeNodeProps.leftLineStyles = FINISHED_LINE_ATTRIBUTES; } else if (props === props.parent.right) { parentTreeNodeProps.rightLineStyles = FINISHED_LINE_ATTRIBUTES; } break; } } } }); return Array.from(results.values()); }; export const createTreeNodes = (root: DataNode, width: number, heightUnit: number, radius: number, y: number): TreeNode[] => { const results: Map<number, TreeNode> = new Map(); const cxMap = cxCalculator(root.depth, width); const createTreeNode = (node: DataNode, cy: number, parent?: TreeNode) => { const cx: number = cxMap.get(node.index)!.cx; const treeNode: TreeNode = { index: node.index, status: Status.INITIAL, circleCx: cx, circleCy: cy, circleR: radius, content: node.val, textX: cx - 6, textY: cy + 7, parent, returnToParentEnabled: true, printValEnabled: true, goLeftEnabled: node.left ? true : false, goRightEnabled: node.right ? 
true : false, }; results.set(node.index, treeNode); if (node.left) { createTreeNode(node.left, cy + heightUnit, treeNode); } if (node.right) { createTreeNode(node.right, cy + heightUnit, treeNode); } } const connectChildren = (node: DataNode) => { const treeNode: TreeNode = results.get(node.index)!; if (node.left) { treeNode.left = results.get(node.left.index); connectChildren(node.left); } if (node.right) { treeNode.right = results.get(node.right.index); connectChildren(node.right); } }; createTreeNode(root, y); connectChildren(root); return Array.from(results.values()); }; <file_sep>import { buildGraphology } from '../../../data-structures/graph/utils'; import { Graph } from "../../../data-structures/graph"; import forceLayout from 'graphology-layout-force'; export const layoutCalculator = <T>(graph: Graph<T>) => { const graphology = buildGraphology(graph); graphology.forEachNode((_, attributes) => { attributes.x = Math.random() * 15 - 4; // Set initial x position attributes.y = Math.random() * 10 - 2; // Set initial y position }); return forceLayout(graphology, { maxIterations: 50 }); } <file_sep>import * as THREE from "three"; import gsap from "gsap"; import Node from "./node"; import { PlaneParameters, TextParameters } from "../commons/node"; import { createPlaneParameters, createTextParameters } from "../commons/helpers"; import { NodeStyles } from "../commons/styles"; export default class Bucket<T> { private nodes: Node<T>[]; private scene: THREE.Scene; private planeMaterial: THREE.Material; private textMaterial: THREE.Material; private render: Function; private nodeStyles: NodeStyles; private start: THREE.Vector3; public duration: number; constructor( scene: THREE.Scene, planeMaterial: THREE.Material, textMaterial: THREE.Material, render: Function, nodeStyles: NodeStyles, start: THREE.Vector3, duration: number, ) { this.nodes = []; this.scene = scene; this.planeMaterial = planeMaterial; this.textMaterial = textMaterial; this.render = render; this.nodeStyles = nodeStyles; this.start = start; this.duration = duration; } get head(): Node<T> { return this.nodes[0]; } findNode(key: number): Node<T> | undefined { let result: Node<T> | undefined = undefined; this.nodes.forEach(node => { if (node.key === key) { result = node; } }); return result; } append(key: number, data: T, display: string): Node<T> { const last = this.last; if (last) { const node = new Node<T>(key, data, display, this.scene, this.createPlaneParameters(last.nextPlanePosition), this.createTextParameters(last.nextTextPosition) ); this.nodes.push(node); return node; } else { const { x, y, z } = this.start; const textPosition = new THREE.Vector3(x - 3, y - 1, z); const node = new Node<T>(key, data, display, this.scene, this.createPlaneParameters(this.start), this.createTextParameters(textPosition) ); this.nodes.push(node); return node; } } async delete(key: number): Promise<T | undefined> { const node = this.findNode(key); if (node === undefined) { return; } const tails = this.getTails(node); this.deleteNode(node); tails.forEach(item => { const onUpdate = () => { this.render() }; const nextPlanePosition = item.previousPlanePosition; const nextTextPosition = item.previousTextPosition; const nextIndexPosition = item.previousIndexPosition; gsap.to(item.index.position, { ...nextIndexPosition, duration: this.duration }); gsap.to(item.plane.position, { ...nextPlanePosition, duration: this.duration, onUpdate }); gsap.to(item.text.position, { ...nextTextPosition, duration: this.duration }); }); return new Promise(resolve => 
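            // give the GSAP animations above (which share this.duration) time to finish
            // before resolving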
setTimeout(resolve, this.duration * 1000)); } private deleteNode(node: Node<T>): void { node.removeFromScene(); const index = this.nodes.indexOf(node); delete this.nodes[index]; } private getTails(node: Node<T>): Node<T>[] { const index = this.nodes.indexOf(node); return this.nodes.slice(index, this.nodes.length); } private createPlaneParameters(position: THREE.Vector3): PlaneParameters { return createPlaneParameters(this.planeMaterial, position, this.nodeStyles); } private createTextParameters(position: THREE.Vector3): TextParameters { return createTextParameters(this.textMaterial, position, this.nodeStyles); } private get last(): Node<T> | undefined { return this.isEmpty ? undefined : this.nodes[this.length - 1]; } get length(): number { return this.nodes.length; } get isEmpty(): boolean { return this.length === 0; } } <file_sep>import createDpTable from "../algo"; const getLastCell = (table: number[][]): number => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("coin-change-fewest-number should return optimal solution", () => { const coins = [1, 8, 13]; const total = 16; const table = createDpTable(coins, total); const result = getLastCell(table); expect(result).toBe(2); }); test("coin-change-fewest-number should return the right result", () => { const coins = [2, 8, 15]; const total = 24; const table = createDpTable(coins, total); const result = getLastCell(table); expect(result).toBe(3); }); <file_sep>import MaxHeap from "../../../data-structures/tree/heap/max-heap"; import MinHeap from "../../../data-structures/tree/heap/min-heap"; export default class DualHeap { smaller: MaxHeap<number>; greater: MinHeap<number>; constructor(smaller: MaxHeap<number>, greater: MinHeap<number>) { this.smaller = smaller; this.greater = greater; } async median() { const smallerSize = await this.smaller.size(); const greaterSize = await this.greater.size(); if (smallerSize === greaterSize) { const one = await this.smaller.peek() || 0; const two = await this.greater.peek() || 0; return (one + two) / 2; } return await this.smaller.peek(); } async push(item: number) { const smallerTop = await this.smaller.peek(); if (smallerTop === undefined || item <= smallerTop) { await this.smaller.push(item); } else { await this.greater.push(item); } await this.balance(); } async delete(item: number) { const deleted = await this.smaller.delete(item); if (deleted === undefined) { await this.greater.delete(item); } await this.balance(); } private async balance() { const smallerSize = await this.smaller.size(); const greaterSize = await this.greater.size(); if (smallerSize > greaterSize + 1) { const top = await this.smaller.pop(); if (top !== undefined) { await this.greater.push(top); } } else if (greaterSize > smallerSize) { const top = await this.greater.pop(); if (top !== undefined) { await this.smaller.push(top); } } } } <file_sep>export const title = "Binary Tree Inorder Traversal"; export const formula = ` function inorder(node: Node) { if (node.left) { inorder(node.left); } print(node); if (node.right) { inorder(node.right); } } OR function inorder(node: Node | null) { if (node == null) { return; } inorder(node.left); print(node); inorder(node.right); } `; export const description = ` For a binary tree, they are defined as access operations at each node, starting with the current node, then go down one level to Children: - (L) Recursively traverse left subtree. - (R) Recursively traverse right subtree. - (N) Process the current node N itself. 
Return by going up one level and arriving at the parent node. --- In-order (LNR) 1. Traverse the left subtree by recursively calling the in-order function. 2. Access the data part of the current node. 3. Traverse the right subtree by recursively calling the in-order function. In a ***binary search tree*** ordered such that: - In each node the key is greater than all keys in its left subtree and less than all keys in its right subtree; - In-order traversal retrieves the keys in ascending sorted order. From Wikipedia. `; export const usecases = ''; export const example = ''; <file_sep>const getTableSize = (pattern: string, text: string) => { const rows = text.length + 1; const cols = pattern.length + 1; return { rows, cols }; }; const createDPTable = (pattern: string, text: string): boolean[][] => { const { rows, cols } = getTableSize(pattern, text); const table = new Array(rows).fill(false).map(() => new Array(cols).fill(false)); table[0][0] = true; for (let col = 1; col < pattern.length; col += 1) { if (pattern.charAt(col - 1) === '*') { table[0][col] = table[0][col - 2] || false; } } for (let row = 1; row < table.length; row++) { const t = text.charAt(row - 1); for (let col = 1; col < table[row].length; col++) { const p = pattern.charAt(col - 1); if (p === '.' || p === t) { table[row][col] = table[row - 1][col - 1]; } else if (p === '*') { if (table[row][col - 2]) { table[row][col] = table[row][col - 2]; } else if (pattern.charAt(col - 2) === '.' || pattern.charAt(col - 2) === t) { table[row][col] = table[row - 1][col]; } } } } return table; }; export default createDPTable; <file_sep>export const indexDeletedSolution = ` abstract class Heap { private indices: Map<number, number[]>; protected items: number[]; constructor() { this.indices = new Map(); this.items = []; } heapify() { for (let i = Math.floor(this.items.length / 2) - 1; i >= 0; i--) { this.bubbleDown(i); } } size() { return this.items.length; } peek() { return this.items[0]; } push(item: number) { this.items.push(item); this.addIndex(item, this.items.length - 1); this.bubbleUp(this.items.length - 1); } pop() { const lastIndex = this.items.length - 1; const root = this.items.shift(); this.deleteIndex(root, 0); const last = this.items.pop(); this.deleteIndex(last, lastIndex); if (last !== undefined) { this.items.unshift(last); this.addIndex(last, 0); this.bubbleDown(0); } return root; } delete(item: number): number | undefined { const values: number[] = this.indices.get(item) || []; const index = values.pop(); if (index === undefined) { return undefined; } const lastIndex = this.items.length - 1; const last = this.items.pop(); this.deleteIndex(last, lastIndex); if (index === lastIndex) { return last; } const target = this.items.splice(index, 1)[0]; this.deleteIndex(target, index); if (last !== undefined) { this.items.splice(index, 0, last); this.addIndex(last, index); this.heapify(); } return target; } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.shouldBubbleUp(index, parentIndex)) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private swap(i: number, j: number) { 
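    // keep the value -> indices bookkeeping in sync before swapping the array slots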
this.updateIndex(this.items[i], i, j); this.updateIndex(this.items[j], j, i); [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } private addIndex(item: number, index: number) { const values: number[] = this.indices.get(item) || []; values.push(index); this.indices.set(item, values); } private deleteIndex(item: number | undefined, index: number) { if (item === undefined) { return; } const values: number[] = this.indices.get(item) || []; const target = values.indexOf(index); if (target > -1) { values.splice(target, 1); } } private updateIndex(item: number, from: number, to: number) { this.deleteIndex(item, from); this.addIndex(item, to); } protected abstract shouldBubbleUp(index: number, parentIndex: number): boolean; protected abstract shouldBubbleDown(index: number, childIndex: number): boolean; } class MinHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number) { return this.items[index] < this.items[parentIndex]; } protected shouldBubbleDown(index: number, childIndex: number) { return this.items[index] > this.items[childIndex]; } } class MaxHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number) { return this.items[index] > this.items[parentIndex]; } protected shouldBubbleDown(index: number, childIndex: number) { return this.items[index] < this.items[childIndex]; } } class DualHeap { smaller: MaxHeap; greater: MinHeap; constructor() { this.smaller = new MaxHeap(); this.greater = new MinHeap(); } push(item: number) { if (this.smaller.size() === 0 || item <= this.smaller.peek()) { this.smaller.push(item); } else { this.greater.push(item); } this.balance(); } private balance() { if (this.smaller.size() > this.greater.size() + 1) { const temp = this.smaller.pop(); this.greater.push(temp); } else if (this.greater.size() > this.smaller.size()) { const temp = this.greater.pop(); this.smaller.push(temp); } } delete(item: number) { if (this.smaller.delete(item) === undefined) { this.greater.delete(item); } this.balance(); } median() { if (this.smaller.size() === this.greater.size()) { const one = this.smaller.peek() || 0; const two = this.greater.peek() || 0; return (one + two) / 2; } return this.smaller.peek(); } } function medianSlidingWindow(nums: number[], k: number): number[] { const heap = new DualHeap(); let i = 0; for (; i < k; i++) { heap.push(nums[i]); } const result: number[] = []; for (; i < nums.length; i++) { result.push(heap.median()); heap.delete(nums[i - k]); heap.push(nums[i]); } result.push(heap.median()); return result; };`; <file_sep>export const title = "Merge K Sorted Lists"; export const minHeap = `class ListNode { val: number; next: ListNode | null; constructor(val?: number, next?: ListNode | null) { this.val = (val === undefined ? 0 : val); this.next = (next === undefined ? 
null : next); } } class MinHeap { private items: ListNode[]; constructor() { this.items = []; } push(item: ListNode) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last) { this.items.unshift(last); this.bubbleDown(0); } return root; } nonEmpty() { return this.items.length > 0; } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if ( this.items[target] && this.items[leftChildIndex] && this.items[target].val > this.items[leftChildIndex].val ) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if ( this.items[target] && this.items[rightChildIndex] && this.items[target].val > this.items[rightChildIndex].val ) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.items[index].val < this.items[parentIndex].val) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } }`; export const mergeKLists = `function mergeKLists(lists: Array<ListNode | null>): ListNode | null { const minHeap = new MinHeap(); lists.forEach(node => { if (node) { minHeap.push(node); } }); const head = new ListNode(); let tail = head; while (minHeap.nonEmpty()) { const current = minHeap.pop(); tail.next = current; tail = current; if (current.next) { minHeap.push(current.next); } } return head.next; };`; export const formula = `/** * Definition for singly-linked list. * class ListNode { * val: number * next: ListNode | null * constructor(val?: number, next?: ListNode | null) { * this.val = (val===undefined ? 0 : val) * this.next = (next===undefined ? 
null : next) * } * } */ class MinHeap { private items: ListNode[]; constructor() { this.items = []; } push(item: ListNode) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last) { this.items.unshift(last); } this.bubbleDown(0); return root; } nonEmpty() { return this.items.length > 0; } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if ( this.items[target] && this.items[leftChildIndex] && this.items[target].val > this.items[leftChildIndex].val ) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if ( this.items[target] && this.items[rightChildIndex] && this.items[target].val > this.items[rightChildIndex].val ) { target = rightChildIndex; } if (target === index) { return; } this.swap(index, target); this.bubbleDown(target); } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); const current = this.items[index]; const parent = this.items[parentIndex]; if (current.val < parent.val) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } } function mergeKLists(lists: Array<ListNode | null>): ListNode | null { const minHeap = new MinHeap(); lists.forEach(node => { if (node) { minHeap.push(node); } }); const head = new ListNode(); let tail = head; while (minHeap.nonEmpty()) { const current = minHeap.pop(); tail.next = current; tail = current; if (current.next) { minHeap.push(current.next); } } return head.next; };`; export const description = ` You are given an array of k linked-lists lists, each linked-list is sorted in ascending order. Merge all the linked-lists into one sorted linked-list and return it. `; export const solution = ``; export const usecases = ''; export const example = ` `; <file_sep>const createDPTable = (stringOne: string, stringTwo: string): number[][] => { const rows = stringTwo.length + 1; const cols = stringOne.length + 1; const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); for (let row = 1; row < rows; row++) { for (let col = 1; col < cols; col++) { if (stringOne.charAt(col - 1) === stringTwo.charAt(row - 1)) { table[row][col] = table[row - 1][col - 1] + 1; } else { table[row][col] = Math.max(table[row][col - 1], table[row - 1][col]); } } } return table; }; export default createDPTable; <file_sep>export const title = "Word Break"; export const description = ` Given an input string and a dictionary of words, find out if the input string can be segmented into a space-separated sequence of dictionary words.`; export const usecases = ''; export const formula = ` const sub = s.substring(start, end + 1); if (wordDictSet.has(sub)) { table[start][end] = true; continue; } for (let i = start; i < end; i++) { if (table[start][i] && table[i + 1][end]) { table[start][end] = true; break; } } `; export const example = ` Example 1: - Input: s = "helloworld", wordDict = \\["hello", "world"] - Output: true - Explanation: Return true because "helloworld" can be segmented as "hello world". Example 2: - Input: s = "itisanice", wordDict = \\["a", "an", "i", "ice", "is", "it", "nice"] - Output: true - Explanation: Return true because "itisanice" can be segmented as "it is an ice". 
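
Example 3:

- Input: s = "catsandog", wordDict = \\["cats", "dog", "sand", "and", "cat"]
- Output: false
- Explanation: Return false because "catsandog" cannot be segmented into dictionary words.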
`;<file_sep>export const title = "Sliding Window Maximum"; export const formula = `function maxSlidingWindow(nums: number[], k: number): number[] { interface Item { value: number; index: number; } const deque: Item[] = []; for (let i = 0; i < k; i++) { const item: Item = { value: nums[i], index: i}; while (deque.length > 0 && item.value >= deque[deque.length - 1].value) { deque.pop(); } deque.push(item); } const result: number[] = [deque[0].value]; for (let i = k; i < nums.length; i++) { const item: Item = { value: nums[i], index: i}; while (deque.length > 0 && item.value >= deque[deque.length - 1].value) { deque.pop(); } deque.push(item); while (deque[0].index <= i -k) { deque.shift(); } result.push(deque[0].value); } return result; };`; export const description = `You are given an array of integers **nums**, there is a sliding window of size **k** which is moving from the very left of the array to the very right. You can only see the **k** numbers in the window. Each time the sliding window moves right by one position. Return the max sliding window.`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { left?: TreeNode<string>; right?: TreeNode<string>; symmetric?: boolean; direction?: Direction; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; function isSymmetric(left?: TreeNode<string>, right?: TreeNode<string>, direction?: Direction): boolean { if (left === undefined && right === undefined) { return true; } if (left === undefined || right === undefined) { steps.push({ left, right, direction, symmetric: false }); return false; } if (left.val.value !== right.val.value) { steps.push({ left, right, direction, symmetric: false }); return false; } steps.push({ left, right, direction, symmetric: true }); const isLeftSymmetric = isSymmetric(left.left, right.right, Direction.Left); if (!isLeftSymmetric) { steps.push({ left, right, direction, symmetric: false }); return false; } const isRightSymmetric = isSymmetric(left.right, right.left, Direction.Right); if (!isRightSymmetric) { steps.push({ left, right, direction, symmetric: false }); return false; } steps.push({ left, right, direction, symmetric: true }); return true; } steps.push({ left: root, right: root, symmetric: true }); const symmetric = isSymmetric(root?.left, root?.right); steps.push({ left: root, right: root, symmetric }); return steps; } <file_sep>export const title = "Trapping Rain Water II"; export const formula = ` public int trap(int[] height) { if(height.length < 3 ) { return 0; } int leftMax = height[0]; int rightMax = height[height.length-1]; int i = 1; int j = height.length-2; int sum = 0; while(i <= j ) { leftMax = Math.max(leftMax, height[i]) ; rightMax = Math.max(rightMax, height[j]); int min = Math.min(leftMax, rightMax); if (leftMax < rightMax) { sum += min > height[i] ? min - height[i] : 0; i ++ ; } else { sum += min > height[j] ? min - height[j] : 0; j --; } } return sum; } `; export const description = ` Given n non-negative integers representing an elevation map where the width of each bar is 1, compute how much water it is able to trap after raining. 
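
The two-pointer solution above keeps the highest wall seen so far from each side and always settles the side with the smaller maximum, because the water above that bar is bounded by min(leftMax, rightMax) minus the bar's own height. For the heights 3, 0, 2, the middle bar is bounded by walls of height 3 and 2, so it traps min(3, 2) - 0 = 2 units, matching Example 1 below.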
`; export const usecases = ''; export const example = ` Example 1: - Input: arr[] = { 3, 0, 2 } - Output: 2 Example 2: - Input: arr[] = { 0, 1, 0, 2, 1 } - Output: 2 `; <file_sep>export const title = "Ugly Number II"; export const minHeap = `class MinHeap { private items: number[]; constructor() { this.items = []; } push(num: number) { this.items.push(num); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last) { this.items.unshift(last); this.bubbleDown(0); } return root; } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.items[index] < this.items[parentIndex]) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if ( this.items[target] !== undefined && this.items[leftChildIndex] !== undefined && this.items[target] > this.items[leftChildIndex] ) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if ( this.items[target] !== undefined && this.items[rightChildIndex] !== undefined && this.items[target] > this.items[rightChildIndex] ) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } }`; export const nthUglyNumber = `function nthUglyNumber(n: number): number { const factors = [2, 3, 5]; const minHeap = new MinHeap(); const seen = new Set(); minHeap.push(1); for (let i = 0; i < n - 1; i++) { const root = minHeap.pop(); for (const factor of factors) { const next = root * factor; if (!seen.has(next)) { minHeap.push(next); seen.add(next); } } } return minHeap.pop(); };`; export const description = ` An **ugly number** is a positive integer whose prime factors are limited to **2**, **3**, and **5**. Given an integer n, return the **n^th** ugly number. 
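
For example, the first ten ugly numbers are 1, 2, 3, 4, 5, 6, 8, 9, 10 and 12, so for n = 10 the answer is 12. The min-heap solution above repeatedly pops the smallest ugly number seen so far and pushes its multiples by 2, 3 and 5, using the seen set to skip duplicates.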
`; export const solution = ``; export const usecases = ''; export const example = ` `; <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import MinHeap from '../../../data-structures/tree/heap/min-heap'; import { Props, TreeNodeProps } from '../../../data-structures/tree/heap/props'; const lineColor = "gold"; const normalSphereColor = "yellow"; const cubeColor = "yellow"; const enabledColor = "gold"; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; const lineMaterial = new THREE.LineBasicMaterial({ color: lineColor }); export const buildMinHeap = (len: number, scene: THREE.Scene): MinHeap<number> => { const cubeMaterial = () => new THREE.MeshBasicMaterial({ color: cubeColor, opacity: 0.5, transparent: true }); const cubeWidth = 2; const cubeGeometry: THREE.BoxGeometry = new THREE.BoxGeometry(cubeWidth, 1.5, 1); const arrayX = len * cubeWidth / 2 - cubeWidth / 2; const arrayNodeProps = { textMaterial, textGeometryParameters, cubeMaterial, cubeGeometry, initPosition: { x: 0, y: 0, z: 0 }, }; const treeNodeProps: TreeNodeProps = { sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters, enabledTreeNodeColor: enabledColor, initPosition: { x: 0, y: 0, z: 0 }, } const treeLineProps = { material: lineMaterial } const props: Props = { arrayPosition: { x: arrayX, y: 9, z: 0 }, arrayNodeProps, treePosition: { x: 0, y: -2, z: 0 }, treeNodeProps, treeLineProps, treeNodeDistance: { x: 2, y: 2 }, treeInitDepth: 4, scene, duration: 1 } return new MinHeap<number>(props); }; <file_sep>import { Node } from "../_components/binary-tree"; const tree1 = (): Node => { const root = new Node("F", true); root.left = new Node("B"); root.left.left = new Node("A"); root.left.right = new Node("D"); root.left.right.left = new Node("C"); root.left.right.right = new Node("E"); root.right = new Node("G"); root.right.right = new Node("I"); root.right.right.left = new Node("H"); return root; }; const tree2 = (): Node => { const root = new Node("6", true); root.left = new Node("2"); root.left.left = new Node("1"); root.left.right = new Node("4"); root.left.right.left = new Node("3"); root.left.right.right = new Node("5"); root.right = new Node("7"); root.right.right = new Node("9"); root.right.right.left = new Node("8"); return root; }; const tree3 = (): Node => { const root = new Node("G", true); root.left = new Node("D"); root.left.left = new Node("B"); root.left.left.left = new Node("A"); root.left.left.right = new Node("C"); root.left.right = new Node("F"); root.left.right.left = new Node("E"); root.right = new Node("J"); root.right.left = new Node("I"); root.right.left.left = new Node("H"); root.right.right = new Node("L"); root.right.right.left = new Node("K"); root.right.right.right = new Node("M"); return root; }; const random = (max: number) => Math.floor(Math.random() * max); export const createTree = (): Node => { const trees: Node[] = [tree1(), tree2(), tree3()]; const index = random(trees.length); return trees[index]; }; <file_sep>import { Cube } from './cube'; import Position from "../params/position.interface"; export type 
TextCube<T> = Cube & { value: T; textPosition: Position; }; <file_sep>import Container from "../_commons/container"; type Step = { min: number; a: Container; b: Container; exchange: boolean; } export default Step; <file_sep>export const title = "Kth Smallest Element in a Sorted Matrix"; export const minHeap = `type Item = { val: number; row: number; col: number; } class MinHeap { private items: Item[]; constructor() { this.items = []; } push(item: Item) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last) { this.items.unshift(last); this.bubbleDown(0); } return root; } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if ( this.items[target] && this.items[leftChildIndex] && this.items[target].val > this.items[leftChildIndex].val ) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if ( this.items[target] && this.items[rightChildIndex] && this.items[target].val > this.items[rightChildIndex].val ) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.items[index].val < this.items[parentIndex].val) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } }`; export const kthSmallest = `function kthSmallest(matrix: number[][], k: number): number { const minHeap = new MinHeap(); matrix.forEach((row, i) => { minHeap.push({ val: row[0], row: i, col: 0 }); }); for (let i = 0; i < k - 1; i++) { const { row, col } = minHeap.pop(); if (col + 1 < matrix[row].length) { minHeap.push({ val: matrix[row][col + 1], row, col: col + 1 }) } } return minHeap.pop().val; };`; export const description = ` Given an **n x n** matrix where each of the rows and columns is sorted in ascending order, return the **kth** smallest element in the matrix. Note that it is the **kth** smallest element **in the sorted order**, **not** the kth **distinct** element. You must find a solution with a memory complexity better than O(n^2). `; export const solution = ``; export const usecases = ''; export const example = ` `; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node?: TreeNode<string>; direction?: Direction; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; function connect(node?: TreeNode<string>, direction?: Direction) { if (node === undefined) { return; } steps.push({ node, direction }); connect(node.left, Direction.Left); connect(node.right, Direction.Right); } connect(root); return steps; } <file_sep>export const title = "Flatten Binary Tree to Linked List"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? 
null : right) * } * } */ function flatten(root: TreeNode | null): void { if (!root) { return; } if (root.left) { const next = findRight(root.left); next.right = root.right; root.right = root.left; root.left = null; } flatten(root.right); }; function findRight(node: TreeNode) { if (node.right === null) { return node; } return findRight(node.right); };`; export const description = ` Given the **root** of a binary tree, flatten the tree into a "linked list": - The "linked list" should use the same **TreeNode** class where the **right** child pointer points to the next node in the list and the **left** child pointer is always **null**. - The "linked list" should be in the same order as a **pre-order traversal** of the binary tree. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import * as THREE from "three"; export default class Sphere extends THREE.Mesh { constructor( x: number, y: number, z: number, color?: THREE.Color | string | number, radius?: number, widthSegments?: number, heightSegments?: number, ) { const geometry = new THREE.SphereGeometry(radius, widthSegments, heightSegments); const material = new THREE.MeshBasicMaterial({ color: color, refractionRatio: 0.09, reflectivity: 0.3 }); super(geometry, material); this.position.set(x, y, z); } } <file_sep>import { Collection } from '../_commons/collection'; export interface IStack<T> extends Collection { push(t: T): Promise<number>; pop(): Promise<T | undefined>; peek(): Promise<T | undefined>; } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import { buildBinaryTree } from "../../../data-structures/tree/nodes/v1/binary-tree-builder"; export const enabledSphereColor = "lightblue"; export const normalSphereColor = "yellow"; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; const lineMaterial = new THREE.LineBasicMaterial({ color: "gold" }); export const buildTree = (array: (number | null)[], scene: THREE.Scene) => { const center = { x: 0, y: 8, z: 0 }; const show = true; const duration = 0; const yDistance = 3; const xAxisAlpha = 2; // expend the tree size in xAxis. 
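    // Delegate to buildBinaryTree with duration 0 (presumably no entrance animation).
    // Example call (assuming the input is the usual level-order encoding with null
    // for a missing child): buildTree([3, 9, 20, null, null, 15, 7], scene)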
return buildBinaryTree<number>( sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters, lineMaterial, scene, duration, center, yDistance, xAxisAlpha, array, show ); } const indexTextMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "orange" }); const indexUpperTextGeometryParameters: TextGeometryParameters = { font, size: 0.5, height: 0.1 }; export const buildThreeText = (num: number, x: number, y: number, z: number): THREE.Mesh => { const textGeometry = new TextGeometry(num + "", indexUpperTextGeometryParameters); const mesh = new THREE.Mesh(textGeometry, indexTextMaterial); mesh.position.set(x, y, z); return mesh; } <file_sep>export enum State { Typing, Ready, running, Finished } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { p?: TreeNode<string>; q?: TreeNode<string>; isSame?: boolean; direction?: Direction; } export function buildSteps(p?: TreeNode<string>, q?: TreeNode<string>): Step[] { const steps: Step[] = []; function isSameTree(p?: TreeNode<string>, q?: TreeNode<string>, direction?: Direction): boolean { if (p === undefined && q === undefined) { return true; } if (p === undefined || q === undefined) { steps.push({ p, q, direction, isSame: false }); return false; } if (p.val.value !== q.val.value) { steps.push({ p, q, direction, isSame: false }); return false; } steps.push({ p, q, direction, isSame: true }); const isLeftTreeSame = isSameTree(p.left, q.left, Direction.Left); if (!isLeftTreeSame) { steps.push({ p, q, direction, isSame: false }); return false; } const isRightTreeSame = isSameTree(p.right, q.right, Direction.Right); if (!isRightTreeSame) { steps.push({ p, q, direction, isSame: false }); return false; } steps.push({ p, q, direction, isSame: true }); return true; }; isSameTree(p, q); return steps; } <file_sep>enum Category { Tree = "Tree", Stack = "Stack", Queue = "Queue", Sorting = "Sorting", DynamicProgramming = "DP", TwoPointers = "Two Pointers", HashTable = "Hash Table", Math = "Math", SlidingWindow = "Sliding Window", FlipDirection = "Flip Direction", Greedy = "Greedy", Heap = "heap", Deque = "Qeque", SegmentTree = "Segment Tree", Graph = "graph", TopologicalSort = "Topological Sort", UnionFind = "Union Find(Disjoint Set)", BinarySearch = "binary-search" } export default Category; <file_sep>import * as THREE from 'three'; import { buildPerfectBinaryTree } from "../../../data-structures/tree/nodes/utils/perfect-binary-tree" import TreeNode from "../../../data-structures/tree/nodes/v1/node"; import { buildTreeNode, lineMaterial, yDistance, xCenter } from "./styles"; import Position from '../../../data-structures/_commons/params/position.interface'; import { calDepth } from './depth'; export enum Direction { Left, Right } export interface Step { inorderLeft: number; inorderRight: number; postorderLeft: number; postorderRight: number; inorderRootIndex: number; leftTreeLength: number; parent?: TreeNode<number>; direction?: Direction; node: TreeNode<number>; } export interface InputOutput { inorder: number[]; postorder: number[]; steps: Step[]; xAxis: number[]; tree: TreeNode<number>[]; } const startPosition: THREE.Vector3 = new THREE.Vector3(xCenter, 11, 0); export function buildTree(inorder: number[], postorder: number[], scene: THREE.Scene): InputOutput { const depth: number = calDepth(inorder, postorder); const xAxisAlpha = 2; // expend the tree size in xAxis. 
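    // Pre-compute the x coordinate of every slot in a perfect binary tree of this depth,
    // then shift all of them by xAlpha so that the root column lines up with xCenter.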
const xAxis: number[] = buildPerfectBinaryTree(depth, xAxisAlpha, 2).map(node => node.x); const xAlpha = (xAxis.length === 0) ? 0 : xCenter - xAxis[0]; const steps: Step[] = []; const tree: TreeNode<number>[] = new Array(postorder.length); const inorderIndexMap = new Map<number, number>(); inorder.forEach((value, index) => inorderIndexMap.set(value, index)); const buildMyTree = ( inorderLeft: number, inorderRight: number, postorderLeft: number, postorderRight: number, center: Position, index: number, parent?: TreeNode<number>, direction?: Direction, ): TreeNode<number> | undefined => { if (postorderLeft > postorderRight) { return undefined; } const inorderRootIndex: number = inorderIndexMap.get(postorder[postorderRight])!; const leftTreeLength = inorderRootIndex - inorderLeft; const root = buildTreeNode(postorder[postorderRight], scene, center); root.index = index; root.show(); tree[postorderRight] = root; const { y, z } = root.val.center; steps.push({ postorderLeft, postorderRight, inorderLeft, inorderRight, inorderRootIndex, leftTreeLength, node: root, parent, direction }); const leftPosition = { x: xAxis[root.leftChildIndex] + xAlpha, y: y + yDistance, z }; const left = buildMyTree(inorderLeft, inorderRootIndex - 1, postorderLeft, postorderLeft + leftTreeLength - 1, leftPosition, root.leftChildIndex, root, Direction.Left); if (left) { root.setLeft(left, leftPosition, lineMaterial, 0, scene); } const rightPosition = { x: xAxis[root.rightChildIndex] + xAlpha, y: y + yDistance, z }; const right = buildMyTree(inorderRootIndex + 1, inorderRight, postorderLeft + leftTreeLength, postorderRight - 1, rightPosition, root.rightChildIndex, root, Direction.Right); if (right) { root.setRight(right, rightPosition, lineMaterial, 0, scene); } return root; } buildMyTree(0, postorder.length - 1, 0, inorder.length - 1, startPosition, 0); updateTreeColor(tree, steps[0]); return { postorder, inorder, steps, xAxis, tree }; }; export function updateTreeColor(tree: TreeNode<any>[], step?: Step) { tree.map((node, i) => updateTreeNodeColor(node, i, step)); } function updateTreeNodeColor(node: TreeNode<any>, i: number, step?: Step) { if (!step) { return; } const { postorderLeft, postorderRight, leftTreeLength } = step; if (i === postorderRight) { node.sphereColor = "lightgreen"; // root } else if (i >= postorderLeft && i < postorderLeft + leftTreeLength) { // left node.sphereColor = "yellow"; } else if (i >= postorderLeft + leftTreeLength && i < postorderRight) { // right node.sphereColor = "lightblue"; } else { // others node.sphereColor = "lightgray"; } } <file_sep>import { Point } from "../_commons/point"; interface NumsTableParams { current: Point; data: number[]; success: boolean; } export default NumsTableParams; <file_sep>export const title = "Validate Binary Search Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? 
null : right) * } * } */ function isValidBST(root: TreeNode | null): boolean { function isBST(root: TreeNode | null, lower: number, upper: number): boolean { if (root === null) { return true; } if (root.val <= lower || root.val >= upper) { return false; } return isBST(root.left, lower, root.val) && isBST(root.right, root.val, upper); } return isBST(root, -Infinity, Infinity); };`; export const description = ` Given the **root** of a binary tree, determine if it is a valid binary search tree (BST). A ***valid BST*** is defined as follows: - The left subtree of a node contains only nodes with keys **less than** the node's key. - The right subtree of a node contains only nodes with keys **greater than** the node's key. - Both the left and right subtrees must also be binary search trees. `; export const usecases = ''; export const example = ''; <file_sep>import TreeNode from "../dataNode"; export enum ActionType { GO_LEFT, GO_RIGHT, PRINT_VAL, BACK_TO_PARENT } export interface Action { node: TreeNode; action: ActionType; parent?: TreeNode; } export interface Actions { readonly length: number; get(index: number): Action; } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export interface Step { node: TreeNode<string>; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; function preorder(node?: TreeNode<string>) { if (!node) { return; } steps.push({ node }); preorder(node.left); preorder(node.right); } preorder(root); return steps; } <file_sep>export const title = "Regular Expression"; export const formula = ` const p = pattern.charAt(col - 1); const prev = pattern.charAt(col - 2); if (p === '.' || p === t) { table[row][col] = table[row - 1][col - 1]; } else if (p === '*') { if (table[row][col - 2] === true) { table[row][col] = true } else if (prev === '.' || prev === t) { table[row][col] = table[row - 1][col]; } } else { table[row][col] = false; } `; export const description = ` Given an input string (s) and a pattern (p), implement regular expression matching with support for '.' and '*'. - '.' Matches any single character. - '*' Matches zero or more of the preceding element. The matching should cover the entire input string (not partial). 
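
The recurrence shown in the formula only covers the transition step. A possible full
bottom-up implementation is sketched below (illustrative only; the function name
isMatch and the local variable names are just examples chosen to mirror the formula):

    function isMatch(text: string, pattern: string): boolean {
        const rows = text.length + 1;
        const cols = pattern.length + 1;
        const table: boolean[][] = Array.from({ length: rows }, () => new Array(cols).fill(false));
        table[0][0] = true;
        // an empty text can still match patterns such as "a*", "a*b*", ...
        for (let col = 2; col < cols; col++) {
            if (pattern.charAt(col - 1) === '*') {
                table[0][col] = table[0][col - 2];
            }
        }
        for (let row = 1; row < rows; row++) {
            for (let col = 1; col < cols; col++) {
                const t = text.charAt(row - 1);
                const p = pattern.charAt(col - 1);
                const prev = pattern.charAt(col - 2);
                if (p === '.' || p === t) {
                    table[row][col] = table[row - 1][col - 1];
                } else if (p === '*') {
                    // zero occurrences of prev, or one more occurrence if prev matches t
                    table[row][col] = table[row][col - 2] || ((prev === '.' || prev === t) && table[row - 1][col]);
                }
            }
        }
        return table[text.length][pattern.length];
    }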
`; export const usecases = ''; export const example = ` | PATTERN | TRUE | FALSE | |:-------:|:----------------------:|:--------------:| | a*b | b, ab, aab, aaab | a, abb, acb | | a.b | aab, abb, acb | a, ab , b | | c*a.b | aab, caab, ccaab, cccaab, cccabb, cccacb | baab, cab, cabbb | `; <file_sep>import createDpTable from "../algo"; const getLastCell = (table: number[][]): number => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("longest-common-subsequence", () => { const str1 = "algorithms"; const str2 = "alchemist"; const table = createDpTable(str1, str2); const distance = getLastCell(table); expect(distance).toBe(5); }); <file_sep>import { green } from '@mui/material/colors'; import { createTheme } from "@mui/material"; const theme = createTheme({ palette: { primary: { main: green[500], } }, typography: { fontFamily: [ "Circular", "-apple-system", "BlinkMacSystemFont", "Roboto", "Helvetica Neue", "sans-serif", "monospace" ].join(",") }, components: { MuiTable: { styleOverrides: { root: { width: '55%', maxWidth: "800px", borderRadius: 12, display: "inline-table" } } }, MuiTableCell: { styleOverrides: { root: { textAlign: 'center', borderBottom: 'none', '&:last-child': { paddingRight: 0, }, }, body: { borderWidth: 1, borderStyle: 'solid', borderColor: "gray", color: "black", borderRadius: 0, fontSize: 14, fontWeight: 400, height: 35, minWidth: 30, }, head: { borderWidth: 1, borderStyle: 'solid', borderColor: "gray", color: "black", borderRadius: 0, fontSize: 15, fontWeight: 600, height: 35, minWidth: 30, } } } } }); export default theme; <file_sep>import * as THREE from 'three'; import { OrbitControls } from "three/examples/jsm/controls/OrbitControls"; import { Font, FontLoader } from "three/examples/jsm/loaders/FontLoader"; import fontJson from "./fonts/Roboto_Regular.json"; const createRenderer = () => { const renderer = new THREE.WebGLRenderer({ alpha: true, antialias: true, }); renderer.setSize(window.innerWidth, window.innerHeight); return renderer; } const createCamera = () => { const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000); camera.position.z = 5; return camera; } const createScene = () => { return new THREE.Scene(); } const clearScene = (scene: THREE.Scene) => { while (scene.children.length > 0) { scene.remove(scene.children[0]); } } function onWindowResize(renderer: THREE.Renderer, camera: THREE.PerspectiveCamera) { camera.aspect = window.innerWidth / window.innerHeight; camera.updateProjectionMatrix(); renderer.setSize(window.innerWidth, window.innerHeight); } const loadFont = (): Promise<Font> => { const fontPath = '/fonts/Roboto_Regular.json'; return new Promise(resolve => { new FontLoader().load(fontPath, font => resolve(font)); }); }; const font = new Font(fontJson); function registerOrbitControls(camera: THREE.Camera, renderer: THREE.Renderer, scene: THREE.Scene): void { const controls = new OrbitControls(camera, renderer.domElement); controls.rotateSpeed = 0.5; controls.update(); controls.addEventListener('change', () => renderer.render(scene, camera)); } const createDefaultGridHelper = () => { const gridHelper = new THREE.GridHelper(2000, 100, "black", "black"); gridHelper.position.y = -459; const material = gridHelper.material; if (material instanceof THREE.Material) { material.opacity = 0.95; material.transparent = true; } return gridHelper; } function registeGrid(scene: THREE.Scene, gridHelper: THREE.GridHelper) { scene.add(gridHelper); } export { createRenderer, createCamera, 
createScene, clearScene, onWindowResize, loadFont, font, registerOrbitControls, registeGrid, createDefaultGridHelper }; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Stage { Serialize, Deserialize } export enum Place { Pre, Post } export enum Direction { Left, Right } export interface Step { i?: number; stage: Stage; place: Place; direction?: Direction; node?: TreeNode<string>; values: string[]; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; const nodes: (TreeNode<string> | undefined)[] = []; function serialize(root: TreeNode<string>): string { const result: string[] = []; function preorder(node?: TreeNode<string>, direction?: Direction) { if (!node) { result.push("#"); nodes.push(node); steps.push({ stage: Stage.Serialize, values: [...result], node, place: Place.Pre, direction, }); return; } nodes.push(node); result.push(node.val.value + ""); steps.push({ stage: Stage.Serialize, values: [...result], node, place: Place.Pre, direction, }); preorder(node.left, Direction.Left); preorder(node.right, Direction.Right); steps.push({ stage: Stage.Serialize, values: [...result], node, place: Place.Post, direction, }); } preorder(root); return result.join(","); }; function deserialize(data: string): TreeNode<string> | undefined { let i = 0; function preorder(array: string[], direction?: Direction): TreeNode<string> | undefined { const node = nodes[i]; steps.push({ stage: Stage.Deserialize, values: array, node, place: Place.Pre, i, direction }); const value = array[i]; i += 1; if (!value || value === "#") { return undefined; } preorder(array, Direction.Left); // left preorder(array, Direction.Right); // right return node; } return preorder(data.split(",")); } if (root) { const str = serialize(root); deserialize(str); } return steps; } <file_sep>export const title = "Binary Tree Preorder Traversal"; export const formula = ` function preorder(node: Node) { print(node); if (node.left) { preorder(node.left); } if (node.right) { preorder(node.right); } } OR function preorder(node: Node | null) { if (node == null) { return; } print(node); preorder(node.left); preorder(node.right); }`; export const description = ` For a binary tree, they are defined as access operations at each node, starting with the current node, then Go down one level to Children: - (L) Recursively traverse left subtree. - (R) Recursively traverse right subtree. - (N) Process the current node N itself. Return by going up one level and arriving at the parent node. --- Pre-order (NLR) 1. Access the data part of the current node. 2. Traverse the left subtree by recursively calling the pre-order function. 3. Traverse the right subtree by recursively calling the pre-order function. The pre-order traversal is a topologically sorted one, because a parent node is processed before any of its child nodes is done. From Wikipedia. 
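
The same NLR order can also be produced without recursion by managing an explicit
stack. The sketch below is illustrative only and assumes the same Node type and
print helper used by the recursive formula:

    function preorderIterative(root: Node | null) {
        const stack: Node[] = [];
        if (root !== null) {
            stack.push(root);
        }
        while (stack.length > 0) {
            const node = stack.pop()!;
            print(node);
            // push the right child first so the left child is processed first (LIFO)
            if (node.right) {
                stack.push(node.right);
            }
            if (node.left) {
                stack.push(node.left);
            }
        }
    }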
`; export const usecases = ''; export const example = ''; <file_sep>import * as THREE from "three"; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; export interface ArrowStyles { color: THREE.Color | string | number; headLength: number; headWidth: number; } export interface NodeStyles { width: number; height: number; color: THREE.Color | string | number; textGeometryParameters: TextGeometryParameters; textColor: THREE.Color | string | number; } <file_sep>import { Sphere } from './sphere.interface'; import Position from "../params/position.interface"; import Color from "../params/color.interface"; export type TextSphere<T> = Sphere & { value: T; textPosition: Position; textColor: Color; }; <file_sep>export const title = "Basic Calculator"; export const shortFormula = `function calculate(s: string): number { let result = 0; let sign = 1; const stack: number[] = []; for (let i = 0; i < s.length; i++) { const c = s.charAt(i); if (isNumeric(c)) { let current: number = +c; while (i + 1 < s.length && isNumeric(s.charAt(i + 1))) { current = current * 10 + (+s.charAt(i + 1)); i++; } result += current * sign; } else if (c === "+") { sign = 1; } else if (c === "-") { sign = -1; } else if (c === "(") { stack.push(result); stack.push(sign); result = 0; sign = 1; } else if (c === ")") { const previousSign = stack.pop(); const previousResult = stack.pop(); result = previousSign * result + previousResult; } } return result; }` export const formula = `function isNumeric(n: string) { const value = parseInt(n); return !isNaN(value) && isFinite(value); } function calculate(s: string): number { let sign = 1; let result = 0; const stack: number[] = []; for (let i = 0; i < s.length; i++) { const c = s.charAt(i); if (isNumeric(c)) { let current: number = +c; while (i + 1 < s.length && isNumeric(s.charAt(i + 1))) { current = current * 10 + (+s.charAt(i + 1)); i++; } result += current * sign; } else if (c === "+") { sign = 1; } else if (c === "-") { sign = -1; } else if (c === "(") { stack.push(result); stack.push(sign); result = 0; sign = 1; } else if (c === ")") { const previousSign = stack.pop(); const previousResult = stack.pop(); result = previousSign * result + previousResult; } } return result; };`; export const description = `## ${title} --- Given a string s representing a valid expression, implement a basic calculator to evaluate it, and return the result of the evaluation. Note: You are **not** allowed to use any built-in function which evaluates strings as mathematical expressions, such as eval(). ### Constraints + 1 <= s.length <= 3 * 105 + s consists of digits, '+', '-', '(', ')', and ' '. + s represents a valid expression. + '+' is not used as a unary operation (i.e., "+1" and "+(2 + 3)" is invalid). + '-' could be used as a unary operation (i.e., "-1" and "-(2 + 3)" is valid). + There will be no two consecutive operators in the input. + Every number and running calculation will fit in a signed 32-bit integer. ### Exmaple ##### Example 1: - Input: s = "1 + 1" - Output: 2 ##### Example 2: - Input: s = "12 - 3 + 8 + 6" - Output: 23 ##### Example 3: - Input: s = "11 + (12 - (3 - (6 + 5 + 2) - 1) + 4)" - Output: 38 `; <file_sep>export const title = "Count Complete Tree Nodes"; export const NodeDefinition = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 
0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ `; export const formula0 = ` function countNodes(root: TreeNode | null): number { function getLeftLeafIndex(node: TreeNode, index: number): number { if (!node.left) { return index; } return getLeftLeafIndex(node.left, 2 * index + 1); } // Go right first, so the first value, which is >= leftLeafIndex is the target leaf. function rightLeftDfs(node: TreeNode | null, index: number, leftLeafIndex: number) { if (!node) { return 0; } if (index >= leftLeafIndex) { return index; } return rightLeftDfs(node.right, 2 * index + 2, leftLeafIndex) || rightLeftDfs(node.left, 2 * index + 1, leftLeafIndex); } if (!root) return 0; const leftLeafIndex = getLeftLeafIndex(root, 0); return rightLeftDfs(root, 0, leftLeafIndex) + 1; };` export const formula1 = ` function countNodes(root: TreeNode | null): number { function countLevel(node: TreeNode | null, level: number): number { if (!node) { return level; } return countLevel(node.left, level + 1); } function dfs(node: TreeNode | null) { if (!node) { return 0; } const leftLevels = countLevel(node.left, 0); const rightLevels = countLevel(node.right, 0); if (leftLevels === rightLevels) { return dfs(node.right) + Math.pow(2, leftLevels); } else { return dfs(node.left) + Math.pow(2, rightLevels); } } return dfs(root); };` export const description = ` Given the **root** of a complete binary tree, return the number of the nodes in the tree. According to Wikipedia, every level, except possibly the last, is completely filled in a complete binary tree, and all nodes in the last level are as far left as possible. It can have between **1** and **2^h** nodes inclusive at the last level **h**. Design an algorithm that runs in less than **O(n)** time complexity. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import Container from "../_commons/container"; import { swap } from "../_commons/helps"; import Step from "./step"; export const sort = (arrays: Container[]): Step[] => { const steps: Step[] = []; for (let i = arrays.length - 1; i >= 0; i--) { for (let y = 0; y < i; y++) { const a = arrays[y]; const b = arrays[y + 1]; let finished = undefined; let exchange = (a.payload > b.payload) ? true : false; if (exchange) { swap(arrays, y, y + 1); } if (y + 1 === i) { finished = exchange ? 
a : b; } const step: Step = { a, b, exchange, finished }; steps.push(step); } } return steps; } <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { TextSphere } from "../../_commons/sphere/text-sphere.interface"; import TreeNode from "../nodes/v2/node"; import Text from '../../_commons/params/text.interface'; import TextImpl from '../../_commons/three/text.class'; import Position from '../../_commons/params/position.interface'; import { calDestination, calDistance } from '../../_commons/utils'; export default class SegmentTreeNode extends TreeNode<number> { start: Text<number>; end: Text<number>; left?: SegmentTreeNode; right?: SegmentTreeNode; constructor( value: TextSphere<number>, start: number, end: number, rangeMaterial: THREE.Material, rangeGeometryParameters: TextGeometryParameters, scene: THREE.Scene, index?: number ) { super(value, index); this.start = new TextImpl(start, rangeMaterial, rangeGeometryParameters, scene); this.end = new TextImpl(end, rangeMaterial, rangeGeometryParameters, scene); } show() { super.show(); this.start.show(); this.end.show(); return this; } hide() { super.hide(); this.start.hide(); this.end.hide(); return this; } async moveTo(dest: Position, duration: number, onUpdate?: () => void): Promise<void> { const distance = calDistance(this.value.center, dest); const startDest = calDestination(this.start, distance); const endDest = calDestination(this.end, distance); const nodeMove = super.moveTo(dest, duration); const startMove = this.start.move(startDest, duration); const endMove = this.end.move(endDest, duration); return Promise.all([nodeMove, startMove, endMove]).then(() => { }); } } <file_sep>import TreeNode from "../../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export enum Action { Push, Pop } export interface Step { node: TreeNode<string>; level: number; action: Action; result: string[][]; } function cloneResult(result: string[][]): string[][] { return result.map(level => [...level]); } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; function levelOrder(root?: TreeNode<string>): string[][] { const result: string[][] = []; if (!root) { return result; } let level = 0; let reverse = false; const queue: TreeNode<string>[] = []; queue.push(root); steps.push({ node: root, action: Action.Push, level, result: cloneResult(result) }); while (queue.length !== 0) { const values: string[] = []; result.push(values); const length = queue.length; for (let i = 0; i < length; i++) { const node = queue.shift()!; if (reverse) { values.unshift(node.val.value); } else { values.push(node.val.value); } steps.push({ node, action: Action.Pop, level, result: cloneResult(result) }); if (node.left) { queue.push(node.left); steps.push({ node: node.left, action: Action.Push, level, result: cloneResult(result) }); } if (node.right) { queue.push(node.right); steps.push({ node: node.right, action: Action.Push, level, result: cloneResult(result) }); } } reverse = !reverse level += 1; } return result; }; levelOrder(root); return steps; } <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import { TextCube } from '../../../data-structures/_commons/cube/three/text-cube'; import { node, text } from "./stackStyles"; class StackItemBuilder<T> { private _value: T; private _scene: THREE.Scene; private 
_textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: text.color }); private _textGeometryParameters: TextGeometryParameters = { font, size: text.size, height: text.height }; private _cubeMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "white", opacity: 0, transparent: true }); private _cubeGeometry: THREE.BoxGeometry = new THREE.BoxGeometry(node.size.width, node.size.height, node.size.depth); private _position: THREE.Vector3 = new THREE.Vector3(0, 0, 0); private _show: boolean; constructor(value: T, scene: THREE.Scene, show: boolean) { this._value = value; this._scene = scene; this._show = show; } position(x: number, y: number, z: number): StackItemBuilder<T> { this._position = new THREE.Vector3(x, y, z); return this; } cubeMaterial(material: THREE.Material): StackItemBuilder<T> { this._cubeMaterial = material; return this; } cubeGeometry(width: number, height: number, depth: number): StackItemBuilder<T> { this._cubeGeometry = new THREE.BoxGeometry(width, height, depth); return this; } textMaterial(material: THREE.Material): StackItemBuilder<T> { this._textMaterial = material; return this; } textGeometryParameters(parameters: TextGeometryParameters): StackItemBuilder<T> { this._textGeometryParameters = parameters; return this; } build(): TextCube<T> { const item = new TextCube<T>( this._value, this._textMaterial, this._textGeometryParameters, this._cubeMaterial, this._cubeGeometry, this._scene ); this.setPosition(item); if (this._show) { item.show(); } return item; } private setPosition(item: TextCube<T>): void { item.position.x = this._position.x; item.position.y = this._position.y; item.position.z = this._position.z; const length = String(item.value).length; if (length === 1) { item.textPosition.x = item.position.x - 0.2; } else if (length === 2) { item.textPosition.x = item.position.x - 0.32; } else if (length === 3) { item.textPosition.x = item.position.x - 0.44; } item.textPosition.y = item.position.y - 0.2; item.textPosition.z = item.position.z; } } export default StackItemBuilder; <file_sep>export const title = "Subset Sum Problem"; export const formula = ` if (element > weight) { table[row][col] = table[row - 1][col]; } else { table[row][col] = table[row - 1][col] || table[row - 1][weight - element]; } `; export const description = ` Given a set of non-negative integers, and a value sum, determine if there is a subset of the given set with sum equal to given sum. 
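
Filling the whole table with the recurrence above could look like the following
sketch (illustrative only; isSubsetSum is just an example name, rows stand for the
elements considered so far and columns for the partial sums 0..sum):

    function isSubsetSum(set: number[], sum: number): boolean {
        const table: boolean[][] = Array.from(
            { length: set.length + 1 },
            () => new Array(sum + 1).fill(false)
        );
        for (let row = 0; row <= set.length; row++) {
            table[row][0] = true; // a sum of 0 is always reachable via the empty subset
        }
        for (let row = 1; row <= set.length; row++) {
            const element = set[row - 1];
            for (let weight = 1; weight <= sum; weight++) {
                if (element > weight) {
                    table[row][weight] = table[row - 1][weight];
                } else {
                    table[row][weight] = table[row - 1][weight] || table[row - 1][weight - element];
                }
            }
        }
        return table[set.length][sum];
    }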
`; export const usecases = ''; export const example = ` Example 1: - Input: set = \\[3, 34, 4, 12, 5, 2\\], sum = 9 - Output: True - Explanation: subset \\[4, 5\\] with sum 9 Example 2: - Input: set = \\[3, 34, 4, 12, 5, 2\\], sum = 35 - Output: False `; <file_sep>import { Iterator } from './iterator'; export class ArrayIterator<T> implements Iterator<T> { private items: T[]; private current: number; constructor(items: T[]) { this.items = items; this.current = 0; } hasNext(): boolean { return this.current < this.items.length; } next(): T { const result = this.items[this.current]; this.current += 1; return result; } } <file_sep>import Mover from "../params/mover.interface"; import Display from "../params/displayer.interface"; import Position from "../params/position.interface"; export interface Cube extends Mover, Display { width: number; height: number; depth: number; position: Position; } <file_sep>export const lazyDeleteSolution = `abstract class Heap { protected items: number[]; private deleted: Map<number, number>; private deletedCount: number; constructor() { this.items = []; this.deleted = new Map(); this.deletedCount = 0; } size() { return this.items.length - this.deletedCount; } peek() { return this.items[0]; } push(item: number) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const top = this.deleteTop(); this.prune(); return top; } private deleteTop() { const root = this.items.shift(); const last = this.items.pop(); if (last !== undefined) { this.items.unshift(last); this.bubbleDown(0); } return root; } private prune() { let top = this.peek(); while (top !== undefined && this.deleted.has(top)) { this.deleteTop(); const count = this.deleted.get(top); if (count === 1) { this.deleted.delete(top); } else { this.deleted.set(top, count - 1); } this.deletedCount -= 1; top = this.peek(); } } delete(item: number): number | undefined { const top = this.peek(); if (item === top) { this.pop(); return item; } if (this.items.includes(item)) { const count = this.deleted.get(item) || 0; this.deleted.set(item, count + 1); this.deletedCount += 1; return item; } return undefined; } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.shouldBubbleUp(index, parentIndex)) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } protected abstract shouldBubbleUp(index: number, parentIndex: number): boolean; protected abstract shouldBubbleDown(index: number, childIndex: number): boolean; } class MinHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number) { return this.items[index] < this.items[parentIndex]; } protected shouldBubbleDown(index: number, childIndex: number) { return this.items[index] > this.items[childIndex]; } } class MaxHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number) { return this.items[index] > this.items[parentIndex]; } protected shouldBubbleDown(index: number, childIndex: number) { return this.items[index] < this.items[childIndex]; } } class DualHeap { smaller: 
MaxHeap; greater: MinHeap; constructor() { this.smaller = new MaxHeap(); this.greater = new MinHeap(); } push(item: number) { if (this.smaller.size() === 0 || item <= this.smaller.peek()) { this.smaller.push(item); } else { this.greater.push(item); } this.balance(); } private balance() { if (this.smaller.size() > this.greater.size() + 1) { const temp = this.smaller.pop(); this.greater.push(temp); } else if (this.greater.size() > this.smaller.size()) { const temp = this.greater.pop(); this.smaller.push(temp); } } delete(item: number) { if (this.smaller.delete(item) === undefined) { this.greater.delete(item); } this.balance(); } median() { if (this.smaller.size() === this.greater.size()) { const one = this.smaller.peek() || 0; const two = this.greater.peek() || 0; return (one + two) / 2; } return this.smaller.peek(); } } function medianSlidingWindow(nums: number[], k: number): number[] { const heap = new DualHeap(); let i = 0; for (; i < k; i++) { heap.push(nums[i]); } const result: number[] = []; for (; i < nums.length; i++) { result.push(heap.median()); heap.delete(nums[i - k]); heap.push(nums[i]); } result.push(heap.median()); return result; };` <file_sep>export const title = "Minimum Path Sum"; export const formula = ` table[row][col] = table[row][col] + Math.min( table[row - 1][col], table[row][col - 1] ); `; export const description = `Given a m x n grid filled with non-negative numbers, find a path from top left to bottom right which minimizes the sum of all numbers along its path. Note: You can only move either down or right at any point in time. `; export const usecases = ''; export const example = ''; <file_sep>import IColor from "../params/color.interface"; export default class Color implements IColor { private material: THREE.Material; constructor(material: THREE.Material) { this.material = material; } setColor(color: string): Promise<void> { return (this.material as any).color.set(color); } get color(): string { return (this.material as any).color.getHexString(); } } <file_sep>import Heap from "./heap.class"; import { Comparable } from "./heap.interface"; class MinHeap<T extends Comparable | string | number> extends Heap<T>{ protected shouldBubbleUp(current: T, parent: T): boolean { if (this.isPrimaryType(current)) { return current < parent; } else { return (current as Comparable).compareTo(parent as Comparable) < 0; } } protected shouldBubbleDown(current: T, child: T): boolean { if (this.isPrimaryType(current)) { return current > child; } else { return (current as Comparable).compareTo(child as Comparable) > 0; } } } export default MinHeap; <file_sep>import { LineStyles, NodeStyles } from "./HighLevelDisplayNode"; export const INITIALED_LINE_ATTRIBUTES: LineStyles = { stroke: "gray", strokeWidth: "1", }; export const INITIALED_NODE_ATTRIBUTES: NodeStyles = { textStyles: { fill: "gray", fontSize: "20", }, circleStyles: { fill: "white", stroke: "gray", strokeWidth: "1", }, }; export const ACTIVATED_LINE_ATTRIBUTES: LineStyles = { stroke: "lightblue", strokeWidth: "1", }; export const ACTIVATED_NODE_ATTRIBUTES: NodeStyles = { textStyles: { fill: "gray", fontSize: "20", }, circleStyles: { fill: "lightblue", stroke: "", strokeWidth: "1", } }; export const FINISHED_LINE_ATTRIBUTES: LineStyles = { stroke: "green", strokeWidth: "1", }; export const FINISHED_NODE_ATTRIBUTES: NodeStyles = { textStyles: { fill: "white", fontSize: "20", }, circleStyles: { fill: "green", stroke: "", strokeWidth: "1", }, }; export const PRINTED_NODE_ATTRIBUTES: NodeStyles = { textStyles: { fill: "white", 
fontSize: "20", }, circleStyles: { fill: "gold", stroke: "", strokeWidth: "1", }, }; <file_sep>import { Point } from "../../commons/point"; const random = (): number => Math.round(Math.random()); export const buildGrid = (rows: number, cols: number): number[][] => { const grid: number[][] = []; for (let row = 0; row < rows; row++) { grid.push([]); for (let col = 0; col < cols; col++) { grid[row].push(random()); } } return grid; } const cloneGrid = (grid: number[][]): number[][] => grid.map(row => [...row]); export enum Direction { Up, Right, Down, Left, StartDFS, SkipDFS, Rollback } export interface Step { grid: number[][]; point: Point; direction: Direction; numIslands: number; } export const buildSteps = (grid: number[][]): Step[] => { const steps: Step[] = []; let numIslands = 0; const land = 1; const visited = 2; const inArea = (row: number, col: number): boolean => { return row >= 0 && row < grid.length && col >= 0 && col < grid[row].length; } const dfs = (row: number, col: number, direction: Direction) => { if (!inArea(row, col)) { return; } if (grid[row][col] !== land) { return } steps.push({ grid: cloneGrid(grid), point: { row, col }, numIslands, direction }); grid[row][col] = visited; dfs(row - 1, col, Direction.Up); dfs(row, col + 1, Direction.Right); dfs(row + 1, col, Direction.Down); dfs(row, col - 1, Direction.Left); } for (let row = 0; row < grid.length; row++) { for (let col = 0; col < grid[row].length; col++) { if (grid[row][col] === land) { numIslands += 1; dfs(row, col, Direction.StartDFS); } else { steps.push({ grid: cloneGrid(grid), point: { row, col }, numIslands, direction: Direction.SkipDFS }); } } } return steps; } <file_sep>export const title = 'Surrounded Regions'; export const formula = `function solve(board: string[][]): void { const inArea = (row: number, col: number): boolean => { return row >= 0 && col >= 0 && row < board.length && col < board[row].length; } const dfs = (row: number, col: number) => { if (!inArea(row, col)) { return; } if (board[row][col] !== "O") { return; } board[row][col] = "#"; dfs(row - 1, col); dfs(row, col + 1); dfs(row + 1, col); dfs(row, col - 1); } for (let row = 0; row < board.length; row++) { dfs(row, 0); dfs(row, board[row].length - 1); } for (let col = 1; col < board[0].length - 1; col++) { dfs(0, col); dfs(board.length - 1, col); } for (let row = 0; row < board.length; row++) { for (let col = 0; col < board[row].length; col++) { if (board[row][col] === 'O') { board[row][col] = 'X'; } else if (board[row][col] === "#") { board[row][col] = 'O'; } } } };`; export const description = ` Given an **m x n** matrix board containing 'X' and 'O', capture all regions that are 4-directionally surrounded by 'X'. A region is captured by flipping all 'O's into 'X's in that surrounded region. 
`; export const usecases = ''; export const example = ""; <file_sep>export const title = "Find Median from Data Stream"; export const heap = `abstract class Heap { protected items: number[] constructor() { this.items = []; } push(num: number) { this.items.push(num); this.bubbleUp(this.items.length - 1); } pop() { const head = this.items.shift(); const last = this.items.pop(); if (last !== undefined) { this.items.unshift(last); this.bubbleDown(0); } return head; } peek() { return this.items[0]; } size() { return this.items.length; } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target === index) { return; } this.swap(index, target); this.bubbleDown(target); } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.shouldBubbleUp(index, parentIndex)) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } protected abstract shouldBubbleUp(index: number, parentIndex: number): boolean; protected abstract shouldBubbleDown(index: number, childIndex: number): boolean; } class MinHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number): boolean { const current = this.items[index]; const parent = this.items[parentIndex]; return current < parent; } protected shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.items[index]; const child = this.items[childIndex]; if (current === undefined || child === undefined) { return false; } return current > child; } } class MaxHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number): boolean { const current = this.items[index]; const parent = this.items[parentIndex]; return current > parent; } protected shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.items[index]; const child = this.items[childIndex]; if (current === undefined || child === undefined) { return false; } return current < child; } }`; export const medianFinder = `class MedianFinder { private smaller: MaxHeap; private greater: MinHeap; constructor() { this.smaller = new MaxHeap(); this.greater = new MinHeap(); } addNum(num: number): void { if (this.smaller.size() === 0 || num <= this.smaller.peek()) { this.smaller.push(num); if (this.smaller.size() > this.greater.size() + 1) { this.greater.push(this.smaller.pop()); } } else { this.greater.push(num); if (this.greater.size() > this.smaller.size()) { this.smaller.push(this.greater.pop()); } } } findMedian(): number { if (this.greater.size() === this.smaller.size()) { return (this.greater.peek() + this.smaller.peek() ) / 2; } else { return this.smaller.peek(); } } }`; export const formula = `abstract class Heap { protected items: number[] constructor() { this.items = []; } push(num: number) { this.items.push(num); this.bubbleUp(this.items.length - 1); } pop() { const head = this.items.shift(); const last = this.items.pop(); if(last !== undefined) { this.items.unshift(last); } this.bubbleDown(0); return head; } peek() { return this.items[0]; } size() { return this.items.length; } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) 
{ target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target === index) { return; } this.swap(index, target); this.bubbleDown(target); } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.shouldBubbleUp(index, parentIndex)) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } protected abstract shouldBubbleUp(index: number, parentIndex: number): boolean; protected abstract shouldBubbleDown(index: number, childIndex: number): boolean; } class MinHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number): boolean { const current = this.items[index]; const parent = this.items[parentIndex]; return current < parent; } protected shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.items[index]; const child = this.items[childIndex]; if (current === undefined || child === undefined) { return false; } return current > child; } } class MaxHeap extends Heap { protected shouldBubbleUp(index: number, parentIndex: number): boolean { const current = this.items[index]; const parent = this.items[parentIndex]; return current > parent; } protected shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.items[index]; const child = this.items[childIndex]; if (current === undefined || child === undefined) { return false; } return current < child; } } class MedianFinder { private smaller: MaxHeap; private greater: MinHeap; constructor() { this.smaller = new MaxHeap(); this.greater = new MinHeap(); } addNum(num: number): void { if (this.smaller.size() === 0 || num <= this.smaller.peek()) { this.smaller.push(num); if (this.smaller.size() > this.greater.size() + 1) { this.greater.push(this.smaller.pop()); } } else { this.greater.push(num); if (this.greater.size() > this.smaller.size()) { this.smaller.push(this.greater.pop()); } } } findMedian(): number { if (this.greater.size() === this.smaller.size()) { return (this.greater.peek() + this.smaller.peek() ) / 2; } else { return this.smaller.peek(); } } }`; export const description = ` The median is the middle value in an ordered integer list. If the size of the list is even, there is no middle value, and the median is the mean of the two middle values. - For example, for arr = [3, 6, 9], the median is 6. - For example, for arr = [4, 6], the median is (4 + 6) / 2 = 5. 
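
Implement the MedianFinder class so that addNum(num) adds an integer from the data
stream and findMedian() returns the median of all elements seen so far. A small
usage sketch of the two-heap implementation shown on this page:

    const finder = new MedianFinder();
    finder.addNum(1);
    finder.addNum(2);
    finder.findMedian(); // 1.5
    finder.addNum(3);
    finder.findMedian(); // 2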
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import SegmentTreeNode from "./segment-tree-node"; import ISegmentTree from "./segment-tree.interface"; import { wait } from "../../_commons/utils"; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import Position from "../../_commons/params/position.interface" import { buildPerfectBinaryTree, TreeNode as TreePosition } from '../nodes/utils/perfect-binary-tree'; import { getLeftChildIndex } from "../nodes/utils/tree-node-utils"; import { getRightChildIndex } from "../nodes/utils/tree-node-utils"; import { getParentIndex } from "../nodes/utils/tree-node-utils"; import TextSphere from "../../_commons/sphere/three/text-sphere"; import Line from "../nodes/line"; export default class SegmentTree implements ISegmentTree { private root?: SegmentTreeNode; private scene: THREE.Scene; private normalSphereColor: string; private enabledSphereColor: string; private sphereGeometry: THREE.SphereGeometry; private sphereMaterial: () => THREE.Material; private textMaterial: THREE.Material; private textGeometryParameters: TextGeometryParameters; private lineMaterial: THREE.LineBasicMaterial; private rangeMaterial: THREE.Material; private rangeGeometryParameters: TextGeometryParameters; private initPosition: Position; private positions: TreePosition[]; constructor( scene: THREE.Scene, normalSphereColor: string, enabledSphereColor: string, sphereGeometry: THREE.SphereGeometry, sphereMaterial: () => THREE.Material, textMaterial: THREE.Material, textGeometryParameters: TextGeometryParameters, lineMaterial: THREE.LineBasicMaterial, rangeMaterial: THREE.Material, rangeGeometryParameters: TextGeometryParameters, position: Position, depth: number, nodeDistance: Position, initPosition: Position ) { this.scene = scene; this.normalSphereColor = normalSphereColor; this.enabledSphereColor = enabledSphereColor; this.sphereGeometry = sphereGeometry; this.sphereMaterial = sphereMaterial; this.textMaterial = textMaterial; this.textGeometryParameters = textGeometryParameters; this.lineMaterial = lineMaterial; this.rangeMaterial = rangeMaterial; this.rangeGeometryParameters = rangeGeometryParameters; this.initPosition = initPosition; this.positions = this.buildTreeNodesPositions(depth, position, nodeDistance); } private buildTreeNodesPositions(depth: number, { x, y }: Position, nodeDistance: Position) { const positions = buildPerfectBinaryTree(depth, nodeDistance.x, nodeDistance.y); const xAlpha = (positions.length === 0) ? 
0 : x - positions[0].x; positions.forEach(position => { position.x += xAlpha; position.y += y; }); return positions; } private async buildNode( value: number, start: number, end: number, index: number, position: Position, duration: number ): Promise<SegmentTreeNode> { const textSphere = new TextSphere<number>( value, this.sphereGeometry, this.sphereMaterial(), this.textMaterial, this.textGeometryParameters, this.scene ); const { x, y, z } = this.initPosition; textSphere.center.x = x; textSphere.center.y = y; textSphere.center.z = z; textSphere.textPosition.x = this.calTextX(value, x); textSphere.textPosition.y = y - 0.4; textSphere.textPosition.z = z; const node = new SegmentTreeNode(textSphere, start, end, this.rangeMaterial, this.rangeGeometryParameters, this.scene, index).show(); node.value.sphereColor.setColor(this.enabledSphereColor); node.start.x = this.calStartX(node.start.value, x); node.start.y = y - 1.2; node.start.z = z; node.end.x = x + 0.5; node.end.y = y - 1.2; node.end.z = z; await node.moveTo(position, duration); node.value.sphereColor.setColor(this.normalSphereColor); return node; } private calTextX(value: number, x: number): number { const length: number = ("" + value).length; switch (length) { case 0: return x; case 1: return x - 0.3; case 2: return x - 0.6; case 3: return x - 0.8; default: return x - 1; } } private calStartX(value: number, x: number): number { const length: number = ("" + value).length; switch (length) { case 0: return x; case 1: return x - 0.8; case 2: return x - 1.2; case 3: return x - 1.4; default: return x - 1.6; } } async build(nums: number[], duration: number): Promise<void> { this.root = await this.buildTree(nums, 0, nums.length - 1, 0, this.positions[0], duration); } private async buildTree( nums: number[], start: number, end: number, index: number, position: TreePosition, duration: number ): Promise<SegmentTreeNode> { if (start === end) { return this.buildNode(nums[start], start, end, index, { x: position.x, y: position.y, z: 0 }, duration); } const mid = Math.floor((start + end) / 2); const left = await this.buildTree(nums, start, mid, getLeftChildIndex(index), position.left!, duration); const right = await this.buildTree(nums, mid + 1, end, getRightChildIndex(index), position.right!, duration); const value = left.value.value + right.value.value; const node = await this.buildNode(value, start, end, index, { x: position.x, y: position.y, z: 0 }, duration); node.left = left; node.right = right; this.buildTreeLine(getLeftChildIndex(index)); this.buildTreeLine(getRightChildIndex(index)); return node; } private buildTreeLine(index: number): Line { const nodePosition = this.positions[index]; const parentPosition = this.positions[getParentIndex(index)]; return new Line( { x: parentPosition.x, y: parentPosition.y, z: 0 }, { x: nodePosition.x, y: nodePosition.y, z: 0 }, this.lineMaterial, this.scene ).show(); } update(index: number, value: number, duration: number): Promise<void> { return this.updateTree(this.root, index, value, duration); } private async updateTree(node: SegmentTreeNode | undefined, index: number, value: number, duration: number): Promise<void> { if (node === undefined) { return; } node.value.sphereColor.setColor(this.enabledSphereColor); await wait(duration); if (node.start.value === index && node.end.value === index) { node.value.value = value; node.value.sphereColor.setColor(this.normalSphereColor); await wait(duration); return; } const mid = Math.floor((node.start.value + node.end.value) / 2); if (index <= mid) { await 
this.updateTree(node.left, index, value, duration); } else { await this.updateTree(node.right, index, value, duration); } node.value.value = (node.left?.value.value || 0) + (node.right?.value.value || 0); node.value.sphereColor.setColor(this.normalSphereColor); return wait(duration); } query(left: number, right: number, duration: number): Promise<number | undefined> { return this.queryRange(this.root, left, right, duration); } private async queryRange(node: SegmentTreeNode | undefined, left: number, right: number, duration: number): Promise<number | undefined> { if (node === undefined) { return; } node.value.sphereColor.setColor(this.enabledSphereColor); await wait(duration); if (node.start.value === left && node.end.value === right) { node.value.sphereColor.setColor(this.normalSphereColor); await wait(duration); return node.value.value; } const mid = Math.floor((node.start.value + node.end.value) / 2); let value: number | undefined = undefined; if (right <= mid) { value = await this.queryRange(node.left, left, right, duration); } else if (left > mid) { value = await this.queryRange(node.right, left, right, duration); } else { const a = await this.queryRange(node.left, left, mid, duration) || 0; const b = await this.queryRange(node.right, mid + 1, right, duration) || 0; value = a + b; } node.value.sphereColor.setColor(this.normalSphereColor); await wait(duration); return value; } } <file_sep>export const title = "Zigzag Conversion"; export const formula = `function convert(s: string, numRows: number): string { if (numRows === 1) return s; const rows: string[] = []; for (let i = 0; i < numRows; i++) { rows[i] = ""; } let row = 0; let flag = -1; for (let i = 0; i < s.length; i++) { rows[row] = rows[row] + s.charAt(i); if (row === 0 || row === numRows - 1) { flag = -1 * flag; } row += flag; } return rows.reduce((a, b) => a + b); }`; export const description = ` #### Description The string "ALCHEMIST" is written in a ***zigzag*** pattern on a given number of rows like this: A &nbsp;&nbsp;&nbsp; E &nbsp;&nbsp;&nbsp; T L H M S C &nbsp;&nbsp;&nbsp; I And then read line by line: "AETLHMSCI". 
`; export const solution = ``; export const usecases = ''; export const examples = ` #### Example 1: - Input: s = "ALCHEMIST", numRows = 3 - Output: "AETLHMSCI" #### Example 2: - Input: s = "ALCHEMIST", numRows = 2 - Output: "ACEITLHMS" `; <file_sep>import Node, { Color } from "./node"; export default class Tree<T> { root?: Node<T>; constructor() { this.root = undefined; } public insert(val: T) { this.root = this.put(val, this.root); this.root.color = Color.Black; } private put(val: T, node?: Node<T>): Node<T> { if (node === undefined) { return new Node(val); } if (val < node.val) { node.left = this.put(val, node.left); } else { node.right = this.put(val, node.right); } if ( node.right && node.right.isRed && (node.left === undefined || node.left.isBlack) ) { node = this.rotateLeft(node); } if ( node.left && node.left.isRed && node.left.left && node.left.left.isRed ) { node = this.rotateRight(node); } if ( node.left && node.left.isRed && node.right && node.right.isRed ) { this.flipColors(node); } return node; } private rotateLeft(node: Node<T>): Node<T> { if (node.right === undefined) { throw new Error("can not find right child for rotateLeft"); } if (node.right.color === Color.Black) { throw new Error("right child must be red for rotateLeft"); } const x = node.right; node.right = x.left; x.left = node; x.color = node.color; node.color = Color.Red; return x; } private rotateRight(node: Node<T>): Node<T> { if (node.left === undefined) { throw new Error("can not find left child for rotateRight"); } if (node.left.color === Color.Black) { throw new Error("left child must be red for rotateRight"); } const x = node.left; node.left = x.right; x.right = node; x.color = node.color; node.color = Color.Red; return x; } private flipColors(node: Node<T>): void { if (node.right === undefined) { throw new Error("can not find right child for flipColors"); } if (node.right.color === Color.Black) { throw new Error("right child must be red for flipColors"); } if (node.left === undefined) { throw new Error("can not find left child for flipColors"); } if (node.left.color === Color.Black) { throw new Error("left child must be red for flipColors"); } if (node.color === Color.Red) { throw new Error("node must be black for flipColors"); } node.color = Color.Red; node.left.color = Color.Black; node.right.color = Color.Black; } } <file_sep>export type Item = { value: number; index: number; } export enum Action { POP, PUSH, SHIFT, NONE } export enum Target { DEQUE, RESULT } export type Step = { index: number; action: Action; target: Target; deque: Item[]; result: number[]; item?: Item; } export function maxSlidingWindow(nums: number[], k: number): Step[] { const steps: Step[] = []; const result: number[] = []; const deque: Item[] = []; let i = 0; for (; i < k; i++) { const item: Item = { value: nums[i], index: i }; while (deque.length > 0 && item.value >= deque[deque.length - 1].value) { steps.push({ index: i, action: Action.POP, target: Target.DEQUE, deque: [...deque], result: [...result] }); deque.pop(); } steps.push({ index: i, action: Action.PUSH, target: Target.DEQUE, deque: [...deque], result: [...result], item }); deque.push(item); } steps.push({ index: i - 1, action: Action.PUSH, target: Target.RESULT, deque: [...deque], result: [...result] }); result.push(deque[0].value); for (; i < nums.length; i++) { const item: Item = { value: nums[i], index: i }; while (deque.length > 0 && item.value >= deque[deque.length - 1].value) { steps.push({ index: i, action: Action.POP, target: Target.DEQUE, deque: [...deque], 
result: [...result] }); deque.pop(); } steps.push({ index: i, action: Action.PUSH, target: Target.DEQUE, deque: [...deque], result: [...result], item }); deque.push(item); while (deque[0].index <= i - k) { steps.push({ index: i, action: Action.SHIFT, target: Target.DEQUE, deque: [...deque], result: [...result] }); deque.shift(); } steps.push({ index: i, action: Action.PUSH, target: Target.RESULT, deque: [...deque], result: [...result] }); result.push(deque[0].value); } steps.push({ index: i, action: Action.NONE, target: Target.RESULT, deque: [...deque], result: [...result] }); return steps; }; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<number>; sum: number; hasPathSum: boolean; paths: number[][]; } function isLeaf(node: TreeNode<number>) { return !node.left && !node.right; } function clone(paths: number[][]): number[][] { return paths.map(path => [...path]); } export function buildSteps(targetSum: number, root?: TreeNode<number>): Step[] { const steps: Step[] = []; function hasPathSum(root: TreeNode<number> | undefined, targetSum: number) { const paths: number[][] = []; function dfs(node: TreeNode<number> | undefined, nums: number[]) { if (node === undefined) { return false; } const path: number[] = [...nums, node.val.value]; const sum = path.reduce((a, b) => a + b, 0); if (isLeaf(node) && sum === targetSum) { paths.push(path); steps.push({ node, sum, hasPathSum: true, paths: clone(paths) }); return true; } steps.push({ node, sum, hasPathSum: false, paths: clone(paths) }); dfs(node.left, path); dfs(node.right, path); } dfs(root, []); }; hasPathSum(root, targetSum); return steps; } <file_sep>import * as THREE from "three"; import Node from "./node"; function computeDirection(from: Node<any>, to: Node<any>): THREE.Vector3 { return to.leftConnectPosition.clone().sub(from.rightConnectPosition); } function getOrigin(from: Node<any>): THREE.Vector3 { return from.rightConnectPosition; } export default class Arrow extends THREE.ArrowHelper { private readonly headLength: number; private readonly headWidth: number; private _from: Node<any>; private _to: Node<any>; constructor( from: Node<any>, to: Node<any>, color: THREE.Color | string | number, headLength: number, headWidth: number ) { const direction = computeDirection(from, to); super(direction.clone().normalize(), getOrigin(from), direction.length(), color, headLength, headWidth) this.headLength = headLength; this.headWidth = headWidth; this._from = from; this._to = to; } public update(): void { const direction = this.direction; this.position.copy(this.origin); this.setDirection(direction.clone().normalize()); this.setLength(direction.length(), this.headLength, this.headWidth); } set from(node: Node<any>) { this._from = node; this.update(); } get from() { return this._from; } set to(node: Node<any>) { this._to = node; this.update(); } get to() { return this._to; } private get origin(): THREE.Vector3 { return getOrigin(this.from); } private get direction(): THREE.Vector3 { return computeDirection(this.from, this.to); } } <file_sep>import Color from "../_commons/params/color.interface"; import Displayer from "../_commons/params/displayer.interface"; import { GraphNode } from "./node.interface"; export interface GraphEdge<T> extends Displayer, Color { source: GraphNode<T>; target: GraphNode<T>; refresh(): void; } export interface DirectedGraphEdge<T> extends GraphEdge<T> { } export interface UndirectedGraphEdge<T> extends 
GraphEdge<T> { } <file_sep>import { Line, Edge, Building, Step, Action } from "./AlgoContext"; class MaxHeap { private heights: number[]; private lazyDelete: Map<number, number>; constructor() { this.heights = []; this.lazyDelete = new Map(); } peek() { this.prune(); return this.heights[0]; } push(height: number) { this.heights.push(height); this.bubbleUp(this.heights.length - 1); } delete(height: number) { const top = this.heights[0]; if (top !== undefined && top === height) { this.pop(); } else { const count = this.lazyDelete.get(height) || 0; this.lazyDelete.set(height, count + 1); } } private pop() { const root = this.heights.shift(); const last = this.heights.pop(); if (last !== undefined) { this.heights.unshift(last); this.bubbleDown(0); } return root; } private prune() { let top = this.heights[0]; while (top !== undefined && this.lazyDelete.has(top)) { this.pop(); const count = this.lazyDelete.get(top) || 0; if (count === 1) { this.lazyDelete.delete(top); } else { this.lazyDelete.set(top, count - 1); } top = this.heights[0]; } } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.heights[index] > this.heights[parentIndex]) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.heights[index]; const child = this.heights[childIndex]; return (current === undefined || child === undefined) ? false : current < child; } private swap(i: number, j: number) { [this.heights[i], this.heights[j]] = [this.heights[j], this.heights[i]]; } } export const compareFn = (a: Line, b: Line): number => { if (a.x !== b.x) { return a.x - b.x; } // if two starts are compared then higher building should be picked first if (a.edge === Edge.Start && b.edge === Edge.Start) { return b.height - a.height; } // if two ends are compared then lower building should be picked first if (a.edge === Edge.End && b.edge === Edge.End) { return a.height - b.height; } // if one start and one end are compared then start should be picked first return (a.edge === Edge.Start) ? 
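// A Start sorts before an End at the same x. If one building ends exactly where the
// next one starts, handling the Start first means the new height is already in the
// max-heap before the old one is removed, so the skyline never dips and no false
// key point is emitted at that x coordinate.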
-1 : 1; }; export const buildLines = (buildings: Building[]): Line[] => { return buildings.flatMap(building => { const { left, right, height } = building; const leftLine = { x: left, height, edge: Edge.Start }; const rightLine = { x: right, height, edge: Edge.End }; return [leftLine, rightLine]; }); } export const buildSteps = (lines: Line[]): Step[] => { let prevHeight = 0; const result: number[][] = []; const heap = new MaxHeap(); const steps: Step[] = []; lines.forEach(line => { const { x, height, edge } = line; if (edge === Edge.Start) { heap.push(height); steps.push({ prevHeight, action: Action.PushToHeap, height, x }); } else { heap.delete(height); steps.push({ prevHeight, action: Action.DeleteFromHeap, height, x }); } const peek = heap.peek() || 0; if (prevHeight !== peek) { result.push([x, peek]); prevHeight = peek; steps.push({ prevHeight, action: Action.PushToSkyline, x, height }); } }); return steps; } <file_sep>import createDpTable from "../algo"; const getBiggestNumber = (table: number[][]): number => { let result = 0; for (let i = 0; i < table.length; i++) { for (let j = 0; j < table[i].length; j++) { result = Math.max(result, table[i][j]); } } return result; } test("longest-common-substring one", () => { const str1 = "algorithms"; const str2 = "alchemist"; const table = createDpTable(str1, str2); const result = getBiggestNumber(table); expect(result).toBe(2); }); test("longest-common-substring two", () => { const str1 = "abcdxyz"; const str2 = "xyzabcd"; const table = createDpTable(str1, str2); const result = getBiggestNumber(table); expect(result).toBe(4); }); <file_sep>export const title = "Recover Binary Search Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function recoverTree(root: TreeNode | null): void { let errorOne: TreeNode | null = null; let errorTwo: TreeNode | null = null; let prev: TreeNode | null = null; const inorder = (node: TreeNode | null) => { if (!node) { return; } inorder(node.left); if (prev && prev.val >= node.val) { if (!errorOne) { errorOne = prev; } if (errorOne) { errorTwo = node; } } prev = node; inorder(node.right); } inorder(root); if(errorOne && errorTwo) { swap(errorOne, errorTwo); } }; const swap = (a: TreeNode, b: TreeNode) => { const temp = a.val; a.val = b.val; b.val = temp; };`; export const description = ` You are given the **root** of a binary search tree (BST), where the values of exactly two nodes of the tree were swapped by mistake. Recover the tree without changing its structure. 
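
For example, if the inorder traversal of the tree reads **1, 3, 2, 4**, the two swapped values are **3** and **2**. The solution above walks the tree inorder while remembering the previous node: the predecessor of the first out-of-order pair becomes the first error node, the current node of the last out-of-order pair becomes the second, and their values are swapped back at the end.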
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import { AbstractArray } from '../_commons/abstract-array'; import { IStack } from './stack'; export class StackAlgo<T> extends AbstractArray<T> implements IStack<T> { constructor() { super([]); } push(t: T): Promise<number> { return Promise.resolve(this.elements.push(t)); } pop(): Promise<T | undefined> { return Promise.resolve(this.elements.pop()); } peek(): Promise<T | undefined> { return Promise.resolve(this.elements[this.elements.length - 1]); } } <file_sep>export const title = "Verify Preorder Serialization of a Binary Tree"; export const formula = `function isValidSerialization(preorder: string): boolean { const stack: string[] = []; function eliminate() { while ( stack.length >= 3 && stack[stack.length - 1] === "#" && stack[stack.length - 2] === "#" && stack[stack.length - 3] !== "#" ) { stack.pop(); stack.pop(); stack.pop(); stack.push("#"); } } preorder.split(",").forEach(node => { stack.push(node); eliminate(); }) return stack.length === 1 && stack.pop() === "#"; };` export const description = ` One way to serialize a binary tree is to use **preorder traversal**. When we encounter a non-null node, we record the node's value. If it is a null node, we record using a sentinel value such as ***'#'***. Given a string of comma-separated values **preorder**, return **true** if it is a correct preorder traversal serialization of a binary tree. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = 'Coin Change (Fewest Number)'; export const formula = ` if (coin > col) { table[row][col] = table[row - 1][col]; } else { table[row][col] = Math.min( table[row][col - coin] + 1, table[row - 1][col] ); } `; export const description = ` You are given coins of different denominations and a total amount of money amount. Find the fewest number of coins that you need to make up that amount. `; export const usecases = ''; export const example = ` Greedy Algorithm Does not Always Give Optimal Solution. For instance: 1. Non optimal solution using Greedy Algorithm: - Given: Coins = {1, 8, 13}, 16 cents = ? - Greedy solution: **4 coins: 13 + 1 + 1 + 1** - Optimal solution: **2 coins: 8 + 8** 2. Can not result a solution using Greedy Algorithm: - Given: Coins = {2, 8, 15}, 24 cents = ? - Greedy solution: **no solution: coins: 15 + 8 + ? 
** - Optimal solution: **3 coins: 8 + 8 + 8** `; <file_sep>import * as THREE from "three"; import { ArrowStyles, NodeStyles } from "../commons/styles"; import { createPlaneParameters, createTextParameters } from "../commons/helpers"; import Node from "./node"; import Arrow from "./arrow"; import Bucket from "./bucket"; export default class HashMap<T> { arrowStyles: ArrowStyles; nodeStyles: NodeStyles; private scene: THREE.Scene; public duration: number; private planeMaterial: THREE.Material; private textMaterial: THREE.Material; private indices: Node<number>[]; private buckets: Bucket<T>[]; private render: Function; constructor( scene: THREE.Scene, render: Function, start: THREE.Vector3, duration: number, buckets: number, arrowStyles: ArrowStyles, nodeStyles: NodeStyles, ) { this.scene = scene; this.render = render; this.duration = duration; this.arrowStyles = arrowStyles; this.nodeStyles = nodeStyles; this.planeMaterial = new THREE.MeshBasicMaterial({ color: nodeStyles.color, side: THREE.DoubleSide }); this.textMaterial = new THREE.MeshBasicMaterial({ color: nodeStyles.textColor }); this.indices = this.createIndices(scene, buckets, start, this.planeMaterial, this.textMaterial, nodeStyles); this.buckets = this.createBuckets(scene, render, start, duration, buckets, this.planeMaterial, this.textMaterial, nodeStyles); } private createBuckets( scene: THREE.Scene, render: Function, start: THREE.Vector3, duration: number, size: number, planeMaterial: THREE.Material, textMaterial: THREE.Material, nodeStyles: NodeStyles ): Bucket<T>[] { const { height, width } = nodeStyles; const buckets: Bucket<T>[] = []; for (let i = 0; i < size; i++) { const position = start.clone().setX(start.x + width * 2).setY(start.y - i * (height + 0.3)); const bucket = new Bucket<T>(scene, planeMaterial, textMaterial, render, nodeStyles, position, duration); buckets.push(bucket); } return buckets; } private createIndices( scene: THREE.Scene, size: number, start: THREE.Vector3, planeMaterial: THREE.Material, textMaterial: THREE.Material, nodeStyles: NodeStyles ): Node<number>[] { const { height } = nodeStyles; const indices: Node<number>[] = []; for (let i = 0; i < size; i++) { const nodePlanePosition = start.clone().setY(start.y - i * (height + 0.3)); const nodeTextPosition = start.clone().setY(start.y - 1 - i * (height + 0.3)).setX(start.x - 0.7); const planeParameters = createPlaneParameters(planeMaterial, nodePlanePosition, nodeStyles); const textParameters = createTextParameters(textMaterial, nodeTextPosition, nodeStyles); const display = i.toString(); const node = new Node<number>(-1, i, display, scene, planeParameters, textParameters); indices.push(node); } return indices; } get(key: number): T | undefined { const bucket: Bucket<T> = this.getBucket(key); const node = bucket.findNode(key); return node && node.data; } push(key: number, data: T, display: string): void { const bucket: Bucket<T> = this.getBucket(key); bucket.append(key, data, display); if (bucket.length === 1) { const { color, headLength, headWidth } = this.arrowStyles; const indexNode = this.getIndex(key); const bucketNode = bucket.head; const arrow = new Arrow(indexNode, bucketNode, color, headLength, headWidth); bucketNode.arrow = arrow; this.scene.add(arrow); } this.render(); } async delete(key: number): Promise<T | undefined> { const bucket = this.getBucket(key); bucket.delete(key); return; } private getIndex(key: number): Node<number> { const index = this.getBucketIndex(key); return this.indices[index]; } private getBucket(key: number): Bucket<T> { 
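        // A key is mapped to its bucket by key % buckets.length (see getBucketIndex below).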
const index = this.getBucketIndex(key); return this.buckets[index]; } private getBucketIndex(key: number): number { return key % this.buckets.length; } } <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import Position from "../../_commons/params/position.interface" export type Distance = { x: number; y: number; } export type ArrayNodeProps = { textMaterial: THREE.Material; textGeometryParameters: TextGeometryParameters; cubeMaterial: () => THREE.Material; cubeGeometry: THREE.BoxGeometry; initPosition: Position; } export type TreeNodeProps = { enabledTreeNodeColor: string; sphereGeometry: THREE.SphereGeometry; sphereMaterial: () => THREE.Material; textMaterial: THREE.Material; textGeometryParameters: TextGeometryParameters; initPosition: Position; } export type TreeLineProps = { material: THREE.LineBasicMaterial; } export type Props = { arrayPosition: Position; arrayNodeProps: ArrayNodeProps; treePosition: Position; treeNodeProps: TreeNodeProps; treeLineProps: TreeLineProps; treeNodeDistance: Distance; treeInitDepth: number; scene: THREE.Scene, duration?: number; } <file_sep>// The slightly tricky part of this algorithm is computing the start and end positions of the maximum-sum range. // The end position is easy: whenever the global maximum is updated, update the end position at the same time. // The start position is more involved: // 1. First define a variable maybelaststart (implemented as lastStart below). // 2. Whenever current > current + currentMaxs[index - 1], // set maybelaststart = index. // 3. Whenever the global maximum is updated, set start = maybelaststart const createDPTable = (array: number[]): (number | string)[][] => { const currentMaxs: (string | number)[] = [array[0]]; const globalMaxs: (string | number)[] = [array[0]]; const starts: (string | number)[] = [0]; const ends: (string | number)[] = [0]; let lastStart = 0; array.forEach((value, index) => { value = Number(value); if (index < 1) { return; } let [currentMax, globalMax, end] = [0, 0, 0]; let start = starts[index - 1]; if (value > value + Number(currentMaxs[index - 1])) { currentMax = value; lastStart = index; } else { currentMax = value + Number(currentMaxs[index - 1]); } if (currentMax > globalMaxs[index - 1]) { globalMax = currentMax; end = index; start = lastStart; } else { globalMax = Number(globalMaxs[index - 1]); end = Number(ends[index - 1]); } currentMaxs.push(currentMax); globalMaxs.push(globalMax); ends.push(end); starts.push(start); }); const table: (number | string)[][] = []; starts.unshift('starts'); ends.unshift('ends'); currentMaxs.unshift('current_max'); table.push(starts); table.push(ends); table.push(currentMaxs); return table; }; export default createDPTable; <file_sep>import { GraphNode } from "./node.interface"; import { GraphEdge } from "./edge.interface"; import { SimpleGraphSkin, SimpleGraphText } from "./node.three"; import { SimpleDirectedGraphEdge, SimpleUndirectedGraphEdge } from "./edge.three"; import Displayer from "../_commons/params/displayer.interface"; type LayoutMapping = { [key: number]: { x: number; y: number } }; export class Graph<T> implements Displayer { readonly nodes: GraphNode<T>[]; readonly edges: GraphEdge<T>[]; constructor(nodes: GraphNode<T>[] = [], edges: GraphEdge<T>[] = []) { this.nodes = nodes; this.edges = edges; } show() { this.nodes.forEach(node => { node.show(); }); this.edges.forEach(edge => { edge.show(); }); }; hide() { this.nodes.forEach(node => { node.hide(); }); this.edges.forEach(edge => { edge.hide(); }); }; setPositions(calculatorPositions: (graph: Graph<T>) => LayoutMapping) { const positions = calculatorPositions(this); this.nodes.forEach(node => this.setNodePosition(node, positions)); this.edges.forEach(edge => edge.refresh()); }
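    // Applies the coordinates produced by the layout function to one node: the skin is
    // moved to (x, y) and the text is offset by (-0.3, -0.3) so the label stays roughly
    // centred on the skin.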
private setNodePosition(node: GraphNode<T>, positions: LayoutMapping) { const { x, y } = positions[node.id]; node.skin.x = x; node.skin.y = y; node.text.x = x - 0.3; node.text.y = y - 0.3; } addNode(node: GraphNode<T>) { this.nodes.push(node); } dropNode(node: GraphNode<T>) { const index = this.nodes.indexOf(node); if (index > -1) { this.nodes.splice(index, 1); node.hide(); } } addEdge(edge: GraphEdge<T>) { this.edges.push(edge); } dropEdge(edge: GraphEdge<T>) { const index = this.edges.indexOf(edge); if (index > -1) { this.edges.splice(index, 1); edge.hide(); } } } const buildGraphNodes = <T>( nodeSkinColor: string, nodeTextColor: string, adjacency: T[][], scene: THREE.Scene ): Map<T, GraphNode<T>> => { const nodes: Set<T> = new Set(); adjacency.forEach(connection => { const [a, b] = connection; nodes.add(a); nodes.add(b); }); const nodeMap: Map<T, GraphNode<T>> = new Map(); let index = 0; nodes.forEach(node => { const graphSkin = new SimpleGraphSkin(scene, nodeSkinColor); const graphText = new SimpleGraphText(node + "", scene, nodeTextColor); const graphNode = new GraphNode<T>(index, node, graphSkin, graphText); nodeMap.set(node, graphNode); index += 1; }); return nodeMap; }; export class SimpleDirectedGraph<T> extends Graph<T> { constructor( nodeSkinColor: string, nodeTextColor: string, edgeColor: string, adjacency: T[][], scene: THREE.Scene ) { const nodeMap = buildGraphNodes(nodeSkinColor, nodeTextColor, adjacency, scene); const edges: GraphEdge<T>[] = []; adjacency.forEach(connection => { const [a, b] = connection; const source: GraphNode<T> = nodeMap.get(a)!; const target: GraphNode<T> = nodeMap.get(b)!; const edge = new SimpleDirectedGraphEdge(source, target, scene, edgeColor); edges.push(edge); }); super(Array.from(nodeMap.values()), edges); } } export class SimpleUndirectedGraph<T> extends Graph<T> { constructor( nodeSkinColor: string, nodeTextColor: string, edgeColor: string, adjacency: T[][], scene: THREE.Scene ) { const nodeMap = buildGraphNodes(nodeSkinColor, nodeTextColor, adjacency, scene); const edges: GraphEdge<T>[] = []; adjacency.forEach(connection => { const [a, b] = connection; const source: GraphNode<T> = nodeMap.get(a)!; const target: GraphNode<T> = nodeMap.get(b)!; const edge = new SimpleUndirectedGraphEdge(source, target, scene, edgeColor); edges.push(edge); }); super(Array.from(nodeMap.values()), edges); } } <file_sep>import createDpTable from "../algo"; const getResult = (table: boolean[][]): boolean => { const lastRow = table[table.length - 1]; return lastRow.includes(true); } test("is-substring one", () => { const str1 = "alchemist"; const str2 = "che"; const table = createDpTable(str1, str2); const result = getResult(table); expect(result).toBe(true); }); test("is-substring two", () => { const str1 = "alchemist"; const str2 = "chm"; const table = createDpTable(str1, str2); const result = getResult(table); expect(result).toBe(false); }); test("is-substring three", () => { const str1 = "alchemist"; const str2 = "mist"; const table = createDpTable(str1, str2); const result = getResult(table); expect(result).toBe(true); }); test("is-substring three", () => { const str1 = "alchemist"; const str2 = "chis"; const table = createDpTable(str1, str2); const result = getResult(table); expect(result).toBe(false); }); <file_sep>import Node, { Color } from "./node"; export default class PositionNode<T> extends Node<T> { index: number; x: number; y: number; z: number; left?: PositionNode<T>; right?: PositionNode<T>; constructor(val: T, index: number, x: number, y: number, z: 
number, color: Color) { super(val); this.index = index; this.x = x; this.y = y; this.z = z; this.color = color; } } <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle } from '../_commons/styles'; const startPoint: Point = { row: 2, col: 1 }; interface TableSize { rows: number; cols: number; } const getTableSize = (array: number[]): TableSize => { const rows = 3; const cols = array.length + 1; return { rows, cols }; }; const createTableMatrix = (array: number[]): (number | string)[][] => { const { rows, cols } = getTableSize(array); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); table[0][0] = 'INDEX'; table[1][0] = 'VALUE'; table[2][0] = 'CUR_MAX'; for (let col = 1; col < cols; col++) { table[0][col] = col - 1; table[1][col] = array[col - 1]; } table[startPoint.row][startPoint.col] = '?'; return table; }; const createComparedTable = (array: number[]): (number | string)[][] => createDPTable(array); const addHelperStyles = (styles: React.CSSProperties[][], point: Point, table: (string | number)[][]): void => { }; const addMaxSumRange = (styles: React.CSSProperties[][], point: Point, table: (string | number)[][]): void => { const start = Number(table[0][point.col]) + 1; const end = Number(table[1][point.col]) + 1; for (let i = start; i <= end && styles.length; i++) { styles[1][i] = helperStyle; } }; const updateMaxValueStyles = (styles: React.CSSProperties[][], point: Point, table: (string | number)[][]): void => { const currentMaxs = table[2]; const findMax = (): number => { let max = Number(currentMaxs[1]); for (let i = 2; i < currentMaxs.length; i++) { max = Math.max(max, Number(currentMaxs[i])); } return max; } const updateStyles = (max: number): void => { for (let i = 0; i < currentMaxs.length; i++) { const current = Number(currentMaxs[i]); if (max === current) { styles[2][i] = helperStyle; } } } const max = findMax(); updateStyles(max); }; const createTableStyles = (array: number[]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(array); const compareTable = createComparedTable(array); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint, compareTable); return table; }; const createButtons = (array: number[]): number[] => { const set = new Set<number>(); const dpTable = createDPTable(array); for (let col = 0; col < array.length; col++) { const currentMax = Number(dpTable[2][col + 1]); set.add(currentMax); } return Array.from(set).sort(((a, b) => a - b)); }; const createButtonsStyles = (array: number[]): (React.CSSProperties)[] => { return createButtons(array).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, addMaxSumRange, updateMaxValueStyles, startPoint, };<file_sep>enum Company { Google = 1, Apple = 2, Amazone = 3, Facebook = 4, Twitter = 5, } export default Company; <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary } from '../_commons/styles'; const startPoint: Point = { row: 0, col: 1 }; interface TableSize { rows: number; cols: number; } const getTableSize = (table: number[][]): TableSize => { const rows = table.length; const cols = table[rows - 1].length; return { rows, cols }; }; const createComparedTable = (table: number[][]): (number | string)[][] => { return createDPTable(table); }; const addHelperStyles = (styles: 
React.CSSProperties[][], { row, col }: Point): void => { const helper = helperStyle; const target = helperStyleSecondary; if (row === 0) { styles[0][col - 1] = helper; styles[0][col] = target; } else if (col === 0) { styles[row - 1][0] = helper; styles[row][0] = target; } else { styles[row - 1][col] = helper; styles[row][col - 1] = helper; styles[row][col] = target; } }; const createTableStyles = (table: number[][]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(table); const styles = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(styles, startPoint); return styles; }; const createButtons = (table: number[][]): number[] => { const dpTable = createDPTable(table); const set = new Set<number>(); for (let col = 1; col < dpTable[0].length; col++) { set.add(dpTable[0][col]); } for (let row = 1; row < dpTable.length; row++) { set.add(dpTable[row][0]); } for (let row = 1; row < dpTable.length; row++) { for (let col = 1; col < dpTable[row].length; col++) { set.add(dpTable[row][col]); } } return Array.from(set).sort((a, b) => a - b); }; const createButtonsStyles = (table: number[][]): (React.CSSProperties)[] => { return createButtons(table).map(() => ({ color: 'back' })); }; export { addHelperStyles, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, }; <file_sep>import Node from "./node"; import Tree from "./tree"; export default class SimpleTree<T> extends Tree<T> { treeRoot: Node<T> constructor(input: T[]) { super(); this.assertEmpty(input); this.buildTree(input); this.treeRoot = this.root!; } private buildTree(input: T[]) { input.forEach(item => this.insert(item)); } private assertEmpty(input: T[]) { if (input.length === 0) { throw new Error("input can not be empty in simple black tree."); } } } <file_sep>import TreeNode from "../v2/node"; export const getDepth = <T>(node?: TreeNode<T>): number => { if (node === undefined) { return 0; } const leftDepth: number = getDepth(node.left); const rightDepth: number = getDepth(node.right); return Math.max(leftDepth, rightDepth) + 1; } export const getLeftChildIndex = (index: number): number => { return 2 * index + 1; } export const getRightChildIndex = (index: number): number => { return 2 * index + 2; } export const getParentIndex = (index: number): number => { return Math.floor((index - 1) / 2); } <file_sep>export const title = "Binary Tree Right Side View"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ // preorder, iterate right subtree first. function rightSideView(root: TreeNode | null): number[] { const result: number[] = []; function dfs(node: TreeNode | null, depth: number) { if (node === null) { return; } if (result.length === depth) { result.push(node.val); } dfs(node.right, depth + 1); dfs(node.left, depth + 1); } dfs(root, 0); return result; };`; export const description = ` Given the **root** of a binary tree, imagine yourself standing on the **right side** of it, return the values of the nodes you can see ordered from top to bottom. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>interface Array<T> { /** * Represents the number of elements in that array. 
* The value is an unsigned, 32-bit integer that is always numerically greater than the highest index in the array. */ readonly length: number; /** * Adds one or more elements to the end of an array and returns the new length of the array. */ push(...items: T[]): Promise<number>; /** * Removes the last element from an array and returns that element. */ pop(): Promise<T | undefined>; /** * Removes the first element from an array and returns that element. */ shift(): Promise<T | undefined>; /** * Adds one or more elements to the beginning of an array and returns the new length of the array. */ unshift(...items: T[]): Promise<number>; update(index: number, item: T): Promise<void>; swap(i: number, j: number): Promise<void>; } export default Array <file_sep>import { Collection } from './collection'; import { ArrayIterator } from './array-iterator'; import { Iterable } from './iterable'; import { Iterator } from './iterator'; export abstract class AbstractArray<T> implements Collection, Iterable<T> { protected elements: T[]; constructor(elements: T[]) { this.elements = elements; } isEmpty(): Promise<boolean> { return Promise.resolve(this.elements.length === 0); } size(): Promise<number> { return Promise.resolve(this.elements.length); } iterator(): Iterator<T> { return new ArrayIterator<T>(this.elements); } } <file_sep>export const title = "Top K Frequent Words"; export const minHeap = `interface HeapItem { word: string; count: number; } class MinHeap { private items: HeapItem[]; constructor() { this.items = []; } push(item: HeapItem) { this.items.push(item); this.bubbleUp(this.items.length - 1); } pop() { const root = this.items.shift(); const last = this.items.pop(); if (last) { this.items.unshift(last); this.bubbleDown(0); } return root; } size() { return this.items.length; } private bubbleUp(index: number) { if (index < 1) { return; } const parentIdex = Math.floor((index - 1) / 2); if (this.shouldBubbleUp(index, parentIdex)) { this.swap(index, parentIdex); this.bubbleUp(parentIdex); } } private shouldBubbleUp(index: number, parentIdex: number): boolean { const current = this.items[index]; const parent = this.items[parentIdex]; return (current.count < parent.count) || ( current.count === parent.count && current.word > parent.word ); } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(target, index); this.bubbleDown(target); } } private shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.items[index]; const child = this.items[childIndex]; if (!current || !child) { return false; } return (current.count > child.count) || ( current.count === child.count && current.word < child.word ); } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } }`; export const topKFrequent = `function topKFrequent(words: string[], k: number): string[] { const counts: Map<string, number> = new Map(); words.forEach(word => { const count = counts.get(word) || 0; counts.set(word, count + 1); }); const heap = new MinHeap(); counts.forEach((count, word) => { heap.push({ word, count }); if (heap.size() > k) { heap.pop(); } }); const result: string[] = []; while (heap.size() > 0) { const { word } = heap.pop(); result.unshift(word); } return result; };`; export const 
description = ` Given an array of strings **words** and an integer **k**, return the **k** most frequent strings. Return the answer **sorted** by the **frequency** from highest to lowest. Sort the words with the same frequency by their **lexicographical order**. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Sum Root to Leaf Numbers"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function sumNumbers(root: TreeNode | null): number { function dfs(node: TreeNode | null, sum: number): number { if (node === null) { return 0; } if (!node.left && !node.right) { return sum * 10 + node.val; } return dfs(node.left, sum * 10 + node.val) + dfs(node.right, sum * 10 + node.val); } return dfs(root, 0); };`; export const description = ` You are given the **root** of a binary tree containing digits from 0 to 9 only. Each root-to-leaf path in the tree represents a number. - For example, the root-to-leaf path **1 -> 2 -> 3** represents the number **123**. Return the total sum of all root-to-leaf numbers. Test cases are generated so that the answer will fit in a 32-bit integer. A **leaf** node is a node with no children. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const createLeftMax = (heights: number[]): number[] => { const result: number[] = new Array(heights.length).fill(0); let max = 0; for (let i = 1; i < heights.length; i++) { max = Math.max(max, heights[i - 1]); result[i] = max; } return result; } export const createRightMax = (heights: number[]): number[] => { const result: number[] = new Array(heights.length).fill(0); let max = 0; for (let i = heights.length - 2; i >= 0; i--) { max = Math.max(max, heights[i + 1]); result[i] = max; } return result; } const createDPTable = (heights: number[]): number[] => { const leftMax = createLeftMax(heights); const rightMax = createRightMax(heights); const waters: number[] = new Array(heights.length).fill(0); for (let i = 0; i < heights.length; i++) { const waterLevel = Math.min(leftMax[i], rightMax[i]); const water = (waterLevel - heights[i]); if (water > 0) { waters[i] = water; } } return waters; }; export default createDPTable; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node?: TreeNode<number>; p: number; q: number; direction?: Direction; islowestCommonAncestor?: boolean; } export function buildSteps(p: number, q: number, root?: TreeNode<number>): Step[] { const steps: Step[] = []; function lowestCommonAncestor(p: number, q: number, node?: TreeNode<number>, direction?: Direction): TreeNode<number> | undefined { if (node === undefined) { return undefined; } if (node.val.value === p || node.val.value === q) { steps.push({ node, p, q, direction, islowestCommonAncestor: true }); return node; } if (node.val.value > Math.min(p, q) && node.val.value < Math.max(p, q)) { steps.push({ node, p, q, direction, islowestCommonAncestor: true }); return node; } steps.push({ node, p, q, direction }); const left = lowestCommonAncestor(p, q, node.left, Direction.Left); if (left) { steps.push({ node, p, q, direction, 
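            // A result came back from the left subtree: record this step and pass the
            // node found there (left) straight up the call chain.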
islowestCommonAncestor: true }); return left; } const right = lowestCommonAncestor(p, q, node.right, Direction.Right); if (right) { steps.push({ node, p, q, direction, islowestCommonAncestor: true }); return right; } steps.push({ node, p, q, direction, islowestCommonAncestor: false }); return undefined; }; lowestCommonAncestor(p, q, root); return steps; } <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import { build } from '../../../data-structures/tree/nodes/v1/tree-node-builder'; import Position from '../../../data-structures/_commons/params/position.interface'; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: "green", opacity: 0.5, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "black" }); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; export const lineMaterial = new THREE.LineBasicMaterial({ color: "gold" }); export const rootCenter = { x: 0, y: 7, z: 0 }; export const initCenter = { x: 0, y: -8, z: 0 }; export const yDistance = 3; export const duration = 1; export const xAxisAlpha = 2; // expend the tree size in xAxis. export const buildTreeNode = (value: number, index: number, scene: THREE.Scene, center: Position, show: boolean) => { const node = build<number>( index, sphereGeometry, sphereMaterial(), textMaterial, textGeometryParameters, value, scene, center ); if (show) { node.show(); } return node; } <file_sep>export interface Roman { value: number; symbol: string; } export interface Data { num: number; roman: string; } export interface Item { index: number; current: Data; roman: Roman; next: Data; } export function intToRoman(num: number): Item[] { const items: Item[] = []; const valueSymbols = [ [1000, "M"], [900, "CM"], [500, "D"], [400, "CD"], [100, "C"], [90, "XC"], [50, "L"], [40, "XL"], [10, "X"], [9, "IX"], [5, "V"], [4, "IV"], [1, "I"] ]; let roman = ""; for (let i = 0; i < valueSymbols.length; i++) { const [value, symbol] = valueSymbols[i]; while (num >= value) { const current: Data = { num, roman }; roman += symbol; num -= (+value); const next: Data = { num, roman }; items.push({ index: i, current, roman: { value: +value, symbol: symbol + "" }, next }); } } return items; }; <file_sep>export const title = "Number of Provinces"; export const formula = `function findCircleNum(isConnected: number[][]): number { class DisjointSetNode { rank: number; parent: DisjointSetNode; constructor() { this.rank = 0; this.parent = this; } } class DisjointSet { private readonly map: Map<number, DisjointSetNode>; constructor() { this.map = new Map(); } countRoots(): number { const set: Set<DisjointSetNode> = new Set(); Array .from(this.map.values()) .map(node => this.findRootByNode(node)) .forEach(root => set.add(root)); return set.size; } union(a: number, b: number) { const rootA = this.findRootByValue(a); const rootB = this.findRootByValue(b); if (rootA === rootB) { return; } if (rootA.rank === rootB.rank) { rootA.rank += 1; rootB.parent = rootA; } else if (rootA.rank > rootB.rank) { rootB.parent = rootA; } else { rootA.parent = rootB; } } private findRootByValue(value: number): DisjointSetNode { const node = this.getNode(value); return this.findRootByNode(node); } private findRootByNode(node: DisjointSetNode): DisjointSetNode { if (node.parent === 
node) { return node; } node.parent = this.findRootByNode(node.parent); return node.parent; } private getNode(value: number): DisjointSetNode { if (!this.map.has(value)) { this.map.set(value, new DisjointSetNode()); } return this.map.get(value); } } const disjointSet = new DisjointSet(); for (let row = 0; row < isConnected.length; row++) { for (let col = row; col < isConnected[row].length; col++) { if (isConnected[row][col] === 1) { disjointSet.union(row, col); } } } return disjointSet.countRoots(); };`; export const description = `There are **n** cities. Some of them are connected, while some are not. If city **a** is connected directly with city **b**, and city **b** is connected directly with city **c**, then city **a** is connected indirectly with city **c**. A **province** is a group of directly or indirectly connected cities and no other cities outside of the group. You are given an **n x n** matrix **isConnected** where **isConnected[i][j] = 1** if the **i_th** city and the **j_th** city are directly connected, and **isConnected[i][j] = 0** otherwise. Return the total number of **provinces**.`; export const usecases = ''; export const example = ``; <file_sep>import { Game } from "../../commons/game"; import Category from "../../commons/segments/category"; import Difficulty from "../../commons/segments/difficulty"; import Company from "../../commons/segments/company"; const info: Game = { name: "LRU Cache", path: "/algorithms/lru-cache", categories: [Category.HashTable], companies: [Company.Google, Company.Amazone], difficulty: Difficulty.Hard, img: "/img/lru_cache.png" } export default info; <file_sep>import * as THREE from "three"; import Node from "./node"; abstract class Arrow extends THREE.ArrowHelper { private readonly headLength: number; private readonly headWidth: number; protected abstract _from: Node<any>; protected abstract _to: Node<any>; protected abstract get direction(): THREE.Vector3; protected abstract get origin(): THREE.Vector3; constructor( direction: THREE.Vector3, origin: THREE.Vector3, color: THREE.Color | string | number, headLength: number, headWidth: number ) { super(direction.clone().normalize(), origin, direction.length(), color, headLength, headWidth) this.headLength = headLength; this.headWidth = headWidth; } public update(): void { this.position.copy(this.origin); this.setDirection(this.direction.normalize()); this.setLength(this.direction.length(), this.headLength, this.headWidth); } set from(node: Node<any>) { this._from = node; this.update(); } get from() { return this._from; } set to(node: Node<any>) { this._to = node; this.update(); } get to() { return this._to; } } export class ForwardArrow extends Arrow { protected _from: Node<any>; protected _to: Node<any>; constructor( from: Node<any>, to: Node<any>, color: THREE.Color | string | number, headLength: number, headWidth: number ) { const origin: THREE.Vector3 = from.rightUpperConnectPosition; const dest: THREE.Vector3 = to.leftUpperConnectPosition; const direction = dest.clone().sub(origin); super(direction, origin, color, headLength, headWidth) this._from = from; this._to = to; } protected get origin(): THREE.Vector3 { return this.from.rightUpperConnectPosition; } protected get direction(): THREE.Vector3 { return this.to.leftUpperConnectPosition.clone().sub(this.from.rightUpperConnectPosition); } } export class BackwardArrow extends Arrow { protected _from: Node<any>; protected _to: Node<any>; constructor( from: Node<any>, to: Node<any>, color: THREE.Color | string | number, headLength: number, 
headWidth: number ) { const origin: THREE.Vector3 = from.leftLowerConnectPosition; const dest: THREE.Vector3 = to.rightLowerConnectPosition; const direction = dest.clone().sub(origin); super(direction, origin, color, headLength, headWidth) this._from = from; this._to = to; } protected get direction(): THREE.Vector3 { return this.to.rightLowerConnectPosition.clone().sub(this.from.leftLowerConnectPosition); } protected get origin(): THREE.Vector3 { return this.from.leftLowerConnectPosition; } } <file_sep>export interface Step { left: number; right: number; mid: number; } export function searchInsert(nums: number[], target: number): Step[] { const steps: Step[] = []; let [left, right] = [0, nums.length - 1]; while (left <= right) { const mid = left + Math.floor((right - left) / 2); steps.push({ left, right, mid }); if (nums[mid] === target) { // return mid; return steps; } if (nums[mid] < target) { left = mid + 1; } else { right = mid - 1; } } // return left; return steps; }; <file_sep>class Node { readonly index: number; // start from 1 readonly level: number; // start from 0 left?: Node; right?: Node; x?: number; constructor(index: number) { this.left = undefined; this.right = undefined; this.index = index; this.level = ~~Math.log2(index); } setLeft(): Node { this.left = new Node(2 * this.index); return this.left; } setRight(): Node { this.right = new Node(2 * this.index + 1); return this.right; } setX(): void { if (this.left && this.right && this.left.x && this.right.x) { this.x = (this.left.x + this.right.x) / 2; return; } if (this.level === 0) { this.x = 0; return; } // 4 5 6 7 // -2 -1 1 2 // 0 1 2 3 // 123 456 789 10-11-12 // -6-5-4 -3-2-1 123 456 const lastLeveLNodes: number = Math.pow(2, this.level); const middle = lastLeveLNodes / 2; const rowIndex = this.index - lastLeveLNodes; if (rowIndex - middle < 0) { this.x = (rowIndex - middle) * 3 + 1; } else { this.x = (rowIndex - middle + 1) * 3 - 1; } } } // Using Full_Binary_Tree to compute the x coordinate of tree nodes. class Calculator { // index -> x coordinate public readonly xCoordinatesMap: Map<number, number>; private readonly treeDepth: number; constructor(depth: number) { this.treeDepth = depth; this.xCoordinatesMap = new Map(); const root = new Node(1); this.buildTree(root); this.setXCoordinates(root); } private buildTree(node: Node): void { if (this.isLastLevel(node)) return; this.buildTree(node.setLeft()); this.buildTree(node.setRight()); } private setXCoordinates(node: Node): void { if (node.left) { this.setXCoordinates(node.left); } if (node.right) { this.setXCoordinates(node.right); } node.setX(); this.xCoordinatesMap.set(node.index, node.x!); } private isLastLevel(node: Node): boolean { return node.level + 1 === this.treeDepth; } } // index -> x coordinate const builder = (depth: number): Map<number, number> => { return new Calculator(depth).xCoordinatesMap; }; export default builder; <file_sep>interface INode<T> { val: T; left?: INode<T>; right?: INode<T>; index: number; // index of this node in the tree. start from 1. readonly depth: number; // depths of this tree(node). readonly level: number; // level of this node in the tree. start from 0 readonly size: number; // how many nodes of this tree. 
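    // Note: with the 1-based index used here, level === Math.floor(Math.log2(index)),
    // which is exactly how the level getter of the Node class below computes it.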
} const countNodes = <T>(root: INode<T>): number => { let result = 0; const helper = <T>(node?: INode<T>): void => { if (node === undefined) return; result++; helper(node.left); helper(node.right); } helper(root); return result; }; const findDepth = <T>(root: INode<T>): number => { const helper = <T>(node: INode<T> | undefined, depth: number): number => { if (node === undefined) return depth; const left = helper(node.left, depth + 1); const right = helper(node.right, depth + 1); return Math.max(left, right); } return helper(root, 0); }; export default class Node implements INode<string> { val: string; _left?: Node; _right?: Node; index: number; constructor(val: string, isRoot: boolean = false) { this.val = val; this._left = undefined; this._right = undefined; this.index = -1; if (isRoot) { this.index = 1; } } get left() { return this._left; } set left(node: Node | undefined) { this._left = node; if (this._left) { this._left.index = 2 * this.index; } } get right() { return this._right; } set right(node: Node | undefined) { this._right = node; if (this._right) { this._right.index = 2 * this.index + 1; } } get depth(): number { return findDepth(this); } get level(): number { return ~~Math.log2(this.index); } get size(): number { return countNodes(this); } } <file_sep>import TreeNode from "../../../../data-structures/tree/nodes/v1/node"; export interface Step { node: TreeNode<string>; result: string[][]; } function cloneResult(result: string[][]): string[][] { return result.map(level => [...level]); } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; const result: string[][] = []; function dfs(node: TreeNode<string> | undefined, level: number) { if (!node) { return; } if (result.length === level) { result.push([]); } result[level].push(node.val.value); steps.push({ node, result: cloneResult(result) }); dfs(node.left, level + 1); dfs(node.right, level + 1); } dfs(root, 0); return steps; } <file_sep>export const title = "Populating Next Right Pointers in Each Node"; export const formula = `/** * Definition for Node. * class Node { * val: number * left: Node | null * right: Node | null * next: Node | null * constructor(val?: number, left?: Node, right?: Node, next?: Node) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * this.next = (next===undefined ? null : next) * } * } */ // preorder function connect(root: Node | null): Node | null { if (root === null) { return null; } if (root.left) { root.left.next = root.right; } if (root.right && root.next) { if (root.next.left) { root.right.next = root.next.left; } else { root.right.next = root.next.right; } } connect(root.left); connect(root.right); return root; };`; export const description = ` You are given a **perfect binary tree** where all leaves are on the same level, and every parent has two children. Populate each next pointer to point to its next right node. If there is no next right node, the next pointer should be set to NULL. Initially, all next pointers are set to NULL. 
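
For example, in a perfect tree whose root 1 has children 2 and 3, connecting the tree sets the next pointer of node 2 to node 3, while the next pointer of node 3 remains NULL.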
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "0/1 Knapsack Problem"; export const formula = ` if (itemWeight > currentWeight) { table[row][col] = table[row - 1][col]; } else { table[row][col] = Math.max( table[row - 1][col], table[row - 1][currentWeight - itemWeight] + itemValue ); } `; export const description = ` Given weights and values of n items, put these items in a knapsack of capacity W to get the maximum total value in the knapsack. V: value, W: Weight `; export const usecases = ''; export const example = ''; <file_sep>import Line from '../nodes/line'; import { buildNode } from '../nodes/v2/builder'; import TreeNode from "../nodes/v2/node"; import { getLeftChildIndex, getParentIndex, getRightChildIndex } from '../nodes/utils/tree-node-utils'; import IHeap, { Comparable } from "./heap.interface"; import { buildPerfectBinaryTree, TreeNode as TreePosition } from '../nodes/utils/perfect-binary-tree'; import Position from '../../_commons/params/position.interface'; import Array from '../../array/array.class'; import { Props } from './props'; import { TextCube } from '../../_commons/cube/three/text-cube'; import { wait } from '../../_commons/utils'; abstract class Heap<T extends Comparable | string | number> implements IHeap<T>{ public props: Props; private array: Array<T>; private treeNodesPositions: TreePosition[]; private treeNodes: TreeNode<T>[]; private treeLines: Map<number, Line>; readonly deleted: Map<T, number>; private deletedCount: number; constructor(props: Props) { this.props = props; const { arrayPosition, treeInitDepth, treePosition, treeNodeDistance, duration } = props; this.treeNodes = []; this.treeLines = new Map(); this.treeNodesPositions = this.buildTreeNodesPositions(treeInitDepth, treePosition, treeNodeDistance.x, treeNodeDistance.y); this.array = new Array<T>(arrayPosition, duration); this.deleted = new Map(); this.deletedCount = 0; } private buildTreeNodesPositions(depth: number, { x, y }: Position, xDistance: number, yDistance: number) { const positions = buildPerfectBinaryTree(depth, xDistance, yDistance); const xAlpha = (positions.length === 0) ? 
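            // xAlpha shifts the precomputed layout horizontally so that the root node
            // (positions[0]) lands on the requested tree x position (0 when the layout is empty).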
0 : x - positions[0].x; positions.forEach(position => { position.x += xAlpha; position.y += y; }) return positions; } async push(item: T): Promise<void> { const index = this.treeNodes.length; await Promise.all([ this.array.push(this.buildArrayNode(item)), this.insertTreeNode(item, index, this.props.treeNodeProps.initPosition) ]) return this.bubbleUp(index); } private calTextX<T>(value: T, x: number): number { const length: number = (value as any).toString().length; switch (length) { case 0: return x; case 1: return x - 0.3; case 2: return x - 0.6; case 3: return x - 0.8; default: return x - 1; } } private buildArrayNode(item: T): TextCube<T> { const { textMaterial, textGeometryParameters, cubeMaterial, cubeGeometry, initPosition } = this.props.arrayNodeProps; const { x, y, z } = initPosition; const cube = new TextCube<T>( item, textMaterial, textGeometryParameters, cubeMaterial(), cubeGeometry, this.props.scene ); cube.position.x = x; cube.position.y = y; cube.position.z = z; cube.textPosition.x = this.calTextX(item, x); cube.textPosition.y = y - 0.26; cube.textPosition.z = z; cube.show(); return cube; } private insertTreeNode(item: T, index: number, position: Position): Promise<void> { const node = this.buildTreeNode(item, position).show(); this.treeNodes.push(node); const line = this.buildTreeLine(index); if (line) { this.treeLines.set(index, line.show()); } return this.moveNode(node, line, index); } private moveNode(node: TreeNode<T>, line: Line | undefined, index: number): Promise<void> { const { x, y } = this.treeNodesPositions[index]; const dest: Position = { x, y, z: 0 }; const onUpdate = () => { if (line) { line.end = node.value.center; } }; return node.moveTo(dest, this.props.duration || 0, onUpdate); } private buildTreeNode(item: T, position: Position) { return buildNode<T>(this.props.treeNodeProps, item, this.props.scene, position); } private buildTreeLine(index: number): Line | undefined { const nodePosition = this.props.treeNodeProps.initPosition; const parentPosition = this.treeNodesPositions[getParentIndex(index)]; if (parentPosition) { return new Line( { x: parentPosition.x, y: parentPosition.y, z: 0 }, { x: nodePosition.x, y: nodePosition.y, z: 0 }, this.props.treeLineProps.material, this.props.scene ); } else { return; } } private async bubbleUp(index: number): Promise<void> { if (index < 1) return Promise.resolve(); const parentIndex = getParentIndex(index); if (this.shouldBubbleUp(this.getValue(index), this.getValue(parentIndex))) { await Promise.all([ this.swap(index, parentIndex), this.array.swap(index, parentIndex) ]); return this.bubbleUp(parentIndex); } return Promise.resolve(); } protected abstract shouldBubbleUp(current: T, parent: T): boolean; private deleteLastLine() { const key = this.treeNodes.length - 1; const last = this.treeLines.get(key); if (last) { last.hide(); this.treeLines.delete(key); } } private async deleteTop(): Promise<T | undefined> { this.deleteLastLine(); // remove root const treeRoot = this.treeNodes.shift(); treeRoot?.hide(); const arrayHead = await this.array.shift(); arrayHead?.hide(); // move last to root const treeLast = this.treeNodes.pop(); const arrayLast = await this.array.pop(); if (treeLast && arrayLast) { this.treeNodes.unshift(treeLast); const { x, y } = this.treeNodesPositions[0]; await Promise.all([ this.array.unshift(arrayLast!), treeLast.moveTo({ x, y, z: 0 }, this.props.duration || 0) ]); await this.bubbleDown(0); } const result: T | undefined = treeRoot?.value.value; return Promise.resolve(result); } private async prune() 
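    // Lazy-deletion clean-up: keep removing the heap's top while it still has pending
    // deletes recorded in the deleted map, decrementing the pending counters as we go.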
{ let top: T | undefined = await this.peek(); while (top !== undefined && this.deleted.has(top)) { await this.deleteTop(); const count = this.deleted.get(top); if (count !== undefined) { if (count === 1) { this.deleted.delete(top); } else { this.deleted.set(top, count - 1); } } this.deletedCount -= 1; top = await this.peek(); } } async pop(): Promise<T | undefined> { const top = await this.deleteTop(); await this.prune(); return top; } async delete(item: T): Promise<T | undefined> { const top: T | undefined = await this.peek(); if (top !== undefined && top === item) { return this.pop(); } if (this.treeNodes.map(node => node.value.value).indexOf(item) < 0) { return undefined; } const count: number = this.deleted.get(item) || 0; this.deleted.set(item, count + 1); this.deletedCount += 1; return item; } private getValue(index: number): T { return this.treeNodes[index].value.value; } private async bubbleDown(index: number): Promise<void> { let target = index; const leftIndex = getLeftChildIndex(index); if (leftIndex < this.treeNodes.length && this.shouldBubbleDown( this.getValue(target), this.getValue(leftIndex) )) { target = leftIndex; } const rightIndex = getRightChildIndex(index); if (rightIndex < this.treeNodes.length && this.shouldBubbleDown( this.getValue(target), this.getValue(rightIndex) )) { target = rightIndex; } if (target === index) { return Promise.resolve(); } await Promise.all([ this.swap(target, index), this.array.swap(target, index) ]); return this.bubbleDown(target); } protected abstract shouldBubbleDown(current: T, child: T): boolean; private async swap(i: number, j: number): Promise<void> { const x = this.treeNodes[i]; const y = this.treeNodes[j]; const back = x.value.sphereColor.color; x.value.sphereColor.setColor(this.props.treeNodeProps.enabledTreeNodeColor); y.value.sphereColor.setColor(this.props.treeNodeProps.enabledTreeNodeColor); const a = this.clonePosition(x.value.center); const b = this.clonePosition(y.value.center); [this.treeNodes[i], this.treeNodes[j]] = [this.treeNodes[j], this.treeNodes[i]]; await Promise.all([ x.moveTo(b, this.props.duration || 0), y.moveTo(a, this.props.duration || 0) ]); x.value.sphereColor.setColor("#" + back); y.value.sphereColor.setColor("#" + back); return; } peek(): Promise<T | undefined> { return Promise.resolve(this.treeNodes[0]?.value.value); } size(): Promise<number> { return Promise.resolve(this.treeNodes.length - this.deletedCount); } isEmpty(): Promise<boolean> { return Promise.resolve(this.treeNodes.length === 0); } async buildHeap(items: T[],): Promise<void> { for (let i = 0; i < items.length; i++) { const { x, y } = this.treeNodesPositions[i]; this.array.push(this.buildArrayNode(items[i])); await this.insertTreeNode(items[i], i, { x, y, z: 0 }); } } async heapify(): Promise<void> { for (let i = Math.floor(this.treeNodes.length / 2) - 1; i >= 0; i--) { await this.bubbleDown(i); } await wait(0.1); } clear(): Promise<void> { this.treeNodes.forEach(node => node.hide()); this.treeLines.forEach(line => line.hide()); this.treeNodes = []; this.treeLines = new Map(); return Promise.resolve(); } private clonePosition({ x, y, z }: Position): Position { return { x, y, z }; } protected isPrimaryType(value: T): boolean { return typeof value === 'string' || typeof value === 'number'; } items(): T[] { return this.treeNodes.map(node => node.value.value); } } export default Heap; <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle } from '../_commons/styles'; const startPoint: Point = 
{ row: 1, col: 2, }; interface TableSize { rows: number; cols: number; } const getTableSize = (pattern: string, text: string): TableSize => { const rows = text.length + 2; const cols = pattern.length + 2; return { rows, cols }; }; const createTableMatrix = (pattern: string, text: string): (boolean | string)[][] => { const { rows, cols } = getTableSize(pattern, text); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); for (let col = 2; col < cols; col++) { table[0][col] = pattern.charAt(col - 2); } for (let row = 2; row < rows; row++) { table[row][0] = text.charAt(row - 2); } table[1][1] = 'T'; for (let row = 2; row < rows; row++) { table[row][1] = 'F'; } table[startPoint.row][startPoint.col] = '?'; return table; }; const createComparedTable = (pattern: string, text: string): (boolean | string)[][] => { const { rows, cols } = getTableSize(pattern, text); const dpTable = createDPTable(pattern, text); const tableMatrix = createTableMatrix(pattern, text); for (let row = 1; row < rows; row++) { for (let col = 1; col < cols; col++) { tableMatrix[row][col] = dpTable[row - 1][col - 1]; } } return tableMatrix; }; const addHelperStyles = (styles: React.CSSProperties[][], point: Point): void => { for (let col = 0; col < styles[0].length && col <= point.col; col++) { styles[0][col] = helperStyle; } for (let row = 0; row < styles.length && row <= point.row; row++) { styles[row][0] = helperStyle; } }; const createTableStyles = (pattern: string, text: string): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(pattern, text); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint); return table; }; const createButtons = (): boolean[] => [true, false]; const createButtonsStyles = (): (React.CSSProperties)[] => [{}, {}]; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, }; <file_sep>import gsap from 'gsap'; import IMover from "../params/mover.interface"; import Position from '../params/position.interface'; import { wait } from "../utils"; export default class Mover implements IMover { private mesh: THREE.Mesh; constructor(mesh: THREE.Mesh) { this.mesh = mesh; } move(position: Position, duration: number, onUpdate?: () => void): Promise<void> { gsap.to(this.mesh.position, { ...position, duration, onUpdate }); return wait(duration); } } <file_sep>import * as THREE from "three"; import { TextGeometry, TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; export interface PlaneParameters { width: number; height: number; position: THREE.Vector3; material: THREE.Material; } export interface TextParameters { position: THREE.Vector3; textGeometryParameters: TextGeometryParameters; material: THREE.Material; } export default class Node<T> { data: T; display: string; scene: THREE.Scene; plane: THREE.Mesh; protected planeGeometry: THREE.PlaneGeometry; text: THREE.Mesh; private textGeometry: TextGeometry; constructor( data: T, display: string, scene: THREE.Scene, planeParameters: PlaneParameters, textParameters: TextParameters ) { this.data = data; this.display = display; this.scene = scene; this.planeGeometry = new THREE.PlaneGeometry(planeParameters.width, planeParameters.height); this.plane = new THREE.Mesh(this.planeGeometry, planeParameters.material); this.plane.position.copy(planeParameters.position); this.textGeometry = new TextGeometry(display, textParameters.textGeometryParameters); this.text = new 
THREE.Mesh(this.textGeometry, textParameters.material); this.text.position.copy(textParameters.position); this.addToScene(); } addToScene(): void { this.scene.add(this.plane); this.scene.add(this.text); } isInScene(): boolean { return this.scene.children.indexOf(this.plane) > -1 || this.scene.children.indexOf(this.text) > -1 } removeFromScene(): void { this.scene.remove(this.plane); this.scene.remove(this.text); } } <file_sep>interface TableSize { rows: number; cols: number; } const getTableSize = (coins: number[], total: number): TableSize => { const rows = coins.length + 1; const cols = total + 1; return { rows, cols }; }; const createDPTable = (coins: number[], total: number): number[][] => { const { rows, cols } = getTableSize(coins, total); const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); for (let row = 1; row < rows; row++) { table[row][0] = 1; } for (let row = 1; row < table.length; row += 1) { const coin = coins[row - 1]; for (let col = 1; col < table[row].length; col += 1) { if (coin > col) { table[row][col] = table[row - 1][col]; } else { table[row][col] = table[row][col - coin] + table[row - 1][col]; } } } return table; }; export default createDPTable; <file_sep>export const title = "Rod Cutting Problem"; export const formula = ` if (rodLength > currentLength) { table[row][col] = table[row - 1][col]; } else { table[row][col] = Math.max( table[row - 1][col], table[row][currentLength - rodLength] + rodPrice ); } `; export const description = `Assume a company buys long steel rods and cuts them into shorter rods for sale to its customers. If each cut is free and rods of different lengths can be sold for different amounts, we wish to determine how to best cut the original rods to maximize the revenue. P: price, L: length `; export const usecases = ''; export const example = ` ### 1 If length of the rod is 8 and the values of different pieces are given as following, then the maximum obtainable value is ***22***. | length | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | |--------|---|---|---|---|----|----|----|----| | price | 1 | 5 | 8 | 9 | 10 | 17 | 17 | 20 | By cutting in two pieces of lengths 2 and 6. ### 2 And if the prices are as following, then the maximum obtainable value is ***24***. | length | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | |--------|---|---|---|---|----|----|----|----| | price | 3 | 5 | 8 | 9 | 10 | 17 | 17 | 20 | By cutting in 8 pieces of length 1. 
` <file_sep>import * as THREE from 'three'; import Position from '../../../_commons/params/position.interface'; import { TextSphere } from '../../../_commons/sphere/text-sphere.interface'; import Line from "../line"; const getDepth = <T>(node?: TreeNode<T>): number => { if (node === undefined) { return 0; } const leftDepth: number = getDepth(node.left); const rightDepth: number = getDepth(node.right); return Math.max(leftDepth, rightDepth) + 1; } export default class TreeNode<T> { private _index: number; private _val: TextSphere<T>; private _left?: TreeNode<T>; private _right?: TreeNode<T>; private _leftLine?: Line; private _rightLine?: Line; constructor(val: TextSphere<T>) { this._val = val; this._index = 0; } get leftLine(): Line | undefined { return this._leftLine; } get rightLine(): Line | undefined { return this._rightLine; } set index(index: number) { this._index = index; } get index(): number { return this._index; } get left(): TreeNode<T> | undefined { return this._left; } get right(): TreeNode<T> | undefined { return this._right; } get val() { return this._val; } get depth(): number { return getDepth(this); } set sphereColor(color: string) { this.val.sphereColor.setColor(color); } set textColor(color: string) { this.val.textColor.setColor(color); } move(distance: Position, duration: number): Promise<void> { const x = this.val.center.x + distance.x; const y = this.val.center.y + distance.y; const z = this.val.center.z + distance.z; return this.moveTo({ x, y, z }, duration); } moveTo(dest: Position, duration: number): Promise<void> { const onUpdate = () => { if (this._leftLine) { this._leftLine.start = this.val.center if (this._left) { this._leftLine.end = this._left.val.center; } } if (this._rightLine) { this._rightLine.start = this.val.center if (this._right) { this._rightLine.end = this._right.val.center; } } } return this.val.move(dest, duration, onUpdate); } setLeft(node: TreeNode<T>, position: Position, lineMaterial: THREE.LineBasicMaterial, duration: number, scene: THREE.Scene): Promise<void> { this._left = node; this._left.index = this.leftChildIndex; this._leftLine = new Line(this._val.center, node._val.center, lineMaterial, scene); this._leftLine.show(); const onUpdate = () => { if (this._leftLine) { this._leftLine.end = node.val.center; } } return node._val.move(position, duration, onUpdate); } setRight(node: TreeNode<T>, position: Position, lineMaterial: THREE.LineBasicMaterial, duration: number, scene: THREE.Scene): Promise<void> { this._right = node; this._right.index = this.rightChildIndex; this._rightLine = new Line(this._val.center, node._val.center, lineMaterial, scene); this._rightLine.show(); const onUpdate = () => { if (this._rightLine) { this._rightLine.end = node._val.center; } } return node._val.move(position, duration, onUpdate); } deleteLeft(): TreeNode<T> | undefined { if (this._leftLine) { this._leftLine.hide(); this._leftLine = undefined; } return this._left; } deleteRight(): TreeNode<T> | undefined { if (this._rightLine) { this._rightLine.hide(); this._rightLine = undefined; } return this._right; } show() { this._val.show(); } hide() { this._val.hide(); if (this._leftLine) { this._leftLine.hide(); } if (this._rightLine) { this._rightLine.hide(); } } get leftChildIndex(): number { return 2 * this._index + 1; } get rightChildIndex(): number { return 2 * this._index + 2; } get parentIndex(): number { return Math.floor((this._index - 1) / 2); } } <file_sep>import Color from "../../_commons/params/color.interface"; import Displayer from 
"../../_commons/params/displayer.interface"; import Mover from "../../_commons/params/mover.interface"; import Position from "../../_commons/params/position.interface"; export default interface Text<T> extends Position, Mover, Displayer, Color { value: T; } <file_sep>export const title = "Binary Tree Postorder Traversal"; export const formula = ` function postorder(node: Node) { if (node.left) { postorder(node.left); } if (node.right) { postorder(node.right); } print(node); } OR function postorder(node: Node | null) { if (node == null) { return; } postorder(node.left); postorder(node.right); print(node); }`; export const description = ` For a binary tree, they are defined as access operations at each node, starting with the current node, then Go down one level to Children: - (L) Recursively traverse left subtree. - (R) Recursively traverse right subtree. - (N) Process the current node N itself. Return by going up one level and arriving at the parent node. --- Post-order (LRN) 1. Traverse the left subtree by recursively calling the post-order function. 2. Traverse the right subtree by recursively calling the post-order function. 3. Access the data part of the current node. From Wikipedia. `; export const usecases = ''; export const example = ''; <file_sep>export interface KnapSackItem { weight: number; value: number; } <file_sep>import Queue from '../queue-algo'; test('queue enqueue and dequeue as FIFO', async () => { const queue = new Queue<number>(); await queue.enqueue(10); await queue.enqueue(11); await queue.enqueue(12); await expect(queue.dequeue()).resolves.toBe(10); await expect(queue.dequeue()).resolves.toBe(11); await expect(queue.dequeue()).resolves.toBe(12); }); test('queue peek', async () => { const queue = new Queue<number>(); await queue.enqueue(10); await expect(queue.peek()).resolves.toBe(10); await queue.enqueue(11); await expect(queue.peek()).resolves.toBe(10); await queue.enqueue(12); await expect(queue.peek()).resolves.toBe(10); await queue.dequeue(); await expect(queue.peek()).resolves.toBe(11); }); test("queue return it's size after enqueue the new item", async () => { const queue = new Queue<number>(); const size1 = await queue.enqueue(10); expect(size1).toBe(1); const size2 = await queue.enqueue(17); expect(size2).toBe(2); }); test('queue size', async () => { const queue = new Queue<number>(); const promises = [queue.enqueue(1), queue.enqueue(2), queue.enqueue(3)]; await Promise.all(promises); await expect(queue.size()).resolves.toBe(promises.length); await queue.dequeue(); await expect(queue.size()).resolves.toBe(promises.length - 1); }); test('queue isEmpty', async () => { const queue = new Queue<number>(); await expect(queue.isEmpty()).resolves.toBeTruthy(); const enqueuePromises = [ queue.enqueue(1), queue.enqueue(2), queue.enqueue(3), ]; await Promise.all(enqueuePromises); await expect(queue.isEmpty()).resolves.toBeFalsy(); const dequeuePromises = [queue.dequeue(), queue.dequeue(), queue.dequeue()]; await Promise.all(dequeuePromises); await expect(queue.isEmpty()).resolves.toBeTruthy(); }); <file_sep>import Container from "../_commons/container"; export enum Action { Leave, Insert, Override } export type Step = { a: Container; action: Action; index?: number; } <file_sep>import { Game } from "../../commons/game"; import Category from "../../commons/segments/category"; import Difficulty from "../../commons/segments/difficulty"; const info: Game = { name: "2-3Tree Vs RedBlackTree", path: "/algorithms/two-three-tree-red-black-tree", categories: [Category.Tree], 
companies: [], difficulty: Difficulty.Hard, img: "/img/two_three_tree_red_black_tree.png" } export default info; <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; export const errorSphereColor = "red"; export const enabledSphereColor = "lightgreen"; export const normalSphereColor = "yellow"; export const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); export const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } export const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); export const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; export const lineMaterial = new THREE.LineBasicMaterial({ color: "gold" }); export const center = { x: 0, y: 10, z: 0 }; export const yDistance = 3; export const duration = 0; export const xAxisAplha = 2; const getText = (value: number): string => { switch (value) { case -Infinity: return "min" case Infinity: return "max"; default: return value + ""; } } export const lowerUpperTextMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "orange" }); export const lowerUpperTextGeometryParameters: TextGeometryParameters = { font, size: 0.5, height: 0.1 }; export const buildThreeText = (value: number, x: number, y: number, z: number): THREE.Mesh => { const text = getText(value); const textGeometry = new TextGeometry(text, lowerUpperTextGeometryParameters); const mesh = new THREE.Mesh(textGeometry, lowerUpperTextMaterial); mesh.position.set(x, y, z); return mesh; } <file_sep>export const nodeOriginalSkinColor: string = "yellow"; export const nodeOriginalTextColor: string = "green"; export const nodeEnabledSkinColor: string = "green"; export const nodeEnabledTextColor: string = "gray"; export const regularEdgeColor: string = "gold"; export const redundantEdgeColor: string = "red"; <file_sep>export const title = "Kth Smallest Element in a BST"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ // inorder function kthSmallest(root: TreeNode | null, k: number): number { let index = 0; let value = 0; function dfs(node: TreeNode | null) { if (node === null) { return; } dfs(node.left); index = index + 1; if (index === k) { value = node.val; return; } dfs(node.right); } dfs(root); return value; };`; export const description = ` Given the **root** of a binary search tree, and an integer **k**, return the **kth** smallest value ***(1-indexed)*** of all the values of the nodes in the tree. 
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Top K Frequent Elements"; export const formula = `function topKFrequent(nums: number[], k: number): number[] { type HeapItem = { num: number; count: number; } class MinHeap { items: HeapItem[]; constructor() { this.items = []; } peek() { return this.items[0]; } delete() { const root = this.items.shift(); const last = this.items.pop(); if (last !== undefined) { this.items.unshift(last); this.bubbleDown(0); } return root; } push(num: number, count: number) { this.items.push({ num, count }); this.bubbleUp(this.items.length - 1); } size() { return this.items.length; } private bubbleDown(index: number) { let target = index; const leftIndex = 2 * index + 1; if (this.items[target] !== undefined && this.items[leftIndex] !== undefined && this.items[target].count > this.items[leftIndex].count ) { target = leftIndex; } const rightIndex = 2 * index + 2; if (this.items[target] !== undefined && this.items[rightIndex] !== undefined && this.items[target].count > this.items[rightIndex].count ) { target = rightIndex; } if (target === index) { return; } this.swap(target, index); this.bubbleDown(target); } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.items[index].count < this.items[parentIndex].count) { this.swap(parentIndex, index); this.bubbleUp(parentIndex); } } private swap(i: number, j: number) { [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; } } const map: Map<number, number> = new Map(); nums.forEach(num => { const count = map.get(num) || 0; map.set(num, count + 1); }) const heap = new MinHeap(); map.forEach((value, key) => { if (heap.size() === k) { if (heap.peek().count < value) { heap.delete(); heap.push(key, value); } } else { heap.push(key, value); } }) return heap.items.map(item => item.num); };`; export const description = ` Given an integer array **nums** and an integer **k**, return the **k** most frequent elements. You may return the answer in **any order**. 
`; export const solution = ``; export const usecases = ''; export const example = ` --- **Example 1**: - Input: nums = [3,3,3,1,1,2,2,2,3], k = 2 - Output: [3,2] **Example 2**: - Input: nums = [5], k = 1 - Output: [5] `; <file_sep>import { uuidv4 } from "./uuid"; export default class Node<T> { x?: number; y?: number; z?: number; private populateCoordinates(startY: number, marginY: number, z: number): void { this.z = z; if (this.isLeaf) { this.y = startY; return; } const child: Node<T> = this.children[0]; if (child && child.y) { this.y = child.y + marginY; } if (this.minChild.x !== undefined && this.maxChild.x !== undefined) { this.x = (this.minChild.x + this.maxChild.x) / 2 } } populateTreeCoordinates(startY: number, marginY: number, z: number): void { this.children.forEach(item => { item.populateTreeCoordinates(startY, marginY, z); item.populateCoordinates(startY, marginY, z); }); this.populateCoordinates(startY, marginY, z); } private id: string; vals: T[]; children: Node<T>[]; parent?: Node<T>; private removeChild(childId: string) { this.children = this.children.filter(child => child.id !== childId); } constructor(val: T) { this.id = uuidv4(); this.vals = []; this.children = []; this.vals.push(val); } private sortVals() { this.vals.sort((a, b) => { if (a < b) { return -1; } else if (a > b) { return 1; } else { return 0; } }); } private get isLeaf(): boolean { return this.children.length === 0; } public insert(val: T): Node<T> { if (this.isLeaf) { this.vals.push(val); this.sortVals(); switch (this.vals.length) { case 2: if (this.parent) { return this.parent.insertFromChild(); } else { return this; } case 3: const left = new Node(this.vals[0]); const right = new Node(this.vals[2]); if (this.parent) { this.parent.removeChild(this.id); this.parent.children.push(left); this.parent.children.push(right); left.parent = this.parent; right.parent = this.parent; this.parent.sortChildren(); return this.parent.insertFromChild(this.vals[1]); } else { const parent = new Node<T>(this.vals[1]); left.parent = parent; right.parent = parent; parent.children.push(left); parent.children.push(right); parent.sortChildren(); return parent; } default: throw new Error(`insert errors inLeaf with wrong size of vals: ${this.vals.length}`); } } else { switch (this.vals.length) { case 1: if (val < this.vals[0]) { return this.minChild.insert(val); } else { return this.maxChild.insert(val); } case 2: if (val < this.vals[0]) { return this.minChild.insert(val); } else if (val > this.vals[1]) { return this.maxChild.insert(val); } else { return this.midChild.insert(val); } default: throw new Error(`insert errors nonLeft with wrong size of vals: ${this.vals.length}`); } } } private insertFromChild(val?: T): Node<T> { if (this.parent) { if (val) { switch (this.vals.length) { case 1: this.vals.push(val); this.sortVals(); return this.parent.insertFromChild(); case 2: this.vals.push(val); this.sortVals(); const left_temp = new Node(this.vals[0]); left_temp.children.push(this.children[0]); left_temp.children.push(this.children[1]); this.children[0].parent = left_temp; this.children[1].parent = left_temp; left_temp.parent = this.parent; this.parent.children.push(left_temp); const right_temp = new Node(this.vals[2]); right_temp.children.push(this.children[2]); right_temp.children.push(this.children[3]); this.children[2].parent = right_temp; this.children[3].parent = right_temp; this.parent.children.push(right_temp); right_temp.parent = this.parent; this.parent.removeChild(this.id); this.parent.sortChildren(); return 
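/*
 * 2-3 tree split of an internal node: the node temporarily holds three keys and four
 * children, so it is replaced under its parent by two single-key nodes (left_temp and
 * right_temp) and the middle key vals[1] is pushed one level up, where the split may
 * cascade again.
 */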
this.parent.insertFromChild(this.vals[1]); default: throw new Error("node vals lenght unexpected in insertFromChild"); } } else { return this.parent.insertFromChild(); } } else { if (val) { switch (this.vals.length) { case 1: this.vals.push(val); this.sortVals(); return this; case 2: this.vals.push(val); this.sortVals(); const root = new Node<T>(this.vals[1]); const left_temp = new Node(this.vals[0]); left_temp.children.push(this.children[0]); left_temp.children.push(this.children[1]); this.children[0].parent = left_temp; this.children[1].parent = left_temp; root.children.push(left_temp); left_temp.parent = root; const right_temp = new Node(this.vals[2]); right_temp.children.push(this.children[2]); right_temp.children.push(this.children[3]); this.children[2].parent = right_temp; this.children[3].parent = right_temp; root.children.push(right_temp); right_temp.parent = root; root.sortChildren(); return root; default: throw new Error("node vals lenght unexpected in insertFromChild"); } } else { return this; } } } private sortChildren() { this.children.sort((a, b) => { if (a.vals[0] < b.vals[0]) { return -1; } else if (a.vals[0] > b.vals[0]) { return 1; } else { return 0; } }); } private get minChild(): Node<T> { this.sortChildren(); return this.children[0]; } private get maxChild(): Node<T> { this.sortChildren(); return this.children[this.children.length - 1]; } private get midChild(): Node<T> { this.sortChildren(); return this.children[1]; } } <file_sep>export interface Step { left: number; right: number; answer: number; mid: number; square: number; } export interface Result { steps: Step[]; sqrt: number; } const calculateMid = (left: number, right: number): number => { return left + Math.floor((right - left) / 2); } export function mySqrt(x: number): Result { const steps: Step[] = []; let [left, right, answer] = [0, x, -1]; const mid = calculateMid(left, right); steps.push({ left, right, mid, square: mid * mid, answer }); while (left <= right) { const mid = calculateMid(left, right); if (mid * mid <= x) { answer = mid; left = mid + 1; } else { right = mid - 1; } const nextMid = calculateMid(left, right); steps.push({ left, right, answer, mid: nextMid, square: nextMid * nextMid }); } steps.pop(); return { steps, sqrt: answer }; }; <file_sep>import Category from "./segments/category"; import Company from "./segments/company"; import Difficulty from "./segments/difficulty"; export interface Game { name: string; path: string; categories: Category[]; companies: Company[]; difficulty: Difficulty; img: string; leetcodeId?: number; } <file_sep>import createDpTable from "../algo"; const getLastCell = (table: number[][]): number => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("edit-distance", () => { const str1 = "apple"; const str2 = "oppo"; const table = createDpTable(str1, str2); const distance = getLastCell(table); expect(distance).toBe(3); }); <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<number>; lower: number; upper: number; isBalanced?: boolean; } export function buildSteps(root?: TreeNode<number>): Step[] { const steps: Step[] = []; function isBST(lower: number, upper: number, node?: TreeNode<number>, isBalanced?: boolean): boolean { if (node === undefined) { return true; } if (node.val.value <= lower) { steps.push({ node, lower, upper, isBalanced: false }); return false; } if (node.val.value >= upper) { steps.push({ node, lower, upper, 
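/* value >= upper violates the BST upper bound inherited from an ancestor,
   so this visit is recorded as a failed step before returning false. */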
isBalanced: false }); return false; } steps.push({ node, lower, upper, isBalanced }); const isLeftBST = isBST(lower, node.val.value, node.left, isBalanced); if (!isLeftBST) { steps.push({ node, lower, upper, isBalanced: false }); return false; } const isRightBST = isBST(node.val.value, upper, node.right, isBalanced); if (!isRightBST) { steps.push({ node, lower, upper, isBalanced: false }); return false; } steps.push({ node, lower, upper, isBalanced: true }); return true; } isBST(-Infinity, Infinity, root, undefined); return steps; } <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; export const lineColor = "gold"; export const normalSphereColor = "yellow"; export const enabledSphereColor = "orange"; export const textColor = "green"; export const rangeColor = "orange"; export const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); export const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } export const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: textColor }); export const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; export const lineMaterial = new THREE.LineBasicMaterial({ color: lineColor }); export const rangeMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: rangeColor }); export const rangeGeometryParameters: TextGeometryParameters = { font, size: 0.4, height: 0.1 }; <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary } from '../_commons/styles'; const startPoint: Point = { row: 2, col: 2, }; interface TableSize { rows: number; cols: number; } const getTableSize = (sentence: string, dictionary: string[]): TableSize => { const rows = sentence.length + 2; const cols = sentence.length + 2; return { rows, cols }; }; const createTableMatrix = (sentence: string, dictionary: string[]): (boolean | string)[][] => { const { rows, cols } = getTableSize(sentence, dictionary); const table = new Array(rows).fill('').map(() => new Array(cols).fill('')); for (let col = 2; col < cols; col++) { table[0][col] = sentence.charAt(col - 2); table[1][col] = col - 1; } for (let row = 2; row < rows; row++) { table[row][0] = sentence.charAt(row - 2); table[row][1] = row - 1; } table[1][1] = 0; table[startPoint.row][startPoint.col] = '?'; return table; }; const createComparedTable = (sentence: string, dictionary: string[]): (boolean | string)[][] => { const { rows, cols } = getTableSize(sentence, dictionary); const dpTable = createDPTable(sentence, dictionary); const tableMatrix = createTableMatrix(sentence, dictionary); for (let row = 2; row < rows; row++) { for (let col = 2; col < cols; col++) { tableMatrix[row][col] = dpTable[row - 2][col - 2]; } } return tableMatrix; }; const addHelperStyles = (styles: React.CSSProperties[][], { row, col }: Point): void => { for (let i = row; i <= col; i++) { styles[0][i] = helperStyleSecondary; styles[1][i] = helperStyle; styles[i][0] = helperStyleSecondary; styles[i][1] = helperStyle; } }; const createTableStyles = (sentence: string, dictionary: string[]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(sentence, dictionary); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint); return table; }; const 
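/* the two buttons rendered with the DP table hold the boolean answers (true / false)
   a visitor can pick for the current cell. */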
createButtons = (sentence: string, dictionary: string[]): boolean[] => [true, false];

const createButtonsStyles = (sentence: string, dictionary: string[]): (React.CSSProperties)[] => {
    return createButtons(sentence, dictionary).map(() => ({ color: 'black' }));
};

export {
    addHelperStyles,
    createTableMatrix,
    createComparedTable,
    createTableStyles,
    createButtons,
    createButtonsStyles,
    startPoint,
};
<file_sep>import { Point } from "../_commons/point";

const isMatch = ({ row, col }: Point, r: number, c: number) => (row === r && col === c);

export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] =>
    table.map(row => row.map(() => ({})));

export const updateTable = (table: (string | number)[][], point: Point, value: number): (string | number)[][] =>
    table.map((row, rowIndex) => {
        return row.map((cell, colIndex) => isMatch(point, rowIndex, colIndex) ? value : cell);
    });

export const nonCorrect = (comparedTable: (string | number)[][], { row, col }: Point, value: number): boolean =>
    (comparedTable[row][col] !== value);

export const isLastCell = (table: (string | number)[][], point: Point): boolean => {
    const row = table.length - 1;
    const col = table[row].length - 1;
    return isMatch(point, row, col);
};

export const getNextPoint = (table: (string | number)[][], { row, col }: Point): Point =>
    (col === table[row].length - 1) ? { row: row + 1, col: 2 } : { row, col: col + 1 };

interface Helpers {
    helperTable: (string | number)[][];
    resultsInDifferentFloors: number[];
}

export const createHelperTable = (point: Point, comparedTable: (string | number)[][]): Helpers => {
    const eggs = point.row;
    const floors = point.col - 1;

    let helperTable: (string | number)[][] = [];
    let resultsInDifferentFloors: number[] = [];

    if (eggs > floors) {
        return { helperTable, resultsInDifferentFloors };
    } else {
        for (let floor = 1; floor <= floors; floor++) {
            const row: (string | number)[] = [];

            // floor number
            row.push(floor);

            const breaks = Number(comparedTable[eggs - 1][floor]);
            const nonBreaks = Number(comparedTable[eggs][floors - floor + 1]);

            // breaks
            row.push(`Eggs-1 = ${eggs}-1 = ${eggs - 1}`);
            row.push(`Floor-1 = ${floor}-1 = ${floor - 1}`);
            row.push(`T[${eggs - 1}][${floor - 1}] = ${breaks}`);

            // non-breaks
            row.push(`Eggs = ${eggs}`);
            row.push(`Floors-Floor = ${floors}-${floor} = ${floors - floor}`);
            row.push(`T[${eggs}][${floors - floor}] = ${nonBreaks}`);

            // result
            // row.push(`1+ Max(Breaks, NonBreaks)`);
            row.push(`1 + Max(${breaks}, ${nonBreaks}) = ${1 + Math.max(breaks, nonBreaks)}`);
            resultsInDifferentFloors.push(1 + Math.max(breaks, nonBreaks));

            helperTable.push(row);
        }
    }

    return { helperTable, resultsInDifferentFloors };
};
<file_sep>export interface Collection {
    isEmpty(): Promise<boolean>;
    size(): Promise<number>;
}
<file_sep>import * as THREE from "three";
import { Material } from "three";
import { PlaneParameters, TextParameters } from "./node";
import { NodeStyles } from "./styles";

export function createPlaneParameters(
    material: Material,
    position: THREE.Vector3,
    { width, height }: NodeStyles,
): PlaneParameters {
    return { width, height, position, material };
}

export function createTextParameters(
    material: Material,
    position: THREE.Vector3,
    { textGeometryParameters }: NodeStyles,
): TextParameters {
    return { material, position, textGeometryParameters };
}

export function toMemoryAddress(address: number): string {
    const hex = address.toString(16).toUpperCase();
    return hex.length === 1 ?
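/* pad single-digit hex values so the displayed addresses line up,
   e.g. 10 -> "0x0A", 255 -> "0xFF" */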
"0x0" + hex : "0x" + hex; } <file_sep>class TreeNode { val: number; left?: TreeNode; right?: TreeNode; constructor(val: number) { this.val = val; } } const getDepth = (node?: TreeNode): number => { if (node === undefined) { return 0; } const leftDepth: number = getDepth(node.left); const rightDepth: number = getDepth(node.right); return Math.max(leftDepth, rightDepth) + 1; } export function calDepth(inorder: number[], postorder: number[],): number { const inorderIndexMap = new Map<number, number>(); inorder.forEach((value, index) => inorderIndexMap.set(value, index)); const buildMyTree = (inorderLeft: number, inorderRight: number, postorderLeft: number, postorderRight: number): TreeNode | undefined => { if (postorderLeft > postorderRight) { return undefined; } const inorderRootIndex = inorderIndexMap.get(postorder[postorderRight])!; const leftTreeLength = inorderRootIndex - inorderLeft; const root = new TreeNode(postorder[postorderRight]); root.left = buildMyTree(inorderLeft, inorderRootIndex - 1, postorderLeft, postorderLeft + leftTreeLength - 1); root.right = buildMyTree(inorderRootIndex + 1, inorderRight, postorderLeft + leftTreeLength, postorderRight - 1); return root; } const root: TreeNode | undefined = buildMyTree(0, inorder.length - 1, 0, postorder.length - 1) return getDepth(root); }; <file_sep>export default class Queue<T> { private items: T[]; constructor() { this.items = []; } public enqueue(item: T) { this.items.push(item); } public dequeue(): T | undefined { return this.items.shift(); } public isEmpty(): boolean { return this.items.length === 0; } public get size(): number { return this.items.length; } } <file_sep>import * as THREE from "three"; import Container from "../../_commons/container"; import { sort } from "../algo"; test("InsertionSort", () => { const fakePosition = new THREE.Vector3(0, 0, 0); const arrays: Container[] = []; arrays.push({ position: fakePosition, payload: 5 }); arrays.push({ position: fakePosition, payload: 2 }); arrays.push({ position: fakePosition, payload: 1 }); arrays.push({ position: fakePosition, payload: 4 }); arrays.push({ position: fakePosition, payload: 3 }); arrays.push({ position: fakePosition, payload: 7 }); arrays.push({ position: fakePosition, payload: 6 }); arrays.push({ position: fakePosition, payload: 8 }); arrays.push({ position: fakePosition, payload: 9 }); arrays.push({ position: fakePosition, payload: 0 }); arrays.push({ position: fakePosition, payload: 6 }); sort(arrays); for (let i = 0; i < arrays.length - 1; i++) { const a = arrays[i]; const b = arrays[i + 1]; expect(a.payload).toBeLessThanOrEqual(b.payload); } }); <file_sep>export default interface Color { setColor(color: string): Promise<void>; get color(): string; } <file_sep>export enum State { Clearing, Inserting, Ready, } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import { buildBinaryTree } from "../../../data-structures/tree/nodes/v1/binary-tree-builder"; const lineColor = "gold"; export const enabledSphereColor = "lightblue"; export const normalSphereColor = "yellow"; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); const textGeometryParameters: 
TextGeometryParameters = { font, size: 0.8, height: 0.1 }; const lineMaterial = new THREE.LineBasicMaterial({ color: lineColor }); export const buildTree = (array: (string | null)[], scene: THREE.Scene) => { const center = { x: 0, y: 7, z: 0 }; const show = true; const duration = 0; const yDistance = 3; const xAxisAlpha = 2; // expend the tree size in xAxis. return buildBinaryTree<string>( sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters, lineMaterial, scene, duration, center, yDistance, xAxisAlpha, array, show ); } const getText = (value: number): string => { switch (value) { case -Infinity: return "min" case Infinity: return "max"; default: return value + ""; } } const indexTextMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "orange" }); const indexUpperTextGeometryParameters: TextGeometryParameters = { font, size: 0.5, height: 0.1 }; export const buildThreeText = (value: number, x: number, y: number, z: number): THREE.Mesh => { const text = getText(value); const textGeometry = new TextGeometry(text, indexUpperTextGeometryParameters); const mesh = new THREE.Mesh(textGeometry, indexTextMaterial); mesh.position.set(x, y, z); return mesh; } <file_sep>import { Game } from "./game"; import Category from "./segments/category"; import Company from "./segments/company"; import Difficulty from "./segments/difficulty"; export const filter = ( games: Game[], categories: Category[], companies: Company[], difficulties: Difficulty[], ): Game[] => { if (categories.length === 0 && companies.length === 0 && difficulties.length === 0) { return games; } const filteredOne = filterCategory(games, categories); const filteredTwo = filterCompany(games, companies); const filteredThree = filterDifficulty(games, difficulties); return intersect(intersect(filteredOne, filteredTwo), filteredThree); } const intersect = <T>(arrayOne: T[], arrayTwo: T[]): T[] => { return arrayOne.filter(x => arrayTwo.includes(x)); } const filterCategory = (games: Game[], categories: Category[]): Game[] => { if (categories.length === 0) { return games; } else { return games.filter(game => intersect(categories, game.categories).length > 0); } } const filterCompany = (games: Game[], companies: Company[]): Game[] => { if (companies.length === 0) { return games; } else { return games.filter(game => intersect(companies, game.companies).length > 0); } } const filterDifficulty = (games: Game[], difficulties: Difficulty[]): Game[] => { if (difficulties.length === 0) { return games; } else { return games.filter(game => difficulties.includes(game.difficulty)); } } <file_sep>export const title = "Convert Sorted Array to Binary Search Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? 
null : right) * } * } */ function sortedArrayToBST(nums: number[]): TreeNode | null { function buildTree(left: number, right: number): TreeNode | null { if (left > right) { return null; } const mid = ~~((left + right) / 2); const node = new TreeNode(nums[mid]); node.left = buildTree(left, mid - 1); node.right = buildTree(mid + 1, right); return node; } return buildTree(0, nums.length - 1); };`; export const description = ` Given an integer array **nums** where the elements are sorted in **ascending order**, convert it to a ***height-balanced*** binary search tree. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export default interface SegmentTree { build(array: number[], duration: number): Promise<void>; update(index: number, value: number, duration: number): Promise<void>; query(left: number, right: number, duration: number): Promise<number | undefined>; } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from 'three/examples/jsm/geometries/TextGeometry'; import Color from "../../_commons/params/color.interface"; import ColorImpl from "../../_commons/three/color.class"; import Displayer from "../../_commons/params/displayer.interface"; import DisplayerImpl from "../../_commons/three/displayer.class"; import Mover from "../../_commons/params/mover.interface"; import MoverImpl from "../../_commons/three/mover.class"; import Position from "../../_commons/params/position.interface"; import PositionImpl from "../../_commons/three/position.class" import IText from "../../_commons/params/text.interface"; export default class Text<T> extends PositionImpl implements IText<T> { private _value: T; private _mover: Mover; private _displayer: Displayer; private _color: Color; constructor( value: T, material: THREE.Material, geometryParameters: TextGeometryParameters, scene: THREE.Scene ) { const geometry = new TextGeometry(value + '', geometryParameters); const mesh = new THREE.Mesh(geometry, material); super(mesh) this._value = value; this._mover = new MoverImpl(mesh); this._displayer = new DisplayerImpl(scene, mesh); this._color = new ColorImpl(material); } get value(): T { return this._value; } setColor(color: string): Promise<void> { return this._color.setColor(color); } get color(): string { return this._color.color; } show() { this._displayer.show(); } hide() { this._displayer.hide(); } move(position: Position, duration: number, onUpdate?: (() => void) | undefined): Promise<void> { return this._mover.move(position, duration, onUpdate); } } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right } export enum ActionType { CountLevel, DFS } export interface Step { node: TreeNode<string>; actionType: ActionType; count: number; direction?: Direction; } export function buildSteps(root?: TreeNode<string>): Step[] { const steps: Step[] = []; let count: number = 0; function countLevel(node: TreeNode<string> | undefined, level: number): number { if (!node) { return level; } steps.push({ node, actionType: ActionType.CountLevel, count }); return countLevel(node.left, level + 1); } function dfs(node?: TreeNode<string>): number { if (!node) { return 0; } const leftLevels = countLevel(node.left, 0); const rightLevels = countLevel(node.right, 0); if (leftLevels === rightLevels) { count += Math.pow(2, leftLevels); steps.push({ node, actionType: ActionType.DFS, direction: Direction.Right, count }); return dfs(node.right) + Math.pow(2, leftLevels); } else { count += 
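/* the left and right spine heights differ, so the right subtree is a perfect tree of
   height rightLevels: it contributes 2^rightLevels - 1 nodes, plus the current root,
   i.e. 2^rightLevels in total; only the left subtree still needs to be walked. */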
Math.pow(2, rightLevels); steps.push({ node, actionType: ActionType.DFS, direction: Direction.Left, count }); return dfs(node.left) + Math.pow(2, rightLevels); } } dfs(root); return steps; } <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import TreeNode from "./node"; import Position from '../../../_commons/params/position.interface'; import { build } from "./tree-node-builder"; import { buildPerfectBinaryTree } from '../utils/perfect-binary-tree'; export const buildBinaryTree = <T>( sphereGeometry: THREE.SphereGeometry, sphereMaterial: () => THREE.Material, textMaterial: THREE.Material, textGeometryParameters: TextGeometryParameters, lineMaterial: THREE.LineBasicMaterial, scene: THREE.Scene, duration: number, position: Position, yDistance: number, xAxisAlpha: number, // expend the tree size in xAxis. array: (T | null)[], show: boolean ): TreeNode<T> | undefined => { const depth = Math.floor(Math.log2(array.length)) + 1; const xAxis: number[] = buildPerfectBinaryTree(depth, xAxisAlpha, 2).map(node => node.x); const xAlpha = (xAxis.length === 0) ? 0 : position.x - xAxis[0]; const buildTree = (index: number, center: Position): TreeNode<T> | undefined => { const value = array[index]; if (value === null || value === undefined) { return undefined; } const node = build<T>( index, sphereGeometry, sphereMaterial(), textMaterial, textGeometryParameters, value, scene, center ); if (show) { node.show(); } const { y, z } = center; const leftPosition = { x: xAxis[node.leftChildIndex] + xAlpha, y: y - yDistance, z }; const left = buildTree(node.leftChildIndex, leftPosition); if (left) { node.setLeft(left, leftPosition, lineMaterial, duration, scene); } const rightPosition = { x: xAxis[node.rightChildIndex] + xAlpha, y: y - yDistance, z }; const right = buildTree(node.rightChildIndex, rightPosition); if (right) { node.setRight(right, rightPosition, lineMaterial, duration, scene); } return node; } return buildTree(0, position); } <file_sep>const initTable = (eggs: number, floors: number) => { const table = []; table.push(Array.from(Array(floors + 1).keys())); for (let row = 1; row < eggs; row += 1) { table.push(Array(floors + 1).fill(0)); } return table; }; const createDPTable = (eggs: number, floors: number): number[][] => { const table = initTable(eggs, floors); for (let row = 1; row < table.length; row += 1) { for (let col = 1; col < table[row].length; col += 1) { if (row + 1 > col) { table[row][col] = table[row - 1][col]; } else { let min = floors; for (let f = 1; f <= col; f += 1) { const max = Math.max(table[row - 1][f - 1], table[row][col - f]); min = Math.min(min, max); } table[row][col] = 1 + min; } } } return table; }; export default createDPTable; <file_sep>import Color from "../_commons/params/color.interface"; import Displayer from "../_commons/params/displayer.interface"; import Mover from "../_commons/params/mover.interface"; import Position from "../_commons/params/position.interface"; import { calDistance, calDestination } from "../_commons/utils"; export interface Base extends Mover, Displayer, Color, Position { } export interface GraphSkin extends Base { } export interface GraphText extends Base { text: string; } export class GraphNode<T> implements Mover, Displayer { value: T; readonly id: number; readonly skin: GraphSkin; readonly text: GraphText; constructor( id: number, value: T, skin: GraphSkin, text: GraphText, ) { this.id = id; this.value = value; this.skin = skin; this.text = text; } show() { 
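/* a graph node is drawn as two meshes kept in sync: the skin (shape) and its text
   label; show, hide and move always touch both. */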
this.skin.show(); this.text.show(); } hide() { this.skin.hide(); this.text.hide(); } async move(position: Position, duration: number, onUpdate?: (() => void) | undefined) { const skinMove = this.skin.move(position, duration, onUpdate); const textMove = this.text.move(this.calTextDestination(position), duration); return Promise.all([skinMove, textMove]).then(() => { }) } private calTextDestination(destination: Position): Position { const distance = calDistance(this.skin, destination); return calDestination(this.text, distance); } } <file_sep>export const title = "House Robber"; export const formula = ` for (int i = 2; i < nums.length; i++) { nums[i] = Math.max(nums[i] + nums[i-2], nums[i-1]); } `; export const description = `You are a professional robber planning to rob houses along a street. Each house has a certain amount of money stashed, the only constraint stopping you from robbing each of them is that adjacent houses have security system connected and it will automatically contact the police if two adjacent houses were broken into on the same night. Given a list of non-negative integers representing the amount of money of each house, ***determine the maximum amount of money you can rob tonight without alerting the police.***`; export const usecases = ''; export const example = ` Input: \\[2, 4, 3, 1, 6, 5, 7, 6\\] Output: 18 Explanation: - Rob house 1 (money = 2), rob house 3 (money = 3) and rob house 5 (money = 6) rob house 7 (money = 7). - Total amount you can rob = 2 + 3 + 6 + 7 = 18. `; <file_sep>import { Point } from "../_commons/point"; export const getLastPoint = (table: (string | boolean)[][]): Point => { const row = 2; const col = table[row].length - 1; return { row, col }; }; const booleanToString = (value: boolean | string): string => { if (value === "?") return value; return (value ? 'T' : 'F'); }; const isMatch = ({ row, col }: Point, r: number, c: number) => (row === r && col === c); export const createNewTableStyles = (table: React.CSSProperties[][]): React.CSSProperties[][] => table.map(row => row.map(() => ({}))); export const updateTable = (table: (string | boolean)[][], point: Point, value: boolean | string): (string | boolean)[][] => table.map((row, rowIndex) => { return row.map((cell, colIndex) => isMatch(point, rowIndex, colIndex) ? booleanToString(value) : cell); }); export const nonCorrect = (comparedTable: (string | boolean)[][], { row, col }: Point, value: boolean): boolean => (comparedTable[row][col] !== value); export const isLastCell = (table: (string | boolean)[][], { row, col }: Point): boolean => { return row === 2 && col === table[2].length - 1; }; export const createNextCol = (col: number, len: number, table: (string | boolean)[][]): number => { const nextCol = (col + 1) % table.length; return nextCol < 2 ? 1 + len : nextCol; }; export const getNextPoint = (table: (string | boolean)[][], { row, col }: Point, length: number) => { const nextRow = col + 1 === table.length ? 2 : row + 1; const nextLen = col + 1 === table.length ? 
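/* when the column pointer wraps past the last column, the walk restarts at row 2 with
   the tracked length increased by one; otherwise row and column advance together and
   the length stays the same. */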
length + 1 : length; const nextCol = createNextCol(col, nextLen, table); return { row: nextRow, col: nextCol, length: nextLen }; }; <file_sep>import * as THREE from 'three'; export const text = { color: "green", size: 0.6, height: 0.1 }; export const node = { size: { width: 1, height: 1, depth: 1 }, } export const queue = { material: new THREE.MeshBasicMaterial({ color: "lightgrey", opacity: 0.5, transparent: true }), }; export const queueOnePosition = { name: new THREE.Vector3(-3.6, 3.8, -4), queue: new THREE.Vector3(-3, 3, -4), } export const queueTwoPosition = { name: new THREE.Vector3(-3.6, 0.7, -4), queue: new THREE.Vector3(-3, 0, -4) } export const queueNameStyles = { color: "orange", size: 0.4, height: 0.1 }; <file_sep>export const title = "Longest Increasing Subsequence"; export const formula = ` // Core Formula: if (array[x] < array[y]) { values[y] = Math.max(values[y], values[x] + 1); } // Full Java Code: public int lengthOfLIS(int[] nums) { if (nums.length == 0) { return 0; } int[] dp = new int[nums.length]; for (int i = 0; i < dp.length; i++) { dp[i] = 1; } for (int i = 1; i < dp.length; i++) { for (int j = 0; j < i; j++) { if (nums[i] > nums[j]) { dp[i] = Math.max(dp[i], dp[j] + 1); } } } int max = 1; for (int i = 0; i < dp.length; i++) { max = Math.max(max, dp[i]); } return max; } `; export const description = `Given an unsorted array of integers, find the length of longest increasing subsequence. `; export const usecases = ''; export const example = ` - Given \\[6, 7, 0, 1, 9, 3, 5, 8, 4\\] - The longest increasing subsequence is \\[0, 1, 3, 5, 8\\], therefore the length is 5. `; <file_sep>import Container from "../_commons/container"; import { swap } from "../_commons/helps"; import Step from "./step"; export const sort = (arrays: Container[]): Step[] => { const steps: Step[] = []; for (let i = 0; i < arrays.length - 1; i++) { let min = i; for (let j = i + 1; j < arrays.length; j++) { if (arrays[j].payload < arrays[min].payload) { min = j; } steps.push({ min, a: arrays[i], b: arrays[j], exchange: false }); } if (min !== i) { steps.push({ min, a: arrays[i], b: arrays[min], exchange: true }); swap(arrays, i, min); } else { steps.push({ min, a: arrays[i], b: arrays[min], exchange: true }); } } return steps; } <file_sep>export const title = "Roman to Integer"; export const formula = `function romanToInt(s: string): number { const getValue = (ch: string): number => { switch (ch) { case 'I': return 1; case 'V': return 5; case 'X': return 10; case 'L': return 50; case 'C': return 100; case 'D': return 500; case 'M': return 1000; default: return 0; } } let sum = 0; let prev = getValue(s.charAt(0)); for (let i = 1; i < s.length; i++) { const num = getValue(s.charAt(i)); if (prev < num) { sum -= prev; } else { sum += prev; } prev = num; } sum += prev; return sum; };`; export const description = ` Roman numerals are represented by seven different symbols: **I**, **V**, **X**, **L**, **C**, **D** and **M**. | Symbol | Value | |:------:|------:| | I | 1 | | V | 5 | | X | 10 | | L | 50 | | C | 100 | | D | 500 | | M | 1000 | For example, **2** is written as **II** in Roman numeral, just two one's added together. **12** is written as **XII**, which is simply **X + II**. The number **27** is written as **XXVII**, which is **XX + V + II**. Roman numerals are usually written largest to smallest from left to right. However, the numeral for four is not **IIII**. Instead, the number four is written as **IV**. Because the one is before the five we subtract it making four. 
The same principle applies to the number nine, which is written as **IX**. There are six instances where subtraction is used: - **I** can be placed before **V** (5) and **X** (10) to make 4 and 9. - **X** can be placed before **L** (50) and **C** (100) to make 40 and 90. - **C** can be placed before **D** (500) and **M** (1000) to make 400 and 900. Given a roman numeral, convert it to an integer. **Constraints:** - 1 <= s.length <= 15 - **s** contains only the characters ('I', 'V', 'X', 'L', 'C', 'D', 'M'). - 1 <= num <= 3999 `; export const solution = ``; export const usecases = ''; export const examples = ` --- #### Example 1: - Input: s = "III" - Output: 3 - Explanation: III = 3. #### Example 2: - Input: s = "LVIII" - Output: 58 - Explanation: L = 50, V= 5, III = 3. #### Example 3: - Input: s = "MCMXCIV" - Output: 1994 - Explanation: M = 1000, CM = 900, XC = 90 and IV = 4. `; <file_sep>export const title = "Basic Calculator II"; export const shortFormula = `function calculate(s: string): number { let previousSign = '+'; const stack: number[] = []; for (let i = 0; i < s.length; i++) { const character = s.charAt(i); if (isNumeric(character)) { let current = +character; while (i + 1 < s.length && isNumeric(s.charAt(i + 1))) { current = current * 10 + (+s.charAt(i + 1)); i++; } if (previousSign === "+") { stack.push(current); } else if (previousSign === "-") { stack.push(current * -1); } else if (previousSign === "*") { stack.push(stack.pop() * current); } else if (previousSign === "/") { stack.push((stack.pop() / current) | 0); } } else if (character !== " ") { previousSign = character; } } return stack.reduce((a, b) => a + b); };` export const formula = `function isNumeric(str: string) { const num = parseInt(str); return !isNaN(num); } function calculate(s: string): number { let previousSign = '+'; const stack: number[] = []; for (let i = 0; i < s.length; i++) { const character = s.charAt(i); if (isNumeric(character)) { let current = +character; while (i + 1 < s.length && isNumeric(s.charAt(i + 1))) { current = current * 10 + (+s.charAt(i + 1)); i++; } if (previousSign === "+") { stack.push(current); } else if (previousSign === "-") { stack.push(current * -1); } else if (previousSign === "*") { stack.push(stack.pop() * current); } else if (previousSign === "/") { stack.push((stack.pop() / current) | 0); } } else if (character !== " ") { previousSign = character; } } return stack.reduce((a, b) => a + b); };`; export const description = `## ${title} --- Given a string s which represents an expression, evaluate this expression and return its value. The integer division should truncate toward zero. You may assume that the given expression is always valid. Note: You are not allowed to use any built-in function which evaluates strings as mathematical expressions, such as eval(). ### Constraints + 1 <= s.length <= 3 * 10^5 + s consists of integers and operators ('+', '-', '*', '/') separated by some number of spaces. + s represents a valid expression. + All the integers in the expression are non-negative integers in the range [0, 2^31 - 1]. + Every number and running calculation will fit in a signed 32-bit integer. 
### Example

##### Example 1:
- Input: s = "1 + 2 * 3"
- Output: 7

##### Example 2:
- Input: s = "1 + 2 * 3 / 4"
- Output: 2

##### Example 3:
- Input: s = "1 + 2 - 3 * 4 * 5 / 6 + 7 - 8"
- Output: -8
`;
<file_sep>export const title = "Course Schedule II";

export const formula = `function findOrder(numCourses: number, prerequisites: number[][]): number[] {
    const adjacency: Map<number, number[]> = new Map();
    for (let i = 0; i < numCourses; i++) {
        adjacency.set(i, []);
    }

    prerequisites.forEach(prerequisite => {
        const [a, b] = prerequisite;
        adjacency.get(b).push(a);
    });

    const stack: number[] = [];
    let hasCycle = false;
    const visited: Set<number> = new Set();

    const dfs = (current: number) => {
        if (visited.has(current)) {
            hasCycle = true;
            return;
        }
        if (stack.indexOf(current) >= 0) {
            return;
        }

        visited.add(current);
        const children = adjacency.get(current);
        for (let i = 0; i < children.length; i++) {
            dfs(children[i]);
            if (hasCycle) {
                return;
            }
        }
        visited.delete(current);

        stack.push(current);
    }

    for (let i = 0; i < numCourses; i++) {
        dfs(i);
        if (hasCycle) {
            return [];
        }
    }

    return stack.reverse();
};`;

export const description = `
There are a total of numCourses courses you have to take, labeled from 0 to numCourses - 1. You are given an array prerequisites where prerequisites[i] = [a_i, b_i] indicates that you must take course b_i first if you want to take course a_i.

> For example, the pair [0, 1], indicates that to take course 0 you have to first take course 1.

Return the ordering of courses you should take to finish all courses. If there are many valid answers, return any of them. If it is impossible to finish all courses, return an empty array.
`;

export const usecases = '';

export const example = ``;
<file_sep>export const title = 'Sqrt(x)';

export const formula = `function mySqrt(x: number): number {
    let [left, right, answer] = [0, x, -1];
    while (left <= right) {
        const mid = left + Math.floor((right - left) / 2);
        if (mid * mid <= x) {
            answer = mid;
            left = mid + 1;
        } else {
            right = mid - 1;
        }
    }
    return answer;
};`;

export const description = `
Given a non-negative integer **x**, return the ***square root of*** **x** ***rounded down to the nearest integer***. The returned integer should be non-negative as well.

You must **not** use any built-in exponent function or operator.

> For example, do not use ***pow(x, 0.5)*** in c++ or ***x ** 0.5*** in python.
`;

export const usecases = '';

export const example = `

### Example 1

- Input: x = 4
- Output: 2
- Explanation: The square root of 4 is 2, so we return 2.

### Example 2

- Input: x = 8
- Output: 2
- Explanation: The square root of 8 is 2.82842..., and since we round it down to the nearest integer, 2 is returned.
`; <file_sep>export interface Step { adjacency: Map<number, number[]>; visited: number[]; current?: number; canFinish?: boolean; } const calculateNumCourses = (prerequisites: number[][]): number => { let numCourses: number = prerequisites .map(array => Math.max(...array)) .reduce((prev, curr) => Math.max(prev, curr), 0); numCourses += 1; return numCourses; } const copyMap = (map: Map<number, number[]>): Map<number, number[]> => { const result: Map<number, number[]> = new Map(); Array.from(map.entries()).forEach(entry => { const [key, values] = entry; result.set(key, values); }); return result; } export function canFinish(prerequisites: number[][]): Step[] { let numCourses: number = calculateNumCourses(prerequisites); const steps: Step[] = []; const adjacency: Map<number, number[]> = new Map(); for (let i = 0; i < numCourses; i++) { adjacency.set(i, []); } prerequisites.forEach(prerequisite => { const [a, b] = prerequisite; adjacency.get(b)?.push(a); }); const visited: Set<number> = new Set(); const dfs = (current: number): boolean => { const returnEarly: boolean = visited.has(current) || adjacency.get(current)!.length === 0; if (returnEarly) { // only add new step if return-early to avoid add same node multple times. steps.push({ visited: Array.from(visited), current, adjacency: copyMap(adjacency) }); } if (visited.has(current)) { return false; } if (adjacency.get(current)!.length === 0) { return true; } visited.add(current); steps.push({ visited: Array.from(visited), current, adjacency: copyMap(adjacency) }); const children = adjacency.get(current)!; for (let i = 0; i < children.length; i++) { const child = children[i]; if (!dfs(child)) { return false; } } visited.delete(current); adjacency.set(current, []); steps.push({ visited: Array.from(visited), current, adjacency: copyMap(adjacency) }); return true; } for (let i = 0; i < numCourses; i++) { if (!dfs(i)) { steps.push({ visited: Array.from(visited), current: i, canFinish: false, adjacency: copyMap(adjacency) }); return steps; } } steps.push({ visited: Array.from(visited), canFinish: true, adjacency: copyMap(adjacency) }); return steps; }; <file_sep>import { RodCuttingItem } from './RodCuttingItem'; interface TableSize { rows: number; cols: number; } const getTableSize = (items: RodCuttingItem[], totalLength: number): TableSize => { const rows = items.length + 1; const cols = totalLength + 1; return { rows, cols }; }; const createDPTable = (items: RodCuttingItem[], totalLength: number): number[][] => { const { rows, cols } = getTableSize(items, totalLength); const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); for (let row = 1; row < rows; row++) { const item = items[row - 1]; for (let col = 1; col < cols; col++) { if (item.length > col) { table[row][col] = table[row - 1][col]; } else { table[row][col] = Math.max( table[row - 1][col], table[row][col - item.length] + item.price ); } } } return table; }; export default createDPTable; <file_sep>const createDPTable = (stringOne: string, stringTwo: string): number[][] => { const rows = stringTwo.length + 1; const cols = stringOne.length + 1; const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); for (let col = 0; col < cols; col++) { table[0][col] = col; } for (let row = 0; row < rows; row++) { table[row][0] = row; } for (let row = 1; row < rows; row++) { for (let col = 1; col < cols; col++) { if (stringOne.charAt(col - 1) === stringTwo.charAt(row - 1)) { table[row][col] = table[row - 1][col - 1]; } else { const min = Math.min( table[row - 1][col - 1], 
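/* characters differ: take the cheapest of the three neighbours — substitute (diagonal
   cell) or a single insert/delete (cell above and cell to the left) — then add one edit. */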
table[row - 1][col], table[row][col - 1] ); table[row][col] = min + 1; } } } return table; }; export default createDPTable; <file_sep>import createDpTable from "../algo"; const getLastCell = (table: number[][]): number => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("rod-cutting-problem one", () => { const items = [ { price: 1, length: 1 }, { price: 5, length: 2 }, { price: 8, length: 3 }, { price: 9, length: 4 }, { price: 10, length: 5 }, { price: 17, length: 6 }, { price: 17, length: 7 }, { price: 20, length: 8 }, ]; const totalLength = 8; const table = createDpTable(items, totalLength); const distance = getLastCell(table); expect(distance).toBe(22); }); test("rod-cutting-problem two", () => { const items = [ { price: 3, length: 1 }, { price: 5, length: 2 }, { price: 8, length: 3 }, { price: 9, length: 4 }, { price: 10, length: 5 }, { price: 17, length: 6 }, { price: 17, length: 7 }, { price: 20, length: 8 }, ]; const totalLength = 8; const table = createDpTable(items, totalLength); const distance = getLastCell(table); expect(distance).toBe(24); }); <file_sep>// union find set has another name: disjoint set class DisjointSetNode { value: number; rank: number; parent: DisjointSetNode; constructor(value: number) { this.value = value; this.rank = 0; this.parent = this; } } export class DisjointSet { readonly map: Map<number, DisjointSetNode>; constructor() { this.map = new Map(); } union(a: number, b: number) { const rootA = this.find(a); const rootB = this.find(b); if (rootA === rootB) { return; } if (rootA.rank === rootB.rank) { rootA.rank += 1; rootB.parent = rootA; } else if (rootA.rank > rootB.rank) { rootB.parent = rootA; } else { rootA.parent = rootB; } } find(value: number): DisjointSetNode { const node = this.getNode(value); return this.findRoot(node); } private findRoot(node: DisjointSetNode): DisjointSetNode { if (node.parent === node) { return node; } node.parent = this.findRoot(node.parent); return node.parent; } private getNode(value: number): DisjointSetNode { if (!this.map.has(value)) { this.map.set(value, new DisjointSetNode(value)); } return this.map.get(value)!; } } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from 'three/examples/jsm/geometries/TextGeometry'; import Color from '../../params/color.interface'; import ColorImpl from '../../three/color.class'; import Display from '../../params/displayer.interface'; import DisplayImpl from "../../three/displayer.class"; import Mover from '../../params/mover.interface'; import MoveImpl from "../../three/mover.class"; import Position from '../../params/position.interface'; import PositionImpl from "../../three/position.class" import { TextSphere as ITextSphere } from "../text-sphere.interface"; import Sphere from './sphere'; import { calDestination } from '../../utils'; export default class TextSphere<T> extends Sphere implements ITextSphere<T> { private _value: T; private textDisplay: Display; private textMover: Mover; textColor: Color; textPosition: Position; private textGeometryParameters: TextGeometryParameters; private textMaterial: THREE.Material; private scene: THREE.Scene; constructor( value: T, sphereGeometry: THREE.SphereGeometry, sphereMaterial: THREE.Material, textMaterial: THREE.Material, textGeometryParameters: TextGeometryParameters, scene: THREE.Scene ) { super(sphereGeometry, sphereMaterial, scene); this._value = value; this.textGeometryParameters = textGeometryParameters; this.textMaterial = textMaterial; this.scene = scene; 
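        // build the text mesh that labels the sphere, then wrap it with position/move/display/color helpers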
const textGeometry = new TextGeometry(value + '', this.textGeometryParameters); const textMesh = new THREE.Mesh(textGeometry, this.textMaterial); this.textPosition = new PositionImpl(textMesh); this.textMover = new MoveImpl(textMesh); this.textDisplay = new DisplayImpl(this.scene, textMesh); this.textColor = new ColorImpl(this.textMaterial); } get value(): T { return this._value; } set value(t: T) { const { x, y, z } = this.textPosition; this.textDisplay.hide(); this.createTextMesh(t); this.textPosition.x = x; this.textPosition.y = y; this.textPosition.z = z; this._value = t; this.textDisplay.show(); } private createTextMesh(value: T) { const textGeometry = new TextGeometry(value + '', this.textGeometryParameters); const textMesh = new THREE.Mesh(textGeometry, this.textMaterial); this.textPosition = new PositionImpl(textMesh); this.textMover = new MoveImpl(textMesh); this.textDisplay = new DisplayImpl(this.scene, textMesh); this.textColor = new ColorImpl(this.textMaterial); } async move(position: Position, duration: number, onUpdate?: () => void) { const distance = super.distance(position); const textEndPosition = calDestination(this.textPosition, distance); const sphereMove = super.move(position, duration, onUpdate); const textMove = this.textMover.move(textEndPosition, duration, onUpdate); return Promise.all([sphereMove, textMove]).then(() => { }); } show() { super.show(); this.textDisplay.show(); } hide() { super.hide(); this.textDisplay.hide(); } } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import { buildBinaryTree } from "../../../data-structures/tree/nodes/v1/binary-tree-builder"; export enum SolutionType { BFS, DFS } export const enabledSphereColor = "lightgreen"; export const normalSphereColor = "yellow"; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: normalSphereColor, opacity: 0.4, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; const lineMaterial = new THREE.LineBasicMaterial({ color: "gold" }); export const duration = 0.5; export const buildTree = (array: (string | null)[], scene: THREE.Scene) => { const center = { x: 0, y: 5, z: 0 }; const show = true; const duration = 0; const yDistance = 3; const xAxisAlpha = 2; // expend the tree size in xAxis. 
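    // lay out the tree from the level-order array: nodes start from `center`, levels are yDistance apart, and xAxisAlpha widens the spread along the x axis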
return buildBinaryTree<string>( sphereGeometry, sphereMaterial, textMaterial, textGeometryParameters, lineMaterial, scene, duration, center, yDistance, xAxisAlpha, array, show ); } const indexTextMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "orange" }); const indexUpperTextGeometryParameters: TextGeometryParameters = { font, size: 0.5, height: 0.1 }; export const buildThreeText = (text: string, x: number, y: number, z: number): THREE.Mesh => { const textGeometry = new TextGeometry(text, indexUpperTextGeometryParameters); const mesh = new THREE.Mesh(textGeometry, indexTextMaterial); mesh.position.set(x, y, z); return mesh; } export const minShellSize = 5; export const text = { size: 0.5, height: 0.1 }; export const nodeSize = { width: 1, height: 1, depth: 1 }; export const shellMterial = new THREE.MeshBasicMaterial({ color: "lightgrey", opacity: 0.3, transparent: true }); export const queueNamePosition = new THREE.Vector3(-2.5, 2.9, 10); export const queuePosition = new THREE.Vector3(-2, 4, 10); export const queueNameStyles = { color: "lightgreen", size: 0.4, height: 0.1 }; const colors = ["gold", "orange", "pink", "purple", "red"]; export const getNodeColor = (level: number) => { const colorIndex = level % colors.length; return colors[colorIndex]; } <file_sep>import * as THREE from "three"; import { TextGeometry, TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import BaseNode, { PlaneParameters, TextParameters } from "../commons/node"; import Arrow from "./arrow"; export default class Node<T> extends BaseNode<T> { key: number; arrow?: Arrow; private indexGeometry: TextGeometry index: THREE.Mesh; constructor( key: number, data: T, display: string, scene: THREE.Scene, planeParameters: PlaneParameters, textParameters: TextParameters, ) { super(data, display, scene, planeParameters, textParameters) this.key = key; const { x, y, z } = planeParameters.position; const { width, height } = planeParameters; const indexGeometryParameters: TextGeometryParameters = { ...textParameters.textGeometryParameters, size: 1.2 } this.indexGeometry = new TextGeometry(key + "", indexGeometryParameters); this.index = new THREE.Mesh(this.indexGeometry, textParameters.material); this.index.position .setX(x - width / 2 + 0.3) .setY(y + height / 2 - 1.5) .setZ(z); if (key > 0) { scene.add(this.index); } } removeFromScene() { super.removeFromScene(); this.scene.remove(this.index); } get leftConnectPosition(): THREE.Vector3 { const { width } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x - width / 2, y, z); } get rightConnectPosition(): THREE.Vector3 { const { width } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x + width / 2, y, z); } get nextPlanePosition(): THREE.Vector3 { const { width } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x + width, y, z); } get previousPlanePosition(): THREE.Vector3 { const { width } = this.planeGeometry.parameters; const { x, y, z } = this.plane.position; return new THREE.Vector3(x - width, y, z); } get nextTextPosition(): THREE.Vector3 { const { width } = this.planeGeometry.parameters; const { x, y, z } = this.text.position; return new THREE.Vector3(x + width, y, z); } get previousTextPosition(): THREE.Vector3 { const { width } = this.planeGeometry.parameters; const { x, y, z } = this.text.position; return new THREE.Vector3(x - width, y, z); } get nextIndexPosition(): THREE.Vector3 
{
        const { width } = this.planeGeometry.parameters;
        const { x, y, z } = this.index.position;
        return new THREE.Vector3(x + width, y, z);
    }

    get previousIndexPosition(): THREE.Vector3 {
        const { width } = this.planeGeometry.parameters;
        const { x, y, z } = this.index.position;
        return new THREE.Vector3(x - width, y, z);
    }
}
<file_sep># 🎒 Knapsack Problem

Today we discuss the knapsack problem. First, what is the knapsack problem? It can be described as follows: given N items and a knapsack with a limited maximum weight capacity, where every item has its own weight (w) and value (v), choose the most suitable items to place into the knapsack so that the total weight of the items in the knapsack does not exceed the knapsack's maximum capacity while the total value of the items in the knapsack is as high as possible.

PS: There are many kinds of knapsack problems. The one covered here is the 0/1 knapsack problem, which means each item can be chosen either 0 times or 1 time. An item can neither be selected more than once nor be split into parts.

## Transition Equation:

```javascript
if (itemWeight > knapsackWeight) {
    table[row][col] = table[row - 1][col]
} else {
    table[row][col] = Math.max(
        table[row - 1][col],
        table[row - 1][knapsackWeight - itemWeight] + itemValue
    )
}
```

or

```javascript
if (item.weight > knapsack.weight) {
    table[row][col] = getPreviousValue(row, col)
} else {
    table[row][col] = Math.max(
        getPreviousValue(row, col),
        getUpdatedValue(row, knapsack, item)
    )
}

const getPreviousValue = (row, col) => {
    return table[row - 1][col]
}

const getUpdatedValue = (row, knapsack, item) => {
    return table[row - 1][knapsack.weight - item.weight] + item.value
}
```

The idea behind the calculation / transition equation is:

- When the item's weight is greater than the current knapsack's maximum capacity, the item cannot be placed into the knapsack, so we simply copy the total value the knapsack already had.
- The else branch means: when the item's weight is less than or equal to the current knapsack's maximum capacity, we update the knapsack's total value with the larger of two candidates. The first is the total value of the knapsack before this item is put in; the second is the total value obtained by using the current item to replace items of the same weight already in the knapsack. The larger of the two becomes the new total value of the items in the knapsack.

## Go Through an Example:

In case the formula or the explanation above is not entirely clear, let's go through an example to understand in detail how the knapsack problem is computed.

First we have 4 items, whose weights and values are:

In this example we have 4 items, and the knapsack's maximum capacity is 6. To make the calculation easier we add two lines of 0s, and they also have their own meaning:

- The vertical line of 0s means: when the knapsack's maximum capacity is 0, it cannot hold any item, so the total value of the items in the knapsack can only be 0.
- The horizontal line of 0s means: when an item whose weight is 0 and whose value is also 0 is put into the knapsack, the total value of the items in the knapsack is still 0.

Then we start from item 1.

keywords:

- the total value of the items already in the knapsack
<file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node";

export enum Direction {
    Left,
    Right,
    Back
}

export interface Step {
    node: TreeNode<number>;
    isRightSide: boolean;
    result: number[];
}

export function buildSteps(root?: TreeNode<number>): Step[] {
    const steps: Step[] = [];

    function rightSideView(root?: TreeNode<number>): number[] {
        const result: number[] = [];

        function dfs(node: TreeNode<number> | undefined, depth: number) {
            if (!node) {
                return;
            }
            let isRightSide = false;
            if (result.length === depth) {
                result.push(node.val.value);
                isRightSide = true;
            }
            steps.push({ node, isRightSide, result: [...result] });
            dfs(node.right, depth + 1);
            dfs(node.left, depth + 1);
        }

        dfs(root, 0);
        return result;
    };

    rightSideView(root);
    return steps;
}
<file_sep>import * as THREE from 'three';
import { TextGeometry, TextGeometryParameters } from 'three/examples/jsm/geometries/TextGeometry';
import { font } from '../../../commons/three';
import { StackNameStyles } from "./stackStyles";

class StackName {

    constructor(name: string, position: THREE.Vector3, scene: THREE.Scene) {
        const { color, size, height } = StackNameStyles;
        const mesh = this.buildMesh(name, color, size, height);
        this.setPosition(mesh, position);
        this.show(mesh, scene);
    }

    private buildMesh(
        name: string,
        color: string,
        size: number,
        height: number
    ) {
        const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color });
        const textGeometryParameters: TextGeometryParameters = { font, size, height };
        const textGeometry = new TextGeometry(name, textGeometryParameters);
        return new THREE.Mesh(textGeometry, textMaterial);
    }

    private setPosition(mesh: THREE.Mesh, { x, y, z }: THREE.Vector3) {
        mesh.position.setX(x);
        mesh.position.setY(y);
        mesh.position.setZ(z);
    }

    private show(mesh: THREE.Mesh, scene:
THREE.Scene) { scene.add(mesh); } } export default StackName; <file_sep>import * as THREE from 'three'; const createRenderer = () => { const renderer = new THREE.WebGLRenderer({ alpha: true, antialias: true, }); renderer.setSize(window.innerWidth, window.innerHeight); return renderer; } const createCamera = () => { const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000); camera.position.z = 5; return camera; } const createScene = () => { return new THREE.Scene(); } const clearScene = (scene: THREE.Scene) => { while (scene.children.length > 0) { scene.remove(scene.children[0]); } } function onWindowResize(renderer: THREE.Renderer, camera: THREE.PerspectiveCamera) { camera.aspect = window.innerWidth / window.innerHeight; camera.updateProjectionMatrix(); renderer.setSize(window.innerWidth, window.innerHeight); } export { createRenderer, createCamera, createScene, clearScene, onWindowResize }; <file_sep>export const title = "Reverse Integer"; export const formula1 = `// Solution 1 function reverse(x: number): number { const max = Math.pow(2, 31) - 1; // 2147483647 const min = Math.pow(-2, 31); // -2147483648 let reversed = 0; while (x !== 0) { const digit = x % 10; if (reversed > ~~(max / 10) || (reversed === ~~(max / 10) && digit > max % 10)) { return 0; } if (reversed < ~~(min / 10) || (reversed === ~~(min / 10) && digit < min % 10)) { return 0; } reversed = reversed * 10 + digit; x = ~~(x / 10); } return reversed; };`; export const formula2 = `// Solution 2 function reverse(x: number): number { const max = Math.pow(2, 31) - 1; // 2147483647 const min = Math.pow(-2, 31); // -2147483648 let reversed = 0; while (x !== 0) { if (reversed > ~~(max / 10)) { return 0; } if (reversed < ~~(min / 10)) { return 0; } const digit = x % 10; reversed = reversed * 10 + digit; x = ~~(x / 10); } return reversed; };`; export const formula3 = `function reverse(x: number): number { const max = Math.pow(2, 31) - 1; const min = Math.pow(-2, 31); let reversed = 0; while (x !== 0) { if (reversed > ~~(max / 10) || reversed < ~~(min / 10)) { return 0; } reversed = reversed * 10 + x % 10; x = ~~(x / 10); } return reversed; };`; export const description = `#### Description Given a signed 32-bit integer **x**, return **x** with its digits reversed. If reversing **x** causes the value to go outside the signed 32-bit integer range [-231, 231 - 1], then return **0**. Assume the environment does not allow you to store 64-bit integers (signed or unsigned). 
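
#### Why the solutions compare against max / 10 and min / 10

The signed 32-bit bounds are 2147483647 and -2147483648, so their integer quotients by 10 are 214748364 and -214748364. Before appending the next digit, the solutions shown here check whether the partially reversed number already lies beyond these quotients; if it does, appending one more digit would overflow the 32-bit range, so 0 is returned (the first solution additionally compares the final digit when the quotient is matched exactly).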
`; export const solution = ``; export const usecases = ''; export const examples = ` --- #### Example 1: - Input: x = 123 - Output: 321 #### Example 2: - Input: x = -123 - Output: -321 #### Example 3: - Input: x = 120 - Output: 21 `; <file_sep>import * as THREE from "three"; export const init = (width: number, height: number) => { const camera = new THREE.PerspectiveCamera(75, width / height, 0.01, 1000); camera.position.x = 3; camera.position.y = 3; camera.position.z = 20; const renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true }); renderer.setSize(width, height); return { renderer, camera }; }; export const getContentCoordinates = (content: string) => { switch (content.length) { case 1: return { x: -0.2, y: 0.7 }; case 2: return { x: -0.25, y: 0.7 }; case 3: return { x: -0.35, y: 0.7 }; default: return { x: -0.5, y: 0.7 }; } }; export const clearScene = (scene: THREE.Scene): void => { while (scene.children.length > 0) { scene.remove(scene.children[0]); } }; export const getWidth = () => { return window.innerWidth / 2.02; }; export const getHeight = () => { return window.innerHeight / 5 * 4; }; export const createPlane = (): THREE.Mesh => { const planeGeometry = new THREE.PlaneGeometry(getWidth() / 2, getHeight() / 2, 64, 48); const planeMaterial = new THREE.MeshBasicMaterial({ color: "black", transparent: true, opacity: 0.1, wireframe: true, side: THREE.DoubleSide, }); const plane = new THREE.Mesh(planeGeometry, planeMaterial); plane.position.x = 1; plane.position.y = -15; plane.rotateX(- Math.PI / 2); return plane; }; export const createLight = (): THREE.Light => { return new THREE.SpotLight("black", 2); }; export const resize = (renderer: THREE.Renderer, camera: THREE.PerspectiveCamera): void => { const width = getWidth(); const height = getHeight(); camera.aspect = width / height; camera.updateProjectionMatrix(); renderer.setSize(width, height); }; export const textFont = { size: 0.5, height: 0.02, }; <file_sep>export interface Step { left: number; right: number; mid: number; } export const isBadVersion = (num: number, n: number, bad: number): boolean => num >= bad && num <= n; export const solution = function (n: number, bad: number): Step[] { const steps: Step[] = []; let [left, right] = [1, n]; while (left < right) { const mid = left + Math.floor((right - left) / 2); steps.push({ left, right, mid }); if (isBadVersion(mid, n, bad)) { right = mid; } else { left = mid + 1; } } return steps; }; <file_sep>export enum State { Typing, Standby, Ready, Playing, Finished } <file_sep>import * as THREE from "three"; interface Container { position: THREE.Vector3; payload: number; } export default Container; <file_sep>import * as THREE from 'three'; import Position from '../../_commons/params/position.interface'; class Line { private scene: THREE.Scene; private instance: THREE.Line; private _start: Position; private _end: Position; constructor( start: Position, end: Position, material: THREE.LineBasicMaterial, scene: THREE.Scene ) { const geometry = new THREE.BufferGeometry().setFromPoints([ this.buildThreePosition(start), this.buildThreePosition(end) ]); this.instance = new THREE.Line(geometry, material); this.scene = scene; this._start = start; this._end = end; } get start(): Position { return this._start; } get end(): Position { return this._end; } set start(position: Position) { this._start = position; this.update(position, 0, 1, 2); } set end(position: Position) { this._end = position; this.update(position, 3, 4, 5); } show() { this.scene.add(this.instance); return this; } 
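    // detach the underlying THREE.Line from the scene; the geometry is kept, so the line can be shown again later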
hide() { this.scene.remove(this.instance); return this; } private update(position: Position, xIndex: number, yIndex: number, zIndex: number) { this.instance.geometry.attributes.position.needsUpdate = true; const positions = this.instance.geometry.attributes.position.array; const { x, y, z } = position; (positions[xIndex] as any) = x; (positions[yIndex] as any) = y; (positions[zIndex] as any) = z; } private buildThreePosition({ x, y, z }: Position): THREE.Vector3 { return new THREE.Vector3(x, y, z); } } export default Line; <file_sep>export const title = 'Search Insert Position'; export const formula = `function searchInsert(nums: number[], target: number): number { let [left, right] = [0, nums.length - 1]; while (left <= right) { const mid = left + Math.floor((right - left) / 2); if (nums[mid] === target) { return mid; } if (nums[mid] < target) { left = mid + 1; } else { right = mid - 1; } } return left; };`; export const description = ` Given a **sorted** array of distinct integers and a target value, return the index if the target is found. If not, return the index where it would be if it were inserted in order. You must write an algorithm with ***O(log n)*** runtime complexity. `; export const usecases = ''; export const example = ` ### Example 1: - Input: nums = [1,3,5,6], target = 5 - Output: 2 ### Example 2: - Input: nums = [1,3,5,6], target = 2 - Output: 1 ### Example 3: - Input: nums = [1,3,5,6], target = 7 - Output: 4 `; <file_sep>export const MaxHeap = `class MaxHeap { private heights: number[]; private lazyDelete: Map<number, number>; constructor() { this.heights = []; this.lazyDelete = new Map(); } peek() { this.prune(); return this.heights[0]; } push(height: number) { this.heights.push(height); this.bubbleUp(this.heights.length - 1); } delete(height: number) { const top = this.heights[0]; if (top !== undefined && top === height) { this.pop(); } else { const count = this.lazyDelete.get(height) || 0; this.lazyDelete.set(height, count + 1); } } private pop() { const root = this.heights.shift(); const last = this.heights.pop(); if (last !== undefined) { this.heights.unshift(last); this.bubbleDown(0); } return root; } private prune() { let top = this.heights[0]; while (top !== undefined && this.lazyDelete.has(top)) { this.pop(); const count = this.lazyDelete.get(top) || 0; if (count === 1) { this.lazyDelete.delete(top); } else { this.lazyDelete.set(top, count - 1); } top = this.heights[0]; } } private bubbleUp(index: number) { if (index < 1) { return; } const parentIndex = Math.floor((index - 1) / 2); if (this.heights[index] > this.heights[parentIndex]) { this.swap(index, parentIndex); this.bubbleUp(parentIndex); } } private bubbleDown(index: number) { let target = index; const leftChildIndex = 2 * index + 1; if (this.shouldBubbleDown(target, leftChildIndex)) { target = leftChildIndex; } const rightChildIndex = 2 * index + 2; if (this.shouldBubbleDown(target, rightChildIndex)) { target = rightChildIndex; } if (target !== index) { this.swap(index, target); this.bubbleDown(target); } } private shouldBubbleDown(index: number, childIndex: number): boolean { const current = this.heights[index]; const child = this.heights[childIndex]; return (current === undefined || child === undefined) ? 
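            // if either value is missing there is nothing to compare, so do not bubble down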
false : current < child; } private swap(i: number, j: number) { [this.heights[i], this.heights[j]] = [this.heights[j], this.heights[i]]; } }`; export const getSkyline = `enum Edge { Start, End } type Item = { x: number; height: number; edge: Edge; } function getSkyline(buildings: number[][]): number[][] { const items: Item[] = []; buildings.forEach(building => { const [start, end, height] = building; items.push({ x: start, height, edge: Edge.Start }); items.push({ x: end, height, edge: Edge.End }); }); const compareFn = (a: Item, b: Item): number => { if (a.x !== b.x) { return a.x - b.x; } // if two starts are compared then higher building should be picked first if (a.edge === Edge.Start && b.edge === Edge.Start) { return b.height - a.height; } // if two ends are compared then lower building should be picked first if (a.edge === Edge.End && b.edge === Edge.End) { return a.height - b.height; } // if one start and one end are compared then start should be picked first return (a.edge === Edge.Start) ? -1 : 1; }; items.sort(compareFn); let prevMaxHeight = 0; const result: number[][] = []; const heap = new MaxHeap(); items.forEach(item => { const { x, height, edge } = item if (edge === Edge.Start) { heap.push(height); } else { heap.delete(height); } const peek = heap.peek() || 0; if (prevMaxHeight !== peek) { result.push([x, peek]); prevMaxHeight = peek; } }); return result; };`; <file_sep>import * as THREE from 'three'; import { TextGeometryParameters } from "three/examples/jsm/geometries/TextGeometry"; import { font } from '../../../commons/three'; import Position from '../../../data-structures/_commons/params/position.interface'; import { build } from '../../../data-structures/tree/nodes/v1/tree-node-builder'; const sphereGeometry: THREE.SphereGeometry = new THREE.SphereGeometry(1, 32, 16); const sphereMaterial = (): THREE.Material => { return new THREE.MeshBasicMaterial({ color: "yellow", opacity: 0.6, transparent: true }); } const textMaterial: THREE.Material = new THREE.MeshBasicMaterial({ color: "green" }); const textGeometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; export const yDistance = -2.5; export const xCenter = -5; export const lineMaterial = new THREE.LineBasicMaterial({ color: "gold" }); export const buildTreeNode = (value: number, scene: THREE.Scene, center: Position) => { return build<number>( 0, sphereGeometry, sphereMaterial(), textMaterial, textGeometryParameters, value, scene, center ); } <file_sep>import { Guiders } from "./algo"; import { Point } from "../../dp/_commons/point"; interface NumsTableParams { current: Point; data: number[]; success: boolean; guiders: Guiders; } export default NumsTableParams; <file_sep>export const title = "Longest Palindromic Subsequence"; export const formula = ` public int longestPalindromeSubseq(String s) { int table[][] = new int[s.length()][s.length()]; // length === 1; for (int i = 0; i < table.length; i++) { table[i][i] = 1; } // length === 2; for (int i = 0; i < table.length - 1; i += 1) { if (s.charAt(i) == s.charAt(i + 1)) { table[i][i + 1] = 2; } else { table[i][i + 1] = 1; } } // length > 2; for (int len = 3; len <= table.length; len += 1) { for (int i = 0; i + len <= table.length; i += 1) { char front = s.charAt(i); char end = s.charAt(i + len - 1); if (front == end) { table[i][i + len - 1] = 2 + table[i + 1][i + len - 2]; } else { table[i][i + len - 1] = Math.max(table[i][i + len - 2], table[i + 1][i + len - 1]); } } } return table[0][table[0].length-1]; } `; export const description = `Given a 
string find the longest palindromic subsequence's length.`; export const usecases = ''; export const example = ` Example: 1 - Input: "abcdbab" - Output: 5 // "abcba" - PS: Result of Longest palindromic string: 3 // bab Example: 2 - Input: "abdca" - Output: 3 // "aba" - PS: Result of Longest palindromic string: 1 // a `; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<any>; depth?: number; direction?: Direction; } export function buildSteps<T>(root?: TreeNode<T>): Step[] { const steps: Step[] = []; function minDepth<T>(node?: TreeNode<T>, direction?: Direction, depth?: number): number { if (node === undefined) { return 0; } steps.push({ node, direction, depth }); const left = minDepth(node.left, Direction.Left, depth); const right = minDepth(node.right, Direction.Right, depth); if (node.left === undefined) { steps.push({ node, direction: Direction.Back, depth: right + 1 }); return right + 1; } if (node.right === undefined) { steps.push({ node, direction: Direction.Back, depth: left + 1 }); return left + 1; } const min = Math.min(left, right) + 1; steps.push({ node, direction: Direction.Back, depth: min }); return min; }; minDepth(root); return steps; } <file_sep>import { title } from "./contents"; import { Game } from "../../commons/game"; import Category from "../../commons/segments/category"; import Difficulty from "../../commons/segments/difficulty"; const info: Game = { name: title, path: "/algorithms/basic-calculator", categories: [Category.Stack], companies: [], difficulty: Difficulty.Hard, img: "/img/basic-calculator.png" } export default info; <file_sep>import Graphology from "graphology"; import forceAtlas2 from 'graphology-layout-forceatlas2'; import { Graph } from "."; export function buildGraphology<T>(graph: Graph<T>): Graphology { const graphology = new Graphology(); graph.nodes.forEach(node => { graphology.addNode(node.id); }); graph.edges.forEach(edge => { if (!graphology.hasEdge(edge.source.id, edge.target.id)) { graphology.addEdge(edge.source.id, edge.target.id); } }); return graphology; } export function forceAtlas2Layout<T>(graph: Graph<T>) { const graphology = buildGraphology(graph); graphology.forEachNode((_, attributes) => { attributes.x = Math.random() * 10; // Set initial x position attributes.y = Math.random() * 10; // Set initial y position }); const sensibleSettings = forceAtlas2.inferSettings(graphology); return forceAtlas2(graphology, { iterations: 50, settings: sensibleSettings }); } <file_sep>import * as THREE from "three"; import gsap from "gsap"; import { PlaneParameters, TextParameters } from "../commons/node"; import { ArrowStyles, NodeStyles } from "../commons/styles"; import { createPlaneParameters, createTextParameters } from "../commons/helpers"; import Node from "./node"; export default class DoublyLinkedList<T> { private head: Node<T>; private tail: Node<T>; scene: THREE.Scene; margin: number; arrowStyles: ArrowStyles; nodeStyles: NodeStyles; private planeMaterial: THREE.Material; private textMaterial: THREE.Material; private render: Function; duration: number; constructor( scene: THREE.Scene, margin: number, start: THREE.Vector3, arrowStyles: ArrowStyles, nodeStyles: NodeStyles, render: Function, duration: number ) { this.render = render; this.duration = duration; this.scene = scene; this.margin = margin; this.arrowStyles = arrowStyles; this.nodeStyles = nodeStyles; this.planeMaterial = new THREE.MeshBasicMaterial({ color: 
this.nodeStyles.color, side: THREE.DoubleSide }); this.textMaterial = new THREE.MeshBasicMaterial({ color: this.nodeStyles.textColor }); this.head = this.createHead(start); this.tail = this.createEnd(start); this.head.next = this.tail; this.tail.previous = this.head; } async insertFirst(data: T, display: string, address: number, key: number): Promise<Node<T>> { this.getTails(this.head).forEach(item => { const onUpdate = () => { if (item.previous === this.head) { this.head.update(); } item.update(); this.render(); }; const nextPlanePosition = this.calculateNextPosistion(item.plane.position); const nextTextPosition = this.calculateNextPosistion(item.text.position); const nextAddressPosition = this.calculateNextPosistion(item.address.position); gsap.to(item.plane.position, { ...nextPlanePosition, duration: this.duration, onUpdate }); gsap.to(item.text.position, { ...nextTextPosition, duration: this.duration }); gsap.to(item.address.position, { ...nextAddressPosition, duration: this.duration }); }); return new Promise(resolve => { setTimeout(() => { const nextPosition = this.calculateNextPosistion(this.head.plane.position); const newNode = this.createNode(key, data, display, address, nextPosition); this.head.append(newNode); this.render(); resolve(newNode); }, this.duration * 1000) }); } async deleteLast(): Promise<Node<T> | undefined> { const target = this.tail.previous; if (!target) { return; } if (target === this.head) { return; } target.delete(); this.render(); this.getTails(target).forEach(item => { const onUpdate = () => { if (item.previous) { item.previous.update(); } item.update(); this.render(); }; const nextPlanePosition = this.calculatePreviousPosistion(item.plane.position); const nextTextPosition = this.calculatePreviousPosistion(item.text.position); const nextAddressPosition = this.calculatePreviousPosistion(item.address.position); gsap.to(item.plane.position, { ...nextPlanePosition, duration: this.duration, onUpdate }); gsap.to(item.text.position, { ...nextTextPosition, duration: this.duration }); gsap.to(item.address.position, { ...nextAddressPosition, duration: this.duration }); }); return new Promise(resolve => setTimeout(() => resolve(target), this.duration * 1000)); } async moveToHead(node: Node<T>) { if (node === this.head || node === this.tail || node.previous === this.head) { return; } const next = node.next; const heads = this.getHeads(node); heads.pop(); // remove this.head from the head list node.delete(); if (!node.isInScene()) { node.addToScene(); } this.render(); const tempHeight = this.nodeStyles.height; const tempPlanePosition = this.calculateNextPosistion(this.head.plane.position); tempPlanePosition.setY(tempPlanePosition.y + tempHeight); const tempTextPosition = this.calculateNextPosistion(this.head.text.position); tempTextPosition.setY(tempTextPosition.y + tempHeight); const nextAddressPosition = this.calculateNextPosistion(this.head.address.position); nextAddressPosition.setY(nextAddressPosition.y + tempHeight); // move node to the top of destination gsap.to(node.text.position, { ...tempTextPosition, duration: this.duration }); gsap.to(node.address.position, { ...nextAddressPosition, duration: this.duration }); gsap.to(node.plane.position, { ...tempPlanePosition, duration: this.duration, onUpdate: () => this.render() }); // move previous nodes to back to give the space for the node. 
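        // every node between the moved node's old slot and HEAD slides one slot toward the tail, re-rendering its arrows while it moves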
heads.forEach(item => { const onUpdate = () => { if (item.previous === this.head) { this.head.update(); } if (next && item.next === next) { item.next.update(); } item.update(); this.render(); }; const nextPlanePosition = this.calculateNextPosistion(item.plane.position); const nextTextPosition = this.calculateNextPosistion(item.text.position); const nextAddressPosition = this.calculateNextPosistion(item.address.position); gsap.to(item.address.position, { ...nextAddressPosition, duration: this.duration }); gsap.to(item.text.position, { ...nextTextPosition, duration: this.duration }); gsap.to(item.plane.position, { ...nextPlanePosition, duration: this.duration, onUpdate }); }); // move node next to this.head return new Promise(resolve => { setTimeout(() => { this.head.append(node); this.render(); const onUpdate = () => { if (node.previous) { node.previous.update(); } if (node.next) { node.next.update(); } node.update(); this.render(); }; const onComplete = () => { resolve(0); }; const nextPlanePosition = this.calculateNextPosistion(this.head.plane.position); const nextTextPosition = this.calculateNextPosistion(this.head.text.position); const nextAddressPosition = this.calculateNextPosistion(this.head.address.position); gsap.to(node.address.position, { ...nextAddressPosition, duration: this.duration }); gsap.to(node.text.position, { ...nextTextPosition, duration: this.duration }); gsap.to(node.plane.position, { ...nextPlanePosition, duration: this.duration, onUpdate, onComplete }); }, this.duration * 1000); }); } private getTails(node: Node<any>): Node<any>[] { const result: Node<any>[] = []; let current = node.next; while (current) { result.push(current); current = current.next; } return result; } private getHeads(node: Node<T>): Node<any>[] { const result: Node<any>[] = []; let current = node.previous; while (current) { result.push(current); current = current.previous; } return result; } private calculatePreviousPosistion(position: THREE.Vector3): THREE.Vector3 { return position.clone().setX(this.calculatePreviousX(position.x)); } private calculatePreviousX(x: number): number { return x - this.nodeStyles.width - this.margin; } private calculateNextPosistion(position: THREE.Vector3): THREE.Vector3 { return position.clone().setX(this.calculateNextX(position.x)); } private calculateNextX(x: number): number { return x + this.nodeStyles.width + this.margin; } private createHead(position: THREE.Vector3): Node<T> { const textPosition = new THREE.Vector3(position.x - 3.6, position.y - 1, position.z); const planeParameters: PlaneParameters = this.createPlaneParameters(position); const textParameters: TextParameters = this.createTextParameters(textPosition); return new Node(-1, "HEAD" as any, "HEAD", 0, this.scene, planeParameters, textParameters, this.arrowStyles, this.nodeStyles.color); } private createEnd(position: THREE.Vector3): Node<T> { const x = position.x + this.nodeStyles.width + this.margin; const nodePosition = position.clone().setX(x); const textPosition = new THREE.Vector3(x - 2.8, position.y - 1, position.z); const planeParameters: PlaneParameters = this.createPlaneParameters(nodePosition); const textParameters: TextParameters = this.createTextParameters(textPosition); return new Node(-1, "TAIL" as any, "TAIL", 1, this.scene, planeParameters, textParameters, this.arrowStyles, this.nodeStyles.color); } private createNode( key: number, data: T, display: string, address: number, nodePosition: THREE.Vector3, ): Node<T> { const textPosition: THREE.Vector3 = new 
THREE.Vector3(this.calX(display, nodePosition.x), nodePosition.y - 1, nodePosition.z); const planeParameters: PlaneParameters = this.createPlaneParameters(nodePosition); const textParameters: TextParameters = this.createTextParameters(textPosition); return new Node(key, data, display, address, this.scene, planeParameters, textParameters, this.arrowStyles, this.nodeStyles.color); } private calX(display: string, x: number): number { switch (display.length) { case 1: return x - 0.8; case 2: return x - 1.4; default: return x - 2.5; } } private createTextParameters(position: THREE.Vector3): TextParameters { return createTextParameters(this.textMaterial, position, this.nodeStyles); } private createPlaneParameters(position: THREE.Vector3): PlaneParameters { return createPlaneParameters(this.planeMaterial, position, this.nodeStyles); } } <file_sep>export const title = "Minimum Jumps To End"; export const formula = ` public int jump(int[] nums) { int[] dp = new int[nums.length]; dp[0] = 0; for (int i = 1; i < dp.length; i++) { dp[i] = nums.length + 1; } for (int i = 1; i < nums.length; i++) { for (int j = 0; j < i; j++) { if (j + nums[j] >= i) { dp[i] = Math.min(dp[i], dp[j] + 1); } } } return dp[dp.length - 1]; } `; export const description = ` Given an array of non negative integers, start from the first element and reach the last by jumping. The jump length can be at most the value at the current position in the array. Optimum result is when you reach the goal in minimum number of jumps.`; export const usecases = ''; export const example = ` ***Input***: \\[2, 3, 1, 1, 4, 1, 2, 3] ***Output***: 3 ***Explanation***: - The minimum number of jumps to reach the last index is 3. - Paths: 2 -> 3 -> 4 -> end. `; <file_sep># Flip Direction <file_sep>export const title = "Sum of Left Leaves"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ // preorder function sumOfLeftLeaves(root: TreeNode | null): number { let sum = 0; function dfs(node: TreeNode | null) { if (node === null) { return null; } if (node.left && isLeafNode(node.left)) { sum += node.left.val; } dfs(node.left); dfs(node.right); } function isLeafNode(node: TreeNode) { return !node.left && !node.right; } dfs(root); return sum; };`; export const formula1 = `// preorder and postorder function sumOfLeftLeaves(root: TreeNode | null): number { function isLeafNode(node: TreeNode) { return !node.left && !node.right; } function dfs(node: TreeNode | null): number { if (node===null) { return 0; } let sum = 0; if (node.left && isLeafNode(node.left)) { sum += node.left.val; } sum += dfs(node.left); sum += dfs(node.right); return sum; } return dfs(root); };` export const description = ` Given the **root** of a binary tree, return the sum of all left leaves. A **leaf** is a node with no children. A **left leaf** is a leaf that is the left child of another node. 
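
For example, in the tree [3, 9, 20, null, null, 15, 7], the left leaves are 9 and 15, so the sum of left leaves is 24.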
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export interface Step { prev?: TreeNode<number>; node: TreeNode<number>; errorOne?: TreeNode<number>; errorTwo?: TreeNode<number>; } export function buildSteps(root?: TreeNode<number>): Step[] { const steps: Step[] = []; let errorOne: TreeNode<number> | undefined = undefined; let errorTwo: TreeNode<number> | undefined = undefined; let prev: TreeNode<number> | undefined = undefined; const inorder = (node?: TreeNode<number>) => { if (!node) { return; } inorder(node.left); if (prev && prev.val.value >= node.val.value) { if (!errorOne) { errorOne = prev; } if (errorOne) { errorTwo = node; } } steps.push({ prev, node, errorOne, errorTwo }) prev = node; inorder(node.right); } inorder(root); return steps; } <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary } from '../_commons/styles'; const startPoint: Point = { row: 1, col: 1, }; interface TableSize { rows: number; cols: number; } const getTableSize = (array: number[]): TableSize => { return { rows: 2, cols: array.length }; }; const createTableMatrix = (array: number[]): number[][] => { const table: number[][] = []; table[0] = array.map(item => item); table[1] = Array(array.length).fill(1); return table; }; const createComparedTable = (array: number[]): number[][] => { return createDPTable(array); }; const addHelperStyles = (styles: React.CSSProperties[][], { row, col }: Point): void => { styles[0][row - 1] = helperStyleSecondary; styles[1][row - 1] = helperStyle; styles[0][col] = helperStyleSecondary; styles[1][col] = helperStyle; }; const createTableStyles = (array: number[]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(array); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint); return table; }; const createButtons = (array: number[]): number[] => { const dpTable = createDPTable(array); const set = new Set<number>(); for (let row = 1; row < dpTable.length; row++) { for (let col = 1; col < dpTable[row].length; col++) { set.add(dpTable[row][col]); } } return Array.from(set).sort(); }; const createButtonsStyles = (array: number[]): (React.CSSProperties)[] => { return createButtons(array).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, }; <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary } from '../_commons/styles'; const startPoint: Point = { row: 0, col: 1, }; interface TableSize { rows: number; cols: number; } const getTableSize = (houses: number[]): TableSize => { const rows = 1; const cols = houses.length; return { rows, cols }; }; const createTableMatrix = (houses: number[]): (number | string)[][] => { const table = []; table[0] = houses.map(house => house); return table; }; const createComparedTable = (houses: number[]): (number | string)[][] => { const table = []; table[0] = createDPTable(houses); return table; }; const addHelperStyles = (styles: React.CSSProperties[][], { row, col }: Point): void => { styles[row][col] = helperStyle; styles[row][col - 1] = helperStyleSecondary; if (col > 1) { styles[row][col - 2] = helperStyle; } }; const createTableStyles = (houses: number[]): (React.CSSProperties)[][] => { const { 
rows, cols } = getTableSize(houses); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint); return table; }; const createButtons = (houses: number[]): number[] => { const dpTable = createDPTable(houses); const set = new Set<number>(); for (let i = 1; i < dpTable.length; i++) { set.add(dpTable[i]); } return Array.from(set).sort((a, b) => a - b); }; const createButtonsStyles = (houses: number[]): (React.CSSProperties)[] => { return createButtons(houses).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, }; <file_sep>import Position from './params/position.interface'; export const wait = (seconds: number): Promise<void> => { return new Promise(resolve => setTimeout(resolve, seconds * 1000)); }; export const calDistance = (from: Position, to: Position): Position => { return { x: to.x - from.x, y: to.y - from.y, z: to.z - from.z }; }; export const calDestination = (from: Position, distance: Position): Position => { return { x: from.x + distance.x, y: from.y + distance.y, z: from.z + distance.z }; } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node: TreeNode<number>; paths: string[]; } function isLeafNode(node: TreeNode<number>) { return !node.left && !node.right; } export function buildSteps(root?: TreeNode<number>): Step[] { const steps: Step[] = []; let paths: string[] = []; function binaryTreePaths(node: TreeNode<number> | undefined, parents: number[]) { if (!node) { return; } const path = [...parents, node.val.value]; if (isLeafNode(node)) { paths.push(path.join("->")); steps.push({ node, paths: [...paths] }); return; } steps.push({ node, paths: [...paths] }); binaryTreePaths(node.left, path); binaryTreePaths(node.right, path); } binaryTreePaths(root, []); return steps; } <file_sep>export const title = "Maximum Depth of Binary Tree"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function maxDepth(root: TreeNode | null): number { if (root === null) { return 0; } const left = maxDepth(root.left); const right = maxDepth(root.right); return Math.max(left, right) + 1; };`; export const description = ` Given the **root** of a binary tree, return its maximum depth. A binary tree's maximum depth is the number of nodes along the longest path from the root node down to the farthest leaf node. 
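
For example, the tree [3, 9, 20, null, null, 15, 7] has a maximum depth of 3, following the path 3 -> 20 -> 15 (or 3 -> 20 -> 7).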
`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>const max: number = Number.MAX_SAFE_INTEGER; const initTable = (array: number[]): number[][] => { const table: number[][] = []; for (let row = 0; row < array.length; row++) { table.push(Array(array.length).fill(max)); } return table; }; const createDPTable = (array: number[]): number[][] => { const table = initTable(array); if (array[0] <= 0) { return table; } table[0][0] = 0; for (let i = 0; i < table.length; i++) { if (i > 0) { table[i] = [...table[i - 1]]; } for (let j = 1; j <= array[i]; j++) { if (i + j >= array.length) { break; } table[i][i + j] = Math.min(table[i][i + j], table[i][i] + 1); } } return table; }; export default createDPTable; <file_sep>import createDPTable from './algo'; import { Point } from "../_commons/point"; import { helperStyle, helperStyleSecondary, helperStyleThird } from '../_commons/styles'; const updatedSecondaryHelper = { ...helperStyleSecondary, backgroundColor: "lightgray" }; const max = Number.MAX_SAFE_INTEGER; const startPoint: Point = { row: 0, col: 1, }; interface TableSize { rows: number; cols: number; } const getTableSize = (array: number[]): TableSize => { const rows = 3; const cols = array.length + 1; return { rows, cols }; }; const createTableMatrix = (array: number[]): (number | string)[][] => { const table: (number | string)[][] = []; const indices: (string | number)[] = ['INDEX', ...Array.from(Array(array.length).keys())]; const jumps: (string | number)[] = ['JUMPS', ...array]; const result: (string | number)[] = ['RESULT', 0, ...Array(array.length - 1).fill('x')]; table[0] = indices; table[1] = jumps; table[2] = result; return table; }; const createComparedTable = (array: number[]): number[][] => createDPTable(array); const addHelperStyles = (styles: React.CSSProperties[][], point: Point, table: (string | number)[][]): void => { styles[0][point.row + 1] = helperStyle; styles[0][point.col + 1] = helperStyle; const jumpIndex = point.row + 2; const jumpLength = Number(table[1][point.row + 1]); const jumpsStyles = styles[1]; for (let i = jumpIndex; i < jumpsStyles.length && i < (jumpIndex + jumpLength); i++) { jumpsStyles[i] = updatedSecondaryHelper; } if (jumpIndex + jumpLength > point.col + 1) { styles[2][jumpIndex - 1] = helperStyleThird; } styles[2][point.col + 1] = helperStyleThird; }; const createTableStyles = (array: number[]): (React.CSSProperties)[][] => { const { rows, cols } = getTableSize(array); const table = new Array(rows).fill(0).map(() => new Array(cols).fill({})); addHelperStyles(table, startPoint, createTableMatrix(array)); return table; }; const createButtons = (array: number[]): (string | number)[] => { const dpTable = createDPTable(array); const set = new Set<string | number>(); for (let row = 0; row < dpTable.length; row++) { for (let col = 0; col < dpTable[row].length; col++) { const value = dpTable[row][col]; if (value === max) { set.add('x'); } else { set.add(value); } } } set.delete(0); return Array.from(set).sort(); }; const createButtonsStyles = (array: number[]): (React.CSSProperties)[] => { return createButtons(array).map(() => ({ color: 'back' })); }; export { addHelperStyles, createTableMatrix, createComparedTable, createTableStyles, createButtons, createButtonsStyles, startPoint, }; <file_sep>function hex(c: number) { var s = "0123456789abcdef"; var i = c // parseInt(c); if (i === 0 || isNaN(c)) return "00"; i = Math.round(Math.min(Math.max(0, i), 255)); return s.charAt((i - i % 16) / 16) + s.charAt(i % 16); } /* 
Remove '#' in color hex string */ function trim(s: string) { return (s.charAt(0) === '#') ? s.substring(1, 7) : s } /* Convert an RGB triplet to a hex string */ function convertToHex(rgb: number[]) { return "#" + hex(rgb[0]) + hex(rgb[1]) + hex(rgb[2]); } /* Convert a hex string to an RGB triplet */ function convertToRGB(hex: string) { const color = []; color[0] = parseInt((trim(hex)).substring(0, 2), 16); color[1] = parseInt((trim(hex)).substring(2, 4), 16); color[2] = parseInt((trim(hex)).substring(4, 6), 16); return color; } export function generateColor(colorStart: string, colorEnd: string, colorCount: number) { const start = convertToRGB(colorStart); // The beginning of your gradient const end = convertToRGB(colorEnd); // The end of your gradient const len = colorCount; // The number of colors to compute let alpha = 0.0; //Alpha blending amount const saida = []; for (let i = 0; i < len; i++) { const c = []; alpha += (1.0 / len); c[0] = start[0] * alpha + (1 - alpha) * end[0]; c[1] = start[1] * alpha + (1 - alpha) * end[1]; c[2] = start[2] * alpha + (1 - alpha) * end[2]; saida.push(convertToHex(c)); } return saida; } <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node?: TreeNode<number>; sum: number; hasPathSum: boolean; } function isLeaf(node: TreeNode<number>) { return !node.left && !node.right; } export function buildSteps(targetSum: number, root?: TreeNode<number>): Step[] { const steps: Step[] = []; function hasPathSum(root: TreeNode<number> | undefined, targetSum: number): boolean { function dfs(node: TreeNode<number> | undefined, num: number): boolean { if (node === undefined) { return false; } const sum = node.val.value + num; if (isLeaf(node) && sum === targetSum) { steps.push({ node, sum, hasPathSum: true }); return true; } steps.push({ node, sum, hasPathSum: false }); return dfs(node.left, sum) || dfs(node.right, sum); } return dfs(root, 0); }; hasPathSum(root, targetSum); return steps; } <file_sep>import * as THREE from 'three'; export const text = { color: "green", size: 0.6, height: 0.1 }; export const node = { size: { width: 1, height: 1, depth: 1 }, } export const shell = { material: new THREE.MeshBasicMaterial({ color: "lightgrey", opacity: 0.2, transparent: true }), }; export const stackPosition = { name: new THREE.Vector3(-3.5, 6, 8), stack: new THREE.Vector3(-1.3, 6.1, 8), } export const StackNameStyles = { color: "orange", size: 0.4, height: 0.1 }; export const duration = 0.5; export const minShellSize = 5; <file_sep>export const title = "Implement Queue Using Stacks"; export const formula = `class MyQueue { private stackIn: number[]; private stackOut: number[]; constructor() { this.stackIn = []; this.stackOut = []; } enqueue(x: number): void { this.stackIn.push(x); } dequeue(): number | undefined { this.checkAndShift(); return this.stackOut.pop(); } peek(): number { this.checkAndShift(); return this.stackOut[this.stackOut.length - 1]; } private checkAndShift(): void { if (this.stackOut.length === 0) { this.shift(); } } private shift(): void { let item = this.stackIn.pop(); while (item) { this.stackOut.push(item); item = this.stackIn.pop(); } } empty(): boolean { return this.stackIn.length === 0 && this.stackOut.length === 0; } }`; export const description = `Implement a first in first out (FIFO) queue using only two stacks. The implemented queue should support all the functions of a normal queue. 
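Because each element is moved from the input stack to the output stack at most once, ***dequeue*** and ***peek*** run in amortized O(1) time in this two-stack implementation.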
The functions are ***enqueue***, ***dequeue***, ***peek***, and ***empty***.`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export interface Result { items: ResultItem[]; isPalindrome: boolean; } interface ResultItem { reverted: number; x: number; } export const defaultResult: Result = { items: [], isPalindrome: false }; export function calculatePalindrome(x: number): Result { const items: ResultItem[] = []; if (x < 0 || (x % 10 === 0 && x !== 0)) { return { items, isPalindrome: false } } let reverted: number = 0; while (x > reverted) { reverted = reverted * 10 + x % 10; x = Math.floor(x / 10); items.push({ reverted, x }) } const isPalindrome: boolean = x === reverted || x === Math.floor(reverted / 10); return { items, isPalindrome }; }; <file_sep>export const title = "Maximum Subarray (Kadane's algorithm)"; export const formula = ` function maxSubArray(nums: number[]): number { for (let i = 1; i < nums.length; i++) { if (nums[i - 1] > 0) { nums[i] = nums[i] + nums[i-1]; } } return Math.max(...nums); }; `; export const description = `Finding the contiguous subarray within a one-dimensional array of numbers which has the largest sum. The list usually contains both positive and negative numbers. `; export const usecases = ''; export const example = ` **Input**: \\[-2, -1, 3, 5, 10, -2, -1, 2, 5, -2\\] **Output**: 22 **Explanation**: \\[3, 5, 10, -2, -1, 2, 5\\] has the largest sum = 22. `; <file_sep>import IPosition from "../params/position.interface" export default class Position implements IPosition { private mesh: THREE.Mesh; constructor(mesh: THREE.Mesh) { this.mesh = mesh; } public get x(): number { return this.mesh.position.x; } public set x(v: number) { this.mesh.position.setX(v); } public get y(): number { return this.mesh.position.y; } public set y(v: number) { this.mesh.position.setY(v); } public get z(): number { return this.mesh.position.z; } public set z(v: number) { this.mesh.position.setZ(v); } } <file_sep>import { TextCube } from "../_commons/cube/text-cube"; import Position from "../_commons/params/position.interface"; import IArray from "./array.interface"; class Array<T> implements IArray<TextCube<T>>{ private items: TextCube<T>[] = []; readonly length: number = this.items.length; private position: Position; public duration?: number; constructor( position: Position, duration?: number ) { this.position = position; this.duration = duration; } async swap(i: number, j: number): Promise<void> { if (this.items[i] === undefined || this.items[j] === undefined) { return; } const a = this.clonePosition(this.items[i].position); const b = this.clonePosition(this.items[j].position); await Promise.all([ this.items[i].move(b, this.duration || 0), this.items[j].move(a, this.duration || 0) ]); [this.items[i], this.items[j]] = [this.items[j], this.items[i]]; return; } private clonePosition({ x, y, z }: Position): Position { return { x, y, z }; } /** * Adds one or more elements to the end of an array and returns the new length of the array. 
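     * Each pushed cube is first animated to the next free slot (offset by two cube widths from the previous cube, see calculateLastPosition) and only then appended to the internal list.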
*/ async push(...items: TextCube<T>[]): Promise<number> { for (let i = 0; i < items.length; i++) { const item = items[i]; const position = this.calculateLastPosition(); await item.move(position, this.duration || 0); this.items.push(item); } return Promise.resolve(this.items.length); } private calculateLastPosition(): Position { if (this.items.length === 0) { return this.position; } else { const last = this.items[this.items.length - 1]; const { x, y, z } = last.position return { x: x - 2 * last.width, y, z }; } } pop(): Promise<TextCube<T> | undefined> { return Promise.resolve(this.items.pop()); } shift(): Promise<TextCube<T> | undefined> { return Promise.resolve(this.items.shift()); } /** * Adds one or more elements to the beginning of an array and returns the new length of the array. */ async unshift(...items: TextCube<T>[]): Promise<number> { for (let i = 0; i < items.length; i++) { const item = items[i]; const position = this.calculateHeadPosition(); await item.move(position, this.duration || 0); this.items.unshift(item); } return Promise.resolve(this.items.length); } private calculateHeadPosition(): Position { const item = this.items[0]; if (item === undefined) { return this.position; } const { x, y, z } = this.position; return { x: x + Math.floor(item.width / 2), y, z }; } async update(index: number, item: TextCube<T>): Promise<void> { if (this.items[index] === undefined) { return; } const position = this.clonePosition(this.items[index].position); this.items[index].hide(); await item.move(position, this.duration || 0); this.items[index] = item; } } export default Array; <file_sep>import { StackAlgo as Stack } from '../stack-algo'; test('stack push and pop as FILO', async () => { const stack = new Stack<number>(); await stack.push(10); await stack.push(11); await stack.push(12); await expect(stack.pop()).resolves.toBe(12); await expect(stack.pop()).resolves.toBe(11); await expect(stack.pop()).resolves.toBe(10); }); test('stack peek', async () => { const stack = new Stack<number>(); await stack.push(10); await expect(stack.peek()).resolves.toBe(10); await stack.push(11); await expect(stack.peek()).resolves.toBe(11); await stack.push(12); await expect(stack.peek()).resolves.toBe(12); await stack.pop(); await expect(stack.peek()).resolves.toBe(11); }); test("stack return it's size after push the new item", async () => { const stack = new Stack<number>(); const size1 = await stack.push(10); expect(size1).toBe(1); const size2 = await stack.push(17); expect(size2).toBe(2); const size3 = await stack.push(11); expect(size3).toBe(3); }); test('stack size', async () => { const stack = new Stack<number>(); const promises = [stack.push(1), stack.push(2), stack.push(3)]; await Promise.all(promises); await expect(stack.size()).resolves.toBe(promises.length); await stack.pop(); await expect(stack.size()).resolves.toBe(promises.length - 1); }); test('stack isEmpty', async () => { const stack = new Stack<number>(); await expect(stack.isEmpty()).resolves.toBeTruthy(); const pushPromises = [stack.push(1), stack.push(2), stack.push(3)]; await Promise.all(pushPromises); await expect(stack.isEmpty()).resolves.toBeFalsy(); const popPromises = [stack.pop(), stack.pop(), stack.pop()]; await Promise.all(popPromises); await expect(stack.isEmpty()).resolves.toBeTruthy(); }); <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from 'three/examples/jsm/geometries/TextGeometry'; import Display from '../../params/displayer.interface'; import Mover from 
'../../params/mover.interface'; import Position from '../../params/position.interface'; import DisplayImpl from "../../three/displayer.class"; import MoveImpl from "../../three/mover.class"; import PositionImpl from "../../three/position.class" import { Cube } from './cube'; import { TextCube as ITextCube } from '../text-cube'; import { calDestination } from '../../utils'; export class TextCube<T> extends Cube implements ITextCube<T> { value: T; textPosition: Position; private textDisplay: Display; private textMover: Mover; constructor( value: T, textMaterial: THREE.Material, textGeometryParameters: TextGeometryParameters, cubeMaterial: THREE.Material, cubeGeometry: THREE.BoxGeometry, scene: THREE.Scene ) { super(cubeGeometry, cubeMaterial, scene); this.value = value; const textGeometry = new TextGeometry(value + '', textGeometryParameters); const textMesh = new THREE.Mesh(textGeometry, textMaterial); this.textPosition = new PositionImpl(textMesh); this.textMover = new MoveImpl(textMesh); this.textDisplay = new DisplayImpl(scene, textMesh); } public async move(position: Position, duration: number) { const distance = super.distance(position); const textEndPosition = calDestination(this.textPosition, distance); const cubeMove = super.move(position, duration) const textMove = this.textMover.move(textEndPosition, duration); return Promise.all([cubeMove, textMove]).then(() => { }); } public show(): void { super.show(); this.textDisplay.show(); } public hide(): void { super.hide(); this.textDisplay.hide(); } } <file_sep>const createDPTable = (sequence: string): number[][] => { const rows = sequence.length + 1; const cols = sequence.length + 1; const table = new Array(rows).fill(0).map(() => new Array(cols).fill(0)); // length === 1; for (let i = 0; i < table.length; i += 1) { table[i][i] = 1; } // length === 2; for (let i = 0; i < table.length - 1; i += 1) { if (sequence.charAt(i) === sequence.charAt(i + 1)) { table[i][i + 1] = 2; } else { table[i][i + 1] = 1; } } // length > 2; for (let len = 3; len <= table.length; len += 1) { for (let i = 0; i + len <= table.length; i += 1) { const front = sequence.charAt(i); const end = sequence.charAt(i + len - 1); if (front === end) { table[i][i + len - 1] = 2 + table[i + 1][i + len - 2]; } else { table[i][i + len - 1] = Math.max(table[i][i + len - 2], table[i + 1][i + len - 1]); } } } return table; }; export default createDPTable; <file_sep>import * as THREE from 'three'; import Display from '../../params/displayer.interface'; import Mover from '../../params/mover.interface'; import Position from '../../params/position.interface'; import DisplayImpl from "../../three/displayer.class"; import MoveImpl from "../../three/mover.class"; import PositionImpl from "../../three/position.class" import { Cube as ICube } from '../cube'; import { calDistance } from '../../utils'; export class Cube implements ICube { position: Position; private display: Display; private mover: Mover; private mesh: THREE.Mesh; constructor( geometry: THREE.BoxGeometry, material: THREE.Material, scene: THREE.Scene ) { this.mesh = new THREE.Mesh(geometry, material); this.position = new PositionImpl(this.mesh); this.mover = new MoveImpl(this.mesh); this.display = new DisplayImpl(scene, this.mesh); } public get width(): number { return this.mesh.scale.x; } public set width(v: number) { this.mesh.scale.setX(v); } public get height(): number { return this.mesh.scale.y; } public set height(v: number) { this.mesh.scale.setY(v); } public get depth(): number { return this.mesh.scale.z; } public 
set depth(v: number) { this.mesh.scale.setZ(v); } protected distance(position: Position): Position { return calDistance(this.position, position); } public move(position: Position, duration: number) { return this.mover.move(position, duration); } public show(): void { this.display.show(); } public hide(): void { this.display.hide(); } } <file_sep>import * as THREE from 'three'; import { TextGeometry, TextGeometryParameters } from 'three/examples/jsm/geometries/TextGeometry'; import Displayer from "../_commons/params/displayer.interface"; import DisplayerImpl from "../_commons/three/displayer.class"; import Position from "../_commons/params/position.interface"; import PositionImpl from '../_commons/three/position.class'; import Mover from '../_commons/params/mover.interface'; import MoverImpl from '../_commons/three/mover.class'; import Color from '../_commons/params/color.interface'; import ColorImpl from '../_commons/three/color.class'; import { Base as IBase, GraphSkin as ISkin, GraphText as IText } from "./node.interface"; import { font } from '../../commons/three'; class Base extends PositionImpl implements IBase { private displayer: Displayer; private mover: Mover; private readonly colorProxy: Color; constructor( scene: THREE.Scene, geometry: THREE.BufferGeometry, material: THREE.Material ) { const mesh = new THREE.Mesh(geometry, material); super(mesh); this.colorProxy = new ColorImpl(material); this.displayer = new DisplayerImpl(scene, mesh); this.mover = new MoverImpl(mesh); } setColor(color: string): Promise<void> { return this.colorProxy.setColor(color); } get color(): string { return this.colorProxy.color; } show() { this.displayer.show(); } hide() { this.displayer.hide(); } move(position: Position, duration: number, onUpdate?: (() => void) | undefined) { return this.mover.move(position, duration, onUpdate); } } export class GraphSkin extends Base implements ISkin { } export class GraphText extends Base implements IText { public text: string; constructor( text: string, scene: THREE.Scene, geometryParameters: TextGeometryParameters, material: THREE.Material, ) { const geometry = new TextGeometry(text, geometryParameters); super(scene, geometry, material); this.text = text; } } export class SimpleGraphSkin extends GraphSkin { constructor(scene: THREE.Scene, color: string) { const geometry = new THREE.SphereGeometry(1, 32, 16); const material = new THREE.MeshBasicMaterial({ color, opacity: 0.4, transparent: true }); super(scene, geometry, material); } } export class SimpleGraphText extends GraphText { constructor(text: string, scene: THREE.Scene, color: string) { const material = new THREE.MeshBasicMaterial({ color }); const geometryParameters: TextGeometryParameters = { font, size: 0.8, height: 0.1 }; super(text, scene, geometryParameters, material); } } <file_sep>interface TableSize { rows: number; cols: number; } const getTableSize = (sequence: string): TableSize => { const rows = sequence.length; const cols = sequence.length; return { rows, cols }; }; const createMarksTable = (sequence: string, truthTable: boolean[][]): number[][] => { const { rows, cols } = getTableSize(sequence); const table = Array(rows).fill(1).map(() => Array(cols).fill(1)); let max = 1; for (let i = 0; i < table.length - 1; i += 1) { if (sequence.charAt(i) === sequence.charAt(i + 1)) { max = 2; } table[i][i + 1] = max; } for (let len = 3; len <= table.length; len += 1) { for (let i = 0; i + len <= table.length; i += 1) { const front = sequence.charAt(i); const end = sequence.charAt(i + len - 1); if (front 
=== end && truthTable[i][i + len - 1]) { max = Math.max(max, 2 + table[i + 1][i + len - 2]); } table[i][i + len - 1] = max; } } return table; }; const createDPTable = (sequence: string): boolean[][] => { const { rows, cols } = getTableSize(sequence); const table = Array(rows).fill(false).map(() => Array(cols).fill(false)); for (let i = 0; i < table.length; i++) { table[i][i] = true; } for (let i = 0; i < table.length - 1; i++) { if (sequence.charAt(i) === sequence.charAt(i + 1)) { table[i][i + 1] = true; } else { table[i][i + 1] = false; } } for (let len = 3; len <= table.length; len++) { for (let i = 0; i + len <= table.length; i++) { const front = sequence.charAt(i); const end = sequence.charAt(i + len - 1); if (front === end) { table[i][i + len - 1] = table[i + 1][i + len - 2]; } else { table[i][i + len - 1] = false; } } } return table; }; export { createDPTable, createMarksTable }; <file_sep>export const title = "Construct Binary Tree from Inorder and Postorder Traversal"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function buildTree(inorder: number[], postorder: number[]): TreeNode | null { const inorderIndexMap: Map<number, number> = new Map(); inorder.forEach((num, i) => inorderIndexMap.set(num, i)); function buildMyTree(inorderLeft: number, inorderRight: number, postorderLeft: number, postorderRight: number): TreeNode | null { if (postorderLeft > postorderRight) { return null; } const inorderRootIndex = inorderIndexMap.get(postorder[postorderRight])!; const leftTreeLength = inorderRootIndex - inorderLeft; const root = new TreeNode(postorder[postorderRight]); root.left = buildMyTree(inorderLeft, inorderRootIndex - 1, postorderLeft, postorderLeft + leftTreeLength - 1); root.right = buildMyTree(inorderRootIndex + 1, inorderRight, postorderLeft + leftTreeLength, postorderRight - 1); return root; } return buildMyTree(0, inorder.length - 1, 0, postorder.length - 1); };`; export const description = ` Given two integer arrays **inorder** and **postorder** where **inorder** is the inorder traversal of a binary tree and **postorder** is the postorder traversal of the same tree, construct and return ***the binary tree***. `; export const usecases = ''; export const example = ''; <file_sep>import { Point } from "../../commons/point"; const random = (): number => Math.floor(Math.random() * 4); export const buildBoard = (rows: number, cols: number): string[][] => { const board: string[][] = []; for (let row = 0; row < rows; row++) { board.push([]); for (let col = 0; col < cols; col++) { const value = random() ? 
"X" : "O"; board[row].push(value); } } return board; } const cloneGrid = (grid: string[][]): string[][] => grid.map(row => [...row]); export enum Direction { Up, Right, Down, Left, StartDFS, SkipDFS, Update } export interface Step { board: string[][]; point: Point; direction?: Direction; } export const buildSteps = (board: string[][]): Step[] => { const steps: Step[] = []; const inArea = (row: number, col: number): boolean => { return row >= 0 && row < board.length && col >= 0 && col < board[row].length; } const dfs = (row: number, col: number, direction: Direction) => { if (!inArea(row, col)) { return; } if (board[row][col] !== "O") { return } steps.push({ board: cloneGrid(board), point: { row, col }, direction }); board[row][col] = "#"; dfs(row - 1, col, Direction.Up); dfs(row, col + 1, Direction.Right); dfs(row + 1, col, Direction.Down); dfs(row, col - 1, Direction.Left); } for (let row = 0; row < board.length; row++) { if (board[row][0] !== "O") { steps.push({ board: cloneGrid(board), point: { row, col: 0 }, direction: Direction.SkipDFS }); } dfs(row, 0, Direction.StartDFS); if (board[row][board[row].length - 1] !== "O") { steps.push({ board: cloneGrid(board), point: { row, col: board[row].length - 1 }, direction: Direction.SkipDFS }); } dfs(row, board[row].length - 1, Direction.StartDFS); } for (let col = 1; col < board[0].length - 1; col++) { if (board[0][col] !== "O") { steps.push({ board: cloneGrid(board), point: { row: 0, col }, direction: Direction.SkipDFS }); } dfs(0, col, Direction.StartDFS); if (board[board.length - 1][0] !== "O") { steps.push({ board: cloneGrid(board), point: { row: board.length - 1, col }, direction: Direction.SkipDFS }); } dfs(board.length - 1, col, Direction.StartDFS); } for (let row = 0; row < board.length; row++) { for (let col = 0; col < board[row].length; col++) { if (board[row][col] === 'O') { board[row][col] = 'X'; } else if (board[row][col] === "#") { board[row][col] = 'O'; } steps.push({ board: cloneGrid(board), point: { row, col }, direction: Direction.Update }); } } return steps; } <file_sep>import TreeNode from "../dataNode"; import { Action, ActionType, Actions } from "./action"; export default class PreOrder implements Actions { private actions: Action[]; constructor(root: TreeNode) { this.actions = []; this.recurse(root, undefined); } get(index: number): Action { return this.actions[index]; } get length(): number { return this.actions.length; } private push(action: Action) { this.actions.push(action); } private recurse(node: TreeNode, parent?: TreeNode) { this.push({ node, action: ActionType.PRINT_VAL, parent }); if (node.left) { this.push({ node, action: ActionType.GO_LEFT, parent }); this.recurse(node.left, node); } if (node.right) { this.push({ node, action: ActionType.GO_RIGHT, parent }); this.recurse(node.right, node); } this.push({ node, action: ActionType.BACK_TO_PARENT, parent }); } } <file_sep>export const title = "Implement Stack Using Queues"; export const formula = `class MyStack { private queueIn: number[]; private queueOut: number[]; constructor() { this.queueIn = []; this.queueOut = []; } push(x: number): void { this.queueIn.push(x); let item = this.queueOut.shift(); while (item) { this.queueIn.push(item); item = this.queueOut.shift(); } this.swap(); } private swap(): void { const temp = this.queueIn; this.queueIn = this.queueOut; this.queueOut = temp; } pop(): number | undefined { return this.queueOut.shift(); } top(): number { return this.queueOut[0]; } empty(): boolean { return this.queueOut.length === 0; } }`; export const 
description = `Implement a last in first out (LIFO) stack using only two queues. The implemented stack should support all the functions of a normal stack. The functions are ***push***, ***pop***, ***top***, and ***empty***.`; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>export const title = "Path Sum II"; export const formula = `/** * Definition for a binary tree node. * class TreeNode { * val: number * left: TreeNode | null * right: TreeNode | null * constructor(val?: number, left?: TreeNode | null, right?: TreeNode | null) { * this.val = (val===undefined ? 0 : val) * this.left = (left===undefined ? null : left) * this.right = (right===undefined ? null : right) * } * } */ function pathSum(root: TreeNode | null, targetSum: number): number[][] { const result: number[][] = []; function dfs(node: TreeNode | null, nums: number[]) { if (node === null) { return; } const sum = [...nums, node.val]; if (!node.left && !node.right) { if (sum.reduce((a, b) => a + b, 0) === targetSum) { result.push(sum); } } dfs(node.left, sum); dfs(node.right, sum); } dfs(root, []); return result; };`; export const description = ` Given the **root** of a binary tree and an integer **targetSum**, return all **root-to-leaf** paths where the sum of the node values in the path equals targetSum. Each path should be returned as a list of the node values, not node references. A **root-to-leaf** path is a path starting from the root and ending at any leaf node. A **leaf** is a node with no children. `; export const solution = ``; export const usecases = ''; export const example = ``; <file_sep>import TreeNode from "../../../data-structures/tree/nodes/v1/node"; export enum Direction { Left, Right, Back } export interface Step { node?: TreeNode<number>; sum: number; isLeftLeafNode: boolean; } function isLeafNode(node: TreeNode<number>) { return !node.left && !node.right; } export function buildSteps(root?: TreeNode<number>): Step[] { const steps: Step[] = []; let sum = 0; function sumOfLeftLeaves(node?: TreeNode<number>) { if (!node) { return; } if (node.left && isLeafNode(node.left)) { sum += node.left.val.value; steps.push({ node, sum, isLeftLeafNode: true }); } else { steps.push({ node, sum, isLeftLeafNode: false }); } sumOfLeftLeaves(node.left); sumOfLeftLeaves(node.right); } sumOfLeftLeaves(root); return steps; } <file_sep>import * as THREE from "three"; export default class Arrow extends THREE.ArrowHelper { private readonly headLength: number; private readonly headWidth: number; origin: THREE.Vector3; dest: THREE.Vector3; constructor( origin: THREE.Vector3, dest: THREE.Vector3, color: THREE.Color | string | number, headLength: number, headWidth: number ) { const direction = dest.clone().sub(origin); super(direction.clone().normalize(), origin, direction.length(), color, headLength, headWidth) this.origin = origin; this.dest = dest; this.headLength = headLength; this.headWidth = headWidth; } get direction(): THREE.Vector3 { return this.dest.clone().sub(this.origin); } public update(): void { this.position.copy(this.origin); this.setDirection(this.direction.normalize()); this.setLength(this.direction.length(), this.headLength, this.headWidth); } } <file_sep>export const title = "Redundant Connection"; export const formula = `function findRedundantConnection(edges: number[][]): number[] { class UnionFindSetNode { rank: number; parent: UnionFindSetNode; constructor() { this.rank = 0; this.parent = this; } } class UnionFindSet { private readonly map: Map<number, 
UnionFindSetNode>; constructor() { this.map = new Map(); } union(a: number, b: number) { const rootA = this.find(a); const rootB = this.find(b); if (rootA === rootB) { return; } if (rootA.rank === rootB.rank) { rootA.rank += 1; rootB.parent = rootA } else if (rootA.rank > rootB.rank) { rootB.parent = rootA; } else { rootA.parent = rootB } } find(a: number): UnionFindSetNode { const node = this.getNode(a); return this.findRoot(node); } private findRoot(node: UnionFindSetNode): UnionFindSetNode { if (node.parent === node) { return node; } node.parent = this.findRoot(node.parent); return node.parent; } private getNode(value: number): UnionFindSetNode { if (!this.map.has(value)) { this.map.set(value, new UnionFindSetNode()); } return this.map.get(value)!; } } const unionFindSet = new UnionFindSet(); for (let i = 0; i < edges.length; i++) { const [a, b] = edges[i]; const rootA = unionFindSet.find(a); const rootB = unionFindSet.find(b); if (rootA !== rootB) { unionFindSet.union(a, b); } else { return [a, b]; } } return []; };`; export const description = `In this problem, a tree is an **undirected graph** that is connected and has no cycles. You are given a graph that started as a tree with **n** nodes labeled from **1** to **n**, with one additional edge added. The added edge has two different vertices chosen from 1 to n, and was not an edge that already existed. The graph is represented as an array edges of length n where edges[i] = [a_i, b_i] indicates that there is an edge between nodes a_i and b_i in the graph. Return an edge that can be removed so that the resulting graph is a tree of n nodes. If there are multiple answers, return the answer that occurs last in the input.`; export const usecases = ''; export const example = ``; export const tips = `// Use union-find-set(disjoint set) to solve this problem for (let i = 0; i < edges.length; i++) { const [a, b] = edges[i]; const rootA = unionFindSet.find(a); const rootB = unionFindSet.find(b); if (rootA !== rootB) { unionFindSet.union(a, b); } else { return [a, b]; } }`; <file_sep>export enum Color { Red = "red", Black = "black" } export default class Node<T> { val: T; left?: Node<T>; right?: Node<T>; color: Color; constructor(val: T) { this.val = val; this.left = undefined; this.right = undefined; this.color = Color.Red; } get isRed(): boolean { return this.color === Color.Red; } get isBlack(): boolean { return this.color === Color.Black; } } <file_sep>import { title } from "./contents"; import { Game } from "../../commons/game"; import Category from "../../commons/segments/category"; import Difficulty from "../../commons/segments/difficulty"; const info: Game = { name: title, path: "/algorithms/serialize-and-deserialize-binary-tree", categories: [Category.Tree], companies: [], difficulty: Difficulty.Hard, img: "/img/serialize-and-deserialize-binary-tree.png", leetcodeId: 297 } export default info; <file_sep>import * as THREE from 'three'; import Color from '../../params/color.interface'; import ColorImpl from '../../three/color.class'; import Display from '../../params/displayer.interface'; import DisplayImpl from "../../three/displayer.class"; import Mover from '../../params/mover.interface'; import MoveImpl from "../../three/mover.class"; import Position from '../../params/position.interface'; import PositionImpl from "../../three/position.class" import { Sphere as ISphere } from "../sphere.interface"; import { calDistance } from '../../utils'; export default class Sphere implements ISphere { center: Position; private display: Display; 
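/* Behaviour is delegated to the helpers wired up in the constructor: DisplayImpl shows/hides the mesh in the scene, MoveImpl moves it towards a target position over the given duration, and ColorImpl drives material colour changes. */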
private mover: Mover; private geometry: THREE.SphereGeometry; sphereColor: Color; constructor( geometry: THREE.SphereGeometry, material: THREE.Material, scene: THREE.Scene ) { this.geometry = geometry; const mesh = new THREE.Mesh(geometry, material); this.display = new DisplayImpl(scene, mesh); this.center = new PositionImpl(mesh); this.mover = new MoveImpl(mesh); this.sphereColor = new ColorImpl(material); } move(position: Position, duration: number, onUpdate?: () => void) { return this.mover.move(position, duration, onUpdate); } show() { this.display.show(); } hide() { this.display.hide(); } protected distance(position: Position): Position { return calDistance(this.center, position); } get radius() { return this.geometry.parameters.radius; } } <file_sep>const createDPTable = (stringOne: string, stringTwo: string): boolean[][] => { const rows = stringTwo.length + 1; const cols = stringOne.length + 1; const table = new Array(rows).fill(false).map(() => new Array(cols).fill(false)); for (let col = 0; col < cols; col++) { table[0][col] = true; } for (let row = 1; row < rows; row++) { const ch1 = stringTwo.charAt(row - 1); for (let col = 1; col < cols; col++) { const ch2 = stringOne.charAt(col - 1); if (ch1 === ch2 && table[row - 1][col - 1]) { table[row][col] = true; } } } return table; }; export default createDPTable; <file_sep>export const nodeOriginalSkinColor: string = "yellow"; export const nodeOriginalTextColor: string = "green"; export const nodeEnabledSkinColor: string = "green"; export const nodeEnabledTextColor: string = "gray"; export const edgeOriginalColor: string = "gold"; export const edgeDisabledColor: string = "lightgray"; <file_sep>import createDpTable from "../algo"; const getLastCell = (table: boolean[][]): boolean => { const lastRow = table[table.length - 1]; return lastRow[lastRow.length - 1]; } test("is-subsequence one", () => { const str1 = "alchemist"; const str2 = "che"; const table = createDpTable(str1, str2); const result = getLastCell(table); expect(result).toBe(true); }); test("is-subsequence two", () => { const str1 = "alchemist"; const str2 = "chm"; const table = createDpTable(str1, str2); const result = getLastCell(table); expect(result).toBe(true); }); test("is-subsequence two", () => { const str1 = "alchemist"; const str2 = "mch"; const table = createDpTable(str1, str2); const result = getLastCell(table); expect(result).toBe(false); }); <file_sep>export const title = "Serialize and Deserialize Binary Tree"; export const formula = `function serialize(root: TreeNode | null): string { const result: string[] = []; function preorder (node: TreeNode | null) { if(!node) { result.push("#"); return; } result.push(node.val + ""); preorder(node.left); preorder(node.right); } preorder(root); return result.join(","); }; function deserialize(data: string): TreeNode | null { let index = 0; function preorder(array: string[]) : TreeNode | null { const value = array[index]; index += 1; if (!value || value === "#") { return null; } const node = new TreeNode(+value); node.left = preorder(array); node.right = preorder(array); return node; } return preorder(data.split(",")); };`; export const description = ` Serialization is the process of converting a data structure or object into a sequence of bits so that it can be stored in a file or memory buffer, or transmitted across a network connection link to be reconstructed later in the same or another computer environment. Design an algorithm to serialize and deserialize a binary tree. 
There is no restriction on how your serialization/deserialization algorithm should work. You just need to ensure that a binary tree can be serialized to a string and this string can be deserialized to the original tree structure. `; export const solution = ``; export const usecases = ''; export const example = ``;
8f4044cb425956a8f89979b3059daed7dd5b3c0d
[ "Markdown", "TypeScript" ]
346
TypeScript
zhou-dong/alchemist
e2110efc294d1fbd07fbbaa9768277065f834fbe
8c198354eff354a5274df65bab822c1c08281244
refs/heads/main
<repo_name>nbhargav0098/geektrustAssignment<file_sep>/src/App.js import {Component} from 'react' import DisplayEachUser from './components/DisplayEachUser' import './App.css' class App extends Component { state = {userDetails: [], searchInput: ''} componentDidMount() { this.fetchUserDetails() } fetchUserDetails = async () => { const userDetailsResponse = await fetch( `https://geektrust.s3-ap-southeast-1.amazonaws.com/adminui-problem/members.json`, ) const responseData = await userDetailsResponse.json() const newList = responseData.map(eachItem => ({ id: eachItem.id, email: eachItem.email, role: eachItem.role, name: eachItem.name, })) this.setState({userDetails: newList}) } onChangeInput = event => { this.setState({searchInput: event.target.value}) } deleteUserId = id => { const {userDetails} = this.state const newUsersData = userDetails.filter(eachItem => eachItem.id !== id) this.setState({ userDetails: newUsersData, }) } render() { const {userDetails, searchInput} = this.state const filteredResults = userDetails.filter(eachUser => eachUser.name.toLowerCase().includes(searchInput.toLowerCase()), ) return ( <div className="background-container"> <input type="search" className="search-input" onChange={this.onChangeInput} value={searchInput} /> <ul> <div className="list-headings"> <div className="li-heading-container"> <input type="checkbox" /> </div> <div className="li-heading-container"> <p className="headings">Name</p> </div> <div className="li-heading-container"> <p className="headings">Email</p> </div> <div className="li-heading-container"> <p className="headings">Role</p> </div> <div className="li-heading-container"> <p className="headings">Actions</p> </div> </div> <hr /> {filteredResults.map(eachUserDetails => ( <DisplayEachUser userDetails={eachUserDetails} key={eachUserDetails.id} deleteUserId={this.deleteUserId} /> ))} </ul> </div> ) } } export default App <file_sep>/src/components/DisplayEachUser/index.js import {IconContext} from 'react-icons' import {AiOutlineDelete} from 'react-icons/ai' import {FiEdit} from 'react-icons/fi' import './index.css' const DisplayEachUser = props => { const {userDetails, deleteUserId} = props const {name, id, email, role} = userDetails const onDeleteUser = () => { deleteUserId(id) } return ( <> <li className="list-item" htmlFor={`checkbox${id}`}> <div className="list-container"> <input type="checkbox" id={`checkbox${id}`} /> </div> <div className="list-container"> <p className="details">{name}</p> </div> <div className="list-container"> <p className="details">{email}</p> </div> <div className="list-container"> <p className="details">{role.toUpperCase()}</p> </div> <div className="list-container"> <div className="icons-container"> <div className="icons"> <IconContext.Provider value={{style: {fontSize: '14px', color: 'black'}}} > <div> <FiEdit /> </div> </IconContext.Provider> </div> <div className="icons"> <IconContext.Provider value={{style: {fontSize: '16px', color: 'red'}}} > <div> <AiOutlineDelete onClick={onDeleteUser} /> </div> </IconContext.Provider> </div> </div> </div> </li> <hr /> </> ) } export default DisplayEachUser
1130ebd21b083fff6598fcf5eea04f5843aa1868
[ "JavaScript" ]
2
JavaScript
nbhargav0098/geektrustAssignment
b32882084cd60eaf45bd5306fae71e852aedc6ef
60d2349387d123a1e24f4df254ffdb4b3e304ccc
refs/heads/master
<repo_name>timsilog/hydra<file_sep>/client.c /* ************************************************************************** */ /* */ /* ::: :::::::: */ /* client.c :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: tjose <<EMAIL>> +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2017/02/16 16:33:05 by tjose #+# #+# */ /* Updated: 2017/02/16 20:12:39 by tjose ### ########.fr */ /* */ /* ************************************************************************** */ #include <string.h> #include <sys/socket.h> #include <unistd.h> #include <stdio.h> #include <netinet/in.h> #include <arpa/inet.h> int main(void) { int sock_fd; char sendline[1000]; char recline[1000]; struct sockaddr_in servaddr; sock_fd = socket(AF_INET, SOCK_STREAM, 0); bzero(&servaddr, sizeof(servaddr)); servaddr.sin_family = AF_INET; servaddr.sin_port = htons(22000); inet_pton(AF_INET, "127.0.0.1", &(servaddr.sin_addr)); connect(sock_fd, (struct sockaddr*)&servaddr, sizeof(servaddr)); while(1) { bzero(sendline, 1000); bzero(recline, 1000); fgets(sendline, 1000, stdin); write(sock_fd, sendline, strlen(sendline) + 1); read(sock_fd, recline, 1000); write(1, recline, strlen(recline) + 1); } } <file_sep>/server.c /* ************************************************************************** */ /* */ /* ::: :::::::: */ /* server.c :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: tjose <<EMAIL>> +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2017/02/16 16:33:10 by tjose #+# #+# */ /* Updated: 2017/02/16 20:12:37 by tjose ### ########.fr */ /* */ /* ************************************************************************** */ #include <string.h> #include <sys/socket.h> #include <unistd.h> #include <netinet/in.h> #include <stdlib.h> #include <stdio.h> int main(int argc, char **argv) { char str[1000]; int listen_fd; int comm_fd; struct sockaddr_in servaddr; pid_t pid; if (argc == 2) { if (!strcmp(argv[1], "-D")) { pid = fork(); if (pid < 0) { printf("fork failed\n"); exit(1); } if (pid > 0) { printf("pid of child is %d\n", pid); exit(0); } } } listen_fd = socket(AF_INET, SOCK_STREAM, 0); bzero(&servaddr, sizeof(servaddr)); servaddr.sin_family = AF_INET; servaddr.sin_addr.s_addr = htons(INADDR_ANY); servaddr.sin_port = htons(22000); bind(listen_fd, (struct sockaddr*)&servaddr, sizeof(servaddr)); listen(listen_fd, 10); comm_fd = accept(listen_fd, NULL, NULL); while(1) { bzero(str, 1000); read(comm_fd, str, 1000); if (!strcmp(str, "ping\n")) write(comm_fd, "pong\npong\n", 11); else write(comm_fd, "", 1); } } <file_sep>/README.md # hydra A server in C that will answer any ping request with two pongs
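## Usage

A minimal build-and-run sketch (assumes gcc; the loopback address and port 22000 are hard-coded in the sources):

```
gcc -o server server.c
gcc -o client client.c
./server -D    # optional -D forks the server into the background
./client       # type "ping" to get "pong" twice; any other input gets an empty reply
```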
1b86ce69441f8a0289e3bde0b355a377e2a83e83
[ "Markdown", "C" ]
3
C
timsilog/hydra
123b8928efc0aa639e696bbc975d17b23ab5a1ad
0d076511b7351c3262bacee9bf7e636c629d14ea
refs/heads/master
<repo_name>pedroosorio/minho_team_simulation_tools<file_sep>/world_plugin/src/minho_world.cc #include "minho_world.hh" using namespace gazebo; Minho_World *me; // Register this plugin with the simulator GZ_REGISTER_WORLD_PLUGIN(Minho_World); /// \brief Constructor Minho_World::Minho_World() { models_.clear(); model_config_.clear(); me = this; } /// \brief deletes allocated memory Minho_World::~Minho_World() { for(unsigned int i=0;i<model_config_.size();i++) free(model_config_[i]); } /// \brief Plugin Load function /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Minho_World::Load(physics::WorldPtr _parent, sdf::ElementPtr _sdf) { ROS_INFO("Starting world plugin ..."); node_ = transport::NodePtr(new transport::Node()); world_ = _parent; node_->Init(_parent->GetName()); initPluginConfiguration(_sdf); world_mutex_ = _parent->GetSetWorldPoseMutex(); //Init subsciber and publisher factory_pub_ = node_->Advertise<gazebo::msgs::Factory>("/gazebo/default/factory"); cmd_sub_ = node_->Subscribe("/gazebo/default/factorybridge", &Minho_World::parseCommand); } /// \brief initializes the parameters in the plugin's configuration, /// such as available models /// \param _sdf - configurations for the plugin in sdf file void Minho_World::initPluginConfiguration(sdf::ElementPtr _sdf) { // count number of models unsigned int defined_models = 0; sdf::ElementPtr model; model = _sdf->GetFirstElement(); while(model){ if(model->HasAttribute("model_id")){ // valid model defined_models++; } model = model->GetNextElement(); } ROS_INFO("Found %d defined models in world plugin.",defined_models); // actually load models int model_id = 0; model = _sdf->GetFirstElement(); while(model){ if(model->HasAttribute("model_id")){ // valid model if(model->GetAttribute("model_id")->Get(model_id)){ AvailableModelPtr mod = new AvailableModel; mod->id = model_id; mod->path_to = model->GetValue()->GetAsString(); model_config_.push_back(mod); } else { ROS_WARN("Model definition error in %s",model->GetValue()->GetAsString().c_str()); } } model = model->GetNextElement(); } } /// \brief spawns a model into a world, given its id, in relation with /// model_config_ models, defined in plugin's sdf. 
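/// The spawn is performed by publishing a gazebo::msgs::Factory message; when a name is supplied, the model's SDF is read from the model database and its model name attribute is rewritten to that name before publishing. The world is paused while the model is inserted.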
/// \param id - id of the type of model to be spawned (matchin) plugin configuration /// \param default_name - name of the new model void Minho_World::spawnModel(unsigned int id, std::string default_name) { bool pauseState = world_->IsPaused(); world_->SetPaused(true); for(unsigned int i=0;i<model_config_.size();i++){ if(model_config_[i]->id == id){ gazebo::msgs::Factory new_model; if(default_name.compare("")!=0){ std::string filename = common::ModelDatabase::Instance()->GetModelFile( model_config_[i]->path_to); std::ifstream sdf_file; sdf_file.open(filename); std::string line; std::string sdf_data = ""; if(sdf_file.is_open()){ while(getline(sdf_file,line)){sdf_data += line; sdf_data +="\n";} } else { ROS_ERROR("Cannot open file for %s",model_config_[i]->path_to.c_str()); } std::size_t found = sdf_data.find("model name=",0); if (found!=std::string::npos){ std::size_t found_end = sdf_data.find("\"",static_cast<int>(found)+13); std::string sub_name = "model name=\""; sub_name += default_name; sub_name += "\""; sdf_data.replace(static_cast<int>(found),static_cast<int>(found_end)-static_cast<int>(found)+1 ,sub_name); new_model.set_sdf(sdf_data); } else new_model.set_sdf_filename(model_config_[i]->path_to); } else new_model.set_sdf_filename(model_config_[i]->path_to); // Pose to initialize the model to msgs::Set(new_model.mutable_pose(), ignition::math::Pose3d( ignition::math::Vector3d(5, 0, 0), ignition::math::Quaterniond(0, 0, 0)) ); factory_pub_->Publish(new_model); ROS_INFO("\nSpawned model '%s'",default_name.c_str()); break; } } world_->SetPaused(pauseState); } /// \brief deletes a model with a given name. /// \param name - name of the model to be deleted void Minho_World::deleteModel(std::string name) { bool model_deleted = false; models_ = world_->GetModels(); // get models for(unsigned int i = 0; i<models_.size(); i++){ if(name.compare(models_[i]->GetName())==0){ ROS_INFO("Found model to be deleted."); //world_->RemoveModel(models_[i]->GetName()); model_deleted = true; } } if(model_deleted) ROS_INFO("\nDeleted model '%s'",name.c_str()); else ROS_INFO("\nFailed to delete model '%s'",name.c_str()); std::stringstream models_list; models_list << "Current Models in World:"; for(unsigned int m = 0;m<models_.size();m++) models_list << "\n\t\t\t\tModel " << m << " -> " << models_[m]->GetName(); models_list << "\n"; ROS_INFO("%s",models_list.str().c_str()); } /// \brief receives messages in string form to add or remove a model. 
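/// Expected payloads are "spawn:<type_id>,<model_name>" and "remove:<model_name>"; any other action is logged as an error.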
/// \param _msg - data received in callback, containing info about /// spawning or removing a model void Minho_World::parseCommand(ConstGzStringPtr &_msg) { std::string command = _msg->data(); std::string action = ""; std::string value = ""; size_t pos = 0; if((pos = command.find(":")) != std::string::npos) { action = command.substr(0, pos); command.erase(0, pos + 1); } if(action.compare("spawn")==0){ size_t pos_2 = 0; if((pos_2 = command.find(",")) != std::string::npos) { value = command.substr(0, pos_2); command.erase(0, pos_2 + 1); int type = std::atoi(value.c_str()); ROS_INFO("Spawning model '%s' as type '%d'",command.c_str(),type); me->spawnModel(type,command); } } else if(action.compare("remove")==0){ ROS_INFO("Removing model '%s'",command.c_str()); me->deleteModel(command); } else { ROS_ERROR("Wrong model managing command : '%s'",_msg->data().c_str()); } } <file_sep>/world_plugin/include/minho_world.hh #ifndef _GAZEBO_MINHO_HARDWARE_HH_ #define _GAZEBO_MINHO_HARDWARE_HH_ #include <gazebo/gazebo.hh> #include <gazebo/physics/physics.hh> #include <gazebo/common/common.hh> #include <gazebo/msgs/msgs.hh> #include <gazebo/transport/transport.hh> #include <gazebo/common/Events.hh> #include <ignition/math/Vector3.hh> #include <ignition/math/Pose3.hh> #include <ignition/math/Vector2.hh> #include <stdio.h> #include <stdlib.h> #include <string> #include <sstream> #include <vector> #include <sdf/Param.hh> #include <sdf/sdf.hh> #include "ros/ros.h" #include <cstdlib> #include <fstream> #define DEG_TO_RAD M_PI/180.0 #define RAD_TO_DEG 180.0/M_PI typedef struct AvailableModel{ int id; std::string path_to; } AvailableModel; typedef struct AvailableModel* AvailableModelPtr; namespace gazebo { /// \brief A plugin to control a soccer robot's omnidirectional movement, kicker and dribbler class Minho_World : public WorldPlugin { public: /// \brief Constructor Minho_World(); /// \brief Destructor virtual ~Minho_World(); /// \brief Plugin Load function /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Load(physics::WorldPtr _parent, sdf::ElementPtr _sdf); /// \brief initializes the parameters in the plugin's configuration, /// such as available models /// \param _sdf - configurations for the plugin in sdf file void initPluginConfiguration(sdf::ElementPtr _sdf); /// \brief spawns a model into a world, given its id, in relation with /// model_config_ models, defined in plugin's sdf. /// \param id - id of the type of model to be spawned (matchin) plugin configuration /// \param default_name - name of the new model void spawnModel(unsigned int id, std::string default_name); /// \brief deletes a model with a given name. /// \param name - name of the model to be deleted void deleteModel(std::string name); /// \brief receives messages in string form to add or remove a model. /// \param _msg - data received in callback, containing info about /// spawning or removing a model static void parseCommand(ConstGzStringPtr &_msg); private: // VARIABLES /// \brief Transport node used to communicate with the transport system transport::NodePtr node_; /// \brief pointer to the world object of the simulation physics::WorldPtr world_; /// \brief vector to hold the current models present in the world std::vector<physics::ModelPtr> models_; /// \brief vector of structs to hold model configuration to be able /// to configure and easily change the models that are available for /// the world. 
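/// Each entry pairs a numeric model_id with the model URI given as the element value in the plugin's SDF.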
std::vector<AvailableModelPtr> model_config_; /// \brief pointer to publish over factory topic, to add models to /// the simulation gazebo::transport::PublisherPtr factory_pub_; /// \brief pointer to subscriver over gz_string topic, to receive /// add or delete comands for models in the simulation. gazebo::transport::SubscriberPtr cmd_sub_; /// \brief mutex to synchronize threads when adding or removing models /// from world, increasing thread-safety. boost::mutex *world_mutex_; }; } #endif <file_sep>/robot_plugin/src/minho_robot.cc /* * Copyright (C) 2015-2016 Open Source Robotics Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include "minho_robot.hh" #define Y_AXIS_MULTIPLIER -1.0 using namespace gazebo; //Functions //GetWorldPose () //GetId () //GetName () //SetAngularVel (const math::Vector3 &_vel) //SetLinearVel (const math::Vector3 &_vel) //SetWorldPose (const math::Pose &_pose, bool _notify=true, bool _publish=true) // Register this plugin with the simulator GZ_REGISTER_MODEL_PLUGIN(Minho_Robot); /// \brief Constructor. Initialized deafult variables for various variables Minho_Robot::Minho_Robot() { initial_pose_ = math::Pose(0.0,-6.4*(-Y_AXIS_MULTIPLIER),0.0,0.0,0.0,0.0); linear_velocity_ = angular_velocity_ = math::Vector3(0.0,0.0,0.0); team_id_ = 0; //null is_ros_initialized_ = false; teleop_active_ = false; game_ball_in_world_ = false; poss_threshold_distance_ = 0.3; kick_requested_ = false; kick_stab_counter = 0; // Default values MAX_LIN_VEL = 2.5; MAX_ANG_VEL = 15.0; MAX_BALL_VEL = 8.0; SHOOT_ANGLE = 0.0; targetCommand = math::Vector3(0,0,0); currentCommand = lastCommand = targetCommand; BALL_MODEL_NAME = "RoboCup MSL Ball"; controlCommandsReceived = false; VISION_RANGE_RADIUS = 5.0; MAX_BACKWARDS_VEL = GRIP_DECAY = MAX_ROTATION_VEL = 30.0; } /// \brief Destructor Minho_Robot::~Minho_Robot() { for(int i=0;i<childs.size();i++)childs[i].terminate(); event::Events::DisconnectWorldUpdateBegin(_update_connection_); // Removes all callbacks from the queue. Does not wait for calls currently in progress to finish. message_queue_.clear(); // Disable the queue, meaning any calls to addCallback() will have no effect. message_queue_.disable(); message_callback_queue_thread_.join(); _node_ros_->shutdown(); delete _node_ros_; } /// \brief Plugin Load function. Initializes all ros topics for the robot model, /// also starting message queue thread. 
Connects gazebo events like world update, /// time reset and world reset /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Minho_Robot::Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf) { // Rename model to specification _model_ = _parent; _node_ = transport::NodePtr(new transport::Node()); _node_->Init(_model_->GetWorld()->GetName()); model_id_ = _model_->GetId(); // Stop simulation to prevent odd behaviors gazebo::physics::WorldPtr world = _model_->GetWorld(); world_mutex_ = world->GetSetWorldPoseMutex(); bool pauseState = world->IsPaused(); world->SetPaused(true); world->DisableAllModels(); world->EnablePhysicsEngine(false); initializePluginParameters(_sdf); autoRenameRobot(); ROS_WARN("#############################################################"); ROS_WARN("Loading Minho_Robot Plugin for '%s' ...", _model_->GetName().c_str()); if (!ros::isInitialized()){ ROS_FATAL_STREAM("A ROS node for Gazebo has not been initialized, unable to load plugin."); is_ros_initialized_ = false; } else is_ros_initialized_ = true; // Initialize ROS interface if(!is_ros_initialized_){ ROS_ERROR("Plugin ROS failed to start on '%s'.", _model_->GetName().c_str()); } else { ROS_INFO("Plugin ROS successfuly initialized."); // Initialize ROS publishers and subscribers _node_ros_ = new ros::NodeHandle(); std::stringstream control_topic; control_topic << "minho_gazebo_robot" << std::to_string(team_id_) << "/controlInfo"; std::stringstream teleop_topic; teleop_topic << "minho_gazebo_robot" << std::to_string(team_id_) << "/teleop"; ros::SubscribeOptions control_info = ros::SubscribeOptions::create<minho_team_ros::controlInfo>( control_topic.str(), 100, boost::bind( &Minho_Robot::controlInfoCallback,this,_1), ros::VoidPtr(), &this->message_queue_); control_info_sub_ = _node_ros_->subscribe(control_info); ros::SubscribeOptions teleop_info = ros::SubscribeOptions::create<minho_team_ros::teleop>( teleop_topic.str(), 100, boost::bind( &Minho_Robot::teleopCallback,this,_1), ros::VoidPtr(), &this->message_queue_); teleop_state_sub_ = _node_ros_->subscribe(teleop_info); std::stringstream robotinfo_topic; robotinfo_topic << "minho_gazebo_robot" << std::to_string(team_id_) << "/robotInfo"; robot_info_pub_ = _node_ros_->advertise<minho_team_ros::robotInfo>(robotinfo_topic.str(),100); std::stringstream kick_service_topic; kick_service_topic << "minho_gazebo_robot" << std::to_string(team_id_) << "/requestKick"; kick_service = _node_ros_->advertiseService(kick_service_topic.str(),&Minho_Robot::kickServiceCallback,this); // Custom Callback Queue Thread. Use threads to process message and service callback queue message_callback_queue_thread_ = boost::thread(boost::bind(&Minho_Robot::message_queue_thread,this)); ROS_WARN("Plugin ROS started on '%s'.", _model_->GetName().c_str()); } // Connect server update callback _update_connection_ = event::Events::ConnectWorldUpdateBegin( boost::bind(&Minho_Robot::onUpdate, this)); _reset_connection_ = event::Events::ConnectTimeReset( boost::bind(&Minho_Robot::onReset, this)); _timeres_connection_ = event::Events::ConnectWorldReset( boost::bind(&Minho_Robot::onReset, this)); getGameBallModel(); world->EnablePhysicsEngine(true); world->EnableAllModels(); world->SetPaused(pauseState); //Place the robot in the initial position regarding its team_id_ initial_pose_.pos.x = 0.8*(float)team_id_; _model_->SetWorldPose(initial_pose_); // Boots other ROS Nodes as coms, control, AI ... 
bootROSNodes(_sdf); //setupSensors(); } /// \brief Applies the desired velocities to the robot, given /// the linear velocity, direction of movement and angular velocity /// \param command - has 3 components: (1) is linear velocity [0~100] /// (2) is direction of movement [0~360] and (3) is angular velocity [-100~100] void Minho_Robot::applyVelocities(math::Vector3 command) { // Apply angular velocity if(command.z>=-100.0 && command.z<=100.0) angular_velocity_.z = mapVelocity(command.z,MAX_ANG_VEL); else angular_velocity_.z = 0; // Apply linear velocity if((command.x>=0.0 && command.x<=100.0) && (command.y>=0.0 && command.y<=360.0)) { double angle = command.y*DEG_TO_RAD+M_PI/2.0+ model_pose_.rot.GetAsEuler().z; linear_velocity_.x = mapVelocity(command.x*cos(angle),MAX_LIN_VEL); linear_velocity_.y = mapVelocity(command.x*sin(angle),MAX_LIN_VEL); } else { linear_velocity_.x = linear_velocity_.y = 0.0; } _model_->SetLinearVel(linear_velocity_); _model_->SetAngularVel(angular_velocity_); } /// \brief maps a velocity of the maximum velocity given a percentage. /// \param percentage - percentage of the maximum velocity to be applied /// \param limit - max velocity to be applied /// \return mapped value/velocity given a limit and a percentage double Minho_Robot::mapVelocity(double percentage, double limit) { return (percentage*limit)/100.0; } /// \brief gets a list of models in the world and renames itself, accordingly to the /// existing robots already spawned. void Minho_Robot::autoRenameRobot() { team_id_ = 1; physics::WorldPtr world = _model_->GetWorld(); std::vector<physics::ModelPtr> models_list = world->GetModels(); if(_model_->GetName().compare("minho_robot")==0){ std::vector<int> used_team_ids; used_team_ids.clear(); for(unsigned int id = 0; id < models_list.size(); id++){ // Generate used id's list // tag starts as : minho_robot and suffix is $(number) -> minho_robot$1 std::string mod = models_list[id]->GetName().substr(0,11); if(mod.compare("minho_robot")==0 && models_list[id]->GetName().compare(_model_->GetName())!=0){ used_team_ids.push_back(std::stoi(models_list[id]->GetName().substr(12,1),nullptr)); } } std::stringstream new_name; if(used_team_ids.size()==0){ // first model to be loaded team_id_ = 1; } else { // find available ids between 1 and 6 team_id_ = 1; std::sort (used_team_ids.begin(), used_team_ids.end()); //order used id vector for(unsigned int id = 0; id < used_team_ids.size(); id++){ if(team_id_>=used_team_ids[id]) team_id_++; else { break; } } } new_name << "minho_robot_" << std::to_string(team_id_); _model_->SetName(new_name.str()); } else { std::string tid = _model_->GetName(); char id = tid[tid.length()-1]; team_id_ = id-'0'; } // Auto numbering std::string pref = "asd"; sdf::ElementPtr original_sdf = _model_->GetSDF(); sdf::ElementPtr root_link; sdf::ElementPtr numbering_back, numbering_left, numbering_right; sdf::ElementPtr number_uri; bool root_link_found = false; root_link = original_sdf->GetElement("link"); while(!root_link_found && root_link){ if(root_link->GetAttribute("name")->GetAsString().compare("numberings")==0){ root_link_found = true; } else root_link = root_link->GetNextElement(); } // numbering back bool back_visual_found = false; numbering_back = root_link->GetElement("visual"); while(!back_visual_found && numbering_back){ if(numbering_back->HasAttribute("name") && (numbering_back->GetAttribute("name")->GetAsString().compare("numbering_back")==0)){ back_visual_found = true; } else numbering_back = numbering_back->GetNextElement(); } // Apply correct 
stl matching team_id_ if(back_visual_found) { number_uri = numbering_back->GetElement("geometry")->GetElement("mesh")->GetElement("uri"); std::stringstream new_uri; new_uri << "model://minhoteam_msl_robot/meshes/nr_" << std::to_string(team_id_)<< ".stl"; number_uri->GetValue()->SetFromString(new_uri.str()); } // numbering left bool left_visual_found = false; numbering_left = root_link->GetElement("visual"); while(!left_visual_found && numbering_left){ if(numbering_left->HasAttribute("name") && (numbering_left->GetAttribute("name")->GetAsString().compare("numbering_left")==0)){ left_visual_found = true; } else numbering_left = numbering_left->GetNextElement(); } // Apply correct stl matching team_id_ if(left_visual_found) { number_uri = numbering_left->GetElement("geometry")->GetElement("mesh")->GetElement("uri"); std::stringstream new_uri; new_uri << "model://minhoteam_msl_robot/meshes/nr_" << std::to_string(team_id_)<< ".stl"; number_uri->GetValue()->SetFromString(new_uri.str()); } // numbering right bool right_visual_found = false; numbering_right = root_link->GetElement("visual"); while(!right_visual_found && numbering_right){ if(numbering_right->HasAttribute("name") && (numbering_right->GetAttribute("name")->GetAsString().compare("numbering_right")==0)){ right_visual_found = true; } else numbering_right = numbering_right->GetNextElement(); } // Apply correct stl matching team_id_ if(right_visual_found) { number_uri = numbering_right->GetElement("geometry")->GetElement("mesh")->GetElement("uri"); std::stringstream new_uri; new_uri << "model://minhoteam_msl_robot/meshes/nr_" << std::to_string(team_id_)<< ".stl"; number_uri->GetValue()->SetFromString(new_uri.str()); } // Update model's SDF _model_->Update(); } /// \brief called by event signal every server simulation iteration. 
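/// It also reaps finished child processes (waitpid with WNOHANG) and zeroes the applied velocities when controlInfo commands stop arriving.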
Used to reset /// parameters like velocities and others void Minho_Robot::onUpdate() { static int kick_timer = 0; static int timeout_counter = 0; if(kick_stab_counter<=30) { kick_stab_counter++; kick_requested_ = false; } // Check and kill defunct child processes int status; pid_t p; for(int i=0;i<childs.size();i++){ p = waitpid(childs[i].get_id(), &status, WNOHANG); } // lock resources if(!controlCommandsReceived) timeout_counter++; else timeout_counter = 0; if(timeout_counter>1000 && !controlCommandsReceived) timeout_counter = 5; control_info_mutex_.lock(); model_pose_ = _model_->GetWorldPose(); // get robot position model_pose_.pos.y *= Y_AXIS_MULTIPLIER; getGameBallModel(); // finds and computes distance to game ball if(timeout_counter<5){ // Apply defined velocities to the robot //_model_->SetLinearVel(linear_velocity_); //_model_->SetAngularVel(angular_velocity_); applyVelocities(getAccelDeccelVelocity()); // Activate dribbling algorithm, if the ball is in possession and // dribling is activated if(kick_requested_ && has_game_ball_) kick_timer = 10; else if(kick_timer>0) kick_timer--; if(dribblers_on_ && has_game_ball_ && (kick_timer<=0)) dribbleGameBall(); if(kick_requested_ && has_game_ball_ && kick_stab_counter>30) { kickGameBall(kick_is_pass_,kick_force_,kick_dir_); kick_stab_counter = 0; } }else { linear_velocity_.x = linear_velocity_.y = angular_velocity_.x = angular_velocity_.y = 0; targetCommand = math::Vector3(0,0,0); currentCommand = lastCommand = targetCommand; _model_->SetLinearVel(linear_velocity_); _model_->SetAngularVel(angular_velocity_); } // Publish robotInfo over ROS publishRobotInfo(); controlCommandsReceived = false; last_state = current_state; // unlock resources control_info_mutex_.unlock(); } /// \brief called by event signal when a model or server reset happens void Minho_Robot::onReset() { _model_->SetWorldPose(initial_pose_); } /// \brief callback to receive ROS messages published over the matching ros topic, /// in order to retrieve data about comands for robot motion. 
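/// Commands are applied only when the message's is_teleop flag matches the robot's current teleop state; otherwise they are ignored.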
void Minho_Robot::controlInfoCallback(const controlInfo::ConstPtr& msg) { // lock resources control_info_mutex_.lock(); if(teleop_active_){ if(msg->is_teleop) { //Apply robot velocities linear_vel_ = msg->linear_velocity; mov_direction_ = 360-msg->movement_direction; angular_vel_ = msg->angular_velocity; //applyVelocities(math::Vector3(msg->linear_velocity, 360-msg->movement_direction, msg->angular_velocity)); targetCommand = math::Vector3(msg->linear_velocity, 360-msg->movement_direction, msg->angular_velocity); lastCommand = currentCommand; //Apply dribbling dribblers_on_ = msg->dribbler_on; controlCommandsReceived = true; } } else { // Autonomous if(!msg->is_teleop) { //Apply robot velocities linear_vel_ = msg->linear_velocity; mov_direction_ = 360-msg->movement_direction; angular_vel_ = msg->angular_velocity; //applyVelocities(math::Vector3(msg->linear_velocity, 360-msg->movement_direction, msg->angular_velocity)); targetCommand = math::Vector3(msg->linear_velocity, 360-msg->movement_direction, msg->angular_velocity); lastCommand = currentCommand; //Apply dribbling dribblers_on_ = msg->dribbler_on; controlCommandsReceived = true; } } // unlock resources control_info_mutex_.unlock(); } math::Vector3 Minho_Robot::getAccelDeccelVelocity() { float deltavx = -constDeccel, deltavr = -constDeccel; float difx = targetCommand.x-lastCommand.x, dify = targetCommand.y-lastCommand.y, difz = targetCommand.z-lastCommand.z; /*if(targetCommand.y!=lastCommand.y) currentCommand.x = 0; if(difx!=0){ if(difx>0) deltavx = constAccel; currentCommand.x += deltavx; if(deltavx>0 && currentCommand.x>targetCommand.x) currentCommand.x = targetCommand.x; if(targetCommand.x!=0) { currentCommand.y = targetCommand.y; ROS_INFO("Here"); } } else currentCommand.x = targetCommand.x;*/ currentCommand.x = targetCommand.x; currentCommand.y = targetCommand.y; if(difz!=0){ if(difz>0) deltavr = constAccel; currentCommand.z += deltavr; if(deltavr>0 && currentCommand.z>targetCommand.z) currentCommand.z = targetCommand.z; } else currentCommand.z = targetCommand.z; return currentCommand; } /// \brief function to actuate kicker, in order to kick the ball. Only kicks if /// the robot detects that has the ball inside /// \param req - request data received in requestKick service /// \param res - response data, flaggin if the kick was taken or not bool Minho_Robot::kickServiceCallback(requestKick::Request &req,requestKick::Response &res) { //Apply ball kicking if(req.kick_strength>0){ kick_requested_ = true; kick_force_ = req.kick_strength; kick_dir_ = req.kick_direction; kick_is_pass_ = req.kick_is_pass; dribblers_on_ = false; } res.kicked = has_game_ball_; return true; } /// \brief callback to receive ROS messages published over the matching ros topic, /// in order to retrieve data about comands for robot motion. void Minho_Robot::teleopCallback(const teleop::ConstPtr& msg) { // lock resources tele_op_mutex_.lock(); teleop_active_ = msg->set_teleop; if(teleop_active_)ROS_INFO("Teleop activated for '%s'.",_model_->GetName().c_str()); else { ROS_INFO("Teleop deactivated for '%s'.",_model_->GetName().c_str()); linear_velocity_ = angular_velocity_ = math::Vector3(0.0,0.0,0.0); kick_requested_ = false; } // unlock resources tele_op_mutex_.unlock(); } /// \brief thread to queue incoming data to callback queue, that subsequently calls /// the respective callback, passing the matching data to it. 
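/// Runs until the ROS node shuts down, servicing the custom callback queue with a 10 ms timeout.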
void Minho_Robot::message_queue_thread() { static const double timeout = 0.01; while (_node_ros_->ok()) { // Invoke all callbacks currently in the queue. If a callback was not ready to be called, // pushes it back onto the queue. This version includes a timeout which lets you specify // the amount of time to wait for a callback to be available before returning. message_queue_.callAvailable(ros::WallDuration(timeout)); } } /// \brief searches for game ball inside the world and stores a pointer to the model void Minho_Robot::getGameBallModel() { physics::WorldPtr world = _model_->GetWorld(); _ball_ = world->GetModel(BALL_MODEL_NAME); if(!_ball_) { has_game_ball_ = false; game_ball_in_world_ = false;} else { ball_pose_ = _ball_->GetWorldPose(); // Compute center position ball_pose_.pos.y *= Y_AXIS_MULTIPLIER; detectBallPossession(); game_ball_in_world_ = true; } } /// \brief runs ball detection "sensor", using ball and robot pose's, computing the /// distance between them, comparing with a threshold. void Minho_Robot::detectBallPossession() { has_game_ball_ = false; ignition::math::Vector2<float> ball_position = ignition::math::Vector2<float>((float)ball_pose_.pos.x,(float)ball_pose_.pos.y); if(ball_pose_.pos.z>=-0.2 && ball_pose_.pos.z<= 0.22){ // If the ball is on game floor distance_to_ball_ = ball_position.Distance(ignition::math::Vector2<float>((float)model_pose_.pos.x,(float)model_pose_.pos.y)); double robot_orientation = -model_pose_.rot.GetAsEuler().z+M_PI; while(robot_orientation>(2.0*M_PI)) robot_orientation -= (2.0*M_PI); while(robot_orientation<0) robot_orientation += (2.0*M_PI); robot_orientation *= (180.0/M_PI); double direction = std::atan2(ball_pose_.pos.y-model_pose_.pos.y,ball_pose_.pos.x-model_pose_.pos.x)*(180.0/M_PI)-robot_orientation; while(direction<0) direction += 360.0; while(direction>360.0) direction -= 360.0; // Has to be in reach of the imaginary grabbers if((distance_to_ball_<= poss_threshold_distance_) && (direction>=60&&direction<=120)) has_game_ball_ = true; else has_game_ball_ = false; } } /// \brief creates a ROS message, updates all the information and sends it through the /// publisher void Minho_Robot::publishRobotInfo() { // Robot pose current_state.robot_pose.x = model_pose_.pos.x; current_state.robot_pose.y = model_pose_.pos.y; current_state.robot_pose.z = -model_pose_.rot.GetAsEuler().z+M_PI; // Orientation transposed to values used in our referential // Ball position if(game_ball_in_world_ && distance_to_ball_<=VISION_RANGE_RADIUS) { // Add noise double min = 0.0, max = 0.5; double ratio = distance_to_ball_/VISION_RANGE_RADIUS; if(ratio <= 0.3) {min = 0.0; max = 0.4;} else if(ratio <= 0.6) {min = 0.0; max = 0.5;} else if(ratio <= 0.8) {min = 0.05; max = 0.6;} else if(ratio <= 1.0) {min = 0.2; max = 0.8;} double error = generateNoise(0.0,0.25,min,max); double direction = std::atan2(ball_pose_.pos.y-model_pose_.pos.y ,ball_pose_.pos.x-model_pose_.pos.x); current_state.ball_position.x = ball_pose_.pos.x+error*cos(direction); current_state.ball_position.y = ball_pose_.pos.y+error*sin(direction); current_state.ball_position.z = 0.11; current_state.sees_ball = true; } else current_state.sees_ball = false; // Ball sensor if(has_game_ball_) current_state.has_ball = 1; else current_state.has_ball = 0; // Imu while(current_state.robot_pose.z>(2.0*M_PI)) current_state.robot_pose.z -= (2.0*M_PI); while(current_state.robot_pose.z<0) current_state.robot_pose.z += (2.0*M_PI); current_state.robot_pose.z = current_state.robot_pose.z*(180.0/M_PI); // Obstacles 
current_state.obstacles = detectObstacles(); //mockObstacleDetection();
   computeVelocities();
   if(robot_info_pub_) robot_info_pub_.publish(current_state);
}

/// \brief dribbles the ball, given the velocity vector of the robot
void Minho_Robot::dribbleGameBall()
{
   double grip_force = 1.0;
   double linear_grip_reducer = 0.0;
   // linear movement
   if(mov_direction_>=0.0 && mov_direction_ <= 60.0) linear_grip_reducer = 0.0;
   else if(mov_direction_>=300.0 && mov_direction_ <= 360.0) linear_grip_reducer = 0.0;
   else {
      if(linear_vel_<MAX_BACKWARDS_VEL) linear_grip_reducer = 0.0;
      else {
         linear_grip_reducer = (1.0/GRIP_DECAY)*linear_vel_-(MAX_BACKWARDS_VEL/GRIP_DECAY);
      }
   }
   if(linear_grip_reducer>1.0) linear_grip_reducer = 1.0;

   double rotation_grip_reducer = 0.0;
   // rotational movement
   if(abs(angular_vel_)<MAX_ROTATION_VEL) rotation_grip_reducer = 0.0;
   else {
      rotation_grip_reducer = (1.0/GRIP_DECAY)*abs(angular_vel_)-(MAX_ROTATION_VEL/GRIP_DECAY);
   }
   if(linear_grip_reducer>1.0) linear_grip_reducer = 1.0;
   if(rotation_grip_reducer>1.0) rotation_grip_reducer = 1.0;
   grip_force -= (linear_grip_reducer+rotation_grip_reducer);

   // max grip 2/3 of ball inside robot, minimum grip 1/3 of ball inside robot
   math::Vector3 robot_position = math::Vector3((float)model_pose_.pos.x,(float)model_pose_.pos.y*Y_AXIS_MULTIPLIER,0.0);
   double robot_heading = model_pose_.rot.GetAsEuler().z+(float)M_PI/2.0;
   double distance_null = 0.21326;
   double distance = distance_null+((-0.14674*grip_force)+0.14674);
   math::Vector3 ball_position = robot_position + math::Vector3(distance*cos(robot_heading),distance*sin(robot_heading),0.11);
   _ball_->SetWorldPose(math::Pose(ball_position,math::Quaternion(0,0,0,0)));

   if(mov_direction_>60 && mov_direction_<300){
      double release_velocity = 0.05*mapVelocity(linear_vel_,MAX_ANG_VEL);
      _ball_->SetLinearVel(math::Vector3(-release_velocity*cos(robot_heading),-release_velocity*sin(robot_heading),0.0));
   } else _ball_->SetLinearVel(math::Vector3(0,0,0)); // otherwise let the ball suffer forces
}

/// \brief kicks the ball, through the floor (passing) or the air (shooting)
/// allowing a slight variation in the direction of kicking
/// \param pass - defines whether the kick is a pass or not
/// \param strength - defines the strength of the kick
/// \param direction - defines the direction of the kick
void Minho_Robot::kickGameBall(bool pass, int strength, int direction)
{
   math::Vector3 kicking_vector;
   if(pass){
      double magnitude = mapVelocity(strength,MAX_BALL_VEL);
      // use a distinct local name so the parameter is not shadowed by its own initializer
      double kick_dir = mapVelocity(direction,20.0);
      double robot_heading = model_pose_.rot.GetAsEuler().z+(float)M_PI/2.0;
      kicking_vector.x = magnitude*cos(-kick_dir+robot_heading);
      kicking_vector.y = magnitude*sin(-kick_dir+robot_heading);
      kicking_vector.z = 0.0;
   } else {
      double shooting_angle = SHOOT_ANGLE;
      double magnitude = mapVelocity(strength,MAX_BALL_VEL);
      double magnitude_front = magnitude*cos(shooting_angle*DEG_TO_RAD);
      // same fix as above: avoid shadowing the 'direction' parameter
      double kick_dir = mapVelocity(direction,20.0);
      double robot_heading = model_pose_.rot.GetAsEuler().z+(float)M_PI/2.0;
      kicking_vector.x = magnitude_front*cos(-kick_dir+robot_heading);
      kicking_vector.y = magnitude_front*sin(-kick_dir+robot_heading);
      kicking_vector.z = magnitude*sin(shooting_angle*DEG_TO_RAD);
   }
   _ball_->SetLinearVel(kicking_vector);
   kick_requested_ = false;
}

/// \brief reads and parses the elements passed to the plugin inside the model's sdf
/// and initializes the matching variables
/// \param _sdf - pointer to the sdf element containing the plugin's parameters
void Minho_Robot::initializePluginParameters(sdf::ElementPtr _sdf)
{
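   // For reference, a minimal sketch of how the <plugin> block read by this function
   // might look inside the robot model's SDF. The element names match exactly what is
   // parsed below (and the <ros_boot> layout parsed in bootROSNodes()); the concrete
   // values, plugin name and boot node entries are only illustrative assumptions and
   // not the team's official configuration.
   //
   //   <plugin name="minho_robot_plugin" filename="libminho_robot.so">
   //      <max_linear_velocity>2.5</max_linear_velocity>
   //      <max_angular_velocity>15.0</max_angular_velocity>
   //      <max_ball_velocity>8.0</max_ball_velocity>
   //      <ball_kick_angle>45.0</ball_kick_angle>
   //      <ball_model_name>football</ball_model_name>
   //      <vision_range_radius>5.0</vision_range_radius>
   //      <max_backwards_grip_vel>30.0</max_backwards_grip_vel>
   //      <max_rotation_grip_vel>30.0</max_rotation_grip_vel>
   //      <grip_decay>20.0</grip_decay>
   //      <accel>2.0</accel>
   //      <deccel>4.0</deccel>
   //      <ros_boot>
   //         <node>
   //            <package>minho_team_tools</package>   <!-- assumed package name -->
   //            <name>teleop_node</name>              <!-- assumed node name -->
   //            <flags>
   //               <flag>$ID</flag>                   <!-- expanded to team_id_ by assertFlagValue() -->
   //            </flags>
   //         </node>
   //      </ros_boot>
   //   </plugin>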
if(_sdf->HasElement("max_linear_velocity")){ _sdf->GetElement("max_linear_velocity")->GetValue()->Get(MAX_LIN_VEL); } else ROS_WARN("No maximum linear velocity parameter defined in plugin's SDF"); if(_sdf->HasElement("max_angular_velocity")){ _sdf->GetElement("max_angular_velocity")->GetValue()->Get(MAX_ANG_VEL); } else ROS_WARN("No maximum angular velocity parameter defined in plugin's SDF"); if(_sdf->HasElement("max_ball_velocity")){ _sdf->GetElement("max_ball_velocity")->GetValue()->Get(MAX_BALL_VEL); } else ROS_WARN("No maximum ball velocity parameter defined in plugin's SDF"); if(_sdf->HasElement("ball_kick_angle")){ _sdf->GetElement("ball_kick_angle")->GetValue()->Get(SHOOT_ANGLE); } else ROS_WARN("No ball kicking angle parameter defined in plugin's SDF"); if(_sdf->HasElement("ball_model_name")){ _sdf->GetElement("ball_model_name")->GetValue()->Get(BALL_MODEL_NAME); } else ROS_WARN("No ball model name parameter defined in plugin's SDF"); if(_sdf->HasElement("vision_range_radius")){ _sdf->GetElement("vision_range_radius")->GetValue()->Get(VISION_RANGE_RADIUS); } else ROS_WARN("No vision range radius parameter defined in plugin's SDF"); if(_sdf->HasElement("max_backwards_grip_vel")){ _sdf->GetElement("max_backwards_grip_vel")->GetValue()->Get(MAX_BACKWARDS_VEL); } else ROS_WARN("No max grip backwards velocity parameter defined in plugin's SDF"); if(_sdf->HasElement("max_rotation_grip_vel")){ _sdf->GetElement("max_rotation_grip_vel")->GetValue()->Get(MAX_ROTATION_VEL); } else ROS_WARN("No max grip rotation velocity parameter defined in plugin's SDF"); if(_sdf->HasElement("grip_decay")){ _sdf->GetElement("grip_decay")->GetValue()->Get(GRIP_DECAY); } else ROS_WARN("No grip decay parameter defined in plugin's SDF"); if(_sdf->HasElement("accel")){ _sdf->GetElement("accel")->GetValue()->Get(constAccel); } else ROS_WARN("No acceleration parameter defined in plugin's SDF"); if(_sdf->HasElement("deccel")){ _sdf->GetElement("deccel")->GetValue()->Get(constDeccel); } else ROS_WARN("No decceleration parameter defined in plugin's SDF"); } /// \brief generates random noise to add gaussian noise to a variable read from the /// world like ball position and obstacles position /// \param mean - mean of the error to be generated /// \param stdev - standard deviation of the error to /// \param min - minimum value of the output value /// \param max - maximum value of the output value /// \return generated noise value double Minho_Robot::generateNoise(double mean, double stdev, double min, double max) { double noise = 0; unsigned seed = std::chrono::system_clock::now().time_since_epoch().count(); std::default_random_engine generator (seed); std::normal_distribution<double> distribution (mean,stdev); do{ noise = distribution(generator); }while(noise<=min || noise>=max); return noise; } /// \brief detects obstacles in the view range, whether they being friendly or foe, /// at this point, in reality, the robot doesn't distinguish between friends or foes /// return vector of positions containing the position of the detected obstalces std::vector<minho_team_ros::obstacle> Minho_Robot::detectObstacles() { std::vector<minho_team_ros::obstacle>obstacles; obstacles.clear(); physics::WorldPtr world = _model_->GetWorld(); std::vector<physics::ModelPtr> models_list = world->GetModels(); minho_team_ros::obstacle obs; double min = 0.0, max = 0.5; double ratio = 0.0; int type; bool isObstacle; for(unsigned int id = 0; id < models_list.size(); id++){ // Generate used id's list isObstacle = false; std::string mod = 
models_list[id]->GetName().substr(0,11); if(mod.compare("minho_robot")==0){isObstacle=true;type=0;} if(mod.compare("other_robot")==0){isObstacle=true;type=1;} if(isObstacle&& models_list[id]->GetName().compare(_model_->GetName())!=0){ math::Pose obstacle_pose = models_list[id]->GetWorldPose(); obstacle_pose.pos.y *= Y_AXIS_MULTIPLIER; math::Vector3 robot_position = math::Vector3((float)model_pose_.pos.x,(float)model_pose_.pos.y,0.0); double distance = robot_position.Distance(obstacle_pose.pos.x, obstacle_pose.pos.y, 0.0); if(distance <= VISION_RANGE_RADIUS){ ratio = distance/VISION_RANGE_RADIUS; if(ratio <= 0.3) {min = 0.0; max = 0.03;} else if(ratio <= 0.6) {min = 0.0; max = 0.05;} else if(ratio <= 0.8) {min = 0.05; max = 0.2;} else if(ratio <= 1.0) {min = 0.2; max = 0.3;} // Add gaussian noise double error = generateNoise(0.0,0.25,min,max); double direction = std::atan2(obstacle_pose.pos.y-model_pose_.pos.y,obstacle_pose.pos.x-model_pose_.pos.x); obs.x = obstacle_pose.pos.x+error*cos(direction); obs.y = obstacle_pose.pos.y+error*sin(direction); obs.isenemy = type; obstacles.push_back(obs); } } } return obstacles; } /// \brief computes both ball and robot velocities to send in robotInfo using /// a simple low pass filter approach to reduce noisy estimates void Minho_Robot::computeVelocities() { int it_limit = 4; static int iteration = it_limit; float time_interval = 0.03125; if(iteration<(it_limit-1)) iteration++; else { iteration = 0; float lpf_weight = 0.9; float lpf_minor = 1-lpf_weight; // First, compute the robot velocities based on final localization estimate // ######################################################################## // Time interval between estimates is requiredTiming = 33ms/30Hz current_state.robot_velocity.x = lpf_weight*((current_state.robot_pose.x-last_vel_state.robot_pose.x)/(time_interval*(float)it_limit))+lpf_minor*last_vel_state.robot_velocity.x; current_state.robot_velocity.y = lpf_weight*((current_state.robot_pose.y-last_vel_state.robot_pose.y)/(time_interval*(float)it_limit))+lpf_minor*last_vel_state.robot_velocity.y; current_state.robot_velocity.w = lpf_weight*((current_state.robot_pose.z-last_vel_state.robot_pose.z)/(time_interval*(float)it_limit))+lpf_minor*last_vel_state.robot_velocity.z; // ######################################################################## if(current_state.robot_velocity.x>2.5) current_state.robot_velocity.x = 0; if(current_state.robot_velocity.y>2.5) current_state.robot_velocity.y = 0; lpf_weight = 0.8; if(distance_to_ball_>(2.0*VISION_RANGE_RADIUS)/3.0) lpf_weight = 0.4; lpf_minor = 1-lpf_weight; current_state.ball_velocity.x = lpf_weight*((current_state.ball_position.x-last_vel_state.ball_position.x)/(time_interval*(float)it_limit))+lpf_minor*last_vel_state.ball_velocity.x; current_state.ball_velocity.y = lpf_weight*((current_state.ball_position.y-last_vel_state.ball_position.y)/(time_interval*(float)it_limit))+lpf_minor*last_vel_state.ball_velocity.y; current_state.ball_velocity.z = lpf_weight*((current_state.ball_position.z-last_vel_state.ball_position.z)/(time_interval*(float)it_limit))+lpf_minor*last_vel_state.ball_velocity.z; last_vel_state = current_state; } } /// \brief boots indicated ROS nodes to add functionalities to the model /// This nodes have to be specified in plugin's SDF correctly, naming the /// package, node names, flags and placed in boot order /// \param _sdf - sdf struct containing plugin's data, containing Robot parameters /// and ROS boot node info void Minho_Robot::bootROSNodes(sdf::ElementPtr 
_sdf) { ROS_WARN("Booting complementary ROS Nodes ..."); childs.clear(); std::string executable_name = "rosrun"; sdf::ElementPtr boot_list; if(_sdf->HasElement("ros_boot")){ boot_list = _sdf->GetElement("ros_boot"); } else { ROS_WARN("No ROS Node Boot List found."); return; } sdf::ElementPtr node; if(boot_list->HasElement("node")){ node = boot_list->GetFirstElement(); while(node){ std::string package_name = ""; std::string node_name = ""; std::vector<std::string> flags_str; // Parse information if(node->HasElement("package")){ package_name = node->GetElement("package")->GetValue()->GetAsString(); if(node->HasElement("name")){ node_name = node->GetElement("name")->GetValue()->GetAsString(); flags_str.clear(); sdf::ElementPtr flags; if(node->HasElement("flags")){ flags = node->GetElement("flags"); sdf::ElementPtr flag; flag = flags->GetFirstElement(); while(flag){ flags_str.push_back(assertFlagValue(flag->GetValue()->GetAsString())); flag = flag->GetNextElement(); } } else ROS_WARN("No flags in Boot List for %s", node_name.c_str()); //Add defined node to boot list //ROS_Boot_List std::vector<std::string> args; std::string exe = boost::process::find_executable_in_path(executable_name); args.push_back(package_name); args.push_back(node_name); ROS_INFO("Booting %s %s",package_name.c_str(), node_name.c_str()); ROS_INFO(" Flags:"); for(int i=0;i<flags_str.size();i++) { ROS_INFO(" %s",flags_str[i].c_str()); args.push_back(flags_str[i]); } childs.push_back(boost::process::create_child(exe,args)); } else ROS_ERROR("Error in boot node definition, missing name."); } else ROS_ERROR("Error in boot node definition, missing package."); node = node->GetNextElement(); } } else { ROS_WARN("No ROS Nodes found in Boot List."); return; } ROS_INFO("ROS Node Boot complete."); ROS_WARN("#############################################################"); } /// \brief asserts flag value for ros node booting procedure. 
It can translate values
/// started with '$' with plugin run time variables
/// \param value - initial parameter value to be asserted
/// \return - string with the asserted parameter
std::string Minho_Robot::assertFlagValue(std::string value)
{
   if((value.size()>=1) && (value[0]!='$')) return value;
   if(!strcmp(value.c_str(),"$ID")) { return std::to_string(team_id_); }
   ROS_ERROR("Error in flag assert.");
   return "";
}

/// \brief sets up model sensors, performing detection and type identification
void Minho_Robot::setupSensors()
{
   // Check model sensors
   sensors_.clear();
   gazebo::sensors::SensorManager *manager = gazebo::sensors::SensorManager::Instance();
   if(manager){
      std::vector<gazebo::sensors::SensorPtr> sensors = manager->GetSensors();
      for(int i=0;i<sensors.size();i++){
         if(!_model_->GetName().compare(sensors[i]->ParentName().substr(0,sensors[i]->ParentName().find(":")))){
            sensors_.push_back(sensors[i]);
         }
      }
   }
   ROS_INFO("Found %lu sensors attached to %s",sensors_.size(),_model_->GetName().c_str());

   // Look for obstacle_detector sensor first
   for(int i=0;i<sensors_.size();i++){
      // compare() returns 0 on a match, so test for equality explicitly
      if(sensors_[i]->Name().compare("obstacle_detector")==0){
         obstacle_detector = dynamic_cast<gazebo::sensors::RaySensor *>(sensors_[i].get());
      }
   }
}

/// \brief maps a detected point relative to the robot to the world position
/// \param robot - position of the robot in the field, in meters
/// \param robot_heading - heading of the robot in º
/// \param dist - detected distance in meters
/// \param theta - angle of the detected point in relation to world's 0º
/// \return - position of the mapped point in meters
position Minho_Robot::mapPointToWorld(position robot, float robot_heading, float dist, float theta)
{
   double pointRelX = dist*cos(theta);
   double pointRelY = dist*sin(theta);
   position mappedPoint;
   mappedPoint.x = robot.x-cos(robot_heading)*pointRelX-sin(robot_heading)*pointRelY;
   mappedPoint.y = robot.y-sin(robot_heading)*pointRelX+cos(robot_heading)*pointRelY;
   return mappedPoint;
}
<file_sep>/MinhoSimulator/include/action_dialog.h
#ifndef ACTIONDIALOG_H
#define ACTIONDIALOG_H

#include <QWidget>
#include <QDialog>
#include <stdlib.h>
#include <QMessageBox>
#include <QDebug>
#include <QFileDialog>

namespace Ui {
class Form;
}

typedef enum ACTION {INVALID=0,NEW,OPEN,REPLAY,EXIT} ACTION;

class ActionDialog : public QDialog
{
   Q_OBJECT

public:
   explicit ActionDialog(ACTION *act, QString *file, QDialog *parent = 0);
   ~ActionDialog();

private slots:
   void on_bt_newsim_clicked();
   void on_bt_opensim_clicked();
   void on_bt_replay_clicked();
   void on_bt_exit_clicked();
   void show_not_implemented();

private:
   void setupUI();

private:
   Ui::Form *ui;
   QString *filepath;
   QString home;
   ACTION *action;
};

#endif // WIDGET_H
<file_sep>/MinhoSimulator/src/replay.cpp
#include "replay.h"

Replay::Replay(int *ret, QString filepath)
{
   ifstream infile;
   infile.open(filepath.toStdString().c_str());
   if(!infile.is_open()) { (*ret) = 0; return; } // 0 signals failure
   string replay((istreambuf_iterator<char>(infile)), istreambuf_iterator<char>());
   infile.close();

   Document document;
   char *cstr = new char[replay.length() + 1];
   strcpy(cstr, replay.c_str());
   if (document.ParseInsitu(cstr).HasParseError() || !document.IsObject()){ (*ret) = 0; return; }else {
      if(document.HasMember("Replay_Settings")){
         GenericValue<UTF8<> > settings;
         settings = document["Replay_Settings"].GetObject(); // Go to Replay_Settings
         if(settings.HasMember("replay_name") && settings["replay_name"].IsString()) replay_name = QString::fromStdString(settings["replay_name"].GetString());
         else
{(*ret) = 0; return;}
         if(settings.HasMember("replay_date") && settings["replay_date"].IsString()) replay_date = QString::fromStdString(settings["replay_date"].GetString());
         else {(*ret) = 0; return;}
         if(settings.HasMember("replay_frequency") && settings["replay_frequency"].IsNumber()) replay_frequency = settings["replay_frequency"].GetInt64();
         else {(*ret) = 0; return;}
         if(settings.HasMember("replay_total_samples") && settings["replay_total_samples"].IsNumber()) replay_total_samples = settings["replay_total_samples"].GetInt64();
         else {(*ret) = 0; return;}
      } else {(*ret) = 0; return;}
   }

   (*ret) = 1;
}

Replay::~Replay()
{

}
<file_sep>/other_plugin/src/other_robot.cc
/*
 * Copyright (C) 2015-2016 Open Source Robotics Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/
#include "other_robot.hh"

#define Y_AXIS_MULTIPLIER -1.0

using namespace gazebo;

//Functions
//GetWorldPose ()
//GetId ()
//GetName ()
//SetAngularVel (const math::Vector3 &_vel)
//SetLinearVel (const math::Vector3 &_vel)
//SetWorldPose (const math::Pose &_pose, bool _notify=true, bool _publish=true)

// Register this plugin with the simulator
GZ_REGISTER_MODEL_PLUGIN(Other_Robot);

/// \brief Constructor. Initializes default values for various variables
Other_Robot::Other_Robot()
{

}

/// \brief Destructor
Other_Robot::~Other_Robot()
{

}

/// \brief Plugin Load function. Initializes all ros topics for the robot model,
/// also starting message queue thread.
Connects gazebo events like world update, /// time reset and world reset /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Other_Robot::Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf) { // Rename model to specification _model_ = _parent; _node_ = transport::NodePtr(new transport::Node()); _node_->Init(_model_->GetWorld()->GetName()); // Stop simulation to prevent odd behaviors gazebo::physics::WorldPtr world = _model_->GetWorld(); int team_id_ = 0; ROS_INFO("ModelName: %s",_model_->GetName().c_str()); if(_model_->GetName().compare("other_robot")==0)team_id_=0; else team_id_=std::stoi(_model_->GetName().substr(12,12))+1; ROS_INFO("Team ID: %d",team_id_); //Place the robot in the initial position regarding its team_id_ initial_pose_.pos.x = -0.8*(float)team_id_; initial_pose_.pos.y = 6.4; _model_->SetWorldPose(initial_pose_); } <file_sep>/MinhoSimulator/CMakeLists.txt cmake_minimum_required(VERSION 2.8.3) project(minhosimulator C CXX) ## Find catkin macros and libraries ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) ## is used, also find other catkin packages find_package(catkin REQUIRED COMPONENTS roscpp std_msgs message_generation sensor_msgs genmsg minho_team_ros cv_bridge image_transport ) set(PROJECT_DIRECTORY ${minhosimulator_SOURCE_DIR}) include_directories(${catkin_INCLUDE_DIRS} ${Boost_INCLUDE_DIR}) #Linking steps for Qt Libraries ######################################## find_package(Qt5 COMPONENTS Core Widgets Gui REQUIRED) add_definitions (${QT_DEFINITIONS}) set(CMAKE_INCLUDE_CURRENT_DIR ON) set(CMAKE_AUTOMOC ON) set(CMAKE_AUTOUIC ON) set(CMAKE_AUTORCC ON) set(qt_LIBRARIES Qt5::Widgets Qt5::Core Qt5::Gui) include_directories(${Qt5Widgets_INCLUDE_DIRS}) include_directories(${Qt5Core_INCLUDE_DIRS}) include_directories(${Qt5Gui_INCLUDE_DIRS}) ######################################## ###################################################################### include_directories(${PROJECT_DIRECTORY}/libs) set(MSIM_SRCDIR ${PROJECT_DIRECTORY}/src) set(msim_MOC_H ${PROJECT_DIRECTORY}/include/action_dialog.h ${PROJECT_DIRECTORY}/include/replay_window.h) qt5_wrap_cpp(msim_CPP ${msim_MOC_H}) set(msim_SRC ${MSIM_SRCDIR}/main.cpp ${MSIM_SRCDIR}/action_dialog.cpp ${MSIM_SRCDIR}/replay_window.cpp ${MSIM_SRCDIR}/replay.cpp) add_executable(minhosimulator ${msim_SRC} ${msim_CPP}) target_include_directories(minhosimulator PUBLIC $<BUILD_INTERFACE:${PROJECT_DIRECTORY}/include>) set_target_properties(minhosimulator PROPERTIES COMPILE_FLAGS "-fPIC") target_compile_features(minhosimulator PRIVATE cxx_range_for) target_link_libraries(minhosimulator ${catkin_LIBRARIES} ${qt_LIBRARIES}) add_dependencies(minhosimulator minho_team_ros_generate_messages_cpp) ###################################################################### <file_sep>/MinhoSimulator/include/replay_window.h #ifndef REPLAYWINDOW_H #define REPLAYWINDOW_H #include <QWidget> #include <QDebug> #include <QDialog> #include <QMessageBox> #include <QTimer> #include "replay.h" namespace Ui { class Window; } class ReplayWindow : public QDialog { Q_OBJECT public: explicit ReplayWindow(QString file, QDialog *parent = 0); ~ReplayWindow(); private slots: void on_hbar_time_valueChanged(int value); void on_bt_startpause_clicked(); void on_bt_restart_clicked(); void on_bt_movback_clicked(); void on_bt_movforw_clicked(); void renderTrigger(); private: void setupUI(); void renderReplayFrame(int frame_id); private: Ui::Window *ui; QString home; bool 
replay_running; Replay *replay; QString total_seconds; long int current_rendered_frame; unsigned int total_frames; QTimer *render_timer; }; #endif // WIDGET_H <file_sep>/models/bnr_oneA/misc-data.md totalmass = 1.045kg ##wheel *2 mass = 30g diameter = 65mm length = 26.5mm ##front wheel *1 mass = 50g length = 45.3mm width = 21.45mm height = 25.32mm wheelcenter = 20mm wheeldiameter = 32mm wheelthickness = 14.3mm ##circuit board *1 spacers = 14mm mass = 160g width = 102.50mm length = 180mm Round corners ##battery *1 mass = 195g width = 49mm length = 70.6mm height = 28.5mm ##motor *2 mass = 212g width = 35mm widthsup = 40mm length = 62.5mm height = 41mm axislength = 20mm distfrombase = 15.5mm ##base *1 mass = 35g width = 140mm length = 205mm height = 4mm Center of Mass is 1.025728 -0.700027 0.020000 Inertia Tensor is : | 0.015998 -0.000002 -0.000000 | | -0.000002 0.032043 -0.000000 | | -0.000000 -0.000000 0.048013 | <file_sep>/MinhoSimulator/src/action_dialog.cpp #include "action_dialog.h" #include "ui_action_dialog.h" #define BUTTON_ICON_SIZE 80 /// \brief class constructor ActionDialog::ActionDialog(ACTION *act, QString *file,QDialog *parent) : QDialog(parent), ui(new Ui::Form) { ui->setupUi(this); this->setupUI(); filepath = file; action = act; } /// \brief class destructor ActionDialog::~ActionDialog() { delete ui; } void ActionDialog::setupUI() { home = QString::fromStdString(getenv("HOME")); this->setStyleSheet("QToolButton:hover { border-style: solid;\ border-width: 4px;\ border-radius: 10px;\ border-color: #d96f05;\ font-size:14px;\ font-weight: bold;\ }"); this->setWindowIcon(QIcon(home+"/.msim/resources/images/logo.png")); ui->bt_newsim->setIcon(QIcon(home+"/.msim/resources/images/new.png")); ui->bt_newsim->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_newsim->setToolButtonStyle(Qt::ToolButtonTextUnderIcon); ui->bt_newsim->setText("New\n Simulation"); ui->bt_opensim->setIcon(QIcon(home+"/.msim/resources/images/open.png")); ui->bt_opensim->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_opensim->setToolButtonStyle(Qt::ToolButtonTextUnderIcon); ui->bt_opensim->setText("Open\n Simulation"); ui->bt_replay->setIcon(QIcon(home+"/.msim/resources/images/replay.png")); ui->bt_replay->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_replay->setToolButtonStyle(Qt::ToolButtonTextUnderIcon); ui->bt_replay->setText("Replay\n Simulation"); ui->bt_exit->setIcon(QIcon(home+"/.msim/resources/images/exit.png")); ui->bt_exit->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_exit->setToolButtonStyle(Qt::ToolButtonTextUnderIcon); ui->bt_exit->setText("Exit"); } void ActionDialog::on_bt_newsim_clicked() { (*action) = INVALID; //not implemented (*filepath) = "NEW FILE"; show_not_implemented(); this->close(); } void ActionDialog::on_bt_opensim_clicked() { (*action) = INVALID; //not implemented (*filepath) = "OPEN FILE"; show_not_implemented(); this->close(); } void ActionDialog::on_bt_replay_clicked() { (*action) = INVALID; (*filepath) = QFileDialog::getOpenFileName(this, tr("Open Simulation Replay"), home+"/MinhoSimulator/Replays", tr("SimReplay Files (*.srp)")); if((*filepath)!="") { (*action) = REPLAY; this->close(); } } void ActionDialog::on_bt_exit_clicked() { (*action) = EXIT; this->close(); } void ActionDialog::show_not_implemented() { QMessageBox not_av; not_av.setText("Sorry, but the feature you requested is not available at this time."); not_av.setWindowTitle("Minho Simulator: Feature not available"); not_av.addButton(QMessageBox::Ok); 
not_av.setPalette(ui->bt_exit->palette()); not_av.exec(); } <file_sep>/MinhoSimulator/src/main.cpp #include <QApplication> #include "action_dialog.h" #include "replay_window.h" #include <QDebug> int main(int argc, char *argv[]) { QApplication a(argc, argv); QString filepath = ""; ACTION ret = INVALID; while(ret!=EXIT){ // Loop unitl Exit is triggered ActionDialog action(&ret,&filepath); action.exec(); switch(ret){ case NEW:{ break; } case OPEN:{ break; } case REPLAY:{ ReplayWindow replay(filepath); replay.exec(); ret = INVALID; break; } default:{ return 0; break; } } } return a.exec(); } <file_sep>/install.sh #!/bin/bash sudo cp libs/process.hpp /usr/include/boost sudo cp -r libs/process /usr/include/boost cd echo "export GAZEBO_PLUGIN_PATH=\$GAZEBO_PLUGIN_PATH:~/catkin_ws/src/minho_team_simulation_tools/plugins" >> .profile echo "export GAZEBO_PLUGIN_PATH=\$GAZEBO_PLUGIN_PATH:~/catkin_ws/src/minho_team_simulation_tools/plugins" >> .bashrc echo "export GAZEBO_MODEL_PATH=\$GAZEBO_MODEL_PATH:~/catkin_ws/src/minho_team_simulation_tools/models" >> .profile echo "export GAZEBO_MODEL_PATH=\$GAZEBO_MODEL_PATH:~/catkin_ws/src/minho_team_simulation_tools/models" >> .bashrc echo "export GAZEBO_RESOURCE_PATH=\$GAZEBO_RESOURCE_PATH:~/catkin_ws/src/minho_team_simulation_tools/worlds" >> .profile echo "export GAZEBO_RESOURCE_PATH=\$GAZEBO_RESOURCE_PATH:~/catkin_ws/src/minho_team_simulation_tools/worlds" >> .bashrc echo "export GAZEBO_RESOURCE_PATH=\$GAZEBO_RESOURCE_PATH:/usr/share/gazebo-7" >> .profile echo "export GAZEBO_RESOURCE_PATH=\$GAZEBO_RESOURCE_PATH:/usr/share/gazebo-7" >> .bashrc . .profile echo "Installation of libs and paths completed!" <file_sep>/CMakeLists.txt cmake_minimum_required(VERSION 2.8.3) project(minho_team_simulation_tools C CXX) ## Find catkin macros and libraries ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) ## is used, also find other catkin packages find_package(catkin REQUIRED COMPONENTS roscpp std_msgs message_generation sensor_msgs genmsg minho_team_ros cv_bridge image_transport ) ################################### ## catkin specific configuration ## ################################### ## The catkin_package macro generates cmake config files for your package ## Declare things to be passed to dependent projects ## INCLUDE_DIRS: uncomment this if you package contains header files ## LIBRARIES: libraries you create in this project that dependent projects also need ## CATKIN_DEPENDS: catkin_packages dependent projects also need ## DEPENDS: system dependencies of this project that dependent projects also need catkin_package( # INCLUDE_DIRS include # LIBRARIES minho_team CATKIN_DEPENDS message_runtime std_msgs sensor_msgs DEPENDS sensor_msgs ) set(PROJECT_DIRECTORY ${minho_team_simulation_tools_SOURCE_DIR}) include_directories(${catkin_INCLUDE_DIRS} ${Boost_INCLUDE_DIR}) find_package(gazebo REQUIRED) include_directories(${GAZEBO_INCLUDE_DIRS}) link_directories(${GAZEBO_LIBRARY_DIRS}) list(APPEND CMAKE_CXX_FLAGS "${GAZEBO_CXX_FLAGS}") #Linking steps for Qt Libraries ######################################## find_package(Qt5 COMPONENTS Core Widgets Gui REQUIRED) add_definitions (${QT_DEFINITIONS}) set(CMAKE_INCLUDE_CURRENT_DIR ON) set(CMAKE_AUTOMOC ON) set(CMAKE_AUTOUIC ON) set(CMAKE_AUTORCC ON) set(qt_LIBRARIES Qt5::Widgets Qt5::Core Qt5::Gui) include_directories(${Qt5Widgets_INCLUDE_DIRS}) include_directories(${Qt5Core_INCLUDE_DIRS}) include_directories(${Qt5Gui_INCLUDE_DIRS}) ######################################## 
###################################################################### #include_directories(${PROJECT_DIRECTORY}/MinhoSimulator/libs) #include_directories(${PROJECT_DIRECTORY}/MinhoSimulator/include) #set(MSIM_SRCDIR ${PROJECT_DIRECTORY}/MinhoSimulator/src) #set(msim_MOC_H ${PROJECT_DIRECTORY}/MinhoSimulator/include/action_dialog.h ${PROJECT_DIRECTORY}/MinhoSimulator/include/replay_window.h) #qt5_wrap_cpp(msim_CPP ${msim_MOC_H}) #set(msim_SRC ${MSIM_SRCDIR}/main.cpp ${MSIM_SRCDIR}/action_dialog.cpp ${MSIM_SRCDIR}/replay_window.cpp # ${MSIM_SRCDIR}/replay.cpp) #add_executable(minhosimulator ${msim_SRC} ${msim_CPP}) #target_include_directories(minhosimulator PUBLIC #$<BUILD_INTERFACE:${PROJECT_DIRECTORY}/include>) #set_target_properties(minhosimulator PROPERTIES COMPILE_FLAGS "-fPIC") #target_compile_features(minhosimulator PRIVATE cxx_range_for) #target_link_libraries(minhosimulator ${catkin_LIBRARIES} ${qt_LIBRARIES}) #add_dependencies(minhosimulator minho_team_ros_generate_messages_cpp) ###################################################################### ################## ## Build Plugin ## ################## set(HARDWARE_DIR ${PROJECT_DIRECTORY}/robot_plugin) include_directories(${HARDWARE_DIR}/include) add_library(minho_robot SHARED ${HARDWARE_DIR}/src/minho_robot.cc) set_target_properties( minho_robot PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${PROJECT_DIRECTORY}/plugins ) target_link_libraries(minho_robot ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES}) add_dependencies(minho_robot minho_team_ros_generate_messages_cpp) set(OTHER_DIR ${PROJECT_DIRECTORY}/other_plugin) include_directories(${OTHER_DIR}/include) add_library(other_robot SHARED ${OTHER_DIR}/src/other_robot.cc) set_target_properties( other_robot PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${PROJECT_DIRECTORY}/plugins ) target_link_libraries(other_robot ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES}) add_dependencies(other_robot minho_team_ros_generate_messages_cpp) set(BSR_DIR ${PROJECT_DIRECTORY}/bs_robot_plugin) include_directories(${BSR_DIR}/include) add_library(bs_rplugin SHARED ${BSR_DIR}/src/bs_rplugin.cc) set_target_properties( bs_rplugin PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${PROJECT_DIRECTORY}/plugins ) target_link_libraries(bs_rplugin ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES}) add_dependencies(bs_rplugin minho_team_ros_generate_messages_cpp) set(WORLD_DIR ${PROJECT_DIRECTORY}/world_plugin) include_directories(${WORLD_DIR}/include) add_library(minho_world SHARED ${WORLD_DIR}/src/minho_world.cc) set_target_properties( minho_world PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${PROJECT_DIRECTORY}/plugins ) target_link_libraries(minho_world ${GAZEBO_LIBRARIES} ${catkin_LIBRARIES}) add_dependencies(minho_world minho_team_ros_generate_messages_cpp) <file_sep>/MinhoSimulator/include/replay.h #include <iostream> #include <fstream> #include <QDebug> #include "rapidjson/document.h" using namespace std; using namespace rapidjson; class Replay{ public: Replay(int *ret, QString filepath); ~Replay(); /* Get functions */ QString inline getName() {return replay_name;} QString inline getDate() {return replay_date;} float inline getTotalTime() {return (float)replay_total_samples*(1.0/(float)replay_frequency);} float inline getTimeStep() {return 1.0/(float)replay_frequency;} unsigned int inline getTotalSamples() {return replay_total_samples;} private: /* Replay Settings data */ QString replay_name; QString replay_date; unsigned int replay_frequency; unsigned int replay_total_samples; }; <file_sep>/bs_robot_plugin/include/bs_rplugin.hh #ifndef _GAZEBO_MINHO_HARDWARE_HH_ 
#define _GAZEBO_MINHO_HARDWARE_HH_ #include <gazebo/gazebo.hh> #include <gazebo/physics/physics.hh> #include <gazebo/common/common.hh> #include <gazebo/msgs/msgs.hh> #include <gazebo/transport/transport.hh> #include <gazebo/common/Events.hh> #include <gazebo/sensors/sensors.hh> #include <ignition/math/Vector3.hh> #include <ignition/math/Pose3.hh> #include <ignition/math/Angle.hh> #include <ignition/math/Vector2.hh> #include <stdio.h> #include <stdlib.h> #include <string> #include <sstream> #include <vector> #include <boost/thread.hpp> #include <boost/thread/mutex.hpp> #include <ros/callback_queue.h> #include <ros/subscribe_options.h> #include <ros/advertise_service_options.h> #include <sdf/Param.hh> #include <boost/thread/thread_time.hpp> #include <boost/thread/locks.hpp> #include <boost/thread/mutex.hpp> #include <boost/process.hpp> #define DEG_TO_RAD M_PI/180.0 #define RAD_TO_DEG 180.0/M_PI namespace gazebo { /// \brief A plugin to control a soccer robot's omnidirectional movement, kicker and dribbler class Bs_RPlugin : public ModelPlugin { public: /// \brief Constructor. Initialized deafult variables for various variables Bs_RPlugin(); /// \brief Destructor virtual ~Bs_RPlugin(); /// \brief Plugin Load function. Initializes all ros topics for the robot model, /// also starting message queue thread. Connects gazebo events like world update, /// time reset and world reset /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf); private: /// \brief gets a list of models in the world and renames itself, accordingly to the /// existing robots already spawned. void autoRenameRobot(); /// \brief called by event signal when a model or server reset happens void onReset(); // VARIABLES /// \brief Pointer to the model that defines this plugin physics::ModelPtr _model_; /// \brief Model unique ID unsigned int model_id_; /// \brief Robot unique ID inside team structure ranging from 1~6 unsigned int team_id_; /// \brief Transport node used to communicate with the transport system transport::NodePtr _node_; /// \brief Starting pose of the robot math::Pose initial_pose_; /// \brief pointer to server update event event::ConnectionPtr _reset_connection_; event::ConnectionPtr _timeres_connection_; }; } #endif <file_sep>/MinhoSimulator/src/replay_window.cpp #include "replay_window.h" #include "ui_replay_window.h" #define BUTTON_ICON_SIZE 30 /// \brief class constructor ReplayWindow::ReplayWindow(QString file,QDialog *parent) : QDialog(parent), ui(new Ui::Window) { ui->setupUi(this); this->setupUI(); replay_running = false; int ret = 0; replay = new Replay(&ret,file); if(ret==0){ // failed to load replay file QMessageBox not_av; not_av.setText("Sorry, but we encountered errors when reading the SimReplay file you provided."); not_av.setWindowTitle("Minho Simulator: SimReplay file error"); not_av.addButton(QMessageBox::Ok); not_av.setPalette(ui->bt_startpause->palette()); not_av.exec(); this->close(); } else { ui->lb_simname->setText(replay->getName()); ui->lb_simdate->setText(replay->getDate()); total_seconds = " / "; float total_secs = replay->getTotalTime(); int minutes = total_secs/60; float seconds = total_secs-(int)total_secs/60; int mseconds = (seconds-(int)seconds)*1000; total_seconds += QString("%1").arg(minutes, 2, 10, QChar('0'))+":" + QString("%1").arg((int)seconds, 2, 10, QChar('0'))+":" + QString("%1").arg(mseconds, 3, 10, QChar('0')); total_frames = 
replay->getTotalSamples(); ui->hbar_time->setMaximum(total_frames); current_rendered_frame = 0; on_hbar_time_valueChanged(0); render_timer = new QTimer(); connect(render_timer,SIGNAL(timeout()),this,SLOT(renderTrigger())); } } /// \brief class destructor ReplayWindow::~ReplayWindow() { delete ui; } void ReplayWindow::setupUI() { home = QString::fromStdString(getenv("HOME")); this->setWindowIcon(QIcon(home+"/.msim/resources/images/logo.png")); ui->bt_startpause->setIcon(QIcon(home+"/.msim/resources/images/start.png")); ui->bt_startpause->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_startpause->setToolButtonStyle(Qt::ToolButtonIconOnly); ui->bt_restart->setIcon(QIcon(home+"/.msim/resources/images/restart.png")); ui->bt_restart->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_restart->setToolButtonStyle(Qt::ToolButtonIconOnly); ui->bt_movback->setIcon(QIcon(home+"/.msim/resources/images/mov_back.png")); ui->bt_movback->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_movback->setToolButtonStyle(Qt::ToolButtonIconOnly); ui->bt_movforw->setIcon(QIcon(home+"/.msim/resources/images/mov_forw.png")); ui->bt_movforw->setIconSize(QSize(BUTTON_ICON_SIZE,BUTTON_ICON_SIZE)); ui->bt_movforw->setToolButtonStyle(Qt::ToolButtonIconOnly); } void ReplayWindow::renderReplayFrame(int frame_id) { qDebug() << "Rendering frame " << frame_id; } void ReplayWindow::on_hbar_time_valueChanged(int value) { renderReplayFrame(value); float total_secs = (float)value*replay->getTimeStep(); int minutes = total_secs/60; float seconds = total_secs-(int)total_secs/60; int mseconds = (seconds-(int)seconds)*1000; ui->lb_time->setText(QString("%1").arg(minutes, 2, 10, QChar('0'))+":" + QString("%1").arg((int)seconds, 2, 10, QChar('0'))+":" + QString("%1").arg(mseconds, 3, 10, QChar('0'))+total_seconds); } void ReplayWindow::on_bt_startpause_clicked() { replay_running = !replay_running; if(replay_running) { ui->bt_startpause->setIcon(QIcon(home+"/.msim/resources/images/pause.png")); render_timer->start(1000*replay->getTimeStep()); }else { render_timer->stop(); ui->bt_startpause->setIcon(QIcon(home+"/.msim/resources/images/start.png")); } } void ReplayWindow::on_bt_restart_clicked() { render_timer->stop(); current_rendered_frame = 0; ui->hbar_time->setValue(0); } void ReplayWindow::on_bt_movback_clicked() { render_timer->stop(); current_rendered_frame--; if(current_rendered_frame<0) current_rendered_frame = 0; ui->hbar_time->setValue(current_rendered_frame); } void ReplayWindow::on_bt_movforw_clicked() { render_timer->stop(); current_rendered_frame++; if(current_rendered_frame>total_frames) current_rendered_frame = total_frames; ui->hbar_time->setValue(current_rendered_frame); } void ReplayWindow::renderTrigger() { current_rendered_frame++; if(current_rendered_frame>total_frames) { on_bt_startpause_clicked(); current_rendered_frame = 0; } ui->hbar_time->setValue(current_rendered_frame); } <file_sep>/bs_robot_plugin/src/bs_rplugin.cc /* * Copyright (C) 2015-2016 Open Source Robotics Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * */ #include "bs_rplugin.hh" #define Y_AXIS_MULTIPLIER -1.0 using namespace gazebo; // Register this plugin with the simulator GZ_REGISTER_MODEL_PLUGIN(Bs_RPlugin); /// \brief Constructor. Initialized deafult variables for various variables Bs_RPlugin::Bs_RPlugin() { initial_pose_ = math::Pose(0.0,-6.4*(-Y_AXIS_MULTIPLIER),0.0,0.0,0.0,0.0); team_id_ = 0; //null } /// \brief Destructor Bs_RPlugin::~Bs_RPlugin() { } /// \brief Plugin Load function. Initializes all ros topics for the robot model, /// also starting message queue thread. Connects gazebo events like world update, /// time reset and world reset /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Bs_RPlugin::Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf) { // Rename model to specification _model_ = _parent; _node_ = transport::NodePtr(new transport::Node()); _node_->Init(_model_->GetWorld()->GetName()); model_id_ = _model_->GetId(); // Stop simulation to prevent odd behaviors gazebo::physics::WorldPtr world = _model_->GetWorld(); bool pauseState = world->IsPaused(); world->SetPaused(true); world->DisableAllModels(); world->EnablePhysicsEngine(false); autoRenameRobot(); _reset_connection_ = event::Events::ConnectTimeReset( boost::bind(&Bs_RPlugin::onReset, this)); _timeres_connection_ = event::Events::ConnectWorldReset( boost::bind(&Bs_RPlugin::onReset, this)); world->EnablePhysicsEngine(true); world->EnableAllModels(); world->SetPaused(pauseState); //Place the robot in the initial position regarding its team_id_ initial_pose_.pos.x = 0.8*(float)team_id_; _model_->SetWorldPose(initial_pose_); } /// \brief gets a list of models in the world and renames itself, accordingly to the /// existing robots already spawned. 
void Bs_RPlugin::autoRenameRobot() { team_id_ = 1; std::string tid = _model_->GetName(); char id = tid[tid.length()-1]; team_id_ = id-'0'; // Auto numbering std::string pref = "asd"; sdf::ElementPtr original_sdf = _model_->GetSDF(); sdf::ElementPtr root_link; sdf::ElementPtr numbering_back, numbering_left, numbering_right; sdf::ElementPtr number_uri; bool root_link_found = false; root_link = original_sdf->GetElement("link"); while(!root_link_found && root_link){ if(root_link->GetAttribute("name")->GetAsString().compare("numberings")==0){ root_link_found = true; } else root_link = root_link->GetNextElement(); } // numbering back bool back_visual_found = false; numbering_back = root_link->GetElement("visual"); while(!back_visual_found && numbering_back){ if(numbering_back->HasAttribute("name") && (numbering_back->GetAttribute("name")->GetAsString().compare("numbering_back")==0)){ back_visual_found = true; } else numbering_back = numbering_back->GetNextElement(); } // Apply correct stl matching team_id_ if(back_visual_found) { number_uri = numbering_back->GetElement("geometry")->GetElement("mesh")->GetElement("uri"); std::stringstream new_uri; new_uri << "model://bs_robot/meshes/nr_" << std::to_string(team_id_)<< ".stl"; number_uri->GetValue()->SetFromString(new_uri.str()); } // numbering left bool left_visual_found = false; numbering_left = root_link->GetElement("visual"); while(!left_visual_found && numbering_left){ if(numbering_left->HasAttribute("name") && (numbering_left->GetAttribute("name")->GetAsString().compare("numbering_left")==0)){ left_visual_found = true; } else numbering_left = numbering_left->GetNextElement(); } // Apply correct stl matching team_id_ if(left_visual_found) { number_uri = numbering_left->GetElement("geometry")->GetElement("mesh")->GetElement("uri"); std::stringstream new_uri; new_uri << "model://bs_robot/meshes/nr_" << std::to_string(team_id_)<< ".stl"; number_uri->GetValue()->SetFromString(new_uri.str()); } // numbering right bool right_visual_found = false; numbering_right = root_link->GetElement("visual"); while(!right_visual_found && numbering_right){ if(numbering_right->HasAttribute("name") && (numbering_right->GetAttribute("name")->GetAsString().compare("numbering_right")==0)){ right_visual_found = true; } else numbering_right = numbering_right->GetNextElement(); } // Apply correct stl matching team_id_ if(right_visual_found) { number_uri = numbering_right->GetElement("geometry")->GetElement("mesh")->GetElement("uri"); std::stringstream new_uri; new_uri << "model://bs_robot/meshes/nr_" << std::to_string(team_id_)<< ".stl"; number_uri->GetValue()->SetFromString(new_uri.str()); } // Update model's SDF _model_->Update(); } /// \brief called by event signal when a model or server reset happens void Bs_RPlugin::onReset() { _model_->SetWorldPose(initial_pose_); } <file_sep>/robot_plugin/include/minho_robot.hh #ifndef _GAZEBO_MINHO_HARDWARE_HH_ #define _GAZEBO_MINHO_HARDWARE_HH_ #include <gazebo/gazebo.hh> #include <gazebo/physics/physics.hh> #include <gazebo/common/common.hh> #include <gazebo/msgs/msgs.hh> #include <gazebo/transport/transport.hh> #include <gazebo/common/Events.hh> #include <gazebo/sensors/sensors.hh> #include <ignition/math/Vector3.hh> #include <ignition/math/Pose3.hh> #include <ignition/math/Angle.hh> #include <ignition/math/Vector2.hh> #include <stdio.h> #include <stdlib.h> #include <string> #include <sstream> #include <vector> #include <boost/thread.hpp> #include <boost/thread/mutex.hpp> #include <ros/callback_queue.h> #include 
<ros/subscribe_options.h> #include <ros/advertise_service_options.h> #include <sdf/Param.hh> #include <boost/thread/thread_time.hpp> #include <boost/thread/locks.hpp> #include <boost/thread/mutex.hpp> #include <boost/process.hpp> #define DEG_TO_RAD M_PI/180.0 #define RAD_TO_DEG 180.0/M_PI //ROS includes #include "ros/ros.h" #include "minho_team_ros/robotInfo.h" #include "minho_team_ros/controlInfo.h" #include "minho_team_ros/teleop.h" #include "minho_team_ros/position.h" #include "minho_team_ros/obstacle.h" #include "minho_team_ros/requestKick.h" using namespace ros; using minho_team_ros::robotInfo; //Namespace for robot information msg - PUBLISHING using minho_team_ros::controlInfo; //Namespace for control information msg - SUBSCRIBING using minho_team_ros::teleop; //Namespace for teleop information msg - SUBSCRIBING using minho_team_ros::requestKick; // Namespace for kicking service using minho_team_ros::position; using minho_team_ros::obstacle; namespace gazebo { /// \brief A plugin to control a soccer robot's omnidirectional movement, kicker and dribbler class Minho_Robot : public ModelPlugin { public: /// \brief Constructor. Initialized deafult variables for various variables Minho_Robot(); /// \brief Destructor virtual ~Minho_Robot(); /// \brief Plugin Load function. Initializes all ros topics for the robot model, /// also starting message queue thread. Connects gazebo events like world update, /// time reset and world reset /// \param _parent - Model pointer to the model defining this plugin /// \param _sdf - pointer to the SDF of the model void Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf); /// \brief Applies the desired velocities to the robot, given /// the linear velocity, direction of movement and angular velocity /// \param command - has 3 components: (1) is linear velocity [0~100] /// (2) is direction of movement [0~360] and (3) is angular velocity [-100~100] void applyVelocities(math::Vector3 command); private: /// \brief maps a velocity of the maximum velocity given a percentage. /// \param percentage - percentage of the maximum velocity to be applied /// \param limit - max velocity to be applied /// \return mapped value/velocity given a limit and a percentage double mapVelocity(double percentage, double limit); /// \brief gets a list of models in the world and renames itself, accordingly to the /// existing robots already spawned. void autoRenameRobot(); /// \brief called by event signal every server simulation iteration. Used to reset /// parameters like velocities and others void onUpdate(); /// \brief called by event signal when a model or server reset happens void onReset(); /// \brief callback to receive ROS messages published over the matching ros topic, /// in order to retrieve data about comands for robot motion. void controlInfoCallback(const controlInfo::ConstPtr& msg); /// \brief callback to receive ROS messages published over the matching ros topic, /// in order to retrieve data about comands for robot motion. void teleopCallback(const teleop::ConstPtr& msg); /// \brief function to actuate kicker, in order to kick the ball. Only kicks if /// the robot detects that has the ball inside /// \param req - request data received in requestKick service /// \param res - response data, flaggin if the kick was taken or not bool kickServiceCallback(requestKick::Request &req,requestKick::Response &res); /// \brief thread to queue incoming data to callback queue, that subsequently calls /// the respective callback, passing the matching data to it. 
void message_queue_thread(); /// \brief searches for game ball inside the world and stores a pointer to the model void getGameBallModel(); /// \brief runs ball detection "sensor", using ball and robot pose's, computing the /// distance between them, comparing with a threshold. void detectBallPossession(); /// \brief creates a ROS message, updates all the information and sends it through the /// publisher void publishRobotInfo(); /// \brief dribbles the ball, given the velocity vector of the robot void dribbleGameBall(); /// \brief kicks the ball, through the floor (passing) or the air (shoothing) /// allowing a slight variation in the direction of kicking /// \param pass - define wether the kick is a pass or not /// \param strength - define the strength of the kick /// \param direction - define the direction of the kick void kickGameBall(bool pass, int strength, int direction); /// \brief reads and parses the elements passed to the plugin inside the model's sdf /// and initializes the matching variables /// \param _sdf - pointer to sdf element containing void initializePluginParameters(sdf::ElementPtr _sdf); /// \brief generates random noise to add gaussian noise to a variable read from the /// world like ball position and obstacles position /// \param mean - mean of the error to be generated /// \param stdev - standard deviation of the error to /// \param min - minimum value of the output value /// \param max - maximum value of the output value /// \return generated noise value double generateNoise(double mean, double stdev, double min, double max); /// \brief detects obstacles in the view range, whether they being friendly or foe, /// at this point, in reality, the robot doesn't distinguish between friends or foes /// return vector of positions containing the position of the detected obstalces std::vector<minho_team_ros::obstacle>detectObstacles(); /// \brief mapps a detected point relative to the robot to the world position /// \param robot - position of the robot in the field, in meters /// \param robot_heading - heading of the robot in º /// \param dist - detected distance in meters /// \param theta - angle of the detected point in relation to world's 0º /// \return - position of the mapped point in meters position mapPointToWorld(position robot, float robot_heading, float dist, float theta); /// \brief computes both ball and robot velocities to send in robotInfo using /// a simple low pass filter approach to reduce noisy estimates void computeVelocities(); /// \brief boots indicated ROS nodes to add functionalities to the model /// This nodes have to be specified in plugin's SDF correctly, naming the /// package, node names, flags and placed in boot order /// \param _sdf - sdf struct containing plugin's data, containing Robot parameters /// and ROS boot node info void bootROSNodes(sdf::ElementPtr _sdf); /// \brief asserts flag value for ros node booting procedure. 
It can translate values /// started with '$' with plugin run time variables /// \param value - initial parameter value to be asserted /// \return - string with the asserted parameter std::string assertFlagValue(std::string value); /// \brief setus up model sensors, performing detection and type identification void setupSensors(); math::Vector3 getAccelDeccelVelocity(); // VARIABLES /// \brief Pointer to the model that defines this plugin physics::ModelPtr _model_; /// \brief Pointer to the model of the game ball physics::ModelPtr _ball_; /// \brief Model unique ID unsigned int model_id_; /// \brief Robot unique ID inside team structure ranging from 1~6 unsigned int team_id_; /// \brief Transport node used to communicate with the transport system transport::NodePtr _node_; /// \brief Starting pose of the robot math::Pose initial_pose_; /// \brief current velocities aplied to the model math::Vector3 linear_velocity_, angular_velocity_; /// \brief direction of movement of omnidirectional platform float linear_vel_,mov_direction_, angular_vel_; /// \brief to enable or disable ros capabilities given the initialization state bool is_ros_initialized_; /// \brief node handler for ROS communication with publishers and subscribers ros::NodeHandle *_node_ros_; /// \brief subscriber for ControlInfo messages ros::Subscriber control_info_sub_; /// \brief subscriber for Teleop state messages ros::Subscriber teleop_state_sub_; /// \brief publisher for robotInfo messages ros::Publisher robot_info_pub_; /// \brief pointer to server update event event::ConnectionPtr _update_connection_; event::ConnectionPtr _reset_connection_; event::ConnectionPtr _timeres_connection_; /// \brief custom callback queue to implement a custom spinner to ROS. ros::CallbackQueue message_queue_; // Custom Callback Queue. /// \brief thread object for the running calback thread boost::thread message_callback_queue_thread_; /// \brief data mutex to implement thread safety boost::mutex control_info_mutex_; boost::mutex tele_op_mutex_; /// \brief that defines whether the model is being controlled by teleop or by /// autonomous commands bool teleop_active_; /// \brief defines whether the current robot has the ball or not, given its distance to the /// center of the ball. bool has_game_ball_; /// \brief defines whether there is a model in the parent world that is the game ball. bool game_ball_in_world_; /// \brief threshold distance to judge whether the robot has the ball in its possession or not. /// Given the fact that if the ball is inside de "mouth" the distance is much smaller that in any /// other situation, it's an easy thing to judge. double poss_threshold_distance_; /// \brief hold the euclidean distance from the model to the ball, to detect ball possession. double distance_to_ball_; /// \brief holds the current model pose, that is updated every iteration of the simulation /// server. math::Pose model_pose_; math::Pose ball_pose_; /// \brief flags the state of the dribbling system. If true, when the ball is in possession of a robot /// he will dribble the ball, keeping it closer to himself. bool dribblers_on_; /// \brief flags if a kick to the ball has been requested. If its true, it will atempt to kick the ball /// once it has the ball in his possession. 
bool kick_requested_; /// \brief variables to control the kicking strength and direction int kick_force_, kick_dir_; bool kick_is_pass_; /// \brief variables to control aspects of the robot's behaviour, expressed in the sdf file of the robot /// under the plugin tag float MAX_LIN_VEL, MAX_ANG_VEL, MAX_BALL_VEL, SHOOT_ANGLE; std::string BALL_MODEL_NAME; float VISION_RANGE_RADIUS; float MAX_BACKWARDS_VEL; float GRIP_DECAY; float MAX_ROTATION_VEL; /// \brief pointer to world's set pose mutex. This pointer will make changes to the /// models become more thread safe. boost::mutex *world_mutex_; /// \brief kick service server to handle kick server calls ros::ServiceServer kick_service; // Service to relay the configuration /// \brief robotInfo state variables robotInfo current_state, last_state, last_vel_state; /// \brief vector holding handles to child processes std::vector<boost::process::child> childs; /// \brief vector holding handles to sensors attached to the model std::vector<gazebo::sensors::SensorPtr> sensors_; /// \brief handler of obstacle_detector mock sensor gazebo::sensors::RaySensor *obstacle_detector; /// \brief flag to implement control timeout bool controlCommandsReceived; /// \brief flag to control timer between kicks unsigned int kick_stab_counter; /// \brief acceleration and decceleration constants float constAccel, constDeccel; math::Vector3 targetCommand, lastCommand, currentCommand; }; } #endif <file_sep>/other_plugin/include/other_robot.hh #ifndef _GAZEBO_MINHO_HARDWARE_HH_ #define _GAZEBO_MINHO_HARDWARE_HH_ #include <gazebo/gazebo.hh> #include <gazebo/physics/physics.hh> #include <gazebo/common/common.hh> #include <gazebo/msgs/msgs.hh> #include <gazebo/transport/transport.hh> #include <gazebo/common/Events.hh> #include <gazebo/sensors/sensors.hh> #include <ignition/math/Vector3.hh> #include <ignition/math/Pose3.hh> #include <ignition/math/Angle.hh> #include <ignition/math/Vector2.hh> #include <stdio.h> #include <stdlib.h> #include <string> #include <sstream> #include <vector> #include <boost/thread.hpp> #include <boost/thread/mutex.hpp> #include <ros/callback_queue.h> #include <ros/subscribe_options.h> #include <ros/advertise_service_options.h> #include <sdf/Param.hh> #include <boost/thread/thread_time.hpp> #include <boost/thread/locks.hpp> #include <boost/thread/mutex.hpp> #include <boost/process.hpp> #define DEG_TO_RAD M_PI/180.0 #define RAD_TO_DEG 180.0/M_PI //ROS includes #include "ros/ros.h" using namespace ros; namespace gazebo { /// \brief A plugin to control a soccer robot's omnidirectional movement, kicker and dribbler class Other_Robot : public ModelPlugin { public: /// \brief Constructor. Initialized deafult variables for various variables Other_Robot(); /// \brief Destructor virtual ~Other_Robot(); /// \brief Plugin Load function. Initializes all ros topics for the robot model, /// also starting message queue thread. 
/// Connects gazebo events like world update,
/// time reset and world reset
/// \param _parent - Model pointer to the model defining this plugin
/// \param _sdf - pointer to the SDF of the model
void Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf);

/// \brief Pointer to the model that defines this plugin
physics::ModelPtr _model_;

/// \brief Transport node used to communicate with the transport system
transport::NodePtr _node_;

/// \brief Starting pose of the robot
math::Pose initial_pose_;
};
}
#endif
<file_sep>/README.md
## MinhoTeam Simulation Tools Package

This repository holds the source code for the MinhoTeam's simulator, which is based on Gazebo, ROS and Qt and is written in C++. The purpose of this simulator is to allow teams to develop code earlier, while sitting in a chair, prior to implementation on the real robots.

The package will in the future include the following RoboCup leagues:

* Middle Size League (MSL)
* RoboCup Junior Soccer
* RoboCup@Factory
* Autonomous Driving
* General Simulation

Where the simulation cannot provide a full interface to the user, it will include tutorials, guides and examples to make the usage of the simulator as smooth as possible. The package will include the following:

* Standard models and worlds for each league
* Model plugins to provide ROS interfacing with the models
* Guides to create, configure and run custom models and worlds

*Developed by <NAME> & MinhoTeam @2016*
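The plugin headers above expose behaviour parameters (for example `MAX_LIN_VEL` or `GRIP_DECAY`) that, according to their documentation, are read from the `<plugin>` tag of the robot's SDF model. As a rough illustration of how a custom model plugin can read such a value inside `Load()`, here is a minimal sketch; the plugin class name, the `max_lin_vel` element name and its default value are assumptions made for illustration only and are not taken from this repository:

```cpp
#include <gazebo/gazebo.hh>
#include <gazebo/physics/physics.hh>

namespace gazebo
{
  // Hypothetical example plugin - not part of this repository.
  class ExampleParamPlugin : public ModelPlugin
  {
  public:
    void Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf)
    {
      model_ = _parent;

      // Read an optional numeric parameter from the <plugin> tag,
      // falling back to a default when the element is missing.
      // The element name "max_lin_vel" is assumed for illustration.
      max_lin_vel_ = 2.5;
      if (_sdf->HasElement("max_lin_vel"))
        max_lin_vel_ = _sdf->Get<double>("max_lin_vel");
    }

  private:
    physics::ModelPtr model_;
    double max_lin_vel_;
  };

  // Makes the plugin loadable by the Gazebo server.
  GZ_REGISTER_MODEL_PLUGIN(ExampleParamPlugin)
}
```

In this sketch the value would be declared as a child element of the `<plugin>` tag inside the robot's SDF description.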
3bd144e04b76cc6f8853e1c2d4b102baf542856d
[ "Markdown", "CMake", "C++", "Shell" ]
20
C++
pedroosorio/minho_team_simulation_tools
b8d45860167be97d399ee928f437ea10dcf88e0b
495cd0c064bdcca3186656129eea8d6418ba2db9
refs/heads/master
<file_sep>package me.kevinnovak.voidteleport; import java.util.List; import java.util.concurrent.ThreadLocalRandom; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.configuration.file.FileConfiguration; public class VoidWorld { private boolean enabled; private World toWorld; private boolean useRandom; private World world; private Location spawn; private int minX, maxX, minY, maxY, minZ, maxZ; private List<Integer> dontSpawnOn; private int maxSpawnAttempts; VoidWorld(World world, FileConfiguration worldData, int maxSpawnAttempts) { this.world = world; this.enabled = worldData.getBoolean("teleportOutOfVoid.enabled"); String toWorldName = worldData.getString("teleportOutOfVoid.toWorld"); this.toWorld = Bukkit.getWorld(toWorldName); this.useRandom = worldData.getBoolean("teleportOutOfVoid.useRandom"); int spawnXPos = worldData.getInt("worldSpawn.x-Pos"); int spawnYPos = worldData.getInt("worldSpawn.y-Pos"); int spawnZPos = worldData.getInt("worldSpawn.z-Pos"); float spawnYaw = (float) worldData.getDouble("worldSpawn.yaw"); float spawnPitch = (float) worldData.getDouble("worldSpawn.pitch"); this.spawn = new Location(this.world, spawnXPos, spawnYPos, spawnZPos, spawnYaw, spawnPitch); this.spawn.add(0.5, 0, 0.5); this.minX = worldData.getInt("worldRandomSpawn.x-Range.min"); this.maxX = worldData.getInt("worldRandomSpawn.x-Range.max"); this.minY = worldData.getInt("worldRandomSpawn.y-Range.min"); this.maxY = worldData.getInt("worldRandomSpawn.y-Range.max"); this.minZ = worldData.getInt("worldRandomSpawn.z-Range.min"); this.maxZ = worldData.getInt("worldRandomSpawn.z-Range.max"); this.dontSpawnOn = worldData.getIntegerList("worldRandomSpawn.dontSpawnOn"); this.maxSpawnAttempts = maxSpawnAttempts; } @SuppressWarnings("deprecation") Location getRandomVoidLocation() { Location randLocation; int attempt = 1; while (attempt <= maxSpawnAttempts) { randLocation = getRandomLocation(); while (attempt <= maxSpawnAttempts && randLocation.getBlockY() >= this.minY) { int randX = randLocation.getBlockX(); int randY = randLocation.getBlockY(); int randZ = randLocation.getBlockZ(); Material blockAboveMaterial = new Location(this.world, randX, randY+1, randZ).getBlock().getType(); Material blockBelowMaterial = new Location(this.world, randX, randY-1, randZ).getBlock().getType(); if (randLocation.getBlock().getType() == Material.AIR) { if (blockAboveMaterial == Material.AIR) { if (blockBelowMaterial != Material.AIR) { boolean forbidden = false; for (Integer itemID : this.dontSpawnOn) { if (blockBelowMaterial == Material.getMaterial(itemID)) { forbidden = true; } } if (!forbidden) { randLocation.add(0.5, 0, 0.5); Bukkit.getLogger().info("attempts: " + attempt); return randLocation; } } } } randLocation.subtract(0, 1, 0); attempt++; } } Bukkit.getLogger().info("SPAWNING AT SPAWN"); return this.spawn; } Location getRandomLocation() { //Bukkit.getLogger().info("GETTING RANDOM LOCATION"); int randX = ThreadLocalRandom.current().nextInt(this.minX, this.maxX + 1); // TOP-DOWN: int randY = this.maxY; // RANDOM-DOWN: //int randY = ThreadLocalRandom.current().nextInt(this.minY, this.maxY + 1); int randZ = ThreadLocalRandom.current().nextInt(this.minZ, this.maxZ + 1); Location randLocation = new Location(this.world, randX, randY, randZ); return randLocation; } boolean getEnabled() { return this.enabled; } void setEnable(boolean enabled) { this.enabled = enabled; } World getToWorld() { return this.toWorld; } void setToWorld(World toWorld) { this.toWorld 
= toWorld; } boolean getUseRandom() { return this.useRandom; } void setUseRandom(boolean useRandom) { this.useRandom = useRandom; } World getWorld() { return this.world; } void setName(World world) { this.world = world; } Location getSpawn() { return this.spawn; } void setSpawn(Location spawn) { spawn.add(0.5, 0, 0.5); this.spawn = spawn; } int getMinX() { return this.minX; } void setMinX(int minX) { this.minX = minX; } int getMaxX() { return this.maxX; } void setMaxX(int maxX) { this.maxX = maxX; } int getMinZ() { return this.minZ; } void setMinZ(int minZ) { this.minZ = minZ; } int getMaxZ() { return this.maxZ; } void setMaxZ(int maxZ) { this.maxZ = maxZ; } List<Integer> getDontSpawnOn() { return this.dontSpawnOn; } void setDontSpawnOn(List<Integer> dontSpawnOn) { this.dontSpawnOn = dontSpawnOn; } }<file_sep>package me.kevinnovak.voidteleport; import java.io.File; import java.util.List; import org.bukkit.configuration.file.YamlConfiguration; public class CommandManager { public VoidTeleport plugin; public List<String> setSpawnCommands, spawnCommands, randomCommands; public CommandManager(VoidTeleport plugin) { this.plugin = plugin; } void load() { File commandsFile = new File(plugin.getDataFolder() + "/commands.yml"); if (!commandsFile.exists()) { this.plugin.log("Copying default commands file."); this.plugin.saveResource("commands.yml", false); } this.plugin.log("Loading commands file."); YamlConfiguration commandsData = YamlConfiguration.loadConfiguration(commandsFile); this.setSpawnCommands = commandsData.getStringList("setSpawn"); this.spawnCommands = commandsData.getStringList("spawn"); this.randomCommands = commandsData.getStringList("random"); } }
da3f3dc7f7467a2997d3b8ce725d593dfa98a6bf
[ "Java" ]
2
Java
KevinNovak/VoidTeleport
f7515da06ab62ebeab90d1ddfe2270981a218d40
86c5549cf5724512a372177228f1147ce774c71c
refs/heads/master
<repo_name>eyaladi/renderer-engine<file_sep>/NewEncoder/VideoEncoder/avc.h #pragma once #include <stdint.h> #include <inttypes.h> extern "C" { #ifndef __STDC_CONSTANT_MACROS # define __STDC_CONSTANT_MACROS #endif //#define UINT16_MAX _UI16_MAX #define INT8_C(val) val##i8 #define INT16_C(val) val##i16 #define INT32_C(val) val##i32 #define INT64_C(val) val##i64 #define UINT8_C(val) val##ui8 #define UINT16_C(val) val##ui16 #define UINT32_C(val) val##ui32 #define UINT64_C(val) val##ULL // 7.18.4.2 Macros for greatest-width integer constants #define INTMAX_C INT64_C #define UINTMAX_C UINT64_C #include "libavformat/avformat.h" #include "libavutil/intreadwrite.h" } class avc { public: avc(void); ~avc(void); static int ff_avc_parse_nal_units(AVIOContext *s, const uint8_t *buf, int size); static int ff_avc_parse_nal_units_buf(const uint8_t *buf_in, uint8_t **buf, int *size); static int ff_isom_write_avcc(AVIOContext *pb, const uint8_t *data, int len); static const uint8_t *ff_avc_find_startcode(const uint8_t *p, const uint8_t *end); static int ff_avc_write_annexb_extradata(const uint8_t *in, uint8_t **buf, int *size); static const uint8_t *ff_avc_mp4_find_startcode(const uint8_t *start,const uint8_t *end,int nal_length_size); }; <file_sep>/utils/Utils/EngineInitProtector.cpp // EngineInitProtector.cpp // Prevent two engines to enter some critical init section simultaneously // This is done by maintaining a list of the active engines, sorted by their launch time. // An engine is removed from the list when it ends its init. // Before an engine starts its init, it makes sure that it is the first one in the list. // The list is protected by a mutex // #include <windows.h> #include "EngineInitProtector.h" #include "MutexAutoLock.h" #include "SWException.h" EngineInitProtector::EngineInitProtector() { m_activeEnginesMutex = CreateMutex (NULL, FALSE, NULL); if (m_activeEnginesMutex == NULL) { throw SWException( "Failed to create mutex for active engines" ); } } EngineInitProtector::~EngineInitProtector() { if ( m_activeEnginesMutex ) CloseHandle(m_activeEnginesMutex); } // Engine launched - Add to list void EngineInitProtector::EngineLaunched(const Engine *engine) { MutexAutoLock autoLock(m_activeEnginesMutex); m_activeEngines.push_back(engine); } // Before engine starts init - make sure it's first in the list (or not in the list at all - which shouldn't really happen) // If not - wait and restart - up to some timeout bool EngineInitProtector::WaitBeforeEngineInit(const Engine *engine, int timeoutMS) const { #define INTER_TEST_INTERVAL_MS 100 int niters = timeoutMS / INTER_TEST_INTERVAL_MS + 1; for ( int iter = 0; iter < niters; iter++ ) { if ( CanEngineInit( engine ) ) return true; Sleep(INTER_TEST_INTERVAL_MS); } return false; } // Before engine starts init - make sure it's first in the list (or not in the list at all - which shouldn't really happen) bool EngineInitProtector::CanEngineInit(const Engine *engine) const { MutexAutoLock autoLock(m_activeEnginesMutex); if ( m_activeEngines.size() <= 1 ) return true; // It's definitely not behind any engine. std::list<const Engine *>::const_iterator iter = m_activeEngines.begin(); iter++; // Start from second iterm for ( ; iter != m_activeEngines.end(); iter++ ) { if ( *iter == engine ) return false; } // Not in the list - how is that possible? (maybe removed and now retrying?...) 
return true; } void EngineInitProtector::EngineInitEnded(const Engine *engine) // Remove from list { MutexAutoLock autoLock(m_activeEnginesMutex); m_activeEngines.remove(engine); } <file_sep>/RenderingManager/RenderingManager/HeaderPrinter.h #pragma once #include <string> #include "DebugLevel.h" using namespace std; class Engine; class HeaderPrinter { // This is a class of static functions. public: static int gettimeofday(struct timeval * tp, struct timezone * tzp); static void printHeader(FILE *file, Engine *engine, DebugLevel dbgLvl); static void printHeader(FILE *file, DebugLevel dbgLvl); static void printHeader(FILE *file, Engine *engine); static void printHeader(FILE *file); private: static void printTime(FILE *file); }; <file_sep>/RenderingManager/RenderingManager/include/ProgressTracker.h #pragma once #include <string> using namespace std; class RenderingManager; class ProgressTracker { private: RenderingManager * m_rndMng; int m_progress; int m_outOf; double m_notifyPeriod; // between 0 and 1. How often to notify the RenderingManager (e.g 0.2 means notify every 20% progress). double m_lastNotify; // between 0 and 1 (-1 means none). double m_nextNotify; // between 0 and 1 float m_startRenditionTime; public: ProgressTracker(RenderingManager * rndMng, double notifyPeriod); void setStartRenditionTime(float startRenditionTime); float getStartRenditionTime() { return m_startRenditionTime;} void setOutOf(int outOf); void setProgress(int progress); void incProgress(); void readyToPlay(int rendered); void totalSleepTime( int totalSleepTime); void sceneRendered(const char* sceneName, int frames, float videoRenditionTime); void setEncoderProgress(int frames, double timePassed, double averageFramesPerSec); void exceedPaddedSize(int codecType, int frameNumber, int frameSize, int maxFrameSize); void writeEvent(const char* name,const char* value); void flush(); }; <file_sep>/VideoStream/VideoStream/VSSharedData.h #pragma once #ifdef WIN32 #include <windows.h> #else #include <pthread.h> #include "WindowDefinitions.h" #endif namespace VideoStream { class CSharedData { private: void *_pData; #ifdef WIN32 HANDLE _mutex; HANDLE _shmem, _fileHandle; HANDLE GetFileHandle (LPCTSTR dataName, bool persistent); #else pthread_mutex_t _mutex; int _fileHandle; int m_size; int GetFileHandle (LPCTSTR dataName, bool persistent); #endif DWORD _dwTimeout; public: CSharedData (LPCTSTR dataName, bool persistent = false, int size = sizeof(int), DWORD dwTimeout = 3600000 /* 60 minutes by default */); virtual ~CSharedData(); void Block(); void Release(); void *GetData() {return _pData;} void Read (void *pData, int size, bool atomic = true); void *Read (int &size, bool atomic = true); void Write (const void *pData, int size, bool atomic = true); void WriteWithSize (const void *pData, int size, bool atomic = true); int ReadInt(bool atomic = true); void WriteInt (int value, bool atomic = true); int IncreaseInt(bool atomic = true); }; /* * CSharedDataLocker: * An object of this class locks the specified name for mutual exclusive (i.e. no * 2 objects with the same dataName can exist simultaneously). * Its is used to protect the operation of creating a shared data and writing/reading to it, * so that the reader is guaranteed that if he succeeded to access a shared memory, it is * already updated (used for the shmem header). 
*/ class CSharedDataLocker { public: CSharedDataLocker( const char *dataName ); ~CSharedDataLocker(); private: #ifdef WIN32 HANDLE _mutex; #else pthread_mutex_t _mutex; #endif }; };<file_sep>/RenderingManager/RenderingManager/EngineManager.cpp #include <iostream> #include <time.h> #ifndef WIN32 #include <unistd.h> #endif #include "EngineFactory.h" #include "Engine.h" #include "EngineManager.h" #include "RenderingManager.h" #include "HeaderPrinter.h" #include "SWException.h" using namespace std; EngineManager::EngineManager() : m_nEngines(0), m_rndMng(NULL) { } EngineManager::~EngineManager() { map<string,EngineTypeInfo *>::iterator iter; for(iter = m_engineTypes.begin(); iter != m_engineTypes.end(); iter++ ) { if (iter->second != NULL) delete iter->second; } m_engineTypes.clear(); } bool EngineManager::init(RenderingManager * rndMng) { m_rndMng = rndMng; return true; } void EngineManager::addEngine(Engine * engine) { RenderingManager::EngineListMutexAutoLock mal(m_rndMng); if (!m_rndMng->areExecutionsAllowed()) { cerr << "Error in EngineManager::addEngine: New engine executions are disallowed" << endl; return; } if (m_engines[engine->getExecutionId()] != NULL) { cerr << "Error in EngineManager::addEngine: Engine " << engine->getExecutionId() << " already exists" << endl; return; } m_engines[engine->getExecutionId()] = engine; m_nEngines++; if (m_engineTypes[engine->getEngineType()]->m_nRunning >= m_engineTypes[engine->getEngineType()]->m_maxRunning) { // Cannot execute the engine just yet. Insert it into the pending queue. m_engineTypes[engine->getEngineType()]->m_pending.push(engine); } else { // Launch new thread m_engineTypes[engine->getEngineType()]->m_nRunning++; engine->m_threadHandle = engine->launchEngineThread(); } } void EngineManager::removeEngine(Engine * engine) { RenderingManager::EngineListMutexAutoLock mal(m_rndMng); if (m_engines[engine->getExecutionId()] == NULL) { cerr << "Error in EngineManager::removeEngine: Engine " << engine->getExecutionId() << " does not exist" << endl; return; } m_rndMng->writeEngineEnd(engine); m_engines.erase (engine->getExecutionId()); m_nEngines--; m_engineTypes[engine->getEngineType()]->m_nRunning --; queue<Engine*> & pendingQueue = m_engineTypes[engine->getEngineType()]->m_pending; if (! pendingQueue.empty()) { // At lkeast one engine is pending in the queue. Execute the one that was there longest. Engine *pendingEngine = pendingQueue.front(); pendingQueue.pop(); m_engineTypes[pendingEngine->getEngineType()]->m_nRunning++; pendingEngine->m_threadHandle = pendingEngine->launchEngineThread(); } if (m_rndMng->applyExitConditions()) { mal.unlock(); // Pause for one second before exiting. // Typically we want to exit once the encoder finishes running and often this may happen concurrently with the end of the // synthesizer engine and the last scene's flash2video engine. // This pause lets those other engines end properly. #ifdef WIN32 Sleep(1000); #else usleep(1000 * 1000); #endif // Before exiting, write to the log file without a mutex protection. // This is used for debugging to make sure there is no deadlock. 
fprintf(m_rndMng->getLogFile(), "Exiting\n"); fflush(m_rndMng->getLogFile()); exit(0); } } Engine * EngineManager::getEngineByExecutionId(string executionId) { return m_engines[executionId]; } bool EngineManager::hasExecutionsOfType(string engineType) { RenderingManager::EngineListMutexAutoLock mal(m_rndMng); map<string,Engine *>::iterator iter; for(iter = m_engines.begin(); iter != m_engines.end(); iter++ ) { Engine *engine = iter->second; if (engine != NULL && engine->getEngineType() == engineType) return true; } return false; } void EngineManager::printReport() { RenderingManager::EngineListMutexAutoLock mal(m_rndMng); map<string,Engine *>::iterator iter; for(iter = m_engines.begin(); iter != m_engines.end(); iter++ ) { Engine *engine = iter->second; if (engine != NULL) { cout << " Type: " << engine->getEngineType() << " Name: " << engine->getExecutionId() << " State: " << Engine::engineStateToString(engine->getEngineState()) << endl; } } cout << "Total: " << m_nEngines << " engine" << ((m_nEngines == 1)?"":"s") << endl; } void EngineManager::printEngineTypeReport() { RenderingManager::EngineListMutexAutoLock mal(m_rndMng); cout << "Engine types:" << endl; map<string,EngineTypeInfo *>::iterator iter; for(iter = m_engineTypes.begin(); iter != m_engineTypes.end(); iter++ ) { EngineTypeInfo *typeInfo = iter->second; if (typeInfo != NULL) { cout << " " << typeInfo->m_type; if (typeInfo->m_nRunning > 0 && typeInfo->m_maxRunning < UNLIMITED_EXECUTIONS) { if (typeInfo->m_maxRunning == UNLIMITED_EXECUTIONS) { cout << " running: " << typeInfo->m_nRunning; } else { cout << " running: "<< typeInfo->m_nRunning << " limit: " << typeInfo->m_maxRunning; if (!typeInfo->m_pending.empty()) { cout << " pending: " << typeInfo->m_pending.size(); } } } cout << endl; } } } void EngineManager::stopAllEngines(Engine *triggeringEngine) { fprintf(m_rndMng->getLogFile(), "Start Stopping engine\n"); RenderingManager::EngineListMutexAutoLock mal(m_rndMng); map<string,Engine *>::iterator iter; for(iter = m_engines.begin(); iter != m_engines.end(); iter++ ) { fprintf(m_rndMng->getLogFile(), "Start Stopping engine inside loop\n"); Engine *engine = iter->second; if (engine != NULL) { fprintf(m_rndMng->getLogFile(), "Start Stopping engine inside loop engine != null\n"); if (engine != triggeringEngine) { fprintf(m_rndMng->getLogFile(), "Start Stopping engine inside loop engine != triggering\n"); RenderingManager::FileMutexAutoLock mal(m_rndMng); HeaderPrinter::printHeader(m_rndMng->getLogFile()); fprintf(m_rndMng->getLogFile(), "Stopping engine %s [%s]\n", engine->getEngineType().c_str(), engine->getExecutionId().c_str()); fflush(m_rndMng->getLogFile()); } engine->stopEngine(engine == triggeringEngine); fprintf(m_rndMng->getLogFile(), "Start Stopping engine inside loop after stop engine\n"); } } } void EngineManager::initEngineType(string engineType, EngineFactory * engineFactory) { if (m_engineTypes[engineType] != NULL) { throw SWException("engine type %s already exists.", engineType.c_str()); } if (engineFactory == NULL) { throw SWException("engine factory for type %s is null.", engineType.c_str()); } m_engineTypes[engineType] = new EngineTypeInfo(engineType, engineFactory); if (m_engineTypes[engineType] == NULL) { throw SWException("initEngineType failed to allocate memory."); } // Init factory engineFactory->m_engineType = engineType; engineFactory->m_commonStorage = m_rndMng->getCommonStorage(); engineFactory->init(); } bool EngineManager::hasExecutions(string engineType) { return 
(m_engineTypes[engineType]->m_nRunning != 0 || ! m_engineTypes[engineType]->m_pending.empty()); } void EngineManager::setEngineTypeLimit(string engineType, unsigned int limit) { if (m_engineTypes[engineType] == NULL) { throw SWException("engine type %s not supported.", engineType.c_str()); } if (hasExecutions(engineType)) { throw SWException("Cannot set engine type limit for type %s after engines were executed.", engineType.c_str()); } m_engineTypes[engineType]->m_maxRunning = limit; } <file_sep>/RenderingManager/RenderingManager/TestEngineFactory.cpp #include "TestEngineFactory.h" #include "TestEngine.h" using namespace std; Engine *TestEngineFactory::createEngine() { return new TestEngine(); } <file_sep>/RenderingManager/RenderingManager/EngineLogger.cpp #include "EngineLogger.h" #include "Engine.h" #include "RenderingManager.h" #include "HeaderPrinter.h" #ifndef WIN32 #include <string.h> #endif void EngineLogger::vprintf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, va_list argList) { RenderingManager::FileMutexAutoLock mal(m_engine->m_rndMng); if (printHeader) HeaderPrinter::printHeader(m_engine->m_rndMng->getLogFile(), m_engine, dbgLvl); vfprintf(m_engine->m_rndMng->getLogFile(), format, argList); if (verifyNewLine && (strlen(format) == 0 || format[strlen(format)-1] != '\n')) fprintf(m_engine->m_rndMng->getLogFile(), "\n"); fflush(m_engine->m_rndMng->getLogFile()); } DebugLevel EngineLogger::getDebugLevel() { return m_engine->getDebugLevel(); } <file_sep>/RenderingManager/RenderingManager/RenderingManager.cpp // RenderingManager.cpp : Defines the entry point for the console application. // #include <iostream> #include <map> #include <vector> #include <string.h> #include "Engine.h" #include "CommandParser.h" #include "EngineManager.h" #include "RenderingManager.h" #include "HeaderPrinter.h" #include "VSLocalMemStreamManager.h" #include "SWException.h" #include "ProgressTracker.h" #include "EngineFactory.h" #include "TestEngineFactory.h" #include "LightSynthesizerEngineFactory.h" //#include "Flash2VideoEngineFactory.h" #include "VideoDecoderEngineFactory.h" #include "VideoEncoderEngineFactory.h" //#include "VideoConsumerEngineFactory.h" //#include "VideoProducerEngineFactory.h" //#include "VideoConsumerProducerEngineFactory.h" //#include "VideoTransitionEngineFactory.h" #include "SVG2VideoEngineFactory.h" #ifndef WIN32 #include <getopt.h> #endif #define USE_SVG_SERVER using namespace std; #ifndef USE_SVG_SERVER typedef jint (JNICALL CreateJavaVM_t)(JavaVM **, void **, void *); #endif // Global variable which can be used by the engines. string g_baseFolder; bool RenderingManager::init(const string & baseFolder, const string &inputFileName, const string & notificationFileName, const string & logFileName, const string & captureFileName, const string & debugLevelString, const string & params) { m_logger.m_rndMng = this; // Base folder g_baseFolder = baseFolder; // Debug level m_defaultDebugLevel = DebugLevel_INFO; // default is INFO if (debugLevelString != "") { if (! debugLevelFromString(debugLevelString, m_defaultDebugLevel)) { cerr << "Error parsing debug level: " << debugLevelString << " is not a vliad value." << endl; return false; } } // Create mutexes. #ifdef WIN32 m_fileMutex = CreateMutex (NULL, FALSE, NULL); if (m_fileMutex == NULL) { cerr << "Failed to create file mutex." << endl; return false; } m_stopExecutionMutex = CreateMutex (NULL, FALSE, NULL); if (m_stopExecutionMutex == NULL) { cerr << "Failed to create stop execution mutex." 
<< endl; return false; } m_engineListMutex = CreateMutex (NULL, FALSE, NULL); if (m_engineListMutex == NULL) { cerr << "Failed to create engine list mutex." << endl; return false; } #else int rc = pthread_mutex_init(&m_fileMutex, NULL); if (rc != 0) { cerr << "Failed to create file mutex." << endl; return false; } rc = pthread_mutex_init(&m_stopExecutionMutex, NULL); if (rc != 0) { cerr << "Failed to create stop execution mutex." << endl; return false; } rc = pthread_mutex_init(&m_engineListMutex, NULL); if (rc != 0) { cerr << "Failed to create engine list mutex." << endl; return false; } #endif // Open output files. Allow setting the output files to stdout or stderr (for debug purposes). if (notificationFileName == "stdout" || notificationFileName.empty()) { m_notificationFile = stdout; } else if (notificationFileName == "stderr") { m_notificationFile = stderr; } else { m_notificationFile = fopen(notificationFileName.c_str(), "w"); if (m_notificationFile == NULL) { cerr << "Failed to open notification file " << notificationFileName << endl; return false; } } if (logFileName == "stdout" || logFileName.empty()) { m_logFile = stdout; } else if (logFileName == "stderr") { m_logFile = stderr; } else { m_logFile = fopen(logFileName.c_str(), "w"); if (m_logFile == NULL) { cerr << "Failed to open log file " << logFileName << endl; return false; } } if (captureFileName.empty()) { m_captureFile = NULL; } else if (captureFileName == "stdout") { m_captureFile = stdout; } else if (captureFileName == "stderr") { m_captureFile = stderr; } else { m_captureFile = fopen(captureFileName.c_str(), "w"); if (m_captureFile == NULL) { cerr << "Failed to open capture file " << captureFileName << endl; return false; } } // Open input file. Allow setting it to stdin if (inputFileName == "stdin" || inputFileName.empty()) { m_inputFile = &cin; } else { m_inputFile = new ifstream(inputFileName.c_str()); if (m_inputFile == NULL || m_inputFile->fail() ) { HeaderPrinter::printHeader(m_logFile); fprintf(m_logFile, "Failed to open input file %s\n", inputFileName.c_str()); return false; } } // Parse params and add them to the common storage map if (!parseParams(params)) { cerr << "Failed to parse param string " << params << endl; return false; } // Initialize engtine manager m_engineManager.init(this); // Set ProgressTracker in common storage so it will be accessible to engines. m_commonStorage.insert(pair<string, void *>("progressTracker", new ProgressTracker(this, 0.1))); // Print progress every 10% // Initilaize the engine types initEngineTypes(); #ifndef USE_SVG_SERVER //check if we support SVGVideo m_svgSupported = m_engineManager.isEngineTypeSupported("SVG2Video"); if (m_svgSupported){ //initialize JVM m_env = create_jvm(&m_jvm); if (m_env == NULL) { m_logger.error("Failed to generate m_jvm\n"); //throw SWException("Failed to generate m_jvm\n"); } else{ m_commonStorage.insert(pair<string, void *>("java_env", m_env)); m_commonStorage.insert(pair<string, void *>("jvm", m_jvm)); m_svgSupported = true; } } #endif // Initialize the local memory stream manager VideoStream::CLocalMemStreamManager::init(); // Bind callbacks to the CLI manager if (! 
m_cliManager.addCommand("run", "Run an engine.", "Executes an engine.\n" "Usage: run -type engineType -id executionId [-debug_level debug_level] [-args args]\n" " -type engineType : Any suppported engine type (see print-engine-types).\n" " -id executionId : A unique identifier for the engine execution.\n" " -debug_level debug_level: The debug level for this execution (optional).\n" " -args args : The arguments to be passed to the engine.\n" " The args must appear last.\n" "All parameters should be encoded using url encoding.\n", RenderingManager::executeEngine, this)) { return false; } if (! m_cliManager.addCommand("report", "Prints a report on the engines that are running.", "Prints a list of all engines being currently executed.", RenderingManager::reportEngines, this)) { return false; } if (! m_cliManager.addCommand("report-by-type", "Prints supported engines.", "Prints a list of engine types supported by the engine manager.", RenderingManager::reportEngineTypes, this)) { return false; } if (! m_cliManager.addCommand("stop", "End process when all executions end.", "Indicate that there will be no more engine executions. When all current executions end, end the process.", RenderingManager::stopExecutions, this)) { return false; } if (! m_cliManager.addCommand("stop-after", "End process when all executions of a certain type end.", "Usage: stop-after engineType\n" "Indicate that there will be no more engine executions of any type.\n" "When all executions of the given engine type end, end the process.", RenderingManager::stopExecutionsAfter, this)) { return false; } if (! m_cliManager.addCommand("kill", "Terminate engine executions.", "Terminate currently running engine executions and end the process.", RenderingManager::killEngines, this)) { return false; } if (! m_cliManager.addCommand("limit", "Limit the number of concurrent executions per engine type.", "Usage: limit engineType numberOfConcurrentExecutions\n" "Limit the number of concurrent executions of engines of the given type\n" "When this limit is reached, further engines wait in a queue until currently\n" "running engines finish running.\n" "Engines are executed in a first-come-first-serve manner.\n", RenderingManager::limitExecutions, this)) { return false; } if (! m_cliManager.addCommand("pause", "Pauses execution of engines.", "Pauses all local video streams (thus effectively pausing all engines)\n" "Use the \"resume\" command to resume local video streams.\n", RenderingManager::pauseLocalVideoStreams, this)) { return false; } if (! m_cliManager.addCommand("resume", "Resumes execution of paused engines.", "Cancels the affect of the \"pause\" command.\n", RenderingManager::resumeLocalVideoStreams, this)) { return false; } m_cliManager.setCaptureFile(m_captureFile); // TODO: Make this configurable m_cliManager.setTimeOut(3600.0); return true; } RenderingManager::~RenderingManager() { #ifdef WIN32 CloseHandle(m_fileMutex); CloseHandle(m_stopExecutionMutex); CloseHandle(m_engineListMutex); #else pthread_mutex_destroy(&m_fileMutex); pthread_mutex_destroy(&m_stopExecutionMutex); pthread_mutex_destroy(&m_engineListMutex); #endif if ( m_inputFile && m_inputFile != &cin ) delete m_inputFile; #ifndef USE_SVG_SERVER if (m_svgSupported) { if (m_jvm != NULL){ m_jvm->DestroyJavaVM(); FreeLibrary(jvmdll); } } #endif } #ifndef USE_SVG_SERVER JNIEnv* RenderingManager::create_jvm(JavaVM ** jvm){ m_logger.trace("Jvm dll loaded. 
\n"); JavaVMInitArgs vm_args; char *libraryPath = getenv ("SMARTVIDEO_LIBRARY_PATH"); if (libraryPath == NULL) { //set default java classpath libraryPath = new char[4096]; sprintf(libraryPath, "%s", // Note - due to a typo in the original jar, we accept both sundaysky-batik.jar and sunkdaysky-batik.jar (sundaysky-batik.jar should be first, it's the newer, and you should avoid actually having both) "./libs/sundaysky-batik.jar;" "./libs/crimson-1.1.3.jar;./libs/jacl-1.2.6.jar;./libs/js.jar;jython-2.2.jar;./libs/pdf-transcoder.jar;" "./libs/tcljava-1.2.6.jar;./libs/xalan-2.6.0.jar;./libs/xerces_2_5_0.jar;./libs/xml-apis-ext.jar;./libs/xml-apis.jar;" "./libs/slf4j-api-1.6.4.jar;" "./libs/xuggle-xuggler-arch-i686-w64-mingw32.jar;" "./libs/xuggle-xuggler-arch-x86_64-w64-mingw32.jar;" "./libs/xuggle-xuggler-noarch-5.4.jar"); m_logger.trace("SMARTVIDEO_LIBRARY_PATH missing. Use default: %s", libraryPath); } // Java options: JavaVMOption options[2]; // Option 1 - classpath containing required jars to run int n = 0; char *parms = new char[4096]; sprintf(parms, "-Djava.class.path=%s", libraryPath); options[n++].optionString = parms; // Option 2 - enough memory in Java to run options[n++].optionString = "-XX:MaxHeapSize=512m"; vm_args.version = JNI_VERSION_1_6; //JDK version. This indicates version 1.6 vm_args.nOptions = n; vm_args.options = options; vm_args.ignoreUnrecognized = 0; m_logger.trace("Options are: %s, %s\n", options[0].optionString, options[1].optionString); char* jvmDllPath = new char[1024]; char* javaHome = getenv("JAVA_HOME"); if (javaHome == NULL){ m_logger.info("JAVA_HOME missing. Use default.\n"); jvmDllPath = "C:/Program Files/Java/jre6/bin/server/jvm.dll"; } else{ sprintf(jvmDllPath, "%s/jre/bin/server/jvm.dll", javaHome); } m_logger.trace("JAVA_HOME initial value is %s\n", jvmDllPath); m_logger.trace("JVM path is : %s", jvmDllPath); m_logger.trace("Loading library path"); jvmdll = LoadLibrary(jvmDllPath); // //L"C:/Program Files (x86)/Java/jdk1.8.0_25/jre/bin/server/jvm.dll" if (jvmdll == NULL) { long code = GetLastError(); m_logger.error("Can't load JVM dll, code is %d\n", code); //throw SWException("Can't load JVM dll"); return NULL; } m_logger.trace("Jvm dll loaded. \n"); CreateJavaVM_t* pfnCreateJavaVM=(CreateJavaVM_t*)GetProcAddress(jvmdll,"JNI_CreateJavaVM"); if (pfnCreateJavaVM == NULL){ m_logger.error("Can't get function JNI_CreateJavaVM, code is %d\n", GetLastError()); return NULL; } m_logger.trace("After Get proc. \n"); jint ret= pfnCreateJavaVM(jvm,(void**)&m_env, &vm_args); if (ret != JNI_OK){ m_logger.error("JVM start failed (pfnCreateJavaVM) - return code is %d\n", GetLastError()); return NULL; } m_logger.trace("JVM creation - return code is %d\n", ret); m_logger.info("Finished create jvm. \n"); return m_env; } #endif int RenderingManager::run() { try { m_cliManager.run(*m_inputFile); } catch (const SWException& ex) { m_logger.error("Caught exception %s at\n%s", ex.what(), ex.getStackTrace().c_str() ); } catch (const exception& ex) { m_logger.error("Caught exception: %s\n", ex.what()); } catch (const string& ex) { m_logger.error("Caught string: %s\n", ex.c_str()); } catch (...) 
{ m_logger.error("Caught an error\n"); } return 0; } void RenderingManager::initEngineTypes() { try { m_engineManager.initEngineType("Test", new TestEngineFactory()); m_engineManager.initEngineType("Test2", new TestEngineFactory()); //m_engineManager.initEngineType("ConsumerTest", new VideoConsumerEngineFactory()); //m_engineManager.initEngineType("ProducerTest", new VideoProducerEngineFactory()); //m_engineManager.initEngineType("ConsumerProducerTest", new VideoConsumerProducerEngineFactory()); m_engineManager.initEngineType("Synthesizer", new LightSynthesizerEngineFactory()); m_engineManager.initEngineType("Decoder", new VideoDecoderEngineFactory()); m_engineManager.initEngineType("Encoder", new VideoEncoderEngineFactory()); m_engineManager.initEngineType("SVG2Video", new SVG2VideoEngineFactory()); //m_engineManager.initEngineType("Flash2Video", new Flash2VideoEngineFactory()); //m_engineManager.initEngineType("VideoTransition", new VideoTransitionEngineFactory()); } catch (const SWException& ex) { m_logger.error("initEngineTypes error: %s\nStack trace at\n%s\n", ex.what(), ex.getStackTrace().c_str()); writeToNotificationFile("ENGINE INIT ERROR - %s\n", ex.what()); exit (-1); } } bool RenderingManager::parseParams(const string & params) { // These are parameters to the rendering manager, to be put in the common storage for use of all engines. // The format of params is expected to be something like "name1=val1&name2=val2&name3=val3". // Perhaps in the future we should do UrlDecoding for the names and values. For now we just assume that the contain no '=', '&', '%', or white spaces. unsigned int pos = 0; while (params.length() > pos) { int equalPos = params.find_first_of('=', pos); if (equalPos < 0) return false; string name(params, pos, equalPos-pos); pos=equalPos+1; int ampPos = params.find_first_of('&', pos); if (ampPos < 0) ampPos = params.length(); string *value = new string(params, pos, ampPos-pos); pos=ampPos+1; // There is a possible memory leak here because it is unclear who is supposed to free value. // However, this is not critical because this happens only once on initialization. m_commonStorage.insert(pair<string, void *>(name, value)); } return true; } bool RenderingManager::parseArgs(char *argv[], int argc, string & engineType, string & executionId, DebugLevel & debugLevel, int & firstArgIndex) { // argv is the arguments of the specific command, not of the executable. 
int i; for (i = 0; i < argc; i++) { if (argv[i][0] != '-') { return false; } string var(argv[i] + 1); if (var == "type") { i++; if (i >= argc) { return false; } engineType = string(argv[i]); } else if (var == "id") { i++; if (i >= argc) { return false; } executionId = string(argv[i]); } else if (var == "debug_level") { i++; if (i >= argc) { return false; } if (!debugLevelFromString(string(argv[i]), debugLevel)) { cerr << argv[i] << " is not a valid debug level" << endl; return false; } } else if (var == "args") { i++; break; } else { return false; } } firstArgIndex = i; return true; } void RenderingManager::executeEngine(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; if (rndMng->m_noMoreNewExecutions) { rndMng->m_logger.error("Rendering Manager - Engine executions are no longer allowed\n"); return; } // The format of the fullCommand is (expected to be): char *usage = "run -type engineType -id executionId [-debug_level debug_level] [-args args]"; // The engineType and executionId are mandatory the rest are optional // The -args parameter must be the last one because there can be multiple arguments. // First peel off the "run". int spaceIndex = fullCommand.find(' '); if (spaceIndex == string::npos) { rndMng->m_logger.error("Rendering Manager - Expected format: %s\n", usage); return; } string actualCommand(fullCommand.substr(spaceIndex)); // Use the CommandParser to parse the command line CommandParser parser(actualCommand); if (!parser.IsValidCommand()) { rndMng->m_logger.error("Rendering Manager - Error parsing command line: %s\n", actualCommand.c_str()); return; } int argc = parser.getArgc(); char** argv = parser.getArgv(); string engineType(""); string executionId(""); DebugLevel debugLevel = rndMng->m_defaultDebugLevel; int firstArgIndex = 0; if (! rndMng->parseArgs(argv, argc, engineType, executionId, debugLevel, firstArgIndex)) { rndMng->m_logger.error("Rendering Manager - Error parsing run command line. Expected format: %s\n", usage); parser.freeArgs(); return; } if (engineType == "") { rndMng->m_logger.error("Rendering Manager - Engine Type is missing. Expected format: %s\n", usage); parser.freeArgs(); return; } if (executionId == "") { rndMng->m_logger.error("Rendering Manager - Execution ID is missing. Expected format: %s\n", usage); parser.freeArgs(); return; } if (rndMng->getEngineManager().getEngineByExecutionId(executionId) != NULL) { rndMng->m_logger.error("Rendering Manager - There is already an engine execution ID %s. Cannot run another.\n", executionId.c_str()); parser.freeArgs(); return; } EngineFactory *ef = rndMng->m_engineManager.getEngineFactory(engineType); if (ef == NULL) { rndMng->m_logger.error("Rendering Manager - Engine %s is not supported.\n", engineType.c_str()); parser.freeArgs(); return; } Engine * engine = ef->createEngine(executionId); if (engine == NULL) { rndMng->m_logger.error("Rendering Manager - Engine creation failed. Engine type = %s. Engine execution ID = %s.\n", engineType.c_str(), executionId.c_str()); parser.freeArgs(); return; } if (!engine->init(rndMng)) { rndMng->m_logger.error("Rendering Manager - Engine initialization failed. Engine type = %s. Engine execution ID = %s.\n", engineType.c_str(), executionId.c_str()); parser.freeArgs(); return; } // Set the engine's debug level (which can be different from the rendering manager's default debug level). engine->setDebugLevel(debugLevel); // Start the engine in a new thread. 
It is the engines responsibility to free argv and itself once it is done. This is done in the abstract Engine class. engine->run(argc, argv, firstArgIndex); } void RenderingManager::reportEngineTypes(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; rndMng->m_engineManager.printEngineTypeReport(); } void RenderingManager::reportEngines(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; rndMng->m_engineManager.printReport(); } void RenderingManager::stopExecutions(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; rndMng->m_noMoreNewExecutions = true; exitCli = rndMng->applyExitConditions(); } void RenderingManager::stopExecutionsAfter(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; // First peel off the "stop-after". int spaceIndex = fullCommand.find(' '); if (spaceIndex == string::npos) { rndMng->m_logger.error("Usage: stop-after engineType\n"); return; } string engineType(fullCommand.substr(spaceIndex)); // Remove leading and trailing spaces int firstNonSpaceIndex = engineType.find_first_not_of(' '); int lastNonSpaceIndex = engineType.find_last_not_of(' '); if (firstNonSpaceIndex == string::npos) { rndMng->m_logger.error("Usage: stop-after engineType\n"); return; } engineType = engineType.substr(firstNonSpaceIndex, lastNonSpaceIndex + 1 - firstNonSpaceIndex); if (! rndMng->m_engineManager.isEngineTypeSupported(engineType)) { rndMng->m_logger.error("Engine type %s not supported\n", engineType.c_str()); return; } rndMng->m_noMoreNewExecutions = true; rndMng->m_stopAfterEngineType = engineType; exitCli = rndMng->applyExitConditions(); } void RenderingManager::killEngines(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; RenderingManager::EngineListMutexAutoLock mal(rndMng); // Exiting the cli will cause the process to end (including all the currently running threads). rndMng->killExecutions(); exitCli = true; } void RenderingManager::limitExecutions(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; // Use command parser CommandParser parser(fullCommand); if (parser.getArgc() != 3) { rndMng->m_logger.error("Usage: limit engineType numberOfConcurrentExecutions\n"); parser.freeArgs(); return; } string engineType(parser.getArgv()[1]); int numberOfConcurrentExecutions = atoi(parser.getArgv()[2]); if (! rndMng->m_engineManager.isEngineTypeSupported(engineType)) { rndMng->m_logger.error("Error: Engine type %s is not supported\n", engineType.c_str()); parser.freeArgs(); return; } if (numberOfConcurrentExecutions <= 0) { // This check also catches cases where the parameter was not a number. 
rndMng->m_logger.error("Error: Invalid concurrent execution limit %s\n", parser.getArgv()[2]); parser.freeArgs(); return; } if (rndMng->m_engineManager.hasExecutions(engineType)) { rndMng->m_logger.error("Error: Cannot set a limit on executions of type %s after executions have begun\n", engineType.c_str()); parser.freeArgs(); return; } rndMng->m_engineManager.setEngineTypeLimit(engineType, numberOfConcurrentExecutions); parser.freeArgs(); } void RenderingManager::pauseLocalVideoStreams(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; VideoStream::CLocalMemStreamManager::pauseAllStreams(); rndMng->writeToNotificationFile("STREAMS PAUSED\n"); } void RenderingManager::resumeLocalVideoStreams(string fullCommand, void *callbackData, bool & exitCli) { RenderingManager * rndMng = (RenderingManager *)callbackData; rndMng->writeToNotificationFile("STREAMS RESUMED\n"); VideoStream::CLocalMemStreamManager::resumeAllStreams(); } void RenderingManager::writeToNotificationFile(const char *format, ...) { va_list argList; va_start(argList, format); writeToNotificationFile_va(true, NULL, format, argList); va_end(argList); } void RenderingManager::writeToNotificationFile(Engine *engine, const char *format, ...) { va_list argList; va_start(argList, format); writeToNotificationFile_va(true, engine, format, argList); va_end(argList); } void RenderingManager::writeToNotificationFileNoHeader(const char *format, ...) { va_list argList; va_start(argList, format); writeToNotificationFile_va(false, NULL, format, argList); va_end(argList); } void RenderingManager::writeToNotificationFile_va(bool printHeader, Engine *engine, const char *format, va_list argList) { FileMutexAutoLock mal(this); if (printHeader) { if (engine != NULL) HeaderPrinter::printHeader(m_notificationFile, engine); else HeaderPrinter::printHeader(m_notificationFile); } vfprintf(m_notificationFile, format, argList); fflush(m_notificationFile); } void RenderingManager::writeEngineStart(Engine *engine) { #ifdef WIN32 writeToNotificationFile(engine, "START (thread id=%lu)\n", GetCurrentThreadId()); #else pthread_t self_id; self_id = pthread_self(); writeToNotificationFile(engine, "START (thread id=%lu)\n", self_id); #endif } void RenderingManager::writeEngineEnd(Engine *engine) { clock_t now = clock(); float diff = ((float)now - (float)engine->getStartTime()) / ((float)CLOCKS_PER_SEC); writeToNotificationFile(engine, "END (total engine execution time %.3f seconds)\n", diff); } void RenderingManager::writeProgress(unsigned int progress, unsigned int outOf) { writeToNotificationFile("PROGRESS %u OUT OF %u\n", progress, outOf); } void RenderingManager::writeReadyToPlay(unsigned int renderedChunk) { writeToNotificationFile("READY TO PLAY VIDEO %u\n", renderedChunk); } void RenderingManager::writeTotalSleepTime(unsigned int totalSleepTime) { writeToNotificationFile("TOTAL_SLEEP_TIME %u\n", totalSleepTime); } void RenderingManager::sceneRendered(const char* sceneName, unsigned int frames, float renditionTime, float videoRenditionTime){ writeToNotificationFile("SCENE RENDERED sceneName: %s renderedFrames: %u renditionTime: %.2f videoRenditionTime: %.2f\n", sceneName, frames,renditionTime, videoRenditionTime); } void RenderingManager::setEncoderProgress(unsigned int frames, double timePassed, double averageFramesPerSec){ writeToNotificationFile("ENCODER PROGRESS encodedFramesNumber: %u elapsedTime: %.2f averageFramePerSec: %.2f\n", frames,timePassed, averageFramesPerSec); } void 
RenderingManager::exceedPaddedSize(int codecType, int frameNumber, int frameSize, int maxFrameSize){ writeToNotificationFile("EXCEED PADDED SIZE Codec Type: %d Frame size is too big: Frame Number: %d Current: %d, Defined max frame size: %d\n",codecType, frameNumber, frameSize, maxFrameSize); //writeToNotificationFile("EXCEED PADDED SIZE (%s) frameNumber %d frameSize=%d, maxFrameSize=%d %d\n", codecType == 0? "v" : "a", frameNumber, frameSize, maxFrameSize); } void RenderingManager::writeEvent(const char *name, const char *value){ writeToNotificationFile("%s: %s \n.", name, value); } bool RenderingManager::testAndSetStopExecutions() { // Set the value to true and return the previous value StopExecutionMutexAutoLock mal(this); bool prevValue = m_stopExecutions; m_stopExecutions = true; return prevValue; } void RenderingManager::endJobExecutionNormally() { if (testAndSetStopExecutions() == true) { // We are already in the process of stopping execution. return; } writeToNotificationFile("JOB ENDED\n"); } void RenderingManager::stopExecutionsDueToEngineFailure(Engine *engine, const char *format, va_list argList) { if (testAndSetStopExecutions() == true) { // We are already in the process of stopping execution. return; } { // Take the FileMutexAutoLock so that all the printings will be together. FileMutexAutoLock mal(this); // Write a message in the notification file indicating an error. // For simplicity of parsing, print the error message in a separate line (and don't rely on it ending with a newline). writeToNotificationFile("ENGINE ABORTION - Error message: "); writeToNotificationFile_va(false, NULL, format, argList); if (strlen(format)==0 || format[strlen(format)-1] != '\n') writeToNotificationFileNoHeader("\n"); writeToNotificationFileNoHeader("*** END ERROR MESSAGE ***\n"); if (m_dbg) { // Debug printing fprintf(m_logFile, "Open streams:\n"); VideoStream::CLocalMemStreamManager::printReport(m_logFile); fflush(m_logFile); } } // Call stopEngine() method of running engines. m_engineManager.stopAllEngines(engine); } void RenderingManager::stopExecutionsDueToEngineEnding(string endingEngineType) { if (testAndSetStopExecutions() == true) { // We are already in the process of stopping execution. return; } { // Take the FileMutexAutoLock so that all the printings will be together. FileMutexAutoLock mal(this); writeToNotificationFile("JOB TERMINATED - no more executions of type %s\n", endingEngineType.c_str()); if (m_dbg) { // Debug printing fprintf(m_logFile, "Open streams:\n"); VideoStream::CLocalMemStreamManager::printReport(m_logFile); fflush(m_logFile); } } // Call stopEngine() method of running engines.` m_engineManager.stopAllEngines(NULL); } void RenderingManager::killExecutions() { if (testAndSetStopExecutions() == true) { // We are already in the process of stopping execution. return; } // Write a message in the notification file indicating an abrupt end. writeToNotificationFile("JOB KILLED EXTERNALLY\n"); // Call stopEngine() method of running engines. m_engineManager.stopAllEngines(NULL); } // return true if the process should be terminated and false if not bool RenderingManager::applyExitConditions() { RenderingManager::EngineListMutexAutoLock mal(this); if (areExecutionsAllowed()) { // Don't exit. return false; } if (m_engineManager.getNumberOfEngines() == 0) { // The last engine finished and the "stop" or "stop-after" command has been issued. // Exit the process. 
endJobExecutionNormally(); if (m_dbg) { // Debug printing RenderingManager::FileMutexAutoLock mal(this); fprintf(m_logFile, "Open streams:\n"); VideoStream::CLocalMemStreamManager::printReport(m_logFile); fflush(m_logFile); } return true; } if (!m_stopAfterEngineType.empty() && ! m_engineManager.hasExecutionsOfType(m_stopAfterEngineType)) { // There are no more executions of the type we are waiting for. stopExecutionsDueToEngineEnding(m_stopAfterEngineType); if (m_dbg) { // Debug printing RenderingManager::FileMutexAutoLock mal(this); fprintf(m_logFile, "Open streams:\n"); VideoStream::CLocalMemStreamManager::printReport(m_logFile); fflush(m_logFile); } return true; } return false; } #ifndef WIN32 vector<string> getArgs(char *argv[], int argc) { vector<string> args; for (int i = 0; i < argc; i++) { args.push_back(argv[i]); } return args; } bool parseArgs(vector<string> & args, map<string, string> &retMap) { for (unsigned int i = 0; i < args.size(); i++) { if (args[i].substr(0, 1) != "-") { cerr << "Error analyzing argument " << args[i] << ". Expecting it to start with '-'." << endl; return false; } string var = args[i].substr(1); if (var == "showPrompt") { retMap.insert(pair<string, string>(var, "true")); } else if ((var == "inputFile") || (var == "notificationFile") || (var == "logFile") || (var == "captureFile") || (var == "debugLevel") || (var == "params")) { i++; if (i >= args.size()) { cerr << "Error: arguments ended unexpectedly after " << args[i - 1] << endl; return false; } retMap.insert(pair<string, string>(var, args[i])); } else { cerr << "Error: unsupported argumnet " << args[i] << endl; return false; } } return true; } string get_base_folder(string & exepath) { size_t last_slash = exepath.find_last_of("/"); if (last_slash == std::string::npos) // Relative... 
return string("."); else return exepath.substr(0, last_slash); } int main(int argc, char* argv[]) { const char * usageParams = " [-inputFile inputFileName] [-notificationFile notificationFileName] [-logFile logFileName] [-captureFile captureFileName] [-debugLevel debugLevel] [-showPrompt] [-params params]"; if (argc < 1) { cerr << "Usage: " << argv[0] << usageParams << endl; return -1; } vector<string> args = getArgs(argv + 1, argc - 1); map<string, string> argsMap; bool rc = parseArgs(args, argsMap); if (!rc) { cerr << "Usage: " << argv[0] << usageParams << endl; return -1; } RenderingManager rndMng(argsMap["showPrompt"] == "true"); string exepath = argv[0]; string baseFolder = get_base_folder(exepath); string inputFile = argsMap["inputFile"]; string notificationFileName = argsMap["notificationFile"]; string logFileName = argsMap["logFile"]; string captureFileName = argsMap["captureFile"]; string debugLevel = argsMap["debugLevel"]; string params = argsMap["params"]; if (!rndMng.init(baseFolder, inputFile, notificationFileName, logFileName, captureFileName, debugLevel, params)) { //m_exitCode = RENDERER_ERR_FAILED_RENDERING_MANAGER_INIT; return FALSE; } return rndMng.run(); } #endif <file_sep>/Fmod/ConstantInterpolated.h #ifndef __CONSTANT_INTERPOLATED_H__ #define __CONSTANT_INTERPOLATED_H__ #pragma once #include "Interpolated.h" class ConstantInterpolated : public Interpolated { private: double value; public: ConstantInterpolated() { } ConstantInterpolated(double v, double st, double et) { value = v; startTime = st; endTime = et; } ~ConstantInterpolated(void){} double getValue() { return value; } void setValue(double v) { value = v; } std::string getType() { return "ConstantInterpolated"; } }; #endif <file_sep>/utils/Utils/Profiler.cpp #include "Profiler.h" #include <iostream> #include <fstream> #include <sstream> #include <string.h> #include <windows.h> #include <math.h> using namespace std; Profiler::Profiler(std::string filename, std::string profilingConfigFileName, int rate, int scale, int endFrame): m_profilingConfigFileName(profilingConfigFileName), m_rate(rate), m_scale(scale) { m_startFrame = 0; m_endFrame = endFrame; withAlfa = false; if (filename.empty()) m_profilingFile = NULL; else { m_profilingFile = _fsopen(filename.c_str(), "w", _SH_DENYWR); if (m_profilingFile == NULL) { /*m_exception = new SWException( "Failed to open profiling file %s\n", filename.c_str()); SendMessage( WM_CLOSE, 0 , 0);*/ std::stringstream errMsg; errMsg << "Failed to open profiling file '" << filename.c_str() << "'"; throw std::exception( errMsg.str().c_str()); } m_profileTimesNoAlphaMS = new double[endFrame]; m_profileTimesWithAlphaMS = new double[endFrame]; m_profileTimesTotalMS = new double[endFrame]; for (int i=0; i < endFrame; i++){ m_profileTimesNoAlphaMS[i] = 0; m_profileTimesWithAlphaMS[i] = 0; m_profileTimesTotalMS[i] = 0; } QueryPerformanceCounter((LARGE_INTEGER*)&m_initTime); setStartTime(); } QueryPerformanceFrequency((LARGE_INTEGER*)&m_queryPerformanceFreq); } Profiler::~Profiler(void) { try{ if ( m_profilingFile ) { //WriteProfileFile(0, 0); fclose(m_profilingFile); /*if ( m_profileTimesNoAlphaMS ) delete [] m_profileTimesNoAlphaMS; if ( m_profileTimesWithAlphaMS ) delete [] m_profileTimesWithAlphaMS; if ( m_profileTimesTotalMS ) delete [] m_profileTimesTotalMS;*/ } } catch(...){ } } bool Profiler::GetDoubleFromMap( const map<string,double> &keyValues, const string &key, double &result, bool mustExist ) { map<string,double>::const_iterator val = keyValues.find( key ); if ( val == 
keyValues.end() ) { if ( mustExist ) fprintf(m_profilingFile, "Failed to parse line in profiling config file %s: missing field: %s\n", m_profilingConfigFileName.c_str(), key.c_str()); return false; } result = val->second; return true; } #define MAX_TARGET_JOBS 10 // limits[i] = limit for i+1 jobs (-1 = not calculated). // nlimits = number of calculated limits bool Profiler::CalcProfilingLimits( double *limits, int &nlimits ) { for ( int i = 0; i < MAX_TARGET_JOBS; i++ ) limits[i] = -1; if ( m_profilingConfigFileName.empty() ) return false; std::ifstream infile(m_profilingConfigFileName.c_str()); if ( !infile.is_open() ) { fprintf(m_profilingFile, "Failed to open profiling config file %s\n", m_profilingConfigFileName.c_str()); return false; } // Read key-values pairs map<string,double> keyValues; string line; while ( getline(infile, line) ) { size_t eqsign = line.find_first_of("="); if ( eqsign == string::npos ) { fprintf(m_profilingFile, " Failed to parse line in profiling config file %s: %s\n", m_profilingConfigFileName.c_str(), line.c_str()); return false; } string key = line.substr(0, eqsign); key = key.erase(key.find_last_not_of(" \n\r\t")+1); string val = line.substr( eqsign+1 ); double dval = atof( val.c_str() ); keyValues[key] = dval; } infile.close(); double targetMachine_final_bound; double targetMachine_nonf2v_overhead_pctOfRealTime; double targetMachine_refScene_1job; double myMachine_refScene_1job; double targetMachine_refScene_Njobs[MAX_TARGET_JOBS]; char *names[] = { "targetMachine_final_bound", "targetMachine_nonf2v_overhead_pctOfRealTime", "targetMachine_refScene_1job", "myMachine_refScene_1job", }; double *vals[] = { &targetMachine_final_bound, &targetMachine_nonf2v_overhead_pctOfRealTime, &targetMachine_refScene_1job, &myMachine_refScene_1job, }; for ( int i = 0; i < sizeof(vals)/sizeof(*vals); i++ ) { if ( !GetDoubleFromMap( keyValues, names[i], *vals[i], true ) ) { return false; } } if ( targetMachine_refScene_1job <= 0 || myMachine_refScene_1job <= 0 ) { fprintf(m_profilingFile, "profiling parameters should be positive\n"); return false; } for ( int i = 1; i < MAX_TARGET_JOBS; i++ ) { const char fmt[] = "targetMachine_refScene_%djobs"; char key[sizeof(fmt)]; sprintf_s(key, fmt, i+1); bool mandatory = ( i == 0 ); // Only 1job is mandatory if ( !GetDoubleFromMap( keyValues, key, targetMachine_refScene_Njobs[i], mandatory ) ) { targetMachine_refScene_Njobs[i] = -1; // Not calculated continue; } if ( targetMachine_refScene_Njobs[i] <= 0 ) { fprintf(m_profilingFile, "profiling parameters should be positive\n"); return false; } } limits[0] = ( targetMachine_final_bound - targetMachine_nonf2v_overhead_pctOfRealTime ) * myMachine_refScene_1job / targetMachine_refScene_1job; nlimits = 1; for ( int i = 1; i < MAX_TARGET_JOBS; i++ ) { if ( targetMachine_refScene_Njobs[i] > 0 ) { limits[i] = limits[0] * targetMachine_refScene_1job / targetMachine_refScene_Njobs[i]; nlimits++; } } return true; } void Profiler::WriteProfileFile(int startFrame, int lastFrameWritten) { if ( !m_profileTimesNoAlphaMS || lastFrameWritten < 0 ) return; if (!m_profilingFile) { return; } // Limits double limits[MAX_TARGET_JOBS]; int nlimits; CalcProfilingLimits( limits, nlimits ); fprintf(m_profilingFile, "<html>\n" " <head>\n" " <script type=\"text/javascript\" src=\"https://www.google.com/jsapi\"></script>\n" " <script type=\"text/javascript\">\n" " google.load(\"visualization\", \"1\", {packages:[\"corechart\"]});\n" " google.setOnLoadCallback(drawCharts);\n" " function drawCharts() {\n" " var 
data_frameTime = google.visualization.arrayToDataTable([\n" " ['Frame', 'no alpha', 'with alpha', 'elapsed'],\n"); /* * Time per frame */ int frames = (lastFrameWritten > m_endFrame ? m_endFrame : lastFrameWritten); for ( int fr = startFrame; fr < frames; fr++ ) { double curTotal; if ( fr == startFrame ) curTotal = m_profileTimesTotalMS[fr]; else curTotal = m_profileTimesTotalMS[fr] - m_profileTimesTotalMS[fr-1]; double frameIntervalMS = ((double)m_scale)/m_rate * 1000; double pctRealTimeWithAlpha = m_profileTimesWithAlphaMS[fr] / frameIntervalMS * 100; double pctRealTimeNoAlpha = m_profileTimesNoAlphaMS[fr] / frameIntervalMS * 100; double pctRealTimeTotal = curTotal / frameIntervalMS * 100; fprintf(m_profilingFile," [%d, %f, %f, %f],\n", fr, pctRealTimeNoAlpha, pctRealTimeWithAlpha, pctRealTimeTotal ); } fprintf(m_profilingFile, " ]);\n\n" " var options_frameTime = {\n" " title: 'Rendition time (in ms) per frame',\n" " colors: ['#0000FF','#00FF00','#A0A000' ]\n" " };\n\n" " var chart_frameTime = new google.visualization.LineChart(document.getElementById('chart_div_frameTime'));\n" " chart_frameTime.draw(data_frameTime, options_frameTime);\n" "\n" " var data_pctRealTime = google.visualization.arrayToDataTable([\n" " ['Frame', 'no alpha', 'with alpha'"); for ( int njobs = 1; njobs <= MAX_TARGET_JOBS; njobs++ ) { if ( limits[njobs-1] > 0 ) fprintf(m_profilingFile, ", 'Limit - %d job%s'", njobs, njobs > 1 ? "s" : ""); } fprintf(m_profilingFile, "],\n"); /* * Pct of real time */ double timePerSecondWithAlphaMS = 0; double timePerSecondNoAlphaMS = 0; double totalRenderTimeWithAlphaMS = 0; double totalRenderTimeNoAlphaMS = 0; int fps = (int)ceil((double)m_rate / m_scale); if ( fps < 1 ) fps = 1; for ( int fr = startFrame; fr < frames; fr++ ) { timePerSecondWithAlphaMS += m_profileTimesWithAlphaMS[fr]; totalRenderTimeWithAlphaMS += m_profileTimesWithAlphaMS[fr]; timePerSecondNoAlphaMS += m_profileTimesNoAlphaMS[fr]; totalRenderTimeNoAlphaMS += m_profileTimesNoAlphaMS[fr]; if ( fr >= startFrame+fps-1 ) { double curTotal = m_profileTimesTotalMS[fr]; if ( fr >= startFrame+fps ) { timePerSecondWithAlphaMS -= m_profileTimesWithAlphaMS[fr-fps]; timePerSecondNoAlphaMS -= m_profileTimesNoAlphaMS[fr-fps]; curTotal -= m_profileTimesTotalMS[fr-fps]; } double pctRealTimeWithAlpha = timePerSecondWithAlphaMS / 1000 * 100; double pctRealTimeNoAlpha = timePerSecondNoAlphaMS / 1000 * 100; double pctRealTimeTotal = curTotal / 1000 * 100; fprintf(m_profilingFile," [%d, %f, %f", fr, pctRealTimeNoAlpha, pctRealTimeWithAlpha ); for ( int njobs = 1; njobs <= MAX_TARGET_JOBS; njobs++ ) { if ( limits[njobs-1] > 0 ) fprintf(m_profilingFile,", %f", limits[njobs-1]); } fprintf(m_profilingFile,"],\n"); } } double totalTimeMS = ((double)frames-startFrame+1) / m_rate * m_scale * 1000; double avgRealTimePctNoAlpha = totalRenderTimeNoAlphaMS / totalTimeMS * 100; double avgRealTimePctWithAlpha = totalRenderTimeWithAlphaMS / totalTimeMS * 100; double avgRealTimePctTotal = frames == 0 ? 
0 : m_profileTimesTotalMS[frames] / totalTimeMS * 100; fprintf(m_profilingFile, " ]);\n\n" " var options_pctRealTime = {\n" " title: '%% of realtime per second',\n" " colors: ['#0000FF','#00FF00','#000000','#C04040','#FF0000','#FF8000' ]\n" " };\n\n" " var chart_pctRealTime = new google.visualization.LineChart(document.getElementById('chart_div_pctRealTime'));\n" " chart_pctRealTime.draw(data_pctRealTime, options_pctRealTime);\n" "\n" " var data_pctRealTimeCum = google.visualization.arrayToDataTable([\n" " ['Frame', 'no alpha', 'with alpha'"); for ( int njobs = 1; njobs <= MAX_TARGET_JOBS; njobs++ ) { if ( limits[njobs-1] > 0 ) fprintf(m_profilingFile, ", 'Limit - %d job%s'", njobs, njobs > 1 ? "s" : ""); } fprintf(m_profilingFile, "],\n"); /* * Cumulative Pct of real time */ double renderTimeNoAlphaSec = 0; double renderTimeWithAlphaSec = 0; int startDisplayFrame = startFrame; if ( frames - startFrame > 3*fps ) { // Don't display first 2 secs - not informative startDisplayFrame = startFrame + 2*fps - 1; } for ( int fr = startFrame; fr < frames; fr++ ) { renderTimeNoAlphaSec += m_profileTimesNoAlphaMS[fr] / 1000.0; renderTimeWithAlphaSec += m_profileTimesWithAlphaMS[fr] / 1000.0; if ( fr >= startDisplayFrame ) { double timSec = ((double)fr-startFrame+1) / m_rate * m_scale; double pctRealTimeNoAlphaCum = renderTimeNoAlphaSec / timSec * 100; double pctRealTimeWithAlphaCum = renderTimeWithAlphaSec / timSec * 100; double pctRealTimeTotalCum = m_profileTimesTotalMS[fr] / 1000 / timSec * 100; fprintf(m_profilingFile," [%d, %f, %f", fr, pctRealTimeNoAlphaCum, pctRealTimeWithAlphaCum); for ( int njobs = 1; njobs <= MAX_TARGET_JOBS; njobs++ ) { if ( limits[njobs-1] > 0 ) fprintf(m_profilingFile,", %f", limits[njobs-1]); } fprintf(m_profilingFile,"],\n"); } } unsigned long timeBeforeFirstFrameMS = (unsigned long)(((m_renderStartTime-m_initTime) * 1000) / m_queryPerformanceFreq); char *limit_colors[] = { "#000000","#C04040","#FF0000","#FF8000","#FF9000","#FFB000" }; fprintf(m_profilingFile, " ]);\n\n" " var options_pctRealTimeCum = {\n" " title: 'Cumulative %% of realtime',\n" " colors: ['#0000FF','#00FF00'"); // Limit Colors int curcolor = 0; for ( int njobs = 1; njobs <= MAX_TARGET_JOBS; njobs++ ) { if ( limits[njobs-1] > 0 ) { fprintf(m_profilingFile,",'%s'", limit_colors[curcolor]); if ( curcolor+1 < sizeof(limit_colors)/sizeof(*limit_colors) ) curcolor++; } } fprintf(m_profilingFile, " ]\n" " };\n\n" " var chart_pctRealTimeCum = new google.visualization.LineChart(document.getElementById('chart_div_pctRealTimeCum'));\n" " chart_pctRealTimeCum.draw(data_pctRealTimeCum, options_pctRealTimeCum);\n" " }\n" " </script>\n" " </head>\n" " <body>\n" " <div id=\"chart_div_pctRealTimeCum\" style=\"width: 900px; height: 500px;\"></div>\n" " <br> Average %% of realtime (render): %f%%<br>" " <br> Average %% of realtime (render+alpha): %f%%<br>" " <br> Average %% of realtime (total): %f%%<br>" " <br> Elapsed time before first frame: %d ms<br>" " <div id=\"chart_div_pctRealTime\" style=\"width: 900px; height: 500px;\"></div>\n" " <div id=\"chart_div_frameTime\" style=\"width: 900px; height: 500px;\"></div>\n" " </body>\n" "</html>\n", avgRealTimePctNoAlpha, avgRealTimePctWithAlpha, avgRealTimePctTotal, timeBeforeFirstFrameMS ); #if 0 // For debug purposes - write all averages to a file string profileBottomlineFile = "c:\\temp\\pers.csv";//m_args.m_profilingFileName + "_bl.csv"; Sleep( rand() % 3 ); FILE *fp = _fsopen(profileBottomlineFile.c_str(), "a", _SH_DENYWR); if (fp == NULL) { m_exception = new 
SWException( "Failed to open profiling file %s\n", profileBottomlineFile.c_str()); SendMessage( WM_CLOSE, 0 , 0); return; } time_t now = time(NULL); tm localtm; char timeString[30]; timeString[0] = '\0'; if (localtime_s(&localtm, &now) == 0) { strftime(timeString, sizeof(timeString), "%Y-%m-%d %H:%M:%S", &localtm); } fprintf(fp, "%s,,,, %f, %f, %f, %d\n", timeString, avgRealTimePctNoAlpha, avgRealTimePctWithAlpha, avgRealTimePctTotal, timeBeforeFirstFrameMS ); fclose(fp); #endif } void Profiler::startFrameRendering(int frameNum){ if (m_profilingFile) { QueryPerformanceCounter((LARGE_INTEGER*)&startTime); if ( m_startFrame == frameNum ){ m_renderStartTime = startTime; } } } void Profiler::stopFrameRendering(int frameNum){ if (m_profilingFile) { QueryPerformanceCounter((LARGE_INTEGER*)&stopTime); unsigned long TimeDiffinMicroSeconds = (unsigned long)(((stopTime - startTime) * 1000000) / m_queryPerformanceFreq); m_profileTimesNoAlphaMS[frameNum] = ((double)TimeDiffinMicroSeconds) / 1000.0; if (withAlfa) { m_profileTimesWithAlphaMS[frameNum] = m_profileTimesNoAlphaMS[frameNum]; } } } void Profiler::setStartTime(){ if (m_profilingFile) { QueryPerformanceCounter((LARGE_INTEGER*)&startTime); } } void Profiler::setTotalRenderTime(int frameNum){ if (m_profilingFile) { unsigned long TimeDiffinMicroSeconds = (unsigned long)(((stopTime - m_renderStartTime) * 1000000) / m_queryPerformanceFreq); m_profileTimesTotalMS[frameNum] = ((double)TimeDiffinMicroSeconds) / 1000.0; } } <file_sep>/utils/CliManager/include/CommandParser.h #pragma once #include <string> #include <iostream> using namespace std; /* * The CommandParser class receives a command string and converts it into argc,argv format. * Parameters are separated by one or more consecutive spaces. * Each parameter is encoded using URL encoding. * * For example * "arg1 arg2" --> argc=2, argv[0]="arg1", argv[1]="arg2" * "arg 1 arg2" --> argc=3, argv[0]="arg", argv[1]="1", argv[2]="arg2" * "arg%201 arg2" --> argc=2, argv[0]="arg 1", argv[1]="arg2" * * Note that argv[0] and argv must be freed after use. This can be done using the freeArgs() method (but can also be done after * the CommandParser object no longer exists). */ class CommandParser { private: bool m_isValidCommand; int m_argc; char **m_argv; public: // The constructor receives the command line. 
	CommandParser(std::string & command);
	bool IsValidCommand() {return m_isValidCommand;};
	int getArgc() {return m_argc;};
	char ** getArgv() {return m_argv;};
	void freeArgs();

private:
	unsigned int CountLeadingSpaces(string & str);
	unsigned int GetLengthExcludingTrailingSpaces(string & str);
	int getWords(char * str, char **pointers);
	bool urlStringToArgv(string & str, int * argc_p, char *** argv_p);
	void urlDecode(char *str);
	char hexStringToValue(char *str);
};
<file_sep>/VideoStream/VideoStream/Media.cpp
#ifdef WIN32
#include "StdAfx.h"
#include "VSMedia.h"
#include "VSLog.h"
#include "VSAVIFileMedia.h"
#include "VSAVIStdFileMedia.h"
#include "VSShmemMedia.h"
#include "VSLocalMemMedia.h"
#include "SWException.h"
#include <tchar.h>
#else
#include <string.h>
#include "VSMedia.h"
#include "VSLog.h"
#include "VSAVIFileMedia.h"
#include "VSShmemMedia.h"
#include "VSLocalMemMedia.h"
#include "SWException.h"
#endif

using namespace VideoStream;

IMedia *CMediaFactory::GetMedia (LPCTSTR location)
{
	if (strncmp (location, _T("local:"), 6) == 0)
		return new CLocalMemMedia();
	if (strncmp (location, _T("file:"), 5) == 0)
		return new CAVIFileMedia();
	if (strncmp (location, _T("shmem:"), 6) == 0)
		return new CShmemMedia();
	if (strncmp (location, _T("filedbg:"), 8) == 0)
		//return new CAVIStdFileMedia();
		throw SWException("Unsupported format : filedbg");

	// Default is file
	return new CAVIFileMedia();
}

LPCTSTR CMediaFactory::RemoveMediaPrefix (LPCTSTR location)
{
	LPCTSTR semicollonPos = strchr (location, ':');
	if (semicollonPos == NULL || strncmp (semicollonPos, _T(":///"), 4) != 0)
		return location;
	else
		return semicollonPos + 4;
}

//evev - move from here?
// remove protocol prefix (...:///), and divide params from location (after ?)
//static
void CMediaFactory::ParseMediaLocation (LPCTSTR location, std::string *bareLocation, MediaLocationParams *mparms)
{
	// Remove prefix
	location = RemoveMediaPrefix (location);

	// Separate params
	LPCTSTR questionmarkPos = strchr (location, '?');
	if (questionmarkPos == NULL)
	{
		*bareLocation = location;
	}
	else
	{
		// bare location ends before ?, and params start afterwards
		*bareLocation = std::string( location, questionmarkPos-location );
		std::string params = questionmarkPos+1;
		ParseMediaLocationParams( mparms, params );
	}
}

// parse: a=b&c=d&e=f
void CMediaFactory::ParseMediaLocationParams( MediaLocationParams *mparms, std::string &locationParams )
{
	size_t curparam = 0;
	while ( true )
	{
		size_t eqsign = locationParams.find( '=', curparam );
		if ( eqsign == locationParams.npos )
			break;
		size_t ampsign = locationParams.find( '&', eqsign + 1 );

#define CODEC_PARAM "codec"
#define BPS_PARAM "bps"
#define TIMEOUT_PARAM "timeout"
#define BUFFER_SIZE "bufsize"
		// ampsign may be npos, in which case the value extends to the end of the string
		std::string val = locationParams.substr( eqsign+1, ampsign == locationParams.npos ? locationParams.npos : ampsign - (eqsign+1) );
		if ( locationParams.compare( curparam, strlen(CODEC_PARAM), CODEC_PARAM )== 0 )
		{
			mparms->codecstr = val;
		}
		else if ( locationParams.compare( curparam, strlen(BPS_PARAM), BPS_PARAM )== 0 )
		{
			mparms->bps= atoi(val.c_str());
		}
		else if ( locationParams.compare( curparam, strlen(TIMEOUT_PARAM), TIMEOUT_PARAM)== 0 )
		{
			// Timeout in URI is in seconds!!
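			// e.g. a hypothetical "?timeout=30" in the location URI yields timeoutMS = 30000 below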
			mparms->timeoutMS = atoi(val.c_str()) * 1000;
		}
		else if ( locationParams.compare( curparam, strlen(BUFFER_SIZE), BUFFER_SIZE)== 0 )
		{
			mparms->bufSize = atoi(val.c_str());
		}

		if ( ampsign == locationParams.npos )
			break;	// no more params
		curparam = ampsign + 1;
	}
}

void CBaseVideoStream::LogTime(LPCTSTR title)
{
	if (CLog::IsEnabled())
		CLog::Instance()->WriteItemTime(_streamId, _curFrame, title);
}
<file_sep>/VideoStream/VideoStream/Utils.cpp
#ifdef WIN32
#include "StdAfx.h"
#include "VSUtils.h"
#include <tchar.h>
#else
#include "VSUtils.h"
#include <stdio.h>
#include <string.h>
#endif

using namespace VideoStream;

/*
 * Encode a single TCHAR to a unique alphanumeric string (valid in a sync object name)
 *
 * Algorithm:
 * Convert the input character to its 2-digit hex representation, e.g. '/' = "2F".
 * Potential BUG if the char is non-ascii, because 2-digit hex is not enough for a unique representation.
 * If we use more characters we may run out of space in the sync object name...
 */
static void EncodeTCHAR (TCHAR tch, LPTSTR s)
{
	int num = tch;
	for (int i=0; i < 2; i++)
	{
		int digit = num%16;
		s[i] = (digit < 10 ? _T('0') + digit : _T('A') + digit - 10);
		num /= 16;
	}
	s[2] = _T('\0');
}

/*
 * Name: GetSyncObjName
 * Description: get a unique string identifier for a sync object.
 *
 * Algorithm:
 * 1. Concatenate the sync obj prefix with the media location URL.
 * 2. Encode the lowercased result into hex representation (see EncodeTCHAR notes).
 *
 * Notes:
 * This function must reflect 1:1 the media uniqueness:
 * Assume you have a single prefix <pr> and two locations <loc1>, <loc2>.
 * GetSyncObjName (<pr>, <loc1>) == GetSyncObjName (<pr>, <loc2>) if and only if loc1 and loc2 point to the same media.
 *
 * Known bug:
 * This function does not reflect media uniqueness when using relative paths in file media locations.
 * If <loc1> == "file://C:\Temp\Video.avi" and <loc2> == "file://Video.avi" where the working directory is "C:\Temp", then the locations
 * are pointing to the same media, but this function will return different identifiers for the same prefix.
 */
std::string VideoStream::GetSyncObjName (LPCTSTR prefix, LPCTSTR location)
{
	// 1. Concatenate the sync obj prefix with the media location URL.
	char name[MAX_PATH+1];
	strncpy (name, prefix, MAX_PATH);

	// 2. Encode the lowercased result into hex representation (see EncodeTCHAR notes).
	const int suffixLengthUsed = 80;	// 80 chars should be enough for unique identification...
	int locationLen = (int)strlen (location);
	int i = (locationLen < suffixLengthUsed ?
0 : locationLen - suffixLengthUsed); char *p = name + strlen (name); for (; i < locationLen; i++) { EncodeTCHAR (tolower(location[i]), p); p += 2; } return name; }<file_sep>/LightSynthesizer/LightSynthesizer.cpp //#include "stdafx.h" #include <algorithm> #include <iostream> #include <fstream> #include "AviWriter.h" #include "LightSynthesizer.h" #include "VideoObject.h" #include "SWException.h" #include "CliManager.h" #include "CommandParser.h" #include "MutexAutoLock.h" #ifndef WIN32 #include <unistd.h> #include <sys/time.h> #endif using namespace std; #ifdef WIN32 int LightSynthesizer::FileTime2Ms(const FILETIME& ft) { LARGE_INTEGER li; li.LowPart = ft.dwLowDateTime; li.HighPart = ft.dwHighDateTime; return (int)(li.QuadPart / 10000); } #endif LightSynthesizer::LightSynthesizer() { aviWriter = NULL; progressTracker = NULL; } LightSynthesizer::~LightSynthesizer() { VideoObjectMap::iterator it; for(it = objects.begin(); it != objects.end(); ++it) { delete it->second; } } bool LightSynthesizer::parseArgs(int argc, char** argv, map<string, string> &retMap) { for (int i = 0; i < argc; i++) { if (argv[i][0] != '-') { return false; } string var(argv[i]+1); if (var == "inputType" || var == "input" || var == "pf" || var == "rpcKeepAhead" || var == "rpcGrace" || var == "rpcInterval") { i++; if (i >= argc) { return false; } retMap.insert(pair<string, string>(var, string(argv[i]))); } else if (var == "hide" || var == "dbg") { retMap.insert(pair<string, string>(var, "true")); } } return true; } // The entry point from the Rendeing Manager. void LightSynthesizer::executeEngine(int argc, char** argv) { map<string, string> argMap; bool rc = parseArgs(argc, argv, argMap); if (!rc) { throw SWException("Error parsing arguments.\n"); } string inputType = argMap["inputType"]; string inputFile = argMap["input"]; profilingFile = argMap["pf"]; std::string rpcKeepAheadStr = argMap["rpcKeepAhead"]; std::string rpcGraceStr = argMap["rpcGrace"]; std::string rpcIntervalStr = argMap["rpcInterval"]; // inputType should be either "xml" or "commands" bool xmlInput; if (inputType == "xml") xmlInput = true; else if (inputType == "commands") xmlInput = false; else if (inputType == "") throw SWException("Error parsing arguments: inputType is missing\n"); else throw SWException("Error parsing arguments: inputType %s is incorrect\n", inputType.c_str()); if (inputFile == "") throw SWException("Error parsing arguments: input is missing\n"); // I treat this flag the active/not active for the whole feature if (rpcKeepAheadStr != "") { rpcKeepAhead = atoi(rpcKeepAheadStr.c_str()); m_logger.info("rpc keep ahead=%d\n", rpcKeepAhead); struct timeval prepartv; gettimeofday(&prepartv, 0); startTime = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; } if (rpcGraceStr != "") { rpcGrace = atoi(rpcGraceStr.c_str()); m_logger.info("rpc grace=%d\n", rpcGrace); } if (rpcIntervalStr != "") { rpcInterval = atoi(rpcIntervalStr.c_str()); m_logger.info("rpc interval=%d\n", rpcInterval); } if (argMap["dbg"] == "true") { VideoStream::EnableLog(true); } else { VideoStream::EnableLog(false); } // Get progressTracker from common storage map<string, void *>::const_iterator iter = m_commonStorage->find( "progressTracker" ); if ( iter == m_commonStorage->end() ) throw SWException("Missing progressTracker in common storage.\n"); else progressTracker = (ProgressTracker *)iter->second; previousFramePtr.setNull(); timingStart(); if (xmlInput) { if (!handleXml(inputFile.c_str())) { throw SWException("Synthesizer handleXml failed\n"); } } else { if 
(!handleCommands(inputFile.c_str(), NULL)) { throw SWException("Synthesizer handleCommands failed.\n"); } } progressTracker->flush(); timingEnd(); } void LightSynthesizer::stopEngine(bool isTriggeringEngine) { progressTracker->flush(); } bool LightSynthesizer::handleCommands(const char * commandFileName, const char * captureFileName) { CliManager cliManager(false); ifstream commandFileIfstream(commandFileName); if (commandFileIfstream.fail() ) { throw SWException("Failed to open input file %s.\n", commandFileName); return false; } // Bind commands if (! cliManager.addCommand("createOutputStream", "Create an output stream.", "Usage: createOutputStream -framerate <framerate> -numberOfFrames <numberOfFrames>\n" " -height <height> -width <width> -resizeMethod <resizeMethod> -path <path>\n", LightSynthesizer::createOutputStream, this)) { return false; } if (! cliManager.addCommand("createInputStream", "Create an input stream.", "Usage: createInputStream -id <id> -path <path> -z <z>\n", LightSynthesizer::createInputStream, this)) { return false; } if (! cliManager.addCommand("outputFrame", "Map an output frame to the input frames that compose it.", "Usage: outputFrame <outputFrameNumber> <inputFrame1 ... inputFrameN>\n" " Each inputFrame is the format <id>#<inputFrameNumber>\n", LightSynthesizer::outputFrame, this)) { return false; } if (! cliManager.addCommand("removeInputStream", "Remove an input stream.", "Usage: removeInputStream <id>\n", LightSynthesizer::removeInputStream, this)) { return false; } if (! cliManager.addCommand("endOutputFrames", "End input.", "This command indicates that there are no more commands\n", LightSynthesizer::endOutputFrames, this)) { return false; } if (captureFileName != NULL) { FILE * captureFile = fopen(captureFileName, "w"); if (captureFile == NULL) { throw SWException("Failed to open synthesizer capture file %s.\n", captureFileName); return false; } cliManager.setCaptureFile(captureFile); } cliManager.run(commandFileIfstream); return true; } void LightSynthesizer::createOutputStream(string fullCommand, void *callbackData, bool & exitCli) { LightSynthesizer * synth = (LightSynthesizer *)callbackData; if (! synth->outputFileName.empty()) { throw SWException("createOutputStream can only be called once"); return; } char *usage = "createOutputStream -framerate <framerate> -numberOfFrames <numberOfFrames> -height <height> -width <width> -resizeMethod <resizeMethod> -path <path>"; // Use the CommandParser to parse the command line CommandParser parser(fullCommand); if (!parser.IsValidCommand()) { parser.freeArgs(); throw SWException("Failed to parse createOutputStream command: %s", fullCommand.c_str()); return; } int argc = parser.getArgc(); char** argv = parser.getArgv(); // We are very strict... // The paramer names are nevertheless necessary in order to increase readability of the synthesizer command file. if (argc != 13 || strcmp(argv[0], "createOutputStream") || strcmp(argv[1], "-framerate") || strcmp(argv[3], "-numberOfFrames") || strcmp(argv[5], "-height") || strcmp(argv[7], "-width") || strcmp(argv[9], "-resizeMethod") || strcmp(argv[11], "-path")) { parser.freeArgs(); throw SWException("Invalid createOutputStream command: %s. 
Correct usage is %s", fullCommand.c_str(), usage); return; } synth->framerate = atoi(argv[2]); synth->numFrames = atoi(argv[4]); synth->resolutionHeight = atoi(argv[6]); synth->resolutionWidth = atoi(argv[8]); synth->parseResizeMethod(argv[10]); synth->outputFileName = string(argv[12]); // Set the total number of expected frames for the progress tracker synth->progressTracker->setOutOf(synth->numFrames); // int avi writer synth->aviWriter = new AviWriter(); synth->aviWriter->Open(synth->outputFileName, synth->resolutionWidth, synth->resolutionHeight, synth->framerate); parser.freeArgs(); } void LightSynthesizer::createInputStream(string fullCommand, void *callbackData, bool & exitCli) { LightSynthesizer * synth = (LightSynthesizer *)callbackData; char *usage = "createInputStream -id <id> -path <path> -z <z>"; // Use the CommandParser to parse the command line CommandParser parser(fullCommand); if (!parser.IsValidCommand()) { parser.freeArgs(); throw SWException("Failed to parse createInputStream command: %s", fullCommand.c_str()); return; } int argc = parser.getArgc(); char** argv = parser.getArgv(); // We are very strict... // The paramer names are nevertheless necessary in order to increase readability of the synthesizer command file. if (argc != 7 || strcmp(argv[0], "createInputStream") || strcmp(argv[1], "-id") || strcmp(argv[3], "-path") || strcmp(argv[5], "-z")) { parser.freeArgs(); throw SWException("Invalid createInputStream command: %s. Correct usage is %s", fullCommand.c_str(), usage); return; } string id(argv[2]); string path(argv[4]); double z = atof(argv[6]); parser.freeArgs(); if (synth->objects[id] != NULL) { throw SWException("createInputStream failed: Input stream %s cannot be created twice.", id.c_str()); return; } VideoObject *videoObject = new VideoObject(synth->m_logger, path, synth->resizeMethod, z); synth->objects[id] = videoObject; } void LightSynthesizer::outputFrame(string fullCommand, void *callbackData, bool & exitCli) { LightSynthesizer * synth = (LightSynthesizer *)callbackData; char *usage = "outputFrame <outputFrameNumber> <inputFrame1 ... inputFrameN>"; // Use the CommandParser to parse the command line CommandParser parser(fullCommand); if (!parser.IsValidCommand()) { parser.freeArgs(); throw SWException("Failed to parse outputFrame command: %s", fullCommand.c_str()); return; } int argc = parser.getArgc(); char** argv = parser.getArgv(); if (argc < 2 || strcmp(argv[0], "outputFrame")) { parser.freeArgs(); throw SWException("Invalid outputFrame command: %s. 
Correct usage is %s", fullCommand.c_str(), usage); return; } VideoStream::CFramePtr framePtr; int targetSequence = atoi(argv[1]); int nInputStreams = argc - 2; if (nInputStreams == 0) { // No inputs -> duplicate previous frame (this should usually not happen, but it does sometimes, probably due to rounding errors) framePtr = synth->previousFramePtr; } else { std::vector<VideoObject *> renderObjs; // Objects for rendition in current frame renderObjs.reserve(nInputStreams); for (int i=0; i<nInputStreams; i++) { // The format of each input stream is "<id>#<inputFrameNumber>" const char *hash = strrchr(argv[i+2], '#'); if (hash == NULL) { parser.freeArgs(); throw SWException("Error: outputFrame %d has an invalid input stream", targetSequence, argv[i+2]); return; } string id(argv[i+2], hash - argv[i+2]); int inputFrameNumber=atoi(hash+1); VideoObject *videoObject=synth->objects[id]; if (videoObject == NULL) { parser.freeArgs(); throw SWException("Error: outputFrame %d uses an input stream %s which was not created", targetSequence, id.c_str()); return; } // Set object current frame bool rc; videoObject->updateCurFrame( inputFrameNumber, synth->resolutionWidth, synth->resolutionHeight, &rc ); if ( !rc ) { parser.freeArgs(); throw SWException("Error: updateCurFrame Failed for frame %d from input stream %s", inputFrameNumber, videoObject->getPath()); return; } renderObjs.push_back( videoObject ); videoObject->AppearedInFrame( targetSequence ); } parser.freeArgs(); if (renderObjs.size() == 0) { throw SWException("Error: Output frame %d has no associated input frames", targetSequence); return; } /* * Sort objects for rendition by z-pos * Use stable sort so if two objects have equal z, the first one will be behind */ std::stable_sort( renderObjs.begin(), renderObjs.end(), VideoObject::compareObjsByZ ); // Render framePtr = synth->render(renderObjs); if (framePtr.isNull()) { throw SWException("Error: Render produced no output frame %d", targetSequence); return; } synth->previousFramePtr = framePtr; } // write frame synth->aviWriter->WriteFrame(framePtr); synth->progressTracker->incProgress(); synth->timingFrame(); // Time the frame generation time int numOfFramesToPerformCheck = synth->framerate; if (synth->rpcInterval > 0) numOfFramesToPerformCheck = synth->rpcInterval; //Perform the chceck if we can sleep if the feature is activated, we passed the grace numbere of frames and we reached the number of frame to perform if (synth->rpcKeepAhead > 0 && synth->currentFrameNum > synth->rpcGrace && synth->currentFrameNum % numOfFramesToPerformCheck == 0) { struct timeval prepartv; gettimeofday(&prepartv, 0); long nowTime = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; long elapsedTime = nowTime - synth->startTime; long realTime = synth->currentFrameNum * 1000 / synth->framerate; synth->m_logger.info("elapsedTime=%d realTime=%d", elapsedTime, realTime); if (realTime - elapsedTime > synth->rpcKeepAhead) { long sleepTime = realTime - elapsedTime - synth->rpcKeepAhead; synth->m_logger.info("sleep time=%d", sleepTime); usleep(sleepTime*1000); synth->totalSleepTime += sleepTime; } } synth->currentFrameNum++; } void LightSynthesizer::removeInputStream(string fullCommand, void *callbackData, bool & exitCli) { LightSynthesizer * synth = (LightSynthesizer *)callbackData; char *usage = "removeInputStream <id>"; // Use the CommandParser to parse the command line CommandParser parser(fullCommand); if (!parser.IsValidCommand()) { parser.freeArgs(); throw SWException("Failed to parse removeInputStream command: %s", 
fullCommand.c_str()); return; } int argc = parser.getArgc(); char** argv = parser.getArgv(); // We are very strict... // The paramer names are nevertheless necessary in order to increase readability of the synthesizer command file. if (argc != 2 || strcmp(argv[0], "removeInputStream")) { parser.freeArgs(); throw SWException("Invalid removeInputStream command: %s. Correct usage is %s", fullCommand.c_str(), usage); return; } VideoObject *videoObject=synth->objects[string(argv[1])]; if (videoObject==NULL) { parser.freeArgs(); throw SWException("removeInputStream failed: cannot find input stream %s", argv[1]); return; } videoObject->close(); } void LightSynthesizer::endOutputFrames(string fullCommand, void *callbackData, bool & exitCli) { LightSynthesizer * synth = (LightSynthesizer *)callbackData; // Close open video objects (should there be any???) VideoObjectMap::iterator videoObjects; for (videoObjects = synth->objects.begin(); videoObjects != synth->objects.end(); videoObjects++) { VideoObject *videoObject = (*videoObjects).second; videoObject->close(); } synth->aviWriter->Close(); exitCli = true; } bool LightSynthesizer::handleXml(const char * documentName) { // Parse XML file if (!readParamsFromXml(documentName)) { throw SWException("Failed to read parameters from XML file\n"); return false; } // int avi writer aviWriter = new AviWriter(); aviWriter->Open(outputFileName, resolutionWidth, resolutionHeight, framerate); if (!doFlow()) { throw SWException("Synthesizer doFlow failed.\n"); return false; } return true; } bool LightSynthesizer::parseResizeMethod(const char *resizeMethodStr) { // Parse resize method. if ( resizeMethodStr == NULL || strlen(resizeMethodStr) == 0 ) resizeMethod = RESIZE_METHOD_LINEAR; // Default else if (strcmp( resizeMethodStr, "none" ) == 0 ) resizeMethod = RESIZE_METHOD_NONE; else if ( strcmp( resizeMethodStr, "nn" ) == 0 ) resizeMethod = RESIZE_METHOD_NN; else if ( strcmp( resizeMethodStr, "linear" ) == 0 ) resizeMethod = RESIZE_METHOD_LINEAR; else if ( strcmp( resizeMethodStr, "cubic" ) == 0 ) resizeMethod = RESIZE_METHOD_CUBIC; else if ( strcmp( resizeMethodStr, "area" ) == 0 ) resizeMethod = RESIZE_METHOD_AREA; else { throw SWException("Invalid resizeMethod: %s\n", resizeMethodStr ); return false; } return true; } bool LightSynthesizer::readParamsFromXml(const char * documentName) { document = new TiXmlDocument(documentName); if (!document->LoadFile()) { throw SWException("TiXmlDocument load failed.\n"); return false; } TiXmlElement *params = document->FirstChild("Movie")->FirstChildElement("OutputParams"); framerate = atoi(params->FirstChildElement("Framerate")->GetText()); numFrames = atoi(params->FirstChildElement("NumberOfFrames")->GetText()); TiXmlElement *stage = params->FirstChildElement("Resolution"); resolutionWidth = atoi(stage->Attribute("width")); resolutionHeight = atoi(stage->Attribute("height")); if (!parseResizeMethod(stage->Attribute("resizeMethod"))) { throw SWException("parseResizeMethod failed\n"); return false; } outputFileName = params->FirstChildElement("FileName")->GetText(); // Set the total number of expected frames for the progress tracker progressTracker->setOutOf(numFrames); return true; } bool LightSynthesizer::readObjects() { TiXmlElement *objectsXml = document->FirstChild("Movie")->FirstChildElement("ObjectSources"); if (objectsXml == 0) { return false; } for (TiXmlElement *objectXml = objectsXml->FirstChildElement(); objectXml != 0; objectXml = objectXml->NextSiblingElement()) { std::string id = objectXml->Attribute("id"); 
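		// Both "id" and "type" are assumed to be present on every ObjectSources child:
		// TiXmlElement::Attribute() returns NULL for a missing attribute, and constructing
		// std::string from NULL is undefined, so the input XML is trusted to be well-formed here.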
std::string type = objectXml->Attribute("type"); if (type == "VIDEO") { VideoObject *videoObject = new VideoObject(m_logger); // get path if (!videoObject->Init(objectXml, resizeMethod )) { continue; } objects[id] = videoObject; } } return true; } static void setZPos(VideoObject *videoObject, const FrameObject *frameObject) { if (!frameObject->getZPos().empty()) videoObject->setZPos(atof(frameObject->getZPos().c_str())); } VideoObject *LightSynthesizer::getVideoObject(const FrameObject *frameObject) { std::string videoId = frameObject->getSourceId(); VideoObject *videoObject=objects[videoId]; //setZPos(videoObject,frameObject); return videoObject; } void LightSynthesizer::updateCurFrame(VideoObject *videoObject, const FrameObject *frameObject, bool *rc) { int nextFrame = atoi(frameObject->getFrameNum().c_str()); videoObject->updateCurFrame( nextFrame, resolutionWidth, resolutionHeight, rc ); } static void overlayFrame(unsigned char *data1, const unsigned char *data2, int dataSize) { // data2 is placed over data1 unsigned char *dataEnd1 = data1 + dataSize; for (; data1 < dataEnd1; data1+=4, data2+=4) { unsigned char a = *(data2+3); if (a == 0xFF) { *((unsigned int *)data1) = *((unsigned int *)data2); } else if (a > 0) { unsigned char b = 255 - a; *data1 = (a*(*data2) + b*(*data1)) / 255; *(data1+1) = (a*(*(data2+1)) + b*(*(data1+1))) / 255; *(data1+2) = (a*(*(data2+2)) + b*(*(data1+2))) / 255; } } } void LightSynthesizer::timingStart() { #ifdef WIN32 startTimeMS = GetTickCount(); #else m_logger.trace("Timing Not Implemented\n"); #endif } // Time the frame generation time void LightSynthesizer::timingFrame() { #ifdef WIN32 if ( !profilingFile.empty() ) frameTimesCumMS.push_back( GetTickCount() - startTimeMS); #else m_logger.trace("Timing Not Implemented\n"); #endif } void LightSynthesizer::timingEnd() { #ifdef WIN32 if ( !profilingFile.empty() ) WriteProfilingFile( frameTimesCumMS); m_logger.info("Total handling time: %d ms\n", GetTickCount() - startTimeMS); FILETIME procCreationTime, procExitTime, kernelTime, userTime; if (GetProcessTimes(GetCurrentProcess(), &procCreationTime, &procExitTime, &kernelTime, &userTime)) { int iUser = FileTime2Ms(userTime); int iKernel = FileTime2Ms(kernelTime); int iTotalCPU = iUser + iKernel; m_logger.info("Cpu time=%d ms (user=%d ms, kernel=%d ms)\n", iTotalCPU, iUser, iKernel); } #else m_logger.info("Total sleep time:%d ms\n", totalSleepTime); progressTracker->totalSleepTime(totalSleepTime); #endif //m_logger.info("Time stats=%d,%d,%d,%d\n", endtick-starttick, iTotalCPU, iUser, iKernel ); } bool LightSynthesizer::doFlow() { #ifdef WIN32 DWORD preparsetime = GetTickCount(); #endif if (!readObjects()) { throw SWException("readObjects failed\n"); } TargetFrames *targetFrames = new TargetFrames(document); std::vector<TargetFrame *>::const_iterator targetFramesIterator = targetFrames->getFrames().begin(); int targetSequence = 0, framePaddingCount = 0; #ifdef WIN32 m_logger.info("Total parsing time: %d ms\n", GetTickCount()-preparsetime); #endif VideoStream::CFramePtr framePtr; std::vector<VideoObject *> renderObjs; // Objects for rendition in current frame renderObjs.reserve(objects.size()); std::vector<std::pair<int,string> > videoObjectStartFrames; while (targetFramesIterator != targetFrames->getFrames().end()) { const TargetFrame *targetFrame = (*targetFramesIterator); // render and advance till we're synched (target frame and target sequence) if (targetAndSequenceSynched(targetFrame, targetSequence)) { // Close sources that were removed 
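			// (objects listed as removed in this target frame are no longer needed, so
			//  their input streams can be released now)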
std::vector<FrameObject *>::const_iterator removedObjectsIterator; for (removedObjectsIterator = targetFrame->getRemovedObjects().begin(); removedObjectsIterator != targetFrame->getRemovedObjects().end(); removedObjectsIterator++) { const FrameObject *frameObject = (*removedObjectsIterator); VideoObject *videoObject=getVideoObject(frameObject); videoObject->close(); } /* * Get next frames and update z-pos */ renderObjs.clear(); renderObjs.reserve(objects.size()); std::vector<FrameObject *>::const_iterator frobj_iter; for ( frobj_iter = targetFrame->getFrameObjects().begin(); frobj_iter != targetFrame->getFrameObjects().end(); frobj_iter++ ) { const FrameObject *frameObject = *frobj_iter; VideoObject *videoObject=getVideoObject(frameObject); // Set object current frame bool rc; updateCurFrame( videoObject, frameObject, &rc ); if ( !rc ) { m_logger.error("updateCurFrame Failed for frame #%d from %s\n", videoObject->getCurFrameNum(), videoObject->getPath() ); return false; } // Set objcet z position setZPos(videoObject,frameObject); renderObjs.push_back( videoObject ); videoObject->AppearedInFrame( targetSequence ); } /* * Sort objects for rendition by z-pos * Use stable sort so if two objects have equal z, the first one will be behind */ std::stable_sort( renderObjs.begin(), renderObjs.end(), VideoObject::compareObjsByZ ); // Render framePtr = render(renderObjs); if (framePtr.isNull()) { // if frame is null, it means that the synth didn't increment the current frame to be rendered, which means that we should write the previous // frame again if (!previousFramePtr.isNull()) framePtr = previousFramePtr; else framePaddingCount++; } // write frame if (! framePtr.isNull()) { while (framePaddingCount > 0) { aviWriter->WriteFrame(framePtr); framePaddingCount--; timingFrame(); // Time the frame generation time } aviWriter->WriteFrame(framePtr); timingFrame(); // Time the frame generation time } previousFramePtr = framePtr; targetFramesIterator++; } targetSequence++; progressTracker->setProgress(targetSequence); } // Close open video objects VideoObjectMap::iterator videoObjects; for (videoObjects = objects.begin(); videoObjects != objects.end(); videoObjects++) { VideoObject *videoObject = (*videoObjects).second; videoObject->close(); } aviWriter->Close(); delete targetFrames; return true; } void LightSynthesizer::MakeBlackFrame(unsigned char *buf, int nbytes) { memset(buf, 0, nbytes); for (int pl = 3; pl < nbytes; pl += 4) { buf[pl] = 255; } } VideoStream::CFramePtr LightSynthesizer::render(std::vector<VideoObject *> renderObjs) { VideoStream::CFramePtr framePtr; // mutableFramePtr is used as a temporary holder for framePtr's frame in case we need to change the frame itself. // Note that framePtr is a CFramePtr (and not a CMutableFramePtr) and hence its frame cannot be changed. // The reason why we need two separate containers is that sometimes we want to copy the curframePtr directly into // framePtr. This cannot be done if framePtr were mutable. 
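	// Compositing model (see overlayFrame() above): layers arrive sorted back-to-front
	// (the first element is the back-most) and are painted onto an opaque black background
	// with straight (non-premultiplied) alpha, i.e. per channel
	//     dst = (a * src + (255 - a) * dst) / 255
	// so the composited frame is always fully opaque.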
VideoStream::CMutableFramePtr mutableFramePtr; VideoStream::CMutableFramePtr blackFrame(resolutionWidth, resolutionHeight, 32); MakeBlackFrame(blackFrame.GetData(), blackFrame.GetDataSize()); m_logger.trace("black frame created"); std::vector<VideoObject *>::const_iterator objiter; for ( objiter = renderObjs.begin(); objiter != renderObjs.end(); objiter++ ) { VideoStream::CFramePtr curframePtr = (*objiter)->getCurFramePtr(); if ( curframePtr.isNull() ) { throw SWException("Failed to read frame #%d from %s\n", (*objiter)->getCurFrameNum(), (*objiter)->getPath() ); } if ( framePtr.isNull() ) { // First object // If there is a single layer we write the same frame object that we read. // Otherwise, we create a new frame object because we don't want to change the input frame. // overlayFrame() changes the content of its first argument. That's why we need to ensure that in that case, the // frame is our own copy and not one we read from the input stream. //set black frame as solid background mutableFramePtr = VideoStream::CMutableFramePtr(blackFrame, true); overlayFrame(mutableFramePtr.GetData(), curframePtr.GetData(), mutableFramePtr.GetDataSize()); framePtr = mutableFramePtr; m_logger.trace("overlay black frame done."); } else { // Change the content of mutableFramePtr's data overlayFrame (mutableFramePtr.GetData(), curframePtr.GetData(), mutableFramePtr.GetDataSize()); framePtr = mutableFramePtr; } } // mutableFramePtr finished its job. mutableFramePtr.setNull(); return framePtr; } TiXmlElement * LightSynthesizer::getNextVideoFrameObject(TiXmlElement *targetFrame) { TiXmlElement *frameObject = targetFrame->FirstChildElement("FrameObjects")->FirstChildElement("FrameObject"); while (frameObject) { string frameObjectType = frameObject->Attribute("xsi:type"); if (frameObjectType == "Video") { return frameObject; } else { frameObject = frameObject->NextSiblingElement(); } } return NULL; } VideoObject * LightSynthesizer::getNextVideoObject(TiXmlElement *targetFrame) { TiXmlElement *frameObject = targetFrame->FirstChildElement("FrameObjects")->FirstChildElement("FrameObject"); while (frameObject) { string frameObjectType = frameObject->Attribute("xsi:type"); if (frameObjectType == "Video") { // open the video stream string videoId = frameObject->Attribute("id"); return objects[videoId]; } else { frameObject = frameObject->NextSiblingElement(); } } return NULL; } bool LightSynthesizer::targetAndSequenceSynched(const TargetFrame *targetFrame, int targetSequence) { const char *strSequenceNumber = targetFrame->getSequenceNumber().c_str(); if (strSequenceNumber == 0) { // assume sequencial frames return true; } int sequenceNumber = atoi(strSequenceNumber); if (sequenceNumber > targetSequence) { return false; } return true; } /* bool LightSynthesizer::targetAndSequenceSynchedOld(TiXmlElement *targetFrame, int targetSequence) { const char *strSequenceNumber = targetFrame->Attribute("sequenceNumber"); if (strSequenceNumber == 0) { // assume sequencial frames return true; } int sequenceNumber = atoi(strSequenceNumber); if (sequenceNumber > targetSequence) { return false; } return true; } */ /* ******************************************************************** * Profiling file functions ******************************************************************** */ bool LightSynthesizer::WriteProfilingFile( std::vector<DWORD> frameTimesCumMS ) { FILE *fp = fopen(profilingFile.c_str(), "w"); if (fp == NULL) { m_logger.error( "Failed to open profiling file %s\n", profilingFile.c_str()); return false; } // Sort 
objects (scenes) by start time std::vector<const VideoObject *> sortedObjects; GetObjectsByStartTime( sortedObjects ); fprintf(fp, "<html>\n" " <head>\n" " <script type=\"text/javascript\" src=\"https://www.google.com/jsapi\"></script>\n" " <script type=\"text/javascript\">\n" " google.load(\"visualization\", \"1\", {packages:[\"corechart\"]});\n" " google.setOnLoadCallback(drawCharts);\n" " function drawCharts() {\n"); /* * % realtime cum */ fprintf(fp, " var data_pctRealTimeCum = google.visualization.arrayToDataTable([\n" " ['Frame', '%%'],\n"); double pctRealTimeCum; int start_frame = 5; for ( size_t fr = start_frame; fr < frameTimesCumMS.size(); fr++ ) { double timSec = ((double)fr) / framerate; pctRealTimeCum = ((double)frameTimesCumMS[fr]-frameTimesCumMS[0]) / 1000 / timSec * 100; fprintf(fp," [%d, %f],\n", fr, pctRealTimeCum); } fprintf(fp, " ]);\n\n" " var options_pctRealTimeCum = {\n" " title: 'Cumulative %% of realtime',\n" " colors: ['#00FF00'],\n"); AddSceneTicks( sortedObjects, frameTimesCumMS, fp ); fprintf(fp, " }\n"); fprintf(fp, " };\n\n" " var chart_pctRealTimeCum = new google.visualization.LineChart(document.getElementById('chart_div_pctRealTimeCum'));\n" " chart_pctRealTimeCum.draw(data_pctRealTimeCum, options_pctRealTimeCum);\n"); /* * % realtime */ fprintf(fp, " var data_pctRealTime = google.visualization.arrayToDataTable([\n" " ['Frame', '%%'],\n"); double pctRealTime; int interval_frames = 20; double timSec = ((double)interval_frames) / framerate; for ( size_t fr = 0; fr < frameTimesCumMS.size()-interval_frames; fr++ ) { pctRealTime = ((double)frameTimesCumMS[fr+interval_frames]-frameTimesCumMS[fr]) / 1000 / timSec * 100; fprintf(fp," [%d, %f],\n", fr, pctRealTime); } fprintf(fp, " ]);\n\n" " var options_pctRealTime = {\n" " title: '%% of realtime',\n" " colors: ['#00FF00'],\n"); AddSceneTicks( sortedObjects, frameTimesCumMS, fp ); fprintf(fp, " }\n"); fprintf(fp, " };\n\n" " var chart_pctRealTime = new google.visualization.LineChart(document.getElementById('chart_div_pctRealTime'));\n" " chart_pctRealTime.draw(data_pctRealTime, options_pctRealTime);\n"); fprintf(fp, " }\n" " </script>\n" " </head>\n" " <body>\n" " <div id=\"chart_div_pctRealTimeCum\" style=\"width: 1440px; height: 500px;\"></div>\n"); /* * Scene stats */ fprintf(fp, " <table border=\"1\">\n" " <tr>\n" " <th></th>\n" " <th>Name</th>\n" " <th>Frame range</th>\n" " <th>Total Frames</th>\n" " <th>Render time (secs)</th>\n" " <th>%% realtime</th>\n" " </tr>\n"); std::vector<const VideoObject *>::const_iterator videoObjects; int serNum; for (videoObjects = sortedObjects.begin(), serNum = 0; videoObjects != sortedObjects.end(); videoObjects++) { const VideoObject *videoObject = *videoObjects; int firstFrame = videoObject->GetFirstFrame(); int lastFrame = videoObject->GetLastFrame(); char shortName; string displayName; if ( !GetVideoObjectDisplayDetails( *videoObject, serNum, shortName, displayName ) ) continue; // No need to display serNum++; fprintf(fp, " <tr> <td>%c</td> <td>%s", shortName, displayName.c_str() ); if ( firstFrame >= 0 ) { fprintf(fp, "</td> <td>%d-", firstFrame ); if ( lastFrame >= 0 ) { int nframes = lastFrame - firstFrame+1; fprintf(fp, "%d</td> <td>%d", lastFrame , nframes); if ( lastFrame < (int)frameTimesCumMS.size() ) { double playTimeSec = ((double)lastFrame-firstFrame) / framerate; double renderTimeSec = ((double)frameTimesCumMS[lastFrame]-frameTimesCumMS[firstFrame]) / 1000; double pctRealTimeCum = renderTimeSec / playTimeSec * 100; fprintf(fp, "</td> <td>%f</td> <td>%f%%", 
renderTimeSec, pctRealTimeCum); } } fprintf(fp, "</td>"); } fprintf(fp, "</tr>\n"); } if ( frameTimesCumMS.size() > 0 ) { // Totals double renderTimeSec = ((double)frameTimesCumMS.back()) / 1000; fprintf(fp, " <tr><td></td><td></td><td></td><td></td><td></td><td></td></tr> <tr><td></td> " "<td> <b> Total </b></td> <td></td><td>%d</td>" "<td>%f</td> <td>%f%%</td></tr>", frameTimesCumMS.size(), renderTimeSec, pctRealTimeCum); } fprintf(fp, "</table>\n"); fprintf(fp, " <br> Time before first frame: %d ms<br>" " <br> Frame rate: %d fps<br>" " <div id=\"chart_div_pctRealTime\" style=\"width: 1440px; height: 500px;\"></div>\n" " </body>\n" "</html>\n", frameTimesCumMS[0], framerate ); fclose(fp); return true; } bool LightSynthesizer::GetVideoObjectDisplayDetails( const VideoObject &videoObject, int serialNum, char &shortName, string &displayName) { string name = string(videoObject.getPath()); // Don't display decoder output int decoded_str = name.find("_decoded.avi"); if ( decoded_str != string::npos ) { return false; } shortName = 'A' + serialNum; /* * Display name - remove path, initial number, url parameters, extension */ displayName = string(videoObject.getPath()); int last_slash = displayName.find_last_of( '/'); if ( last_slash != string::npos ) displayName = displayName.substr( last_slash+1 ); for ( int iunderscore = 0; iunderscore < 2; iunderscore++ ) { int first_underscore = displayName.find_first_of( '_'); if ( first_underscore != string::npos ) displayName = displayName.substr( first_underscore+1 ); } int first_questionmark = displayName.find_first_of( '?'); if ( first_questionmark != string::npos ) displayName = displayName.substr( 0, first_questionmark ); int last_dot = displayName.find_last_of( '.'); if ( last_dot != string::npos ) displayName = displayName.substr( 0, last_dot ); return true; } void LightSynthesizer::AddSceneTicks( std::vector<const VideoObject *> &sortedObjects, const std::vector<DWORD> &frameTimesCumMS, FILE *fp ) { // Mark object times fprintf(fp, " hAxis: { title: \"frame\", ticks: ["); std::vector<const VideoObject *>::const_iterator videoObjects; int serNum; for (videoObjects = sortedObjects.begin(), serNum = 0; videoObjects != sortedObjects.end(); videoObjects++) { const VideoObject *videoObject = *videoObjects; int firstFrame = videoObject->GetFirstFrame(); int lastFrame = videoObject->GetLastFrame(); string name = string(videoObject->getPath()); char shortName; string displayName; if ( !GetVideoObjectDisplayDetails( *videoObject, serNum, shortName, displayName ) ) continue; // No need to display serNum++; if ( firstFrame >= 0 ) fprintf(fp, "{v:%d, f:\"%c\"}, ", firstFrame, shortName); } fprintf(fp, "%d] ", frameTimesCumMS.size()); } void LightSynthesizer::GetObjectsByStartTime( std::vector<const VideoObject *> &sortedObjects) { VideoObjectMap::const_iterator videoObjects; for (videoObjects = objects.begin(); videoObjects != objects.end(); videoObjects++) { VideoObject *videoObject = (*videoObjects).second; sortedObjects.push_back( videoObject ); } std::stable_sort( sortedObjects.begin(), sortedObjects.end(), VideoObject::compareObjsByFirstFrame ); } <file_sep>/Fmod/Events/SyncPointEventDescriptor.h #ifndef __SYNC_POINT_EVENT_DESCRIPTOR_H__ #define __SYNC_POINT_EVENT_DESCRIPTOR_H__ class SyncPointEventDescriptor { public: virtual int apply(FMOD::Channel *channel){return 0;} }; #endif <file_sep>/VideoStream/VideoStream/LocalMemStreamManager.cpp #ifdef WIN32 #include "StdAfx.h" #include <map> #include "VSLocalMemStream.h" #include "VSLocalMemStreamManager.h" 
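// CLocalMemStreamManager keeps a process-wide, mutex-protected registry of named in-memory
// streams (m_localStreams): a CLocalMemStream is created lazily by whichever side (reader or
// writer) opens the name first, and is deleted only after both sides have closed it.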
#include "SWException.h" using namespace std; using namespace VideoStream; // Static variables of CLocalMemStreamManager map<string,CLocalMemStream *> CLocalMemStreamManager::m_localStreams; HANDLE CLocalMemStreamManager::m_mutex = NULL; bool CLocalMemStreamManager::m_isInitialized = false; // Should be called once at the beginning of the process bool CLocalMemStreamManager::init() { if (m_isInitialized) return true; m_isInitialized = true; // Create mutex. m_mutex = CreateMutex (NULL, FALSE, NULL); if (m_mutex == NULL) { throw SWException("Failed to create mutex."); return false; } return true; } // Reader API CLocalMemStream *CLocalMemStreamManager::openLocalStreamForReading(string & streamName, DWORD readerTimeoutMS) { WaitForSingleObject(m_mutex, INFINITE); CLocalMemStream *localStream = m_localStreams[streamName]; if (localStream == NULL) { localStream = new CLocalMemStream(streamName); if (localStream == NULL) { ReleaseMutex(m_mutex); throw SWException("Failed to create CLocalMemStream for stream %s for reading", streamName.c_str() ); } m_localStreams[streamName] = localStream; } ReleaseMutex(m_mutex); localStream->readerOpen(readerTimeoutMS); return localStream; } void CLocalMemStreamManager::closeLocalStreamForReading(CLocalMemStream * localStream) { WaitForSingleObject(m_mutex, INFINITE); string streamName = localStream->m_streamName; localStream->readerClose(); if (localStream->didReaderAndWriterClose()) { m_localStreams[streamName] = NULL; delete localStream; } ReleaseMutex(m_mutex); } // Writer API CLocalMemStream *CLocalMemStreamManager::openLocalStreamForWriting(string & streamName, const VIDEOINFO &vi, const string & codecstr, int bps, int maxBufferSize, DWORD writerTimeoutMS) { WaitForSingleObject(m_mutex, INFINITE); CLocalMemStream *localStream = m_localStreams[streamName]; if (localStream == NULL) { localStream = new CLocalMemStream(streamName); if (localStream == NULL) { ReleaseMutex(m_mutex); throw SWException("Failed to create CLocalMemStream for stream %s for writing", streamName.c_str() ); } m_localStreams[streamName] = localStream; } ReleaseMutex(m_mutex); localStream->writerOpen(vi, codecstr, bps, maxBufferSize, writerTimeoutMS); return localStream; } void CLocalMemStreamManager::closeLocalStreamForWriting(CLocalMemStream * localStream) { WaitForSingleObject(m_mutex, INFINITE); if (localStream == NULL) { ReleaseMutex(m_mutex); throw SWException("Failed to close local stream for writing"); } string streamName = localStream->m_streamName; localStream->writerClose(); if (localStream->didReaderAndWriterClose()) { m_localStreams[streamName] = NULL; delete localStream; } ReleaseMutex(m_mutex); } void CLocalMemStreamManager::printReport(FILE *outputFile) { WaitForSingleObject(m_mutex, INFINITE); map<string,CLocalMemStream *>::iterator iter; for(iter = m_localStreams.begin(); iter != m_localStreams.end(); iter++ ) { CLocalMemStream * stream = iter->second; if (stream != NULL) { WaitForSingleObject(stream->m_mutex, INFINITE); fprintf(outputFile, " %s: Reader state = %d Writer state = %d Frames in stream = %d\n", stream->m_streamName.c_str(), stream->m_readerState, stream->m_writerState, stream->getNumberOfFrames()); ReleaseMutex(stream->m_mutex); } } ReleaseMutex(m_mutex); } void CLocalMemStreamManager::pauseAllStreams() { CLocalMemStream::pauseAllStreams(); } void CLocalMemStreamManager::resumeAllStreams() { CLocalMemStream::resumeAllStreams(); } #else #include <map> #include "VSLocalMemStream.h" #include "VSLocalMemStreamManager.h" #include "SWException.h" using namespace 
std; using namespace VideoStream; // Static variables of CLocalMemStreamManager map<string, CLocalMemStream *> CLocalMemStreamManager::m_localStreams; pthread_mutex_t CLocalMemStreamManager::m_mutex; // = 0; bool CLocalMemStreamManager::m_isInitialized = false; // Should be called once at the beginning of the process bool CLocalMemStreamManager::init() { if (m_isInitialized) return true; m_isInitialized = true; // Create mutex. //m_mutex = CreateMutex (NULL, FALSE, NULL); int res = pthread_mutex_init(&m_mutex, NULL); if (res != 0) { throw SWException("Failed to create mutex."); return false; } return true; } // Reader API CLocalMemStream *CLocalMemStreamManager::openLocalStreamForReading( string & streamName, DWORD readerTimeoutMS) { pthread_mutex_lock(&m_mutex); CLocalMemStream *localStream = m_localStreams[streamName]; if (localStream == NULL) { localStream = new CLocalMemStream(streamName); if (localStream == NULL) { pthread_mutex_unlock(&m_mutex); throw SWException( "Failed to create CLocalMemStream for stream %s for reading", streamName.c_str()); } m_localStreams[streamName] = localStream; } pthread_mutex_unlock(&m_mutex); localStream->readerOpen(readerTimeoutMS); return localStream; } void CLocalMemStreamManager::closeLocalStreamForReading( CLocalMemStream * localStream) { //WaitForSingleObject(m_mutex, INFINITE); pthread_mutex_lock(&m_mutex); string streamName = localStream->m_streamName; localStream->readerClose(); if (localStream->didReaderAndWriterClose()) { m_localStreams[streamName] = NULL; delete localStream; } // ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); } // Writer API CLocalMemStream *CLocalMemStreamManager::openLocalStreamForWriting( string & streamName, const VIDEOINFO &vi, const string & codecstr, int bps, int maxBufferSize, DWORD writerTimeoutMS) { //WaitForSingleObject(m_mutex, INFINITE); pthread_mutex_lock(&m_mutex); CLocalMemStream *localStream = m_localStreams[streamName]; if (localStream == NULL) { fprintf(stderr, "COutputLocalMemStream :: FIRST TIME : OPEN from %s\n", streamName.c_str()); localStream = new CLocalMemStream(streamName); if (localStream == NULL) { //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); throw SWException( "Failed to create CLocalMemStream for stream %s for writing", streamName.c_str()); } m_localStreams[streamName] = localStream; } //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); localStream->writerOpen(vi, codecstr, bps, maxBufferSize, writerTimeoutMS); return localStream; } void CLocalMemStreamManager::closeLocalStreamForWriting( CLocalMemStream * localStream) { //WaitForSingleObject(m_mutex, INFINITE); pthread_mutex_lock(&m_mutex); if (localStream == NULL) { //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); throw SWException("Failed to close local stream for writing"); } string streamName = localStream->m_streamName; localStream->writerClose(); if (localStream->didReaderAndWriterClose()) { m_localStreams[streamName] = NULL; delete localStream; } //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); } void CLocalMemStreamManager::printReport(FILE *outputFile) { //WaitForSingleObject(m_mutex, INFINITE); pthread_mutex_lock(&m_mutex); map<string, CLocalMemStream *>::iterator iter; for (iter = m_localStreams.begin(); iter != m_localStreams.end(); iter++) { CLocalMemStream * stream = iter->second; if (stream != NULL) { pthread_mutex_lock(&stream->m_mutex); //WaitForSingleObject(stream->m_mutex, INFINITE); fprintf( outputFile, " %s: Reader state = %d Writer state = %d Frames in stream = %d\n", 
stream->m_streamName.c_str(), stream->m_readerState, stream->m_writerState, stream->getNumberOfFrames()); //ReleaseMutex(stream->m_mutex); pthread_mutex_unlock(&stream->m_mutex); } } //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); } void CLocalMemStreamManager::pauseAllStreams() { CLocalMemStream::pauseAllStreams(); } void CLocalMemStreamManager::resumeAllStreams() { CLocalMemStream::resumeAllStreams(); } #endif<file_sep>/VideoStream/VideoStream/VSAVIStdFileMedia.h #pragma once #include "VSBaseMedia.h" #ifdef WIN32 #include <windows.h> #include <vfw.h> #endif namespace VideoStream { //class CInputAVIStdFile : public IInputVideoStream //{ //private: // PAVIFILE _pfile; // PAVISTREAM _pavi; // PAVISTREAM _pavi_comp; // int _frameNum; //public: // CInputAVIStdFile (LPCTSTR location); // ~CInputAVIStdFile(); // const VIDEOINFO &GetVideoInfo() const; // CFramePtr GetNextFrame(); //}; #ifdef WIN32 class COutputAVIStdFile : public CBaseOutputVideoStream<true> { private: PAVISTREAM _pavi; int _frameNum; public: COutputAVIStdFile(); ~COutputAVIStdFile(); bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams); bool WriteFrame (CFramePtr framePtr); void Close(); }; #endif class CAVIStdFileMedia : public IMedia { public: virtual IInputVideoStream *CreateInputVideoStream(); virtual IOutputVideoStream *CreateOutputVideoStream(); }; }<file_sep>/NewEncoder/VideoEncoder/OutputAvi.h #pragma once #include "OutputFormat.h" #include "VirtualDub/AVIOutput.h" class OutputAvi : public OutputFormat { private: AVIOutputFile* aviout; void Init(const char *path, AVISTREAMINFO *streamInfo, BITMAPINFOHEADER *bih); public: OutputAvi(const char *path, DWORD length, DWORD rate, DWORD scale, DWORD fourCC, DWORD quality, BITMAPINFOHEADER *bih); OutputAvi(const char *path, AVISTREAMINFO *streamInfo, BITMAPINFOHEADER *bih); void writeData(void* data, int dataSize, bool keyframe); ~OutputAvi(void); }; <file_sep>/VideoStream/VideoStream/SharedData.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSSharedData.h" #include "VSLog.h" #include "VSExceptions.h" #include <stdlib.h> #include <tchar.h> #include <string> #include <direct.h> using namespace VideoStream; static LPCTSTR GetSharedDataFolder() { static bool folderReady = false; static TCHAR dataFolder[MAX_PATH+1]; if (!folderReady) { TCHAR tempPath[MAX_PATH+1]; GetTempPath (MAX_PATH, tempPath); _stprintf_s (dataFolder, MAX_PATH, _T("%s\\SundaySky\\"), tempPath); _tmkdir(dataFolder); _stprintf_s (dataFolder, MAX_PATH, _T("%s\\SundaySky\\VSData\\"), tempPath); _tmkdir(dataFolder); folderReady = true; } return dataFolder; } HANDLE CSharedData::GetFileHandle (LPCTSTR dataName, bool persistent) { if (persistent) { // Get data file name static TCHAR dataFileName[MAX_PATH+1]; dataFileName[0] = '\0'; _tcscat_s (dataFileName, MAX_PATH, GetSharedDataFolder()); _tcscat_s (dataFileName, MAX_PATH, dataName); _tcscat_s (dataFileName, MAX_PATH, _T(".dat")); _fileHandle = CreateFile (dataFileName, GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, 0, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, 0); } else // use memory paging file (RAM) _fileHandle = INVALID_HANDLE_VALUE; return _fileHandle; } CSharedData::CSharedData (LPCTSTR dataName, bool persistent, int size, DWORD dwTimeout) : _dwTimeout (dwTimeout), _fileHandle (INVALID_HANDLE_VALUE) { // This mutex is used to prevent two threads/processes writing/reading the same piece of data std::string mutexName (dataName); mutexName += "_mutex"; _mutex = CreateMutex (NULL, FALSE, mutexName.c_str()); // 
Create shared memory if (size > 0) _shmem = CreateFileMapping (GetFileHandle(dataName, persistent), 0, PAGE_READWRITE, 0, size, dataName); else _shmem = OpenFileMapping (FILE_MAP_WRITE, 0, dataName); if (_shmem == NULL) throw CSharedDataIOException(); // Map shared memory to process memory _pData = MapViewOfFile (_shmem, FILE_MAP_WRITE, 0, 0, 0); if (_pData == NULL) throw CSharedDataIOException(); } CSharedData::~CSharedData() { UnmapViewOfFile (_pData); CloseHandle (_shmem); if (_fileHandle != INVALID_HANDLE_VALUE) CloseHandle (_fileHandle); CloseHandle(_mutex); } void CSharedData::Block() { // Wait until mutex is released or timeout was reached DWORD dwResult = WaitForSingleObject (_mutex, _dwTimeout); if (dwResult == WAIT_TIMEOUT) { CLog::Write ("TIMEOUT reached in CSharedData::Block()"); throw CSyncTimeoutException(); } } void CSharedData::Release() { ReleaseMutex (_mutex); } void CSharedData::Read (void *pData, int size, bool atomic) { if (atomic) Block(); memcpy (pData, _pData, size); if (atomic) Release(); } void *CSharedData::Read (int &size, bool atomic) { if (atomic) Block(); memcpy (&size, _pData, sizeof (int)); void *pData = malloc (size); memcpy (pData, ((char *)_pData) + sizeof (int), size); if (atomic) Release(); return pData; } void CSharedData::Write (const void *pData, int size, bool atomic) { if (atomic) Block(); memcpy (_pData, pData, size); if (atomic) Release(); } void CSharedData::WriteWithSize (const void *pData, int size, bool atomic) { if (atomic) Block(); memcpy (_pData, &size, sizeof (int)); memcpy (((char *)_pData) + sizeof (int), pData, size); if (atomic) Release(); } int CSharedData::ReadInt(bool atomic) { int value; Read(&value, sizeof(int), atomic); return value; } void CSharedData::WriteInt (int value, bool atomic) { Write(&value, sizeof(int), atomic); } int CSharedData::IncreaseInt (bool atomic) { if (atomic) Block(); int value = ReadInt(false); // get value WriteInt(value+1, false); // increase it if (atomic) Release(); return value; } /* * CSharedDataLocker: * An object of this class locks the specified name for mutual exclusive (i.e. no * 2 objects with the same dataName can exist simultaneously). * Its is used to protect the operation of creating a shared data and writing/reading to it, * so that the reader is guaranteed that if he succeeded to access a shared memory, it is * already updated (used for the shmem header). 
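 * Typical usage is RAII-style: construct a CSharedDataLocker on the stack around the
 * "create shared data and write header" sequence; the destructor releases the named
 * mutex even if an exception unwinds the stack in between.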
*/ CSharedDataLocker::CSharedDataLocker( const char *dataName ) { std::string mutexName (dataName); mutexName += "_Locker"; _mutex = CreateMutex (NULL, FALSE, mutexName.c_str()); // Wait until mutex is released or timeout was reached DWORD dwTimeout = 3600000; DWORD dwResult = WaitForSingleObject (_mutex, dwTimeout); if (dwResult == WAIT_TIMEOUT) { CLog::Write ("TIMEOUT reached in CSharedDataLocker()"); throw CSyncTimeoutException(); } } CSharedDataLocker::~CSharedDataLocker() { if ( _mutex ) { ReleaseMutex (_mutex); CloseHandle( _mutex ); _mutex = NULL; } } #else #include "VSSharedData.h" #include "VSLog.h" #include "VSExceptions.h" #include <stdlib.h> #include <string.h> #include <sys/types.h> #include <sys/stat.h> #include <sys/mman.h> #include <sys/ipc.h> #include <sys/shm.h> #include <fcntl.h> #include <unistd.h> #include <errno.h> #include "SWException.h" #include "FileUtils.h" using namespace VideoStream; static LPCTSTR GetSharedDataFolder() { static bool folderReady = false; static TCHAR dataFolder[MAX_PATH + 1]; if (!folderReady) { int temp; const char* tempPath = getenv("TMPDIR"); if (tempPath == NULL) { tempPath = "/tmp"; } snprintf(dataFolder, MAX_PATH, _T("%s/SundaySky/"), tempPath); if (!FileUtils::folderExists(dataFolder)) { if ((temp = mkdir(dataFolder, 0777)) != 0) { throw SWException("Unable to mkdir 1"); } } snprintf(dataFolder, MAX_PATH, _T("%s/SundaySky/VSData/"), tempPath); if (!FileUtils::folderExists(dataFolder)) { if ((temp = mkdir(dataFolder, 0777)) != 0) { throw SWException("Unable to mkdir 2"); } } folderReady = true; } return dataFolder; } int CSharedData::GetFileHandle(LPCTSTR dataName, bool persistent) { if (persistent) { // Get data file name static char dataFileName[MAX_PATH + 1]; dataFileName[0] = '\0'; strncat(dataFileName, GetSharedDataFolder(), MAX_PATH); strncat(dataFileName, dataName, MAX_PATH); strncat(dataFileName, _T(".dat"), MAX_PATH); // _fileHandle = CreateFile(dataFileName, GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, 0, // OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, 0); if (FileUtils::fileExists(dataFileName)) { if (remove(dataFileName) != 0) { throw SWException("Error deleting file %s", dataFileName); } } _fileHandle = open(dataFileName, O_RDWR | O_CREAT | O_TRUNC, MY_MASK); int result = lseek(_fileHandle, m_size * sizeof(int), SEEK_SET); if (result == -1) { close(_fileHandle); throw SWException("Error calling lseek"); } result = write(_fileHandle, "", 1); if (result == -1) { close(_fileHandle); throw SWException("Error writing last byte"); } } else // use memory paging file (RAM) _fileHandle = -1; // INVALID_HANDLE_VALUE; return _fileHandle; } CSharedData::CSharedData(LPCTSTR dataName, bool persistent, int size, DWORD dwTimeout) : _dwTimeout(dwTimeout), _fileHandle(-1) { // This mutex is used to prevent two threads/processes writing/reading the same piece of data // std::string mutexName(dataName); // mutexName += "_mutex"; m_size = size; //_mutex = CreateMutex(NULL, FALSE, mutexName.c_str()); pthread_mutex_init(&_mutex, NULL); // // Create shared memory if (size > 0) {//?? 
map_fixed if (persistent) { _pData = mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, GetFileHandle(dataName, persistent), 0); } else { _pData = mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED | MAP_ANONYMOUS, GetFileHandle(dataName, persistent), 0); } } else { //generate memory key key_t sharedKey = ftok(dataName, 1); //get the shared memory segment id int shareSpaceId = shmget(sharedKey, size, MY_MASK | IPC_CREAT); //Attach shared the shared memory segment _pData = shmat(shareSpaceId, NULL, 0); } //OpenFileMapping(FILE_MAP_WRITE, 0, dataName); if (_pData == NULL) throw CSharedDataIOException(); // Map shared memory to process memory //_pData = MapViewOfFile(_shmem, FILE_MAP_WRITE, 0, 0, 0); // _pData = mmap(_shmem, size, PROT_WRITE, MAP_FIXED, 0, 0); // if (_pData == NULL) // throw CSharedDataIOException(); /*if (size > 0) _shmem = CreateFileMapping(GetFileHandle(dataName, persistent), 0, PAGE_READWRITE, 0, size, dataName); else _shmem = OpenFileMapping(FILE_MAP_WRITE, 0, dataName); if (_shmem == NULL) throw CSharedDataIOException(); // Map shared memory to process memory _pData = MapViewOfFile(_shmem, FILE_MAP_WRITE, 0, 0, 0); if (_pData == NULL) throw CSharedDataIOException();*/ } CSharedData::~CSharedData() { if (_fileHandle != -1) { close(_fileHandle); } munmap(_pData, m_size); // shmdt(_shmem); m_size = 0; //UnmapViewOfFile(_pData); //CloseHandle(_shmem); //if (_fileHandle != INVALID_HANDLE_VALUE) // CloseHandle(_fileHandle); //CloseHandle(&_mutex); pthread_mutex_destroy(&_mutex); } void CSharedData::Block() { struct timespec timeout; timeout.tv_sec = _dwTimeout; int dwResult = pthread_mutex_timedlock(&_mutex, &timeout); if (dwResult == ETIMEDOUT) { CLog::Write("TIMEOUT reached in CSharedData::Block()"); throw CSyncTimeoutException(); } // Wait until mutex is released or timeout was reached // DWORD dwResult = WaitForSingleObject(_mutex, _dwTimeout); // if (dwResult == WAIT_TIMEOUT) { // CLog::Write("TIMEOUT reached in CSharedData::Block()"); // throw CSyncTimeoutException(); // } } void CSharedData::Release() { //ReleaseMutex(_mutex); pthread_mutex_unlock(&_mutex); } void CSharedData::Read(void *pData, int size, bool atomic) { if (atomic) Block(); memcpy(pData, _pData, size); if (atomic) Release(); } void *CSharedData::Read(int &size, bool atomic) { if (atomic) Block(); memcpy(&size, _pData, sizeof(int)); void *pData = malloc(size); memcpy(pData, ((char *)_pData) + sizeof(int), size); if (atomic) Release(); return pData; } void CSharedData::Write(const void *pData, int size, bool atomic) { if (atomic) Block(); memcpy(_pData, pData, size); if (atomic) Release(); } void CSharedData::WriteWithSize(const void *pData, int size, bool atomic) { if (atomic) Block(); memcpy(_pData, &size, sizeof(int)); memcpy(((char *)_pData) + sizeof(int), pData, size); if (atomic) Release(); } int CSharedData::ReadInt(bool atomic) { int value; Read(&value, sizeof(int), atomic); return value; } void CSharedData::WriteInt(int value, bool atomic) { Write(&value, sizeof(int), atomic); } int CSharedData::IncreaseInt(bool atomic) { if (atomic) Block(); int value = ReadInt(false); // get value WriteInt(value + 1, false); // increase it if (atomic) Release(); return value; } /* * CSharedDataLocker: * An object of this class locks the specified name for mutual exclusive (i.e. no * 2 objects with the same dataName can exist simultaneously). 
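 * (Note: this non-WIN32 variant initializes a fresh, process-local pthread mutex per
 * object, so unlike the named Win32 mutex it does not provide cross-process exclusion.)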
* Its is used to protect the operation of creating a shared data and writing/reading to it, * so that the reader is guaranteed that if he succeeded to access a shared memory, it is * already updated (used for the shmem header). */ CSharedDataLocker::CSharedDataLocker(const char *dataName) { std::string mutexName(dataName); mutexName += "_Locker"; // _mutex = CreateMutex(NULL, FALSE, mutexName.c_str()); pthread_mutex_init(&_mutex, NULL); // Wait until mutex is released or timeout was reached DWORD dwTimeout = 3600000; struct timespec timeout; timeout.tv_sec = dwTimeout; pthread_mutex_lock(&_mutex); int dwResult = pthread_mutex_timedlock(&_mutex, &timeout); if (dwResult == ETIMEDOUT) { CLog::Write("TIMEOUT reached in CSharedDataLocker()"); throw CSyncTimeoutException(); } if (&_mutex) { pthread_mutex_unlock(&_mutex); } // // DWORD dwResult = WaitForSingleObject(_mutex, dwTimeout); // if (dwResult == WAIT_TIMEOUT) { // CLog::Write("TIMEOUT reached in CSharedDataLocker()"); // throw CSyncTimeoutException(); // } } CSharedDataLocker::~CSharedDataLocker() { if (&_mutex) { // ReleaseMutex(_mutex); // CloseHandle(_mutex); pthread_mutex_unlock(&_mutex); pthread_mutex_destroy(&_mutex); } } #endif <file_sep>/SVG2Video/FrameWriter.cpp /************************************************************************** * * THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY * KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR * PURPOSE. * * Copyright (C) 1992 - 1996 Microsoft Corporation. All Rights Reserved. * **************************************************************************/ /**************************************************************************** * * WRITEAVI.C * * Creates the file OUTPUT.AVI, an AVI file consisting of a rotating clock * face. This program demonstrates using the functions in AVIFILE.DLL * to make writing AVI files simple. * * This is a stripped-down example; a real application would have a user * interface and check for errors. 
* ***************************************************************************/ #include "stdafx.h" #include <memory.h> //#include <mmsystem.h> #include "FrameWriter.h" //#include "CmdArgs.h" #include "Logger.h" #include "SWException.h" #include <string.h> //#define MIN(_a,_b) ((_a)<(_b)?(_a):(_b)) //#define MAX(_a,_b) ((_a)>(_b)?(_a):(_b)) //FrameWriter::FrameWriter(const char* lpszFileName, int out_rate, int out_scale, // int xdim, int ydim, bool dbg) : // xDim(xdim), yDim(ydim) { FrameWriter::FrameWriter(const char* lpszFileName, int out_rate, int xdim, int ydim, bool dbg, int minBitDepth, Logger &logger) : xDim(xdim), yDim(ydim), m_minBitDepth(minBitDepth), m_logger(logger) { if ( strcmp( lpszFileName, "NONE" ) == 0 ) { _outputStream = NULL; return; } VideoStream::VIDEOINFO vi; vi.bitCount = m_minBitDepth; vi.frameRate = out_rate; vi.width = xdim; vi.height = ydim; VideoStream::EnableLog(dbg); _outputStream = VideoStream::COutputVideoStream::Create(); _outputStream->Open((LPCTSTR)lpszFileName, vi); //fix alpha stats total_handled = total_changed = total_contradicts = 0; } FrameWriter::~FrameWriter() { // m_logger.trace( // "Fix-alpha: Total bytes handled: %d, total quads changed: %d, total contradicts: %d\n", // total_handled, total_changed, total_contradicts); if (_outputStream) { _outputStream->Close(); VideoStream::COutputVideoStream::Destroy(_outputStream); } } void FrameWriter::AddFrame(VideoStream::CMutableFramePtr wFramePtr) { if (wFramePtr.GetData() == NULL) { throw SWException("Failed to get frame from memory"); } int bitsPerPixel = wFramePtr.GetBitCount(); // Validate depth if (bitsPerPixel < 24) { throw SWException("White bitdepth (%d) must be >= 24\n", bitsPerPixel); } if (_outputStream) { if (!_outputStream->WriteFrame(wFramePtr)) throw SWException("Failed to write frame to VStream.\n"); } return; } <file_sep>/Fmod/AudioSpeaker.cpp #include "AudioSpeaker.h" AudioSpeaker::AudioSpeaker(void) { } AudioSpeaker::~AudioSpeaker(void) { } <file_sep>/utils/Utils/MutexAutioLock.cpp /* * MutexAutioLock.cpp * * Created on: Aug 8, 2013 * Author: eranv */ #ifndef WIN32 #include "MutexAutoLock.h" MutexAutoLock::MutexAutoLock(pthread_mutex_t& mutex) { //WaitForSingleObject(m_mutex, INFINITE); m_mutex = mutex; pthread_mutex_lock(&m_mutex); m_locked = true; } MutexAutoLock::~MutexAutoLock() { unlock(); } void MutexAutoLock::unlock() { if (m_locked) { //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); m_locked = false; } } #endif <file_sep>/Fmod/Events/DurationEvent.h #ifndef __DURATION_EVENT_H__ #define __DURATION_EVENT_H__ #include "SyncPointEventDescriptor.h" class DurationEvent : public SyncPointEventDescriptor { public: int apply(FMOD::Channel *channel) { channel->setPaused(true); FMOD::Sound *sound; channel->getCurrentSound(&sound); unsigned int length; sound->getLength(&length, FMOD_TIMEUNIT_MS); // set the position for the end of the file. 
pay attention to the fact that the end is length - 1 FMOD_RESULT result = channel->setPosition(length-1, FMOD_TIMEUNIT_MS); channel->setPaused(false); return 0; } }; #endif <file_sep>/VideoStream/VideoStream/Video.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSVideoImpl.h" #include "VSLog.h" #include "VSExceptions.h" #include <tchar.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include "VSThreadOutput.h" #include "VSSync.h" #include "SWException.h" #else #include "VSVideoImpl.h" #include "VSLog.h" #include "VSExceptions.h" #include <stdio.h> #include <stdlib.h> #include <string.h> #include <algorithm> #include "VSThreadOutput.h" #include "VSSync.h" #include "SWException.h" #endif using namespace VideoStream; /********************/ /* CVideoStreamImpl */ /********************/ const int MAX_MSG = 256; CVideoStreamImpl::CVideoStreamImpl (LPCTSTR processVerb) { _processVerb = strdup (processVerb); _curFrame = 0; _streamId = -1; } CVideoStreamImpl::~CVideoStreamImpl() { _Close(); free (_processVerb); } void VideoStream::EnableLog (bool enable) { CLog::Enable (enable); } void VideoStream::KillStream (LPCTSTR location) { // Log message const int MAX_MSG = 256; TCHAR str[MAX_MSG+1]; snprintf (str, MAX_MSG, _T("Sending KILL event to <%s>"), location); str[MAX_MSG+1] ='\0'; CLog::Write(str); // Kill stream std::string bareLocation; MediaLocationParams locationParams; CMediaFactory::ParseMediaLocation (location, &bareLocation, &locationParams); CSyncObjects::KillStream (bareLocation.c_str()); } void VideoStream::KillAllStreams() { CLog::Write(_T("Sending KILL event to all streams")); CSyncObjects::KillAllStreams(); } void CVideoStreamImpl::Write2Log (LPCTSTR msg) { if (CLog::IsEnabled()) CLog::Write (msg, _streamId); } void CVideoStreamImpl::_Open (LPCTSTR location) { _Close(); _curFrame = 0; // Write log message if (CLog::IsEnabled()) { _streamId = CLog::Instance()->OpenContext(location); const int MAX_MSG = 1024; TCHAR str[MAX_MSG+1]; snprintf (str, MAX_MSG, "OPEN %s <%s>", _processVerb, location); Write2Log (str); } } void CVideoStreamImpl::FrameWasProcessed (LPCTSTR processVerb) { // Write log message if (CLog::IsEnabled()) { const int MAX_MSG = 256; TCHAR str[MAX_MSG+1]; snprintf (str, MAX_MSG, "%s #%d", (processVerb == NULL ? _processVerb : processVerb), _curFrame); Write2Log (str); } _curFrame++; } void CVideoStreamImpl::_Close() { if (CLog::IsEnabled()) { if (_streamId >= 0) { TCHAR str[MAX_MSG+1]; snprintf (str, MAX_MSG, _T("CLOSE %s"), _processVerb); Write2Log (str); CLog::Instance()->CloseContext(_streamId); _streamId = -1; } } } /*************************/ /* CInputVideoStreamImpl */ /*************************/ CInputVideoStreamImpl::CInputVideoStreamImpl() : CVideoStreamImpl (_T("READ")) { _pStream = NULL; _pSync = NULL; _eov = false; _slowFrame = 0; } bool CInputVideoStreamImpl::HandshakeWithProducer (LPCTSTR _bareLocation, bool waitForProducer, DWORD timeoutMS) { _pSync = NULL; if (_pStream->BypassSyncObject()) { return true; } // Wait for video to be produced try { Write2Log (_T("OPEN: Trying to handshake with producer...")); _pSync = CInputSync::Handshake (_bareLocation, waitForProducer, timeoutMS); Write2Log (_pSync == NULL ? _T("OPEN: Producer already gone") : _T("OPEN: Handshake complete!")); return true; } catch (...) { Write2Log (_T("OPEN: Handshake failed")); return false; } } bool CInputVideoStreamImpl::OpenInputStream (LPCTSTR location) { // Try to open media // Find bare location, codec, timeout, etc. 
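	// The location string may carry per-stream parameters (codec, buffer size, timeout)
	// alongside the bare media location; ParseMediaLocation() splits the two so the media
	// is opened with _bareLocation while _locationParams drives the handshake below.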
CMediaFactory::ParseMediaLocation (location, &_bareLocation, &_locationParams); bool mediaOpened = _pStream->Open (_bareLocation.c_str(), _locationParams, _streamId); if (mediaOpened) Write2Log (_T("OPEN: Media already exists")); // Handshake with producer if (!HandshakeWithProducer (_bareLocation.c_str(), !mediaOpened, _locationParams.timeoutMS)) return false; // Open again if early open did not work if (!mediaOpened) mediaOpened = _pStream->Open (_bareLocation.c_str(), _locationParams, _streamId); return mediaOpened; } bool CInputVideoStreamImpl::Open (LPCTSTR location, unsigned int startFrame) { // If already open, close Close(); _Open (location); IMedia *pMedia = CMediaFactory::GetMedia (location); _pStream = pMedia->CreateInputVideoStream(); delete pMedia; // Use separate thread for input if desired if (_pStream->IsAsync()) _pStream->SetWaitForFrameCallback (WaitForFrame, this); // Open input stream if (OpenInputStream (location)) { // Start reading in separate thread if (_pStream->IsAsync()) _pStream->RunThread(); // Move ahead "startFrame" frames if (FastForward (startFrame)) return true; } // Open has failed --> close anything dangling Close(); return false; } const VIDEOINFO &CInputVideoStreamImpl::GetVideoInfo() const { if (_pStream == NULL) { static VIDEOINFO info = {0, 0, 0, 0}; return info; } else return _pStream->GetVideoInfo(); } unsigned int CInputVideoStreamImpl::GetWidth() const { return GetVideoInfo().width; } unsigned int CInputVideoStreamImpl::GetHeight() const { return GetVideoInfo().height; } unsigned int CInputVideoStreamImpl::GetFrameRate() const { return GetVideoInfo().frameRate; } unsigned int CInputVideoStreamImpl::GetBitCount() const { return GetVideoInfo().bitCount; } bool CInputVideoStreamImpl::WaitForFrame (void *pThis, HANDLE readEndEventHandle) { CInputVideoStreamImpl *pStream = (CInputVideoStreamImpl *) pThis; return pStream->WaitForFrame(readEndEventHandle); } bool CInputVideoStreamImpl::WaitForFrame(HANDLE readEndEventHandle) { if (_pSync != NULL) { try { if (CLog::IsEnabled()) CLog::Instance()->WriteItemTime(_streamId, _slowFrame, _T("Waiting for frame")); _slowFrame++; if (_pSync->WaitForFrame (readEndEventHandle) == false) return false; } catch (...) { Write2Log (_T("READ: WaitForFrame failed")); return false; } } return true; } bool CInputVideoStreamImpl::FastForward (unsigned int frames) { if (_pStream == NULL) return false; if (_eov == true) return false; if (frames == 0) return true; for (unsigned int i = 0; i < frames && !_eov; i++) { // If input stream is synchronous then wait for next frame (otherwise stream will do the waiting...) if (!_pStream->IsAsync()) { if (!WaitForFrame(NULL)) _eov = true; } if (_eov == true || _pStream->FastForward (1) == false) _eov = true; if (!_eov) FrameWasRead (_T("SKIP")); } if (_eov) { Write2Log (_T("SKIP: reached end of video")); return false; // End of video reached } else return true; } CFramePtr CInputVideoStreamImpl::GetNextFrame() { CFramePtr framePtr; if (_pStream == NULL) return framePtr; if (_eov == true) return framePtr; // If input stream is synchronous then wait for next frame to be produced (otherwise separate thread will do the waiting...) 
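	// Once a wait fails or the media returns a null frame, _eov is latched and later
	// calls return immediately without touching the underlying media.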
if (!_pStream->IsAsync()) { if (!WaitForFrame(NULL)) _eov = true; } // Read next frame from media if (!_eov) framePtr = _pStream->GetNextFrame(); if (!framePtr.isNull()) FrameWasRead(); else { _eov = true; Write2Log (_T("READ: reached end of video")); } return framePtr; } void CInputVideoStreamImpl::FrameWasRead(LPCTSTR processVerb) { if (CLog::IsEnabled()) CLog::Instance()->WriteItemTime(_streamId, _curFrame, _T("Complete reading")); FrameWasProcessed(processVerb); if (_pSync != NULL) _pSync->FrameWasRead(); } void CInputVideoStreamImpl::Close() { if (_pStream != NULL) { _pStream->Close(); delete _pStream; _pStream = NULL; } if (_pSync != NULL) { _pSync->Close(); delete _pSync; _pSync = NULL; } _Close(); } CInputVideoStreamImpl::~CInputVideoStreamImpl() { Close(); } /**************************/ /* COutputVideoStreamImpl */ /**************************/ COutputVideoStreamImpl::COutputVideoStreamImpl() : CVideoStreamImpl (_T("WRITE")) { _pStream = NULL; _pSync = NULL; _slowFrame = 0; // Initialize as a non multiplexer _numOfVidoeStreams = 0; _videoStreamArray = NULL; _locations = NULL; } bool COutputVideoStreamImpl::Open (LPCTSTR location, const VIDEOINFO &vi) { // If already open, close Close(); _Open (location); _vi = vi; // If the location has ';' characters in it then it is actually a multiple output stream string locationStr(location); unsigned int semiColonCount = count(locationStr.begin(), locationStr.end(), ';'); if (semiColonCount > 0) { // copy the location string to a local copy so that we can plant '\0' chars in it. _locations = (char *)malloc(strlen(location)+1); if (_locations == NULL) { Close(); return false; } memcpy(_locations, location, strlen(location)+1); // We don't yet know exactly how many sub-locations there are in the location string but there // are certainly no more than semiColonCount+1 _videoStreamArray = new COutputVideoStreamImpl[semiColonCount+1]; if (_videoStreamArray == NULL) { Close(); return false; } // Use strtok_r() (strtok() is not thread safe) to delimit the _locations string into sub locations char *saveptr = NULL; for (char *str = _locations; ; str = NULL) { #ifdef WIN32 char *subLocation = strtok_s(str, ";", &saveptr); #else char *subLocation = strtok_r(str, ";", &saveptr); #endif if (subLocation == NULL) break; // The recursion should be of depth 1. if (! _videoStreamArray[_numOfVidoeStreams].Open(subLocation, vi)) { Close(); return false; } _numOfVidoeStreams++; } return true; } // This is not a multiplexer. // Get output video stream IMedia *pMedia = CMediaFactory::GetMedia (location); _pStream = pMedia->CreateOutputVideoStream(); delete pMedia; // Find bare location, codec, timeout, etc. CMediaFactory::ParseMediaLocation (location, &_bareLocation, &_locationParams); if (_pStream->BypassSyncObject()) { // In case we bypass the _pSync object, just call _pStream->Open() return _pStream->Open (_bareLocation.c_str(), vi, _locationParams, _streamId); } else { // Use separate thread for output if desired if (_pStream->IsAsync()) _pStream->SetFrameWrittenCallback (FrameWasWritten, this); _pSync = new COutputSync (_bareLocation.c_str(), _locationParams.bufSize, _locationParams.timeoutMS); if (_pStream->Open (_bareLocation.c_str(), vi, _locationParams, _streamId) == true) { // Sign that producer started (consumer can now start reading). 
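			// The reader side blocks in CInputSync::Handshake() until this signal arrives,
			// so it is raised only after the underlying media opened successfully.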
_pSync->Started(); return true; } else { delete _pSync; _pSync = NULL; delete _pStream; _pStream = NULL; return false; } } } void COutputVideoStreamImpl::Close() { // Free multiplexer stuff for (unsigned int i=0; i<_numOfVidoeStreams; i++) { // The recursion should be of depth 1. _videoStreamArray[i].Close(); } _numOfVidoeStreams = 0; if (_locations != NULL) { free (_locations); _locations = NULL; } if (_videoStreamArray != NULL) { delete[] _videoStreamArray; _videoStreamArray = NULL; } // Free non-multiplexer stuff if (_pStream != NULL) { _pStream->Close(); if (_pSync != NULL) { _pSync->VideoWasComplete(); if (!_pStream->IsPersistent()) { try { _pSync->WaitForConsumerEnd(); } catch (...) {} } delete _pSync; _pSync = NULL; } delete _pStream; _pStream = NULL; } _Close(); } COutputVideoStreamImpl::~COutputVideoStreamImpl() { Close(); } bool COutputVideoStreamImpl::WriteFrame (CFramePtr framePtr) { // If this is a multiplexer, delegate command to sons if (_numOfVidoeStreams > 0) { for (unsigned int i=0; i<_numOfVidoeStreams; i++) { // The recursion should be of depth 1. if (! _videoStreamArray[i].WriteFrame(framePtr)) return false; } return true; } if (_pStream != NULL) { // Make sure frame has standard pixel format and attributes that match those of the video stream if (!framePtr.IsStandardPixelFormat() || framePtr.GetBitCount() != _vi.bitCount || framePtr.GetWidth() != _vi.width || framePtr.GetHeight() != _vi.height) throw SWException("Frame format does not match format of video stream."); if (CLog::IsEnabled()) CLog::Instance()->WriteItemTime(_streamId, _slowFrame++, _T("Waiting for buffer")); if (_pSync != NULL) { try { _pSync->WaitForBufferSpace(); } catch (...) { Write2Log (_T("WRITE: WaitForBufferSpace failed")); return false; } } bool rc = _pStream->WriteFrame (framePtr); if (!_pStream->IsAsync()) FrameWasWritten(); if (_pSync != NULL && !_pStream->IsPersistent()) _pStream->DeleteUpToFrame (_pSync->HowManyFramesConsumed() - 1); return rc; } return false; } void COutputVideoStreamImpl::FrameWasWritten (void *pThis) { COutputVideoStreamImpl *pStream = (COutputVideoStreamImpl *) pThis; pStream->FrameWasWritten(); } void COutputVideoStreamImpl::FrameWasWritten() { if (CLog::IsEnabled()) CLog::Instance()->WriteItemTime(_streamId, _curFrame, _T("Written")); FrameWasProcessed(); if (_pSync != NULL) _pSync->FrameWasWritten(); } /********************/ /* CONSTRUCTION API */ /********************/ CInputVideoStream *CInputVideoStream::Create() { return new CInputVideoStreamImpl(); } void CInputVideoStream::Destroy (CInputVideoStream *pStream) { delete pStream; } COutputVideoStream *COutputVideoStream::Create() { return new COutputVideoStreamImpl(); } void COutputVideoStream::Destroy (COutputVideoStream *pStream) { delete pStream; }<file_sep>/VideoStream/VideoStream/VSLocalMemStream.h #pragma once #ifdef WIN32 #include <windows.h> #else #include <pthread.h> #include <semaphore.h> #endif #include <iostream> #include <queue> #include <string> #include "VSFrame.h" using namespace std; namespace VideoStream { class CLocalMemStream { private: enum State { NOT_OPENED, OPENED, CLOSED }; string m_streamName; static const int m_frameArraySize = 2048; // actually there can be one less frame #ifdef WIN32 HANDLE m_mutex; // Mutex for accessing the frame array and refcount HANDLE m_fullSem; // Semaphore for counting full slots (when value is 0 stream is empty and reader is stuck) HANDLE m_emptySem; // Semaphore for counting empty slots (when value is 0 stream is full and writer is stuck) HANDLE m_openingSem; 
// Semaphore for blocking the readerOpen until a writerOpen takes place. static HANDLE m_pausingMutex; // A single mutex (for all VideoStreams) for the sake of pausing. #else pthread_mutex_t m_mutex; // Mutex for accessing the frame array and refcount sem_t m_fullSem; // Semaphore for counting full slots (when value is 0 stream is empty and reader is stuck) sem_t m_emptySem; // Semaphore for counting empty slots (when value is 0 stream is full and writer is stuck) sem_t m_openingSem; // Semaphore for blocking the readerOpen until a writerOpen takes place. static pthread_mutex_t m_pausingMutex; // A single mutex (for all VideoStreams) for the sake of pausing. #endif int m_maxBufferSize; DWORD m_readerTimeoutMS; // Number of MS after which reader times out if there are no frames to read. DWORD m_writerTimeoutMS; // Number of MS after which writer times out if the stream reached its capacity of m_maxBufferSize. CFramePtr m_frameArray[m_frameArraySize]; int m_firstElem; int m_firstFreeElem; VIDEOINFO m_vi; string m_codecstr; int m_bps; State m_readerState; State m_writerState; friend class CLocalMemStreamManager; private: // API for the CLocalMemStreamManager CLocalMemStream(string streamName); ~CLocalMemStream(); #ifndef WIN32 void printDebug(sem_t& sem, string semName, string streamName, string mode); #endif public: CFramePtr read(); // Interface for the reader void write(CFramePtr framePtr); // Interface for the writer int getNumberOfFrames(); int getMaxBufferSize(); void readerOpen(DWORD readerTimeoutMS); void readerClose(); void writerOpen(const VIDEOINFO &vi, const string & codecstr, int bps, int maxBufferSize, DWORD writerTimeoutMS); void writerClose(); bool didReaderAndWriterClose(); static void pauseAllStreams(); static void resumeAllStreams(); const VIDEOINFO &getVideoInfo() {return m_vi;} const string getCodec() {return m_codecstr;} int getBps() {return m_bps;} #ifndef WIN32 static pthread_mutex_t create_mutex(); #endif }; }<file_sep>/RenderingManager/RenderingManager/include/Logger.h #pragma once #ifdef WIN32 #include <windows.h> #else #include <stdarg.h> #endif #include "DebugLevel.h" using namespace std; // The implementing class needs only to implement vprintf_adv() and getDebugLevel() class Logger { public: // By default we print a header before the message and a newline in the end (if it is not already in the format). // In order to print without a header/newline, use the special printing methods. void trace(const char *format, ...); void info(const char *format, ...); void warning(const char *format, ...); void error(const char *format, ...); void printf(DebugLevel dbgLvl, const char *format, ...); void v_trace(const char *format, va_list argList); void v_info(const char *format, va_list argList); void v_warning(const char *format, va_list argList); void v_error(const char *format, va_list argList); void vprintf(DebugLevel dbgLvl, const char *format, va_list argList); // Special printing methods. 
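	// printf_adv()/vprintf_adv() let the caller suppress the automatic header and/or the
	// trailing newline that the plain printf()/trace()/info()/warning()/error() calls add,
	// e.g. when assembling a single log line from several pieces.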
void printf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, ...); virtual void vprintf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, va_list argList) = 0; virtual DebugLevel getDebugLevel() =0 ; }; <file_sep>/VideoStream/VideoStream/Log.cpp #ifdef WIN32 #include <StdAfx.h> #include "VSLog.h" #include <tchar.h> #include <Share.h> #include <time.h> #include <locale.h> #include <sys/types.h> #include <sys/timeb.h> #include "VSExceptions.h" #include <direct.h> #include <math.h> using namespace VideoStream; class CMutexBlock { private: HANDLE _mutex; public: CMutexBlock (HANDLE mutex); ~CMutexBlock(); }; CMutexBlock::CMutexBlock (HANDLE mutex) : _mutex (mutex) { DWORD dwTimeout = 10000; // 10 seconds // Wait until mutex is released or timeout was reached WaitForSingleObject (mutex, dwTimeout); // Timeout will be reached only if another process got stuck inside of the mutex block, // or for some reason didn't finish it cleanly. } CMutexBlock::~CMutexBlock() { ReleaseMutex (_mutex); } // Use this macro at the beginning of every {} block that writes/opens/closes/flushes the log file // It will prevent another process/thread trying to write to it at the same time #define FILE_MUTEX_BLOCK CMutexBlock __block(_hFileMutex) // Utility function to get current date/time as a string static LPCTSTR GetCurrentTimeString (bool withDate = false) { const int MAX_TIME_STR_LEN = 128; static TCHAR timeStr[MAX_TIME_STR_LEN+1]; struct _timeb curTime; _ftime_s (&curTime); struct tm curTM; localtime_s (&curTM, &curTime.time); TCHAR tmpStr[MAX_TIME_STR_LEN+1]; _tcsftime (tmpStr, MAX_TIME_STR_LEN, (withDate ? _T("%x %X") : _T("%X")), &curTM); _stprintf_s (timeStr, MAX_TIME_STR_LEN, _T("%s.%03d"), tmpStr, (int) curTime.millitm); return timeStr; } static LPCTSTR GetLogFolder() { static bool folderReady = false; static TCHAR logFolder[MAX_PATH+1]; if (!folderReady) { TCHAR tempPath[MAX_PATH+1]; GetTempPath (MAX_PATH, tempPath); _stprintf_s (logFolder, MAX_PATH, _T("%s\\SundaySky\\"), tempPath); _tmkdir(logFolder); _stprintf_s (logFolder, MAX_PATH, _T("%s\\SundaySky\\VideoStream\\"), tempPath); _tmkdir(logFolder); folderReady = true; } return logFolder; } CLog *CLog::_pLog = NULL; bool CLog::_logEnabled = false; CLog::CLog() { // This mutex is used to prevent two threads/processes writing to the log at the same time _hFileMutex = CreateMutex (NULL, FALSE, _T("VideoStreamLogMutex")); // Set default locale so that time/date strings will look fine setlocale (LC_ALL, _T("")); // Get log file name TCHAR logFileName[MAX_PATH+1]; _stprintf_s (logFileName, MAX_PATH, _T("%s\\Log.txt"), GetLogFolder()); // Open log file { FILE_MUTEX_BLOCK; _fp = _tfsopen (logFileName, _T("a"), _SH_DENYNO); if (_fp != NULL) { fprintf (_fp, "%s: Logging started for process #%d\n", GetCurrentTimeString (true), (int) GetCurrentProcessId()); fflush (_fp); } } } CLog::~CLog() { if (_fp != NULL) { FILE_MUTEX_BLOCK; fclose (_fp); } for (performanceLogsMap::const_iterator i = _performanceLogs.begin(); i != _performanceLogs.end(); i++) { CPerformanceLog *pLog = (*i).second; pLog->Close(); delete pLog; } CloseHandle(_hFileMutex); } int CLog::OpenContext (LPCTSTR name) { if (_logEnabled == true) { CPerformanceLog *pLog = new CPerformanceLog(); int context = pLog->Open(name); _performanceLogs[context] = pLog; return context; } else return -1; } CLog::CPerformanceLog *CLog::GetPerformanceLog (int context) { if (_logEnabled == true && context != -1) { performanceLogsMap::const_iterator log = 
_performanceLogs.find(context); if (log != _performanceLogs.end()) return (*log).second; } return NULL; } void CLog::WriteItemTime (int context, int itemNum, LPCTSTR title) { CPerformanceLog *pLog = GetPerformanceLog (context); if (pLog != NULL) pLog->WriteItemTime(itemNum, title); } void CLog::CloseContext (int context) { if (_logEnabled == true && context != -1) { performanceLogsMap::iterator log = _performanceLogs.find(context); if (log != _performanceLogs.end()) { CPerformanceLog *pLog = (*log).second; pLog->Close(); delete pLog; _performanceLogs.erase(log); } } } void CLog::WriteLine (LPCTSTR line, int context) { if (_logEnabled == true && _fp != NULL) { FILE_MUTEX_BLOCK; if (context < 0) fprintf (_fp, "%s: %s\n", GetCurrentTimeString(), line); else fprintf (_fp, "%s: (%d) %s\n", GetCurrentTimeString(), context, line); fflush (_fp); } } CLog *CLog::Instance() { if (_pLog == NULL) _pLog = new CLog(); return _pLog; } /**********************************/ /* PREFORMANCE LOG IMPLEMENTATION */ /**********************************/ #define PERFORMANCE_DATA_CRITICAL_BLOCK CMutexBlock __block(_hDataMutex) int CLog::CPerformanceLog::GetUniqueId() { int id = 0; try { id = _uniqueId.IncreaseInt(); } catch (CSyncTimeoutException) {} catch (CSharedDataIOException) {} return id; } void CLog::CPerformanceLog::WriteToFile (LPCTSTR filePath) { PERFORMANCE_DATA_CRITICAL_BLOCK; FILE *fp; _tfopen_s (&fp, filePath, _T("w")); if (fp == NULL) return; // Write titles _ftprintf (fp, _T("Index")); for (titlesVector::const_iterator i = _titles.begin(); i != _titles.end(); i++) { const std::string &title = (*i); _ftprintf (fp, _T(", %s"), title.c_str()); } _ftprintf (fp, _T(", , Date, Open time, Full name\n")); // Write items int index = 0; for (int index = 0; index < (int)_times.size(); index++) { itemsMap::const_iterator item = _times.find(index); if (item == _times.end()) continue; const itemTimes &itemTimes = (*item).second; bool firstTime = true; _ftprintf (fp, _T("%d"), index); // Write time breakdown of item for (titlesVector::const_iterator i = _titles.begin(); i != _titles.end(); i++) { _ftprintf (fp, _T(", ")); const std::string &title = (*i); itemTimes::const_iterator timeIter = itemTimes.find(title); if (timeIter != itemTimes.end()) { const struct _timeb &time = (*(timeIter)).second; int secDiff = (int)time.time - (int)_openTime.time; int milliDiff = (int)time.millitm - (int)_openTime.millitm; if (milliDiff < 0) { secDiff--; milliDiff += 1000; } _ftprintf (fp, _T("%d.%03d"), secDiff, milliDiff); } } if (index == 0) { // Write date and open time in first row const int MAX_TIME_STR_LEN = 128; struct tm openTM; localtime_s (&openTM, &_openTime.time); TCHAR tmpStr[MAX_TIME_STR_LEN+1]; _tcsftime (tmpStr, MAX_TIME_STR_LEN, _T("%x, %X"), &openTM); _ftprintf (fp, _T(", , %s.%03d, %s"), tmpStr, (int) _openTime.millitm, _name.c_str()); } _ftprintf (fp, _T("\n")); } fclose (fp); } void CLog::CPerformanceLog::AddTitle (LPCTSTR title) { // Does title already exist for (titlesVector::const_iterator i = _titles.begin(); i != _titles.end(); i++) { const std::string &t = (*i); if (t.compare (title) == 0) return; } _titles.push_back(title); } CLog::CPerformanceLog::CPerformanceLog() : _context(-1), _uniqueId (_T("LogLastID"), true) { // This mutex is used to prevent two threads/processes obtaining a unique id at the same time _hFileMutex = CreateMutex (NULL, FALSE, _T("VideoStreamUniqueIdMutex")); // This mutex is used to prevent two threads accessing the performance data at the same time _hDataMutex = CreateMutex 
(NULL, FALSE, NULL); } CLog::CPerformanceLog::~CPerformanceLog() { if (_context != -1) { Close(); _context = -1; } CloseHandle (_hFileMutex); CloseHandle (_hDataMutex); } int CLog::CPerformanceLog::Open (LPCTSTR name) { _name = name; _context = GetUniqueId(); _ftime_s (&_openTime); return _context; } void CLog::CPerformanceLog::WriteItemTime (int itemNum, LPCTSTR title) { PERFORMANCE_DATA_CRITICAL_BLOCK; AddTitle (title); struct _timeb curTime; _ftime_s (&curTime); (_times[itemNum])[title] = curTime; } void CLog::CPerformanceLog::Close() { // Clean log name std::string::size_type lastPos = _name.find_last_of ('.'); if (lastPos == std::string::npos) lastPos = _name.length(); std::string::size_type startPos = _name.find_last_of ('\\'); if (startPos == std::string::npos) { startPos = _name.find_last_of ('/'); if (startPos == std::string::npos) startPos = 0; } std::string exactName = _name.substr(startPos, lastPos - startPos); // Get log file name TCHAR logFileName[MAX_PATH+1]; _stprintf_s (logFileName, MAX_PATH, _T("%s\\%s_%d.csv"), GetLogFolder(), exactName.c_str(), _context); WriteToFile (logFileName); } #else #include "VSLog.h" #include <time.h> #include <locale.h> #include <sys/types.h> #include <sys/stat.h> #include <sys/timeb.h> #include "VSExceptions.h" #include <stdlib.h> #include <math.h> #include <pthread.h> #include <string.h> #include <iostream> #include <unistd.h> #include "SWException.h" #include "FileUtils.h" #include "CMutex.h" using namespace VideoStream; class CMutexBlock { private: pthread_mutex_t* _mutex; public: CMutexBlock(pthread_mutex_t& mutex); ~CMutexBlock(); }; CMutexBlock::CMutexBlock(pthread_mutex_t& mutex) { DWORD dwTimeout = 10000; // 10 seconds struct timespec timeout; timeout.tv_sec = dwTimeout; _mutex = &mutex; pthread_mutex_timedlock(_mutex, &timeout); // Wait until mutex is released or timeout was reached //WaitForSingleObject (mutex, dwTimeout); // Timeout will be reached only if another process got stuck inside of the mutex block, // or for some reason didn't finish it cleanly. } CMutexBlock::~CMutexBlock() { pthread_mutex_unlock(_mutex); //ReleaseMutex (_mutex); } // Use this macro at the beginning of every {} block that writes/opens/closes/flushes the log file // It will prevent another process/thread trying to write to it at the same time #define FILE_MUTEX_BLOCK CMutexBlock __block(&_hFileMutex) // Utility function to get current date/time as a string static LPCTSTR GetCurrentTimeString(bool withDate = false) { time_t curTime = time(0);//get time now struct tm curTM = *localtime(&curTime); const int MAX_TIME_STR_LEN = 128; static TCHAR timeStr[MAX_TIME_STR_LEN + 1]; //struct _timeb curTime; //_ftime_s (&curTime); //struct tm curTM; //localtime_s (&curTM, &curTime.time); char tmpStr[MAX_TIME_STR_LEN + 1]; strftime(tmpStr, MAX_TIME_STR_LEN, (withDate ? 
"%x %X" : "%X"), &curTM); snprintf(timeStr, MAX_TIME_STR_LEN, "%s.%03d", tmpStr, (int)curTime); return timeStr; } static LPCTSTR GetLogFolder() { static bool folderReady = false; static char logFolder[MAX_PATH + 1]; int temp; if (!folderReady) { const char* tempPath = getenv("TMPDIR"); if (tempPath == NULL) { tempPath = "/tmp"; } snprintf(logFolder, MAX_PATH, "%s/SundaySky/", tempPath); if (!FileUtils::folderExists(logFolder)) { if ((temp = mkdir(logFolder, 0777)) != 0) { throw SWException("Unable to mkdir 1"); } } snprintf(logFolder, MAX_PATH, "%s/SundaySky/VideoStream/", tempPath); if (!FileUtils::folderExists(logFolder)) { if ((temp = mkdir(logFolder, 0777)) != 0) { throw SWException("Unable to mkdir 2"); } } folderReady = true; } return logFolder; } CLog *CLog::_pLog = NULL; bool CLog::_logEnabled = false; CLog::CLog() { // This mutex is used to prevent two threads/processes writing to the log at the same time //_hFileMutex = CreateMutex (NULL, FALSE, _T("VideoStreamLogMutex")); // pthread_mutex_init(&_hFileMutex, NULL); union semun arg; _hFileMutex = CreateNamedMutex(arg, FALSE, _T("VideoStreamLogMutex")); // Set default locale so that time/date strings will look fine setlocale(LC_ALL, ""); // Get log file name TCHAR logFileName[MAX_PATH + 1]; snprintf(logFileName, MAX_PATH, "%sLog.txt", GetLogFolder()); // Open log file { //FILE_MUTEX_BLOCK; // pthread_mutex_lock(&_hFileMutex); MutexLock(_hFileMutex, INFINITE); _fp = fopen(logFileName, "a+"); //, _SH_DENYNO); if (_fp == NULL) { remove(logFileName); _fp = fopen(logFileName, "a+"); } if (_fp != NULL) { fprintf(_fp, "%s: Logging started for process #%d\n", GetCurrentTimeString(true), getpid()); fflush(_fp); } // pthread_mutex_unlock(&_hFileMutex); ReleaseMutex(_hFileMutex); } } CLog::~CLog() { if (_fp != NULL) { //FILE_MUTEX_BLOCK; // pthread_mutex_lock(&_hFileMutex); MutexLock(_hFileMutex, INFINITE); fclose(_fp); // pthread_mutex_unlock(&_hFileMutex); ReleaseMutex(_hFileMutex); } for (performanceLogsMap::const_iterator i = _performanceLogs.begin(); i != _performanceLogs.end(); i++) { CPerformanceLog *pLog = (*i).second; pLog->Close(); delete pLog; } //CloseHandle(_hFileMutex); // pthread_mutex_destroy(&_hFileMutex); DeleteMutex(_hFileMutex); } int CLog::OpenContext(LPCTSTR name) { if (_logEnabled == true) { CPerformanceLog *pLog = new CPerformanceLog(); int context = pLog->Open(name); _performanceLogs[context] = pLog; return context; } else return -1; } CLog::CPerformanceLog *CLog::GetPerformanceLog(int context) { if (_logEnabled == true && context != -1) { performanceLogsMap::const_iterator log = _performanceLogs.find(context); if (log != _performanceLogs.end()) return (*log).second; } return NULL; } void CLog::WriteItemTime(int context, int itemNum, LPCTSTR title) { CPerformanceLog *pLog = GetPerformanceLog(context); if (pLog != NULL) pLog->WriteItemTime(itemNum, title); } void CLog::CloseContext(int context) { if (_logEnabled == true && context != -1) { performanceLogsMap::iterator log = _performanceLogs.find(context); if (log != _performanceLogs.end()) { CPerformanceLog *pLog = (*log).second; pLog->Close(); delete pLog; _performanceLogs.erase(log); } } } void CLog::WriteLine(LPCTSTR line, int context) { if (_logEnabled == true && _fp != NULL) { //FILE_MUTEX_BLOCK; // pthread_mutex_lock(&_hFileMutex); MutexLock(_hFileMutex, INFINITE); if (context < 0) fprintf(_fp, "%s: %s\n", GetCurrentTimeString(), line); else fprintf(_fp, "%s: (%d) %s\n", GetCurrentTimeString(), context, line); fflush(_fp); // pthread_mutex_unlock(&_hFileMutex); 
ReleaseMutex(_hFileMutex); } } CLog *CLog::Instance() { if (_pLog == NULL) _pLog = new CLog(); return _pLog; } /**********************************/ /* PREFORMANCE LOG IMPLEMENTATION */ /**********************************/ #define PERFORMANCE_DATA_CRITICAL_BLOCK CMutexBlock __block(_hDataMutex) int CLog::CPerformanceLog::GetUniqueId() { int id = 0; try { id = _uniqueId.IncreaseInt(); } catch (CSyncTimeoutException) { } catch (CSharedDataIOException) { } return id; } void CLog::CPerformanceLog::WriteToFile(LPCTSTR filePath) { //PERFORMANCE_DATA_CRITICAL_BLOCK; FILE *fp = fopen(filePath, "w"); if (fp == NULL) return; // Write titles fprintf(fp, "Index"); for (titlesVector::const_iterator i = _titles.begin(); i != _titles.end(); i++) { const std::string &title = (*i); fprintf(fp, ", %s", title.c_str()); } fprintf(fp, ", , Date, Open time, Full name\n"); // Write items int index = 0; for (int index = 0; index < (int)_times.size(); index++) { itemsMap::const_iterator item = _times.find(index); if (item == _times.end()) continue; const itemTimes &itemTimes = (*item).second; bool firstTime = true; fprintf(fp, "%d", index); // Write time breakdown of item for (titlesVector::const_iterator i = _titles.begin(); i != _titles.end(); i++) { fprintf(fp, ", "); const std::string &title = (*i); itemTimes::const_iterator timeIter = itemTimes.find(title); if (timeIter != itemTimes.end()) { const timeb &time = (*(timeIter)).second; int secDiff = (int)time.time - (int)_openTime.time; int milliDiff = (int)time.millitm - (int)_openTime.millitm; if (milliDiff < 0) { secDiff--; milliDiff += 1000; } fprintf(fp, "%d.%03d", secDiff, milliDiff); } } if (index == 0) { // Write date and open time in first row const int MAX_TIME_STR_LEN = 128; time_t curTime = time(0);//get time now struct tm openTM = *localtime(&curTime); localtime(&_openTime.time); TCHAR tmpStr[MAX_TIME_STR_LEN + 1]; strftime(tmpStr, MAX_TIME_STR_LEN, "%x, %X", &openTM); fprintf(fp, ", , %s.%03d, %s", tmpStr, (int)_openTime.millitm, _name.c_str()); } fprintf(fp, "\n"); } fclose(fp); } void CLog::CPerformanceLog::AddTitle(LPCTSTR title) { // Does title already exist for (titlesVector::const_iterator i = _titles.begin(); i != _titles.end(); i++) { const std::string &t = (*i); if (t.compare(title) == 0) return; } _titles.push_back(title); } CLog::CPerformanceLog::CPerformanceLog() : _context(-1), _uniqueId("LogLastID", true) { // This mutex is used to prevent two threads/processes obtaining a unique id at the same time //_hFileMutex = CreateMutex (NULL, FALSE, _T("VideoStreamUniqueIdMutex")); // pthread_mutex_init(&_hFileMutex, NULL); union semun arg; _hFileMutex = CreateNamedMutex(arg, FALSE, "VideoStreamUniqueIdMutex"); // This mutex is used to prevent two threads accessing the performance data at the same time //_hDataMutex = CreateMutex (NULL, FALSE, NULL); pthread_mutex_init(&_hDataMutex, NULL); } CLog::CPerformanceLog::~CPerformanceLog() { if (_context != -1) { Close(); _context = -1; } // pthread_mutex_destroy(&_hFileMutex); DeleteMutex(_hFileMutex); pthread_mutex_destroy(&_hDataMutex); //CloseHandle (_hFileMutex); //CloseHandle (_hDataMutex); } int CLog::CPerformanceLog::Open(LPCTSTR name) { _name = name; _context = GetUniqueId(); ftime(&_openTime); return _context; } void CLog::CPerformanceLog::WriteItemTime(int itemNum, LPCTSTR title) { //PERFORMANCE_DATA_CRITICAL_BLOCK; pthread_mutex_lock(&_hDataMutex); AddTitle(title); struct timeb curTime; ftime(&curTime); (_times[itemNum])[title] = curTime; pthread_mutex_unlock(&_hDataMutex); } void 
CLog::CPerformanceLog::Close() { // Clean log name std::string::size_type lastPos = _name.find_last_of('.'); if (lastPos == std::string::npos) lastPos = _name.length(); std::string::size_type startPos = _name.find_last_of('\\/'); if (startPos == std::string::npos) { startPos = _name.find_last_of('/'); if (startPos == std::string::npos) startPos = 0; } std::string exactName = _name.substr(startPos, lastPos - startPos); // Get log file name char logFileName[MAX_PATH + 1]; snprintf(logFileName, MAX_PATH, "%s/%s_%d.csv", GetLogFolder(), exactName.c_str(), _context); puts(logFileName); WriteToFile(logFileName); } #endif <file_sep>/SVG2Video/SVG2VideoEngine.cpp #include "stdafx.h" #include <iostream> #include "SVG2VideoEngine.h" #include "SWException.h" #include <sys/types.h> #ifndef WIN32 #include <sys/time.h> #include <math.h> #define _stricmp strcasecmp #endif using namespace std; #define USE_SVG_SERVER #ifdef USE_SVG_SERVER #ifdef WIN32 #include <grpc/grpc.h> #include <grpc++/channel_arguments.h> #include <grpc++/channel_interface.h> #include <grpc++/client_context.h> #include <grpc++/create_channel.h> #include <grpc++/credentials.h> #include <grpc++/status.h> #include <grpc++/stream.h> #include "svg_transcoder.pb.h" #include "svg_transcoder.grpc.pb.h" #else #include <grpc/grpc.h> #include <grpc++/support/channel_arguments.h> #include <grpc++/impl/codegen/channel_interface.h> #include <grpc++/impl/codegen/client_context.h> #include <grpc++/create_channel.h> #include <grpc++/security/credentials.h> #include <grpc++/impl/codegen/status.h> //#include <grpc++/stream.h> #include "svg_transcoder.pb.h" #include "svg_transcoder.grpc.pb.h" #endif using grpc::ChannelArguments; using grpc::ChannelInterface; using grpc::ClientContext; using svg_transcoder::SvgTranscoder; #else jclass m_clsH = NULL; jmethodID m_ctor = NULL; #endif SVG2VideoEngine::SVG2VideoEngine(map<string, void *> *commonStorage):m_writer(NULL)//, m_profiler(NULL) { } SVG2VideoEngine::~SVG2VideoEngine(void){ if ( m_writer ) { // Close video delete m_writer; m_writer = NULL; } /*if (m_profiler != NULL){ delete m_profiler; m_profiler = NULL; }*/ } void SVG2VideoEngine::executeEngine(int argc, char** argv) { SVG2VideoArgs svgArgs; if ( !parseArgs( argc, argv, svgArgs ) ) { throw SWException( "Usage: SVG2Video <infile> <outfile> <outwidth> <outheight> " "[-r <framerate>] " "[-duration <duration in sec>] " "[-pf <profiling-file>] " "[-pfconfig <profiling-config-file>]" "[-dbg]" "[-useFrameCache <true|false>" "[-timeFromStartToCache <time in ms>\n" ); } #ifdef WIN32 DWORD starttick = GetTickCount(); #else struct timeval prepartv; gettimeofday(&prepartv, 0); DWORD starttick = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; #endif #ifndef USE_SVG_SERVER map<string, void *>::const_iterator iter = m_commonStorage->find( "java_env" ); if ( iter == m_commonStorage->end() ) throw SWException("Missing Java Environment in common storage.\n"); else m_env = (JNIEnv *)iter->second; iter = m_commonStorage->find( "jvm" ); if ( iter == m_commonStorage->end() ) throw SWException("Missing JVM in common storage.\n"); else m_jvm = (JavaVM *)iter->second; #endif m_logger.info("SVG2VideoEngine::executeEngine start.\n"); //start profiler, if need //int endFrame = int(svgArgs.m_out_rate * svgArgs.m_seconds); //m_logger.info("End frame is %d, duration is %f, out rate is %s\n", endFrame, svgArgs.m_seconds, svgArgs.m_out_rate); //m_profiler = new Profiler(svgArgs.m_profilingFileName, svgArgs.m_profilingConfigFileName, svgArgs.m_out_rate, svgArgs.m_out_scale, 
endFrame); bool wasExceptionThrown = false; SWException exp; //init output m_writer = new FrameWriter(svgArgs.m_outFile.c_str(), svgArgs.m_out_rate, svgArgs.m_outWidth, svgArgs.m_outHeight, svgArgs.m_dbg, 32, /*svgArgs.m_minBitDepth,*/ m_logger); // Render - this where all the logic is actually done... #ifdef USE_SVG_SERVER HRESULT renderResult = renderSvgByServer(svgArgs); #else HRESULT renderResult = renderSvg(svgArgs); #endif if (renderResult != 0) { wasExceptionThrown = true; m_logger.warning("Render failed (RC %d)\n", renderResult); exp = SWException("Render failed (RC %d)\n", renderResult); } if ( m_writer ) { // Close video delete m_writer; m_writer = NULL; } #ifdef WIN32 DWORD endtime = GetTickCount(); int endtick = GetTickCount(); #else gettimeofday(&prepartv, 0); DWORD endtick = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; #endif m_logger.info("Wrote %s in %d ms\n", svgArgs.m_outFile.c_str(), endtick-starttick); #ifdef WIN32 FILETIME procCreationTime, procExitTime, kernelTime, userTime; if ( GetProcessTimes(GetCurrentProcess(), &procCreationTime, &procExitTime, &kernelTime, &userTime) ) { int iUser = FileTime2Ms( userTime ); int iKernel = FileTime2Ms( kernelTime ); int iTotalCPU = iUser+iKernel; m_logger.info("Cpu time=%d ms (user=%d ms, kernel=%d ms)\n", iTotalCPU, iUser, iKernel ); //FOR MAX printf("Time stats=%d,%d,%d,%d\n", endtick-m_starttick, iTotalCPU, iUser, iKernel ); } #endif if (wasExceptionThrown) throw exp; if (renderResult != 0 ) { throw SWException("SVG2Video failed: Premature end of application"); } } #ifdef WIN32 int SVG2VideoEngine::FileTime2Ms(const FILETIME& ft){ LARGE_INTEGER li; li.LowPart = ft.dwLowDateTime; li.HighPart = ft.dwHighDateTime; return (int)(li.QuadPart / 10000); } #endif bool SVG2VideoEngine::parseArgs(int argc,char* argv[], SVG2VideoArgs& args){ if ( argc < 4 ) { throw SWException("Too few arguments\n"); return false; } int iarg = 0; args.m_inFile = argv[iarg++]; args.m_outFile = argv[iarg++]; args.m_outWidth = atoi(argv[iarg++]); if ( args.m_outWidth <= 0 ) { throw SWException( "Width must be > 0. (%d)\n", args.m_outWidth ); return false; } args.m_outHeight = atoi(argv[iarg++]); if ( args.m_outHeight <= 0 ) { throw SWException( "Height must be > 0. 
(%d)\n", args.m_outHeight ); return false; } for ( ; iarg < argc; iarg++ ) { if ( _stricmp(argv[iarg], "-r") == 0 ) { iarg++; if ( iarg < argc ) { args.m_out_rate = atoi(argv[iarg]); //args.m_out_scale = 1; } else { throw SWException( "Expecting frame rate\n"); return false; } } else if ( _stricmp(argv[iarg], "-duration") == 0 ) { iarg++; if ( iarg < argc ) { args.m_seconds = atof(argv[iarg]); //args.m_out_scale = 1; } else { throw SWException( "Expecting duration.\n"); return false; } } else if ( _stricmp(argv[iarg], "-pf") == 0 ) { iarg++; if ( iarg < argc ) { args.m_profilingFileName = argv[iarg]; } else { throw SWException( "Expecting profiling file\n"); return false; } } else if ( _stricmp(argv[iarg], "-pfconfig") == 0 ) { iarg++; if ( iarg < argc ) { args.m_profilingConfigFileName = argv[iarg]; } else { throw SWException( "Expecting profiling config file\n"); return false; } } else if ( _stricmp(argv[iarg], "-min_bitdepth") == 0 ) { iarg++; if ( iarg < argc ) { args.m_minBitDepth = atoi(argv[iarg]); } else { throw SWException( "Expecting minimum bitdepth\n"); return false; } } else if (_stricmp(argv[iarg], "-server_host") == 0) { iarg++; if (iarg < argc) { args.m_svgServerHost = argv[iarg]; } else { throw SWException("Expecting SVG server host\n"); return false; } } else if (_stricmp(argv[iarg], "-useFrameCache") == 0) { iarg++; if (iarg < argc) { args.m_useFrameCache = argv[iarg]; } else { throw SWException("Expecting use frame cache true or false\n"); return false; } } else if (_stricmp(argv[iarg], "-timeFromStartToCache") == 0) { iarg++; if (iarg < argc) { args.m_timeFromStartToCache = atoi(argv[iarg]); } else { throw SWException("Expecting time from start to cache in ms\n"); return false; } } else if (_stricmp(argv[iarg], "-dbg") == 0) { args.m_dbg = true; } else { throw SWException( "Unknown option: %s\n", argv[iarg]); return false; } } return true; } HRESULT SVG2VideoEngine::renderSvg(SVG2VideoArgs& args) { #ifndef USE_SVG_SERVER m_logger.info("Initializing JVM"); initJNI(); m_logger.info("Init JVM finished 2"); // Instantiate transcoder for this rendering char svg[1024]; sprintf_s(svg, "file:/%s", args.m_inFile.c_str()); jstring url = m_env->NewStringUTF(svg); jstring uri = m_env->NewStringUTF(""); int width = args.m_outWidth; int height = args.m_outHeight; int minBitDepth = 32;//args.m_minBitDepth; m_logger.trace("Getting transcoder function pointer"); jobject transcoder = m_env->NewObject(m_clsH, m_ctor, url, uri, width, height); m_logger.trace("Transcoder function pointer is %p", transcoder); if (transcoder == NULL) { throw SWException("Transcoder cannot be created\n"); } // Loop and trnascode the frames. double frameDuration = 1.0 / args.m_out_rate; int frame = 0; long javaTime = 0; long convertTime = 0; long videoTime = 0; DWORD before; DWORD after; for (double rtime = 0.0; rtime < args.m_seconds ; rtime += frameDuration) { m_logger.trace("Transcoding frame at %f (of %f)", rtime, args.m_seconds); m_profiler->startFrameRendering(frame); before = GetTickCount(); // Call transcoding method. jobject image = m_env->CallObjectMethod(transcoder, m_methodTx, rtime); after = GetTickCount(); javaTime += (after-before); before = after; m_profiler->stopFrameRendering(frame); if (image == NULL) { m_logger.trace("ERROR - cannot get image in frame %3d (timestamp %5.2f)\n", frame, rtime); } else { // Create CBitmap from image pixel bytes. 
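			// The transcoder returns the frame as a raw byte[]: when minBitDepth is 32 the
			// bytes are copied into the frame as-is, otherwise MakeFullOpaque() below expands
			// each RGB triple to RGBA with the alpha byte forced to 255.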
jbyteArray *arr = reinterpret_cast<jbyteArray*>(&image); jsize len = m_env->GetArrayLength(*arr); jbyte *pixels = m_env->GetByteArrayElements(*arr, 0); VideoStream::CMutableFramePtr framePtr(width, height, 32); if (minBitDepth == 32) { memcpy( framePtr.GetData(), pixels, len); } else { MakeFullOpaque(width, height, pixels, len/3, framePtr); } after = GetTickCount(); convertTime += (after-before); before = after; m_writer->AddFrame(framePtr); after = GetTickCount(); videoTime += (after-before); before = after; //saveFrameToFile(frame, pixels) //free byte array m_env->ReleaseByteArrayElements(*arr, pixels, 0); m_profiler->setTotalRenderTime(frame); frame++; } m_env->DeleteLocalRef(image); } m_logger.trace("Java time: %d\n", javaTime); m_logger.trace("Convert time: %d\n", convertTime); m_logger.trace("Video time: %d\n", videoTime); if (m_jvm) { m_jvm->DetachCurrentThread(); } m_profiler->WriteProfileFile(0, frame); #endif return 0; } // Close the session with the server // Use non-class method to avoid need for forward declaration of a nested class static void CloseSessionWithServer(const std::unique_ptr<SvgTranscoder::Stub> &stub, const char *reqId, Logger &logger) { svg_transcoder::CloseRequest closeReq; svg_transcoder::CloseResponse closeRes; closeReq.set_requestid(reqId); ClientContext closeCtx; stub->Close(&closeCtx, closeReq, &closeRes); int status = closeRes.status(); logger.info("Status for closing connection to SVG server = %d", status);//EVEV-ERROR? } //Generate a request id based on the output file name std::string SVG2VideoEngine::GetRequestId(string outFile) { // Take only the last folder and the filename to avoid too long strings. size_t last_separator = outFile.find_last_of("\\/"); if (last_separator == string::npos) return outFile; // No separator - return the whole string size_t second_last_separator = outFile.find_last_of("\\/", last_separator - 1); if (second_last_separator == string::npos) return outFile; // No second separator - return the whole string return outFile.substr(second_last_separator + 1); } // Render the file using an external server (over grpc) HRESULT SVG2VideoEngine::renderSvgByServer(SVG2VideoArgs& args) { m_logger.info("Initializing connection to SVG server <%s>", args.m_svgServerHost.c_str()); #ifdef WIN32 std::shared_ptr<ChannelInterface> channel = grpc::CreateChannel(args.m_svgServerHost, grpc::InsecureCredentials(), ChannelArguments()); #else std::shared_ptr<ChannelInterface> channel = grpc::CreateChannel(args.m_svgServerHost, grpc::InsecureChannelCredentials()); #endif std::unique_ptr<SvgTranscoder::Stub> stub = SvgTranscoder::NewStub(channel); m_logger.info("after create channel"); /* * Init */ string reqIdStr = GetRequestId(args.m_outFile); // Unique request id - use out file name const char *reqId = reqIdStr.c_str(); svg_transcoder::InitRequest initReq; svg_transcoder::InitResponse initRes; initReq.set_svgfilename(args.m_inFile.c_str()); initReq.set_width(args.m_outWidth); initReq.set_height(args.m_outHeight); initReq.set_duration((float)args.m_seconds); initReq.set_fps((float)args.m_out_rate); initReq.set_requestid(reqId); initReq.set_usecache(args.m_useFrameCache); initReq.set_timetocachefromstart(args.m_timeFromStartToCache); m_logger.info("Creating session %s", reqId); ClientContext initCtx; stub->Init(&initCtx, initReq, &initRes); m_logger.info("after init call to server"); const std::string msg = initRes.message(); int status = initRes.status(); if (status != 0) { m_logger.error("Init call failed"); throw SWException("Failed to initialize 
connection to svg server <%s>. status=%d, message=%s\n", args.m_svgServerHost.c_str(), status, msg.c_str()); } m_logger.info("Created session %s", reqId); // Loop and trnascode the frames. int numFrames = (int)ceil(args.m_seconds * args.m_out_rate); int frame = 0; long javaTime = 0; long convertTime = 0; long videoTime = 0; DWORD before; DWORD after; //for (double rtime = 0.0; rtime < args.m_seconds; rtime += frameDuration) for (frame = 0; frame < numFrames; frame++ ) { double rtime = ((double)frame) / args.m_out_rate; m_logger.trace("Transcoding frame at %f (of %f) diff=%.20f", rtime, args.m_seconds, args.m_seconds-rtime); //m_profiler->startFrameRendering(frame); #ifdef WIN32 before = GetTickCount(); #else struct timeval prepartv; gettimeofday(&prepartv, 0); before = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; #endif // Get frame svg_transcoder::GetFramesRequest getFramesReq; svg_transcoder::GetFramesResponse getFramesRes; getFramesReq.set_requestid(reqId); getFramesReq.set_time((float)rtime); getFramesReq.set_numberofframes(1); ClientContext getFramesCtx; stub->GetFrames(&getFramesCtx, getFramesReq, &getFramesRes); int status = getFramesRes.status(); if (status != 0) { CloseSessionWithServer(stub, reqId, m_logger); throw SWException("Failed to get frame for time %f: status=%d, message=%s\n", rtime, status, msg.c_str()); } if (getFramesRes.frame_size() != 1) { CloseSessionWithServer(stub, reqId, m_logger); throw SWException("Expected 1 frame and got %d for time %f\n", getFramesRes.frame_size(), rtime); } const std::string &curfrm = getFramesRes.frame(0); unsigned char *pixels = (unsigned char *)curfrm.c_str(); int len = curfrm.size(); #ifdef WIN32 after = GetTickCount(); #else gettimeofday(&prepartv, 0); after = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; #endif javaTime += (after - before); before = after; //m_profiler->stopFrameRendering(frame); if (pixels == NULL) { m_logger.error("ERROR - cannot get image in frame %3d (timestamp %5.2f)\n", frame, rtime); } else { // Create a frame from image pixel bytes. VideoStream::CMutableFramePtr framePtr(args.m_outWidth, args.m_outHeight, 32); memcpy(framePtr.GetData(), pixels, len); #ifdef WIN32 after = GetTickCount(); #else gettimeofday(&prepartv, 0); after = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; #endif convertTime += (after - before); before = after; m_writer->AddFrame(framePtr); #ifdef WIN32 after = GetTickCount(); #else gettimeofday(&prepartv, 0); after = prepartv.tv_sec * 1000 + prepartv.tv_usec / 1000; #endif videoTime += (after - before); before = after; //saveFrameToFile(frame, pixels) //m_profiler->setTotalRenderTime(frame); } } CloseSessionWithServer(stub, reqId, m_logger); m_logger.trace("Java time: %d\n", javaTime); m_logger.trace("Convert time: %d\n", convertTime); m_logger.trace("Video time: %d\n", videoTime); //m_profiler->WriteProfileFile(0, frame); return 0; } void SVG2VideoEngine::MakeFullOpaque(int width, int height, unsigned char *pixels, int count, VideoStream::CMutableFramePtr& framePtr) { unsigned char *rgba = (unsigned char *) framePtr.GetData(); if(count==0) return; //convert rgb to rgba for(int i=0; i<count; i++, rgba+=4, pixels+=3) { int j=0; for(j=0; j < 3; j++) { rgba[j] = pixels[j]; } rgba[j] = 255; } } <file_sep>/NewEncoder/VideoEncoder/WMVGenProfile.cpp //***************************************************************************** // // Microsoft Windows Media // Copyright (C) Microsoft Corporation. All rights reserved. 
// // FileName: GenProfile_lib.cpp // // Abstract: The implementation for the GenProfile static library. // //***************************************************************************** #include "WMVmacros.h" #include <wmsdk.h> #include <assert.h> #include <mlang.h> #include <stdio.h> #include <tchar.h> #include <wmcodeciface.h> #include <wmcodecconst.h> #include <ocidl.h> #include "WMVGenProfile.h" struct PIXEL_FORMAT { const GUID* guidFormat; DWORD dwFourCC; WORD wBitsPerPixel; }; PIXEL_FORMAT PixelFormats[] = { { &WMMEDIASUBTYPE_RGB555, BI_RGB, 16 }, { &WMMEDIASUBTYPE_RGB24, BI_RGB, 24 }, { &WMMEDIASUBTYPE_RGB32, BI_RGB, 32 }, { &WMMEDIASUBTYPE_I420, 0x30323449, 12 }, { &WMMEDIASUBTYPE_IYUV, 0x56555949, 12 }, { &WMMEDIASUBTYPE_YV12, 0x32315659, 12 }, { &WMMEDIASUBTYPE_YUY2, 0x32595559, 16 }, { &WMMEDIASUBTYPE_UYVY, 0x59565955, 16 }, { &WMMEDIASUBTYPE_YVYU, 0x55595659, 16 } }; DWORD WaveFrequency[] = { 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000 }; //------------------------------------------------------------------------------ // Name: CopyMediaType() // Desc: Allocates memory for a WM_MEDIA_TYPE and its format data and // copies an existing media type into it. //------------------------------------------------------------------------------ STDMETHODIMP CopyMediaType( WM_MEDIA_TYPE** ppmtDestination, WM_MEDIA_TYPE* pmtSource ) { if ( !ppmtDestination ) { return E_POINTER; } if ( !pmtSource ) { return E_NOTIMPL; } // // Create enough space for the media type and its format data // *ppmtDestination = (WM_MEDIA_TYPE*) new BYTE[ sizeof( WM_MEDIA_TYPE ) + pmtSource->cbFormat ]; if ( !*ppmtDestination) { return E_OUTOFMEMORY; } // // Copy the media type and the format data // memcpy( *ppmtDestination, pmtSource, sizeof( WM_MEDIA_TYPE ) ); (*ppmtDestination)->pbFormat = ( ((BYTE*) *ppmtDestination) + sizeof( WM_MEDIA_TYPE ) ); // Format data is immediately after media type memcpy( (*ppmtDestination)->pbFormat, pmtSource->pbFormat, pmtSource->cbFormat ); return S_OK; } //------------------------------------------------------------------------------ // Name: EnsureIWMCodecInfo3() // Desc: Creates an IWMCodecInfo3 interface if none exists, and ensures // an outstanding reference either way. This way the IWMCodecInfo3 // object is guaranteed to exist and isn't released too many times. //------------------------------------------------------------------------------ STDMETHODIMP EnsureIWMCodecInfo3( IWMCodecInfo3** ppCodecInfo3 ) { HRESULT hr = S_OK; if ( !ppCodecInfo3 ) { return E_POINTER; } do { if ( !*ppCodecInfo3 ) { // // Create a new IWMCodecInfo3 object // IWMProfileManager* pProfileManager; hr = WMCreateProfileManager( &pProfileManager ); if ( FAILED( hr ) ) { break; } assert( pProfileManager ); hr = pProfileManager->QueryInterface( IID_IWMCodecInfo3, (void**) ppCodecInfo3 ); SAFE_RELEASE( pProfileManager ); if ( FAILED( hr ) ) { break; } } else { // // Add a reference to the existing object, so that it won't be destroyed during cleanup // SAFE_ADDREF( (*ppCodecInfo3) ); } assert( *ppCodecInfo3 ); // // It should now not matter if the IWMCodecInfo3 was just created or was passed in // } while ( FALSE ); return hr; } //------------------------------------------------------------------------------ // Name: SetCodecVBRSettings() // Desc: Enables VBR with the specified number of passes, or disables it. 
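// Note:  A failure while *disabling* VBR is swallowed (the codec may simply
//        not support VBR at all); a failure while *enabling* it is returned
//        to the caller. When VBR is enabled, the pass count is applied via
//        the g_wszNumPasses enumeration setting.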
//------------------------------------------------------------------------------ STDMETHODIMP SetCodecVBRSettings( IWMCodecInfo3* pCodecInfo3, GUID guidCodecType, DWORD dwCodecIndex, BOOL fIsVBR, DWORD dwVBRPasses ) { HRESULT hr; if ( !pCodecInfo3 ) { return E_INVALIDARG; } do { // // Configure the codec to use or not use VBR as requested // hr = pCodecInfo3->SetCodecEnumerationSetting( guidCodecType, dwCodecIndex, g_wszVBREnabled, WMT_TYPE_BOOL, (BYTE*) &fIsVBR, sizeof( BOOL ) ); if ( FAILED( hr ) ) { // // If VBR is requested, then it's a problem, but otherwise the codec may just not support VBR // if ( ( !fIsVBR ) && FAILED( hr )/*( NS_E_UNSUPPORTED_PROPERTY == hr )*/ ) { hr = S_OK; } else { break; } } if ( fIsVBR ) { hr = pCodecInfo3->SetCodecEnumerationSetting( guidCodecType, dwCodecIndex, g_wszNumPasses, WMT_TYPE_DWORD, (BYTE*) &dwVBRPasses, sizeof( DWORD ) ); if ( FAILED( hr ) ) { break; } } //RM: test /* DWORD ComplexityLive = 0; DWORD ComplexityOffline = 0; DWORD ComplexityMax = 0; hr = GetComplexitySettings(pCodecInfo3, guidCodecType, dwCodecIndex, &ComplexityLive, &ComplexityOffline, &ComplexityMax); if ( FAILED( hr ) ) { break; }*/ } while ( FALSE ); return hr; } //------------------------------------------------------------------------------ // Name: SetStreamLanguage() // Desc: Sets the language in the stream configuration. //------------------------------------------------------------------------------ STDMETHODIMP SetStreamLanguage( IWMStreamConfig * pStreamConfig, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig3 * pStreamConfig3 = NULL; IMultiLanguage * pMLang = NULL; BSTR bstrLanguage = NULL; do { hr = CoCreateInstance( CLSID_CMultiLanguage, NULL, CLSCTX_ALL, IID_IMultiLanguage, (VOID **) &pMLang ); if( FAILED( hr ) ) { break; } hr = pMLang->GetRfc1766FromLcid( dwLanguage, &bstrLanguage ); if( FAILED( hr ) ) { break; } hr = pStreamConfig->QueryInterface( IID_IWMStreamConfig3, (void**)&pStreamConfig3 ); if( FAILED( hr ) ) { break; } hr = pStreamConfig3->SetLanguage( bstrLanguage ); if( FAILED( hr ) ) { break; } } while (FALSE); SAFE_RELEASE( pMLang ); SAFE_RELEASE( pStreamConfig3 ); if ( !bstrLanguage ) { SysFreeString( bstrLanguage ); } return hr; } /* ** Functions that create media types for the various stream types */ //------------------------------------------------------------------------------ // Name: CreateUncompressedAudioMediaType() // Desc: Initializes a WM_MEDIA_TYPE for uncompressed audio. 
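// Note:  The result is a WMMEDIASUBTYPE_PCM / WMFORMAT_WaveFormatEx type;
//        nBlockAlign and nAvgBytesPerSec are derived from the sample rate,
//        channel count and bit depth supplied by the caller.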
//------------------------------------------------------------------------------ STDMETHODIMP CreateUncompressedAudioMediaType( WM_MEDIA_TYPE** ppmtMediaType, DWORD dwSamplesPerSecond, WORD wNumChannels, WORD wBitsPerSample ) { HRESULT hr = S_OK; WM_MEDIA_TYPE mtUncompressedAudio; WAVEFORMATEX wfxUncompressedAudio; if ( !ppmtMediaType ) { return E_POINTER; } // // pCodecInfo3 is allowed to be NULL, since CreateMediatypeForFormat calls EnsureIWMCodecInfo3 // do { // // Setup the local copy of the uncompressed media type // ZeroMemory( &mtUncompressedAudio, sizeof( mtUncompressedAudio ) ); mtUncompressedAudio.majortype = WMMEDIATYPE_Audio; mtUncompressedAudio.subtype = WMMEDIASUBTYPE_PCM; mtUncompressedAudio.bFixedSizeSamples = TRUE; mtUncompressedAudio.bTemporalCompression = FALSE; mtUncompressedAudio.lSampleSize = wNumChannels * wBitsPerSample / 8; mtUncompressedAudio.formattype = WMFORMAT_WaveFormatEx; mtUncompressedAudio.pUnk = NULL; mtUncompressedAudio.cbFormat = sizeof( WAVEFORMATEX ); mtUncompressedAudio.pbFormat = (BYTE*) &wfxUncompressedAudio; // // Configure the WAVEFORMATEX structure for the uncompressed audio // ZeroMemory( &wfxUncompressedAudio, sizeof( wfxUncompressedAudio ) ); wfxUncompressedAudio.wFormatTag = 1; wfxUncompressedAudio.nChannels = wNumChannels; wfxUncompressedAudio.nSamplesPerSec = dwSamplesPerSecond; wfxUncompressedAudio.nAvgBytesPerSec = dwSamplesPerSecond * ( wNumChannels * wBitsPerSample / 8 ); wfxUncompressedAudio.nBlockAlign = wNumChannels * wBitsPerSample / 8; wfxUncompressedAudio.wBitsPerSample = wBitsPerSample; wfxUncompressedAudio.cbSize = sizeof( WAVEFORMATEX ); // // Return a copy of the media type to the caller, since the mediatype is on the stack // hr = CopyMediaType( ppmtMediaType, &mtUncompressedAudio ); if ( FAILED( hr ) ) { break; } } while( FALSE ); return( hr ); } //------------------------------------------------------------------------------ // Name: CreateVideoMediaType() // Desc: Initializes a WM_MEDIA_TYPE for video. 
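// Note:  The codec's default media type is obtained via
//        CreateMediatypeForFormat() and its WMVIDEOINFOHEADER is then patched
//        with the requested bitrate, frame size and frame rate.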
//------------------------------------------------------------------------------ STDMETHODIMP CreateVideoMediaType( WM_MEDIA_TYPE** ppmtMediaType, IWMCodecInfo3* pCodecInfo3, DWORD dwCodecIndex, DWORD dwFPS, DWORD dwWidth, DWORD dwHeight, DWORD dwBitrate, BOOL fIsVBR, DWORD dwNumberOfPasses ) { HRESULT hr = S_OK; WM_MEDIA_TYPE *pMediaType = NULL; WMVIDEOINFOHEADER *pVIH; if ( !ppmtMediaType ) { return E_POINTER; } // // pCodecInfo3 is allowed to be NULL, since CreateMediatypeForFormat calls EnsureIWMCodecInfo3 // do { // // Get the mediatype for the codec // hr = CreateMediatypeForFormat( &pMediaType, pCodecInfo3, NULL, WMMEDIATYPE_Video, dwCodecIndex, 0, fIsVBR, dwNumberOfPasses ); if( FAILED( hr ) ) { break; } assert( pMediaType ); // // Configure the WMVIDEOINFOHEADER structure of the media type // pVIH = (WMVIDEOINFOHEADER*) pMediaType->pbFormat; pVIH->dwBitRate = dwBitrate; pVIH->rcSource.right = dwWidth; pVIH->rcSource.bottom = dwHeight; pVIH->rcTarget.right = dwWidth; pVIH->rcTarget.bottom = dwHeight; pVIH->bmiHeader.biWidth = dwWidth; pVIH->bmiHeader.biHeight = dwHeight; pVIH->AvgTimePerFrame = ( (LONGLONG) 10000000 ) / ( (LONGLONG) dwFPS ); // // Return a copy of the media type to the caller // hr = CopyMediaType( ppmtMediaType, pMediaType ); if ( FAILED( hr ) ) { break; } } while( FALSE ); SAFE_ARRAY_DELETE( pMediaType ); return( hr ); } //------------------------------------------------------------------------------ // Name: CreateUncompressedVideoMediaType() // Desc: Initializes a WM_MEDIA_TYPE for uncompressed video. //------------------------------------------------------------------------------ STDMETHODIMP CreateUncompressedVideoMediaType( WM_MEDIA_TYPE** ppmtMediaType, GUID guidFormat, DWORD dwFourCC, WORD wBitsPerPixel, BYTE* pbPaletteData, DWORD cbPaletteDataSize, DWORD dwFPS, DWORD dwWidth, DWORD dwHeight ) { const DWORD BITFIELD_DATA_SIZE = sizeof( RGBQUAD ) * 3; HRESULT hr; WM_MEDIA_TYPE mtUncompressedVideo; WMVIDEOINFOHEADER* pvihUncompressedVideo; BYTE* pbFormatData = NULL; DWORD dwFormatDataSize; DWORD dwMaxColors; BOOL fPalettePresent; BYTE* pbPostVIHData; DWORD cbExpectedPostVIHDataSize; BOOL fBitfieldsPresent; if ( !ppmtMediaType ) { return E_POINTER; } // // pCodecInfo3 is allowed to be NULL, since CreateMediatypeForFormat calls EnsureIWMCodecInfo3 // // // The width must be on a byte boundry // if ( dwWidth * wBitsPerPixel % 8 != 0 ) { return E_INVALIDARG; } // // The width, height, and frames per second must all be non-zero // if ( 0 == dwWidth || 0 == dwHeight || 0 == dwFPS ) { return E_INVALIDARG; } do { // // Allocate space for the format data ( WMVIDEOINFOHEADER + pallete data ) // dwFormatDataSize = sizeof( WMVIDEOINFOHEADER ); cbExpectedPostVIHDataSize = 0; // // If there are <= 8 bits / pixel, then there needs to be palette data following the WMVIDEOINFOHEADER // fPalettePresent = ( wBitsPerPixel <= 8 ); if ( fPalettePresent ) { dwMaxColors = 1 << wBitsPerPixel; cbExpectedPostVIHDataSize = sizeof( RGBQUAD ) * dwMaxColors; } // // If the format uses bitfields, then make sure the data is following // fBitfieldsPresent = ( BI_BITFIELDS == dwFourCC ); if ( fBitfieldsPresent ) { cbExpectedPostVIHDataSize = BITFIELD_DATA_SIZE; } if ( fPalettePresent || fBitfieldsPresent ) { dwFormatDataSize += cbExpectedPostVIHDataSize; if ( !pbPaletteData || ( cbPaletteDataSize != cbExpectedPostVIHDataSize ) ) { hr = E_INVALIDARG; break; } } pbFormatData = new BYTE[ dwFormatDataSize ]; if ( !pbFormatData ) { hr = E_OUTOFMEMORY; break; } ZeroMemory( pbFormatData, 
dwFormatDataSize ); pbPostVIHData = pbFormatData + sizeof( WMVIDEOINFOHEADER ); pvihUncompressedVideo = (WMVIDEOINFOHEADER*) pbFormatData; // // Set up the local copy of the uncompressed media type // ZeroMemory( &mtUncompressedVideo, sizeof( mtUncompressedVideo ) ); mtUncompressedVideo.majortype = WMMEDIATYPE_Video; mtUncompressedVideo.subtype = guidFormat; mtUncompressedVideo.bFixedSizeSamples = TRUE; mtUncompressedVideo.bTemporalCompression = FALSE; mtUncompressedVideo.lSampleSize = wBitsPerPixel * dwWidth * dwHeight / 8; mtUncompressedVideo.formattype = WMFORMAT_VideoInfo; mtUncompressedVideo.pUnk = NULL; mtUncompressedVideo.cbFormat = dwFormatDataSize; mtUncompressedVideo.pbFormat = (BYTE*) pbFormatData; // // Configure the WMVIDEOINFOHEADER structure for uncompressed video // pvihUncompressedVideo->dwBitRate = mtUncompressedVideo.lSampleSize * dwFPS * 8; pvihUncompressedVideo->rcSource.right = dwWidth; pvihUncompressedVideo->rcSource.bottom = dwHeight; pvihUncompressedVideo->rcTarget.right = dwWidth; pvihUncompressedVideo->rcTarget.bottom = dwHeight; pvihUncompressedVideo->bmiHeader.biSizeImage = mtUncompressedVideo.lSampleSize; pvihUncompressedVideo->bmiHeader.biPlanes = 1; pvihUncompressedVideo->bmiHeader.biSize = sizeof( BITMAPINFOHEADER ); pvihUncompressedVideo->bmiHeader.biWidth = dwWidth; pvihUncompressedVideo->bmiHeader.biHeight = dwHeight; pvihUncompressedVideo->bmiHeader.biCompression = dwFourCC; pvihUncompressedVideo->bmiHeader.biBitCount = wBitsPerPixel; pvihUncompressedVideo->AvgTimePerFrame = ( (LONGLONG) 10000000 ) / ( (LONGLONG) dwFPS ); // // Copy the palette information, if present // if ( ( fPalettePresent || fBitfieldsPresent ) && pbPaletteData ) { memcpy( pbPostVIHData, pbPaletteData, cbExpectedPostVIHDataSize ); } // // Return a copy of the media type to the caller, since the media type is on the stack // hr = CopyMediaType( ppmtMediaType, &mtUncompressedVideo ); if ( FAILED( hr ) ) { break; } } while( FALSE ); SAFE_ARRAY_DELETE( pbFormatData ); return( hr ); } //------------------------------------------------------------------------------ // Name: CreateScriptMediaType() // Desc: Initializes a WM_MEDIA_TYPE for script. //------------------------------------------------------------------------------ STDMETHODIMP CreateScriptMediaType( WM_MEDIA_TYPE** ppmtMediaType ) { HRESULT hr = S_OK; WM_MEDIA_TYPE wmtMediaType; WMSCRIPTFORMAT wsfScriptFormat; assert( ppmtMediaType ); do { ZeroMemory( &wmtMediaType, sizeof( wmtMediaType ) ); // // Configure media type // wmtMediaType.majortype = WMMEDIATYPE_Script; wmtMediaType.subtype = GUID_NULL; wmtMediaType.bFixedSizeSamples = FALSE; wmtMediaType.bTemporalCompression = TRUE; wmtMediaType.lSampleSize = 0; wmtMediaType.formattype = WMFORMAT_Script; wmtMediaType.cbFormat = sizeof( WMSCRIPTFORMAT ); wmtMediaType.pbFormat = (BYTE*) &wsfScriptFormat; wsfScriptFormat.scriptType = WMSCRIPTTYPE_TwoStrings; // // Return a copy of the media type to the caller // hr = CopyMediaType( ppmtMediaType, &wmtMediaType ); if ( FAILED( hr ) ) { break; } } while ( FALSE ); return hr; } //------------------------------------------------------------------------------ // Name: CreateImageMediaType() // Desc: Initializes a WM_MEDIA_TYPE for image. 
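// Note:  The parameter order is (width, height, bitrate). CreateImageStream()
//        below currently calls this helper as (bitrate, width, height), which
//        transposes the three values.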
//------------------------------------------------------------------------------ STDMETHODIMP CreateImageMediaType( WM_MEDIA_TYPE** ppmtMediaType, DWORD dwWidth, DWORD dwHeight, DWORD dwBitrate ) { static const WORD BIT_COUNT = 24; HRESULT hr = S_OK; WM_MEDIA_TYPE wmtMediaType; WMVIDEOINFOHEADER vihVideoInfo; do { ZeroMemory( &wmtMediaType, sizeof( wmtMediaType ) ); // // Set up the WM_MEDIA_TYPE structure // wmtMediaType.majortype = WMMEDIATYPE_Image; wmtMediaType.subtype = WMMEDIASUBTYPE_RGB24; wmtMediaType.bFixedSizeSamples = FALSE; wmtMediaType.bTemporalCompression = FALSE; wmtMediaType.lSampleSize = 0; wmtMediaType.bFixedSizeSamples = FALSE; wmtMediaType.bTemporalCompression = FALSE; wmtMediaType.lSampleSize = 0; wmtMediaType.formattype = WMFORMAT_VideoInfo; wmtMediaType.pUnk = NULL; wmtMediaType.cbFormat = sizeof( WMVIDEOINFOHEADER ); wmtMediaType.pbFormat = (BYTE*) &vihVideoInfo; // // Set up the WMVIDEOINFOHEADER structure // ZeroMemory( &vihVideoInfo, sizeof( vihVideoInfo ) ); vihVideoInfo.rcSource.left = 0; vihVideoInfo.rcSource.top = 0; vihVideoInfo.rcSource.bottom = dwHeight; vihVideoInfo.rcSource.right = dwWidth; vihVideoInfo.rcTarget = vihVideoInfo.rcSource; vihVideoInfo.dwBitRate = dwBitrate; vihVideoInfo.dwBitErrorRate = 0; vihVideoInfo.AvgTimePerFrame = 0; vihVideoInfo.bmiHeader.biSize = sizeof( BITMAPINFOHEADER ); vihVideoInfo.bmiHeader.biWidth = dwWidth; vihVideoInfo.bmiHeader.biHeight = dwHeight; vihVideoInfo.bmiHeader.biPlanes = 1; vihVideoInfo.bmiHeader.biBitCount = BIT_COUNT; vihVideoInfo.bmiHeader.biCompression = BI_RGB; vihVideoInfo.bmiHeader.biSizeImage = ( dwHeight * dwWidth * vihVideoInfo.bmiHeader.biBitCount ) / 8; vihVideoInfo.bmiHeader.biXPelsPerMeter = 0; vihVideoInfo.bmiHeader.biYPelsPerMeter = 0; vihVideoInfo.bmiHeader.biClrUsed = 0; vihVideoInfo.bmiHeader.biClrImportant = 0; // // Return a copy of the media type to the caller // hr = CopyMediaType( ppmtMediaType, &wmtMediaType ); if ( FAILED( hr ) ) { break; } } while ( FALSE ); return hr; } //------------------------------------------------------------------------------ // Name: CreateWebMediaType() // Desc: Initializes a WM_MEDIA_TYPE for Web media. //------------------------------------------------------------------------------ STDMETHODIMP CreateWebMediaType( WM_MEDIA_TYPE** ppmtMediaType ) { HRESULT hr = S_OK; WM_MEDIA_TYPE wmtMediaType; WMT_WEBSTREAM_FORMAT wwfWebFormat; do { ZeroMemory( &wmtMediaType, sizeof( wmtMediaType ) ); // // Configure media type // wmtMediaType.majortype = WMMEDIATYPE_FileTransfer; wmtMediaType.subtype = WMMEDIASUBTYPE_WebStream; wmtMediaType.bFixedSizeSamples = FALSE; wmtMediaType.bTemporalCompression = TRUE; wmtMediaType.lSampleSize = 0; wmtMediaType.formattype = WMFORMAT_WebStream; wmtMediaType.pUnk = NULL; wmtMediaType.cbFormat = sizeof( WMT_WEBSTREAM_FORMAT ); wmtMediaType.pbFormat = (BYTE*) &wwfWebFormat; ZeroMemory( &wwfWebFormat, sizeof( wwfWebFormat ) ); wwfWebFormat.cbSize = sizeof( WMT_WEBSTREAM_FORMAT ); wwfWebFormat.cbSampleHeaderFixedData = sizeof( WMT_WEBSTREAM_SAMPLE_HEADER ); wwfWebFormat.wVersion = 1; wwfWebFormat.wReserved = 0; // // Return a copy of the media type to the caller // hr = CopyMediaType( ppmtMediaType, &wmtMediaType ); if ( FAILED( hr ) ) { break; } } while ( FALSE ); return hr; } //------------------------------------------------------------------------------ // Name: CreateFileMediaType() // Desc: Initializes a WM_MEDIA_TYPE for file transfer. 
//------------------------------------------------------------------------------ STDMETHODIMP CreateFileMediaType( WM_MEDIA_TYPE** ppmtMediaType ) { HRESULT hr = S_OK; WM_MEDIA_TYPE wmtMediaType; do { ZeroMemory( &wmtMediaType, sizeof( wmtMediaType ) ); // // Configure media type // wmtMediaType.majortype = WMMEDIATYPE_FileTransfer; wmtMediaType.subtype = GUID_NULL; wmtMediaType.bFixedSizeSamples = FALSE; wmtMediaType.bTemporalCompression = FALSE; wmtMediaType.lSampleSize = 0; // // Return a copy of the media type to the caller // hr = CopyMediaType( ppmtMediaType, &wmtMediaType ); if ( FAILED( hr ) ) { break; } } while ( FALSE ); return hr; } /* ** Functions that create various stream types */ //------------------------------------------------------------------------------ // Name: CreateAudioStream() // Desc: Creates an audio stream and returns its configuration object. //------------------------------------------------------------------------------ STDMETHODIMP CreateAudioStream( IWMStreamConfig** ppStreamConfig, IWMCodecInfo3* pCodecInfo3, IWMProfile *pProfile, DWORD dwBufferWindow, DWORD dwCodecIndex, DWORD dwFormatIndex, BOOL fIsVBR, DWORD dwNumberOfPasses, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; IWMStreamConfig* pFormatConfig = NULL; IWMPropertyVault* pPropertyVault = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMMediaProps* pMediaProps = NULL; DWORD dwBitrate; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } // // pCodecInfo3 is allowed to be NULL, since CreateMediatypeForFormat calls EnsureIWMCodecInfo3 // do { // // Create the audio stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_Audio, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Create the media type and get the format's stream configuration // hr = CreateMediatypeForFormat( &pMediaType, pCodecInfo3, &pFormatConfig, WMMEDIATYPE_Audio, dwCodecIndex, dwFormatIndex, fIsVBR, dwNumberOfPasses ); if ( FAILED( hr ) ) { break; } assert( pMediaType ); // // Configure the new audio stream // hr = pFormatConfig->GetBitrate( &dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBitrate( dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( dwBufferWindow ); if ( FAILED( hr ) ) { break; } // // Set the media type on the stream // hr = pStreamConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); hr = pMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } // // If this profile will be used by the writer, we should set // the VBREnabled attribute properly // if ( SUCCEEDED( pStreamConfig->QueryInterface( IID_IWMPropertyVault, (void**)&pPropertyVault ) ) ) { assert( pPropertyVault ); pPropertyVault->SetProperty( g_wszVBREnabled, WMT_TYPE_BOOL, (BYTE*)&fIsVBR, sizeof( fIsVBR ) ); SAFE_RELEASE( pPropertyVault ); } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_RELEASE( pFormatConfig ); SAFE_RELEASE( pPropertyVault ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateAudioStream() // Desc: Creates an audio stream and returns its configuration object. 
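// Note:  Despite the Name/Desc lines above (copied from CreateAudioStream),
//        the function that follows is CreateUncompressedAudioStream(), which
//        sets up a PCM stream with a zero buffer window.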
//------------------------------------------------------------------------------ STDMETHODIMP CreateUncompressedAudioStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwSamplesPerSecond, WORD wNumChannels, WORD wBitsPerSample, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; IWMPropertyVault* pPropertyVault = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMMediaProps* pMediaProps = NULL; BOOL fFalse = FALSE; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } // // pCodecInfo3 is allowed to be NULL, since CreateMediatypeForFormat calls EnsureIWMCodecInfo3 // do { // // Create the audio stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_Audio, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Create the media type for the uncompressed audio // hr = CreateUncompressedAudioMediaType( &pMediaType, dwSamplesPerSecond, wNumChannels, wBitsPerSample ); if ( FAILED( hr ) ) { break; } assert( pMediaType ); // // Configure the new audio stream // hr = pStreamConfig->SetBitrate( dwSamplesPerSecond * wNumChannels * wBitsPerSample ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( 0 ); if ( FAILED( hr ) ) { break; } // // Set the media type on the stream // hr = pStreamConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); hr = pMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } // // If this profile will be used by the writer, we should set // VBREnabled attribute properly // if ( SUCCEEDED( pStreamConfig->QueryInterface( IID_IWMPropertyVault, (void**)&pPropertyVault ) ) ) { assert( pPropertyVault ); pPropertyVault->SetProperty( g_wszVBREnabled, WMT_TYPE_BOOL, (BYTE*)&fFalse, sizeof( fFalse ) ); SAFE_RELEASE( pPropertyVault ); } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_RELEASE( pPropertyVault ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateVideoStream() // Desc: Creates a video stream and returns its configuration object. 
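// Note:  vbrMode maps to the encoder pass count (VBR_OFF -> 0,
//        VBR_QUALITYBASED -> 1, VBR_CONSTRAINED / VBR_UNCONSTRAINED -> 2).
//        Quality-based VBR sets g_wszVBRQuality; constrained VBR sets
//        g_wszVBRBitrateMax and, if non-zero, g_wszVBRBufferWindowMax.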
//------------------------------------------------------------------------------ STDMETHODIMP CreateVideoStream( IWMStreamConfig** ppStreamConfig, IWMCodecInfo3* pCodecInfo3, IWMProfile *pProfile, DWORD dwCodecIndex, DWORD dwBitrate, DWORD dwBufferWindow, DWORD dwWidth, DWORD dwHeight, DWORD dwFPS, DWORD dwQuality, DWORD dwSecPerKey, BOOL fIsVBR, VIDEO_VBR_MODE vbrMode, DWORD dwVBRQuality, DWORD dwMaxBitrate, DWORD dwMaxBufferWindow, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; IWMPropertyVault* pPropertyVault = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMVideoMediaProps* pVideoMediaProps = NULL; DWORD dwNumberOfPasses; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } // // pCodecInfo3 is allowed to be NULL, since CreateVideoMediatype calls EnsureIWMCodecInfo3 // do { switch( vbrMode ) { case VBR_OFF: dwNumberOfPasses = 0; break; case VBR_QUALITYBASED: dwNumberOfPasses = 1; break; case VBR_CONSTRAINED: dwNumberOfPasses = 2; break; case VBR_UNCONSTRAINED: dwNumberOfPasses = 2; break; default: hr = E_FAIL; break; } if ( FAILED( hr ) ) { break; } // // Create the video stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_Video, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Configure the new video stream // hr = pStreamConfig->SetBitrate( dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( dwBufferWindow ); if ( FAILED( hr ) ) { break; } // // Set the media type for the stream // hr = CreateVideoMediaType( &pMediaType, pCodecInfo3, dwCodecIndex, dwFPS, dwWidth, dwHeight, dwBitrate, fIsVBR, dwNumberOfPasses ); if ( FAILED( hr ) ) { break; } assert( pMediaType ); hr = pStreamConfig->QueryInterface( IID_IWMVideoMediaProps, (void**) &pVideoMediaProps ); if ( FAILED( hr ) ) { break; } assert( pVideoMediaProps ); hr = pVideoMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } // // Set quality and MaxKeyFrameSpacing on the IWMVideoMediaProps object // hr = pVideoMediaProps->SetQuality( dwQuality ); if ( FAILED( hr ) ) { break; } hr = pVideoMediaProps->SetMaxKeyFrameSpacing( (LONGLONG) dwSecPerKey * 10000000 ); if ( FAILED( hr ) ) { break; } // // If this profile will be used by writer, we should set the properties // on the stream configuration // if ( SUCCEEDED( pStreamConfig->QueryInterface( IID_IWMPropertyVault, (void**)&pPropertyVault ) ) ) { assert( pPropertyVault ); hr = pPropertyVault->SetProperty( g_wszVBREnabled, WMT_TYPE_BOOL, (BYTE*) &fIsVBR, sizeof( fIsVBR ) ); if ( FAILED( hr ) ) { break; } switch( vbrMode ) { case VBR_QUALITYBASED: // // Only the quality needs to be set // hr = pPropertyVault->SetProperty( g_wszVBRQuality, WMT_TYPE_DWORD, (BYTE*) &dwVBRQuality, sizeof( DWORD ) ); break; case VBR_CONSTRAINED: // // The peak bitrate and, optionally, max buffer window need to be set // hr = pPropertyVault->SetProperty( g_wszVBRBitrateMax, WMT_TYPE_DWORD, (BYTE*) &dwMaxBitrate, sizeof( DWORD ) ); if ( FAILED( hr ) ) { break; } if( dwMaxBufferWindow != 0 ) { hr = pPropertyVault->SetProperty( g_wszVBRBufferWindowMax, WMT_TYPE_DWORD, (BYTE*) &dwMaxBufferWindow, sizeof( DWORD ) ); } break; case VBR_UNCONSTRAINED: break; case VBR_OFF: break; default: hr = E_FAIL; break; } if ( FAILED( hr ) ) { break; } SAFE_RELEASE( pPropertyVault ); } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); 
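// Drop the local references; on success the stream configuration was
// AddRef'd above and returned to the caller through *ppStreamConfig.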
SAFE_RELEASE( pStreamConfig ); SAFE_RELEASE( pPropertyVault ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pVideoMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateUncompressedVideoStream() // Desc: Creates an uncompressed video stream and returns its configuration object. //------------------------------------------------------------------------------ STDMETHODIMP CreateUncompressedVideoStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, GUID guidFormat, DWORD dwFourCC, WORD wBitsPerPixel, BYTE* pbPaletteData, DWORD cbPaletteDataSize, DWORD dwWidth, DWORD dwHeight, DWORD dwFPS, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; IWMPropertyVault* pPropertyVault = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMVideoMediaProps* pVideoMediaProps = NULL; BOOL fFalse = FALSE; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } // // pCodecInfo3 is allowed to be NULL, since CreateVideoMediatype calls EnsureIWMCodecInfo3 // do { // // Create the video stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_Video, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Configure the new video stream // hr = pStreamConfig->SetBitrate( wBitsPerPixel * dwWidth * dwHeight * dwFPS ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( 0 ); if ( FAILED( hr ) ) { break; } // // Set the media type for the stream // hr = CreateUncompressedVideoMediaType( &pMediaType, guidFormat, dwFourCC, wBitsPerPixel, pbPaletteData, cbPaletteDataSize, dwFPS, dwWidth, dwHeight ); if ( FAILED( hr ) ) { break; } assert( pMediaType ); hr = pStreamConfig->QueryInterface( IID_IWMVideoMediaProps, (void**) &pVideoMediaProps ); if ( FAILED( hr ) ) { break; } assert( pVideoMediaProps ); hr = pVideoMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } // // If this profile will be used by writer, we should set the properties // on the stream configuration // if ( SUCCEEDED( pStreamConfig->QueryInterface( IID_IWMPropertyVault, (void**)&pPropertyVault ) ) ) { assert( pPropertyVault ); hr = pPropertyVault->SetProperty( g_wszVBREnabled, WMT_TYPE_BOOL, (BYTE*) &fFalse, sizeof( fFalse ) ); if ( FAILED( hr ) ) { break; } SAFE_RELEASE( pPropertyVault ); } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_RELEASE( pPropertyVault ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pVideoMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateScriptStream() // Desc: Creates a scripat stream and returns its configuration object. 
//------------------------------------------------------------------------------ STDMETHODIMP CreateScriptStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBitrate, DWORD dwBufferWindow, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMMediaProps* pMediaProps = NULL; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } do { // // Create the script stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_Script, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Configure the new stream // hr = pStreamConfig->SetBitrate( dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( dwBufferWindow ); if ( FAILED( hr ) ) { break; } // // Set the media type for the script stream // hr = CreateScriptMediaType( &pMediaType ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); hr = pMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateImageStream() // Desc: Creates an image stream and returns its configuration object. //------------------------------------------------------------------------------ STDMETHODIMP CreateImageStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBitrate, DWORD dwBufferWindow, DWORD dwWidth, DWORD dwHeight, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMMediaProps* pMediaProps = NULL; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } do { // // Create the image stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_Image, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Configure the new stream // hr = pStreamConfig->SetBitrate( dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( dwBufferWindow ); if ( FAILED( hr ) ) { break; } // // Set the media type for the image stream // hr = CreateImageMediaType( &pMediaType, dwBitrate, dwWidth, dwHeight ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); hr = pMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateWebStream() // Desc: Creates a Web stream and returns its configuration object. 
//------------------------------------------------------------------------------ STDMETHODIMP CreateWebStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBitrate, DWORD dwBufferWindow, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMMediaProps* pMediaProps = NULL; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } do { // // Create the Web stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_FileTransfer, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Configure the new stream // hr = pStreamConfig->SetBitrate( dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( dwBufferWindow ); if ( FAILED( hr ) ) { break; } // // Set the media type for the stream // hr = CreateWebMediaType( &pMediaType ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); hr = pMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateFileStream() // Desc: Creates a file stream and returns its configuration object. //------------------------------------------------------------------------------ STDMETHODIMP CreateFileStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBufferWindow, DWORD dwBitrate, WORD wMaxFilenameLength, LCID dwLanguage ) { HRESULT hr = S_OK; IWMStreamConfig* pStreamConfig = NULL; IWMStreamConfig2* pStreamConfig2 = NULL; WM_MEDIA_TYPE* pMediaType = NULL; IWMMediaProps* pMediaProps = NULL; if ( !ppStreamConfig ) { return E_POINTER; } if ( !pProfile ) { return E_INVALIDARG; } do { // // Create the file stream // hr = pProfile->CreateNewStream( WMMEDIATYPE_FileTransfer, &pStreamConfig ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig ); // // Configure the new stream // hr = pStreamConfig->SetBitrate( dwBitrate ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->SetBufferWindow( dwBufferWindow ); if ( FAILED( hr ) ) { break; } // // Set the media type for the stream // hr = CreateFileMediaType( &pMediaType ); if ( FAILED( hr ) ) { break; } hr = pStreamConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); hr = pMediaProps->SetMediaType( pMediaType ); if ( FAILED( hr ) ) { break; } // // Add the filename data unit extension to the stream // hr = pStreamConfig->QueryInterface( IID_IWMStreamConfig2, (void**) &pStreamConfig2 ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig2 ); hr = pStreamConfig2->AddDataUnitExtension( WM_SampleExtensionGUID_FileName, wMaxFilenameLength, NULL, 0 ); if ( FAILED( hr ) ) { break; } hr = SetStreamLanguage( pStreamConfig, dwLanguage ); if ( FAILED( hr ) ) { break; } // // Return the pStreamConfig to the caller // SAFE_ADDREF( pStreamConfig ); *ppStreamConfig = pStreamConfig; } while ( FALSE ); SAFE_RELEASE( pStreamConfig ); SAFE_ARRAY_DELETE( pMediaType ); SAFE_RELEASE( pMediaProps ); SAFE_RELEASE( pStreamConfig2 ); return hr; } 
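//------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original library, wrapped in
// #if 0 so it does not affect the build). It shows one way the helpers above
// could be composed to build a profile containing a single CBR video stream
// and save it as a PRX file. All literal values (bitrate, buffer window,
// frame size, quality, output path) and the choice of WMMEDIASUBTYPE_WMV3 are
// placeholders, not values taken from the surrounding code.
//------------------------------------------------------------------------------
#if 0
static HRESULT ExampleBuildSimpleProfile()
{
    IWMProfileManager* pManager = NULL;
    IWMProfile*        pProfile = NULL;
    IWMStreamConfig*   pVideo   = NULL;
    WORD wCodecIndex  = 0;
    WORD wFormatIndex = 0;   // not needed by CreateVideoStream, which only takes the codec index

    HRESULT hr = WMCreateProfileManager( &pManager );
    if ( SUCCEEDED( hr ) ) hr = pManager->CreateEmptyProfile( WMT_VER_9_0, &pProfile );
    if ( SUCCEEDED( hr ) ) hr = FindVideoCodec( &wCodecIndex, &wFormatIndex, WMMEDIASUBTYPE_WMV3 );
    if ( SUCCEEDED( hr ) ) hr = CreateVideoStream( &pVideo,
                                                   NULL,                // let the helper create IWMCodecInfo3
                                                   pProfile,
                                                   wCodecIndex,
                                                   500000,              // bitrate (bps)
                                                   3000,                // buffer window (ms)
                                                   640, 480, 30,        // width, height, fps
                                                   75,                  // quality
                                                   8,                   // seconds per key frame
                                                   FALSE, VBR_OFF, 0, 0, 0,
                                                   GetUserDefaultLCID() );
    if ( SUCCEEDED( hr ) ) hr = pProfile->AddStream( pVideo );
    if ( SUCCEEDED( hr ) ) hr = SaveProfileAsPRX( _T("example.prx"), pProfile );

    SAFE_RELEASE( pVideo );
    SAFE_RELEASE( pProfile );
    SAFE_RELEASE( pManager );
    return hr;
}
#endif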
//------------------------------------------------------------------------------ // Name: CreateMediatypeForFormat() // Desc: Initializes a WM_MEDIA_TYPE for a codec format, setting VBR attributes. //------------------------------------------------------------------------------ STDMETHODIMP CreateMediatypeForFormat( WM_MEDIA_TYPE** ppmtDestination, IWMCodecInfo3* pCodecInfo3, IWMStreamConfig** ppFormatConfig, GUID guidCodecType, DWORD dwCodecIndex, DWORD dwFormatIndex, BOOL fIsVBR, DWORD dwVBRPasses ) { HRESULT hr = S_OK; IWMProfileManager* pProfileManager = NULL; IWMStreamConfig* pStreamConfig = NULL; IWMMediaProps* pMediaProps = NULL; if ( !ppmtDestination ) { return E_INVALIDARG; } do { *ppmtDestination = NULL; // // Make sure the pCodecInfo3 exists, and there's an outstanding reference // hr = EnsureIWMCodecInfo3( &pCodecInfo3 ); if ( FAILED( hr ) ) { break; } assert( pCodecInfo3 ); // // Set the VBR settings appropriately // /*evev hr = SetCodecVBRSettings( pCodecInfo3, guidCodecType, dwCodecIndex, fIsVBR, dwVBRPasses ); if ( FAILED( hr ) ) { break; } */ /* RM: WORD wCodecIndex = 0; WORD wFormatIndex= 0; FindAudioCodec( &wCodecIndex, //[out] &wFormatIndex, //[out] guidCodecType, //[in] 48000, //[in] 2, //[in] 192000, //[in] 16, //[in] TRUE ); */ // // Call the version that doesn't set the VBR attributes // hr = CreateMediatypeForFormat( ppmtDestination, pCodecInfo3, ppFormatConfig, guidCodecType, dwCodecIndex, dwFormatIndex ); if ( FAILED( hr ) ) { break; } assert( *ppmtDestination ); } while ( FALSE ); if ( FAILED( hr ) ) { SAFE_ARRAY_DELETE( (*ppmtDestination) ); } SAFE_RELEASE( pCodecInfo3 ); SAFE_RELEASE( pProfileManager ); SAFE_RELEASE( pStreamConfig ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: CreateMediatypeForFormat() (Overloaded) // Desc: Initializes a WM_MEDIA_TYPE for a codec format, without setting // VBR attributes. 
//------------------------------------------------------------------------------ STDMETHODIMP CreateMediatypeForFormat( WM_MEDIA_TYPE** ppmtDestination, IWMCodecInfo3* pCodecInfo3, IWMStreamConfig** ppFormatConfig, GUID guidCodecType, DWORD dwCodecIndex, DWORD dwFormatIndex ) { HRESULT hr = S_OK; IWMProfileManager* pProfileManager = NULL; IWMStreamConfig* pFormatConfig = NULL; IWMMediaProps* pMediaProps = NULL; DWORD dwMediaTypeLength; if ( !ppmtDestination ) { return E_INVALIDARG; } do { *ppmtDestination = NULL; // // Make sure the pCodecInfo3 exists, and there's an outstanding reference // hr = EnsureIWMCodecInfo3( &pCodecInfo3 ); if ( FAILED( hr ) ) { break; } assert( pCodecInfo3 ); // // Get the stream configuration for the given format // hr = pCodecInfo3->GetCodecFormat( guidCodecType, dwCodecIndex, dwFormatIndex, &pFormatConfig ); if ( FAILED( hr ) ) { break; } assert( pFormatConfig ); // // Get the media type for the requested format // hr = pFormatConfig->QueryInterface( IID_IWMMediaProps, (void**) &pMediaProps ); if ( FAILED( hr ) ) { break; } assert( pMediaProps ); dwMediaTypeLength = 0; hr = pMediaProps->GetMediaType( NULL, &dwMediaTypeLength ); if( FAILED( hr ) ) { break; } *ppmtDestination = (WM_MEDIA_TYPE *) new BYTE[ dwMediaTypeLength ]; if( !*ppmtDestination ) { hr = E_OUTOFMEMORY; break; } hr = pMediaProps->GetMediaType( *ppmtDestination, &dwMediaTypeLength ); if( FAILED( hr ) ) { break; } assert( *ppmtDestination ); if ( ppFormatConfig ) { SAFE_ADDREF( pFormatConfig ); *ppFormatConfig = pFormatConfig; } } while ( FALSE ); if ( FAILED( hr ) ) { SAFE_ARRAY_DELETE( *ppmtDestination ); } SAFE_RELEASE( pCodecInfo3 ); SAFE_RELEASE( pProfileManager ); SAFE_RELEASE( pFormatConfig ); SAFE_RELEASE( pMediaProps ); return hr; } //------------------------------------------------------------------------------ // Name: WriteProfileAsPRX() // Desc: Writes a profile to a PRX file. //------------------------------------------------------------------------------ STDMETHODIMP WriteProfileAsPRX( LPCTSTR tszFilename, LPCWSTR wszProfileData, DWORD dwProfileDataLength ) { HANDLE hFile = NULL; HRESULT hr = S_OK; DWORD dwBytesWritten; BOOL bResult; assert( tszFilename ); assert( wszProfileData ); assert( 0 != dwProfileDataLength ); do { // // Create the file, overwriting any existing file // hFile = CreateFile( tszFilename, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, 0, NULL ); if ( INVALID_HANDLE_VALUE == hFile ) { hr = HRESULT_FROM_WIN32( GetLastError() ); break; } // // Write the profile data to the file // bResult = WriteFile( hFile, wszProfileData, dwProfileDataLength, &dwBytesWritten, NULL ); if ( !bResult ) { hr = HRESULT_FROM_WIN32( GetLastError() ); break; } } while ( FALSE ); // // Close the file, if it was opened successfully // SAFE_CLOSE_FILE_HANDLE( hFile ); return hr; } //------------------------------------------------------------------------------ // Name: AddSMPTEExtensionToStream() // Desc: Add a data unit extension for SMPTE time code. 
//------------------------------------------------------------------------------ STDMETHODIMP AddSMPTEExtensionToStream( IWMStreamConfig* pStream ) { HRESULT hr = S_OK; IWMStreamConfig2* pStreamConfig2 = NULL; if ( !pStream ) { return E_INVALIDARG; } do { // // Get the IWMStreamConfig2 interface // hr = pStream->QueryInterface( IID_IWMStreamConfig2, (void**) &pStreamConfig2 ); if ( FAILED( hr ) ) { break; } assert( pStreamConfig2 ); // // Add SMPTE extension // hr = pStreamConfig2->AddDataUnitExtension( WM_SampleExtensionGUID_Timecode, sizeof( WMT_TIMECODE_EXTENSION_DATA ), NULL, 0 ); if ( FAILED( hr ) ) { break; } } while ( FALSE ); SAFE_RELEASE( pStreamConfig2 ); return hr; } //------------------------------------------------------------------------------ // Name: GetUncompressedWaveFormatCount() // Desc: Returns the number of uncompressed wave formats, four for each // frequency in the WaveFrequency array. //------------------------------------------------------------------------------ STDMETHODIMP GetUncompressedWaveFormatCount( DWORD * pdwCount ) { *pdwCount = 4 * sizeof( WaveFrequency ) / sizeof( WaveFrequency[0] ); return S_OK; } //------------------------------------------------------------------------------ // Name: GetUncompressedWaveFormat() // Desc: Retrieves wave format parameters by index. //------------------------------------------------------------------------------ STDMETHODIMP GetUncompressedWaveFormat( DWORD dwIndex, DWORD * pdwSamplesPerSecond, WORD * pwNumChannels, WORD * pwBitsPerSample ) { if ( NULL == pdwSamplesPerSecond || NULL == pwNumChannels || NULL == pwBitsPerSample ) { return E_POINTER; } DWORD dwCount; HRESULT hr; hr = GetUncompressedWaveFormatCount( &dwCount ); if ( FAILED( hr ) ) { return hr; } if ( dwIndex >= dwCount ) { return E_INVALIDARG; } *pdwSamplesPerSecond = WaveFrequency[ dwIndex / 4 ]; switch ( dwIndex % 4 ) { case 0: *pwBitsPerSample = 8; *pwNumChannels = 1; break; case 1: *pwBitsPerSample = 8; *pwNumChannels = 2; break; case 2: *pwBitsPerSample = 16; *pwNumChannels = 1; break; case 3: *pwBitsPerSample = 16; *pwNumChannels = 2; break; } return S_OK; } //------------------------------------------------------------------------------ // Name: GetUncompressedPixelFormatCount() // Desc: Returns the number of formats in the PixelFormats array. //------------------------------------------------------------------------------ STDMETHODIMP GetUncompressedPixelFormatCount( DWORD * pdwCount ) { *pdwCount = sizeof( PixelFormats ) / sizeof( PixelFormats[0] ); return S_OK; } //------------------------------------------------------------------------------ // Name: GetUncompressedPixelFormat() // Desc: Retrieves pixel format parameters by index. //------------------------------------------------------------------------------ STDMETHODIMP GetUncompressedPixelFormat( DWORD dwIndex, GUID * pguidFormat, DWORD * pdwFourCC, WORD * pwBitsPerPixel ) { if ( NULL == pguidFormat || NULL == pdwFourCC || NULL == pwBitsPerPixel ) { return E_POINTER; } DWORD dwCount = sizeof( PixelFormats ) / sizeof( PixelFormats[0] ); if ( dwIndex > dwCount ) { return E_INVALIDARG; } *pguidFormat = *PixelFormats[ dwIndex ].guidFormat; *pdwFourCC = PixelFormats[ dwIndex ].dwFourCC; *pwBitsPerPixel = PixelFormats[ dwIndex ].wBitsPerPixel; return S_OK; } //------------------------------------------------------------------------------ // Name: FindVideoCodec() // Desc: Find specific video codec index by GUID. 
//------------------------------------------------------------------------------ STDMETHODIMP FindVideoCodec( WORD* wCodecIndex, //[out] WORD* wFormatIndex,//[out] GUID guidCodec //[in] ) { HRESULT hr = S_OK; IWMProfileManager * pManager = NULL; IWMCodecInfo * pCodecInfo = NULL; IWMStreamConfig * pStreamConfig = NULL; IWMVideoMediaProps * pMediaProps = NULL; WM_MEDIA_TYPE * pMediaType = NULL; *wCodecIndex = 0; *wFormatIndex = 0; do { hr = WMCreateProfileManager( &pManager ); if( FAILED( hr ) ) { break; } hr = pManager->QueryInterface( IID_IWMCodecInfo, (void **) &pCodecInfo ); if( FAILED( hr ) ) { break; } DWORD cCodecs; hr = pCodecInfo->GetCodecInfoCount( WMMEDIATYPE_Video, &cCodecs ); if( FAILED( hr ) ) { break; } // // Search from the last codec because the last codec usually // is the newest codec. // for( int i = cCodecs-1; i >= 0; i-- ) { DWORD cFormats; hr = pCodecInfo->GetCodecFormatCount( WMMEDIATYPE_Video, i, &cFormats ); if( FAILED( hr ) ) { break; } DWORD j; for( j = 0; j < cFormats; j++ ) { SAFE_RELEASE( pStreamConfig ); hr = pCodecInfo->GetCodecFormat( WMMEDIATYPE_Video, i, j, &pStreamConfig ); if( FAILED( hr ) ) { break; } SAFE_RELEASE( pMediaProps ); hr = pStreamConfig->QueryInterface( IID_IWMVideoMediaProps, (void **) &pMediaProps ); if( FAILED( hr ) ) { break; } DWORD cbMT; hr = pMediaProps->GetMediaType( NULL, &cbMT ); if( FAILED( hr ) ) { break; } SAFE_ARRAY_DELETE( pMediaType ); pMediaType = (WM_MEDIA_TYPE *) new BYTE[ cbMT ]; if( !pMediaType ) { hr = E_OUTOFMEMORY; break; } hr = pMediaProps->GetMediaType( pMediaType, &cbMT ); if( FAILED( hr ) ) { break; } if( pMediaType->formattype != WMFORMAT_VideoInfo ) { SAFE_RELEASE( pStreamConfig ); continue; } if( pMediaType->subtype == guidCodec ) { *wCodecIndex = (WORD)i; *wFormatIndex = (WORD)j; break; } SAFE_RELEASE( pStreamConfig ); } if( FAILED( hr ) || NULL != pStreamConfig ) { break; } } if( FAILED( hr ) ) { break; } if( NULL == pStreamConfig ) { hr = NS_E_VIDEO_CODEC_NOT_INSTALLED; break; } } while( FALSE ); SAFE_RELEASE( pCodecInfo ); SAFE_RELEASE( pStreamConfig ); SAFE_RELEASE( pMediaProps ); SAFE_RELEASE( pManager ); SAFE_ARRAY_DELETE( pMediaType ); return( hr ); } //------------------------------------------------------------------------------ // Name: FindAudioCodec() // Desc: Find specific audio codec index by GUID. //------------------------------------------------------------------------------ STDMETHODIMP FindAudioCodec( WORD* wCodecIndex, //[out] WORD* wFormatIndex, //[out] GUID guidCodec, //[in] DWORD dwSampleRate, //[in] DWORD dwChannels, //[in] DWORD dwBitsPerSec, //[in] WORD wBitsPerSample, //[in] BOOL fAVSync, //[in] BOOL* bIsVBR //[in/out] ) { HRESULT hr = S_OK; IWMProfileManager * pIWMProfileManager = NULL; IWMStreamConfig * pIWMStreamConfig = NULL; IWMMediaProps * pIMP = NULL; IWMCodecInfo * pIWMInfo = NULL; IWMCodecInfo3 * pIWMInfo3 = NULL; WAVEFORMATEX * pWfx = NULL; WM_MEDIA_TYPE * pType = NULL; *wCodecIndex = 0; *wFormatIndex = 0; do { hr = WMCreateProfileManager( &pIWMProfileManager ); if( FAILED( hr ) ) { break; } hr = pIWMProfileManager->QueryInterface( IID_IWMCodecInfo, (void **) &pIWMInfo ); if( FAILED( hr ) ) { break; } DWORD i, j; DWORD cCodecs; hr = pIWMInfo->GetCodecInfoCount( WMMEDIATYPE_Audio, &cCodecs ); if( FAILED( hr ) ) { break; } for( i = 0; i < cCodecs; i++ ) { DWORD cFormats; BOOL isVBR = *bIsVBR; if(isVBR) { hr = pIWMProfileManager->QueryInterface( IID_IWMCodecInfo3, (void **) &pIWMInfo3 ); if( ! 
(FAILED( hr )) ) { hr = CodecSupportsVBRSetting( pIWMInfo3, WMMEDIATYPE_Audio, i, 1, &isVBR ); if ( FAILED( hr ) || !isVBR) { continue; } } SAFE_RELEASE(pIWMInfo3); } hr = pIWMInfo->GetCodecFormatCount( WMMEDIATYPE_Audio, i, &cFormats ); if( FAILED( hr ) ) { break; } // // Find a proper format in this codec // for( j = 0; j < cFormats; j++ ) { if( NULL != pType ) { SAFE_ARRAY_DELETE( pType ); } DWORD cbType = 0; hr = pIWMInfo->GetCodecFormat( WMMEDIATYPE_Audio, i, j, &pIWMStreamConfig ); if( FAILED( hr ) ) { break; } SAFE_RELEASE( pIMP ); hr = pIWMStreamConfig->QueryInterface( IID_IWMMediaProps, (void **)&pIMP ); if( FAILED( hr ) ) { break; } hr = pIMP->GetMediaType( NULL, &cbType ); if( FAILED( hr ) ) { break; } pType = (WM_MEDIA_TYPE *) new BYTE[ cbType ]; if( NULL == pType ) { hr = E_OUTOFMEMORY; break; } hr = pIMP->GetMediaType( pType, &cbType ); if( FAILED( hr ) ) { break; } if( pType->formattype != WMFORMAT_WaveFormatEx ) { hr = E_FAIL; break; } pWfx = (WAVEFORMATEX *) pType->pbFormat; // // This sample will use this format only if it has the same // sample rate, channels and more bits per sample. // This is not necessary, because normally the codec can convert // the sample rate and bits per sample for you. // if( pType->subtype == guidCodec) { if( pWfx->nSamplesPerSec == dwSampleRate && pWfx->nChannels == dwChannels && pWfx->wBitsPerSample >= wBitsPerSample ) { // If audio/video synchronization requested, check the number // of packets per second (Bps / BlockAlign). The bit rate is // greater than 3200 bps, this value must be 5. // Otherwise this value is 3. // This is an ASF requirement. if(((pWfx->nAvgBytesPerSec*8) == dwBitsPerSec) || isVBR) { if(fAVSync) { if((pWfx->nAvgBytesPerSec / pWfx->nBlockAlign) >= ((pWfx->nAvgBytesPerSec >= 4000) ? 5.0 : 3.0)) { *wCodecIndex = (WORD)i; *wFormatIndex = (WORD)j; break; } } // if fAVSync else { *wCodecIndex = (WORD)i; *wFormatIndex = (WORD)j; break; } } //nAvgBytesPerSec } } SAFE_RELEASE( pIWMStreamConfig ); } if( FAILED( hr ) || NULL != pIWMStreamConfig ) { break; } } if( FAILED( hr ) ) { break; } if( NULL == pIWMStreamConfig ) { *bIsVBR = FALSE; hr = NS_E_AUDIO_CODEC_NOT_INSTALLED; break; } } while( FALSE ); SAFE_ARRAY_DELETE( pType ); SAFE_RELEASE( pIWMInfo ); SAFE_RELEASE( pIWMStreamConfig ); SAFE_RELEASE( pIMP ); SAFE_RELEASE( pIWMProfileManager ); return( hr ); } //------------------------------------------------------------------------------ // Name: FindInputIndex() // Desc: Find specific input index by GUID of media type. //------------------------------------------------------------------------------ STDMETHODIMP FindInputIndex( WORD* pwInputIndex, //[out] IWMWriter* pWMWriter, //[in] GUID guidMediaType //[in] ) { HRESULT hr = S_OK; DWORD dwInputCount; GUID guidInputType; IWMInputMediaProps* pInputProps = NULL; // Find the number of inputs supported by this profile. hr = pWMWriter->GetInputCount(&dwInputCount); if(FAILED(hr)) { _tprintf(_T("Unable to get Input Count For Profile \n")); return ( hr ); } // Loop through all of the supported inputs. 
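// Note: if no input matches guidMediaType, the loop falls through with hr
// still S_OK and *pwInputIndex left unmodified, so callers should
// pre-initialize the index they pass in.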
for(DWORD i = 0;i < dwInputCount;i++) { hr = pWMWriter->GetInputProps(i,&pInputProps); if(FAILED(hr)) { _tprintf(_T("Unable to get Input Properties\n")); break; } hr = pInputProps->GetType(&guidInputType); if(FAILED(hr)) { _tprintf(_T("Unable to get Input Property Type\n")); break; } //Find Input Stream if(guidInputType == guidMediaType) { *pwInputIndex = (WORD)i; break; } SAFE_RELEASE(pInputProps); } SAFE_RELEASE(pInputProps); return hr; } //------------------------------------------------------------------------------ // Name: CWMWriter::SaveProfileAsPRX() // Desc: Saves an profile as *.prx file //------------------------------------------------------------------------------ STDMETHODIMP SaveProfileAsPRX( LPCTSTR tszFilename, IWMProfile *pIWMProfile) { HRESULT hr = S_OK; IWMProfileManager* pIWMProfileManager = NULL; LPWSTR wszProfileData = NULL; // // Convert the profile to XML // do { // // Create profile manager // hr = WMCreateProfileManager( &pIWMProfileManager ); if( FAILED( hr ) ) { break; } DWORD dwProfileDataLength = 0; hr = pIWMProfileManager->SaveProfile( pIWMProfile, NULL, &dwProfileDataLength ); if ( FAILED( hr ) ) { break; } wszProfileData = new WCHAR[ dwProfileDataLength + 1 ]; if ( !wszProfileData ) { hr = E_OUTOFMEMORY; break; } hr = pIWMProfileManager->SaveProfile( pIWMProfile, wszProfileData, &dwProfileDataLength ); if ( FAILED( hr ) ) { break; } // // Write the profile to a file // hr = WriteProfileAsPRX( tszFilename, wszProfileData, dwProfileDataLength * sizeof( WCHAR ) ); if ( FAILED( hr ) ) { break; } } while(FALSE); // // Release all resources // SAFE_RELEASE( pIWMProfileManager ); SAFE_ARRAY_DELETE(wszProfileData); return hr; } //------------------------------------------------------------------------------ // Name: CWMWriter::SaveProfileAsPRX() // Desc: Saves an profile as *.prx file //------------------------------------------------------------------------------ STDMETHODIMP CodecSupportsVBRSetting( IWMCodecInfo3* pCodecInfo, GUID guidType, DWORD dwCodecIndex, DWORD dwPasses, BOOL* pbIsSupported ) { HRESULT hr = S_OK; assert( pbIsSupported ); assert( pCodecInfo ); do { *pbIsSupported = FALSE; // default to "no" // // Try setting the requested settings // hr = SetCodecVBRSettings( pCodecInfo, guidType, dwCodecIndex, TRUE, dwPasses ); if ( FAILED( hr ) ) { hr = S_OK; break; } // // If it worked, then the codec should support it // *pbIsSupported = TRUE; } while ( FALSE ); return hr; } //------------------------------------------------------------------------------ // Name: SetStreamBasics() // Desc: Creates and configures a stream. 
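// Note:  A temporary stream of the same major type is created only to obtain
//        a free stream number; that number is then applied to the caller's
//        stream configuration together with the name, connection name,
//        bitrate and media type.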
//------------------------------------------------------------------------------ STDMETHODIMP SetStreamBasics(IWMStreamConfig * pIWMStreamConfig, IWMProfile * pIWMProfile, LPWSTR pwszStreamName, LPWSTR pwszConnectionName, DWORD dwBitrate, WM_MEDIA_TYPE * pmt ) { HRESULT hr = S_OK; IWMMediaProps * pIWMMediaProps = NULL; IWMStreamConfig * pIWMStreamConfig2 = NULL; WORD wStreamNum = 0; if( NULL == pIWMStreamConfig || NULL == pIWMProfile || NULL == pmt ) { return( E_INVALIDARG ); } do { hr = pIWMProfile->CreateNewStream( pmt->majortype, &pIWMStreamConfig2 ); if( FAILED( hr ) ) { break; } hr = pIWMStreamConfig2->GetStreamNumber( &wStreamNum ); SAFE_RELEASE( pIWMStreamConfig2 ); if( FAILED( hr ) ) { break; } hr = pIWMStreamConfig->SetStreamNumber( wStreamNum ); if( FAILED( hr ) ) { break; } hr = pIWMStreamConfig->SetStreamName( pwszStreamName ); if( FAILED( hr ) ) { break; } hr = pIWMStreamConfig->SetConnectionName( pwszConnectionName ); if( FAILED( hr ) ) { break; } hr = pIWMStreamConfig->SetBitrate( dwBitrate ); if( FAILED( hr ) ) { break; } hr = pIWMStreamConfig->QueryInterface( IID_IWMMediaProps, (void **) &pIWMMediaProps ); if( FAILED( hr ) ) { break; } hr = pIWMMediaProps->SetMediaType( pmt ); if( FAILED( hr ) ) { break; } } while( FALSE ); SAFE_RELEASE( pIWMMediaProps ); return( hr ); } // Get the encoder complexity values for the codec. STDMETHODIMP GetComplexitySettings(IWMCodecInfo3* pCodecInfo3, GUID guidCodecType, DWORD dwCodecIndex, DWORD* pLive, DWORD* pOffline, DWORD* pMax) { HRESULT hr = S_OK; IWMCodecProps* pCodecProps = NULL; DWORD cbValue = sizeof(DWORD); WMT_PROP_DATATYPE dataType; WMT_ATTR_DATATYPE attDataType; IPropertyBag *pPropertyBag = NULL; // Get the codec properties interface. hr = pCodecInfo3->GetCodecProp(WMMEDIATYPE_Video,dwCodecIndex, g_wszIsVBRSupported, &attDataType, (BYTE*)pLive, &cbValue); if( FAILED( hr ) ) { //ON_FAIL("Could not get the codec props interface.", hr); return hr; } //hr = pCodecProps->GetCodecProp(WMCFOURCC_WMV3, //g_wszWMVCComplexityMode, // ) // Get the setting for live encoding. hr = pCodecProps->GetCodecProp(WMCFOURCC_WMV3, g_wszWMVCComplexityExLive, &dataType, (BYTE*)pLive, &cbValue); if( FAILED( hr ) ) { //ON_FAIL("Could not get the live complexity setting.", hr); return hr; } // Get the setting for offline encoding. hr = pCodecProps->GetCodecProp(WMCFOURCC_WMV3, g_wszWMVCComplexityExOffline, &dataType, (BYTE*)pOffline, &cbValue); if( FAILED( hr ) ) { //ON_FAIL("Could not get the offline complexity setting.", hr); return hr; } // Get the maximum complexity setting. 
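// Together with the live and offline recommendations read above, the maximum
// value lets a caller clamp whatever complexity it intends to apply to the
// codec to a supported level; higher complexity generally trades encoding
// speed for quality.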
hr = pCodecProps->GetCodecProp(WMCFOURCC_WMV3, g_wszWMVCComplexityExMax, &dataType, (BYTE*)pMax, &cbValue); if( FAILED( hr ) ) { //ON_FAIL("Could not get the maximum complexity setting.", hr); return hr; } return hr; } <file_sep>/NewEncoder/VideoEncoder/VFWWriter.h #if !defined(___VFWWRITER_H) #define ___VFWWRITER_H #include <string> //#include <afxwin.h> // MFC core and standard components //#include <windowsx.h> //#include <memory.h> //#include <mmsystem.h> #include <windows.h> #include <vfw.h> #include "VideoWriter.h" #include "Logger.h" #define VE_CODEC_VP6 "vp6" class OutputAvi; class CVFWWriter : public CVideoWriter { private: Logger & m_logger; public: CVFWWriter(Logger & logger); virtual ~CVFWWriter(); // Initialize the encoder library virtual bool Init(CVideoWriterOptions &options); virtual bool BeginWrite(); // Encode a video frame virtual bool EncodeVFrame(CVideoFrame& video_frame, int *out_size=0); // Encode a audio frame virtual bool EncodeAFrame(CAudioFrame& audio_frame); // Finish virtual bool Close(); private: bool OpenOutputVStream(); static const char *AviMessage(HRESULT code ); bool StoreCodecConf(COMPVARS* cvar); bool LoadCodecConf(COMPVARS* cvar); void ChooseOneCompatibleCompressor(COMPVARS *cvar, BITMAPINFOHEADER *bi); private: PAVIFILE poutfile; AVISTREAMINFO vstrhdr; PAVISTREAM poutsv; PAVISTREAM poutsvComp; /*, poutsa = NULL; PAVISTREAM poutsvComp = NULL, poutsaComp = NULL; AVICOMPRESSOPTIONS FAR * aopts[1]; AVISTREAMINFO astrhdr; */ bool m_bInit; int m_numFrames; int m_rowWidthBytes; unsigned char *m_pFrmBfr; BITMAPINFOHEADER outbi; COMPVARS compvars; OutputAvi* outavi; }; #endif // !defined <file_sep>/Fmod/Presets/BackgroundPreset.h #ifndef __BACKGROUND_PRESET_H__ #define __BACKGROUND_PRESET_H__ #include "Preset.h" #include "../Effects/NormalizationDsp.h" #include "../Effects/EqualizerDsp.h" class BackgroundPreset : public Preset { public: BackgroundPreset() { Preset(); } virtual void create(FMOD::System *fmodSystem) { EqualizerDsp *eq = new EqualizerDsp(); eq->addEqBand(fmodSystem, 3500.0f, 1.0f, 0.7f); effectDspList.push_back(eq); volume = 0.3f; created = true; } }; #endif <file_sep>/NewEncoder/VideoEncoder/FFMpegWriter.cpp #include <stdlib.h> #include <stdio.h> #ifndef WIN32 #include <pthread.h> #endif #define __STDC_CONSTANT_MACROS #ifndef WIN32 #define _stricmp strcasecmp #endif extern "C" { #include "libavformat/avformat.h" #include "libswscale/swscale.h" #include "libavutil/opt.h" #include "libavutil/error.h" #include <libswscale/swscale.h> #include <libswresample/swresample.h> #include "libavfilter/avfilter.h" #include "libavfilter/avfiltergraph.h" #include "libavcodec/avcodec.h" #include "libavfilter/buffersink.h" #include "libavfilter/buffersrc.h" } #include "FFMpegWriter.h" //#include "VFWEncoder.h" #include "PlaylistWriter.h" #include "BaseFolder.h" #include "SWException.h" #include "ColorSpace.h" #include "avc.h" #include "MutexAutoLock.h" #pragma warning (disable : 4996 ) // A global handle to synchronize calls to ffmpeg funcs // This handle is also used by the decoder // it is shared through the commonStorage mechanism #ifdef WIN32 static HANDLE avcodec_lock_mutex = NULL; #else static pthread_mutex_t avcodec_lock_mutex; // = NULL; #endif #define AVCODEC_LOCK_MUTEX_ID "avcodec_lock_mutex" #define INPUT_SAMPLERATE 44100 #define INPUT_FORMAT AV_SAMPLE_FMT_S16 #define INPUT_CHANNEL_LAYOUT AV_CH_LAYOUT_STEREO #define INPUT_CHANNEL_NUMBER 2 enum STREAM_IDS { VIDEO_STREAM_ID = 0, AUDIO_STREAM_ID = 1 }; void CFFMpegWriter::my_log_callback(void *ptr, int 
level, const char *fmt, va_list vargs) { printf("INIT started from callback\n"); vprintf(fmt, vargs); //vfprintf(stdout, fmt, vargs); printf(fmt, vargs); } /* select layout with the highest channel count */ int CFFMpegWriter::select_channel_layout(AVCodec *codec) { const uint64_t *p; uint64_t best_ch_layout = 0; int best_nb_channels = 0; if (!codec->channel_layouts) return AV_CH_LAYOUT_STEREO; p = codec->channel_layouts; while (*p) { int nb_channels = av_get_channel_layout_nb_channels(*p); if (nb_channels > best_nb_channels) { best_ch_layout = *p; best_nb_channels = nb_channels; } p++; } return (int)best_ch_layout; } /* initialize libavcodec, and register all codecs and formats */ //static void CFFMpegWriter::init(map<string, void *> *commonStorage) { /*av_log_set_callback(CFFMpegWriter::my_log_callback); av_log_set_level(AV_LOG_DEBUG); av_log(NULL, AV_LOG_DEBUG, "START AV LOG WRITER");*/ // Take handle to avcodec mutex // Its existance also indicates that ffmpeg was initialized by another engine (decoder) map<string, void *>::const_iterator iter = commonStorage->find( AVCODEC_LOCK_MUTEX_ID ); if ( iter == commonStorage->end() ) { // No - init #ifdef WIN32 avcodec_lock_mutex = CreateMutex( NULL, FALSE, NULL ); #else pthread_mutex_init(&avcodec_lock_mutex, NULL); #endif // First time - intialize ffmpeg avcodec_register_all(); av_register_all(); avfilter_register_all(); // Store for other engines #ifdef WIN32 (*commonStorage)[ AVCODEC_LOCK_MUTEX_ID ] = avcodec_lock_mutex; #else (*commonStorage)[AVCODEC_LOCK_MUTEX_ID] = &avcodec_lock_mutex; #endif } else { #ifdef WIN32 avcodec_lock_mutex = (HANDLE)iter->second; #else avcodec_lock_mutex = *((pthread_mutex_t*)iter->second); #endif } } CFFMpegWriter::CFFMpegWriter(Logger & logger, ProgressTracker *progressTracker) : m_logger(logger) { m_pVideoBuffer = NULL; m_pAVOutputFormat = NULL; m_pAVFormatContext = NULL; m_pVStream = NULL; m_pVCodec = NULL; m_pVCodecContext = NULL; //m_pVFrameOrig = NULL; m_pVFrameForEnc = NULL; m_pAStream = NULL; m_pACodec = NULL; m_pACodecContext = NULL; //m_pAEncodedSample = NULL; m_pAFrameForEnc = NULL; m_passLogFile = NULL; m_pSWSContext = NULL; m_pPlaylistWriter = NULL; m_bInit = false; m_codecOptions = NULL; //swr_ctx = NULL; samples_count = 0; encoded_samples_count = 0; m_pAFrameForFilter = 0; graph = NULL; m_paddedIOContext = NULL; m_progressTracker = progressTracker; moovHeaderWriter = new MoovHeaderWriter(logger); } CFFMpegWriter::~CFFMpegWriter() { if ( m_pVideoBuffer ) av_free(m_pVideoBuffer); if ( m_passLogFile ) fclose( m_passLogFile ); if (moovHeaderWriter){ delete moovHeaderWriter; moovHeaderWriter = NULL; } } // Initialize the encoder library bool CFFMpegWriter::Init(CVideoWriterOptions &options) { m_options = options; const char* result_name; std::string publish_name; const char* padded_name = m_options.padded ? m_options.name.c_str() : "none"; if (m_options.padded) { m_logger.info("padded_name %s\n", padded_name); std::string currange = m_options.name; size_t delim = currange.find( '.' ); if ( delim == std::string::npos ){ // No delimiter publish_name = m_options.name + "_fixed."; } else { publish_name = currange.substr(0, delim) + "_fixed." + currange.substr(delim + 1); } m_logger.info("publish_name %s\n", publish_name.c_str()); result_name = m_options.padded ? 
publish_name.c_str() : m_options.name.c_str(); } else{ result_name = options.name.c_str(); } //const char *output_name = options.name.c_str(); const char* output_name = result_name; m_logger.info("output_name %s\n", output_name); if ( CPlaylistWriter::IsPlaylist( output_name ) ) { if ( options.playlist_segment_dur_ms <= 0 ) { throw SWException("playlist_segment_dur must be positive (%d)\n", options.playlist_segment_dur_ms ); } m_pPlaylistWriter = new CPlaylistWriter( output_name, options.playlist_segment_dur_ms , options.playlist_first_segment_dur_ms, m_options ); if ( !m_pPlaylistWriter->Start() ) return false; output_name = m_pPlaylistWriter->GetNextSegmentFullPath().c_str(); // Name of first segment } /* * File format */ m_pAVOutputFormat = av_guess_format(NULL, output_name, NULL); //shot name is not nessessary "mp4" if (!m_pAVOutputFormat) { throw SWException( "FFMpeg: Cannot find output format\n"); } //m_pAVFormatContext = avformat_alloc_context(); avformat_alloc_output_context2(&m_pAVFormatContext, NULL, NULL, output_name); m_pAVFormatContext->oformat = m_pAVOutputFormat; // Write bitrate for player if ( options.br_inhdr > 0 ) { // total bitrate to be written in header specified as a parameter (to overcome a // situation where resulting bitrate is lower than requested) m_pAVFormatContext->bit_rate = options.br_inhdr; } else { // Use real data rate m_pAVFormatContext->bit_rate = GetTotalBitrate(); } sprintf(m_pAVFormatContext->filename, "%s", output_name); //if (url_fopen(&m_pAVFormatContext->pb, output_name, URL_WRONLY) < 0) if (avio_open(&m_pAVFormatContext->pb, output_name, AVIO_FLAG_WRITE)) { throw SWException("Could not open %s\n", output_name ); } if ( m_pPlaylistWriter ) m_pPlaylistWriter->SegmentCreated(); // Notify that first segment was created if ( !AddVideoStream(options) ) return false; // if ( options.m_bAudio ) if ( options.abitrate > 0 ) { if ( !AddAudioStream(options) ) return false; /* Set up the filtergraph. */ int err = init_filter_graph(&graph, &buffer_src, &sink); if (err < 0) { fprintf(stderr, "Unable to init filter graph.\n"); throw SWException("Unable to init filter graph.\n"); } } if (m_options.padded) { m_logger.info("CREATE PADDED MP4.\n"); // Create internal Buffer for FFmpeg: const int pBufSize = 32 * 1024; BYTE* pBuffer = new BYTE[pBufSize]; // Allocate the AVIOContext: // The fourth parameter (pStream) is a user parameter which will be passed to our callback functions m_paddedIOContext = avio_alloc_context(pBuffer, pBufSize, // internal Buffer and its size 1, // bWriteable (1=true,0=false) NULL,/*pInStream,*/ // user data ; will be passed to our callback functions NULL,/*ReadFunc,*/ 0, // Write callback function (not used in this example) NULL /*SeekFunc*/); if (avio_open(&m_paddedIOContext, padded_name, AVIO_FLAG_WRITE)) { throw SWException("Could not open %s\n", output_name ); } //double stretchDuration = ceil(m_options.vduration_ms * m_options.stretch_time); int frameNumber = m_options.vduration_ms * m_options.fps / 1000 /*ms*/; m_options.frame_number = frameNumber; double fps = (double) m_options.framerate/ (double) m_options.framerate_scale; double vduration = (options.stretch_time != 1) ? 
m_options.vduration_ms : (1000 /*ms*/ * ((double) (frameNumber -1)) / (double) fps);// ((double)m_options.framerate/m_options.framerate_scale); m_options.vtrack_duration = ceil(vduration); //round up if (m_pAStream == NULL){ m_options.aPacketsNumber = 0; m_options.atrack_duration = 0; } else{ double aPacketsNumber = vduration * (double)m_pAStream->codec->sample_rate/ (double)(m_pAStream->codec->frame_size) /1000; m_options.aPacketsNumber = floor(aPacketsNumber); //round down double aduration = (double) (1000 /*ms*/ * m_options.aPacketsNumber * (m_pAStream->codec->frame_size)) / (double)m_pAStream->codec->sample_rate; m_options.atrack_duration = ceil(aduration); //15047; } m_mediaList.clear(); vpacketList.clear(); apacketList.clear(); nPacketCount = 0; nVideoCount = 0; nAudioCount =0; nframeNumber =frameNumber; } m_bInit = true; m_vFirstFrame = true; return true; } int CFFMpegWriter::init_filter_graph(AVFilterGraph **graph, AVFilterContext **buffer_src, AVFilterContext **sink) { AVFilterGraph *filter_graph; AVFilterContext *abuffer_ctx; AVFilter *abuffer; AVFilterContext *atempo_ctx; AVFilter *atempo; AVFilterContext *aformat_ctx; AVFilter *aformat; AVFilterContext *abuffersink_ctx; AVFilter *abuffersink; AVDictionary *options_dict = NULL; uint8_t options_str[1024]; uint8_t ch_layout[64]; int err; /* Create a new filtergraph, which will contain all the filters. */ filter_graph = avfilter_graph_alloc(); if (!filter_graph) { fprintf(stderr, "Unable to create filter graph.\n"); m_logger.info("Unable to create filter graph.\n"); return AVERROR(ENOMEM); } /* Create the abuffer filter; * it will be used for feeding the data into the graph. */ abuffer = avfilter_get_by_name("abuffer"); if (!abuffer) { fprintf(stderr, "Could not find the abuffer filter.\n"); m_logger.info("Could not find the abuffer filter.\n"); return AVERROR_FILTER_NOT_FOUND; } char srcName [100], atempoName[100], aformatName[100], sinkName[100] ; sprintf(srcName, "src_%s", m_options.jobId.c_str()); sprintf(atempoName, "atempo_%s", m_options.jobId.c_str()); sprintf(sinkName, "sink_%s", m_options.jobId.c_str()); sprintf(aformatName, "aformat_%s", m_options.jobId.c_str()); abuffer_ctx = avfilter_graph_alloc_filter(filter_graph, abuffer, srcName); if (!abuffer_ctx) { fprintf(stderr, "Could not allocate the abuffer instance.\n"); m_logger.info("Could not allocate the abuffer instance.\n"); return AVERROR(ENOMEM); } /* Set the filter options through the AVOptions API. */ AVRational timeBase = m_pACodecContext->time_base; av_get_channel_layout_string((char *)ch_layout, sizeof(ch_layout), 0, INPUT_CHANNEL_LAYOUT); av_opt_set (abuffer_ctx, "channel_layout", (char *) ch_layout, AV_OPT_SEARCH_CHILDREN); av_opt_set_sample_fmt (abuffer_ctx, "sample_fmt", INPUT_FORMAT, AV_OPT_SEARCH_CHILDREN); ///Always input format from fmod is AV_SAMPLE_FMT_S16 av_opt_set_q (abuffer_ctx, "time_base", timeBase, AV_OPT_SEARCH_CHILDREN); av_opt_set_int(abuffer_ctx, "sample_rate",INPUT_SAMPLERATE, AV_OPT_SEARCH_CHILDREN); /* Now initialize the filter; we pass NULL options, since we have already * set all the options above. */ err = avfilter_init_str(abuffer_ctx, NULL); if (err < 0) { fprintf(stderr, "Could not initialize the abuffer filter.\n"); return err; } /* Create atempo filter. 
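   The atempo filter time-stretches the audio so it plays m_options.atempo
   times faster (or slower) while preserving pitch. A single atempo instance
   only accepts a limited tempo range (roughly 0.5 to 2.0 in older FFmpeg
   builds); larger factors would need several chained instances, which this
   code does not do.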
*/ if ( m_options.atempo != 1 ) { atempo = avfilter_get_by_name("atempo"); if (!atempo) { fprintf(stderr, "Could not find the atempo filter.\n"); return AVERROR_FILTER_NOT_FOUND; } atempo_ctx = avfilter_graph_alloc_filter(filter_graph, atempo, atempoName); if (!atempo_ctx) { fprintf(stderr, "Could not allocate the atempo instance.\n"); m_logger.info("Could not allocate the atempo instance.\n"); return AVERROR(ENOMEM); } /* A different way of passing the options is as key/value pairs in a * dictionary. */ char tempoVal[10]; sprintf(tempoVal, "%g", m_options.atempo); av_dict_set(&options_dict, "tempo", tempoVal, 0); err = avfilter_init_dict(atempo_ctx, &options_dict); av_dict_free(&options_dict); if (err < 0) { fprintf(stderr, "Could not initialize the atempo filter.\n"); m_logger.info("Could not initialize the atempo instance.\n"); return err; } } /* Create the aformat filter; * it ensures that the output is of the format we want. */ aformat = avfilter_get_by_name("aformat"); if (!atempo) { fprintf(stderr, "Could not find the atempo filter.\n"); return AVERROR_FILTER_NOT_FOUND; } aformat_ctx = avfilter_graph_alloc_filter(filter_graph, aformat, aformatName); if (!aformat_ctx) { fprintf(stderr, "Could not allocate the aformat instance.\n"); m_logger.info("Could not allocate the aformat instance.\n"); return AVERROR(ENOMEM); } /* A third way of passing the options is in a string of the form * key1=value1:key2=value2.... */ snprintf((char *)options_str, sizeof(options_str), "sample_fmts=%s:sample_rates=%d:channel_layouts=0x%ld", av_get_sample_fmt_name(m_pACodecContext->sample_fmt), m_pACodecContext->sample_rate, m_pACodecContext->channel_layout); err = avfilter_init_str(aformat_ctx, (const char*) options_str); if (err < 0) { av_log(NULL, AV_LOG_ERROR, "Could not initialize the aformat filter.\n"); return err; } /* Finally create the abuffersink filter; * it will be used to get the filtered data out of the graph. */ abuffersink = avfilter_get_by_name("abuffersink"); if (!abuffersink) { fprintf(stderr, "Could not find the abuffersink filter.\n"); m_logger.info("Could not find the abuffersink instance.\n"); return AVERROR_FILTER_NOT_FOUND; } abuffersink_ctx = avfilter_graph_alloc_filter(filter_graph, abuffersink, sinkName); if (!abuffersink_ctx) { fprintf(stderr, "Could not allocate the abuffersink instance.\n"); m_logger.info("Could not allocate the abuffersink instance.\n"); return AVERROR(ENOMEM); } /* This filter takes no options. */ err = avfilter_init_str(abuffersink_ctx, NULL); if (err < 0) { fprintf(stderr, "Could not initialize the abuffersink instance.\n"); m_logger.info("Could not initialize the abuffersink instance.\n"); return err; } if ( m_options.atempo != 1 ) { err = avfilter_link(abuffer_ctx, 0, atempo_ctx, 0); if (err >= 0) err = avfilter_link(atempo_ctx, 0, aformat_ctx, 0); } else { err = avfilter_link(abuffer_ctx, 0, aformat_ctx, 0); } if (err >= 0) err = avfilter_link(aformat_ctx, 0, abuffersink_ctx, 0); if (err < 0) { fprintf(stderr, "Error connecting filters\n"); m_logger.info("Error connecting filters.\n"); return err; } /* Configure the graph. 
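   avfilter_graph_config() validates the links and negotiates the sample
   formats between the filters; it must be called once, after all filters
   have been created and linked and before any frame is pushed into the
   graph.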
*/ err = avfilter_graph_config(filter_graph, NULL); if (err < 0) { av_log(NULL, AV_LOG_ERROR, "Error configuring the filter graph\n"); return err; } *graph = filter_graph; *buffer_src = abuffer_ctx; *sink = abuffersink_ctx; return 0; } /* check that a given sample format is supported by the encoder */ int CFFMpegWriter::check_sample_fmt(AVCodec *codec, enum AVSampleFormat sample_fmt) { const enum AVSampleFormat *p = codec->sample_fmts; while (*p != AV_SAMPLE_FMT_NONE) { if (*p == sample_fmt) return 1; p++; } return 0; } bool CFFMpegWriter::AddAudioStream(CVideoWriterOptions &options) { int ret; m_pAStream = avformat_new_stream(m_pAVFormatContext, NULL); m_pAStream->id = AUDIO_STREAM_ID; m_pACodecContext = m_pAStream->codec; m_pACodecContext ->codec_type = AVMEDIA_TYPE_AUDIO; if (m_pAVOutputFormat->flags & AVFMT_GLOBALHEADER) m_pACodecContext ->flags |= CODEC_FLAG_GLOBAL_HEADER; if ( _stricmp( options.acodec.c_str(), VE_CODEC_PCM_S16LE ) == 0 ) { m_pACodecContext ->codec_id = AV_CODEC_ID_PCM_S16LE; m_pACodecContext->sample_fmt = AV_SAMPLE_FMT_S16; } else if ( _stricmp( options.acodec.c_str(), VE_CODEC_MP3 ) == 0 ){ m_pACodecContext ->codec_id = AV_CODEC_ID_MP3; m_pACodecContext->sample_fmt = AV_SAMPLE_FMT_S16P; } else if ( _stricmp( options.acodec.c_str(), VE_CODEC_AAC ) == 0 ){ m_pACodecContext ->codec_id = AV_CODEC_ID_AAC; m_pACodecContext->sample_fmt = AV_SAMPLE_FMT_FLTP; } else if ( _stricmp( options.acodec.c_str(), VE_CODEC_VORBIS ) == 0 ){ m_pACodecContext ->codec_id = AV_CODEC_ID_VORBIS; m_pACodecContext->sample_fmt = AV_SAMPLE_FMT_FLTP; } else { throw SWException ("Unknown acodec: %s\n", options.acodec.c_str() ); } m_pACodec = avcodec_find_encoder(m_pACodecContext->codec_id); if (!m_pACodec) { throw SWException ("Could not create FFMpeg audio encoder\n"); } m_bSetAudioPts = ( m_pACodecContext ->codec_id == AV_CODEC_ID_VORBIS ); // Setting pts only for vorbis (other codecs work well without it) m_iAudioBytesCount = 0; // Count input audio bytes m_pACodecContext ->bit_rate = options.abitrate; m_pACodecContext ->sample_rate = options.asample_rate; m_pACodecContext ->channels = options.achannels; m_pACodecContext->channel_layout = select_channel_layout(m_pACodec); if (m_bSetAudioPts) { // If pts is set, set time_base to 1/1000 m_pACodecContext ->time_base.den = 1000; m_pACodecContext ->time_base.num = 1; m_pAStream->time_base.den = 1000; m_pAStream->time_base.num = 1; } //m_pACodecContext ->strict_std_compliance = FF_COMPLIANCE_INOFFICIAL; if (!check_sample_fmt(m_pACodec, m_pACodecContext->sample_fmt)) { fprintf(stderr, "Encoder does not support sample format %s", av_get_sample_fmt_name(m_pACodecContext->sample_fmt)); throw SWException ("Encoder does not support sample format %s", av_get_sample_fmt_name(m_pACodecContext->sample_fmt)); } // Open codec AvcodecOpenThreadSafe(m_pACodecContext, m_pACodec); m_lASamplesPerFrame = m_pACodecContext->codec->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE ? 
10000 : m_pACodecContext->frame_size; /* compute the number of converted samples: buffering is avoided * ensuring that the output buffer will contain at least all the * converted input samples */ ret = av_samples_alloc_array_and_samples(&dst_samples_data, &dst_samples_linesize, m_pACodecContext->channels, m_lASamplesPerFrame, m_pACodecContext->sample_fmt, 0); if (ret < 0) { throw SWException ("Could not allocate destination samples\n"); } /* the codec gives us the frame size, in samples, * we calculate the size of the samples buffer in bytes */ dst_samples_size = av_samples_get_buffer_size(NULL, m_pACodecContext->channels, m_lASamplesPerFrame, m_pACodecContext->sample_fmt, 0); //allocate audio frame m_pAFrameForEnc = av_frame_alloc(); if (m_pAFrameForEnc == NULL) { throw SWException("Couldn't allocate audio frame.\n"); } //allocate autio filter frame m_pAFrameForFilter = av_frame_alloc(); if (!m_pAFrameForFilter){ throw SWException("Couldn't allocate audio filter frame.\n"); } return true; } AVCodecID CFFMpegWriter::selectCodecId(CVideoWriterOptions &options){ AVCodecID codecId = AV_CODEC_ID_NONE; // Select codec if ( _stricmp( options.vcodec.c_str(), VE_CODEC_FLV ) == 0 ) codecId = AV_CODEC_ID_FLV1; else if ( _stricmp( options.vcodec.c_str(), VE_CODEC_H264 ) == 0 ) { codecId = AV_CODEC_ID_H264; } else if (_stricmp(options.vcodec.c_str(), VE_CODEC_H264_NVENC) == 0) { codecId = AV_CODEC_ID_H264; } else if ( _stricmp( options.vcodec.c_str(), VE_CODEC_MPEG4 ) == 0 ){ codecId = AV_CODEC_ID_MPEG4; } else if ( _stricmp( options.vcodec.c_str(), VE_CODEC_QTRLE ) == 0 ) { codecId = AV_CODEC_ID_QTRLE; } else if ( _stricmp( options.vcodec.c_str(), VE_CODEC_VP8 ) == 0 ) { codecId = AV_CODEC_ID_VP8; } else if ( _stricmp( options.vcodec.c_str(), VE_CODEC_VP6 ) == 0 ) { codecId =AV_CODEC_ID_H264 ; // CODEC_ID_VP6F; } else if ( _stricmp( options.vcodec.c_str(), VE_CODEC_XVID ) == 0 ){ codecId = AV_CODEC_ID_MPEG4; } else if ( _stricmp( options.vcodec.c_str(), IE_CODEC_IMAGE2 ) == 0 ){ codecId = AV_CODEC_ID_MJPEG; } else { throw SWException ("Unknown vcodec: %s\n", options.vcodec.c_str() ); } return codecId; } bool CFFMpegWriter::AddVideoStream(CVideoWriterOptions &options) { av_log_set_level(56); AVCodecID codecId = selectCodecId(options); bool nvenc_h264 = false; if (!options.h264Impl.empty() && _stricmp(options.h264Impl.c_str(), "nvenc") == 0) { m_pVCodec = avcodec_find_encoder_by_name(VE_CODEC_H264_NVENC); nvenc_h264 = true; m_logger.info("Using nvenc h264 encoder\n"); } else { m_pVCodec = avcodec_find_encoder(codecId); } if (!m_pVCodec) { throw SWException ("Could not create FFMpeg video encoder\n"); } m_pVStream = avformat_new_stream(m_pAVFormatContext, m_pVCodec); m_pVStream->id = VIDEO_STREAM_ID; m_pVCodecContext = m_pVStream->codec; m_pVCodecContext ->codec_type = AVMEDIA_TYPE_VIDEO; m_pVCodecContext->codec_id = codecId; if ( options.vconstantRateFactor > 0) { m_logger.info("Encoding with crf=%f\n", options.vconstantRateFactor); //m_pVCodecContext->crf = options.vconstantRateFactor; char crf [10] ; sprintf(crf, "%f", options.vconstantRateFactor); av_opt_set(m_pVStream->codec->priv_data, "crf", crf, 0); } else { m_logger.trace("Encoding with abr=%d\n", options.vbitrate); m_pVCodecContext ->bit_rate = options.vbitrate; } m_pVCodecContext ->width = options.width; m_pVCodecContext ->height = options.height; // timebase: This is the fundamental unit of time (in seconds) in terms // of which frame timestamps are represented. 
For fixed-fps content, // timebase should be 1/framerate and timestamp increments should be // identical to 1. m_pVCodecContext ->time_base.num = options.framerate_scale; m_pVCodecContext ->time_base.den = options.framerate ; m_pVCodecContext ->pix_fmt = AV_PIX_FMT_YUV420P; // Default. may be diff. for some codecs if (m_pVCodecContext->codec_id == AV_CODEC_ID_MPEG2VIDEO) { // just for testing, we also add B frames m_pVCodecContext->max_b_frames = 2; } if (m_pVCodecContext->codec_id == AV_CODEC_ID_MPEG1VIDEO) { // Needed to avoid using macroblocks in which some coeffs overflow. // This does not happen with normal video, it just happens here as //the motion of the chroma plane does not match the luma plane. m_pVCodecContext->mb_decision = 2; } if (m_pAVOutputFormat->flags & AVFMT_GLOBALHEADER) m_pVCodecContext ->flags |= CODEC_FLAG_GLOBAL_HEADER; m_pVCodecContext ->strict_std_compliance = FF_COMPLIANCE_NORMAL; // Set codec params // Quality vs. Performance: 0 = Fastest, 100 = Highest quality int q2p = options.GetQual2PerfTradeoff(); if ( m_pVCodecContext ->codec_id == AV_CODEC_ID_H264 ) { m_pVCodecContext->ticks_per_frame = 2; // 2 is used for h264 m_pVCodecContext ->time_base.den *= m_pVCodecContext->ticks_per_frame ; char *presetNames[] = {"ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo" }; int npresets = sizeof(presetNames)/sizeof(*presetNames); int presetNum = max( 0, min( npresets-1, q2p * npresets / 101 ) ); //0->0 100->npresets-1 if( !options.fragmented){ if (!nvenc_h264) { char presetFile[MAX_PATH]; sprintf(presetFile, "%s/presets/libx264-%s.ffpreset", getBaseFolder(), presetNames[presetNum]); m_logger.info("Loading H264 preset: %s\n", presetFile); if (!SetPresetsFromFile(presetFile)) return false; /*av_dict_set(&m_codecOptions, "preset", presetNames[presetNum], 0); av_dict_set(&m_codecOptions, "vprofile", "baseline", 0);*/ } else { char * presetName = NULL; if (q2p <= 10) presetName = "llhp"; else if (q2p <= 20) presetName = "bd"; else if (q2p <= 30) presetName = "hp"; else if (q2p <= 40) presetName = "fast"; else if (q2p <= 50) presetName = "medium"; else if (q2p <= 60) presetName = "slow"; else if (q2p <= 100) presetName = "losslesshp"; av_opt_set(m_pVCodecContext->priv_data, "preset", presetName, 0); } } else{ //set mp4 fragmented options m_pVCodecContext->flags += CODEC_FLAG_LOOP_FILTER; m_pVCodecContext->coder_type = FF_CODER_TYPE_AC; m_pVCodecContext->me_cmp |= 1; //// cmp=+chroma, where CHROMA = 1 m_pVCodecContext->me_method = ME_HEX; m_pVCodecContext->me_subpel_quality = 7; //9, 7, 0 m_pVCodecContext->me_range = 16; m_pVCodecContext->scenechange_threshold = 40; av_dict_set(&m_codecOptions, "movflags", "frag_keyframe+empty_moov", 0); av_dict_set(&m_codecOptions, "vprofile", "baseline", 0); av_dict_set(&m_codecOptions, "preset", presetNames[presetNum], 0); } if (m_options.padded) { av_dict_set(&m_codecOptions, "movflags", "faststart", 0); } ////TO DELETE - just for debug //av_dict_set(&m_codecOptions, "movflags", "faststart", 0); av_dict_set(&m_codecOptions, "threads", "1", 0); // Always use 1 thread as we're running multiple jobs simultaneously av_dict_set(&m_codecOptions, "mbtree", "1", 0); if ( options.vpsy_rd >= 0 ) { char buffer[100]; sprintf(buffer, "%4.2f:0.00", options.vpsy_rd); // Psy-Trellis strength is always 0 av_dict_set(&m_codecOptions, "psy-rd", buffer, 0); } if ( options.vlookahead_frames >= 0 ) { char buffer[100]; sprintf(buffer, "%d", options.vlookahead_frames); av_dict_set(&m_codecOptions, 
"rc-lookahead", buffer, 0); } // Set profile and level m_pVCodecContext->level = options.vlevel < 0 ? 30 : options.vlevel; m_logger.trace("Encoding level: %d", m_pVCodecContext->level); if ( options.vprofile.empty() || _stricmp( options.vprofile.c_str(), "baseline" ) == 0 ) { m_logger.trace("Forcing baseline profile"); m_pVCodecContext->coder_type = FF_CODER_TYPE_VLC; m_pVCodecContext->max_b_frames = 0; av_dict_set(&m_codecOptions, "b-pyramid", "normal", 0); //264 - only b-frames av_dict_set(&m_codecOptions, "mixed-refs", "1", 0); av_dict_set(&m_codecOptions, "weightb", "1", 0); av_dict_set(&m_codecOptions, "8x8dct", "1", 0); av_opt_set(m_pVCodecContext->priv_data, "profile", "baseline", AV_OPT_SEARCH_CHILDREN); m_pVCodecContext->me_cmp |= 1; //// cmp=+chroma, where CHROMA = 1 } } else { if(codecId == AV_CODEC_ID_VP8){ av_opt_set(&m_codecOptions, "deadline", "realtime", 0); av_opt_set(m_pVCodecContext->priv_data, "deadline", "realtime", 0); } if (codecId == AV_CODEC_ID_QTRLE){ m_pVCodecContext ->pix_fmt = AV_PIX_FMT_RGB24; // No need to go through YUV } if ( codecId == AV_CODEC_ID_VP6F) { // The VP6F decoder accepts an optional 1 byte extradata. It is composed of: // - upper 4bits: difference between encoded width and visible width // - lower 4bits: difference between encoded height and visible height m_codecExtraData = ( ( (0x10 - (options.width&0xF)) % 0x10 ) << 4 ) + ( (0x10 - (options.height&0xF)) % 0x10 ); m_pVCodecContext->extradata_size = 1; m_pVCodecContext->extradata = &m_codecExtraData; } if (codecId == AV_CODEC_ID_MJPEG){ m_pVCodecContext->pix_fmt = AV_PIX_FMT_YUVJ420P; } if (codecId != AV_CODEC_ID_MJPEG) { if ( q2p >= 20 ) m_pVCodecContext->flags |= CODEC_FLAG_4MV; if ( q2p >= 30 ) m_pVCodecContext->me_cmp = 3; if ( q2p >= 50 ) m_pVCodecContext->me_sub_cmp = 3; if ( q2p >= 70 ) m_pVCodecContext->trellis = 1; if ( q2p >= 81 ) m_pVCodecContext->mb_decision = FF_MB_DECISION_RD; if ( q2p >= 91 ) m_pVCodecContext->trellis = 2; } } // Key frame interval. m_pVCodecContext->gop_size = options.GetKFInterval(); m_pVCodecContext->keyint_min=options.GetKFInterval(); m_logger.info( "Encoding settings: quality2performance=%d, keyframe-interval=%d\n", q2p, m_pVCodecContext->gop_size ); // Support 2pass if ( options.passnum > 0 ) { if ( options.passnum == 1 ) { // First pass -> Write to log file. m_pVCodecContext->flags |= CODEC_FLAG_PASS1; m_passLogFile = fopen(options.passlogname.c_str(), "w"); if (!m_passLogFile) { throw SWException ("Failed to open log file for writing: %s\n", options.passlogname.c_str() ); } } else if ( options.passnum == 2 ) { m_pVCodecContext->flags |= CODEC_FLAG_PASS2; /* read the log file */ FILE *f = fopen(options.passlogname.c_str(), "r"); if (!f) { throw SWException ("Failed to open log file for reading: %s\n", options.passlogname.c_str() ); } fseek(f, 0, SEEK_END); int size = ftell(f); fseek(f, 0, SEEK_SET); char *logbuffer = (char *)av_malloc(size + 1); if (!logbuffer) { throw SWException("Could not allocate log buffer\n"); } size = fread(logbuffer, 1, size, f); fclose(f); logbuffer[size] = '\0'; m_pVCodecContext->stats_in = logbuffer; } } ////if ( options.vqual2perf_tradeoff > 0 ) ////{ //// // constant quantization //// m_pVCodecContext->qmax=options.vqual2perf_tradeoff; //// m_pVCodecContext->qmin=0; //options.vqf; ////} //m_pVCodecContext->flags = 0; // this is what ffmpeg does... 
//m_pVCodecContext->debug = 0xFFFFFFFF; if ( options.vqf > 0) // && m_options.vcodec != IE_CODEC_IMAGE2) { // constant quantization m_pVCodecContext->qmax=options.vqf; m_pVCodecContext->qmin=options.vqf; } options.vrcbuf_ms = 10000; if ( options.vrcbuf_ms > 0 ) { // Ratecontrol buffer size m_pVCodecContext->rc_max_rate = m_pVCodecContext->rc_min_rate = options.vbitrate; m_pVCodecContext->rc_buffer_size = options.vbitrate/1000*options.vrcbuf_ms; } // crash... m_pVCodecContext->mpeg_quant=1; /* * Using ffmpeg encoder - prepare it */ m_nVideoBufferSize = m_pVCodecContext->width * m_pVCodecContext->height * 5; m_pVideoBuffer = (uint8_t*)av_malloc(m_nVideoBufferSize); // Now that all the parameters are set, we can open the audio and // video codecs and allocate the necessary encode buffers. AvcodecOpenThreadSafe(m_pVCodecContext, m_pVCodec); // if (m_pAVCodecContext->pix_fmt != PIX_FMT_YUV420P) // Frame to be encoded. evev - may use orig frame if in correct codec??? m_pVFrameForEnc = alloc_picture(m_pVCodecContext->pix_fmt , options.width, options.height); return true; } //needed?? bool CFFMpegWriter::BeginWrite() { if ( !m_bInit ) return false; av_dump_format(m_pAVFormatContext, 0, m_pAVFormatContext->filename, 1); avformat_write_header(m_pAVFormatContext, &m_codecOptions); if(m_options.padded){ moovHeaderWriter->write_header(m_pAVFormatContext, m_paddedIOContext, m_options, m_mediaList); } m_lVEncodedFrames = 0; return true; } // Encode a video frame bool CFFMpegWriter::EncodeVFrame(CVideoFrame& video_frame, int *out_size) { if ( !m_bInit ) return false; m_logger.info("Frame dims (%dx%d) ; video's (%dx%d)\n", video_frame.width, video_frame.height, m_options.width, m_options.height); if ( video_frame.width != m_options.width || video_frame.height != m_options.height ) { throw SWException("Frame dims (%dx%d) don't match video's (%dx%d) \n", video_frame.width, video_frame.height, m_options.width, m_options.height ); } // Convert color int widthbytes = video_frame.bytesperpel*video_frame.width; // input is flipped vertically unsigned char *src = (unsigned char *)video_frame.bfr + widthbytes*(video_frame.height-1); int stride = -widthbytes; ColorSpace::rgb32_to_yuv420p(m_pVFrameForEnc->data[0], m_pVFrameForEnc->data[1], m_pVFrameForEnc->data[2], src, video_frame.width, video_frame.height, stride); m_pVFrameForEnc->format = m_pVCodecContext->pix_fmt; m_pVFrameForEnc->width = video_frame.width; m_pVFrameForEnc->height = video_frame.height; // Encode current frame. printf("Handling frame #%ld\n", m_lVEncodedFrames); m_pVFrameForEnc->pict_type = video_frame.is_key ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_NONE; //FF_I_TYPE : 0; int flushed = 0; if ( m_pPlaylistWriter ) { if ( !switch_segment_if_needed( GetCurPtsMS(), &flushed ) ) return false; } m_pVFrameForEnc->width = video_frame.width; m_pVFrameForEnc->height = video_frame.height; int encoded = 0; if ( !EncodeAndWriteFrame( m_pVFrameForEnc, &encoded) ) return false; *out_size = flushed + encoded; m_logger.info("encode vFrame : return flushed %d , encoded %d, out_size : %d", flushed, encoded, *out_size); return true; } bool CFFMpegWriter::FlushEncoder(int *flushed) { int out_size; m_logger.trace("Flushing\n"); do { m_logger.trace("Flushing\n"); if ( !EncodeAndWriteFrame( NULL, &out_size ) ) { throw SWException("Encoder flushing problems\n"); } m_logger.trace("Flushed : %d", out_size); *flushed = *flushed + out_size; } while ( out_size > 0 ); m_logger.trace("Flushing end. 
rendered : %d\n", *flushed); if (m_options.padded) { if (!m_mediaList.empty()){ m_logger.trace("Flushing for padding\n"); while (m_mediaList.size() > 0){ int mediaType = m_mediaList.front(); AVPacket packet; if (mediaType == AVMEDIA_TYPE_VIDEO){ write_packet(vpacketList, (AVMediaType) mediaType); } else{ write_packet(apacketList, (AVMediaType) mediaType); } m_mediaList.pop_front(); } } } m_logger.info("V = %d A=%d\n", nVideoCount, nAudioCount); return true; } void CFFMpegWriter::createPaddedPacketFromPkt(AVPacket& pkt, AVPacket* paddedPkt, int packetSize){ int new_size = packetSize; av_init_packet(paddedPkt); paddedPkt->data = (uint8_t *)malloc( new_size ); memcpy( paddedPkt->data, pkt.data, pkt.size ); for ( int i = pkt.size; i < new_size; i++ ) paddedPkt->data[i] = 0xff; paddedPkt->size = new_size; paddedPkt->stream_index = pkt.stream_index; if (pkt.stream_index == (int)AVMEDIA_TYPE_VIDEO){ paddedPkt->dts = pkt.dts; paddedPkt->pts = pkt.pts; } else{ pkt.pts = pkt.dts = AV_NOPTS_VALUE; } //paddedPkt->buf->size = new_size; } // bool CFFMpegWriter::EncodeAndWriteFrame(AVFrame *frameForEnc, int *out_size) { bool set_pts = m_pVCodecContext ->codec_id == AV_CODEC_ID_H264 || m_pVCodecContext->codec_id == AV_CODEC_ID_VP8; if ( frameForEnc != NULL ) { // Only for h264 we want to set pts if ( set_pts ) { frameForEnc->pts = m_lVEncodedFrames * m_pVCodecContext->ticks_per_frame; } } int gotPicture = 0; AVPacket pkt, paddedPkt; av_init_packet(&pkt); pkt.data = NULL; pkt.size = 0; if (frameForEnc != NULL) { m_logger.info("ENCODE FRAME Width = %d height=%d channels=%d format=%d\n", frameForEnc->width, frameForEnc->height, frameForEnc->channels, frameForEnc->format); } int ret = avcodec_encode_video2(m_pVCodecContext, &pkt, frameForEnc,&gotPicture); if (ret < 0) { perror("Encoder error:\n"); char errorString[AV_ERROR_MAX_STRING_SIZE]; av_log(0, 0, "Err: %s\n", av_make_error_string(errorString, AV_ERROR_MAX_STRING_SIZE, ret)); throw SWException( "CFFMpegVEncoder::EncodeNextFrame(): Error (larisa): Cannot encode current frame %d %s\n", ret, errorString); } *out_size = pkt.size; if (frameForEnc) m_lVEncodedFrames++; if (gotPicture) { if (!set_pts) { pkt.pts = pkt.dts = AV_NOPTS_VALUE; } bool use_padding = false; #ifdef DEBUG_PADDING use_padding=true; #endif if (m_options.padded || use_padding) { if (pkt.size > m_options.vpacket_size){ m_progressTracker->exceedPaddedSize((int)AVMEDIA_TYPE_VIDEO, nVideoCount, pkt.size, m_options.vpacket_size); m_logger.error("Error: (v) Frame size is too big: Frame Number: <%d> Current: <%d>, Defined max frame size: <%d>\n", nVideoCount, pkt.size, m_options.vpacket_size); return false; } createPaddedPacketFromPkt(pkt, &paddedPkt, m_options.vpacket_size); *out_size = pkt.size; } //write the first frame size to the notification if (m_vFirstFrame){ char buf[10]; sprintf(buf, "%d", pkt.size); m_progressTracker->writeEvent("First frame size", buf); m_vFirstFrame = !m_vFirstFrame; } static int totalSize = 0, npkts = 0, maxSize = 0; totalSize += pkt.size; if ( pkt.size > maxSize ) maxSize = pkt.size; npkts++; m_logger.info("Writing (v) pkt of size %d (key=%d) avg=%d, max=%d packetNumber %d\n", pkt.size, pkt.flags&AV_PKT_FLAG_KEY, totalSize/npkts, maxSize, nPacketCount); #ifdef DEBUG_PADDING nPacketCount++; nVideoCount++; m_logger.trace("Writing (v) pkt of size %d (key=%d) avg=%d, max=%d packetNumber %d\n", pkt.size, pkt.flags&AV_PKT_FLAG_KEY, totalSize/npkts, maxSize, nPacketCount); #endif int err = write_frame(m_pAVFormatContext, &m_pVCodecContext->time_base, m_pVStream, &pkt, 
&paddedPkt); //av_free_packet(&pkt); if (err < 0) { throw SWException("CFFMpegWriter::EncodeVFrame(): Error: Cannot write frame\n"); } if ( m_passLogFile ) fprintf(m_passLogFile, "%s", m_pVCodecContext->stats_out); m_logger.info("return out_size : %u", out_size); } else { m_logger.info("GOT PICTURE FALSE"); } return true; } // Encode a audio frame bool CFFMpegWriter::EncodeAFrame(CAudioFrame& audio_frame) { if (!m_bInit) return false; // if ( !m_options.m_bAudio ) if ( m_options.abitrate <= 0 ) return false; int ret, dst_nb_samples; dst_samples_data[0] = (uint8_t *)audio_frame.bfr; dst_nb_samples = m_lASamplesPerFrame; m_pAFrameForFilter->nb_samples = dst_nb_samples; AVRational bq; bq.num = 1; bq.den = m_pACodecContext->sample_rate; m_pAFrameForFilter->pts = av_rescale_q(samples_count, bq, m_pACodecContext->time_base); ret = avcodec_fill_audio_frame(m_pAFrameForFilter, m_pACodecContext->channels, m_pACodecContext->sample_fmt, dst_samples_data[0], dst_samples_size, 0); samples_count += dst_nb_samples; //prepare filtering frame m_pAFrameForFilter->sample_rate = INPUT_SAMPLERATE; m_pAFrameForFilter->format = INPUT_FORMAT; m_pAFrameForFilter->channel_layout = INPUT_CHANNEL_LAYOUT; // m_pAFrameForFilter->channels = INPUT_CHANNEL_NUMBER; m_pAFrameForFilter->nb_samples = dst_nb_samples; av_frame_set_channels(m_pAFrameForFilter, INPUT_CHANNEL_NUMBER); /* Send the frame to the input of the filtergraph. */ int err = av_buffersrc_add_frame(buffer_src, m_pAFrameForFilter); if (err < 0) { //m_logger.error("err = %s", av_err2str(err)); printf( "Error decoding video frame (%d)\n", av_get_channel_layout_nb_channels(INPUT_CHANNEL_LAYOUT)); av_frame_unref(m_pAFrameForFilter); //fprintf(stderr, "Error submitting the frame to the filtergraph:"); throw SWException("Error submitting the frame to the filtergraph:: %d\n", err); ; return true; } /* Get all the filtered output that is available. 
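   av_buffersink_get_samples() hands back frames of exactly
   m_lASamplesPerFrame samples each, which is what the encoder expects; it
   returns AVERROR(EAGAIN) once the sink needs more input, which ends this
   loop until the next input frame is fed into the graph. As a rough worked
   example (typical values, not read from this job's settings): an AAC frame
   is normally 1024 samples, so at 44100 Hz each encoded packet covers about
   1024 / 44100 = 23 ms of audio.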
*/ while ((err = av_buffersink_get_samples(sink, m_pAFrameForEnc, m_lASamplesPerFrame)) >= 0) { AVPacket pkt, paddedPkt; int gotOutput = 0; av_init_packet(&pkt); pkt.data = NULL; pkt.size = 0; if (m_pACodecContext->codec_id == AV_CODEC_ID_VORBIS){ AVRational bq; bq.num = 1; bq.den = m_pAFrameForEnc->sample_rate; m_pAFrameForEnc->pts = av_rescale_q(encoded_samples_count, bq, m_pACodecContext->time_base); encoded_samples_count += m_pAFrameForEnc->nb_samples; m_pACodecContext->frame_size = m_pAFrameForEnc->nb_samples; } /* now emcode filtered frame */ err = avcodec_encode_audio2(m_pACodecContext, &pkt, m_pAFrameForEnc,&gotOutput); if (err < 0) { av_free_packet(&pkt); fprintf(stderr, "Error processing the filtered frame.\n"); m_logger.info("Error processing the filtered frame.\n"); throw SWException("Error encoding audio frame\n."); } if (!gotOutput) { av_free_packet(&pkt); return true; } bool use_padding = false; #ifdef DEBUG_PADDING use_padding=true; #endif if (m_options.padded || use_padding) { if (pkt.size > m_options.apacket_size){ m_progressTracker->exceedPaddedSize((int)AVMEDIA_TYPE_AUDIO, nAudioCount, pkt.size, m_options.apacket_size); //throw SWException("Error: (a) Frame size is too big: Frame Number: <%d> Current: <%d>, Defined max frame size: <%d>\n", nAudioCount, pkt.size, m_options.apacket_size); m_logger.error("Error: (a) Frame size is too big: Frame Number: <%d> Current: <%d>, Defined max frame size: <%d>\n", nAudioCount, pkt.size, m_options.apacket_size); return false; } createPaddedPacketFromPkt(pkt, &paddedPkt, m_options.apacket_size); } #ifdef DEBUG_PADDING nPacketCount++; nAudioCount++; m_logger.trace("Writing (a)pkt of size %d (key=%d) packetNumber %d\n", pkt.size, pkt.flags&AV_PKT_FLAG_KEY, nPacketCount); #endif ret = write_frame(m_pAVFormatContext, &m_pACodecContext->time_base, m_pAStream, &pkt, &paddedPkt); if (ret < 0) { throw SWException("Error while writing audio frame: %d\n", ret); } //av_free_packet(&pkt); av_frame_unref(m_pAFrameForEnc); } av_frame_unref(m_pAFrameForFilter); return true; } // Finish bool CFFMpegWriter::Close() { int rendered = 0; FlushEncoder(&rendered); /* write the trailer, if any */ av_write_trailer(m_pAVFormatContext); /* free the streams */ for(unsigned int i = 0; i < m_pAVFormatContext->nb_streams; i++) { av_freep(&m_pAVFormatContext->streams[i]); } if (m_options.padded){ if (m_paddedIOContext){ avio_close(m_paddedIOContext); } } avio_close(m_pAVFormatContext->pb); if ( m_pPlaylistWriter ) m_pPlaylistWriter->SegmentReady( GetCurPtsMS(), true );// End last segment if(m_pACodecContext){ if (m_pACodecContext->codec) { // Close codec AvcodecCloseThreadSafe(m_pACodecContext); } av_free(m_pACodecContext); m_pACodecContext = NULL; } if(m_pVCodecContext){ if (m_pVCodecContext->codec) { AvcodecCloseThreadSafe(m_pVCodecContext); // Close codec if ( m_pVCodecContext->stats_in ) { av_freep(&m_pVCodecContext->stats_in ); } } av_free(m_pVCodecContext); m_pVCodecContext = NULL; } /* free the stream */ av_free(m_pAVFormatContext); if (graph) { avfilter_graph_free(&graph); } if (!streamIds.empty()){ list<int>::iterator it = streamIds.begin(); int i = 0; int* buf = (int*)malloc(streamIds.size() * sizeof(int)); while(it != streamIds.end()){ buf[i] = *it; //buf[i++] = ','; m_logger.info("BUFFER = %d, \n", buf[i]); i++; it++; } } return true; } AVFrame *CFFMpegWriter::alloc_picture(AVPixelFormat pix_fmt, int width, int height) { AVFrame *picture; uint8_t *picture_buf; int size; picture = av_frame_alloc(); if (!picture) return NULL; size = 
avpicture_get_size(pix_fmt, width, height); picture_buf = (uint8_t *)av_malloc(size); if (!picture_buf) { av_free(picture); return NULL; } avpicture_fill((AVPicture *)picture, picture_buf, pix_fmt, width, height); return picture; } // For playlist output - check if need to switch to next segment, and do it if so bool CFFMpegWriter::switch_segment_if_needed( int time_ms, int *rendered ) { if ( m_pPlaylistWriter->ShouldStartNewSegment( time_ms ) ) { FlushEncoder(rendered); // Close previous file and open new one avio_flush(m_pAVFormatContext->pb); avio_close(m_pAVFormatContext->pb); // Notify playlist we are done with prev segment m_pPlaylistWriter->SegmentReady( time_ms ); const char *output_name = m_pPlaylistWriter->GetNextSegmentFullPath().c_str(); // Name of first segment m_logger.info("Segment ready : %s", output_name); if (avio_open(&m_pAVFormatContext->pb, output_name, AVIO_FLAG_WRITE) < 0) { throw SWException("Could not open %s\n", output_name ); } #ifdef WRITE_HEADER_IN_SEGEMNT av_dump_format(m_pAVFormatContext, 0, m_pAVFormatContext->filename, 1); /* write the stream header, if any */ avformat_write_header(m_pAVFormatContext); #endif m_pPlaylistWriter->SegmentCreated(); #define SEGEMNT_START_RESTART_ENCODER #ifdef SEGEMNT_START_RESTART_ENCODER // Close and re-open codec AvcodecCloseThreadSafe(m_pVCodecContext); AvcodecOpenThreadSafe(m_pVCodecContext, m_pVCodec); #endif m_logger.info( " Switching segment at time=%d\n", time_ms ); } return true; } bool CFFMpegWriter::write_packet(std::list<AVPacket> &list, AVMediaType mediaType){ AVPacket pkt; int rc =0; if (list.empty()){ return false; } AVIOContext *pb = m_paddedIOContext;//m_pAVFormatContext->pb; //get first element from list pkt = list.front(); if (pkt.size <= 0 || pkt.data == NULL){ return false; } uint8_t *reformatted_data = NULL; m_logger.trace("ENC samplesRead %d %s packetSize=%d with pts %lld \n", nPacketCount,( mediaType == AUDIO_STREAM_ID ? 
"(a)" : "(v)" ), pkt.size, pkt.pts); if (mediaType == AVMEDIA_TYPE_VIDEO && m_pVCodecContext->codec_id == AV_CODEC_ID_H264) { //&& trk->vos_len > 0 && *(uint8_t *)trk->vos_data != 1 && !TAG_IS_AVCI(trk->tag)) { /* from x264 or from bytestream h264 */ /* nal reformating needed */ int size = pkt.size; avc::ff_avc_parse_nal_units_buf(pkt.data, &reformatted_data,&size); avio_write(pb, reformatted_data, size); //m_logger.trace("reformatted size = %d\n", size); av_free(reformatted_data); } else{ avio_write(pb, pkt.data, pkt.size); } if ( rc < 0 ) { m_logger.error("av_interleaved_write_frame returned rc=%d\n nPacketCount=%d", rc, nPacketCount); } if (mediaType == AUDIO_STREAM_ID) { nAudioCount++; } else{ nVideoCount++; } av_free_packet(&pkt); avio_flush(pb); //delete packet from list list.pop_front(); // increase pcket counter nPacketCount++; return true; } int CFFMpegWriter::write_packet_list(AVStream *st,AVPacket *pkt){ int rc = 0; int mediaListSize = m_mediaList.size(); bool writePacket = mediaListSize > 0; while (writePacket){ int mediaType = m_mediaList.front(); //bring packet to write if (mediaType == (int)AVMEDIA_TYPE_VIDEO){ writePacket = write_packet(vpacketList, (AVMediaType)mediaType); } else{ writePacket = write_packet(apacketList, (AVMediaType)mediaType); } if (writePacket){ m_mediaList.pop_front(); } // all packetss was read if (nPacketCount >= mediaListSize) { writePacket = false; } } return rc; } //static long startTick = GetTickCount(); int CFFMpegWriter::write_frame(AVFormatContext *fmt_ctx, const AVRational *time_base, AVStream *st, AVPacket *pkt, AVPacket *paddedPkt) { int rc = 0; /* rescale output packet timestamp values from codec to stream timebase */ if (pkt->pts != AV_NOPTS_VALUE) { pkt->pts = av_rescale_q_rnd(pkt->pts, *time_base, st->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX)); } if (pkt->dts != AV_NOPTS_VALUE) { pkt->dts = av_rescale_q_rnd(pkt->dts, *time_base, st->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX)); //printf("Resacle dts #%ld\n", pkt->dts); } if (pkt->duration > 0) { pkt->duration = (int)av_rescale_q(pkt->duration, *time_base, st->time_base); //printf("Resacle duration #%ld\n", pkt->duration); } //printf("Packet duration #%ld\n", pkt->duration); pkt->stream_index = st->index; #ifdef DEBUG_PADDING if (st->id == VIDEO_STREAM_ID ){ m_logger.trace("Writing"); } m_logger.trace("Writing %s sample with pts %lld stream_id=%d\n", ( st->id == AUDIO_STREAM_ID ? "audio" : "video" ), pkt->pts, st->id); /* Write the compressed frame to the media file. */ rc = av_interleaved_write_frame(fmt_ctx, paddedPkt); if ( rc < 0 ) { m_logger.error("av_interleaved_write_frame returned rc=%d\n", rc); } streamIds.push_back(st->id); av_free_packet(paddedPkt); av_free_packet(pkt); //av_packet_rescale_ts(&pkt, mt_ctx->time_base, time_base); #else if (m_options.padded) { if (st->id == AUDIO_STREAM_ID){ apacketList.push_back(*paddedPkt); } else{ vpacketList.push_back(*paddedPkt); } rc = write_packet_list(st, paddedPkt); } m_logger.info("Writing %s sample with pts %lld\n", ( st->id == AUDIO_STREAM_ID ? "audio" : "video" ), pkt->pts); /* Write the compressed frame to the media file. 
*/ rc = av_interleaved_write_frame(fmt_ctx, pkt); if ( rc < 0 ) { m_logger.error("av_interleaved_write_frame returned rc=%d\n", rc); } av_free_packet(pkt); #endif return rc; } int CFFMpegWriter::GetTotalBitrate() { int br = m_options.vbitrate; if ( m_options.abitrate > 0 ) br += m_options.abitrate ; return br; } bool CFFMpegWriter::SetPresetsFromFile( const char *filename) { int type=AV_OPT_FLAG_VIDEO_PARAM; int ret = 0; // From ffmpeg.c char tmp[1000], tmp2[1000], line[1000]; FILE *f= fopen(filename, "r"); if(!f) { throw SWException("File for preset '%s' not found\n", filename); } while(!feof(f)) { int e= fscanf(f, "%999[^\n]\n", line) - 1; if(line[0] == '#' && !e) continue; e|= sscanf(line, "%999[^=]=%999[^\n]\n", tmp, tmp2) - 2; if(e){ throw SWException("%s: Invalid syntax: '%s'\n", filename, line); } int ret = av_opt_set(&m_pVCodecContext->av_class, tmp, tmp2, 1); //this is for new ffmpeg if (ret != 0){ // Older version of h264 doesn't recognize all the options m_logger.trace("%s: Ignoring unknown option or argument: '%s', parsed as '%s' = '%s'\n", filename, line, tmp, tmp2); } } fclose(f); return true; } int CFFMpegWriter::GetCurPtsMS() { return (int)( ((double)m_lVEncodedFrames)*m_options.framerate_scale*1000/m_options.framerate + 0.5 ); // pts in ms } ////////////////// // Functions for opening / closing codec. // These are thread safe to protect ffmpeg structures void CFFMpegWriter::AvcodecOpenThreadSafe(AVCodecContext *avctx, AVCodec *codec) { #ifdef WIN32 WaitForSingleObject(avcodec_lock_mutex, INFINITE); #else pthread_mutex_lock(&avcodec_lock_mutex); #endif int rc = avcodec_open2(avctx, codec, &m_codecOptions); if ( rc < 0) { #ifdef WIN32 ReleaseMutex(avcodec_lock_mutex); throw SWException("Encoder : could not open codec\n" + GetLastError()); #else pthread_mutex_unlock(&avcodec_lock_mutex); throw SWException("Encoder : could not open codec\n"); #endif } #ifdef WIN32 ReleaseMutex(avcodec_lock_mutex); #else pthread_mutex_unlock(&avcodec_lock_mutex); #endif } void CFFMpegWriter::AvcodecCloseThreadSafe(AVCodecContext *avctx) { #ifdef WIN32 WaitForSingleObject(avcodec_lock_mutex, INFINITE); #else pthread_mutex_lock(&avcodec_lock_mutex); #endif avcodec_close(avctx); #ifdef WIN32 ReleaseMutex(avcodec_lock_mutex); #else pthread_mutex_unlock(&avcodec_lock_mutex); #endif } <file_sep>/Fmod/Parser.cpp #include "afx.h" #include "Parser.h" #include "tinyxml.h" #include "LinearInterpolated.h" #include "ConstantInterpolated.h" #include <stdlib.h> using namespace std; Parser::Parser() { } bool Parser::parse(std::string filePath) { TiXmlDocument *doc = new TiXmlDocument(filePath.c_str()); if (!doc->LoadFile()) { return false; } TiXmlElement *audioClip = doc->FirstChildElement("AudioClip"); parseAudioParts(audioClip); parseEffectGroups(audioClip); return true; } double Parser::getDoubleAttribute(const char* value) { if (value == NULL) { return 0; } return atof(value); } const char* Parser::getStringAttribute(const char* value) { if (value == NULL) { return NULL; } return value; } bool Parser::parseAudioParts(TiXmlElement *audioClip) { TiXmlElement *audioPartsXml = audioClip->FirstChildElement("SimpleParts"); for (TiXmlElement *audioPartXml = audioPartsXml->FirstChildElement(); audioPartXml != 0; audioPartXml = audioPartXml->NextSiblingElement()) { AudioPart *audioPart = new AudioPart(); std::string role; std::string type; std::string origin; std::string speaker; std::string source = audioPartXml->FirstChildElement("Source")->GetText(); audioPart->setSource(source); // id should be added - 
IMPORTANT const char* id = getStringAttribute(audioPartXml->Attribute("id")); if (id != NULL) { audioPart->setId(id); } double startTime = getDoubleAttribute(audioPartXml->Attribute("start")); audioPart->setStart(startTime); double endTime = getDoubleAttribute(audioPartXml->Attribute("end")); audioPart->setEnd(endTime); double paceFactor = getDoubleAttribute(audioPartXml->Attribute( "paceFactor")); audioPart->setPaceFactor(paceFactor); double offset = getDoubleAttribute(audioPartXml->Attribute("offset")); audioPart->setOffset(offset); TiXmlElement *roleXml = audioPartXml->FirstChildElement("Role"); if (roleXml != NULL) { role = roleXml->GetText(); setAudioRole(audioPart, role); } TiXmlElement *typeXml = audioPartXml->FirstChildElement("Type"); if (typeXml != NULL) { type = typeXml->GetText(); setAudioType(audioPart, type); } TiXmlElement *originXml = audioPartXml->FirstChildElement("Origin"); if (originXml != NULL) { origin = originXml->GetText(); setAudioOrigin(audioPart, origin); } TiXmlElement *speakerXml = audioPartXml->FirstChildElement("Speaker"); if (speakerXml != NULL) { speaker = speakerXml->GetText(); setAudioSpeaker(audioPart, speaker); } TiXmlElement *effectsXml = audioPartXml->FirstChildElement("Effects"); if (effectsXml != NULL) { std::vector<Effect *> effects; TiXmlElement *effectXml = 0; for (TiXmlElement *effectsXml = effectXml->FirstChildElement(); effectXml != 0; effectXml = effectXml->NextSiblingElement()) { parseEffect(&effects, effectXml); } audioPart->setEffects(effects); } TiXmlElement *volumesXml = audioPartXml->FirstChildElement( "VolumeLevels"); if (volumesXml != NULL) { std::vector<Interpolated *> volumes; TiXmlElement *volumeXml = 0; for (TiXmlElement *volumeXml = volumesXml->FirstChildElement(); volumeXml != 0; volumeXml = volumeXml->NextSiblingElement()) { parseVolume(&volumes, volumeXml); } audioPart->setVolume(volumes); } clipAudioParts.push_back(audioPart); } return true; } void Parser::setAudioRole(AudioPart *audioPart, std::string role) { if (role == "background") { audioPart->setRole(BACKGROUND); } else if (role == "narration") { audioPart->setRole(NARRATION); } else if (role == "effect") { audioPart->setRole(EFFECT); } } void Parser::setAudioOrigin(AudioPart *audioPart, std::string origin) { if (origin == "studio") { audioPart->setOrigin(STUDIO); } else if (origin == "internet") { audioPart->setOrigin(INTERNET); } else if (origin == "home") { audioPart->setOrigin(HOME); } else if (origin == "tts") { audioPart->setOrigin(TTS); } } void Parser::setAudioType(AudioPart *audioPart, std::string type) { if (type == "music") { audioPart->setType(MUSIC); } else if (type == "speech") { audioPart->setType(SPEECH); } else if (type == "effect") { audioPart->setType(SFX); } } void Parser::setAudioSpeaker(AudioPart *audioPart, std::string speaker) { if (speaker == "dave") { audioPart->setSpeaker(DAVE); } else if (speaker == "susan") { audioPart->setSpeaker(SUSAN); } else if (speaker == "kate") { audioPart->setSpeaker(KATE); } else if (speaker == "simon") { audioPart->setSpeaker(SIMON); } } void Parser::parseEffect(std::vector<Effect*> *effects, TiXmlElement *effectXml) { std::string effectType = getStringAttribute(effectXml->Attribute("type")); // FUTURE ADD-ON - SWITCH BETWEEN THE DIFFERENT TYPES } void Parser::parseVolume(std::vector<Interpolated *> *volumes, TiXmlElement *volumeXml) { std::string volumeType = volumeXml->Value(); double startTime = getDoubleAttribute(volumeXml->Attribute("startTime")); double endTime = 
getDoubleAttribute(volumeXml->Attribute("endTime")); Interpolated *volume = 0; if (volumeType == "ConstantVolumeLevel") { double value = getDoubleAttribute(volumeXml->Attribute("value")); ConstantInterpolated *constantVolume = new ConstantInterpolated(value, startTime, endTime); volume = constantVolume; } else if (volumeType == "LinearVolumeLevel") { double startValue = getDoubleAttribute(volumeXml->Attribute( "startValue")); double endValue = getDoubleAttribute(volumeXml->Attribute("endValue")); LinearInterpolated *linearVolume = new LinearInterpolated(startValue, endValue, startTime, endTime); volume = linearVolume; } volumes->push_back(volume); } bool Parser::parseEffectGroups(TiXmlElement *audioClip) { TiXmlElement *effectGroupsXml = audioClip->FirstChildElement("EffectGroups"); for (TiXmlElement *effectGroupXml = effectGroupsXml->FirstChildElement(); effectGroupXml != 0; effectGroupXml = effectGroupXml->NextSiblingElement()) { EffectGroup *effectGroup = new EffectGroup(); const char *id = getStringAttribute(effectGroupXml->Attribute("id")); if (id != NULL) { effectGroup->setId(id); } effectGroup->setPartsIDs(parseEffectGroupsPartsIDs(effectGroupXml)); effectGroup->setEffects(parseEffectGroupsEffects(effectGroupXml)); clipEffectGroups.push_back(effectGroup); } return true; } std::vector<std::string> Parser::parseEffectGroupsPartsIDs( TiXmlElement *effectGroupXml) { std::vector < std::string > partsIDs; TiXmlElement *partsXml = effectGroupXml->FirstChildElement("Parts"); for (TiXmlElement *partXml = partsXml->FirstChildElement(); partXml != 0; partXml = partXml->NextSiblingElement()) { const char *id = getStringAttribute(partXml->Attribute("id")); if (id != NULL) { partsIDs.push_back(id); } } return partsIDs; } std::vector<Effect *> Parser::parseEffectGroupsEffects( TiXmlElement *effectGroupXml) { std::vector<Effect *> effects; TiXmlElement *effectsXml = effectGroupXml->FirstChildElement("Effects"); for (TiXmlElement *effectXml = effectsXml->FirstChildElement(); effectXml != 0; effectXml = effectXml->NextSiblingElement()) { parseEffect(&effects, effectXml); } return effects; } std::vector<AudioPart *> Parser::getClipAudioParts() { return clipAudioParts; } std::vector<EffectGroup *> Parser::getClipEffectGroups() { return clipEffectGroups; } <file_sep>/NewEncoder/VideoEncoder/WMVWriter.h #pragma once #include <wmsdk.h> #include <wmsysprf.h> #include "VideoWriter.h" #include "Logger.h" #ifndef WMFORMAT_SDK_VERSION #define WMFORMAT_SDK_VERSION WMT_VER_9_0 #endif class CWMVWriter : public CVideoWriter { private: Logger & m_logger; public: CWMVWriter(Logger & logger); virtual ~CWMVWriter(); bool Init(CVideoWriterOptions &options); virtual bool BeginWrite() { return true; } // Encode a video frame virtual bool EncodeVFrame(CVideoFrame& video_frame, int* out_size=0); // Encode a audio frame virtual bool EncodeAFrame(CAudioFrame& audio_frame); // Finish virtual bool Close(); private: IWMProfile *m_pWMProfile; IWMWriter *m_pWMWriter; IWMInputMediaProps *m_pVideoProps; IWMProfileManager *m_pWMProfileManager; TCHAR m_szErrMsg[MAX_PATH]; DWORD m_dwVideoInput; DWORD m_dwCurrentVideoSample; QWORD m_msVideoTime; //time in ms DWORD m_dwFrameRate; // Frames Per Second Rate (FPS) IWMInputMediaProps *m_pAudioProps; DWORD m_dwAudioInput; DWORD m_dwCurrentAudioSample; QWORD m_AudioTime_cns; //time in 100nanosecs bool m_bInit; bool m_bIsAudio; // int m_nAppendFuncSelector; //0=Dummy 1=FirstTime 2=Usual /* HRESULT AppendFrameFirstTime(HBITMAP ); HRESULT AppendFrameUsual(HBITMAP); HRESULT AppendDummy(HBITMAP); 
HRESULT (CWMVWriter::*pAppendFrame[3])(HBITMAP hBitmap); */ // HRESULT AppendFrameFirstTime(int, int, LPVOID,int ); HRESULT AppendFrameUsual(int, int, LPVOID,int ); // HRESULT AppendDummy(int, int, LPVOID,int ); // HRESULT (CWMVWriter::*pAppendFrameBits[3])(int, int, LPVOID,int ); /// Takes care of creating the memory, streams, compression options etc. required for the movie HRESULT InitVideoStream(int nFrameWidth, int nFrameHeight, int nBitsPerPixel); HRESULT InitAudioStream( int nChannels, int nSamplesPerSec ); /// Takes care of releasing the memory and movie related handles void ReleaseMemory(); /// Sets the Error Message void SetErrorMessage(LPCTSTR lpszErrMsg); HRESULT LoadCustomProfile( LPCTSTR ptszProfileFile, WMT_VERSION ver, IWMProfile ** ppIWMProfile ); public: /* /// <Summary> /// Constructor accepts the filename, ProfileGUID and frame rate settings /// as parameters. /// lpszFileName: Name of the output movie file to create /// guidProfileID: GIUD of the Video Profile to be used for compression and other Settings /// dwFrameRate: The Frames Per Second (FPS) setting to be used for the movie /// </Summary> CWMVWriter(LPCTSTR lpszFileName = _T("Output.wmv"), const GUID& guidProfileID = WMProfile_V80_384Video, DWORD dwFrameRate = 1); /// <Summary> /// Destructor closes the movie file and flushes all the frames /// </Summary> ~CWMVWriter(void); /// </Summary> /// Inserts the given HBitmap into the movie as a new Frame at the end. /// </Summary> HRESULT AppendNewFrame(HBITMAP hBitmap); /// </Summary> /// Inserts the given bitmap bits into the movie as a new Frame at the end. /// The width, height and nBitsPerPixel are the width, height and bits per pixel /// of the bitmap pointed to by the input pBits. /// </Summary> HRESULT AppendNewFrame(int nWidth, int nHeight, LPVOID pBits,int nBitsPerPixel=32); /// <Summary> /// Returns the last error message, if any. /// </Summary> LPCTSTR GetLastErrorMessage() const { return m_szErrMsg; }*/ }; <file_sep>/RenderingManager/RenderingManager/include/Engine.h #pragma once #include <string> #include <map> #include <time.h> #ifndef WIN32 #include <pthread.h> #endif #include "DebugLevel.h" #include "EngineLogger.h" using namespace std; // Forward declaration. // include "RenderingManager.h" will not do because RenderingManager includes "Engine.h" and class Engine holds a pointer to RenderingManager. // Therefore, CliManager must be before RenderingManager in compilation order. class RenderingManager; enum EngineState { CREATED, STARTED, // run() was called in the main thread. PENDING, // Engine cannot yet run due to a limitation on the number of running engines. RUNNING, // entryPoint() was called in the engine thread. 
DONE }; class Engine { private: int m_argc; char ** m_argv; int m_firstArgIndex; #ifdef WIN32 uintptr_t m_threadHandle; #else pthread_t m_threadHandle; #endif string m_engineType; string m_executionId; EngineState m_state; RenderingManager * m_rndMng; DebugLevel m_debugLevel; clock_t m_startExecutionTime; protected: EngineLogger m_logger; map<string, void *> * m_commonStorage; public: Engine(); // Should be called right after the creation of Engine bool init(RenderingManager * rndMng); // Runs in the main thread void run(int argc, char** argv, int firstArgIndex); string getEngineType() {return m_engineType;}; string getExecutionId() {return m_executionId;}; clock_t getStartTime() {return m_startExecutionTime;}; EngineState getEngineState() {return m_state;}; DebugLevel getDebugLevel() {return m_debugLevel;}; void setDebugLevel(DebugLevel debugLevel) {m_debugLevel = debugLevel;}; #ifdef WIN32 uintptr_t getThreadHandle() {return m_threadHandle;} #else pthread_t getThreadHandle() { return m_threadHandle; } #endif static string engineStateToString(EngineState state); protected: virtual ~Engine() {}; // NOTE: argv contains only the arguments, not the command line. // So if there are no argumnets then argc is 0 (not 1). virtual void executeEngine(int argc, char** argv) = 0; // Just before an engine is to be killed, the stop() method is invoked. // In this function the inheriting engine class may choose to stop in an orderly fashion and/or dump debug information. // No need to free allocated memory since the process is about to terminate anyway. virtual void stopEngine(bool isTriggeringEngine) {}; // Launch a new thread and call Engine::EntryPoint from it. (this function returns immediately!) #ifdef WIN32 virtual uintptr_t launchEngineThread(); #else pthread_t launchEngineThread(); #endif static void entryPoint(void * pthis); private: // This function aborts the engine with an error message. // Such an exit is logged in the notification file and causes all other engine executions to cease. 
void abortEngineWithError(const char *format, ...); friend class EngineFactory; friend class EngineManager; friend class EngineLogger; }; <file_sep>/VideoStream/VideoStream/VSVideoImpl.h #pragma once #include "VSMedia.h" #include "VSISync.h" #include "VSVideo.h" namespace VideoStream { class CVideoStreamImpl { private: LPTSTR _processVerb; protected: int _streamId; int _curFrame; std::string _bareLocation; MediaLocationParams _locationParams; CVideoStreamImpl (LPCTSTR processVerb); virtual ~CVideoStreamImpl(); void _Open (LPCTSTR location); void _Close(); void FrameWasProcessed (LPCTSTR processVerb = NULL); void Write2Log (LPCTSTR msg); }; class CInputVideoStreamImpl : public CVideoStreamImpl, public CInputVideoStream { private: IInputVideoStream *_pStream; IInputSync *_pSync; bool _eov; int _slowFrame; void FrameWasRead(LPCTSTR processVerb = NULL); bool HandshakeWithProducer (LPCTSTR bareLocation, bool waitForProducer, DWORD timeoutMS); bool OpenInputStream (LPCTSTR location); static bool WaitForFrame (void *pThis, HANDLE readEndEventHandle); bool WaitForFrame(HANDLE readEndEventHandle); public: CInputVideoStreamImpl(); virtual ~CInputVideoStreamImpl(); bool Open (LPCTSTR location, unsigned int startFrame = 0); CFramePtr GetNextFrame(); CFramePtr GetFrame(int frameNum) { if (frameNum >= _curFrame && FastForward (frameNum - _curFrame)) return GetNextFrame(); else return CFramePtr(); } bool FastForward (unsigned int frames); bool IsEOV() {return _eov;} void Close(); const VIDEOINFO &GetVideoInfo() const; unsigned int GetWidth() const; unsigned int GetHeight() const; unsigned int GetFrameRate() const; unsigned int GetBitCount() const; }; class COutputVideoStreamImpl : public CVideoStreamImpl, public COutputVideoStream { private: IOutputVideoStream *_pStream; IOutputSync *_pSync; VIDEOINFO _vi; int _slowFrame; // A COutputVideoStreamImpl can also be a multiplexer. unsigned int _numOfVidoeStreams; // 0 - if not a multiplexer, otherwise at least 2. 
COutputVideoStreamImpl *_videoStreamArray; char *_locations; private: static void FrameWasWritten (void *pThis); void FrameWasWritten(); public: COutputVideoStreamImpl(); virtual ~COutputVideoStreamImpl(); bool Open (LPCTSTR location, const VIDEOINFO &vi); void Close(); bool WriteFrame (CFramePtr framePtr); }; };<file_sep>/RenderingManager/RenderingManager/TestEngine.cpp #include <iostream> #include "TestEngine.h" #include "SWException.h" #ifndef WIN32 #include <string.h> #include <unistd.h> #endif using namespace std; void TestEngine::executeEngine(int argc, char** argv) { // Print storage parameters m_logger.info("Common Storage:\n"); map<string,void *>::iterator iter; for(iter = m_commonStorage->begin(); iter != m_commonStorage->end(); iter++ ) { m_logger.info("%s\n", iter->first.c_str()); } if (argc>0 && !strcmp(argv[0], "hello")) { throw SWException ("HELP\n"); } m_logger.trace("argc = %d\n", argc); for (int i=0; i<argc; i++) { m_logger.trace("argv[%d] = %s\n", i, argv[i]); } m_logger.trace("printing a trace\n"); m_logger.info("printing an info\n"); m_logger.warning("printing a warning\n"); m_logger.error("printing an error\n"); #ifdef WIN32 Sleep(20000); #else usleep(20000 * 1000); #endif m_logger.warning("woke up after 20 seconds\n"); } <file_sep>/VideoStream/VideoStream/VSISync.h #pragma once namespace VideoStream { class IInputSync { public: virtual ~IInputSync() {}; virtual bool WaitForFrame (HANDLE readEndEventHandle) = 0; virtual void FrameWasRead() = 0; virtual void Close() = 0; virtual bool HasProducingEnded( DWORD timeoutMs ) = 0; virtual void Sleep( DWORD timeoutMs ) = 0; }; class IOutputSync { public: virtual ~IOutputSync() {}; virtual void WaitForBufferSpace() = 0; virtual void FrameWasWritten() = 0; virtual int HowManyFramesConsumed() = 0; virtual void Started() = 0; virtual void VideoWasComplete() = 0; virtual void WaitForConsumerEnd() = 0; }; }<file_sep>/VideoDecoder/VideoDecoder/VideoDecoder.h #pragma once #include <string> #include "Engine.h" class VideoDecoder : public Engine { public: void executeEngine(int argc, char **argv); static void init(map<string, void *> *commonStorage); // init decoders if needed private: void parse_args(int argc, char* argv[], char **infile, char **outurl ); }; <file_sep>/utils/Utils/CMutex.h /* * CMutex.h * * Created on: Oct 24, 2013 * Author: eranv */ #ifndef CMUTEX_H_ #define CMUTEX_H_ #define TRUE 1 #define FALSE 0 #include <string> #include "WindowDefinitions.h" using namespace std; union semun { int val; /* Value for SETVAL */ struct semid_ds *buf; /* Buffer for IPC_STAT, IPC_SET */ unsigned short *array; /* Array for GETALL, SETALL */ struct seminfo *__buf; /* Buffer for IPC_INFO (Linux-specific) */ }; int CreateNamedMutex(semun& arg, int bInitilaizedOwner, const char* mutexName); int MutexLock(int& namedMutex, DWORD timeout); int ReleaseMutex(int& namedMutex); int DeleteMutex(int& namedMutex); #endif /* CMUTEX_H_ */ <file_sep>/Fmod/MixerEnums.h #ifndef __MIXER_ENUMS_H__ #define __MIXER_ENUMS_H__ typedef enum { BACKGROUND, NARRATION, EFFECT } AudioRole; typedef enum { STUDIO, HOME, INTERNET, TTS } AudioOrigin; typedef enum { MUSIC, SPEECH, SFX } AudioType; typedef enum { DAVE, SUSAN, KATE, SIMON } AudioSpeaker; #endif<file_sep>/Fmod/Events/SequenceEvent.h #ifndef __SEQUENCE_EVENT_H__ #define __SEQUENCE_EVENT_H__ #include "SyncPointEventDescriptor.h" class SequenceEvent : public SyncPointEventDescriptor { private: int audioPart; public: SequenceEvent(int ap) { audioPart = ap; } int apply(FMOD::Channel *channel) { return audioPart; 
} int getAudioPart() { return audioPart; } }; #endif <file_sep>/VideoStream/VideoStream/include/VSLocalMemStreamManager.h #pragma once #include <map> #ifdef WIN32 #include <windows.h> #else #include <pthread.h> #endif #include "VSFrame.h" using namespace std; namespace VideoStream { // Forward declaration class CLocalMemStream; // CLocalMemStreamManager is a class of static methods, since there is only one manager. class CLocalMemStreamManager { private: static map<string,CLocalMemStream *> m_localStreams; #ifdef WIN32 static HANDLE m_mutex; #else static pthread_mutex_t m_mutex; #endif static bool m_isInitialized; public: // Should be called once at the beginning of the process static bool init(); // Reader API static CLocalMemStream *openLocalStreamForReading(string & streamName, DWORD readerTimeoutMS); static void closeLocalStreamForReading(CLocalMemStream * localStream); // Writer API static CLocalMemStream *openLocalStreamForWriting(string & streamName, const VIDEOINFO &vi, const string & codecstr, int bps, int maxBufferSize, DWORD writerTimeoutMS); static void closeLocalStreamForWriting(CLocalMemStream * localStream); // Pause/resume static void pauseAllStreams(); static void resumeAllStreams(); // Debug static void printReport(FILE *outputFile); }; }<file_sep>/VideoStream/VideoStream/ThreadOutput.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSThreadOutput.h" #else #include "VSThreadOutput.h" #include <stdio.h> #endif using namespace VideoStream; #ifdef WIN32 unsigned long __stdcall CThreadOutput::ThreadEntryProc(void *pThis) { return ((CThreadOutput *)pThis)->ThreadRun(); } #endif CThreadOutput::CThreadOutput (IOutputVideoStream *pStream) : _pStream (pStream) { const int maxQueueSize = 30; // limit the queue size to prevent big memory consumption InitializeCriticalSection (&_mutex); _frameReady = CreateEvent (0, FALSE, FALSE, 0); _framesDone = CreateEvent (0, FALSE, FALSE, 0); #ifdef WIN32 _queueBuffer = CreateSemaphore (0, maxQueueSize, maxQueueSize, 0); #else sem_init(&_queueBuffer, 0, maxQueueSize); #endif _shouldStop = false; _pFrameCallback = NULL; } CThreadOutput::~CThreadOutput() { Close(); DeleteCriticalSection(&_mutex); CloseHandle(_frameReady); CloseHandle(_framesDone); #ifdef WIN32 CloseHandle(_queueBuffer); #else sem_destroy(&_queueBuffer); #endif delete _pStream; } #ifndef WIN32 void* CThreadOutput::ThreadEntryProc(void *pThis) { CThreadOutput* output = (CThreadOutput*)pThis; // output->_threadId = pthread_self(); //return ((CThreadOutput *)pThis)->ThreadRun(); output->ThreadRun(); } #endif bool CThreadOutput::Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams, int streamId) { if (!_pStream->Open (location, vi, mlParams, streamId)) return false; #ifdef WIN32 _thread = CreateThread( NULL, // LPSECURITY_ATTRIBUTES lpThreadAttributes, 0, // SIZE_T dwStackSize, ThreadEntryProc, // LPTHREAD_START_ROUTINE lpStartAddress, this, // LPVOID lpParameter, 0, // dwCreationFlags &_threadId); #else pthread_create(&_thread, NULL, CThreadOutput::ThreadEntryProc, this); #endif return true; } bool CThreadOutput::WriteFrame (CFramePtr framePtr) { #ifdef WIN32 WaitForSingleObject(_queueBuffer, INFINITE); // wait until the queue is not full #else sem_wait(&_queueBuffer); #endif EnterCriticalSection(&_mutex); _queue.push(framePtr); LeaveCriticalSection(&_mutex); SetEvent(_frameReady); return true; } void CThreadOutput::Close() { if (_shouldStop == false) { _shouldStop = true; SetEvent(_frameReady); WaitForSingleObject(_framesDone, INFINITE); _pStream->Close(); } } 
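// ThreadRun() below is the consumer side of the bounded producer/consumer
// queue set up in the constructor: it blocks on _frameReady until WriteFrame()
// has queued at least one frame, drains the queue, hands each frame to the
// wrapped IOutputVideoStream, releases one slot of the _queueBuffer semaphore
// so a blocked producer may continue, and invokes the optional frame-written
// callback. When Close() sets _shouldStop and signals _frameReady, the loop
// exits and _framesDone is raised so Close() can return.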
unsigned long CThreadOutput::ThreadRun() { while (!_shouldStop) { WaitForSingleObject(_frameReady, INFINITE); while (true) { EnterCriticalSection(&_mutex); if (_queue.empty()) { LeaveCriticalSection(&_mutex); break; } CFramePtr framePtr = _queue.front(); _queue.pop(); LeaveCriticalSection(&_mutex); _pStream->WriteFrame(framePtr); #ifdef WIN32 ReleaseSemaphore(_queueBuffer, 1, NULL); #else sem_post(&_queueBuffer); #endif if (_pFrameCallback != NULL) (*_pFrameCallback)(_pContext); } } SetEvent(_framesDone); return 0; } <file_sep>/VideoStream/VideoStream/VSShmemMedia.h #pragma once #include "VSBaseMedia.h" #include "VSSharedData.h" #include "VSCompression.h" namespace VideoStream { // CInputShmemStream class class CInputShmemStream : public CBaseInputVideoStream { private: VIDEOINFO _vi; CSharedData *GetCurrentFrameData(); CVideoDecompressor *_pDecompressor; public: CInputShmemStream(); ~CInputShmemStream(); bool Open (LPCTSTR location, MediaLocationParams & mlParams); const VIDEOINFO &GetVideoInfo() const; bool FastForward (unsigned int frames); CFramePtr GetNextFrame(); void Close(); }; // COutputShmemStream class class COutputShmemStream : public CBaseOutputVideoStream<false> { private: std::vector<CSharedData *> _framesData; CSharedData *_headerData; CVideoCompressor *_pCompressor; public: COutputShmemStream(); ~COutputShmemStream(); bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams); bool WriteFrame (CFramePtr framePtr); bool DeleteUpToFrame (int frameNum); void Close(); }; // CShmemMedia class class CShmemMedia : public CBaseMedia<CInputShmemStream,COutputShmemStream,/*IsInputSync = */true,/*IsOutputSync = */true> { }; }<file_sep>/LightSynthesizer/AviWriter.h // AviWriter.h #ifndef __AVI_WRITER_H__ #define __AVI_WRITER_H__ #pragma once #include <string> #include "VSVideo.h" class AviWriter { public: AviWriter(); virtual ~AviWriter(); void Open(std::string filename, int width, int height, int framerate); bool WriteFrame(VideoStream::CFramePtr framePtr); void Close(); private: VideoStream::COutputVideoStream *outputVideoStream; VideoStream::VIDEOINFO videoInfo; int width; int height; std::string filename; bool closed; bool opened; }; #endif <file_sep>/Fmod/AudioSpeaker.h #pragma once class AudioSpeaker { public: AudioSpeaker(void); ~AudioSpeaker(void); }; <file_sep>/Fmod/AudioRole.cpp #include "AudioRole.h" AudioRole::AudioRole(void) { } AudioRole::~AudioRole(void) { } <file_sep>/NewEncoder/VideoEncoder/PlaylistWriter.h #if !defined(___PLAYLIST_WRITER_H) #define ___PLAYLIST_WRITER_H #include <string> #include "VideoWriter.h" class CPlaylistWriter { public: CPlaylistWriter( const char *name, int segment_duration_ms, float first_segment_duration_ms, CVideoWriterOptions &options ); ~CPlaylistWriter(); bool Start(); std::string &GetNextSegmentFullPath(); void SegmentReady( int end_time_ms, bool last_segment = false ); static bool IsPlaylist( const char *name ); bool ShouldStartNewSegment( int time_ms ); void SegmentCreated(); private: std::string GetSegmentName( int index, bool relativePath ); void WriteSegment2Playlist( int time_sec, const char *name ); const char *GetSegmentsFileName(); const char *GetSegmentsFileName2(); int GetPlaylistNamePrefixLen(); void WriteEnd2Playlist( ); bool CreatePlaylistFile(); bool CreateSegmentsFile(); private: CVideoWriterOptions *m_options; const char *m_playlist_name; int m_nTotalSegments; int m_segment_duration_ms; float m_first_segment_duration_ms; int m_prev_segment_time_ms; int m_nsegments_ready; std::string 
m_next_segment_name; FILE *fpPlaylist; FILE *fpSegmentsList; FILE *fpSegmentsList2; }; #endif // !defined <file_sep>/NewEncoder/VideoEncoder/WMVGenProfile.h //***************************************************************************** // // Microsoft Windows Media // Copyright (C) Microsoft Corporation. All rights reserved. // // FileName: GenProfile_lib.h // // Abstract: The header file which contains the structures and // functions used by the library to create a profile // //***************************************************************************** #ifndef __GENPROFILE_H__ #define __GENPROFILE_H__ #include <wmsdk.h> #define MAX_DATA_UNIT_EXTENSIONS 5 // An arbitrarily chosen maximum for the number of data unit extensions // // Enumeration of the different kinds of streams // enum StreamType { ST_Unknown, // Unknown streamtype ST_Audio, // Audio stream ST_Video, // Video stream ST_Script, // Script stream ST_Image, // Image stream ST_Web, // Web stream ST_File, // File stream ST_Arbitrary // Arbitrary data stream }; //---------------------------------------------------------------------------------------------------------- // Encoding method Description //---------------------------------------------------------------------------------------------------------- // 1-pass Constant Bit Rate (CBR) The only option for live streaming. Encodes to a predictable bit rate // and delivers the lowest quality of all encoding methods. // // 2-pass CBR Use for files that will be streamed over a network to a client reader, // but that are not broadcast from a live source. Encodes to a predictable // bit rate, but with better quality than 1-pass CBR. // // 1-pass Variable Bit Rate (VBR) Use when you need to specify the quality of the encoded output. // Delivers the most consistent quality of all encoding methods. // Use only for local files or for downloading. // // 2-pass VBR - unconstrained Use when you need to specify a bandwidth, but fluctuations around // the specified bandwidth are acceptable. For local files or downloading only. // // 2-pass VBR - constrained Use under the same circumstances as unconstrained, but when you need to specify // a maximum momentary bit rate. For local files or downloading only. // The following table lists the encoding methods that are supported by the codecs // that ship with the Windows Media Format SDK. 
//------------------------------------------------------------------------------------------------------------ // Codec CBR 2-pass CBR VBR 2-pass VBR //------------------------------------------------------------------------------------------------------------ // Windows Media Video 9 X X X X // Windows Media Audio 9 and later X X X X // Windows Media Video 9 Screen X X // Windows Media Audio 9 Voice X // Windows Media Audio Professional X X X X // Windows Media Audio Lossless X // Windows Media Video 9 Image and later X X // Windows Media Video 9 Advanced Profile X X //------------------------------------------------------------------------------------------------------------- enum VIDEO_VBR_MODE { VBR_OFF, // VBR is not used (CBR) VBR_QUALITYBASED, // Quality based VBR, generally one pass encoding VBR_CONSTRAINED, // Constrain the Peak Bitrate and also ( optionally ) the peak bufferwindow, 2 Pass VBR_UNCONSTRAINED // No constraint on the Peak Bitrate, generally 2 pass }; enum MUTEX_TYPE { MT_Bitrate, // Exclude the streams based on bitrate MT_Language, // Exclude the streams based on language MT_Presentation // Exclude the streams based on the presentation format (for example, different aspect ratios) }; /* ** Methods helpful in creation of a profile */ /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP EnsureIWMCodecInfo3( IWMCodecInfo3** ppCodecInfo3 ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP SetCodecVBRSettings( IWMCodecInfo3* pCodecInfo3, GUID guidCodecType, DWORD dwCodecIndex, BOOL bIsVBR, DWORD dwVBRPasses ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP SetStreamLanguage( IWMStreamConfig * pStreamConfig, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateUncompressedAudioMediaType( WM_MEDIA_TYPE** ppmtMediaType, DWORD dwSamplesPerSecond, WORD wNumChannels, WORD wBitsPerSample ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateVideoMediaType( WM_MEDIA_TYPE** ppmtMediaType, IWMCodecInfo3* pCodecInfo3, DWORD dwCodecIndex, DWORD dwFPS, DWORD dwWidth, DWORD dwHeight, DWORD dwBitrate, BOOL bIsVBR, DWORD dwNumberOfPasses ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateUncompressedVideoMediaType( WM_MEDIA_TYPE** ppmtMediaType, GUID guidFormat, DWORD dwFourCC, WORD wBitsPerPixel, BYTE* pbPaletteData, DWORD cbPaletteDataSize, DWORD dwFPS, DWORD dwWidth, DWORD dwHeight ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateScriptMediaType( WM_MEDIA_TYPE** ppmtMediaType ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateImageMediaType( WM_MEDIA_TYPE** ppmtMediaType, DWORD dwBitrate, DWORD dwWidth, DWORD dwHeight ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateWebMediaType( WM_MEDIA_TYPE** ppmtMediaType ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateFileMediaType( WM_MEDIA_TYPE** ppmtMediaType ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateMediatypeForFormat( WM_MEDIA_TYPE** ppmtDestination, IWMCodecInfo3* pCodecInfo, IWMStreamConfig** ppFormatConfig, GUID guidStreamType, DWORD dwCodecIndex, DWORD dwFormatIndex ); 
/////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateMediatypeForFormat( WM_MEDIA_TYPE** ppmtDestination, IWMCodecInfo3* pCodecInfo3, IWMStreamConfig** ppFormatConfig, GUID guidCodecType, DWORD dwCodecIndex, DWORD dwFormatIndex, BOOL bIsVBR, DWORD dwVBRPasses ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateFileStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBufferWindow, DWORD dwBitrate, WORD wMaxFilenameLength, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateWebStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBitrate, DWORD dwBufferWindow, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateImageStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBitrate, DWORD dwBufferWindow, DWORD dwWidth, DWORD dwHeight, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateScriptStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwBitrate, DWORD dwBufferWindow, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateAudioStream( IWMStreamConfig** ppStreamConfig, IWMCodecInfo3* pCodecInfo3, IWMProfile *pProfile, DWORD dwBufferWindow, DWORD dwCodecIndex, DWORD dwFormatIndex, BOOL bIsVBR, DWORD dwNumberOfPasses, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateUncompressedAudioStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, DWORD dwSamplesPerSecond, WORD wNumChannels, WORD wBitsPerSample, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateVideoStream( IWMStreamConfig** ppStreamConfig, IWMCodecInfo3* pCodecInfo3, IWMProfile *pProfile, DWORD dwCodecIndex, DWORD dwBitrate, DWORD dwBufferWindow, DWORD dwWidth, DWORD dwHeight, DWORD dwFPS, DWORD dwQuality, DWORD dwSecPerKey, BOOL fIsVBR, VIDEO_VBR_MODE vbrMode, DWORD dwVBRQuality, DWORD dwMaxBitrate, DWORD dwMaxBufferWindow, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP CreateUncompressedVideoStream( IWMStreamConfig** ppStreamConfig, IWMProfile *pProfile, GUID guidFormat, DWORD dwFourCC, WORD wBitsPerPixel, BYTE* pbPaletteData, DWORD cbPaletteDataSize, DWORD dwWidth, DWORD dwHeight, DWORD dwFPS, LCID dwLanguage ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP WriteProfileAsPRX( LPCTSTR tszFilename, LPCWSTR wszProfileData, DWORD dwProfileDataLength ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP AddSMPTEExtensionToStream( IWMStreamConfig* pStream ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP GetUncompressedWaveFormatCount( DWORD * pdwCount ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP GetUncompressedWaveFormat( DWORD dwIndex, DWORD * pdwSamplesPerSecond, WORD * pwNumChannels, WORD * pwBitsPerSample ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP GetUncompressedPixelFormatCount( DWORD * pdwCount ); 
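/*
** Illustrative sketch only (not part of the original header): the enumeration
** helpers GetUncompressedPixelFormatCount() and GetUncompressedPixelFormat()
** (declared just below) are naturally used as a pair, e.g.:
**
**   DWORD dwCount = 0;
**   if ( SUCCEEDED( GetUncompressedPixelFormatCount( &dwCount ) ) )
**   {
**       for ( DWORD i = 0; i < dwCount; i++ )
**       {
**           GUID guidFormat;
**           DWORD dwFourCC;
**           WORD wBitsPerPixel;
**           if ( SUCCEEDED( GetUncompressedPixelFormat( i, &guidFormat, &dwFourCC, &wBitsPerPixel ) ) )
**           {
**               // a matching format can then be passed on to CreateUncompressedVideoStream()
**           }
**       }
**   }
*/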
/////////////////////////////////////////////////////////////////////////////// STDMETHODIMP GetUncompressedPixelFormat( DWORD dwIndex, GUID * pguidFormat, DWORD * pdwFourCC, WORD * pwBitsPerPixel ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP FindVideoCodec( WORD* wCodecIndex, //[out] WORD* wFormatIndex,//[out] GUID guidCodec //[in] ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP FindAudioCodec( WORD* wCodecIndex, //[out] WORD* wFormatIndex, //[out] GUID guidCodec, //[in] DWORD dwSampleRate, //[in] DWORD dwChannels, //[in] DWORD dwBitsPerSec, //[in] WORD wBitsPerSample,//[in] BOOL fAVSync, //[in] BOOL* bIsVBR //[in/out] ); /////////////////////////////////////////////////////////////////////////////// STDMETHODIMP FindInputIndex( WORD* pwInputIndex, //[out] IWMWriter* pWMWriter, //[in] GUID guidMediaType //[in] ); ////////////////////////////////////////////////////////////////////////////// STDMETHODIMP SaveProfileAsPRX( LPCTSTR tszFilename, IWMProfile *pIWMProfile); ///////////////////////////////////////////////////////////////////////////// STDMETHODIMP CodecSupportsVBRSetting( IWMCodecInfo3* pCodecInfo, GUID guidType, DWORD dwCodecIndex, DWORD dwPasses, BOOL* pbIsSupported ); ///////////////////////////////////////////////////////////////////////////// STDMETHODIMP SetStreamBasics( IWMStreamConfig * pIWMStreamConfig, IWMProfile * pIWMProfile, LPWSTR pwszStreamName, LPWSTR pwszConnectionName, DWORD dwBitrate, WM_MEDIA_TYPE * pmt ); ////////////////////////////////////////////////////////////////////////////// STDMETHODIMP GetComplexitySettings(IWMCodecInfo3* pCodecInfo3, GUID guidCodecType, DWORD dwCodecIndex, DWORD* pLive, DWORD* pOffline, DWORD* pMax); #endif __GENPROFILE_H__ <file_sep>/NewEncoder/VideoEncoder/EncoderUtils.h #if !defined(___ENCODER_UTILS_H) #define ___ENCODER_UTILS_H extern std::string baseFolder; inline const char *getBaseFolder() { return baseFolder.c_str(); } #endif // !defined <file_sep>/VideoStream/VideoStream/VSFrameImpl.h #pragma once #include "VSFrame.h" #ifdef WIN32 #include <windows.h> #else #include <pthread.h> #endif namespace VideoStream { class CFrameImpl : public CFrame { private: class CPixelFormat { private: int _byteR, _byteG, _byteB, _byteA; char _strFormat[5]; void Init(int bitCount, LPCSTR strFormat); public: CPixelFormat(int bitCount, LPCSTR strFormat = NULL); CPixelFormat(const CPixelFormat &format); int GetByteR() const {return _byteR;} int GetByteG() const {return _byteG;} int GetByteB() const {return _byteB;} int GetByteA() const {return _byteA;} LPCSTR GetStringFormat() const {return _strFormat;} bool IsEqual(const CPixelFormat &format) const; bool IsStandard(int bitCount) const {return IsEqual(CPixelFormat(bitCount));} }; private: unsigned int _refCount; #ifdef WIN32 HANDLE m_mutex; // Used for accessing the _refCount #else pthread_mutex_t m_mutex; // Used for accessing the _refCount #endif unsigned char *_pData; unsigned int _width, _height, _bitCount; CPixelFormat _pixelFormat; void Allocate(); public: CFrameImpl (unsigned int width, unsigned int height, unsigned int bitCount, LPCSTR pixelFormat = NULL); CFrameImpl (const CFrameImpl &frame); virtual ~CFrameImpl(); void AddRef(); void RemRef(); bool IsSingleRef() const; const unsigned char *GetData() const; unsigned char *GetData(); int GetDataSize() const; unsigned int GetWidth() const {return _width;} unsigned int GetHeight() const {return _height;} unsigned int GetBitCount() const {return 
_bitCount;} LPCSTR GetPixelFormat() const {return _pixelFormat.GetStringFormat();} bool IsStandardPixelFormat() const {return _pixelFormat.IsStandard(GetBitCount());} unsigned int GetStride() const; // Pixel related API bool InFrame(unsigned int x, unsigned int y) const {return (x < GetWidth() && y < GetHeight());} unsigned char *GetPixelData(unsigned int x, unsigned int y) {return (_pData + (y * GetStride() + x * _bitCount/8));} const unsigned char *GetPixelData(unsigned int x, unsigned int y) const {return (_pData + (y * GetStride() + x * _bitCount/8));} /* * Return new frame with same image as this frame but different bit-count (32-->24 or 24-->32). * Return NULL if newBitCount is not valid or equal to pFrame bit count. * The new frame will have the standard pixel format regardless of the pixel format of the original frame. */ CFrame *ConvertBitCount (unsigned int newBitCount) const; /* * Return new frame with same image as this frame only upside down. */ CFrame *FlipFrameVertically() const; /* * Return new frame with same image as this frame only different pixel format, * e.g. ChangePixelFormat("ARGB"); * If newPixelFormat is NULL, the standard pixel format BGR(A) will be used. */ CFrame *ChangePixelFormat (LPCSTR newPixelFormat = NULL) const; }; }; <file_sep>/Fmod/Events/AddVolumeInterpolationStepEvent.h #ifndef __ADD_VOLUME_INTERPOLATION_STEP_EVENT_H__ #define __ADD_VOLUME_INTERPOLATION_STEP_EVENT_H__ #include "SyncPointEventDescriptor.h" class AddVolumeInterpolationStepEvent : public SyncPointEventDescriptor { private: double value; public: AddVolumeInterpolationStepEvent(double v) { value = v; } int apply(FMOD::Channel *channel) { float channelVolume; channel->getVolume(&channelVolume); channel->setVolume(channelVolume + (float)value); return 0; } double getValue() { return value; } }; #endif <file_sep>/Fmod/Effects/ReverbDsp.h #ifndef __REVERB_DSP_H__ #define __REVERB_DSP_H__ #include "DspEffect.h" #pragma once class ReverbDsp : public DspEffect { private: FMOD::DSP *dsp; public: void create(FMOD::System *fmodSystem) { fmodSystem->createDSPByType(FMOD_DSP_TYPE_REVERB, &dsp); setRoomSize(0.05f); setWetMix(0.05f); setDryMix(0.95f); dspList.push_back(dsp); } void setRoomSize(float factor) { dsp->setParameter(FMOD_DSP_REVERB_ROOMSIZE, factor); } void setDamp(float factor) { dsp->setParameter(FMOD_DSP_REVERB_DAMP, factor); } void setWetMix(float factor) { dsp->setParameter(FMOD_DSP_REVERB_WETMIX, factor); } void setDryMix(float factor) { dsp->setParameter(FMOD_DSP_REVERB_DRYMIX, factor); } void setWidth(float factor) { dsp->setParameter(FMOD_DSP_REVERB_WIDTH, factor); } void setMode(float factor) { dsp->setParameter(FMOD_DSP_REVERB_MODE, factor); } }; #endif <file_sep>/VideoStream/VideoStream/VSLog.h #pragma once #include "VSSharedData.h" #include <stdio.h> #include <string> #include <vector> #include <map> #include <sys/timeb.h> #ifndef WIN32 #include <sys/time.h> #include <iostream> #include <fstream> #endif namespace VideoStream { class CLog { private: class CPerformanceLog { private: int _context; std::string _name; #ifdef WIN32 struct _timeb _openTime; #else timeb _openTime; #endif typedef std::vector<std::string> titlesVector; #ifdef WIN32 typedef std::map<std::string,struct _timeb> itemTimes; // map titles to times #else typedef std::map<std::string, timeb> itemTimes; // map titles to times #endif typedef std::map<int,itemTimes> itemsMap; titlesVector _titles; itemsMap _times; #ifdef WIN32 HANDLE _hFileMutex, _hDataMutex; #else int _hFileMutex; pthread_mutex_t _hDataMutex; 
#endif CSharedData _uniqueId; int GetUniqueId(); void AddTitle (LPCTSTR title); void WriteToFile (LPCTSTR filePath); public: CPerformanceLog(); virtual ~CPerformanceLog(); int Open (LPCTSTR name); void WriteItemTime (int itemNum, LPCTSTR title); void Close(); }; private: static CLog *_pLog; FILE *_fp; #ifdef WIN32 HANDLE _hFileMutex; #else int _hFileMutex; #endif static bool _logEnabled; typedef std::map<int,CPerformanceLog *> performanceLogsMap; performanceLogsMap _performanceLogs; CPerformanceLog *GetPerformanceLog (int context); CLog(); public: ~CLog(); int OpenContext (LPCTSTR name); void WriteLine (LPCTSTR line, int context = -1); void WriteItemTime (int context, int itemNum, LPCTSTR title); void CloseContext (int context); static CLog *Instance(); static void Write (LPCTSTR line, int context = -1) {if (_logEnabled) Instance()->WriteLine (line, context);} #ifdef WIN32 static void Enable (bool enable = true) {_logEnabled = enable;} #else static void Enable (bool enable = false) {_logEnabled = false;} #endif static bool IsEnabled() {return _logEnabled;} }; } <file_sep>/RenderingManager/RenderingManager/HeaderPrinter.cpp #ifdef WIN32 #include <time.h> #include "HeaderPrinter.h" #include "Engine.h" void HeaderPrinter::printTime(FILE *file) { time_t now = time(NULL); tm localtm; char timeString[30]; timeString[0] = '\0'; timeval curTime; gettimeofday(&curTime, NULL); int milli = curTime.tv_usec / 1000; if (localtime_s(&localtm, &now) == 0) { strftime(timeString, sizeof(timeString), "%Y-%m-%d %H:%M:%S", &localtm); } char currentTime[84] = ""; sprintf(currentTime, "%s.%03ld", timeString, milli); fprintf(file, "%s ", currentTime); } int HeaderPrinter::gettimeofday(struct timeval * tp, struct timezone * tzp) { // Note: some broken versions only have 8 trailing zero's, the correct epoch has 9 trailing zero's static const uint64_t EPOCH = ((uint64_t)116444736000000000ULL); SYSTEMTIME system_time; FILETIME file_time; uint64_t time; GetSystemTime(&system_time); SystemTimeToFileTime(&system_time, &file_time); time = ((uint64_t)file_time.dwLowDateTime); time += ((uint64_t)file_time.dwHighDateTime) << 32; tp->tv_sec = (long)((time - EPOCH) / 10000000L); tp->tv_usec = (long)(system_time.wMilliseconds * 1000); return 0; } #else #include <stdio.h> #include <sys/time.h> #include <time.h> #include <math.h> #include "HeaderPrinter.h" #include "Engine.h" void HeaderPrinter::printTime(FILE *file) { // time_t now = time(NULL); // tm* localtm; // char timeString[30]; // timeString[0] = '\0'; //if (localtime_s(&localtm, &now) == 0) // localtm = gmtime(&now); // if (localtm != NULL) // { // strftime(timeString, sizeof(timeString), "%Y-%m-%d %H:%M:%S", localtm); // } char buffer[26]; int millisec; struct tm* tm_info; struct timeval tv; // struct timeval starttv; // gettimeofday(&starttv, 0); ::gettimeofday(&tv, 0); millisec = lrint(tv.tv_usec/1000.0); // Round to nearest millisec if (millisec>=1000) { // Allow for rounding up to nearest second millisec -=1000; tv.tv_sec++; } tm_info = localtime(&tv.tv_sec); strftime(buffer, 26, "%Y-%m-%d %H:%M:%S", tm_info); char buffer1[30]; sprintf(buffer1, "%s.%03d", buffer, millisec); fprintf(file, "%s ", buffer1); } #endif void HeaderPrinter::printHeader(FILE *file, Engine *engine, DebugLevel dbgLvl) { string dbgLvlString; debugLevelToString(dbgLvl, dbgLvlString); printTime(file); fprintf(file, "[%s] %s %s - ", engine->getExecutionId().c_str(), dbgLvlString.c_str(), engine->getEngineType().c_str()); } void HeaderPrinter::printHeader(FILE *file, DebugLevel dbgLvl) { string 
dbgLvlString; debugLevelToString(dbgLvl, dbgLvlString); printTime(file); fprintf(file, "%s - ", dbgLvlString.c_str()); } void HeaderPrinter::printHeader(FILE *file, Engine *engine) { printTime(file); fprintf(file, "[%s] %s - ", engine->getExecutionId().c_str(), engine->getEngineType().c_str()); } void HeaderPrinter::printHeader(FILE *file) { printTime(file); } <file_sep>/VideoDecoder/VideoDecoder/VideoDecoder.cpp // VideoEncoder.cpp : Defines the entry point for the console application. // // FFmpegTest.cpp : Defines the entry point for the console application. // #include <stdio.h> #include <stdlib.h> #include <memory.h> #include <math.h> //#include <tchar.h> /* * Copyright (c) 2003 <NAME> * Copyright (c) 2007 <NAME> * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include <stdlib.h> #include <stdio.h> extern "C" { #define __STDC_CONSTANT_MACROS #include "libavformat/avformat.h" } #include "FFMpegReader.h" #include "VSVideo.h" #include "VideoDecoder.h" #include "SWException.h" void VideoDecoder::parse_args(int argc, char* argv[], char **infile, char **outurl ) { int iarg; if ( argc < 1 ) { throw SWException("There must be at least one argument\n"); } *outurl = argv[argc-1]; for ( iarg = 0; iarg < argc-1; iarg++ ) { if ( strcmp(argv[iarg], "-iv") == 0 ) { iarg++; if ( iarg < argc-1 ) { *infile = argv[iarg]; } else { throw SWException("Expecting input video file\n"); } } else { throw SWException( "Unknown option: %s\n", argv[iarg]); } } if ( !*infile ) { throw SWException( "Input video file must be specified\n"); } m_logger.trace("outurl =%s\ninfile=%s\n\n", *outurl, *infile); } void VideoDecoder::executeEngine(int argc, char **argv) { int ifr; char *infile = NULL; char *outurl = NULL; parse_args(argc, argv, &infile, &outurl); // open in file CFFMpegReader reader(m_logger); CVideoProperties props; if ( !reader.Open(infile, props) ) { throw SWException( "Failed to open file %s\n", infile); } // Init output VideoStream::VIDEOINFO vi; vi.bitCount = 32; vi.frameRate = props.framerate; vi.width = props.width; vi.height = props.height; VideoStream::EnableLog (m_logger.getDebugLevel() <= DebugLevel_TRACE); VideoStream::COutputVideoStream *outputStream = VideoStream::COutputVideoStream::Create(); if ( !outputStream->Open (outurl, vi) ) { throw SWException( "Failed to open output url: %s\n", outurl); } /* * Decode video */ m_logger.trace("Reading video\n"); for ( ifr = 0; ; ifr++ ) { m_logger.trace("Reading vframe %d\n", ifr); bool bEOF = false; VideoStream::CMutableFramePtr framePtr(props.width, props.height, 32); bool rc = reader.DecodeVFrame( framePtr, bEOF ); if ( bEOF ) { m_logger.trace("Reached end of file\n"); break; } if ( !rc ) { throw SWException("Failed to read frame\n"); } rc = outputStream->WriteFrame (framePtr); if ( !rc ) { throw SWException("Error writing frame %d\n", ifr ); } } 
outputStream->Close(); VideoStream::COutputVideoStream::Destroy (outputStream); reader.Close(); m_logger.trace("Output written to %s\n", outurl ); } // Initialize ffmpeg //static void VideoDecoder::init(map<string, void *> *commonStorage) { CFFMpegReader::init(commonStorage); } <file_sep>/RenderingManager/RenderingManager/include/DebugLevel.h #pragma once #include <string> #ifndef WIN32 #include <strings.h> #endif using namespace std; enum DebugLevel { DebugLevel_TRACE, DebugLevel_INFO, DebugLevel_WARNING, DebugLevel_ERROR }; // Debug level from/to string bool debugLevelToString(DebugLevel dbgLvl, string & o_str); bool debugLevelFromString(const string & str, DebugLevel & o_dbgLvl); <file_sep>/NewEncoder/VideoEncoder/VideoEncoderEngineFactory.cpp #include "stdafx.h" #include "VideoEncoderEngineFactory.h" #include "VideoEncoder.h" Engine *VideoEncoderEngineFactory::createEngine() { return new VideoEncoder(); } void VideoEncoderEngineFactory::init() { VideoEncoder::init(m_commonStorage); // Initialize ffmpeg if needed } <file_sep>/VideoStream/VideoStream/VSBaseMedia.h #pragma once #ifndef WIN32 #include <stdlib.h> #include <string.h> #include "WindowDefinitions.h" #endif #include "VSMedia.h" #include "VSThreadOutput.h" #include "VSThreadInput.h" namespace VideoStream { /******************************/ /* BASE STREAM IMPLEMENTATION */ /******************************/ class CBaseVideoStream { private: int _streamId; int _curFrame; LPTSTR _location; void FreeLocation() {if (_location != NULL) {free (_location); _location = NULL;}} protected: void SetStreamId (int streamId) {_streamId = streamId;} void SetLocation (LPCTSTR location) {FreeLocation(); _location = strdup(location);} void FrameWasProcessed() {_curFrame++;} void LogTime(LPCTSTR title); int GetCurrentFrameNum() const {return _curFrame;} LPCTSTR GetLocation() const {return _location;} CBaseVideoStream() : _streamId(-1), _curFrame(0), _location(NULL) {}; virtual ~CBaseVideoStream() {FreeLocation();} }; /************************************/ /* BASE INPUT STREAM IMPLEMENTATION */ /************************************/ class CBaseInputVideoStream : public CBaseVideoStream, public IInputVideoStream { protected: virtual bool Open (LPCTSTR location, MediaLocationParams & mlParams) = 0; CBaseInputVideoStream() : CBaseVideoStream() {}; virtual ~CBaseInputVideoStream() {}; bool Open (LPCTSTR location, MediaLocationParams & mlParams, int streamId) { SetStreamId (streamId); SetLocation (location); return Open (location, mlParams); } // Asynchronous input bool IsAsync() {return false;} void SetWaitForFrameCallback (waitForFrameCallback callback, void *pContext) {}; void RunThread() {}; }; /*************************************/ /* BASE OUTPUT STREAM IMPLEMENTATION */ /*************************************/ template <bool bPersistent> class CBaseOutputVideoStream : public CBaseVideoStream, public IOutputVideoStream { protected: virtual bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams) = 0; CBaseOutputVideoStream() : CBaseVideoStream() {}; virtual ~CBaseOutputVideoStream() {}; public: bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams, int streamId) { SetStreamId (streamId); SetLocation (location); return Open (location, vi, mlParams); } virtual bool DeleteUpToFrame (int frameNum) {return true;} virtual bool IsPersistent() {return bPersistent;} // Asynchronous output bool IsAsync() {return false;} void SetFrameWrittenCallback (frameWrittenCallback callback, void *pContext) {}; }; 
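/*
 * Illustrative sketch (hypothetical class names, not from the original sources):
 * a concrete media class only has to name its input/output stream types and
 * state whether each side is synchronous; an asynchronous side is wrapped in
 * CThreadInput / CThreadOutput by CBaseMedia below. For example:
 *
 *   class CMyFileMedia : public CBaseMedia<CInputMyFileStream,
 *                                          COutputMyFileStream,
 *                                          true,    // IsInputSync
 *                                          false>   // IsOutputSync -> threaded output
 *   {
 *   };
 *
 * See CShmemMedia and CLocalMemMedia for real instantiations of this pattern.
 */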
/*****************************/ /* BASE MEDIA IMPLEMENTATION */ /*****************************/ template <class I, class O, bool IsInputSync, bool IsOutputSync> class CBaseMedia : public IMedia { public: CBaseMedia() {}; virtual ~CBaseMedia() {}; virtual IInputVideoStream *CreateInputVideoStream() { if (IsInputSync) return new I(); else return new CThreadInput (new I()); } virtual IOutputVideoStream *CreateOutputVideoStream() { if (IsOutputSync) return new O(); else return new CThreadOutput (new O()); } }; }<file_sep>/VideoStream/VideoStream/VSUtils.h #pragma once #ifdef WIN32 #include <windows.h> #else #include "WindowDefinitions.h" #endif #include <string> namespace VideoStream { extern std::string GetSyncObjName (LPCTSTR prefix, LPCTSTR location); } <file_sep>/VideoStream/VideoStream/VSLocalMemMedia.h #pragma once #include "VSBaseMedia.h" #include "VSLocalMemStream.h" namespace VideoStream { // CInputLocalMemStream class class CInputLocalMemStream : public CBaseInputVideoStream { private: CLocalMemStream * m_stream; public: CInputLocalMemStream(); ~CInputLocalMemStream(); // Tell the CInputVideoStreamImpl class not to handle synchronizing through the sync object. // We handle synchronizing within this class. bool BypassSyncObject() {return true;} bool Open (LPCTSTR location, MediaLocationParams & mlParams); void Close(); CFramePtr GetNextFrame(); bool FastForward (unsigned int frames); const VIDEOINFO &GetVideoInfo() const; }; // COutputLocalMemStream class class COutputLocalMemStream : public CBaseOutputVideoStream<false> { private: CLocalMemStream * m_stream; public: COutputLocalMemStream(); ~COutputLocalMemStream(); // Tell the COutputVideoStreamImpl class not to handle synchronizing through the sync object. // We handle synchronizing within this class. 
bool BypassSyncObject() {return true;} bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams); bool WriteFrame (CFramePtr framePtr); void Close(); }; // CLocalMemMedia class class CLocalMemMedia : public CBaseMedia<CInputLocalMemStream,COutputLocalMemStream,/*IsInputSync = */true,/*IsOutputSync = */true> { }; }<file_sep>/VideoStream/VideoStream/Audio.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSAudioImpl.h" #include "VSLog.h" #include "VSExceptions.h" #include "VSWavFileMedia.h" #include <tchar.h> #include <stdlib.h> #include "VSThreadOutput.h" #include "VSSync.h" #else #include "VSAudioImpl.h" #include "VSLog.h" #include "VSExceptions.h" #include "VSWAVFileMedia.h" #include <stdlib.h> #include <string.h> #include "VSThreadOutput.h" #include "VSSync.h" #endif using namespace VideoStream; /********************/ /* CAudioStreamImpl */ /********************/ const int MAX_MSG = 256; CAudioStreamImpl::CAudioStreamImpl (LPCTSTR processVerb) { #ifdef WIN32 _processVerb = _tcsdup (processVerb); #else _processVerb = strdup (processVerb); #endif _streamId = -1; } CAudioStreamImpl::~CAudioStreamImpl() { _Close(); free (_processVerb); } void CAudioStreamImpl::Write2Log (LPCTSTR msg) { if (CLog::IsEnabled()) CLog::Write (msg, _streamId); } void CAudioStreamImpl::_Open (LPCTSTR location) { _Close(); // Write log message if (CLog::IsEnabled()) { _streamId = CLog::Instance()->OpenContext(location); const int MAX_MSG = 256; #ifdef WIN32 TCHAR str[MAX_MSG+1]; _stprintf_s (str, MAX_MSG, _T("OPEN %s <%s>"), _processVerb, location); #else char str[MAX_MSG+1]; sprintf (str, "OPEN %s <%s>", _processVerb, location); #endif Write2Log (str); } } void CAudioStreamImpl::_Close() { if (CLog::IsEnabled()) { if (_streamId >= 0) { #ifdef WIN32 TCHAR str[MAX_MSG+1]; _stprintf_s (str, MAX_MSG, _T("CLOSE %s"), _processVerb); #else char str[MAX_MSG+1]; sprintf (str, "CLOSE %s", _processVerb); #endif Write2Log (str); CLog::Instance()->CloseContext(_streamId); _streamId = -1; } } } /*************************/ /* CInputAudioStreamImpl */ /*************************/ CInputAudioStreamImpl::CInputAudioStreamImpl() : CAudioStreamImpl (_T("READ")) { _pStream = NULL; _pSync = NULL; } bool CInputAudioStreamImpl::HandshakeWithProducer (LPCTSTR _bareLocation, bool waitForProducer, DWORD timeoutMS) { _pSync = NULL; // Wait for video to be produced try { Write2Log ("OPEN: Trying to handshake with producer..."); _pSync = CInputSync::Handshake (_bareLocation, waitForProducer, timeoutMS); Write2Log (_pSync == NULL ? _T("OPEN: Producer already gone") : _T("OPEN: Handshake complete!")); // WAV reader needs sync for events (end/kill) if ( _pStream ) _pStream->SetSync( _pSync ); return true; } catch (...) { Write2Log ("OPEN: Handshake failed"); return false; } } bool CInputAudioStreamImpl::OpenInputStream (LPCTSTR location) { // Try to open media bool mediaOpened = _pStream->Open (_bareLocation.c_str(), 2000/*Don't wait if media doesn't exist*/, false/*On error - return false (don't throw exception)*/); //evev - if there's an error in open - we'll still wait for producer. 
if (mediaOpened) Write2Log ("OPEN: Media already exists"); // Handshake with producer if (!HandshakeWithProducer (_bareLocation.c_str(), !mediaOpened, _locationParams.timeoutMS)) return false; // Open again if early open did not work if (!mediaOpened) { mediaOpened = _pStream->Open (_bareLocation.c_str(), _locationParams.timeoutMS); } return mediaOpened; } bool CInputAudioStreamImpl::Open (LPCTSTR location) { // If already open, close Close(); _Open (location); // Find bare location, codec, timeout, etc. CMediaFactory::ParseMediaLocation (location, &_bareLocation, &_locationParams); _pStream = new CInputWAVFile(&_locationParams); // Open input stream if (OpenInputStream (location)) { return true; } // Open has failed --> close anything dangling Close(); return false; } // Return number of bytes read / -1 on failure int CInputAudioStreamImpl::Read( unsigned char *bfr, int bfr_size, bool *eof ) { if ( !_pStream ) return -1; return _pStream->Read( bfr, bfr_size, eof); } const AUDIOINFO &CInputAudioStreamImpl::GetAudioInfo() const { if (_pStream == NULL) { static AUDIOINFO info = {0, 0, 0}; return info; } else return _pStream->GetAudioInfo(); } void CInputAudioStreamImpl::Close() { if (_pStream != NULL) { _pStream->Close(); delete _pStream; _pStream = NULL; } if (_pSync != NULL) { _pSync->Close(); delete _pSync; _pSync = NULL; } _Close(); } CInputAudioStreamImpl::~CInputAudioStreamImpl() { Close(); } /**************************/ /* COutputAudioStreamImpl */ /**************************/ COutputAudioStreamImpl::COutputAudioStreamImpl() : CAudioStreamImpl (_T("WRITE")) { _pSync = NULL; } bool COutputAudioStreamImpl::Open (LPCTSTR location ) { // If already open, close Close(); // Find bare location, codec, timeout, etc. CMediaFactory::ParseMediaLocation (location, &_bareLocation, &_locationParams); _pSync = new COutputSync (_bareLocation.c_str(), -1, _locationParams.timeoutMS); // Sign that producer started (consumer can now start reading). 
_pSync->Started(); return true; } void COutputAudioStreamImpl::Close() { if (_pSync != NULL) { _pSync->VideoWasComplete(); } _Close(); } COutputAudioStreamImpl::~COutputAudioStreamImpl() { Close(); } /********************/ /* CONSTRUCTION API */ /********************/ CInputAudioStream *CInputAudioStream::Create() { return new CInputAudioStreamImpl(); } void CInputAudioStream::Destroy (CInputAudioStream *pStream) { delete pStream; } COutputAudioStream *COutputAudioStream::Create() { return new COutputAudioStreamImpl(); } void COutputAudioStream::Destroy (COutputAudioStream *pStream) { delete pStream; } <file_sep>/LightSynthesizer/AviWriter.cpp //#include "stdafx.h" #include "AviWriter.h" AviWriter::AviWriter() { opened = false; outputVideoStream = VideoStream::COutputVideoStream::Create(); } AviWriter::~AviWriter() { Close(); VideoStream::COutputVideoStream::Destroy (outputVideoStream); } void AviWriter::Open(std::string fn, int w, int h, int framerate) { filename = fn; closed = false; width = w; height = h; videoInfo.frameRate = framerate; videoInfo.width = width; videoInfo.height = height; videoInfo.bitCount = 32; if (outputVideoStream->Open(filename.c_str(), videoInfo)) { opened = true; } } bool AviWriter::WriteFrame(VideoStream::CFramePtr framePtr) { if (!opened) { return false; } outputVideoStream->WriteFrame(framePtr); return true; } void AviWriter::Close() { if (!closed) { outputVideoStream->Close(); closed = true; } }<file_sep>/VideoStream/VideoStream/LocalMemMedia .cpp #ifdef WIN32 #include "StdAfx.h" #include "VSLocalMemMedia.h" #include "VSLocalMemStreamManager.h" #include "SWException.h" #else #include "VSLocalMemMedia.h" #include "VSLocalMemStreamManager.h" #include "SWException.h" #endif using namespace VideoStream; ////////////// CInputLocalMemStream #ifndef WIN32 int frameToRead = 0; int frameToWrite = 0; #endif CInputLocalMemStream::CInputLocalMemStream() : m_stream(NULL) { } CInputLocalMemStream::~CInputLocalMemStream() { Close(); } bool CInputLocalMemStream::Open (LPCTSTR location, MediaLocationParams & mlParams) { #ifdef WIN32 m_stream = CLocalMemStreamManager::openLocalStreamForReading(string(location), mlParams.timeoutMS); #else string streamName = location; // fprintf(stderr, "CInputLocalMemStream: Read from %s\n", location); m_stream = CLocalMemStreamManager::openLocalStreamForReading(streamName, mlParams.timeoutMS); #endif if (m_stream == NULL) { return false; } return true; } void CInputLocalMemStream::Close() { if (m_stream != NULL) { CLocalMemStreamManager::closeLocalStreamForReading(m_stream); m_stream = NULL; } } CFramePtr CInputLocalMemStream::GetNextFrame() { if (m_stream == NULL) { // Error throw SWException("Trying to read from an input local memory stream which is not open."); } CFramePtr nextFrame = m_stream->read(); if (nextFrame.isNull()) { // Should this happen??? 
} else { FrameWasProcessed(); } return nextFrame; } bool CInputLocalMemStream::FastForward (unsigned int frames) { for (unsigned int i=0; i < frames; i++) { if (GetNextFrame().isNull()) return false; } return true; } const VIDEOINFO &CInputLocalMemStream::GetVideoInfo() const { return m_stream->getVideoInfo(); } ////////////// COutputLocalMemStream COutputLocalMemStream::COutputLocalMemStream() : m_stream(NULL) { } COutputLocalMemStream::~COutputLocalMemStream() { Close(); } bool COutputLocalMemStream::Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams) { #ifdef WIN32 m_stream = CLocalMemStreamManager::openLocalStreamForWriting(string(location), vi, mlParams.codecstr, mlParams.bps, mlParams.bufSize, mlParams.timeoutMS); #else string streamName = location; // fprintf(stderr, "COutputLocalMemStream :: WRITING from %s\n", location); m_stream = CLocalMemStreamManager::openLocalStreamForWriting(streamName, vi, mlParams.codecstr, mlParams.bps, mlParams.bufSize, mlParams.timeoutMS); #endif if (m_stream == NULL) { return false; } return true; } void COutputLocalMemStream::Close() { if (m_stream != NULL) { CLocalMemStreamManager::closeLocalStreamForWriting(m_stream); m_stream = NULL; } } bool COutputLocalMemStream::WriteFrame (CFramePtr framePtr) { if (m_stream == NULL) { // Error throw SWException("Trying to write to an output local memory stream which is not open."); } m_stream->write(framePtr); FrameWasProcessed(); return true; } <file_sep>/TinyXml/CMakeLists.txt file(GLOB tinyXml_SRC "*.cpp" ) add_library(TinyXml ${tinyXml_SRC}) target_include_directories(TinyXml PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/)<file_sep>/SVG2Video/CMakeLists.txt file(GLOB SVG2Video_SRC "*.cpp" "*.cc" ) add_library(SVG2Video ${SVG2Video_SRC}) #find_package(JNI) if (UNIX) target_include_directories(SVG2Video PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../VideoStream/VideoStream/include ${CMAKE_CURRENT_SOURCE_DIR}/../RenderingManager/RenderingManager/include ${CMAKE_CURRENT_SOURCE_DIR}/../third_party/include/grpc/third_party/protobuf/src ${CMAKE_CURRENT_SOURCE_DIR}/../third_party/include/grpc/include ${CMAKE_CURRENT_SOURCE_DIR}/../utils/Utils ${CMAKE_CURRENT_SOURCE_DIR}/../utils/StackWalker/include ) else() target_include_directories(SVG2Video PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../VideoStream/VideoStream/include ${CMAKE_CURRENT_SOURCE_DIR}/../RenderingManager/RenderingManager/include ${CMAKE_CURRENT_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/../utils/Utils ${CMAKE_CURRENT_SOURCE_DIR}/../utils/StackWalker/include ${JNI_INCLUDE_DIRS}) endif() #target_link_libraries(CliManager LINK_PUBLIC StackWalker) <file_sep>/Fmod/Presets/MaximizePreset.h #ifndef __MAXIMIZE_PRESET_H__ #define __MAXIMIZE_PRESET_H__ #include "Preset.h" #include "../Effects/NormalizationDsp.h" #include "../Effects/CompressionDsp.h" class MaximizePreset : public Preset { public: MaximizePreset() { Preset(); } void create(FMOD::System *fmodSystem) { NormalizationDsp *norm = new NormalizationDsp(); norm->create(fmodSystem); norm->setFadeTime(500.0f); norm->setThreshhold(0.4f); norm->setMaxAmp(2.0f); CompressionDsp *comp = new CompressionDsp(); comp->create(fmodSystem); comp->setThreshhold(-5.0f); comp->setAttack(250.0f); comp->setRelease(1000.0f); effectDspList.push_back(norm); //effectDspList.push_back(comp); volume=1.0f; created = true; } }; #endif <file_sep>/NewEncoder/VideoEncoder/WMVWriter.cpp #include "StdAfx.h" #include <windows.h> #include <Mmreg.h> #include "WMVWriter.h" #include "WMVMacros.h" #include "SWException.h" #ifndef __countof 
#define __countof(x) ((sizeof(x)/sizeof(x[0]))) #endif #pragma warning (disable : 4996 ) CWMVWriter::CWMVWriter(Logger & logger) : m_logger(logger) { CoInitialize(NULL); m_pWMWriter = NULL; m_pWMProfile = NULL; m_pWMProfileManager = NULL; m_dwVideoInput = 0; m_msVideoTime = 0; m_pVideoProps = NULL; m_dwCurrentVideoSample = 0; m_pAudioProps = NULL; m_dwAudioInput = 0; m_dwCurrentAudioSample = 0; m_AudioTime_cns = 0;//time in 100nanosecs m_bInit = false; } // Initialize the encoder library bool CWMVWriter::Init(CVideoWriterOptions &options) { m_options = options; m_bIsAudio = ( options.abitrate > 0 ); LPCTSTR lpszFileName = options.name.c_str(); const GUID& guidProfileID = //WMProfile_V70_288VideoAudio; WMProfile_V80_1400NTSCVideo; //WMProfile_V80_384Video; /* GUID myProfileGUID; CLSIDFromString( L"29B00C2B-09A9-48bd-AD09-CDAE117D1DA7", &myProfileGUID );*/ HRESULT hr=E_FAIL; GUID guidInputType; DWORD dwInputCount=0; IWMInputMediaProps* pInputProps = NULL; IWMProfileManager2 *pProfileManager2=NULL; m_dwFrameRate = options.framerate; _tcscpy(m_szErrMsg, _T("Method Succeeded")); m_szErrMsg[__countof(m_szErrMsg)-1] = _T('\0'); if(FAILED(WMCreateProfileManager(&m_pWMProfileManager))) { throw SWException("Unable to Create WindowsMedia Profile Manager\n"); } if ( !options.profile || !(*options.profile) ) { // No profile - take default. if(FAILED(m_pWMProfileManager->QueryInterface(IID_IWMProfileManager2,(void**)&pProfileManager2))) { throw SWException("Unable to Query Interface for ProfileManager2\n"); } hr=pProfileManager2->SetSystemProfileVersion(WMFORMAT_SDK_VERSION); pProfileManager2->Release(); if(FAILED(hr)) { throw SWException("Unable to Set System Profile Version\n"); } if(FAILED(m_pWMProfileManager->LoadProfileByID(guidProfileID,&m_pWMProfile))) { throw SWException("Unable to Load System Profile by ID\n"); } } else { // load profile from file if ( FAILED( LoadCustomProfile( options.profile, WMFORMAT_SDK_VERSION, &m_pWMProfile) ) ) { throw SWException("Unable to Create custom profile %s\n", options.profile ); } } if(FAILED(WMCreateWriter(NULL,&m_pWMWriter))) { throw SWException("Unable to Create Media Writer Object\n"); } // // Get the IWMWriterAdvanced interface of the writer // IWMWriterAdvanced* pWriterAdvanced = NULL; hr = m_pWMWriter->QueryInterface( IID_IWMWriterAdvanced, (void **)&pWriterAdvanced ); if( FAILED( hr ) ) { throw SWException( "Create IWMWriterAdvanced interface failed: (hr=0x%08x)\n" , hr ); } hr = pWriterAdvanced->SetLiveSource(TRUE); //evev - need this? if( FAILED( hr ) ) { m_logger.error( "SetLiveSource() failed: (hr=0x%08x)\n", hr ); } hr = pWriterAdvanced->SetSyncTolerance(INT_MAX);// Allows us to write all the audio first. 
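// Overall usage of this writer (sketch only; "logger", "opts", "vframe",
// "aframe" and "outSize" are placeholder names, and the option fields listed
// are the ones Init() reads: name, profile, width, height, framerate,
// vbitsperpel, plus abitrate/achannels/asample_rate when audio is wanted):
//
//   CWMVWriter writer(logger);
//   CVideoWriterOptions opts;        // fill the fields listed above
//   if (writer.Init(opts))           // sets up the profile and calls BeginWriting()
//   {
//       int outSize = 0;
//       // per video frame:   writer.EncodeVFrame(vframe, &outSize);
//       // per audio buffer:  writer.EncodeAFrame(aframe);
//       writer.Close();              // EndWriting()
//   }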
if( FAILED( hr ) ) { m_logger.error( "SetSyncTolerance() failed: (hr=0x%08x)\n" , hr ); } pWriterAdvanced->Release(); if(FAILED(m_pWMWriter->SetProfile(m_pWMProfile))) { throw SWException("Unable to Set System Profile\n"); } if(FAILED(m_pWMWriter->GetInputCount(&dwInputCount))) { throw SWException("Unable to Get input count For Profile\n"); } for(DWORD i=0;i<dwInputCount;i++) { if(FAILED(m_pWMWriter->GetInputProps(i,&pInputProps))) { throw SWException("Unable to GetInput Properties\n"); } if(FAILED(pInputProps->GetType(&guidInputType))) { throw SWException("Unable to Get Input Property Type\n"); } if(guidInputType==WMMEDIATYPE_Video && !m_pVideoProps) { m_pVideoProps=pInputProps; m_dwVideoInput=i; } else if(guidInputType==WMMEDIATYPE_Audio && m_bIsAudio && !m_pAudioProps) { m_pAudioProps=pInputProps; m_dwAudioInput=i; //break; } else { pInputProps->Release(); pInputProps=NULL; } } if(m_pVideoProps==NULL) { throw SWException("Profile Does not Accept Video input\n"); } if(m_pAudioProps==NULL && m_bIsAudio ) { throw SWException("Profile Does not Accept Audio input\n"); } #ifndef UNICODE WCHAR pwszOutFile[1024]; if( 0 == MultiByteToWideChar( CP_ACP, 0, lpszFileName,-1, pwszOutFile, sizeof( pwszOutFile ) ) ) { throw SWException("Unable to Convert Output Filename\n"); } if(FAILED(m_pWMWriter->SetOutputFilename( pwszOutFile ))) { throw SWException("Unable to Set Output Filename\n"); } #else if(FAILED(m_pWMWriter->SetOutputFilename(lpszFileName))) { throw SWException("Unable to Set Output Filename\n"); } #endif //UNICODE // m_nAppendFuncSelector=1; //0=Dummy 1=FirstTime 2=Usual if(!SUCCEEDED(InitVideoStream( options.width, options.height, options.vbitsperpel/*evev?*/ ) ) ) return false; if ( m_bIsAudio ) { if(!SUCCEEDED(InitAudioStream( options.achannels, options.asample_rate ) ) ) { throw SWException("Failed to Initialize audio\n"); } } if(FAILED(m_pWMWriter->BeginWriting())) { throw SWException("Unable to Initialize Writing\n"); } m_bInit = true; return true; } CWMVWriter::~CWMVWriter(void) { ReleaseMemory(); CoUninitialize(); } void CWMVWriter::ReleaseMemory() { if(m_pVideoProps) { m_pVideoProps->Release(); m_pVideoProps=NULL; } if(m_pAudioProps) { m_pAudioProps->Release(); m_pAudioProps=NULL; } if(m_pWMWriter) { m_pWMWriter->Release(); m_pWMWriter=NULL; } if(m_pWMProfile) { m_pWMProfile->Release(); m_pWMProfile=NULL; } if(m_pWMProfileManager) { m_pWMProfileManager->Release(); m_pWMProfileManager=NULL; } } HRESULT CWMVWriter::InitAudioStream( int nChannels, int nSamplesPerSec ) { // PCM 16 bit GUID guidSubType = WMMEDIASUBTYPE_PCM; WORD wBitsPerSample = GetBytesPerSample()*8; HRESULT hr = S_OK; DWORD cbSize = 0; IWMInputMediaProps* pProps = NULL; WM_MEDIA_TYPE* pType = NULL; WAVEFORMATEX* pWAVEHdr = NULL; ///////////////////////////////////////////////////////////////////////////////// DWORD cFormats = 0; DWORD formatIndex = 0; // Get the base input format for the required subtype. // Find the number of formats supported by this input. hr = m_pWMWriter->GetInputFormatCount(m_dwAudioInput, &cFormats); if ( FAILED(hr) ) goto Exit; // Loop through all of the supported formats. for (formatIndex = 0; formatIndex < cFormats; formatIndex++) { // Get the input media properties for the input format. hr = m_pWMWriter->GetInputFormat(m_dwAudioInput, formatIndex, &pProps); if ( FAILED(hr) ) goto Exit; // Get the size of the media type structure. hr = pProps->GetMediaType(NULL, &cbSize); if ( FAILED(hr) ) goto Exit; // Allocate memory for the media type structure. 
pType = (WM_MEDIA_TYPE*) new BYTE[cbSize]; if (pType == NULL) { hr = E_OUTOFMEMORY; goto Exit; } // Get the media type structure. hr = pProps->GetMediaType(pType, &cbSize); if ( FAILED(hr) ) goto Exit; pWAVEHdr = (WAVEFORMATEX*)pType->pbFormat; //printf(" %d = PCM %d ", formatIndex, pWAVEHdr->wFormatTag ); //printf(" = %3d ", pWAVEHdr->nAvgBytesPerSec*8 ); //printf(" = %3d ", pWAVEHdr->nSamplesPerSec ); //printf(" = %3d ", pWAVEHdr->nChannels); //printf(" = %2d \n", pWAVEHdr->wBitsPerSample ); if(pType->subtype == guidSubType) { if(pWAVEHdr->nChannels == nChannels && pWAVEHdr->nSamplesPerSec == nSamplesPerSec && pWAVEHdr->wBitsPerSample == wBitsPerSample ) { break; } } // Clean up for next iteration. if (pType) { delete [] pType; pType = NULL; } if (pProps) { pProps->Release(); pProps = NULL; } } // End for formatIndex. ///////////////////////////////////////////////////////////////////////////////// if(pProps == NULL) { m_logger.error("Unsupported input format\n"); hr = E_NOTIMPL; goto Exit; } // Apply the adjusted type to the video input. hr = pProps->SetMediaType(pType); if ( FAILED(hr) ) goto Exit; hr = m_pWMWriter->SetInputProps(m_dwAudioInput, pProps); m_lASamplesPerFrame = m_options.asample_rate;// * 10; Exit: if (pType) { delete [] pType; pType = NULL; } if (pProps) { pProps->Release(); pProps = NULL; } return hr; } HRESULT CWMVWriter::InitVideoStream(int nFrameWidth, int nFrameHeight, int nBitsPerPixel) { int nMaxWidth=GetSystemMetrics(SM_CXSCREEN), nMaxHeight=GetSystemMetrics(SM_CYSCREEN); BITMAPINFO bmpInfo; ZeroMemory(&bmpInfo,sizeof(BITMAPINFO)); bmpInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); bmpInfo.bmiHeader.biBitCount = nBitsPerPixel; bmpInfo.bmiHeader.biWidth = nFrameWidth; bmpInfo.bmiHeader.biHeight = nFrameHeight; bmpInfo.bmiHeader.biPlanes = 1; bmpInfo.bmiHeader.biSizeImage = nFrameWidth*nFrameHeight*nBitsPerPixel/8; bmpInfo.bmiHeader.biCompression = BI_RGB; if(bmpInfo.bmiHeader.biHeight>nMaxHeight) nMaxHeight=bmpInfo.bmiHeader.biHeight; if(bmpInfo.bmiHeader.biWidth>nMaxWidth) nMaxWidth=bmpInfo.bmiHeader.biWidth; WMVIDEOINFOHEADER videoInfo; videoInfo.rcSource.left = 0; videoInfo.rcSource.top = 0; videoInfo.rcSource.right = bmpInfo.bmiHeader.biWidth; videoInfo.rcSource.bottom = bmpInfo.bmiHeader.biHeight; videoInfo.rcTarget = videoInfo.rcSource; videoInfo.rcTarget.right = videoInfo.rcSource.right; videoInfo.rcTarget.bottom = videoInfo.rcSource.bottom; videoInfo.dwBitRate= (nMaxWidth*nMaxHeight*bmpInfo.bmiHeader.biBitCount* m_dwFrameRate); videoInfo.dwBitErrorRate = 0; videoInfo.AvgTimePerFrame = ((QWORD)1) * 10000 * 1000 / m_dwFrameRate; memcpy(&(videoInfo.bmiHeader),&bmpInfo.bmiHeader,sizeof(BITMAPINFOHEADER)); WM_MEDIA_TYPE mt; mt.majortype = WMMEDIATYPE_Video; if( bmpInfo.bmiHeader.biCompression == BI_RGB ) { if( bmpInfo.bmiHeader.biBitCount == 32 ) { mt.subtype = WMMEDIASUBTYPE_RGB32; } else if( bmpInfo.bmiHeader.biBitCount == 24 ) { mt.subtype = WMMEDIASUBTYPE_RGB24; } else if( bmpInfo.bmiHeader.biBitCount == 16 ) { mt.subtype = WMMEDIASUBTYPE_RGB555; } else if( bmpInfo.bmiHeader.biBitCount == 8 ) { mt.subtype = WMMEDIASUBTYPE_RGB8; } else { mt.subtype = GUID_NULL; } } mt.bFixedSizeSamples = false; mt.bTemporalCompression = false; mt.lSampleSize = 0; mt.formattype = WMFORMAT_VideoInfo; mt.pUnk = NULL; mt.cbFormat = sizeof(WMVIDEOINFOHEADER); mt.pbFormat = (BYTE*)&videoInfo; if(FAILED(m_pVideoProps->SetMediaType(&mt))) { throw SWException("Unable to Set Media Type\n"); } if(FAILED(m_pWMWriter->SetInputProps(m_dwVideoInput,m_pVideoProps))) { throw 
SWException("Unable to Set Input Properties for Media Writer\n"); } return S_OK; } bool CWMVWriter::EncodeVFrame(CVideoFrame &video_frame, int* out_size) { HRESULT hr=E_FAIL; INSSBuffer *pSample=NULL; BYTE *pbBuffer=NULL; DWORD cbBuffer=0; int nBitsPerPixel=m_options.vbitsperpel; //evev if ( !m_bInit ) return false; BITMAPINFO bmpInfo; ZeroMemory(&bmpInfo,sizeof(BITMAPINFO)); bmpInfo.bmiHeader.biSize=sizeof(BITMAPINFOHEADER); bmpInfo.bmiHeader.biBitCount=nBitsPerPixel; bmpInfo.bmiHeader.biWidth=video_frame.width; bmpInfo.bmiHeader.biHeight=video_frame.height; bmpInfo.bmiHeader.biCompression=BI_RGB; bmpInfo.bmiHeader.biPlanes=1; bmpInfo.bmiHeader.biSizeImage=video_frame.width*video_frame.height*nBitsPerPixel/8; hr = m_pWMWriter->AllocateSample(bmpInfo.bmiHeader.biSizeImage,&pSample); if(FAILED(hr)) { throw SWException("Unable to Allocate Memory\n"); } if(FAILED(pSample->GetBufferAndLength(&pbBuffer,&cbBuffer))) { throw SWException("Unable to Lock Buffer\n"); } memcpy(pbBuffer,video_frame.bfr,bmpInfo.bmiHeader.biSizeImage); // Time in 1/10,000,000 sec (= 100nanosec) hr=m_pWMWriter->WriteSample(m_dwVideoInput,10000 * m_msVideoTime,0,pSample); m_msVideoTime=(++m_dwCurrentVideoSample*1000)/m_dwFrameRate; if(pSample) { pSample->Release(); pSample=NULL; } if(FAILED(hr)) { throw SWException("Unable to Write video Frame\n"); } return true; } // Encode a audio frame bool CWMVWriter::EncodeAFrame(CAudioFrame& audio_frame) { HRESULT hr=E_FAIL; INSSBuffer *pSample=NULL; LONG cbSample = audio_frame.len; BYTE *pbBuffer=NULL; DWORD cbBuffer=0; if ( !m_bInit ) return false; if ( !m_bIsAudio ) return false; // no audio if(FAILED(m_pWMWriter->AllocateSample(cbSample,&pSample))) { throw SWException("Unable to Allocate Memory for audio frame\n"); } if(FAILED(pSample->GetBufferAndLength(&pbBuffer,&cbBuffer))) { throw SWException("Unable to Lock Buffer\n"); } memcpy(pbBuffer,audio_frame.bfr,cbSample); if( FAILED( pSample->SetLength(cbSample) ) ) { throw SWException("Unable to set sample length\n"); } // Time in 1/10,000,000 sec (= 100nanosec) hr=m_pWMWriter->WriteSample(m_dwAudioInput,m_AudioTime_cns ,0,pSample); int nsamples = cbSample / GetBytesPerSample() / m_options.achannels; m_dwCurrentAudioSample += nsamples; m_AudioTime_cns =( ((QWORD)m_dwCurrentAudioSample)*10000000) / m_options.asample_rate; m_logger.info("asample=%ld atime=%lld\n", m_dwCurrentAudioSample, m_AudioTime_cns ); if(pSample) { pSample->Release(); pSample=NULL; } if(FAILED(hr)) { throw SWException("Unable to Write audio Frame\n"); } return true; } // Finish bool CWMVWriter::Close() { if ( !m_bInit ) return false; m_pWMWriter->EndWriting(); return true; } //------------------------------------------------------------------------------ // Name: CWMVWriter::LoadCustomProfile() // Desc: Loads a custom profile from file. 
//------------------------------------------------------------------------------ HRESULT CWMVWriter::LoadCustomProfile( LPCTSTR ptszProfileFile, WMT_VERSION ver, IWMProfile ** ppIWMProfile ) { HRESULT hr = S_OK; DWORD dwLength = 0; DWORD dwBytesRead = 0; IWMProfileManager * pProfileManager = NULL; HANDLE hFile = INVALID_HANDLE_VALUE; LPWSTR pProfile = NULL; if( NULL == ptszProfileFile || NULL == ppIWMProfile ) { return( E_POINTER ); } do { // // Create profile manager // hr = WMCreateProfileManager( &pProfileManager ); if( FAILED( hr ) ) { break; } // // Open the profile file // hFile = CreateFile( ptszProfileFile, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL ); if( INVALID_HANDLE_VALUE == hFile ) { hr = HRESULT_FROM_WIN32( GetLastError() ); break; } if( FILE_TYPE_DISK != GetFileType( hFile ) ) { hr = NS_E_INVALID_NAME; break; } dwLength = GetFileSize( hFile, NULL ); if( -1 == dwLength ) { hr = HRESULT_FROM_WIN32( GetLastError() ); break; } // // Allocate memory for profile buffer // pProfile = (WCHAR *)new BYTE[ dwLength + sizeof(WCHAR) ]; if( NULL == pProfile ) { hr = E_OUTOFMEMORY; break; } // The buffer must be null-terminated. memset( pProfile, 0, dwLength + sizeof(WCHAR) ); // // Read the profile to a buffer // if( !ReadFile( hFile, pProfile, dwLength, &dwBytesRead, NULL ) ) { hr = HRESULT_FROM_WIN32( GetLastError() ); break; } // // Load the profile from the buffer // hr = pProfileManager->LoadProfileByData( pProfile, ppIWMProfile ); if( FAILED(hr) ) { break; } } while( FALSE ); // // Release all resources // SAFE_ARRAY_DELETE( pProfile ); SAFE_CLOSE_FILE_HANDLE( hFile ); SAFE_RELEASE( pProfileManager ); return( hr ); } <file_sep>/Fmod/Parser.h #ifndef __PARSER_H__ #define __PARSER_H__ #include "../AudioPart.h" #include "../EffectGroup.h" #include "../MixerEnums.h" class Parser { private: std::vector<AudioPart *> clipAudioParts; std::vector<EffectGroup *> clipEffectGroups; void setAudioRole(AudioPart *audioPart, std::string role); void setAudioType(AudioPart *audioPart, std::string type); void setAudioOrigin(AudioPart *audioPart, std::string origin); void setAudioSpeaker(AudioPart *audioPart, std::string speaker); void parseEffect(std::vector<Effect *> *effects, TiXmlElement *effectXml); void parseVolume(std::vector<Interpolated *> *volumes, TiXmlElement *volumeXml); double getDoubleAttribute(const char* value); const char* getStringAttribute(const char* value); public: Parser(); bool parse(std::string filePath); bool parseAudioParts(TiXmlElement *audioClip); bool parseEffectGroups(TiXmlElement *audioClip); std::vector<std::string> parseEffectGroupsPartsIDs( TiXmlElement *effectGroupXml); std::vector<Effect *> parseEffectGroupsEffects(TiXmlElement *effectGroupXml); std::vector<AudioPart *> getClipAudioParts(); std::vector<EffectGroup *> getClipEffectGroups(); }; #endif <file_sep>/RenderingManager/RenderingManager/RenderingManagerLogger.cpp #include "RenderingManagerLogger.h" #include "RenderingManager.h" #include "HeaderPrinter.h" #ifndef WIN32 #include <string.h> #endif void RenderingManagerLogger::vprintf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, va_list argList) { RenderingManager::FileMutexAutoLock mal(m_rndMng); if (printHeader) HeaderPrinter::printHeader(m_rndMng->getLogFile(), dbgLvl); vfprintf(m_rndMng->getLogFile(), format, argList); if (verifyNewLine && (strlen(format) == 0 || format[strlen(format)-1] != '\n')) fprintf(m_rndMng->getLogFile(), "\n"); fflush(m_rndMng->getLogFile()); } DebugLevel 
RenderingManagerLogger::getDebugLevel() { return m_rndMng->getDefaultDebugLevel(); } <file_sep>/NewEncoder/VideoEncoder/FFMpegWriter.h #if !defined(___FFMPEGWRITER_H) #define ___FFMPEGWRITER_H #include <string> #include <map> #include <list> #include "VideoWriter.h" #include "Logger.h" #include "ProgressTracker.h" #include "libavfilter/avfiltergraph.h" #include "MoovHeaderWriter.h" #include "libavutil/error.h" // Use fps or cts?? fps // Support resize? no // Support frame dup/drop? no // Which pixel formats? ARGB #define VE_CODEC_FLV "flv" #define VE_CODEC_VP6 "vp6" #define VE_CODEC_VP8 "vp8" #define VE_CODEC_XVID "xvid" #define VE_CODEC_H264 "h264" #define VE_CODEC_H264_NVENC "h264_nvenc" #define VE_CODEC_MPEG4 "mpeg4" #define VE_CODEC_QTRLE "qtrle" #define VE_CODEC_MP3 "mp3" #define VE_CODEC_AAC "aac" #define VE_CODEC_PCM_S16LE "pcm16le" #define VE_CODEC_VORBIS "vorbis" #define IE_CODEC_IMAGE2 "image2" //#define WRITE_HEADER 0 //#define DEBUG_PADDING class CVFWEncoder; struct SwsContext; class CPlaylistWriter; class MoovHeaderWriter; class CFFMpegWriter : public CVideoWriter { private: Logger & m_logger; //int *m_mediaList; std::list<int> m_mediaList; //int m_mediaListSize; std::list<AVPacket> vpacketList; std::list<AVPacket> apacketList; int nPacketCount; int nVideoCount; int nAudioCount; bool m_vFirstFrame; std::list<int> streamIds; ProgressTracker *m_progressTracker; public: static void init(map<string, void *> *commonStorage); // Initialize ffmpeg CFFMpegWriter(Logger & logger, ProgressTracker *progressTracker); virtual ~CFFMpegWriter(); // Initialize the encoder library virtual bool Init(CVideoWriterOptions &options); virtual bool BeginWrite(); // Encode a video frame virtual bool EncodeVFrame(CVideoFrame& video_frame, int *out_size); // Encode a audio frame virtual bool EncodeAFrame(CAudioFrame& audio_frame); // Finish virtual bool Close(); static void my_log_callback(void *ptr, int level, const char *fmt, va_list vargs); private: void createPaddedPacketFromPkt(AVPacket& pkt, AVPacket* paddedPkt, int packetSize); bool write_packet(std::list<AVPacket> &list, AVMediaType mediaType); AVCodecID selectCodecId(CVideoWriterOptions &options); int init_filter_graph(AVFilterGraph **graph, AVFilterContext **src, AVFilterContext **sink); int configure_filtergraph(AVFilterGraph *graph, const char *filtergraph, AVFilterContext *source_ctx, AVFilterContext *sink_ctx); bool AddVideoStream(CVideoWriterOptions &options); AVFrame *alloc_picture(AVPixelFormat pix_fmt, int width, int height); bool write_vsample(AVPacket& pkt, bool i_frame, int64_t sample_pts); int write_frame(AVFormatContext *fmt_ctx, const AVRational *time_base, AVStream *st, AVPacket *pkt, AVPacket *paddedPkt); int write_packet_list(AVStream *st, AVPacket *pkt); //AVFormatContext *fmt_ctx, const AVRational *time_base, AVStream *st, AVPacket *pkt); bool AddAudioStream(CVideoWriterOptions &options); int check_sample_fmt(AVCodec *codec, enum AVSampleFormat sample_fmt); int select_channel_layout(AVCodec *codec); int GetTotalBitrate(); bool SetPresetsFromFile( const char *filename); bool switch_segment_if_needed( int time_ms, int *flushed ); int GetCurPtsMS(); bool EncodeAndWriteFrame( AVFrame *frameForEnc, int *out_size ); bool FlushEncoder(int *flushed); // Thread safe functions for codec opening/closing void AvcodecOpenThreadSafe(AVCodecContext *avctx, AVCodec *codec); void AvcodecCloseThreadSafe(AVCodecContext *avctx); private: AVOutputFormat* m_pAVOutputFormat; AVFormatContext* m_pAVFormatContext; AVDictionary * m_codecOptions; // 
Video AVStream* m_pVStream; AVCodec *m_pVCodec; AVCodecContext* m_pVCodecContext; //AVFrame *m_pVFrameOrig; AVFrame *m_pVFrameForEnc; int m_lVEncodedFrames; int m_nVideoBufferSize; unsigned char *m_pVideoBuffer; SwsContext *m_pSWSContext ; AVStream* m_pAStream; AVCodec *m_pACodec; AVCodecContext* m_pACodecContext; AVFrame *m_pAFrameForEnc; AVFrame *m_pAFrameForFilter; unsigned char m_codecExtraData; FILE *m_passLogFile; CPlaylistWriter *m_pPlaylistWriter; bool m_bSetAudioPts; int m_iAudioBytesCount; bool m_bInit; uint8_t **dst_samples_data; int dst_samples_linesize; int dst_samples_size; int samples_count; int encoded_samples_count; AVFilterGraph *graph; AVFilterContext *buffer_src, *sink; MoovHeaderWriter* moovHeaderWriter; int nframeNumber; AVIOContext* m_paddedIOContext; }; #endif // !defined <file_sep>/NewEncoder/VideoEncoder/VFWEncoder.h #if !defined(___VFWENCODER_H) #define ___VFWENCODER_H #include <string> //#include <afxwin.h> // MFC core and standard components //#include <windowsx.h> //#include <memory.h> //#include <mmsystem.h> #include <windows.h> #include <vfw.h> #include "VideoWriter.h" #include "Logger.h" class CVFWEncoder { private: Logger & m_logger; public: CVFWEncoder(Logger & logger); virtual ~CVFWEncoder(); // Initialize the encoder library virtual bool Init(CVideoWriterOptions &options); virtual bool Begin(); // Encode a video frame unsigned char *EncodeVFrame(CVideoFrame& video_frame, long *bfr_size, bool *isKey); // Finish virtual bool Close(); private: bool StoreCodecConf(COMPVARS* cvar); bool LoadCodecConf(COMPVARS* cvar); void ChooseOneCompatibleCompressor(COMPVARS *cvar, BITMAPINFOHEADER *bi); private: CVideoWriterOptions m_options; bool m_bInit; int m_numFrames; int m_rowWidthBytes; unsigned char *m_pFrmBfr; BITMAPINFOHEADER outbi; COMPVARS compvars; }; #endif // !defined <file_sep>/Fmod/Presets/Preset.h #ifndef __PRESET_H__ #define __PRESET_H__ #include "../Effects/Effect.h" #include "../Effects/DspEffect.h" class Preset : public Effect { protected: std::vector<DspEffect *> effectDspList; float volume; float pan; bool created; private: void applyPan(FMOD::Channel *channel) { channel->setPan(pan); } void applyVolume(FMOD::Channel *channel) { float channelVol; channel->getVolume(&channelVol); channel->setVolume(channelVol * volume); } void applyEffects(FMOD::Channel *channel) { for (std::vector<DspEffect *>::iterator it = effectDspList.begin(); it != effectDspList.end(); ++it) { DspEffect *dspEffect = *it; dspEffect->apply(channel); } } void applyPan(FMOD::ChannelGroup *channelGroup) { int noOfChannels; channelGroup->getNumChannels(&noOfChannels); for (int i=0 ; i < noOfChannels ; i++) { FMOD::Channel *channel; channelGroup->getChannel(i, &channel); channel->setPan(pan); } } void applyEffects(FMOD::ChannelGroup *channelGroup) { for (std::vector<DspEffect *>::iterator it = effectDspList.begin(); it != effectDspList.end(); ++it) { DspEffect *dspEffect = *it; dspEffect->apply(channelGroup); } } void applyVolume(FMOD::ChannelGroup *channelGroup) { float channelVol; channelGroup->getVolume(&channelVol); channelGroup->setVolume(channelVol * volume); } public: Preset() { created = false; } void apply(FMOD::Channel *channel) { applyEffects(channel); applyVolume(channel); applyPan(channel); } void apply(FMOD::ChannelGroup *channelGroup) { applyEffects(channelGroup); applyVolume(channelGroup); applyPan(channelGroup); } float getVolume() { return volume; } float getPan() { return pan; } bool getCreated() { return created; } }; 
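// Usage sketch for the presets built on this base class. "fmodSystem" and
// "channel" are placeholders for an already-initialized FMOD::System* and a
// playing FMOD::Channel* obtained elsewhere:
//
//   MaximizePreset preset;            // a concrete Preset, see MaximizePreset.h
//   preset.create(fmodSystem);        // builds the DSP chain and sets the preset volume
//   if (preset.getCreated())
//       preset.apply(channel);        // adds the DSPs, then adjusts the channel volume
//
// apply() multiplies the channel's current volume by the preset volume rather
// than overwriting it, so presets compose with volumes set earlier.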
#endif<file_sep>/VideoStream/VideoStream/include/VSVideo.h #pragma once #include "VSFrame.h" namespace VideoStream { void EnableLog (bool enable = true); void KillStream (LPCTSTR location); void KillAllStreams(); class CInputVideoStream { protected: virtual ~CInputVideoStream() {}; public: static CInputVideoStream *Create(); static void Destroy (CInputVideoStream *pStream); virtual bool Open (LPCTSTR location, unsigned int startFrame = 0) =0; virtual CFramePtr GetNextFrame() =0; virtual CFramePtr GetFrame(int frameNum) =0; virtual bool FastForward (unsigned int frames) =0; virtual bool IsEOV() =0; virtual void Close() =0; virtual const VIDEOINFO &GetVideoInfo() const =0; virtual unsigned int GetWidth() const =0; virtual unsigned int GetHeight() const =0; virtual unsigned int GetFrameRate() const =0; virtual unsigned int GetBitCount() const =0; }; class COutputVideoStream { protected: virtual ~COutputVideoStream() {}; public: static COutputVideoStream *Create(); static void Destroy (COutputVideoStream *pStream); virtual bool Open (LPCTSTR location, const VIDEOINFO &vi) =0; virtual bool WriteFrame (CFramePtr framePtr) =0; virtual void Close() =0; }; class CInputAudioStream { protected: virtual ~CInputAudioStream() {}; public: static CInputAudioStream *Create(); static void Destroy (CInputAudioStream *pStream); virtual bool Open (LPCTSTR location) =0; virtual const AUDIOINFO &GetAudioInfo() const =0; virtual int Read( unsigned char *bfr, int bfr_size, bool *eof ) =0; virtual void Close() =0; }; class COutputAudioStream { protected: virtual ~COutputAudioStream() {}; public: static COutputAudioStream *Create(); static void Destroy (COutputAudioStream *pStream); virtual bool Open (LPCTSTR location) =0; virtual void Close() =0; }; };<file_sep>/RenderingManager/RenderingManager/include/RenderingManagerLogger.h #pragma once #include "Logger.h" using namespace std; class RenderingManager; class RenderingManagerLogger : public Logger { private: RenderingManager * m_rndMng; friend class RenderingManager; public: virtual void vprintf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, va_list argList); virtual DebugLevel getDebugLevel(); }; <file_sep>/Fmod/Effects/EchoDsp.h #ifndef __ECHO_DSP_H__ #define __ECHO_DSP_H__ #include "DspEffect.h" #pragma once class EchoDsp : public DspEffect { private: FMOD::DSP *dsp; public: void create(FMOD::System *fmodSystem) { fmodSystem->createDSPByType(FMOD_DSP_TYPE_ECHO, &dsp); dspList.push_back(dsp); } void setDecayRatio(float factor) { dsp->setParameter(FMOD_DSP_ECHO_DECAYRATIO, factor); } void setDelay(float factor) { dsp->setParameter(FMOD_DSP_ECHO_DELAY, factor); } void setDryMix(float factor) { dsp->setParameter(FMOD_DSP_ECHO_DRYMIX, factor); } void setMaxChannels(float factor) { dsp->setParameter(FMOD_DSP_ECHO_MAXCHANNELS, factor); } void setWetMix(float factor) { dsp->setParameter(FMOD_DSP_ECHO_WETMIX, factor); } }; #endif <file_sep>/SVG2Video/SVG2VideoEngine.h #pragma once #include <string> #include <memory> #include "VSVideo.h" #include "Engine.h" #include "FrameWriter.h" #include "SVG2VideoArgs.h" //#include "Profiler.h" //#include "jni.h" #ifndef WIN32 #include "WindowDefinitions.h" typedef unsigned long long LARGE_INTEGER; typedef long HRESULT; #endif //class Profiler; class SVG2VideoEngine : public Engine { private: FrameWriter *m_writer; // Output stream //Profiler* m_profiler; // jmethodID m_methodTx; // JavaVM * m_jvm; // JNIEnv * m_env; bool parseArgs (int argc, char* argv[], SVG2VideoArgs &args ); // HRESULT initJNI(); 
HRESULT renderSvg(SVG2VideoArgs& args); HRESULT renderSvgByServer(SVG2VideoArgs& args); #ifdef WIN32 int FileTime2Ms(const FILETIME& ft); #endif void MakeFullOpaque(int width, int height, unsigned char *pixels, int count, VideoStream::CMutableFramePtr& framePtr); std::string GetRequestId(std::string outFile); public: SVG2VideoEngine(map<string, void *> *commonStorage); ~SVG2VideoEngine(void); // The entry point from the Rendeing Manager. void executeEngine(int argc, char** argv); }; <file_sep>/Fmod/AudioType.h #pragma once enum class AudioType { music, something }; <file_sep>/VideoStream/VideoStream/VSThreadInput.h #pragma once #include "VSMedia.h" #include <queue> #ifndef WIN32 #include <pthread.h> #include <semaphore.h> #include "CEventManager.h" #endif namespace VideoStream { class CThreadInput : public IInputVideoStream { private: IInputVideoStream *_pStream; waitForFrameCallback _pFrameCallback; void *_pContext; HANDLE _frameReady; HANDLE _framesDone; HANDLE _readEnd; CRITICAL_SECTION _mutex; bool _shouldStop; bool _eov; std::queue<CFramePtr> _queue; #ifdef WIN32 HANDLE _thread; HANDLE _queueBuffer; DWORD _threadId; unsigned long ThreadRun(); static unsigned long __stdcall ThreadEntryProc( void* pThis ); #else pthread_t _thread; sem_t _queueBuffer; static void* ThreadEntryProc( void* pThis ); #endif protected: public: #ifndef WIN32 DWORD _threadId; unsigned long ThreadRun(); #endif CThreadInput (IInputVideoStream *pStream); virtual ~CThreadInput(); bool Open (LPCTSTR location, MediaLocationParams & mlParams, int streamId); const VIDEOINFO &GetVideoInfo() const {return _pStream->GetVideoInfo();} bool FastForward (unsigned int frames); CFramePtr GetNextFrame(); void Close(); bool IsAsync() {return true;} void SetWaitForFrameCallback (waitForFrameCallback callback, void *pContext) {_pFrameCallback = callback; _pContext = pContext;} void RunThread(); }; };<file_sep>/LightSynthesizer/TargetFrames.cpp //#include "stdafx.h" #include "TargetFrames.h" FrameObject::FrameObject(TiXmlElement *frameObjectElement) { _sourceId = frameObjectElement->Attribute("id"); TiXmlElement *frameNumElement = frameObjectElement->FirstChildElement("Frame"); if (frameNumElement != NULL) _frameNum = frameNumElement->GetText(); TiXmlElement *positionElement = frameObjectElement->FirstChildElement("Position"); if (positionElement != NULL) _zPos = positionElement->Attribute("z"); } TargetFrame::TargetFrame(TiXmlElement *targetFrameElement) { _sequenceNumber = targetFrameElement->Attribute("sequenceNumber"); TiXmlElement *frameObjectElem; TiXmlElement *frameObjectsElem = targetFrameElement->FirstChildElement("FrameObjects"); for (frameObjectElem = frameObjectsElem->FirstChildElement(); frameObjectElem != NULL; frameObjectElem = frameObjectElem->NextSiblingElement()) { std::string frameObjectType = frameObjectElem->Attribute("xsi:type"); if (frameObjectType == "Video") _frameObjects.push_back(new FrameObject(frameObjectElem)); } frameObjectsElem = targetFrameElement->FirstChildElement("RemovedObjects"); if (frameObjectsElem != NULL) { for (frameObjectElem = frameObjectsElem->FirstChildElement(); frameObjectElem != NULL; frameObjectElem = frameObjectElem->NextSiblingElement()) { std::string frameObjectType = frameObjectElem->Attribute("xsi:type"); if (frameObjectType == "Video") _removedObjects.push_back(new FrameObject(frameObjectElem)); } } } TargetFrame::~TargetFrame() { // TBD: delete all frame objects } TargetFrames::TargetFrames(TiXmlDocument *document) { TiXmlElement *targetFramesElem = 
document->FirstChild("Movie")->FirstChildElement("TargetFrames"); TiXmlElement *targetFrameElem; for (targetFrameElem = targetFramesElem->FirstChildElement(); targetFrameElem != NULL; targetFrameElem = targetFrameElem->NextSiblingElement()) _frames.push_back(new TargetFrame(targetFrameElem)); } TargetFrames::~TargetFrames() { // TBD: delete all target frame objects }<file_sep>/Fmod/AudioRole.h #pragma once class AudioRole { public: AudioRole(void); ~AudioRole(void); }; <file_sep>/VideoStream/VideoStream/WAVFileMedia.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSWAVFileMedia.h" #include "VSFrame.h" #include "VSSync.h" #include "VSLog.h" #include "SWException.h" #include <stdio.h> #include <Share.h> using namespace VideoStream; const DWORD dwRiffId = mmioFOURCC ('R', 'I', 'F', 'F'); const DWORD dwRiffWaveId = mmioFOURCC ('W', 'A', 'V', 'E'); const DWORD dwRiffFmtId = mmioFOURCC ('f', 'm', 't', ' '); const DWORD dwRiffDataId = mmioFOURCC ('d', 'a', 't', 'a'); /*****************/ /* CInputWAVFile */ /*****************/ CInputWAVFile::CInputWAVFile( MediaLocationParams *locationParams ) { _fp = NULL; _pSync = NULL; _locationParams = locationParams; } // If throwOnErr is true - throw on error, otherwise return false (for the case that file is not full yet) bool CInputWAVFile::Open (LPCTSTR location, DWORD timeoutMS, bool throwOnErr) { // Try opening until we succeed or timeout passes DWORD starttick = GetTickCount(); #define INTER_OPEN_SLEEP 50 while ( true ) { _fp = _tfsopen (location, _T("rb"), _SH_DENYNO); if (_fp != NULL) break; // Succeeded // Test timeout DWORD curtick = GetTickCount(); if ( curtick-starttick > timeoutMS ) { if ( timeoutMS > 0 ) fprintf( stderr, "Timeout (%d ms) passed waiting to open wav\n", timeoutMS ); return false; // Timeout } // Sleep a while if ( _pSync ) _pSync->Sleep( INTER_OPEN_SLEEP ); // test for kill as well else Sleep( INTER_OPEN_SLEEP ); } // Read header if ( !ReadWavHeader(throwOnErr) ) return false; // Skip everything until a 'data' chunk if ( !ReadUpToData(throwOnErr) ) return false; return true; } void CInputWAVFile::Close() { if (_fp != NULL) { fclose (_fp); _fp = NULL; } } CInputWAVFile::~CInputWAVFile() { Close(); } bool CInputWAVFile::ReadWavHeader( bool throwOnErr ) { int nread; bool eof; /* read riff/wav header */ nread = ReadFullBfr( (unsigned char *)&_wavHdr,sizeof(_wavHdr),&eof ); if ( nread != sizeof(_wavHdr) ) { if ( throwOnErr ) throw SWException("Failed to read wav header\n"); else return false; } // check format of header if ( _wavHdr.rID != dwRiffId ) { if ( throwOnErr ) throw SWException("bad RIFF format\n"); else return false; } if ( _wavHdr.wID != dwRiffWaveId ) { if ( throwOnErr ) throw SWException("bad WAVE format\n"); else return false; } if ( _wavHdr.fId != dwRiffFmtId ) { if ( throwOnErr ) throw SWException("bad fmt format\n"); else return false; } if(_wavHdr.wFormatTag!=1) { if ( throwOnErr ) throw SWException("bad wav wFormatTag\n"); else return false; } // skip over any remaining portion of wav header int rmore = _wavHdr.pcm_header_len - (sizeof(WAV_HDR) - 20); unsigned char *tmp = new unsigned char[rmore]; nread = ReadFullBfr( tmp,rmore,&eof ); if ( nread != rmore ) { delete [] tmp; if ( throwOnErr ) throw SWException("cant seek to end of header\n"); else return false; } delete [] tmp; // Fill audio info _audioInfo.bitsPerSample = _wavHdr.nBitsPerSample; _audioInfo.numChannels = _wavHdr.nChannels; _audioInfo.sampleRate = _wavHdr.nSamplesPerSec; return true; } bool CInputWAVFile::ReadUpToData( bool throwOnErr ) { unsigned 
char tmpbfr[1024]; int nread; CHUNK_HDR chk; bool eof; // read chunks until a 'data' chunk is found while ( true ) { // read chunk header nread = ReadFullBfr( (unsigned char *)&chk,sizeof(chk),&eof ); if ( nread != sizeof(chk) ) { if ( throwOnErr ) throw SWException("Failed to read chunk header\n"); else return false; } // check chunk type if ( chk.dId == dwRiffDataId ) break; // skip over chunk nread = 0; while ( nread < chk.dLen ) { bool eof; nread += Read( tmpbfr,min(sizeof(tmpbfr),chk.dLen-nread),&eof ); if ( eof ) return false; } } return true; } // Read bfr_size bytes. // Only if data ended (or timeout/kill) return less than bfr_size bytes // Return number of bytes read / -1 for error int CInputWAVFile::ReadFullBfr( unsigned char *bfr, int bfr_size, bool *eof) { int nread = 0; while ( nread < bfr_size ) { bool eof_cur; int nread_cur = Read( bfr+nread, bfr_size-nread, &eof_cur ); if ( nread_cur < 0 ) return nread_cur;//error nread += nread_cur; if ( eof_cur ) { *eof = true; break; } } return nread; } // Read bfr_size bytes. // If less bytes are currently available - return less than bfr_size bytes // If no bytes are currently available - wait until at least one byte is available (or timeout) // Return number of bytes read / -1 for error int CInputWAVFile::Read( unsigned char *bfr, int bfr_size, bool *eof) { *eof = false; int nread = 0; bool endEventReceived = false; DWORD timeoutMS = _locationParams->timeoutMS; DWORD starttick = GetTickCount(); // Wait until we get at least one byte or a timeout occurs. while ( true ) { nread = (int)fread(bfr,1,bfr_size,_fp); if ( nread > 0 ) break; // // No bytes read // if ( _pSync ) { // if we're streaming - wait for end event first. if ( endEventReceived ) { //evev - maybe look at chunk size now?? *eof = true; break; } if ( _pSync->HasProducingEnded( 100 ) )//sleep a while - check for timeout / kill { // File is complete - next time we get nread==0 it means end of file. endEventReceived = true; } } else // Not streaming - end of file { *eof = true; break; } // Check for timeout DWORD curtick = GetTickCount(); if ( curtick-starttick > timeoutMS ) { throw SWException("Timeout (%d ms) passed waiting to read from wav\n", timeoutMS ); } } return nread; } #else #include "VSWAVFileMedia.h" #include "VSFrame.h" #include "VSSync.h" #include "VSLog.h" #include "SWException.h" #include <stdio.h> #include <unistd.h> #include <algorithm> using namespace VideoStream; const DWORD dwRiffId = mmioFOURCC('R', 'I', 'F', 'F'); const DWORD dwRiffWaveId = mmioFOURCC('W', 'A', 'V', 'E'); const DWORD dwRiffFmtId = mmioFOURCC('f', 'm', 't', ' '); const DWORD dwRiffDataId = mmioFOURCC('d', 'a', 't', 'a'); #ifndef min #define min(a,b) (((a) < (b)? 
(a) : (b))) #endif /*****************/ /* CInputWAVFile */ /*****************/ CInputWAVFile::CInputWAVFile(MediaLocationParams *locationParams) { _fp = NULL; _pSync = NULL; _locationParams = locationParams; } // If throwOnErr is true - throw on error, otherwise return false (for the case that file is not full yet) bool CInputWAVFile::Open(LPCTSTR location, DWORD timeoutMS, bool throwOnErr) { // Try opening until we succeed or timeout passes // DWORD starttick = GetTickCount(); struct timeval starttv; gettimeofday(&starttv, 0); DWORD starttick = starttv.tv_sec * 1000 + starttv.tv_usec / 1000; _location = location; #define INTER_OPEN_SLEEP 70 while (true) { _fp = fopen(location, _T("rb")); //, _SH_DENYNO); if (_fp != NULL) break; // Succeeded // Test timeout struct timeval curtv; gettimeofday(&curtv, 0); DWORD curtick = curtv.tv_sec * 1000 + curtv.tv_usec / 1000; //GetTickCount(); if (curtick - starttick > timeoutMS) { if (timeoutMS > 0) fprintf(stderr, "Timeout (%d ms) passed waiting to open wav\n", timeoutMS); return false; // Timeout } // Sleep a while if (_pSync) _pSync->Sleep(INTER_OPEN_SLEEP); // test for kill as well else usleep(INTER_OPEN_SLEEP * 1000); } /*if (_pSync) _pSync->Sleep(INTER_OPEN_SLEEP); // test for kill as well else usleep(INTER_OPEN_SLEEP * 1000); if (!ReadWavHeader(throwOnErr)) return false; */ // Read header int counter = 0; int good = 0; while (counter < 100) { if (!ReadWavHeader(throwOnErr)) { //return false; counter++; if (_pSync) _pSync->Sleep(INTER_OPEN_SLEEP); else usleep(INTER_OPEN_SLEEP * 1000); } else { good = 1; break; } } if (good == 0) return false; // Skip everything until a 'data' chunk if (!ReadUpToData(throwOnErr)) return false; return true; } void CInputWAVFile::Close() { if (_fp != NULL) { fclose(_fp); _fp = NULL; } } CInputWAVFile::~CInputWAVFile() { Close(); } bool CInputWAVFile::ReadWavHeader(bool throwOnErr) { int nread; bool eof; /* read riff/wav header */ nread = ReadFullBfr((unsigned char *)&_wavHdr, sizeof(_wavHdr), &eof); // fprintf(myfile, "nread %d\n", nread); if (nread != sizeof(_wavHdr)) { if (throwOnErr) throw SWException("Failed to read wav header\n"); else return false; } // check format of header if (_wavHdr.rID != dwRiffId) { if (throwOnErr) throw SWException("bad RIFF format\n"); else return false; } if (_wavHdr.wID != dwRiffWaveId) { if (throwOnErr) throw SWException("bad WAVE format\n"); else return false; } if (_wavHdr.fId != dwRiffFmtId) { if (throwOnErr) throw SWException("bad fmt format\n"); else return false; } if (_wavHdr.wFormatTag != 1) { if (throwOnErr) throw SWException("bad wav wFormatTag\n"); else return false; } // skip over any remaining portion of wav header int rmore = _wavHdr.pcm_header_len - (sizeof(WAV_HDR) - 20); unsigned char *tmp = new unsigned char[rmore]; nread = ReadFullBfr(tmp, rmore, &eof); if (nread != rmore) { delete[] tmp; if (throwOnErr) throw SWException("cant seek to end of header\n"); else return false; } delete[] tmp; // Fill audio info _audioInfo.bitsPerSample = _wavHdr.nBitsPerSample; _audioInfo.numChannels = _wavHdr.nChannels; _audioInfo.sampleRate = _wavHdr.nSamplesPerSec; return true; } bool CInputWAVFile::ReadUpToData(bool throwOnErr) { unsigned char tmpbfr[1024]; int nread; CHUNK_HDR chk; bool eof; // read chunks until a 'data' chunk is found while (true) { // read chunk header nread = ReadFullBfr((unsigned char *)&chk, sizeof(chk), &eof); if (nread != sizeof(chk)) { if (throwOnErr) throw SWException("Failed to read chunk header\n"); else return false; } // check chunk type if (chk.dId 
== dwRiffDataId) break; // skip over chunk nread = 0; while (nread < chk.dLen) { bool eof; nread += Read(tmpbfr, min(sizeof(tmpbfr), chk.dLen - nread), &eof); if (eof) return false; } } return true; } // Read bfr_size bytes. // Only if data ended (or timeout/kill) return less than bfr_size bytes // Return number of bytes read / -1 for error int CInputWAVFile::ReadFullBfr(unsigned char *bfr, int bfr_size, bool *eof) { int nread = 0; while (nread < bfr_size) { bool eof_cur; int nread_cur = Read(bfr + nread, bfr_size - nread, &eof_cur); if (nread_cur < 0) return nread_cur;//error nread += nread_cur; if (eof_cur) { *eof = true; break; } } return nread; } // Read bfr_size bytes. // If less bytes are currently available - return less than bfr_size bytes // If no bytes are currently available - wait until at least one byte is available (or timeout) // Return number of bytes read / -1 for error int CInputWAVFile::Read(unsigned char *bfr, int bfr_size, bool *eof) { *eof = false; int nread = 0; bool endEventReceived = false; DWORD timeoutMS = _locationParams->timeoutMS; struct timeval starttv; gettimeofday(&starttv, 0); DWORD starttick = starttv.tv_sec * 1000 + starttv.tv_usec / 1000; // Wait until we get at least one byte or a timeout occurs. while (true) { nread = (int)fread(bfr, 1, bfr_size, _fp); if (nread > 0) { break; } // // No bytes read // //if (_pSync) { // if we're streaming - wait for end event first. if (endEventReceived) { //evev - maybe look at chunk size now?? *eof = true; break; } //Check if the finished file is already written by fmod std::string finishedFilename = _location + ".finished"; FILE * finishedFile = fopen(finishedFilename.c_str(), "r"); if (finishedFile != NULL) { // File is complete - next time we get nread==0 it means end of file. 
endEventReceived = true; fclose(finishedFile); } else { usleep(100000); } //} //else // Not streaming - end of file //{ // *eof = true; // break; //} // Check for timeout struct timeval curtv; gettimeofday(&curtv, 0); DWORD curtick = curtv.tv_sec * 1000 + curtv.tv_usec / 1000; //GetTickCount(); if (curtick - starttick > timeoutMS) { throw SWException("Timeout (%d ms) passed waiting to read from wav\n", timeoutMS); } } return nread; } #endif <file_sep>/NewEncoder/VideoEncoder/include/VideoEncoderEngineFactory.h #pragma once //#include <windows.h> #include <string> #include "EngineFactory.h" class VideoEncoderEngineFactory : public EngineFactory { public: Engine *createEngine(); // The init function is called at the beginning (even if this engine is never executed) virtual void init(); // Initialize ffmpeg if needed }; <file_sep>/VideoStream/VideoStream/VSAudio.h #pragma once namespace AudioStream { class __declspec(dllexport) CInputAudioStream abstract { protected: virtual ~CInputAudioStream() {}; public: static CInputAudioStream *Create(); static void Destroy (CInputAudioStream *pStream); virtual bool Open (LPCTSTR location, unsigned int startFrame = 0) abstract; virtual bool Read( unsigned char *bfr, size_t bfr_size ) abstract; //virtual bool HasProducingEnded( DWORD timeoutMs ) abstract; virtual void Close() abstract; }; class __declspec(dllexport) COutputAudioStream abstract { protected: virtual ~COutputAudioStream() {}; public: static COutputAudioStream *Create(); static void Destroy (COutputAudioStream *pStream); virtual bool Open (LPCTSTR location) abstract; virtual void Close() abstract; }; };<file_sep>/utils/Utils/ColorSpace.cpp #include "ColorSpace.h" //////////////////////////////////////////////////////////////////////////////// // Color Conversions //////////////////////////////////////////////////////////////////////////////// #define SCALEBITS 10 #define ONE_HALF (1 << (SCALEBITS - 1)) #define FIX(x) ((int) ((x) * (1<<SCALEBITS) + 0.5)) #define RGB_TO_Y_CCIR(r, g, b) \ ((FIX(0.29900*219.0/255.0) * (r) + FIX(0.58700*219.0/255.0) * (g) + \ FIX(0.11400*219.0/255.0) * (b) + (ONE_HALF + (16 << SCALEBITS))) >> SCALEBITS) #define RGB_TO_U_CCIR(r1, g1, b1, shift)\ (((- FIX(0.16874*224.0/255.0) * r1 - FIX(0.33126*224.0/255.0) * g1 + \ FIX(0.50000*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128) #define RGB_TO_V_CCIR(r1, g1, b1, shift)\ (((FIX(0.50000*224.0/255.0) * r1 - FIX(0.41869*224.0/255.0) * g1 - \ FIX(0.08131*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128) //static void ColorSpace::rgb24_to_yuv420p(unsigned char *lum, unsigned char *cb, unsigned char *cr, const unsigned char *src, int width, int height, int src_stride) { int wrap, x, y; int r, g, b, r1, g1, b1; wrap = width; const unsigned char *prow = src; for(y=0;y<height;y+=2) { const unsigned char *p = prow; for(x=0;x<width;x+=2) { r = p[0]; g = p[1]; b = p[2]; r1 = r; g1 = g; b1 = b; lum[0] = RGB_TO_Y_CCIR(r,g,b); r = p[3]; g = p[4]; b = p[5]; r1 += r; g1 += g; b1 += b; lum[1] = RGB_TO_Y_CCIR(r,g,b); p += src_stride; lum += wrap; r = p[0]; g = p[1]; b = p[2]; r1 += r; g1 += g; b1 += b; lum[0] = RGB_TO_Y_CCIR(r,g,b); r = p[3]; g = p[4]; b = p[5]; r1 += r; g1 += g; b1 += b; lum[1] = RGB_TO_Y_CCIR(r,g,b); cb[0] = RGB_TO_U_CCIR(r1, g1, b1, 2); cr[0] = RGB_TO_V_CCIR(r1, g1, b1, 2); cb++; cr++; p += -src_stride + 2 * 3; // Go up one row and forward two pixels. Each pixel is 3 bytes. 
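// One 2x2 block is now done: four luma samples were written, plus a single
// Cb/Cr pair averaged over the block (r1/g1/b1 hold the 4-pixel sums, which
// is why RGB_TO_U_CCIR / RGB_TO_V_CCIR are called with a shift of 2).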
lum += -wrap + 2; } prow += (2*src_stride); lum += wrap; } } //static void ColorSpace::rgb32_to_yuv420p(unsigned char *lum, unsigned char *cb, unsigned char *cr, const unsigned char *src, int width, int height, int src_stride ) { int wrap, x, y; int r, g, b, r1, g1, b1; wrap = width; const unsigned char *prow = src; for(y=0;y<height;y+=2) { const unsigned char *p = prow; for(x=0;x<width;x+=2) { b = p[0]; g = p[1]; r = p[2]; r1 = r; g1 = g; b1 = b; lum[0] = RGB_TO_Y_CCIR(r,g,b); b = p[4]; g = p[5]; r = p[6]; r1 += r; g1 += g; b1 += b; lum[1] = RGB_TO_Y_CCIR(r,g,b); p += src_stride; lum += wrap; b = p[0]; g = p[1]; r = p[2]; r1 += r; g1 += g; b1 += b; lum[0] = RGB_TO_Y_CCIR(r,g,b); b = p[4]; g = p[5]; r = p[6]; r1 += r; g1 += g; b1 += b; lum[1] = RGB_TO_Y_CCIR(r,g,b); cb[0] = RGB_TO_U_CCIR(r1, g1, b1, 2); cr[0] = RGB_TO_V_CCIR(r1, g1, b1, 2); cb++; cr++; p += -src_stride + 2 * 4; // Go up one row and forward two pixels. Each pixel is 4 bytes. lum += -wrap + 2; } prow += (2*src_stride); lum += wrap; } } ////////////////////////////////////////////////////////////////////////////// #ifdef OUR_YUV2RGB #define MAX_NEG_CROP 1024 static unsigned char ff_cropTbl[256 + 2 * MAX_NEG_CROP]; static const unsigned char *fillCroTbl() { int i; for(i=0;i<256;i++) ff_cropTbl[i + MAX_NEG_CROP] = i; for(i=0;i<MAX_NEG_CROP;i++) { ff_cropTbl[i] = 0; ff_cropTbl[i + MAX_NEG_CROP + 256] = 255; } return ff_cropTbl + MAX_NEG_CROP; } static const unsigned char *cm = fillCroTbl(); //#define EVEV_CCIR #ifdef EVEV_CCIR #define YUV_TO_RGB1(cb1, cr1)\ {\ cb = (cb1) - 128;\ cr = (cr1) - 128;\ r_add = FIX(1.40200*255.0/224.0) * cr + ONE_HALF;\ g_add = - FIX(0.34414*255.0/224.0) * cb - FIX(0.71414*255.0/224.0) * cr + \ ONE_HALF;\ b_add = FIX(1.77200*255.0/224.0) * cb + ONE_HALF;\ } #define YUV_TO_RGB2(r, g, b, y1)\ {\ y = ((y1) - 16) * FIX(255.0/219.0);\ r = cm[(y + r_add) >> SCALEBITS];\ g = cm[(y + g_add) >> SCALEBITS];\ b = cm[(y + b_add) >> SCALEBITS];\ } #else #define YUV_TO_RGB1(cb1, cr1)\ {\ cb = (cb1) - 128;\ cr = (cr1) - 128;\ r_add = FIX(1.40200) * cr + ONE_HALF;\ g_add = - FIX(0.34414) * cb - FIX(0.71414) * cr + ONE_HALF;\ b_add = FIX(1.77200) * cb + ONE_HALF;\ } #define YUV_TO_RGB2(r, g, b, y1)\ {\ y = (y1) << SCALEBITS;\ r = cm[(y + r_add) >> SCALEBITS];\ g = cm[(y + g_add) >> SCALEBITS];\ b = cm[(y + b_add) >> SCALEBITS];\ } #endif #define RGB_OUT(d, r, g, b)\ {\ (d)[0] = b;\ (d)[1] = g;\ (d)[2] = r;\ } //static void ColorSpace::yuv420p_to_rgb32(const unsigned char *plum, const unsigned char *pcb, const unsigned char *pcr, int lum_stride, int cb_stride, int cr_stride, unsigned char *dst, int width, int height, int dst_stride ) { const unsigned char *y1_ptr, *y2_ptr, *cb_ptr, *cr_ptr; unsigned char *dstrow, *d1, *d2; int w, y, cb, cr, r_add, g_add, b_add, width2; unsigned int r, g, b; int bytes_per_pixel = 4; dstrow = dst; y1_ptr = plum; cb_ptr = pcb; cr_ptr = pcr; width2 = (width + 1) >> 1; for(;height >= 2; height -= 2) { d1 = dstrow; d2 = dstrow + dst_stride; y2_ptr = y1_ptr + lum_stride; for(w = width; w >= 2; w -= 2) { YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]); // Fill r_add, g_add, b_add (according to cb,cr) // r_add = g_add = b_add = 0;//evev remove /* output 4 pixels */ YUV_TO_RGB2(r, g, b, y1_ptr[0]); // Fill r,g,b RGB_OUT(d1, r, g, b); // Put data in destination YUV_TO_RGB2(r, g, b, y1_ptr[1]); RGB_OUT(d1 + bytes_per_pixel, r, g, b); YUV_TO_RGB2(r, g, b, y2_ptr[0]); RGB_OUT(d2, r, g, b); YUV_TO_RGB2(r, g, b, y2_ptr[1]); RGB_OUT(d2 + bytes_per_pixel, r, g, b); d1 += 2 * bytes_per_pixel; d2 += 2 * 
bytes_per_pixel; y1_ptr += 2; y2_ptr += 2; cb_ptr++; cr_ptr++; } /* handle odd width */ if (w) { YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]); YUV_TO_RGB2(r, g, b, y1_ptr[0]); RGB_OUT(d1, r, g, b); YUV_TO_RGB2(r, g, b, y2_ptr[0]); RGB_OUT(d2, r, g, b); d1 += bytes_per_pixel; d2 += bytes_per_pixel; y1_ptr++; y2_ptr++; cb_ptr++; cr_ptr++; } dstrow += 2 * dst_stride; //y1_ptr += width; y1_ptr += 2 * lum_stride - width; cb_ptr += cb_stride - width2; cr_ptr += cr_stride - width2; } /* handle odd height */ if (height) { d1 = dstrow; for(w = width; w >= 2; w -= 2) { YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]); /* output 2 pixels */ YUV_TO_RGB2(r, g, b, y1_ptr[0]); RGB_OUT(d1, r, g, b); YUV_TO_RGB2(r, g, b, y1_ptr[1]); RGB_OUT(d1 + bytes_per_pixel, r, g, b); d1 += 2 * bytes_per_pixel; y1_ptr += 2; cb_ptr++; cr_ptr++; } /* handle odd width */ if (w) { YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]); /* output 2 pixels */ YUV_TO_RGB2(r, g, b, y1_ptr[0]); RGB_OUT(d1, r, g, b); d1 += bytes_per_pixel; y1_ptr++; cb_ptr++; cr_ptr++; } } } #endif <file_sep>/utils/StackWalker/CMakeLists.txt add_library(StackWalker StackWalker.cpp SWException.cpp) target_include_directories(StackWalker PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/)<file_sep>/LightSynthesizer/TargetFrames.h #pragma once #include <vector> #include <string> #include "tinyxml.h" class FrameObject { std::string _sourceId; std::string _frameNum; std::string _zPos; public: FrameObject(TiXmlElement *frameObjectElement); virtual ~FrameObject() {}; const std::string &getSourceId() const {return _sourceId;} const std::string &getFrameNum() const {return _frameNum;} const std::string &getZPos() const {return _zPos;} }; class TargetFrame { private: std::vector<FrameObject *> _frameObjects; std::vector<FrameObject *> _removedObjects; std::string _sequenceNumber; public: TargetFrame(TiXmlElement *targetFrameElement); virtual ~TargetFrame(); const std::vector<FrameObject *> &getFrameObjects() const {return _frameObjects;} const std::vector<FrameObject *> &getRemovedObjects() const {return _removedObjects;} const std::string &getSequenceNumber() const {return _sequenceNumber;} }; class TargetFrames { private: std::vector<TargetFrame *> _frames; public: TargetFrames(TiXmlDocument *document); virtual ~TargetFrames(); const std::vector<TargetFrame *> &getFrames() const {return _frames;} };<file_sep>/RenderingManager/RenderingManager/RenderingManager.h #pragma once //#include <windows.h> #include <iostream> #include <string> #include <map> #ifndef WIN32 #include <pthread.h> #include <stdarg.h> #endif #include "EngineManager.h" #include "CliManager.h" #include "DebugLevel.h" #include "RenderingManagerLogger.h" #include "MutexAutoLock.h" //#include "jni.h" #ifndef WIN32 using namespace std; class MutexAutoLock; #endif class RenderingManager { private: EngineManager m_engineManager; CliManager m_cliManager; istream *m_inputFile; FILE * m_notificationFile; FILE * m_logFile; FILE * m_captureFile; DebugLevel m_defaultDebugLevel; // can be overriden for specific engine executions // Mutexes: m_fileMutex and m_stopExecutionMutex can be taken under m_engineListMutex but never vice versa. They should not be taken under each other. #ifdef WIN32 HANDLE m_fileMutex; // For protecting the various files (notification, log) from multiple threads. HANDLE m_stopExecutionMutex; // For protecting m_stopExecutions. HANDLE m_engineListMutex; // For protecting the engine list from multiple threads. #else pthread_mutex_t m_fileMutex; // For protecting the various files (notification, log) from multiple threads. 
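// The nested *MutexAutoLock helpers declared at the bottom of this class are
// the intended way to take these mutexes (the MutexAutoLock base presumably
// releases the lock in its destructor). Sketch following the ordering rule
// documented above - engine-list lock outermost, file lock nested under it
// when both are needed:
//
//   {
//       EngineListMutexAutoLock listLock(this);
//       // ... walk or modify the engine list ...
//       {
//           FileMutexAutoLock fileLock(this);
//           // ... write to the notification or log file ...
//       }
//   }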
pthread_mutex_t m_stopExecutionMutex; // For protecting m_stopExecutions. pthread_mutex_t m_engineListMutex; // For protecting the engine list from multiple threads. #endif bool m_noMoreNewExecutions; // If true we do not allow any further executions and a trigger is set for the process ending (either after all engines end or after all engines of a certain type end). string m_stopAfterEngineType; // If not empty (and m_noMoreNewExecutions is true) the process ends when all executions of this type end. bool m_stopExecutions; // We are in the process of stopping the process (whether or not all executions ended). bool m_dbg; RenderingManagerLogger m_logger; map<string, void *> m_commonStorage; // Storage to be shared between all engines //JavaVM * m_jvm; //JNIEnv * m_env; //HINSTANCE jvmdll; bool m_svgSupported; public: #ifdef WIN32 RenderingManager(bool showPrompt, bool dbg = false) : m_cliManager(showPrompt), m_inputFile(NULL), m_fileMutex(NULL), m_stopExecutionMutex(NULL), m_engineListMutex(NULL), m_noMoreNewExecutions(false), m_stopExecutions(false), m_dbg(dbg) {} #else RenderingManager(bool showPrompt, bool dbg = false) : m_cliManager(showPrompt), m_inputFile(NULL), m_noMoreNewExecutions(false), m_stopExecutions(false), m_dbg(dbg) {} #endif bool init(const string & baseFolder, const string &inputFileName, const string & notificationFileName, const string & logFileName, const string & captureFileName, const string & debugLevelString, const string & params); ~RenderingManager(); int run(); EngineManager & getEngineManager() {return m_engineManager;}; FILE * getNotificationFile() {return m_notificationFile;}; FILE * getLogFile() {return m_logFile;}; map<string, void *> *getCommonStorage() {return &m_commonStorage;} DebugLevel getDefaultDebugLevel() {return m_defaultDebugLevel;}; bool areExecutionsAllowed() {return !m_noMoreNewExecutions;}; // Write to notification file that an engine starts/ends void writeEngineStart(Engine * engine); void writeEngineEnd(Engine * engine); void writeProgress(unsigned int progress, unsigned int outOf); void writeReadyToPlay(unsigned int renderedChunk); void writeTotalSleepTime(unsigned int totalSleepTime); void sceneRendered(const char* sceneName, unsigned int frames, float renditionTime, float videoRenditionTime); void setEncoderProgress(unsigned int frames, double timePassed, double averageFramesPerSec); void exceedPaddedSize(int codecType, int frameNumber, int frameSize, int maxFrameSize); void writeEvent(const char* name,const char* value); void stopExecutionsDueToEngineFailure(Engine *engine, const char *format, va_list argList); bool applyExitConditions(); private: void initEngineTypes(); bool parseArgs(char *argv[], int argc, string & engineType, string & executionId, DebugLevel & debugLevel, int & firstArgIndex); bool parseParams(const string & params); void endJobExecutionNormally(); void stopExecutionsDueToEngineEnding(string endingEngineType); void killExecutions(); bool testAndSetStopExecutions(); void writeToNotificationFile(const char *format, ...); void writeToNotificationFile(Engine *engine, const char *format, ...); void writeToNotificationFileNoHeader(const char *format, ...); void writeToNotificationFile_va(bool printHeader, Engine *engine, const char *format, va_list argList); // CLI command callbacks static void executeEngine(string fullCommand, void *callbackData, bool & exitCli); static void reportEngines(string fullCommand, void *callbackData, bool & exitCli); static void reportEngineTypes(string fullCommand, void *callbackData, bool & 
exitCli); static void stopExecutions(string fullCommand, void *callbackData, bool & exitCli); static void stopExecutionsAfter(string fullCommand, void *callbackData, bool & exitCli); static void killEngines(string fullCommand, void *callbackData, bool & exitCli); static void limitExecutions(string fullCommand, void *callbackData, bool & exitCli); static void pauseLocalVideoStreams(string fullCommand, void *callbackData, bool & exitCli); static void resumeLocalVideoStreams(string fullCommand, void *callbackData, bool & exitCli); public: // The following MutexAutoLock classes are used for the rendering manager's mutexes. class FileMutexAutoLock : public MutexAutoLock { public: FileMutexAutoLock(RenderingManager *rndMng) : MutexAutoLock(rndMng->m_fileMutex) {} }; class StopExecutionMutexAutoLock : public MutexAutoLock { public: StopExecutionMutexAutoLock(RenderingManager *rndMng) : MutexAutoLock(rndMng->m_stopExecutionMutex) {} }; class EngineListMutexAutoLock : public MutexAutoLock { public: EngineListMutexAutoLock(RenderingManager *rndMng) : MutexAutoLock(rndMng->m_engineListMutex) {} }; }; <file_sep>/VideoStream/VideoStream/Sync.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSSync.h" #include "VSLog.h" #include "VSExceptions.h" #include "VSUtils.h" #include <stdlib.h> #include <tchar.h> using namespace VideoStream; /****************/ /* CSyncObjects */ /****************/ const LPCTSTR produceStartEventPrefix = _T("ProduceStart_"); const LPCTSTR produceEndEventPrefix = _T("ProduceEnd_"); const LPCTSTR consumeEndEventPrefix = _T("ConsumeEnd_"); const LPCTSTR newFrameEventPrefix = _T("NewFrameEvent"); const LPCTSTR newBufSpaceEventPrefix = _T("NewBufSpace"); const LPCTSTR framesProducedPrefix = _T("FramesProduced_"); const LPCTSTR framesConsumedPrefix = _T("FramesConsumed_"); const LPCTSTR nConsumersPrefix = _T("nConsumers_"); const LPCTSTR handhsakeMutexPrefix = _T("Handshake_"); const LPCTSTR killEventPrefix = _T("VSKill_"); static std::string produceStartEventName (LPCTSTR location) {return GetSyncObjName (produceStartEventPrefix, location);} static std::string produceEndEventName (LPCTSTR location) {return GetSyncObjName (produceEndEventPrefix, location);} static std::string consumeEndEventName (LPCTSTR location) {return GetSyncObjName (consumeEndEventPrefix, location);} static std::string newBufSpaceEventName (LPCTSTR location) {return GetSyncObjName (newBufSpaceEventPrefix, location);} static std::string framesProducedName (LPCTSTR location) {return GetSyncObjName (framesProducedPrefix, location);} static std::string framesConsumedName (LPCTSTR location) {return GetSyncObjName (framesConsumedPrefix, location);} static std::string nConsumersName (LPCTSTR location) {return GetSyncObjName (nConsumersPrefix, location);} static std::string handshakeMutexName (LPCTSTR location) {return GetSyncObjName (handhsakeMutexPrefix, location);} static std::string killEventName (LPCTSTR location) {return GetSyncObjName (killEventPrefix, location);} CSyncObjects::CSyncObjects (LPCTSTR location) : _framesProduced(framesProducedName(location).c_str()), _framesConsumed(framesConsumedName(location).c_str()), _nConsumers(nConsumersName(location).c_str()) { _location = _tcsdup (location); _hProduceStartEvent = CreateEvent (NULL, TRUE, FALSE, produceStartEventName (location).c_str()); _hProduceEndEvent = CreateEvent (NULL, TRUE, FALSE, produceEndEventName (location).c_str()); _hConsumeEndEvent = CreateEvent (NULL, TRUE, FALSE, consumeEndEventName (location).c_str()); _hNewBufSpaceEvent = CreateEvent (NULL, 
TRUE, FALSE, newBufSpaceEventName (location).c_str()); _hKillEvent = CreateEvent (NULL, TRUE, FALSE, killEventName (location).c_str()); } CSyncObjects::~CSyncObjects() { free (_location); if (_hProduceStartEvent != NULL) CloseHandle (_hProduceStartEvent); if (_hProduceEndEvent != NULL) CloseHandle (_hProduceEndEvent); if (_hConsumeEndEvent != NULL) CloseHandle (_hConsumeEndEvent); if (_hNewBufSpaceEvent != NULL) CloseHandle (_hNewBufSpaceEvent); if (_hKillEvent != NULL) CloseHandle (_hKillEvent); } std::string CSyncObjects::GetNewFrameEventName (int consumerNum) { TCHAR prefix[MAX_PATH+1]; _stprintf_s (prefix, MAX_PATH, _T("%s%d_"), newFrameEventPrefix, consumerNum); return GetSyncObjName (prefix, _location); } std::string CSyncObjects::GetHandshakeMutexName() { return handshakeMutexName (_location); } HANDLE CSyncObjects::_hGlobalKillEvent = NULL; HANDLE CSyncObjects::GlobalKillEvent() { if (_hGlobalKillEvent == NULL) _hGlobalKillEvent = CreateEvent (NULL, TRUE, FALSE, killEventPrefix); return _hGlobalKillEvent; } HANDLE CSyncObjects::ProduceStartEvent() { return _hProduceStartEvent; } HANDLE CSyncObjects::ProduceEndEvent() { return _hProduceEndEvent; } bool CSyncObjects::AlreadyCreated (LPCTSTR location) { HANDLE hProduceStartEvent = OpenEvent (SYNCHRONIZE, FALSE, produceStartEventName (location).c_str()); if (hProduceStartEvent == NULL) return false; else { CloseHandle (hProduceStartEvent); return true; } } void CSyncObjects::KillStream (LPCTSTR location) { HANDLE killEvent = CreateEvent (NULL, TRUE, FALSE, killEventName (location).c_str()); SetEvent (killEvent); CloseHandle (killEvent); } void CSyncObjects::KillAllStreams() { SetEvent (GlobalKillEvent()); } // On timeout: if throwOnTimeout==false - return -1, otherwise throw exception int CSyncObjects::WaitForObjects (int nObjects, HANDLE objects[], DWORD dwTimeout, bool throwOnTimeout) // throws CSyncTimeoutException, CSyncKillException { // Add kill events to objects const int MAX_OBJS = 10; HANDLE allObjs[MAX_OBJS+2]; for (int i=0; i < nObjects; i++) allObjs[i] = objects[i]; allObjs[nObjects] = KillEvent(); allObjs[nObjects+1] = GlobalKillEvent(); // Wait for original objects or kill events DWORD dwResult = WaitForMultipleObjects(nObjects+2, allObjs, FALSE, dwTimeout); if (dwResult == WAIT_TIMEOUT) { CLog::Write ("TIMEOUT reached"); if ( throwOnTimeout ) throw CSyncTimeoutException(); else return -1; } int waitObjNum = dwResult - (dwResult >= WAIT_ABANDONED_0 ? 
WAIT_ABANDONED_0 : WAIT_OBJECT_0); if (waitObjNum >= nObjects) { CLog::Write ("KILL event caught"); throw CSyncKillException(); } // Return number of object that was set return waitObjNum; } void CSyncObjects::WaitForObject (HANDLE object, DWORD dwTimeout) // throws CSyncTimeoutException, CSyncKillException { WaitForObjects (1, &object, dwTimeout); } /**************/ /* CInputSync */ /**************/ CInputSync::CInputSync (LPCTSTR location, DWORD timeoutMS) : _timeoutMS(timeoutMS) { _pSyncObjs = new CSyncObjects (location); _framesConsumed = 0; _framesWaited = 0; } CInputSync::~CInputSync() { Close(); delete _pSyncObjs; } bool CInputSync::IsVideoBeingProduced (LPCTSTR location) { return CSyncObjects::AlreadyCreated (location); } void CInputSync::WaitForVideo() // throws CSyncTimeoutException, CSyncKillException { _pSyncObjs->WaitForObject (_pSyncObjs->ProduceStartEvent(), getTimeoutMS() ); } void CInputSync::InitConsumer() { int consumerNum = GetConsumerNumber(); _hNewFrameEvent = CreateEvent (NULL, TRUE, FALSE, _pSyncObjs->GetNewFrameEventName(consumerNum).c_str()); } CInputSync *CInputSync::Handshake (LPCTSTR location, bool waitForProducer, DWORD timeoutMS) // throws CSyncTimeoutException, CSyncKillException { // This mutex assures that the producer sync objects are not being destroyed during handshake HANDLE handshakeMutex = CreateMutex (NULL, FALSE, handshakeMutexName (location).c_str()); WaitForSingleObject (handshakeMutex, timeoutMS ); try { CInputSync *pSync = NULL; if (waitForProducer || IsVideoBeingProduced (location)) { // Wait for video to be produced pSync = new CInputSync (location, timeoutMS); pSync->WaitForVideo(); pSync->InitConsumer(); } ReleaseMutex(handshakeMutex); CloseHandle (handshakeMutex); return pSync; } catch (...) { ReleaseMutex(handshakeMutex); CloseHandle (handshakeMutex); throw; } } bool CInputSync::HasProducingEnded( DWORD timeoutMs ) { HANDLE produceEndEvent = _pSyncObjs->ProduceEndEvent(); int result = _pSyncObjs->WaitForObjects (1, &produceEndEvent, timeoutMs, false); // -1 ==> timeout, i.e. producing not ended. return (result != -1); } // Sleep (also test kill events) void CInputSync::Sleep( DWORD timeoutMs ) { _pSyncObjs->WaitForObjects (0, NULL, timeoutMs, false); } bool CInputSync::WaitForFrame (HANDLE readEndEventHandle) // throws CSyncTimeoutException, CSyncKillException { CSharedData &framesData = _pSyncObjs->FramesProduced(); framesData.Block(); int framesProduced = framesData.ReadInt(false); if (framesProduced <= _framesWaited) { ResetEvent (_hNewFrameEvent); framesData.Release(); HANDLE newFrameHandles[3] = {_hNewFrameEvent, _pSyncObjs->ProduceEndEvent(), readEndEventHandle}; // Wait either for new frame event or for produce-end event which means that video is complete int result = _pSyncObjs->WaitForObjects (readEndEventHandle != NULL ? 3 : 2, newFrameHandles, getTimeoutMS() ); _framesWaited++; return (result == 0 ? 
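	/*
	 * [Illustrative sketch - not part of the original sources]
	 * A minimal consumer-side loop over the CInputSync handshake/wait protocol used in this
	 * file. The actual frame payload transport lives elsewhere in the library; only the
	 * synchronization calls are shown, and the 30-second timeout and stream name are
	 * placeholder assumptions.
	 *
	 *     CInputSync *sync = CInputSync::Handshake(_T("myStream"), true, 30000);
	 *     if (sync != NULL)
	 *     {
	 *         while (sync->WaitForFrame(NULL))   // false => producer signalled end-of-video
	 *         {
	 *             // ... read the frame data from the shared buffer here ...
	 *             sync->FrameWasRead();          // frees one slot in the producer's buffer
	 *         }
	 *         delete sync;                       // dtor calls Close() and signals ConsumeEnd
	 *     }
	 */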
true : false); } else { framesData.Release(); _framesWaited++; return true; } } void CInputSync::FrameWasRead() { _framesConsumed++; CSharedData &maxConsumed = _pSyncObjs->FramesConsumed(); maxConsumed.Block(); try { int maxFramesConsumed = maxConsumed.ReadInt(false); if (_framesConsumed > maxFramesConsumed) { maxConsumed.WriteInt(_framesConsumed, false); // Flag the producer that another frame was consumed SetEvent(_pSyncObjs->NewBufSpaceEvent()); } } catch (CSharedDataIOException) {} maxConsumed.Release(); }; void CInputSync::Close() { SetEvent (_pSyncObjs->ConsumeEndEvent()); } int CInputSync::GetConsumerNumber() { return _pSyncObjs->NConsumers().IncreaseInt(); } /***************/ /* COutputSync */ /***************/ COutputSync::COutputSync (LPCTSTR location, int maxBufferSize, DWORD timeoutMS) : _maxBufferSize (maxBufferSize), _timeoutMS(timeoutMS) { _framesProduced = 0; _pSyncObjs = new CSyncObjects (location); _pSyncObjs->FramesProduced().WriteInt(0); _pSyncObjs->FramesConsumed().WriteInt(0); _pSyncObjs->NConsumers().WriteInt(0); // SetEvent (_pSyncObjs->ProduceStartEvent());// evev-moved to Started(), so consumer will wait until AFTER media creation } // Sign starting of producer (AFTER media creation) void COutputSync::Started() { SetEvent (_pSyncObjs->ProduceStartEvent()); } COutputSync::~COutputSync() { // Prevent deletion of sync objects during handshake HANDLE handshakeMutex = CreateMutex (NULL, FALSE, _pSyncObjs->GetHandshakeMutexName().c_str()); WaitForSingleObject (handshakeMutex, getTimeoutMS() ); delete _pSyncObjs; ReleaseMutex(handshakeMutex); CloseHandle (handshakeMutex); } int COutputSync::HowManyFramesConsumed() { CSharedData &framesData = _pSyncObjs->FramesConsumed(); return framesData.ReadInt(); } void COutputSync::WaitForBufferSpace() // throws CSyncTimeoutException, CSyncKillException { if (_maxBufferSize > 0) { CSharedData &framesData = _pSyncObjs->FramesConsumed(); framesData.Block(); int framesConsumed = framesData.ReadInt(false); if (_framesProduced >= framesConsumed + _maxBufferSize) { ResetEvent (_pSyncObjs->NewBufSpaceEvent()); framesData.Release(); // Wait for buffer space _pSyncObjs->WaitForObject (_pSyncObjs->NewBufSpaceEvent(), getTimeoutMS() ); } else framesData.Release(); } _framesProduced++; } void COutputSync::FrameWasWritten() { _pSyncObjs->FramesProduced().Block(); try { _pSyncObjs->FramesProduced().IncreaseInt(false); // Update new frame events int nConsumers = _pSyncObjs->NConsumers().ReadInt(); for (int consumerNum=(int)_hNewFrameEvents.size(); consumerNum < nConsumers; consumerNum++) { HANDLE hNewFrameEvent = CreateEvent (NULL, TRUE, FALSE, _pSyncObjs->GetNewFrameEventName(consumerNum).c_str()); _hNewFrameEvents.push_back(hNewFrameEvent); } // Flag all consumers that there is a new frame std::list<HANDLE>::iterator i; for (i = _hNewFrameEvents.begin(); i != _hNewFrameEvents.end(); i++) { HANDLE hNewFrameEvent = (HANDLE)(*i); SetEvent(hNewFrameEvent); } } catch (CSharedDataIOException) {} _pSyncObjs->FramesProduced().Release(); } void COutputSync::VideoWasComplete() { SetEvent (_pSyncObjs->ProduceEndEvent()); } void COutputSync::WaitForConsumerEnd() // throws CSyncTimeoutException, CSyncKillException { CSharedData &framesData = _pSyncObjs->FramesConsumed(); HANDLE consumerEndHandles[2] = {_pSyncObjs->ConsumeEndEvent(), _pSyncObjs->NewBufSpaceEvent()}; // Check how many frames were consumed framesData.Block(); int framesConsumed = framesData.ReadInt(false); // Were all frames consumed? 
while (_framesProduced > framesConsumed) { // Wait until consumer closes input stream or another frame is consumed CLog::Write ("Waiting for consumer to end..."); ResetEvent (_pSyncObjs->NewBufSpaceEvent()); framesData.Release(); int result = _pSyncObjs->WaitForObjects (2, consumerEndHandles, getTimeoutMS() ); if (result == 0) { CLog::Write ("Consumer sent END event"); return; // consumer closed input stream } // Consumer read another frame, let's check framesData.Block(); framesConsumed = framesData.ReadInt(false); } framesData.Release(); CLog::Write ("Consumer completed reading all frames"); } #else #include "VSSync.h" #include "VSLog.h" #include "VSExceptions.h" #include "VSUtils.h" #include <stdio.h> #include <string.h> #include <stdlib.h> //#include "CMutex.h" using namespace VideoStream; /****************/ /* CSyncObjects */ /****************/ const LPCTSTR produceStartEventPrefix = _T("1"); const LPCTSTR produceEndEventPrefix = _T("2"); const LPCTSTR consumeEndEventPrefix = _T("3"); const LPCTSTR newFrameEventPrefix = _T("4"); const LPCTSTR newBufSpaceEventPrefix = _T("5"); const LPCTSTR framesProducedPrefix = _T("6"); const LPCTSTR framesConsumedPrefix = _T("7"); const LPCTSTR nConsumersPrefix = _T("8"); const LPCTSTR handhsakeMutexPrefix = _T("9"); const LPCTSTR killEventPrefix = _T("10"); static std::string produceStartEventName(LPCTSTR location) { return GetSyncObjName(produceStartEventPrefix, location); } static std::string produceEndEventName(LPCTSTR location) { return GetSyncObjName(produceEndEventPrefix, location); } static std::string consumeEndEventName(LPCTSTR location) { return GetSyncObjName(consumeEndEventPrefix, location); } static std::string newBufSpaceEventName(LPCTSTR location) { return GetSyncObjName(newBufSpaceEventPrefix, location); } static std::string framesProducedName(LPCTSTR location) { return GetSyncObjName(framesProducedPrefix, location); } static std::string framesConsumedName(LPCTSTR location) { return GetSyncObjName(framesConsumedPrefix, location); } static std::string nConsumersName(LPCTSTR location) { return GetSyncObjName(nConsumersPrefix, location); } static std::string handshakeMutexName(LPCTSTR location) { return GetSyncObjName(handhsakeMutexPrefix, location); } static std::string killEventName(LPCTSTR location) { return GetSyncObjName(killEventPrefix, location); } CSyncObjects::CSyncObjects(LPCTSTR location) : _framesProduced(framesProducedName(location).c_str()), _framesConsumed( framesConsumedName(location).c_str()), _nConsumers(nConsumersName( location).c_str()) { _location = strdup(location); _hProduceStartEvent = CreateEvent(NULL, TRUE, FALSE, produceStartEventName( location).c_str()); _hProduceEndEvent = CreateEvent(NULL, TRUE, FALSE, produceEndEventName( location).c_str()); _hConsumeEndEvent = CreateEvent(NULL, TRUE, FALSE, consumeEndEventName( location).c_str()); _hNewBufSpaceEvent = CreateEvent(NULL, TRUE, FALSE, newBufSpaceEventName( location).c_str()); _hKillEvent = CreateEvent(NULL, TRUE, FALSE, killEventName(location).c_str()); } CSyncObjects::~CSyncObjects() { free(_location); if (_hProduceStartEvent != NULL) CloseHandle(_hProduceStartEvent); if (_hProduceEndEvent != NULL) CloseHandle(_hProduceEndEvent); if (_hConsumeEndEvent != NULL) CloseHandle(_hConsumeEndEvent); if (_hNewBufSpaceEvent != NULL) CloseHandle(_hNewBufSpaceEvent); if (_hKillEvent != NULL) CloseHandle(_hKillEvent); } std::string CSyncObjects::GetNewFrameEventName(int consumerNum) { TCHAR prefix[MAX_PATH + 1]; snprintf(prefix, MAX_PATH, _T("%s%d_"), 
		newFrameEventPrefix, consumerNum);
	prefix[MAX_PATH] = '\0'; // terminate within the buffer (last valid index is MAX_PATH)
	return GetSyncObjName(prefix, _location);
}

std::string CSyncObjects::GetHandshakeMutexName()
{
	return handshakeMutexName(_location);
}

HANDLE CSyncObjects::_hGlobalKillEvent = NULL;

HANDLE CSyncObjects::GlobalKillEvent()
{
	if (_hGlobalKillEvent == NULL)
		_hGlobalKillEvent = CreateEvent(NULL, TRUE, FALSE, killEventPrefix);
	return _hGlobalKillEvent;
}

HANDLE CSyncObjects::ProduceStartEvent()
{
	return _hProduceStartEvent;
}

HANDLE CSyncObjects::ProduceEndEvent()
{
	return _hProduceEndEvent;
}

bool CSyncObjects::AlreadyCreated(LPCTSTR location)
{
	HANDLE hProduceStartEvent = OpenEvent(SYNCHRONIZE, FALSE, produceStartEventName(location).c_str());
	if (hProduceStartEvent == NULL)
		return false;
	else {
		CloseHandle(hProduceStartEvent);
		return true;
	}
}

void CSyncObjects::KillStream(LPCTSTR location)
{
	HANDLE killEvent = CreateEvent(NULL, TRUE, FALSE, killEventName(location).c_str());
	SetEvent(killEvent);
	CloseHandle(killEvent);
}

void CSyncObjects::KillAllStreams()
{
	SetEvent(GlobalKillEvent());
}

// On timeout: if throwOnTimeout==false - return -1, otherwise throw exception
int CSyncObjects::WaitForObjects(int nObjects, HANDLE objects[], DWORD dwTimeout, bool throwOnTimeout)
// throws CSyncTimeoutException, CSyncKillException
{
	// Add kill events to objects
	const int MAX_OBJS = 10;
	HANDLE allObjs[MAX_OBJS + 2];
	for (int i = 0; i < nObjects; i++)
		allObjs[i] = objects[i];
	allObjs[nObjects] = KillEvent();
	allObjs[nObjects + 1] = GlobalKillEvent();

	// Wait for original objects or kill events
	DWORD dwResult = WaitForMultipleObjects(nObjects + 2, allObjs, FALSE, dwTimeout);
	if (dwResult == WAIT_TIMEOUT) {
		CLog::Write("TIMEOUT reached");
		if (throwOnTimeout)
			throw CSyncTimeoutException();
		else
			return -1;
	}
	int waitObjNum = dwResult - (dwResult >= WAIT_ABANDONED_0 ?
WAIT_ABANDONED_0 : WAIT_OBJECT_0); if (waitObjNum >= nObjects) { CLog::Write("KILL event caught"); throw CSyncKillException(); } // Return number of object that was set return waitObjNum; } void CSyncObjects::WaitForObject(HANDLE object, DWORD dwTimeout) // throws CSyncTimeoutException, CSyncKillException { WaitForObjects(1, &object, dwTimeout); } /**************/ /* CInputSync */ /**************/ CInputSync::CInputSync(LPCTSTR location, DWORD timeoutMS) : _timeoutMS(timeoutMS) { _pSyncObjs = new CSyncObjects(location); _framesConsumed = 0; _framesWaited = 0; } CInputSync::~CInputSync() { Close(); delete _pSyncObjs; } bool CInputSync::IsVideoBeingProduced(LPCTSTR location) { return CSyncObjects::AlreadyCreated(location); } void CInputSync::WaitForVideo() // throws CSyncTimeoutException, CSyncKillException { _pSyncObjs->WaitForObject(_pSyncObjs->ProduceStartEvent(), getTimeoutMS()); } void CInputSync::InitConsumer() { int consumerNum = GetConsumerNumber(); _hNewFrameEvent = CreateEvent(NULL, TRUE, FALSE, _pSyncObjs->GetNewFrameEventName(consumerNum).c_str()); } CInputSync *CInputSync::Handshake(LPCTSTR location, bool waitForProducer, DWORD timeoutMS) // throws CSyncTimeoutException, CSyncKillException { // This mutex assures that the producer sync objects are not being destroyed during handshake //HANDLE handshakeMutex = CreateMutex(NULL, FALSE, handshakeMutexName(location).c_str()); pthread_mutex_t handshakeMutex; pthread_mutex_init(&handshakeMutex, NULL); int rc = pthread_mutex_lock(&handshakeMutex); if (rc == 0) { //success struct timespec timeout; timeout.tv_sec = timeoutMS; pthread_mutex_timedlock(&handshakeMutex, &timeout); } else { throw; } //union semun arg; //int handshakeMutex = CreateNamedMutex(arg, FALSE, handshakeMutexName( // location).c_str()); //if (handshakeMutex == 0) { // perror("Handshake CreateNamedMutex failed:\n"); // throw; //} //MutexLock(handshakeMutex, timeoutMS); //WaitForSingleObject(&handshakeMutex, timeoutMS); try { CInputSync *pSync = NULL; if (waitForProducer || IsVideoBeingProduced(location)) { // Wait for video to be produced pSync = new CInputSync(location, timeoutMS); fprintf(stderr, "GO to wait video...\n"); pSync->WaitForVideo(); fprintf(stderr, "Back from wait video...\n"); pSync->InitConsumer(); } // ReleaseMutex(handshakeMutex); // DeleteMutex(handshakeMutex); // CloseHandle(handshakeMutex); pthread_mutex_unlock(&handshakeMutex); pthread_mutex_destroy(&handshakeMutex); return pSync; } catch (...) { // ReleaseMutex(handshakeMutex); // CloseHandle(handshakeMutex); pthread_mutex_unlock(&handshakeMutex); pthread_mutex_destroy(&handshakeMutex); //ReleaseMutex(handshakeMutex); //DeleteMutex(handshakeMutex); throw; } } bool CInputSync::HasProducingEnded(DWORD timeoutMs) { HANDLE produceEndEvent = _pSyncObjs->ProduceEndEvent(); int result = _pSyncObjs->WaitForObjects(1, &produceEndEvent, timeoutMs, false); // -1 ==> timeout, i.e. producing not ended. 
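	/*
	 * [Illustrative sketch - not part of the original sources]
	 * pthread_mutex_timedlock() expects an *absolute* CLOCK_REALTIME deadline, so a
	 * millisecond timeout such as the one used by Handshake()/~COutputSync() above is
	 * normally converted along these lines before the call:
	 *
	 *     struct timespec deadline;
	 *     clock_gettime(CLOCK_REALTIME, &deadline);
	 *     deadline.tv_sec  += timeoutMS / 1000;
	 *     deadline.tv_nsec += (timeoutMS % 1000) * 1000000L;
	 *     if (deadline.tv_nsec >= 1000000000L) {      // carry into seconds
	 *         deadline.tv_sec  += 1;
	 *         deadline.tv_nsec -= 1000000000L;
	 *     }
	 *     int rc = pthread_mutex_timedlock(&handshakeMutex, &deadline);
	 *     // rc == 0 -> locked, rc == ETIMEDOUT -> timed out
	 */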
return (result != -1); } // Sleep (also test kill events) void CInputSync::Sleep(DWORD timeoutMs) { _pSyncObjs->WaitForObjects(0, NULL, timeoutMs, false); } bool CInputSync::WaitForFrame(HANDLE readEndEventHandle) // throws CSyncTimeoutException, CSyncKillException { CSharedData &framesData = _pSyncObjs->FramesProduced(); framesData.Block(); int framesProduced = framesData.ReadInt(false); if (framesProduced <= _framesWaited) { ResetEvent(_hNewFrameEvent); framesData.Release(); HANDLE newFrameHandles[3] = { _hNewFrameEvent, _pSyncObjs->ProduceEndEvent(), readEndEventHandle }; // Wait either for new frame event or for produce-end event which means that video is complete int result = _pSyncObjs->WaitForObjects(readEndEventHandle != NULL ? 3 : 2, newFrameHandles, getTimeoutMS()); _framesWaited++; return (result == 0 ? true : false); } else { framesData.Release(); _framesWaited++; return true; } } void CInputSync::FrameWasRead() { _framesConsumed++; CSharedData &maxConsumed = _pSyncObjs->FramesConsumed(); maxConsumed.Block(); try { int maxFramesConsumed = maxConsumed.ReadInt(false); if (_framesConsumed > maxFramesConsumed) { maxConsumed.WriteInt(_framesConsumed, false); // Flag the producer that another frame was consumed SetEvent(_pSyncObjs->NewBufSpaceEvent()); } } catch (CSharedDataIOException) { } maxConsumed.Release(); } ; void CInputSync::Close() { SetEvent(_pSyncObjs->ConsumeEndEvent()); } int CInputSync::GetConsumerNumber() { return _pSyncObjs->NConsumers().IncreaseInt(); } /***************/ /* COutputSync */ /***************/ COutputSync::COutputSync(LPCTSTR location, int maxBufferSize, DWORD timeoutMS) : _maxBufferSize(maxBufferSize), _timeoutMS(timeoutMS) { _framesProduced = 0; _pSyncObjs = new CSyncObjects(location); _pSyncObjs->FramesProduced().WriteInt(0); _pSyncObjs->FramesConsumed().WriteInt(0); _pSyncObjs->NConsumers().WriteInt(0); // SetEvent (_pSyncObjs->ProduceStartEvent());// evev-moved to Started(), so consumer will wait until AFTER media creation } // Sign starting of producer (AFTER media creation) void COutputSync::Started() { SetEvent(_pSyncObjs->ProduceStartEvent()); } COutputSync::~COutputSync() { // Prevent deletion of sync objects during handshake pthread_mutex_t handshakeMutex; // = CreateMutex(NULL, FALSE, _pSyncObjs->GetHandshakeMutexName().c_str()); pthread_mutex_init(&handshakeMutex, NULL); int rc = pthread_mutex_lock(&handshakeMutex); if (rc == 0) { //success struct timespec timeout; timeout.tv_sec = getTimeoutMS(); pthread_mutex_timedlock(&handshakeMutex, &timeout); } else { throw; } //union semun arg; //int handshakeMutex = CreateNamedMutex(arg, FALSE, // _pSyncObjs->GetHandshakeMutexName().c_str()); //if (handshakeMutex == 0) { // //try delete sync // try { // pthread_mutex_t handshakeMutex; // pthread_mutex_init(&handshakeMutex, NULL); // // pthread_mutex_lock(&handshakeMutex); // // if (_pSyncObjs) // delete _pSyncObjs; // // pthread_mutex_unlock(&handshakeMutex); // // } // catch (...) 
{ // // } // } // else { // MutexLock(handshakeMutex, getTimeoutMS()); //WaitForSingleObject(&handshakeMutex, getTimeoutMS()); // if (_pSyncObjs) delete _pSyncObjs; // ReleaseMutex(handshakeMutex); // DeleteMutex(handshakeMutex); // } pthread_mutex_unlock(&handshakeMutex); pthread_mutex_destroy(&handshakeMutex); // ReleaseMutex(handshakeMutex); // CloseHandle(handshakeMutex); } int COutputSync::HowManyFramesConsumed() { CSharedData &framesData = _pSyncObjs->FramesConsumed(); return framesData.ReadInt(); } void COutputSync::WaitForBufferSpace() // throws CSyncTimeoutException, CSyncKillException { if (_maxBufferSize > 0) { CSharedData &framesData = _pSyncObjs->FramesConsumed(); framesData.Block(); int framesConsumed = framesData.ReadInt(false); if (_framesProduced >= framesConsumed + _maxBufferSize) { ResetEvent(_pSyncObjs->NewBufSpaceEvent()); framesData.Release(); // Wait for buffer space _pSyncObjs->WaitForObject(_pSyncObjs->NewBufSpaceEvent(), getTimeoutMS()); } else framesData.Release(); } _framesProduced++; } void COutputSync::FrameWasWritten() { _pSyncObjs->FramesProduced().Block(); try { _pSyncObjs->FramesProduced().IncreaseInt(false); // Update new frame events int nConsumers = _pSyncObjs->NConsumers().ReadInt(); for (int consumerNum = (int)_hNewFrameEvents.size(); consumerNum < nConsumers; consumerNum++) { HANDLE hNewFrameEvent = CreateEvent(NULL, TRUE, FALSE, _pSyncObjs->GetNewFrameEventName(consumerNum).c_str()); _hNewFrameEvents.push_back(hNewFrameEvent); } // Flag all consumers that there is a new frame std::list<HANDLE>::iterator i; for (i = _hNewFrameEvents.begin(); i != _hNewFrameEvents.end(); i++) { HANDLE hNewFrameEvent = (HANDLE)(*i); SetEvent(hNewFrameEvent); } } catch (CSharedDataIOException) { } _pSyncObjs->FramesProduced().Release(); } void COutputSync::VideoWasComplete() { SetEvent(_pSyncObjs->ProduceEndEvent()); } void COutputSync::WaitForConsumerEnd() // throws CSyncTimeoutException, CSyncKillException { CSharedData &framesData = _pSyncObjs->FramesConsumed(); HANDLE consumerEndHandles[2] = { _pSyncObjs->ConsumeEndEvent(), _pSyncObjs->NewBufSpaceEvent() }; // Check how many frames were consumed framesData.Block(); int framesConsumed = framesData.ReadInt(false); // Were all frames consumed? while (_framesProduced > framesConsumed) { // Wait until consumer closes input stream or another frame is consumed CLog::Write("Waiting for consumer to end..."); ResetEvent(_pSyncObjs->NewBufSpaceEvent()); framesData.Release(); int result = _pSyncObjs->WaitForObjects(2, consumerEndHandles, getTimeoutMS()); if (result == 0) { CLog::Write("Consumer sent END event"); return; // consumer closed input stream } // Consumer read another frame, let's check framesData.Block(); framesConsumed = framesData.ReadInt(false); } framesData.Release(); CLog::Write("Consumer completed reading all frames"); } #endif <file_sep>/utils/Utils/EngineInitProtector.h // EngineInitProtector.h // Prevent two engines to enter some critical init section simultaneously #pragma once #include <list> class Engine; class EngineInitProtector { public: EngineInitProtector(); virtual ~EngineInitProtector(); void EngineLaunched(const Engine *engine); // Add to list bool WaitBeforeEngineInit(const Engine *engine, int timeoutMS) const; // iteratively call CanEngineInit() until return true bool CanEngineInit(const Engine *engine) const; // Is in the list and not first? 
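	/*
	 * [Illustrative sketch - not part of the original sources]
	 * Typical call order for serializing a critical init section between engines, assuming
	 * 'protector' is shared by all engines, 'this' is the calling engine, and the boolean
	 * return of WaitBeforeEngineInit() means "our turn arrived before the timeout":
	 *
	 *     protector.EngineLaunched(this);                    // join the queue
	 *     if (protector.WaitBeforeEngineInit(this, 60000))   // wait for our turn (60s timeout)
	 *     {
	 *         // ... perform the non-reentrant initialization ...
	 *     }
	 *     protector.EngineInitEnded(this);                   // let the next engine in
	 */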
	void EngineInitEnded(const Engine *engine); // Remove from list

private:
	std::list<const Engine *> m_activeEngines;
	HANDLE m_activeEnginesMutex; // mutex for protecting the list.
};
<file_sep>/VideoDecoder/VideoDecoder/FFMpegReader.h
#pragma once

#include <string>
#include <map>
#include "Logger.h"
#include "VSVideo.h"

// Use fps or cts?? fps
// Support resize? no
// Support frame dup/drop? no
// Which pixel formats? ARGB

class CVideoProperties
{
public:
	std::string name;
	std::string vcodec;
	int width;
	int height;
	int vbitrate;
	int framerate;       // numerator
	int framerate_scale; // denominator
};

struct SwsContext;

class CFFMpegReader
{
public:
	CFFMpegReader(Logger &logger);
	virtual ~CFFMpegReader();

	// Open a file
	bool Open(const char *name, CVideoProperties &properties );

	bool DecodeVFrame(VideoStream::CMutableFramePtr framePtr, bool &a_bEOF);

	bool Close();

	static void init(map<string, void *> *commonStorage); // init ffmpeg if needed

private:
	bool FindVideoStream();
	bool InitVideoStream();
	bool ReadVSample( AVPacket &packet );

	// Allocate picture and its buffer
	AVFrame* alloc_picture(AVPixelFormat pix_fmt, int width, int height);

	// Thread safe functions for codec opening/closing
	void AvcodecCloseThreadSafe(AVCodecContext *avctx);

private:
	AVFormatContext* m_pAVFormatContext;
	SwsContext *m_pSWSContext; // Context for color conversion

	// Video
	int m_nWidth;
	int m_nHeight;
	AVStream* m_pVStream;
	AVCodecContext* m_pVCodecContext;
	AVFrame *m_pVDecodedFrame;
	AVFrame *m_pVOutFrame;
	int m_nVideoBufferSize;
	unsigned char *m_pVideoBuffer;
	int m_iVStreamIdx;
	AVPixelFormat m_eOutPixFmt;

	Logger & m_logger;
};
<file_sep>/NewEncoder/VideoEncoder/VFWEncoder.cpp
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include "VFWEncoder.h"
#include "SWException.h"

#pragma warning (disable : 4996 )

CVFWEncoder::CVFWEncoder(Logger & logger) : m_logger(logger)
{
	m_bInit = false;
	m_pFrmBfr = NULL;
}

CVFWEncoder::~CVFWEncoder()
{
	if ( m_pFrmBfr )
		delete [] m_pFrmBfr;
}

// Initialize the encoder library
bool CVFWEncoder::Init(CVideoWriterOptions &options)
{
	m_options = options;

	// Row width is 4 bytes aligned.
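	// [Illustrative note - not part of the original sources]
	// The expression below rounds the row size up to the next multiple of 4 bytes
	// (the DIB stride requirement), e.g. for width=50 at 24 bpp:
	//     50 * 24/8 = 150 bytes  ->  (150 + 3) & 0xFFFC = 152 bytes per row.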
m_rowWidthBytes = (((m_options.width* m_options.vbitsperpel/8) + 3) & 0xFFFC); // Fill bitmap info header outbi.biSize = sizeof(outbi); outbi.biWidth = m_options.width; outbi.biHeight = m_options.height; outbi.biPlanes = 1; outbi.biBitCount = m_options.vbitsperpel; outbi.biCompression = BI_RGB; outbi.biSizeImage = m_rowWidthBytes * m_options.height; outbi.biXPelsPerMeter = 1; outbi.biYPelsPerMeter = 1; outbi.biClrUsed = 0; outbi.biClrImportant = 0; // Fill Compression opts if ( !m_options.vconfstore.empty() ) { // Open dialog and store codec conf ChooseOneCompatibleCompressor(&compvars, &outbi); m_logger.info("Codec FCC = 0x%x\n", compvars.fccHandler); if ( !StoreCodecConf(&compvars) ) return false; } else if ( !m_options.vconfload.empty() ) { // Load codec params if ( !LoadCodecConf(&compvars) ) return false; } else { throw SWException( "Either vconfload or vconfstore must be specified\n"); } // Allocate buffer for frames m_pFrmBfr = new unsigned char[outbi.biSizeImage]; m_bInit = true; return true; } bool CVFWEncoder::Begin() { if ( !m_bInit ) return false; if (!ICSeqCompressFrameStart(&compvars, (LPBITMAPINFO)&outbi)) { m_logger.error("ICSeqCompressFrameStart failed\n"); } m_numFrames = 0; return true; } // Encode a video frame unsigned char *CVFWEncoder::EncodeVFrame(CVideoFrame& video_frame, long *bfr_size, bool *isKey) { if ( !m_bInit ) { assert(false); return NULL; } // For vp6 - need to flip vertically (to fix incositstnecy with flv) int irow; for ( irow = 0; irow < video_frame.height; irow ++ ) { if ( irow != video_frame.height-1-irow ) { memcpy( m_pFrmBfr + m_rowWidthBytes*irow, video_frame.bfr + m_rowWidthBytes*(video_frame.height-1-irow), m_rowWidthBytes ); } } if ( video_frame.is_key ) { m_logger.info("Forcing key frame at %d\n", m_numFrames); ICSeqCompressFrameEnd(&compvars); if (!ICSeqCompressFrameStart(&compvars, (LPBITMAPINFO)&outbi)) { m_logger.error("ICSeqCompressFrameStart failed\n"); } } // Compress *bfr_size = -1; BOOL is_key = false; LPVOID compData = ICSeqCompressFrame(&compvars, 0, m_pFrmBfr, &is_key, bfr_size); if (!compData) { m_logger.error("ICSeqCompressFrame failed\n"); *bfr_size = -1; return NULL; } *isKey = (is_key != 0); // BOOL->bool m_numFrames++; return (unsigned char *)compData; } // Finish bool CVFWEncoder::Close() { ICSeqCompressFrameEnd(&compvars); ICCompressorFree(&compvars); return true; } // Store the codec data in a file bool CVFWEncoder::StoreCodecConf(COMPVARS* cvar) { LONG lStateSize; void* memState; FILE *fp = fopen(m_options.vconfstore.c_str(), "wb"); if ( !fp ) { throw SWException("Failed to read conf file %s\n", m_options.vconfload.c_str() ); } lStateSize = ICGetStateSize(cvar->hic); memState = malloc(lStateSize); if (ICGetState(cvar->hic, memState, lStateSize) != ICERR_OK) { free(memState); throw SWException("ICGetState failed\n"); } fwrite(&cvar->fccHandler, sizeof(cvar->fccHandler), 1, fp); fwrite(&cvar->lKey, sizeof(cvar->lKey), 1, fp); fwrite(&cvar->lQ, sizeof(cvar->lQ), 1, fp); fwrite(&cvar->lDataRate, sizeof(cvar->lDataRate), 1, fp); fwrite(&lStateSize, sizeof(lStateSize), 1, fp); fwrite(memState, lStateSize, 1, fp); free(memState); fclose(fp); m_logger.info("Video Codec configuration stored in %s\n", m_options.vconfstore.c_str() ); return true; } // Load codec data from a file bool CVFWEncoder::LoadCodecConf(COMPVARS* cvar) { LONG lStateSize; unsigned char* memState; FILE *fp = fopen(m_options.vconfload.c_str(), "rb"); if ( !fp ) { throw SWException("Failed to read conf file %s\n", m_options.vconfload.c_str() ); } ZeroMemory(cvar, 
sizeof(COMPVARS)); cvar->cbSize = sizeof(COMPVARS); cvar->dwFlags = ICMF_COMPVARS_VALID; cvar->fccType = ICTYPE_VIDEO; fread(&cvar->fccHandler, sizeof(cvar->fccHandler), 1, fp); fread(&cvar->lKey, sizeof(cvar->lKey), 1, fp); fread(&cvar->lQ, sizeof(cvar->lQ), 1, fp); fread(&cvar->lDataRate, sizeof(cvar->lDataRate), 1, fp); fread(&lStateSize, sizeof(lStateSize), 1, fp); memState = (unsigned char *)malloc(lStateSize); fread(memState, lStateSize, 1, fp); // Override bitrate and keyframe interval // ASSUMING STRUCTURE OF VP62 STATE DATA!!! #define VP62_KBPS_CONF_POS 20 #define VP62_KFINTERVAL_CONF_POS 56 cvar->lDataRate = m_options.vbitrate/1000; if ( lStateSize > VP62_KBPS_CONF_POS+1 ) { // put bitrate in kbps - 2 bytes memState[VP62_KBPS_CONF_POS] = cvar->lDataRate % 256; memState[VP62_KBPS_CONF_POS+1] = (cvar->lDataRate /256) % 256; } cvar->lKey = m_options.GetKFInterval(); if ( lStateSize > VP62_KFINTERVAL_CONF_POS+1 ) { // put keyframe interval - 2 bytes memState[VP62_KFINTERVAL_CONF_POS] = cvar->lKey% 256; memState[VP62_KFINTERVAL_CONF_POS+1] = (cvar->lKey/256) % 256; } cvar->hic = ICOpen(cvar->fccType, cvar->fccHandler, ICMODE_COMPRESS); if ( !(cvar->hic) ) { free(memState); throw SWException("Unable to open compressor 0x%x.\n", cvar->fccHandler); } // Ignore return (ICSetState sometimes returns 0 even when successful) ICSetState(cvar->hic, memState, lStateSize); free(memState); fclose(fp); m_logger.info("Video Codec configuration loaded from %s\n", m_options.vconfload.c_str() ); return true; } // Open dialog for codec selection and configuration void CVFWEncoder::ChooseOneCompatibleCompressor(COMPVARS *cvar, BITMAPINFOHEADER *bi) { // Choose a compatible compressor ZeroMemory(cvar,sizeof(COMPVARS)); cvar->cbSize = sizeof(COMPVARS); if(ICCompressorChoose(NULL, ICMF_CHOOSE_DATARATE | ICMF_CHOOSE_KEYFRAME, bi, NULL, cvar, NULL) == FALSE) { m_logger.error("Compressor selection aborted.\n"); } /* if (cvar->fccHandler == FCC_DIB) { // "Full Frames (Uncompressed)" ShowErrorAndExit("Valid compressor not chosen."); } else if (SaveState && (cvar->fccHandler == FCC_NULL)) { // "No Recompression" ShowErrorAndExit("No Recompression isn't allowed in codec parameters file."); }*/ } <file_sep>/VideoStream/VideoStream/include/VSFrame.h #pragma once #include <string> #ifndef WIN32 #include "WindowDefinitions.h" #else #include <windows.h> #endif namespace VideoStream { struct VIDEOINFO { public: unsigned int width; unsigned int height; unsigned int frameRate; unsigned int bitCount; // only 24 and 32 supported }; struct AUDIOINFO { public: unsigned int sampleRate; // Hz unsigned int bitsPerSample; unsigned int numChannels; }; class CFrame { protected: CFrame() { } ; virtual ~CFrame() { } ; public: // Construction/destruction static CFrame *Create (unsigned int width, unsigned int height, unsigned int bitCount, LPCSTR pixelFormat = NULL); static CFrame *Create (const CFrame &frame); // Reference counting. It is recommended to use the CFramePtr wrapper for handling CFrame reference counting. virtual void AddRef() =0; virtual void RemRef() =0; virtual bool IsSingleRef() const =0; // There are two GetData(). One returns a const data if the CFrame is const and the other // returns a non-const data if the CFrame is non-const. 
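		/*
		 * [Illustrative sketch - not part of the original sources]
		 * The recommended way to build and share a frame is through the wrapper classes
		 * declared further below (sizes and pixel values are placeholders):
		 *
		 *     CMutableFramePtr frame(640, 480, 32);           // 32-bit frame, standard BGR(A) layout
		 *     unsigned char *px = frame.GetPixelData(10, 20);
		 *     px[0] = 255; px[1] = 0; px[2] = 0; px[3] = 255; // write one pixel (B,G,R,A order)
		 *     CFramePtr readOnly = frame;                     // share it; reference counting is automatic
		 */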
		virtual unsigned char *GetData() =0;
		virtual const unsigned char *GetData() const =0;
		virtual int GetDataSize() const =0;
		static int GetFrameSize(unsigned int width, unsigned int height, unsigned int bitCount);

		virtual unsigned int GetWidth() const =0;
		virtual unsigned int GetHeight() const =0;
		virtual unsigned int GetBitCount() const =0;
		virtual LPCSTR GetPixelFormat() const =0;
		virtual bool IsStandardPixelFormat() const =0;
		virtual unsigned int GetStride() const =0;

		// Pixel related API
		virtual bool InFrame(unsigned int x, unsigned int y) const =0;
		virtual unsigned char *GetPixelData(unsigned int x, unsigned int y) =0;
		virtual const unsigned char *GetPixelData(unsigned int x, unsigned int y) const =0;

		/*
		 * Return new frame with same image as this frame but different bit-count (32-->24 or 24-->32).
		 * Return NULL if newBitCount is not valid or equal to pFrame bit count.
		 * The new frame will have the standard pixel format regardless of the pixel format of the original frame.
		 */
		virtual CFrame *ConvertBitCount(unsigned int newBitCount) const =0;

		/*
		 * Return new frame with same image as this frame only upside down.
		 */
		virtual CFrame *FlipFrameVertically() const =0;

		/*
		 * Return new frame with same image as this frame only different pixel format,
		 * e.g. ChangePixelFormat("ARGB");
		 * If newPixelFormat is NULL, the standard pixel format BGR(A) will be used.
		 */
		virtual CFrame *ChangePixelFormat(LPCSTR newPixelFormat = NULL) const =0;
	};

	// It is recommended to handle the CFrame objects through CFramePtr (or CMutableFramePtr) wrappers which handle the reference counting.
	// CFramePtr should be thought of as a pointer to a const CFrame and CMutableFramePtr should be thought of as a pointer to a CFrame
	class CFramePtr
	{
	protected:
		CFrame *_frame;

		// Despite inheriting from CFramePtr, CMutableFramePtr needs to be a friend class so it can access _frame
		// of other CFramePtr objects.
		friend class CMutableFramePtr;

	public:
		CFramePtr() : _frame(NULL) {}
		CFramePtr(const CFramePtr &other)
		{
			_frame = other._frame;
			if (_frame != NULL)
				_frame->AddRef();
		}
		virtual ~CFramePtr()
		{
			if (_frame != NULL)
				_frame->RemRef();
		}
		CFramePtr & operator= (const CFramePtr & other)
		{
			if (this != &other) // protect against self-assignment
			{
				if (_frame != NULL)
					_frame->RemRef();
				_frame = other._frame;
				if (_frame != NULL)
					_frame->AddRef();
			}
			return *this;
		}
		void setNull()
		{
			if (_frame != NULL)
				_frame->RemRef();
			_frame = NULL;
		}
		// const CFrame *getFrame() {return _frame;}
		bool isNull() const {return (_frame==NULL);}

		// Delegate to CFrame
		const unsigned char *GetData() const {return _frame->GetData();}
		int GetDataSize() const {return _frame->GetDataSize();}
		unsigned int GetWidth() const {return _frame->GetWidth();}
		unsigned int GetHeight() const {return _frame->GetHeight();}
		unsigned int GetBitCount() const {return _frame->GetBitCount();}
		LPCSTR GetPixelFormat() const {return _frame->GetPixelFormat();}
		bool IsStandardPixelFormat() const {return _frame->IsStandardPixelFormat();}
		unsigned int GetStride() const {return _frame->GetStride();}
		bool InFrame(unsigned int x, unsigned int y) const {return _frame->InFrame(x,y);}
		const unsigned char *GetPixelData(unsigned int x, unsigned int y) const {return _frame->GetPixelData(x,y);}
	};

	// While CFramePtr provides API which does not change the data in the frame,
	// CMutableFramePtr provides such API
	class CMutableFramePtr : public CFramePtr
	{
	public:
		CMutableFramePtr() : CFramePtr() {}
		CMutableFramePtr(unsigned int width, unsigned int height, unsigned int bitCount, LPCSTR pixelFormat = NULL)
		{
			_frame = CFrame::Create(width, height, bitCount, pixelFormat);
		}
		// Copy constructor
		CMutableFramePtr(const CMutableFramePtr &other) : CFramePtr(other) {}

		// Note: This is NOT a copy constructor.
		// It is used in order to convert a CFramePtr into a CMutableFramePtr.
		// The logic is as follows:
		// 1. If the frame is only referenced by the original CFramePtr then it can become mutable
		//    with no need for a further copy. We do this if forceNewCopy==false.
		// 2. If the frame is referenced by others as well, then we cannot change its value and
		//    we must create a new copy.
		// In any case, after calling this constructor, original will point to a null frame.
		CMutableFramePtr(CFramePtr & original, bool forceNewCopy)
		{
			if (original.isNull())
			{
				_frame = NULL;
			}
			else if (!forceNewCopy && original._frame->IsSingleRef())
			{
				// Since we are the only reference to _frame, there is no need for a new copy.
				_frame = original._frame;
				original._frame = NULL;
			}
			else
			{
				// Since there are other references to the frame, we must create our own copy.
				// CFrame::Create (const CFrame &frame) actually copies the data in the Frame.
_frame = CFrame::Create(*original._frame); original.setNull(); } } virtual ~CMutableFramePtr() {} CMutableFramePtr & operator= (const CMutableFramePtr & other) { if (this != &other) // protect against self-assignment { if (_frame != NULL) _frame->RemRef(); _frame = other._frame; if (_frame != NULL) _frame->AddRef(); } return *this; } // CFrame *getFrame() {return _frame;} // CFramePtr provides GetData() and GetPixelData() methods that return a const unsigned char * // CMutableFramePtr provides these methods that return an unsigned char * unsigned char *GetData() const {return _frame->GetData();} unsigned char *GetPixelData(unsigned int x, unsigned int y) const {return _frame->GetPixelData(x,y);} static CMutableFramePtr ConvertBitCount(const CFramePtr & original, unsigned int newBitCount) { CMutableFramePtr newFramePtr; newFramePtr._frame = original._frame->ConvertBitCount(newBitCount); return newFramePtr; } static CMutableFramePtr FlipFrameVertically(const CFramePtr & original) { CMutableFramePtr newFramePtr; newFramePtr._frame = original._frame->FlipFrameVertically(); return newFramePtr; } static CMutableFramePtr ChangePixelFormat (const CFramePtr & original, LPCSTR newPixelFormat = NULL) { CMutableFramePtr newFramePtr; newFramePtr._frame = original._frame->ChangePixelFormat(newPixelFormat); return newFramePtr; } }; }; <file_sep>/CMakeLists.txt cmake_minimum_required (VERSION 2.6) project (Renderer) if(UNIX) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -std=gnu++11") endif() add_subdirectory(utils/StackWalker) add_subdirectory(utils/Utils) add_subdirectory(utils/CliManager) add_subdirectory(TinyXml) add_subdirectory(RenderingManager/RenderingManager) add_subdirectory(VideoStream/VideoStream) add_subdirectory(NewEncoder/VideoEncoder) add_subdirectory(VideoDecoder/VideoDecoder) add_subdirectory(LightSynthesizer) add_subdirectory(SVG2Video) add_subdirectory(Fmod) #add_subdirectory(test) <file_sep>/utils/Utils/MutexAutoLock.h // MutexAutoLock // A class for protecting a mutex through object construction/destruction. // The scope of an object is protected by the mutex #ifdef WIN32 #pragma once class MutexAutoLock { private: HANDLE m_mutex; bool m_locked; public: MutexAutoLock(HANDLE mutex) : m_mutex(mutex) { WaitForSingleObject(m_mutex, INFINITE); m_locked = true; } ~MutexAutoLock() { unlock(); } void unlock() { if (m_locked) { m_locked = false; ReleaseMutex(m_mutex); } } }; #else #pragma once #include <pthread.h> class MutexAutoLock { private: pthread_mutex_t m_mutex; bool m_locked; pthread_cond_t cv; bool m_isSet; public: MutexAutoLock(pthread_mutex_t& mutex); ~MutexAutoLock(); void unlock(); }; #endif <file_sep>/NewEncoder/VideoEncoder/VideoWriter.h #if !defined(___VIDEOWRITER_H) #define ___VIDEOWRITER_H #include <string> #define MAX(_a,_b) ((_a)>(_b) ? (_a):(_b)) // Use fps or cts?? fps // Support resize? no // Support frame dup/drop? no // Which pixel formats? ARGB class CVideoWriterOptions { public: std::string name; const char *profile; // e.g. for wmv std::string vcodec; int width; int height; int vbitrate; int framerate; // numenator int framerate_scale; // denumenator int vbitsperpel; bool vno_rc; // no rate control int vqf; // constant quantization factor (<= 0 ==> not used). int vrcbuf_ms; // Size of ratecontrol buffer size in milliseconds (-1 = default) float vconstantRateFactor; // Rate factor: the smaller it is, the higher the quality. // -1=none (use bitrate), otherwise, bitrate is ignored. 
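	/*
	 * [Illustrative sketch - not part of the original sources]
	 * The two rate-control modes implied by the fields above (all values are placeholders):
	 *
	 *     CVideoWriterOptions opt;
	 *     opt.name = "out.mp4";  opt.vcodec = "h264";
	 *     opt.width = 1280;      opt.height = 720;
	 *     opt.framerate = 30;    opt.framerate_scale = 1;   // 30/1 fps
	 *
	 *     // (a) constant-quality mode: the rate factor is used and vbitrate is ignored
	 *     opt.vconstantRateFactor = 23.0f;
	 *
	 *     // (b) bitrate mode: disable the rate factor (-1) and target ~2 Mbit/s
	 *     opt.vconstantRateFactor = -1.0f;
	 *     opt.vbitrate = 2000000;
	 */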
std::string vprofile; // baseline/main/high (default=baseline) int vlevel; // level (2 digits without period). default=30 int vcompress_factor; // OBSOLETED (by two following fields) - 100 - smallest output, 50 - normal, 0 - Prefer speed. int vqual2perf_tradeoff; // 100 - highest quality, 0 - Best performance. int vkeyframe_interval; // # of frames between keyframes. std::string vconfload; // File to write video codec configuration from std::string vconfstore; // File to write video codec configuration to int vduration_ms; // Video duration in ms std::string acodec; // Because of weird libavcodec crash - dropped this field. Use abitrate ( 0 -> no-audio) bool m_bAudio; int abitrate; int asample_rate; int achannels; // 2pass int passnum; // -1 - only 1 pass std::string passlogname; // total bitrate to be written in header (to overcome a situation where // resulting bitrate is lower than requested) . -1 = use abr + vbr int br_inhdr; int playlist_segment_dur_ms; // For playlist output - duration of each segment float playlist_first_segment_dur_ms; bool fragmented; //indicate fragmented mp4 format int vlookahead_frames; // Number of frames the encoder will look ahead before encoding (higher value -> higher latency) float vpsy_rd; // Strength of psy. Rade-Distortion for x264 (0 - fastest, lowest quality, >0 - increase) double stretch_time; double atempo; int startNumber; std::string jobId; int fps; int renderedChunk; int frame_number; int vpacket_size; int apacket_size; int aPacketsNumber; int vtrack_duration; int atrack_duration; bool padded; std::string h264Impl; public: int GetKFInterval() { if ( vkeyframe_interval <= 0 ) // For backward compatibility -> deduce interval from compress_factor. // 51 -> gop size=12; 100 -> gop size=600 return 12*MAX( 1, (vcompress_factor-50) ); else return vkeyframe_interval; } int GetQual2PerfTradeoff() { if ( vqual2perf_tradeoff < 0 ) // For backward compatibility -> deduce value from compress_factor. return (vcompress_factor > 50) ? 
100 : 0; else return vqual2perf_tradeoff; } }; class CAudioFrame { public: unsigned char *bfr; int len; }; class CVideoFrame { public: int width; int height; int bytesperpel; bool is_last; bool is_key; const unsigned char *bfr; }; class CVideoWriter { public: CVideoWriter() : m_lASamplesPerFrame(0) {} virtual ~CVideoWriter() {}; // Initialize the encoder library virtual bool Init(CVideoWriterOptions &options) = 0; virtual bool BeginWrite() = 0; // Encode a video frame virtual bool EncodeVFrame(CVideoFrame& video_frame, int *out_size) = 0; // Encode a audio frame virtual bool EncodeAFrame(CAudioFrame& audio_frame) = 0; // Finish virtual bool Close() = 0; int GetSamplesPerFrame() { return m_lASamplesPerFrame; } int GetBytesPerSample() { return 2; } // pcm 16bit protected: int m_lASamplesPerFrame; CVideoWriterOptions m_options; }; #endif // !defined <file_sep>/NewEncoder/VideoEncoder/VFWWriter.cpp #include <stdlib.h> #include <stdio.h> #include "VFWWriter.h" #include "OutputAvi.h" #include "SWException.h" #pragma warning (disable : 4996 ) HINSTANCE g_hInst; // vdubstuff CVFWWriter::CVFWWriter(Logger & logger) : m_logger(logger) { m_bInit = false; poutfile = NULL; m_pFrmBfr = NULL; outavi = NULL; } CVFWWriter::~CVFWWriter() { //evev free all if ( m_pFrmBfr ) delete [] m_pFrmBfr; } // Initialize the encoder library bool CVFWWriter::Init(CVideoWriterOptions &options) { m_options = options; if ( !OpenOutputVStream() ) return false; m_lASamplesPerFrame = 0;//evev return true; } /* * Create Video Stream */ bool CVFWWriter::OpenOutputVStream() { // Row width is 4 bytes aligned. m_rowWidthBytes = (((m_options.width* m_options.vbitsperpel/8) + 3) & 0xFFFC); // Fill bitmap info header outbi.biSize = sizeof(outbi); outbi.biWidth = m_options.width; outbi.biHeight = m_options.height; outbi.biPlanes = 1; outbi.biBitCount = m_options.vbitsperpel; outbi.biCompression = BI_RGB; outbi.biSizeImage = m_rowWidthBytes * m_options.height; outbi.biXPelsPerMeter = 1; outbi.biYPelsPerMeter = 1; outbi.biClrUsed = 0; outbi.biClrImportant = 0; // Fill Compression opts if ( !m_options.vconfstore.empty() ) { // Open dialog and store codec conf ChooseOneCompatibleCompressor(&compvars, &outbi); m_logger.info("Codec FCC = 0x%x\n", compvars.fccHandler); if ( !StoreCodecConf(&compvars) ) return false; } if ( !m_options.vconfload.empty() ) { // Load codec params if ( !LoadCodecConf(&compvars) ) return false; } // Fill Stream header memset(&vstrhdr, 0, sizeof(vstrhdr)); vstrhdr.fccType = streamtypeVIDEO;// stream type vstrhdr.fccHandler = compvars.fccHandler; vstrhdr.dwScale = m_options.framerate_scale; vstrhdr.dwRate = m_options.framerate; vstrhdr.dwSuggestedBufferSize = 0;//m_options.width*m_options.height*m_options.vbitsperpel; vstrhdr.dwLength = 1662; SetRect(&vstrhdr.rcFrame, 0, 0, // rectangle for stream m_options.width, m_options.height); // Allocate buffer for frames m_pFrmBfr = new unsigned char[outbi.biSizeImage]; int formatSize = ICCompressGetFormatSize(compvars.hic, (void*)&outbi); BITMAPINFOHEADER *bih = (BITMAPINFOHEADER*)malloc(formatSize); memset(bih, 0, formatSize); ICCompressGetFormat(compvars.hic, &outbi, bih); if (bih->biSize == 0) bih->biSize = formatSize; /*evev - to use this class - need to add vdub files to project and uncomment this outavi = new OutputAvi(m_options.name.c_str(), vstrhdr.dwLength, vstrhdr.dwRate, vstrhdr.dwScale, compvars.fccHandler, compvars.lQ, bih); */ free(bih); m_bInit = true; return true; } bool CVFWWriter::BeginWrite() { if ( !m_bInit ) return false; if 
(!ICSeqCompressFrameStart(&compvars, (LPBITMAPINFO)&outbi)) { m_logger.error("ICSeqCompressFrameStart failed\n"); } m_numFrames = 0; return true; } // Encode a video frame bool CVFWWriter::EncodeVFrame(CVideoFrame& video_frame, int* out_size) { if ( !m_bInit ) return false; // For vp6 - need to flip vertically (to fix incositstnecy with flv) int irow; for ( irow = 0; irow < video_frame.height; irow ++ ) { if ( irow != video_frame.height-1-irow ) { memcpy( m_pFrmBfr + m_rowWidthBytes*irow, video_frame.bfr + m_rowWidthBytes*(video_frame.height-1-irow), m_rowWidthBytes ); } } if ( video_frame.is_key ) { m_logger.trace("Forcing key frame at %d\n", m_numFrames); ICSeqCompressFrameEnd(&compvars); if (!ICSeqCompressFrameStart(&compvars, (LPBITMAPINFO)&outbi)) { m_logger.error("ICSeqCompressFrameStart failed\n"); } } LONG compDataSize = -1; BOOL isKey = false; LPVOID compData = ICSeqCompressFrame(&compvars, 0, m_pFrmBfr, &isKey, &compDataSize); if (!compData) { throw SWException("ICSeqCompressFrame failed\n"); } outavi->writeData(compData, compDataSize, isKey != 0); m_numFrames++; return true; } // Encode a audio frame bool CVFWWriter::EncodeAFrame(CAudioFrame& audio_frame) { if ( !m_bInit ) return false; return false; } // Finish bool CVFWWriter::Close() { ICSeqCompressFrameEnd(&compvars); ICCompressorFree(&compvars); if (outavi) { delete outavi; outavi = NULL; } return true; } const char *CVFWWriter::AviMessage(HRESULT code ) { const char *msg="unknown avi result code"; switch (code) { case S_OK: msg="Success"; break; case AVIERR_BADFORMAT: msg="AVIERR_BADFORMAT: corrupt file or unrecognized format"; break; case AVIERR_MEMORY: msg="AVIERR_MEMORY: insufficient memory"; break; case AVIERR_FILEREAD: msg="AVIERR_FILEREAD: disk error while reading file"; break; case AVIERR_FILEOPEN: msg="AVIERR_FILEOPEN: disk error while opening file"; break; case REGDB_E_CLASSNOTREG: msg="REGDB_E_CLASSNOTREG: file type not recognised"; break; case AVIERR_READONLY: msg="AVIERR_READONLY: file is read-only"; break; case AVIERR_NOCOMPRESSOR: msg="AVIERR_NOCOMPRESSOR: a suitable compressor could not be found"; break; case AVIERR_UNSUPPORTED: msg="AVIERR_UNSUPPORTED: compression is not supported for this type of data"; break; case AVIERR_INTERNAL: msg="AVIERR_INTERNAL: internal error"; break; case AVIERR_BADFLAGS: msg="AVIERR_BADFLAGS"; break; case AVIERR_BADPARAM: msg="AVIERR_BADPARAM"; break; case AVIERR_BADSIZE: msg="AVIERR_BADSIZE"; break; case AVIERR_BADHANDLE: msg="AVIERR_BADHANDLE"; break; case AVIERR_FILEWRITE: msg="AVIERR_FILEWRITE: disk error while writing file"; break; case AVIERR_COMPRESSOR: msg="AVIERR_COMPRESSOR"; break; case AVIERR_NODATA: msg="AVIERR_READONLY"; break; case AVIERR_BUFFERTOOSMALL: msg="AVIERR_BUFFERTOOSMALL"; break; case AVIERR_CANTCOMPRESS: msg="AVIERR_CANTCOMPRESS"; break; case AVIERR_USERABORT: msg="AVIERR_USERABORT"; break; case AVIERR_ERROR: msg="AVIERR_ERROR"; break; } return msg; } bool CVFWWriter::StoreCodecConf(COMPVARS* cvar) { LONG lStateSize; void* memState; FILE *fp = fopen(m_options.vconfstore.c_str(), "wb"); if ( !fp ) { throw SWException("Failed to read conf file %s\n", m_options.vconfload.c_str() ); } lStateSize = ICGetStateSize(cvar->hic); memState = malloc(lStateSize); if (ICGetState(cvar->hic, memState, lStateSize) != ICERR_OK) { free(memState); throw SWException("ICGetState failed\n"); } fwrite(&cvar->fccHandler, sizeof(cvar->fccHandler), 1, fp); fwrite(&cvar->lKey, sizeof(cvar->lKey), 1, fp); fwrite(&cvar->lQ, sizeof(cvar->lQ), 1, fp); fwrite(&cvar->lDataRate, 
sizeof(cvar->lDataRate), 1, fp); fwrite(&lStateSize, sizeof(lStateSize), 1, fp); fwrite(memState, lStateSize, 1, fp); free(memState); fclose(fp); m_logger.info("Video Codec configuration stored in %s\n", m_options.vconfstore.c_str() ); return true; } bool CVFWWriter::LoadCodecConf(COMPVARS* cvar) { LONG lStateSize; void* memState; FILE *fp = fopen(m_options.vconfload.c_str(), "rb"); if ( !fp ) { throw SWException("Failed to read conf file %s\n", m_options.vconfload.c_str() ); } ZeroMemory(cvar, sizeof(COMPVARS)); cvar->cbSize = sizeof(COMPVARS); cvar->dwFlags = ICMF_COMPVARS_VALID; cvar->fccType = ICTYPE_VIDEO; fread(&cvar->fccHandler, sizeof(cvar->fccHandler), 1, fp); fread(&cvar->lKey, sizeof(cvar->lKey), 1, fp); fread(&cvar->lQ, sizeof(cvar->lQ), 1, fp); fread(&cvar->lDataRate, sizeof(cvar->lDataRate), 1, fp); fread(&lStateSize, sizeof(lStateSize), 1, fp); memState = malloc(lStateSize); fread(memState, lStateSize, 1, fp); cvar->hic = ICOpen(cvar->fccType, cvar->fccHandler, ICMODE_COMPRESS); if ( !(cvar->hic) ) { free(memState); throw SWException("Unable to open compressor 0x%x.\n", cvar->fccHandler); } // Ignore return (ICSetState sometimes returns 0 even when successful) ICSetState(cvar->hic, memState, lStateSize); free(memState); fclose(fp); m_logger.info("Video Codec configuration loaded from %s\n", m_options.vconfload.c_str() ); return true; } void CVFWWriter::ChooseOneCompatibleCompressor(COMPVARS *cvar, BITMAPINFOHEADER *bi) { // Choose a compatible compressor ZeroMemory(cvar,sizeof(COMPVARS)); cvar->cbSize = sizeof(COMPVARS); if(ICCompressorChoose(NULL, ICMF_CHOOSE_DATARATE | ICMF_CHOOSE_KEYFRAME, bi, NULL, cvar, NULL) == FALSE) { m_logger.error("Compressor selection aborted.\n"); } /* if (cvar->fccHandler == FCC_DIB) { // "Full Frames (Uncompressed)" ShowErrorAndExit("Valid compressor not chosen."); } else if (SaveState && (cvar->fccHandler == FCC_NULL)) { // "No Recompression" ShowErrorAndExit("No Recompression isn't allowed in codec parameters file."); }*/ } <file_sep>/Fmod/EffectGroup.h #ifndef __EFFECT_GROUP_H__ #define __EFFECT_GROUP_H__ #pragma once #include "Effects/Effect.h" class EffectGroup { private: std::string id ; std::vector<std::string> partsIDs; std::vector<Effect *> effects; public: EffectGroup(void){} ~EffectGroup(void) { for (std::vector<Effect *>::iterator it = effects.begin(); it != effects.end() ; ++it) { Effect *effect = *it; delete effect; } } std::string getId() { return id; } void setId(std::string i) { id = i; } std::vector<std::string> getPartsIDs() { return partsIDs; } void setPartsIDs(std::vector<std::string> pid) { partsIDs = pid; } std::vector<Effect *> getEffects() { return effects; } void setEffects(std::vector<Effect *> e) { effects = e; } }; #endif <file_sep>/RenderingManager/RenderingManager/include/EngineLogger.h #pragma once #include "Logger.h" using namespace std; class Engine; class EngineLogger : public Logger { private: Engine *m_engine; friend class Engine; public: virtual void vprintf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, va_list argList); virtual DebugLevel getDebugLevel(); }; <file_sep>/utils/CliManager/CommandParser.cpp #include <stdio.h> #include <stdlib.h> #include <ctype.h> #include <string.h> #include "CommandParser.h" #define HEX_CHARS "0123456789abcdefABCDEF" using namespace std; unsigned int CommandParser::CountLeadingSpaces(string & str) { unsigned int i; for (i=0 ; i<str.length() && isspace(str.at(i)); i++) { } return i; } unsigned int 
CommandParser::GetLengthExcludingTrailingSpaces(string & str) { // Assuming str includes non-spaces (at least one). unsigned int i; for (i=str.length() ; isspace(str.at(i-1)); i--) { } return i; } int CommandParser::getWords(char * str, char **pointers) { // This function runs in two modes: // 1. If pointers == NULL, then it simply counts the number of words. // 2. If pointers != NULL, then it puts the beginning of each word in the proper pointer and puts a '\0' at the end (kind of like strtok). // // Assumptions: // 1. If pointers != NULL, then it is allocated to the correct number of words. // 2. str has no leading or trailing spaces. // 3. str has at least one word. int nWords = 0; unsigned int i = 0; unsigned int length = strlen(str); if (pointers) pointers[0] = str; for(; i < length; i++) { if(isspace(str[i])) { // space encountered if (pointers) { str[i] = '\0'; } i++; // Skip over duplicate spaces while(isspace(str[i])) { i++; } nWords++; if (pointers) pointers[nWords] = str + i; } } return nWords + 1; } bool CommandParser::urlStringToArgv(string & str, int * argc_p, char *** argv_p) { // This parser parses the string according to the following rules: // Parameters are separated by one or more consecutive spaces. // Each parameter is encoded using URL encoding. // // For example // "arg1 arg2" --> argc=2, argv[0]="arg1", argv[1]="arg2" // "arg 1 arg2" --> argc=3, argv[0]="arg", argv[1]="1", argv[2]="arg2" // "arg%201 arg2" --> argc=2, argv[0]="arg 1", argv[1]="arg2" // Allocate a working_string which will be used for all argv arguments (we will insert '\0' chars in this string). // But first remove leading spaces. unsigned int actualStart = CountLeadingSpaces(str); if (actualStart == str.length()) { *argc_p = 0; *argv_p = NULL; return true; } unsigned int actualEnd = GetLengthExcludingTrailingSpaces(str); char *working_string = (char *)malloc((actualEnd+1-actualStart) * sizeof (char)); // The strings in argv may only become shorter (not longer). if (working_string == NULL) { cerr << "Failed to allocate memory"; return false; } std::size_t length = str.substr(actualStart, actualEnd - actualStart).copy(working_string, actualEnd - actualStart + 1); working_string[length] = '\0'; cout << "working string :" << working_string << "\n"; //strcpy_s(working_string, actualEnd-actualStart+1, str.substr(actualStart, actualEnd-actualStart).c_str()); // Allocate a pointer for each argument. char **pointers = (char **)malloc(getWords(working_string, NULL) * sizeof(char *)); if (pointers == NULL) { free (working_string); cerr << "Failed to allocate memory"; return false; } // Fill the pointers and put '\0' chars in the string * argc_p = getWords(working_string, pointers); * argv_p = pointers; // Now go through the arguments and url-decode them for (int i=0; i<(* argc_p); i++) { urlDecode((* argv_p)[i]); } return true; } void CommandParser::urlDecode(char *str) { // Convert each sequence of '%' followed by two hexidecimal characters with the appropriate char. // This can only shorten the string. char *in = str; char *out = str; while (*in != '\0') { if (*in != '%') { *out = *in; in++; out++; continue; } if (strchr(HEX_CHARS, (int)(*(in+1))) && strchr(HEX_CHARS, (int)(*(in+2)))) { *out = hexStringToValue(in+1); out++; in+=3; } } *out = *in; } char CommandParser::hexStringToValue(char *str) { // str is assumed to be at least 2 cahrs long and these two chars must be valid hex values. 
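	// [Illustrative note - not part of the original sources]
	// Example: for the encoded parameter "arg%2F1", urlDecode() calls this helper with the
	// two characters "2F", which yields 0x2F ('/'), so the argument becomes "arg/1".
	// Similarly "%20" decodes to 0x20, i.e. a space.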
	int value;

	if (str[0] >= '0' && str[0] <= '9') {
		value = 16 * (str[0] - '0');
	} else if (str[0] >= 'a' && str[0] <= 'f') {
		value = 16 * (str[0] - 'a' + 10);
	} else if (str[0] >= 'A' && str[0] <= 'F') {
		value = 16 * (str[0] - 'A' + 10);
	} else {
		cerr << "CommandParser: Internal error" << endl;
		return 0;
	}

	if (str[1] >= '0' && str[1] <= '9') {
		value += str[1] - '0';
	} else if (str[1] >= 'a' && str[1] <= 'f') {
		value += str[1] - 'a' + 10;
	} else if (str[1] >= 'A' && str[1] <= 'F') {
		value += str[1] - 'A' + 10;
	} else {
		cerr << "CommandParser: Internal error" << endl;
		return 0;
	}

	return (char)value;
}

// The C++ CommandParser class wrapper
CommandParser::CommandParser(string & command)
{
	m_isValidCommand = urlStringToArgv(command, &m_argc, &m_argv);
}

void CommandParser::freeArgs()
{
	if (m_argv != NULL) {
		if (m_argv[0] != NULL)
			free (m_argv[0]);
		free (m_argv);
	}
	m_argv = NULL;
	m_argc = 0;
}
<file_sep>/RenderingManager/RenderingManager/Engine.cpp
#include <iostream>
#ifdef WIN32
#include <process.h>
#else
#include <pthread.h>
#endif
#include <stdio.h>
#include <stdarg.h>
#include "Engine.h"
#include "EngineManager.h"
#include "RenderingManager.h"
#include "HeaderPrinter.h"
#include "SWException.h"

using namespace std;

Engine::Engine() : m_state(CREATED), m_rndMng(NULL), m_debugLevel(DebugLevel_ERROR)
{
	m_logger.m_engine = this;
}

// Should be called right after the creation of Engine
bool Engine::init(RenderingManager * rndMng)
{
	m_rndMng = rndMng;
	m_commonStorage = m_rndMng->getCommonStorage();
	return true;
}

void Engine::entryPoint(void * pthis)
{
	Engine *pEngine = (Engine *)pthis;

	// Update state
	pEngine->m_state = RUNNING;

	// Write a trace message to the log file
	pEngine->m_logger.trace("Engine Starting\n");

	// Write standard message to notification file
	pEngine->m_rndMng->writeEngineStart(pEngine);

	// Call the engine's Execute method.
	try{
		pEngine->executeEngine(pEngine->m_argc - pEngine->m_firstArgIndex, pEngine->m_argv + pEngine->m_firstArgIndex);
	} catch (const SWException& ex) {
		pEngine->m_logger.error("Caught exception at\n%s", ex.getStackTrace().c_str() );
		pEngine->abortEngineWithError("Caught exception: %s\n", ex.what());
	} catch (const exception& ex) {
		pEngine->abortEngineWithError("Caught exception: %s\n", ex.what());
	} catch (const string& ex) {
		pEngine->abortEngineWithError("Caught string: %s\n", ex.c_str());
	} catch (...) {
		pEngine->abortEngineWithError("Caught an error\n");
	}

	// Free the arguments
	free(pEngine->m_argv[0]);
	free(pEngine->m_argv);

	// Update state and update EngineManager
	pEngine->m_state = DONE;

	// Write a trace message to the log file
	pEngine->m_logger.trace("Engine Finished after %.3f seconds.\n",
		((float)clock() - (float)pEngine->m_startExecutionTime) / ((float)CLOCKS_PER_SEC));

	pEngine->m_rndMng->getEngineManager().removeEngine(pEngine);

	// Delete the engine
	delete pEngine;
}

void Engine::run(int argc, char** argv, int firstArgIndex)
{
	if (m_state != CREATED) {
		// Can't run an engine twice.
		cerr << "Internal error. The engine was already started." << endl;
		return;
	}
	m_state = STARTED;
	m_argc = argc;
	m_argv = argv;
	m_firstArgIndex = firstArgIndex;
	m_startExecutionTime = clock();

	// Add the engine to the engine manager.
	// This method will also launch the engine, i.e. call launchEngineThread(),
	// when the engine is ready to run.
	m_rndMng->getEngineManager().addEngine(this);
}

// Call Engine::EntryPoint from a different thread.
uintptr_t Engine::launchEngineThread() { #ifndef WIN32 pthread_t thread; pthread_create(&thread, NULL, (void*(*)(void*))Engine::entryPoint, this); return thread; #else return _beginthread( Engine::entryPoint, 0, this ); #endif } // This function is used by the engine to exit with an error message. // Such an exit is logged in the notification file and causes all other engine executions to cease. void Engine::abortEngineWithError(const char *format, ...) { va_list argList; va_start(argList, format); { // Write an error message to the log file m_logger.v_error(format, argList); m_logger.error("Engine aborting.\n"); va_end(argList); va_start(argList, format); { RenderingManager::EngineListMutexAutoLock mal(m_rndMng); m_rndMng->stopExecutionsDueToEngineFailure(this, format, argList); } } va_end(argList); // Before exiting, write to the log file without a mutex protection. // This is used for debugging to make sure there is no deadlock. fprintf(m_rndMng->getLogFile(), "Exiting due to engine abortion\n"); fflush(m_rndMng->getLogFile()); exit(-1); } string Engine::engineStateToString(EngineState state) { switch (state) { case CREATED: return "CREATED"; case STARTED: return "STARTED"; case PENDING: return "PENDING"; case RUNNING: return "RUNNING"; case DONE: return "DONE"; default: return "Invalid state" + state; } } <file_sep>/VideoStream/VideoStream/Frame.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSFrameImpl.h" #include "SWException.h" #include <stdlib.h> #else #include "VSFrameImpl.h" #include "SWException.h" #include <stdlib.h> #include <cstring> #endif using namespace VideoStream; CFrameImpl::CFrameImpl (unsigned int width, unsigned int height, unsigned int bitCount, LPCSTR pixelFormat) : _refCount(1), _width (width), _height (height), _bitCount (bitCount), _pixelFormat (bitCount, pixelFormat) { Allocate(); } CFrameImpl::CFrameImpl (const CFrameImpl &frame) : _refCount(1), _width (frame._width), _height (frame._height), _bitCount (frame._bitCount), _pixelFormat (frame._pixelFormat) { Allocate(); memcpy (_pData, frame._pData, GetDataSize()); } void CFrameImpl::Allocate() { int size = GetDataSize(); /* * There's a bug in lagarith decompression (near the end of MMX_Restore32()) that changes X extra bytes after the end of * the frame area, where X can theoretically reach 28 bytes. * So we will allocate additional 32 bytes to be on the safe side... */ size += 32; _pData = (unsigned char *) malloc (size); if (_pData == NULL) throw SWException("Error allocating Frame data of size %d", size); // Create mutex. #ifdef WIN32 m_mutex = CreateMutex (NULL, FALSE, NULL); if (m_mutex == NULL) { throw SWException("Failed to create mutex."); } #else int res = pthread_mutex_init(&m_mutex, NULL); if (res != 0) { throw SWException("Failed to create mutex."); } #endif } CFrameImpl::~CFrameImpl(void) { free (_pData); #ifdef WIN32 if (! 
CloseHandle(m_mutex)) { throw SWException("Failed to close mutex handle."); } #else int res = pthread_mutex_destroy(&m_mutex); if (res != 0) { throw SWException("Failed to close mutex handle."); } #endif } void CFrameImpl::AddRef() { #ifdef WIN32 WaitForSingleObject(m_mutex, INFINITE); _refCount ++; ReleaseMutex(m_mutex); #else pthread_mutex_lock(&m_mutex); _refCount ++; pthread_mutex_unlock(&m_mutex); #endif } void CFrameImpl::RemRef() { #ifdef WIN32 WaitForSingleObject(m_mutex, INFINITE); _refCount--; if (_refCount == 0) { ReleaseMutex(m_mutex); delete this; return; } ReleaseMutex(m_mutex); #else pthread_mutex_lock(&m_mutex); _refCount--; if (_refCount == 0) { pthread_mutex_unlock(&m_mutex); delete this; return; } pthread_mutex_unlock(&m_mutex); #endif } bool CFrameImpl::IsSingleRef() const { return (_refCount==1); } // y must be 2^n #define align_round(x,y) ((((unsigned int)(x))+(y-1))&(~(y-1))) static unsigned int GetStride (int width, int bitCount) { /* * "For uncompressed RGB formats, the minimum stride is always the image width in bytes, rounded up to the nearest DWORD. * You can use the following formula to calculate the stride: stride = (biWidth * (biBitCount / 8) + 3) & ~3." * (help on BITMAPINFOHEADER, MSDN) */ unsigned int stride = align_round (width * (bitCount/8), 4); return stride; } unsigned int CFrameImpl::GetStride() const { return ::GetStride (_width, _bitCount); } const unsigned char *CFrameImpl::GetData() const { return _pData; } unsigned char *CFrameImpl::GetData() { return _pData; } int CFrameImpl::GetDataSize() const { return GetFrameSize (_width, _height, _bitCount); } int CFrame::GetFrameSize (unsigned int width, unsigned int height, unsigned int bitCount) { return ::GetStride (width, bitCount) * height; } CFrame *CFrameImpl::ConvertBitCount (unsigned int newBitCount) const { const unsigned int inBitCount = GetBitCount(); if ((newBitCount != 32 && newBitCount != 24) || inBitCount == newBitCount) return NULL; // invalid newBitCount const unsigned int inBytesCount = inBitCount / 8; const unsigned int outBytesCount = newBitCount / 8; const unsigned int width = GetWidth(), height = GetHeight(); const unsigned int pinStride = GetStride(); const unsigned int poutStride = ::GetStride(width, newBitCount); VideoStream::CFrame *poutFrame = Create (width, height, newBitCount); const unsigned char *pinPixel = GetData(); unsigned char *poutPixel = poutFrame->GetData(); for (unsigned int y=0; y < height; y++) { const unsigned char *pinPixel = GetData() + y * pinStride; unsigned char *poutPixel = poutFrame->GetData() + y * poutStride; for (unsigned int x=0; x < width; x++) { poutPixel[0] = pinPixel[_pixelFormat.GetByteB()]; poutPixel[1] = pinPixel[_pixelFormat.GetByteG()]; poutPixel[2] = pinPixel[_pixelFormat.GetByteR()]; if (newBitCount == 32) poutPixel[3] = 0xFF; // Alpha channel - not transparent pinPixel += inBytesCount; poutPixel += outBytesCount; } } return poutFrame; } /* * Return new frame with same image as this frame only upside down. 
*/ CFrame *CFrameImpl::FlipFrameVertically() const { const unsigned int inBitCount = GetBitCount(); const unsigned int inBytesCount = inBitCount / 8; const unsigned int width = GetWidth(), height = GetHeight(); const unsigned int inStride = GetStride(); VideoStream::CFrame *poutFrame = Create (width, height, inBitCount, GetPixelFormat()); for (unsigned int y=0; y < height; y++) { const unsigned char *pinPixel = GetData() + y * inStride; unsigned char *poutPixel = poutFrame->GetData() + (height-y-1) * inStride; for (unsigned int x=0; x < width; x++) { memcpy (poutPixel, pinPixel, inBytesCount); pinPixel += inBytesCount; poutPixel += inBytesCount; } } return poutFrame; } /* * Return new frame with same image as this frame only different pixel format, * e.g. ChangePixelFormat("ARGB"); * If newPixelFormat is NULL, the standard pixel format BGR(A) will be used. */ CFrame *CFrameImpl::ChangePixelFormat (LPCSTR newPixelFormat) const { const unsigned int inBitCount = GetBitCount(); const unsigned int inBytesCount = inBitCount / 8; const unsigned int width = GetWidth(), height = GetHeight(); const unsigned int inStride = GetStride(); VideoStream::CFrame *poutFrame = Create (width, height, inBitCount, newPixelFormat); CPixelFormat f(inBitCount, newPixelFormat); for (unsigned int y=0; y < height; y++) { const unsigned char *pinPixel = GetData() + y * inStride; unsigned char *poutPixel = poutFrame->GetData() + y * inStride; for (unsigned int x=0; x < width; x++) { poutPixel[f.GetByteB()] = pinPixel[_pixelFormat.GetByteB()]; poutPixel[f.GetByteG()] = pinPixel[_pixelFormat.GetByteG()]; poutPixel[f.GetByteR()] = pinPixel[_pixelFormat.GetByteR()]; if (inBitCount == 32) poutPixel[f.GetByteA()] = pinPixel[_pixelFormat.GetByteA()]; pinPixel += inBytesCount; poutPixel += inBytesCount; } } return poutFrame; } /**********************/ /* CPixelFormat class */ /**********************/ static LPCSTR GetStandardStringFormat(int bitCount) { static LPCSTR standardFormat32 = "BGRA"; static LPCSTR standardFormat24 = "BGR"; if (bitCount == 24) return standardFormat24; if (bitCount == 32) return standardFormat32; return NULL; } void CFrameImpl::CPixelFormat::Init(int bitCount, LPCSTR strFormat) { // Make sure that the format string is of the right length if (strlen(strFormat) != bitCount / 8) throw SWException("Pixel format %s does not match bitcount %d", strFormat, bitCount); _byteR = _byteG = _byteB = _byteA = -1; for (unsigned int i=0; i < strlen(strFormat); i++) { char colorLetter = toupper(strFormat[i]); _strFormat[i] = colorLetter; switch (colorLetter) { case 'R': _byteR = i; break; case 'G': _byteG = i; break; case 'B': _byteB = i; break; case 'A': _byteA = i; break; default: throw SWException("Pixel format %s includes an invalid character %c", strFormat, colorLetter); } } _strFormat[strlen(strFormat)] = '\0'; // Make sure all the colors were included in the format if (_byteR < 0 || _byteG < 0 || _byteB < 0 || (_byteA < 0 && bitCount == 32)) throw SWException("Pixel values are invalid"); } CFrameImpl::CPixelFormat::CPixelFormat(int bitCount, LPCSTR strFormat) { Init (bitCount, strFormat ? 
strFormat : GetStandardStringFormat(bitCount)); } CFrameImpl::CPixelFormat::CPixelFormat(const CFrameImpl::CPixelFormat &format) { _byteR = format._byteR; _byteG = format._byteG; _byteB = format._byteB; _byteA = format._byteA; #ifdef WIN32 strcpy_s (_strFormat, 5, format._strFormat); #else strncpy(_strFormat, format._strFormat, 5); #endif } bool CFrameImpl::CPixelFormat::IsEqual(const CPixelFormat &format) const { return (strcmp (GetStringFormat(), format.GetStringFormat()) == 0); } /**************************/ /* CONSTRUCTION OF CFRAME */ /**************************/ CFrame *CFrame::Create (unsigned int width, unsigned int height, unsigned int bitCount, LPCSTR pixelFormat) { return new CFrameImpl (width, height, bitCount, pixelFormat); } CFrame *CFrame::Create (const CFrame &frame) { return new CFrameImpl ((const CFrameImpl &)frame); } <file_sep>/VideoStream/VideoStream/VSAudioImpl.h #pragma once #include "VSMedia.h" #include "VSISync.h" #include "VSVideo.h" #include "VSWAVFileMedia.h" namespace VideoStream { class CAudioStreamImpl { private: LPTSTR _processVerb; protected: int _streamId; std::string _bareLocation; MediaLocationParams _locationParams; CAudioStreamImpl (LPCTSTR processVerb); virtual ~CAudioStreamImpl(); void _Open (LPCTSTR location); void _Close(); void Write2Log (LPCTSTR msg); }; class CInputAudioStreamImpl : public CAudioStreamImpl, public CInputAudioStream { private: CInputWAVFile *_pStream; IInputSync *_pSync; bool HandshakeWithProducer (LPCTSTR bareLocation, bool waitForProducer, DWORD timeoutMS); bool OpenInputStream (LPCTSTR location); public: CInputAudioStreamImpl(); virtual ~CInputAudioStreamImpl(); bool Open (LPCTSTR location); const AUDIOINFO &GetAudioInfo() const; int Read( unsigned char *bfr, int bfr_size, bool *eof ); void Close(); }; class COutputAudioStreamImpl : public CAudioStreamImpl, public COutputAudioStream { private: IOutputSync *_pSync; public: COutputAudioStreamImpl(); virtual ~COutputAudioStreamImpl(); bool Open (LPCTSTR location); void Close(); }; };<file_sep>/VideoDecoder/VideoDecoder/CMakeLists.txt file(GLOB VideoDecoder_SRC "*.cpp" ) add_library(VideoDecoder ${VideoDecoder_SRC}) #add_definitions(-D__STDC_CONSTANT_MACROS) if(UNIX) target_include_directories(VideoDecoder PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../../VideoStream/VideoStream/include ${CMAKE_CURRENT_SOURCE_DIR}/../../RenderingManager/RenderingManager/include ${CMAKE_CURRENT_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/../../third_party/include/ffmpeg/include ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/Utils ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/StackWalker/include ${CMAKE_CURRENT_SOURCE_DIR}/) else() target_include_directories(VideoDecoder PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../../VideoStream/VideoStream/include ${CMAKE_CURRENT_SOURCE_DIR}/../../RenderingManager/RenderingManager/include ${CMAKE_CURRENT_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/../../ffmpeg/include ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/Utils ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/StackWalker/include ${CMAKE_CURRENT_SOURCE_DIR}/) endif() #target_link_libraries(CliManager LINK_PUBLIC StackWalker) <file_sep>/Fmod/AudioMixer.cpp #include "afx.h" #include <math.h> #include "AudioMixer.h" #include "ConstantInterpolated.h" #include "Parser.h" #include "Presets/Preset.h" #include "Presets/MaximizePreset.h" #include "Presets/BackgroundPreset.h" #include "Presets/NarrationPreset.h" #include "Events/SyncPointEventDescriptor.h" #include "Events/DurationEvent.h" #include "Events/SequenceEvent.h" #include 
"Events/SetVolumeEvent.h" #include "Events/AddVolumeInterpolationStepEvent.h" #include "Effects/AudioStretchDsp.h" #include <algorithm> #include <sys/time.h> //#include "VSVideo.h" vector<AudioPart*> clipAudioParts; vector<EffectGroup*> clipEffectGroups; vector<FMOD::Sound*> soundsToBeReleased; map<std::string, std::vector<SyncPointEventDescriptor *> > eventMap; System *fmodSystem; ChannelGroup *parentChannelGroup; long key = 0; int numberOfAudioParts = 0; int releasedAudioParts = 0; Channel *timelineChannel; double clipEndTime; FILE * logFile; FMOD_RESULT F_CALLBACK channelCallback(FMOD_CHANNEL *channel, FMOD_CHANNEL_CALLBACKTYPE type, void *commanddata1, void *commanddata2); void processAudioPart(AudioPart *audioPart); FMOD::Channel *sequenceAudioPart(AudioPart *audioPart); void loadPresets(AudioPart *audioPart, FMOD::Channel *channel); void loadOriginPreset(AudioPart *audioPart); void loadRolePreset(AudioPart *audioPart); std::string getKey(); double toRelativeTime(double time, AudioPart *audioPart); void setChannelVolume(AudioPart *audioPart, FMOD::Channel *channel); std::string addVolumeEventToMap(SetVolumeEvent *setVolumeEvent); double getPresetsVolume(AudioPart *audioPart); void setInterpolationStep(LinearInterpolated *linear, double startVol, double endVol, double numOfSteps); void addInterpolationSyncPointsToChannel(FMOD::Channel *channel, double startTime, double startValue, double endValue, double step, double duration, double numOfSteps); std::string addStepEventToMap(double step); void setChannelPace(AudioPart *audioPart); void setChannelOffsetAndDuration(AudioPart *audioPart, FMOD::Channel *channel); std::string addDurationEventToMap(DurationEvent *durationEvent); void applyPresets(AudioPart *audioPart, FMOD::Channel *channel); void associateWithEffectGroup(AudioPart *audioPart, FMOD::Channel *channel); void associateChannelWithChannelGroup(FMOD::Channel *channel, int channelGroupIndex); void releaseSound(FMOD::Channel *channel); void stopTimeline(FMOD::Channel *timelineChannel); void checkResult(FMOD_RESULT result); AudioMixer::AudioMixer() { numberOfAudioParts = 0; releasedAudioParts = 0; key = 0; } AudioMixer::~AudioMixer() { for (std::vector<AudioPart *>::iterator it = clipAudioParts.begin(); it != clipAudioParts.end(); ++it) { AudioPart *audioPart = *it; delete audioPart; } for (std::vector<EffectGroup *>::iterator it = clipEffectGroups.begin(); it != clipEffectGroups.end(); ++it) { EffectGroup *effectGroup = *it; delete effectGroup; } for (map<string, vector<SyncPointEventDescriptor *> >::iterator it = eventMap.begin(); it != eventMap.end(); ++it) { vector<SyncPointEventDescriptor *> eventVector = it->second; for (std::vector<SyncPointEventDescriptor *>::iterator eventIt = eventVector.begin(); eventIt != eventVector.end(); ++eventIt) { SyncPointEventDescriptor *eventDescriptor = *eventIt; delete eventDescriptor; } } fclose(logFile); } void AudioMixer::config(std::string in, std::string out, std::string logName) { inputFilename = in; outputFilename = out; log = logName; } void writeToLogFile(std::string msg) { char buffer[26]; int millisec; struct tm* tm_info; struct timeval tv; ::gettimeofday(&tv, 0); millisec = lrint(tv.tv_usec/1000.0); // Round to nearest millisec if (millisec>=1000) { // Allow for rounding up to nearest second millisec -=1000; tv.tv_sec++; } tm_info = localtime(&tv.tv_sec); strftime(buffer, 26, "%Y:%m:%d %H:%M:%S", tm_info); char buffer1[30]; sprintf(buffer1, "%s.%03d", buffer, millisec); fprintf(logFile, "%s %s\n", buffer1, msg.c_str()); } void 
AudioMixer::runService() { //Open the log file logFile = fopen(log.c_str(), "w"); if (logFile == NULL) { printf ( "Failed to open log file: %s\n ", log.c_str()); exit(6); } writeToLogFile("Start running Fmod service"); // parse MF, parse!!! Parser *parser = new Parser(); parser->parse(inputFilename); clipAudioParts = parser->getClipAudioParts(); clipEffectGroups = parser->getClipEffectGroups(); // sort the audio parts in the order of their start time std::sort(clipAudioParts.begin(), clipAudioParts.end(), StartTimeAscendingDateSort()); writeToLogFile("After parsing input"); createAndInitFmodSystem(&fmodSystem); writeToLogFile("After init fmod system"); associateSimplePartsWithChannelGroups(); initializeParentChannelGroup(); createEffectGroups(); clipEndTime = calculateClipEndTime(); FMOD::Sound *clipTimeline = 0; setClipTimeline(&clipTimeline); startTimeline(clipTimeline, &timelineChannel); writeToLogFile("After starting timeline"); // Init output // This merely creates the file and sets an event for the consumer //VideoStream::EnableLog(true); //VideoStream::COutputAudioStream *outputStream = // VideoStream::COutputAudioStream::Create(); //if (!outputStream->Open(outputFilename.c_str())) { // printf("Failed to open output url: %s\n", outputFilename.c_str()); // exit(7); //} playAudioParts(); writeToLogFile("After playing audio parts"); shutdownFmod(); std::string endFilename = outputFilename + ".finished"; FILE * finishedFile = fopen(endFilename.c_str(), "w"); fclose(finishedFile); writeToLogFile("Finished running Fmod service"); //outputStream->Close(); //VideoStream::COutputAudioStream::Destroy(outputStream); } bool AudioMixer::createAndInitFmodSystem(FMOD::System **fmodSystem) { unsigned int version; FMOD::System_Create(fmodSystem); (*fmodSystem)->getVersion(&version); if (version < FMOD_VERSION) { printf( "Error! You are using an old version of FMOD %08x. 
This program requires %08x\n", version, FMOD_VERSION);
		return false;
	}

	(*fmodSystem)->setOutput(FMOD_OUTPUTTYPE_WAVWRITER_NRT);
	(*fmodSystem)->setSoftwareFormat(44100, FMOD_SOUND_FORMAT_PCM16, 0, 0, FMOD_DSP_RESAMPLER_LINEAR);
	(*fmodSystem)->init(32, FMOD_INIT_STREAM_FROM_UPDATE, (void *) outputFilename.c_str());

	return true;
}

bool AudioMixer::associateSimplePartsWithChannelGroups() {
	int effectGroupIndex = 1;
	for (std::vector<EffectGroup *>::iterator it = clipEffectGroups.begin(); it != clipEffectGroups.end(); ++it) {
		EffectGroup *effectGroup = *it;
		std::vector<std::string> partsIDs = effectGroup->getPartsIDs();
		for (std::vector<std::string>::iterator partsIDsItr = partsIDs.begin(); partsIDsItr != partsIDs.end(); ++partsIDsItr) {
			std::string effectGroupPartId = *partsIDsItr;
			for (std::vector<AudioPart *>::iterator audioPartsItr = clipAudioParts.begin(); audioPartsItr != clipAudioParts.end(); ++audioPartsItr) {
				AudioPart *audioPart = *audioPartsItr;
				std::string audioPartId = audioPart->getId();
				if (effectGroupPartId == audioPartId) {
					audioPart->setChannelGroup(effectGroupIndex);
				}
			}
		}
		effectGroupIndex++;
	}
	return true;
}

bool AudioMixer::initializeParentChannelGroup() {
	fmodSystem->createChannelGroup("parentChannelGroup", &parentChannelGroup);
	MaximizePreset *max = new MaximizePreset();
	max->create(fmodSystem);
	max->apply(parentChannelGroup);
	return true;
}

bool AudioMixer::createEffectGroups() {
	int channelGroupIndex = 0;
	for (std::vector<EffectGroup *>::iterator it = clipEffectGroups.begin(); it != clipEffectGroups.end(); ++it) {
		EffectGroup *effectGroup = *it;

		// create channel groups in Fmod
		// Build the group name with the running index ("EffectGroup0", "EffectGroup1", ...)
		ChannelGroup *newChannelGroup;
		char groupName[64];
		snprintf(groupName, sizeof(groupName), "EffectGroup%d", channelGroupIndex);
		fmodSystem->createChannelGroup(groupName, &newChannelGroup);
		parentChannelGroup->addGroup(newChannelGroup);

		// add the effects to the channel groups
		loadPresetsToChannelGroups(newChannelGroup, effectGroup);
		channelGroupIndex++;
	}
	return true;
}

void AudioMixer::loadPresetsToChannelGroups(ChannelGroup *channelGroup, EffectGroup *effectGroup) {
	std::vector<Effect *> effects = effectGroup->getEffects();
	for (std::vector<Effect *>::iterator it = effects.begin(); it != effects.end(); ++it) {
		Effect *effect = *it;
		effect->apply(channelGroup);
	}
}

double AudioMixer::calculateClipEndTime() {
	double maxEndTime = 0.0;
	for (std::vector<AudioPart *>::iterator it = clipAudioParts.begin(); it != clipAudioParts.end(); ++it) {
		AudioPart *audioPart = *it;
		double endTime = audioPart->getEnd();
		if (endTime > maxEndTime) {
			maxEndTime = endTime;
		}
	}
	return maxEndTime;
}

bool AudioMixer::setClipTimeline(FMOD::Sound **clipTimeline) {
	// enter silence channel
	FMOD_CREATESOUNDEXINFO exinfo;
	memset(&exinfo, 0, sizeof(FMOD_CREATESOUNDEXINFO));
	exinfo.cbsize = sizeof(FMOD_CREATESOUNDEXINFO);
	exinfo.numchannels = 1;
	exinfo.defaultfrequency = 44100;
	exinfo.length = 50000000;
	exinfo.format = FMOD_SOUND_FORMAT_PCM16;
	fmodSystem->createSound(0, FMOD_OPENUSER, &exinfo, clipTimeline);
	writeToLogFile("After creating silence track");
	//fmodSystem->createSound(silence.c_str(), FMOD_SOFTWARE, 0, clipTimeline);

	AudioPart *previousAudioPart = clipAudioParts[0];
	std::vector<SyncPointEventDescriptor *> *eventList = new std::vector<SyncPointEventDescriptor *>();
	for (std::vector<AudioPart *>::iterator it = clipAudioParts.begin(); it != clipAudioParts.end(); ++it) {
		AudioPart *audioPart = *it;
		if (previousAudioPart->getStart() != audioPart->getStart()) {
			setSequenceEvent(*clipTimeline, previousAudioPart, *eventList);
			eventList = new std::vector<SyncPointEventDescriptor *>();
		}
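		// Note: numberOfAudioParts doubles here as the running index of the part being
		// registered; the SequenceEvent keeps that index so channelCallback() can later
		// look up clipAudioParts[index] and start that part when the timeline reaches the
		// matching sync point. After the loop it holds the total part count that
		// playAudioParts() compares against releasedAudioParts.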
SequenceEvent *sequenceEvent = new SequenceEvent(numberOfAudioParts); eventList->push_back(sequenceEvent); previousAudioPart = audioPart; numberOfAudioParts++; } setSequenceEvent(*clipTimeline, previousAudioPart, *eventList); return true; } void AudioMixer::setSequenceEvent(FMOD::Sound *clipTimeline, AudioPart *previousAudioPart, std::vector<SyncPointEventDescriptor *> eventList) { std::string key = getKey(); eventMap[key] = eventList; FMOD_SYNCPOINT *point; clipTimeline->addSyncPoint((unsigned int) previousAudioPart->getStart(), FMOD_TIMEUNIT_MS, key.c_str(), &point); } bool AudioMixer::startTimeline(FMOD::Sound *clipTimeline, FMOD::Channel **timelineChannel) { checkResult(fmodSystem->playSound(FMOD_CHANNEL_FREE, clipTimeline, true, timelineChannel)); checkResult((*timelineChannel)->setChannelGroup(parentChannelGroup)); checkResult((*timelineChannel)->setCallback(channelCallback)); return true; } bool AudioMixer::playAudioParts() { unpauseChannels(); while (releasedAudioParts < numberOfAudioParts) { fmodSystem->update(); } return true; } void AudioMixer::unpauseChannels() { // check the individual channels under MGC int noOfChannels; parentChannelGroup->getNumChannels(&noOfChannels); for (int i = 0; i < noOfChannels; i++) { FMOD::Channel *channel; parentChannelGroup->getChannel(i, &channel); if (channel != NULL) { channel->setPaused(false); } } } bool AudioMixer::shutdownFmod() { for (std::vector<FMOD::Sound *>::iterator it = soundsToBeReleased.begin(); it != soundsToBeReleased.end(); ++it) { FMOD::Sound *sound = *it; sound->release(); } fmodSystem->close(); fmodSystem->release(); return true; } ////////////////////////////////////////////////// //////////// Out Of Class Functions ////////////// ////////////////////////////////////////////////// //typedef FMOD_RESULT (F_CALLBACK *FMOD_CHANNEL_CALLBACK) //(FMOD_CHANNEL *channel, FMOD_CHANNEL_CALLBACKTYPE type, void *commanddata1, void *commanddata2); FMOD_RESULT F_CALLBACK channelCallback(FMOD_CHANNEL *channel, FMOD_CHANNEL_CALLBACKTYPE type, void *commanddata1, void *commanddata2) { FMOD::Channel *_channel = (FMOD::Channel *) channel; if (type == FMOD_CHANNEL_CALLBACKTYPE_SYNCPOINT) { writeToLogFile("Channel callback of type synchpoint - start"); FMOD::Sound *sound; checkResult(_channel->getCurrentSound(&sound)); unsigned int length; checkResult(_channel->getPosition(&length, FMOD_TIMEUNIT_MS)); FMOD_SYNCPOINT *sync; int index = (int) ((long) commanddata1); checkResult(sound->getSyncPoint(index, &sync)); unsigned int offset; char syncPointName[MAX_LEN]; checkResult(sound->getSyncPointInfo(sync, syncPointName, MAX_LEN, &offset, FMOD_TIMEUNIT_MS)); std::vector<SyncPointEventDescriptor *> eventList = eventMap[syncPointName]; for (std::vector<SyncPointEventDescriptor *>::iterator it = eventList.begin(); it != eventList.end(); ++it) { SyncPointEventDescriptor *eventDescriptor = *it; int audioPartIndex = eventDescriptor->apply(_channel); SequenceEvent *sequenceEvent = dynamic_cast<SequenceEvent *> (eventDescriptor); if (sequenceEvent != NULL) { static int evev = 3; evev--; // if ( evev >= 0 ) { AudioPart *audioPart = clipAudioParts[audioPartIndex]; processAudioPart(audioPart); } } } writeToLogFile("Channel callback of type synchpoint - end"); } else if (type == FMOD_CHANNEL_CALLBACKTYPE_END) { writeToLogFile("Channel callback of type end - start"); FMOD::Sound *sound; checkResult(_channel->getCurrentSound(&sound)); soundsToBeReleased.push_back(sound); releasedAudioParts++; if (releasedAudioParts == numberOfAudioParts) { 
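			// All scheduled audio parts have now ended: stop the silent timeline channel and
			// pump one more fmodSystem->update() so the stop is processed before
			// playAudioParts() sees releasedAudioParts == numberOfAudioParts and exits its loop.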
stopTimeline(timelineChannel); fmodSystem->update(); //playing = false; } writeToLogFile("Channel callback of type end - end"); } return FMOD_OK; } void processAudioPart(AudioPart *audioPart) { char buffer[2000]; sprintf(buffer, "Start process audio part %s", audioPart->getSource().c_str()); std::string buffAsStdStr = buffer; writeToLogFile(buffAsStdStr); FMOD::Channel *channel = sequenceAudioPart(audioPart); //channel->setCallback(channelCallback); //channel->setCallback(FMOD_CHANNEL_CALLBACKTYPE_SYNCPOINT, channelCallback, 0); loadPresets(audioPart, channel); checkResult(channel->setChannelGroup(parentChannelGroup)); setChannelOffsetAndDuration(audioPart, channel); setChannelVolume(audioPart, channel); setChannelPace(audioPart); applyPresets(audioPart, channel); // associate the simple part with the effect group it belongs to, if relevant associateWithEffectGroup(audioPart, channel); // set channel panning //setChannelPan(audioPart, channel);i sprintf(buffer, "End process audio part %s", audioPart->getSource().c_str()); buffAsStdStr = buffer; writeToLogFile(buffAsStdStr); } FMOD::Channel *sequenceAudioPart(AudioPart *audioPart) { FMOD::Sound *sound; FMOD::Channel *channel; checkResult(fmodSystem->createSound((audioPart->getSource()).c_str(), FMOD_SOFTWARE|FMOD_CREATESTREAM, 0, &sound)); char buffer[2000]; sprintf(buffer, "After create sound audio part %s", audioPart->getSource().c_str()); std::string buffAsStdStr = buffer; writeToLogFile(buffAsStdStr); // create a channel for the sound, play it checkResult( fmodSystem->playSound(FMOD_CHANNEL_FREE, sound, false, &channel)); checkResult(channel->setCallback(channelCallback)); //channel->setCallback(FMOD_CHANNEL_CALLBACKTYPE_END, channelCallback, 0); return channel; } void loadPresets(AudioPart *audioPart, FMOD::Channel *channel) { loadOriginPreset(audioPart); loadRolePreset(audioPart); } void loadOriginPreset(AudioPart *audioPart) { /* AudioSpeaker speaker = audioPart->getSpeaker(); if (speaker == DAVE) { TtsDavePreset dave = new TtsDavePreset(); audioPart->addEffect(dave); } else if (speaker == KATE) { TtsKatePreset kate = new TtsKatePreset(); audioPart->addEffect(kate); } else if (speaker == SIMON) { TtsSimonPreset simon = new TtsSimonPreset(); audioPart->addEffect(simon); } else if (speaker == SUSAN) { TtsSusanPreset susan = new TtsSusanPreset(); audioPart->addEffect(susan); } */ } void loadRolePreset(AudioPart *audioPart) { AudioRole role = audioPart->getRole(); if (role == NARRATION) { NarrationPreset *narrPreset = new NarrationPreset(); audioPart->addEffect(narrPreset); } else if (role == BACKGROUND) { BackgroundPreset *backPreset = new BackgroundPreset(); audioPart->addEffect(backPreset); } } void setChannelOffsetAndDuration(AudioPart *audioPart, FMOD::Channel *channel) { double start = audioPart->getStart(); double end = audioPart->getEnd(); double offset = audioPart->getOffset(); double paceFactor = audioPart->getPaceFactor(); double duration = ceil((end - start) * paceFactor); int sampleRate; fmodSystem->getSoftwareFormat(&sampleRate, NULL, NULL, NULL, NULL, NULL); checkResult(channel->setDelay(FMOD_DELAYTYPE_DSPCLOCK_END, 0, (int) (end * sampleRate / 1000))); if (offset != 0.0) { checkResult(channel->setPosition((int) offset, FMOD_TIMEUNIT_MS)); } /* FMOD::Sound *sound; getCurrentSound(&sound)); unsigned int length; checkResult( sound->getLength(&length, FMOD_TIMEUNIT_MS)); if (length > offset + duration) { double durationOffset = offset + duration; checkResult( channel->getCurrentSound(&sound)); DurationEvent *durationEvent = 
new DurationEvent(); std::string key = addDurationEventToMap(durationEvent); FMOD_SYNCPOINT *syncPoint; checkResult( sound->addSyncPoint((int)durationOffset, FMOD_TIMEUNIT_MS, key.c_str(), &syncPoint)); } */ } std::string addDurationEventToMap(DurationEvent *durationEvent) { std::vector<SyncPointEventDescriptor *> eventList; eventList.push_back(durationEvent); std::string key = getKey(); eventMap[key] = eventList; return key; } void setChannelPace(AudioPart *audioPart) { double paceFactor = audioPart->getPaceFactor(); if (paceFactor != 1.0) { AudioStretchDsp *audioStretch = new AudioStretchDsp(); audioStretch->setFactor((float) paceFactor); audioPart->addEffect(audioStretch); } } void setChannelVolume(AudioPart *audioPart, FMOD::Channel *channel) { std::vector<Interpolated *> volumeList = audioPart->getVolume(); for (std::vector<Interpolated *>::iterator it = volumeList.begin(); it != volumeList.end(); ++it) { Interpolated *volume = *it; std::string interpolationType = volume->getType(); if (interpolationType == "ConstantInterpolated") { ConstantInterpolated *constantVolume = (ConstantInterpolated *) volume; Sound *sound; checkResult(channel->getCurrentSound(&sound)); SetVolumeEvent *volumeEvent = new SetVolumeEvent( constantVolume->getValue()); std::string key = addVolumeEventToMap(volumeEvent); // add sync point to the channel in the starting point of the constant volume part FMOD_SYNCPOINT *syncPoint; checkResult(sound->addSyncPoint((int) toRelativeTime( constantVolume->getStartTime(), audioPart), FMOD_TIMEUNIT_MS, key.c_str(), &syncPoint)); } else if (interpolationType == "LinearInterpolated") { LinearInterpolated *linearVolume = (LinearInterpolated *) volume; // FIXME - documentation here - important double presetsVolume = getPresetsVolume(audioPart); double startVol = linearVolume->getStartValue() * presetsVolume; double endVol = linearVolume->getEndValue() * presetsVolume; double startTime = toRelativeTime(linearVolume->getStartTime(), audioPart); double endTime = toRelativeTime(linearVolume->getEndTime(), audioPart); double duration = endTime - startTime; // the automation engine will change the interpolated value 20 times // per second, calculate double numOfSteps = duration / STEP_INTERVAL; // FIXME - step thing - big explanation needed // set the step value - it wouldnt be efficient to calculate the step value // every time there's a callback setInterpolationStep(linearVolume, startVol, endVol, numOfSteps); addInterpolationSyncPointsToChannel(channel, toRelativeTime( linearVolume->getStartTime(), audioPart), startVol, endVol, linearVolume->getStep(), duration, numOfSteps); } } } std::string addVolumeEventToMap(SetVolumeEvent *setVolumeEvent) { std::vector<SyncPointEventDescriptor *> eventList; eventList.push_back(setVolumeEvent); std::string key = getKey(); eventMap[key] = eventList; return key; } double toRelativeTime(double time, AudioPart *audioPart) { return time - audioPart->getStart(); } std::string getKey() { // inc the key - not atomic, but we'll check that later char keyArray[256]; key++; // _itoa_s(key, keyArray, 10); snprintf(keyArray, 10, "%d", key); return keyArray; } double getPresetsVolume(AudioPart *audioPart) { double presetsVolume = 1.0; std::vector<Effect *> effectList = audioPart->getEffects(); for (std::vector<Effect *>::iterator it = effectList.begin(); it != effectList.end(); ++it) { Effect *effect = *it; Preset *preset = dynamic_cast<Preset *> (effect); if (preset != NULL) { bool created = preset->getCreated(); if (created) { float presetVolume = 
preset->getVolume(); presetsVolume *= presetVolume; } } } return presetsVolume; } void setInterpolationStep(LinearInterpolated *linear, double startVol, double endVol, double numOfSteps) { linear->setStep((endVol - startVol) / numOfSteps); } void addInterpolationSyncPointsToChannel(FMOD::Channel *channel, double startTime, double startValue, double endValue, double step, double duration, double numOfSteps) { FMOD::Sound *sound; checkResult(channel->getCurrentSound(&sound)); SetVolumeEvent *setVolumeEvent = new SetVolumeEvent(startValue); std::string key = addVolumeEventToMap(setVolumeEvent); FMOD_SYNCPOINT *syncPoint; sound->addSyncPoint((int) startTime, FMOD_TIMEUNIT_MS, key.c_str(), &syncPoint); for (int i = 1; i < (int) numOfSteps; i++) { // add sync points to the channel - numOfSteps sync points every STEP_INTERVAL ms key = addStepEventToMap(step); FMOD_SYNCPOINT *syncPoint; checkResult(sound->addSyncPoint((int) startTime + i * STEP_INTERVAL, FMOD_TIMEUNIT_MS, key.c_str(), &syncPoint)); } } std::string addStepEventToMap(double step) { std::vector<SyncPointEventDescriptor *> eventList; AddVolumeInterpolationStepEvent *avise = new AddVolumeInterpolationStepEvent(step); eventList.push_back(avise); std::string key = getKey(); eventMap[key] = eventList; return key; } /* private void setChannelPan(AudioPart audioPart, Channel channel) { Interpolated<Double> pan = audioPart.getPan(); if (pan != null) { if (pan.getClass() == Constant.class) { Constant<Double> v = (Constant<Double>) pan; channel.setVolume(v.getValue().floatValue()); } else if (pan.getClass() == LinearInterpolated.class) { Double length = getAudioPartLength(audioPart); // the automation engine will change the interpolated value 20 times // per second, calculate Double numOfSteps = length / STEP_INTERVAL; Sound sound = new Sound(); channel.getCurrentSound(sound); //addInterpolationSyncPointsToChannel(channel, length, numOfSteps, PAN); } } } */ void applyPresets(AudioPart *audioPart, FMOD::Channel *channel) { std::vector<Effect *> effectList = audioPart->getEffects(); if (!effectList.empty()) { for (std::vector<Effect *>::iterator it = effectList.begin(); it != effectList.end(); ++it) { Effect *effect = *it; effect->apply(channel); } } } void associateWithEffectGroup(AudioPart *audioPart, FMOD::Channel *channel) { int channelGroupIndex = audioPart->getChannelGroup(); if (channelGroupIndex != -1) { associateChannelWithChannelGroup(channel, channelGroupIndex); } } void associateChannelWithChannelGroup(FMOD::Channel *channel, int channelGroupIndex) { ChannelGroup *channelGroup; checkResult(parentChannelGroup->getGroup(channelGroupIndex, &channelGroup)); checkResult(channel->setChannelGroup(channelGroup)); } void releaseSound(FMOD::Channel *channel) { Sound *sound; checkResult(channel->getCurrentSound(&sound)); checkResult(sound->release()); } void stopTimeline(FMOD::Channel *timelineChannel) { checkResult(timelineChannel->stop()); } // TODO - should be upgraded to throw exceptions void checkResult(FMOD_RESULT result) { if (result != FMOD_OK) { printf("error in result: (%d) %s\n", result, FMOD_ErrorString(result)); } } <file_sep>/utils/Utils/FileUtils.h /* * FileUtils.h * * Created on: Aug 27, 2013 * Author: eranv */ #ifndef FILEUTILS_H_ #define FILEUTILS_H_ #include <sys/stat.h> #include "WindowDefinitions.h" static mode_t set_mode(){ mode_t mode = 0; mode |= S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP; //| S_IROTH | S_IWOTH; return mode; } //#define MY_MASK 0777 static mode_t MY_MASK = set_mode(); class FileUtils { public: static bool 
folderExists(const char* folderName); static bool fileExists(const char* filename); }; #endif /* FILEUTILS_H_ */ <file_sep>/VideoStream/VideoStream/LocalMemStream.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSLocalMemStream.h" #include "SWException.h" #include "VSLocalMemStream.h" #include "SWException.h" using namespace std; using namespace VideoStream; // Static mutex. Used for pausing all local memory streams. HANDLE CLocalMemStream::m_pausingMutex = CreateMutex (NULL, FALSE, NULL); CLocalMemStream::CLocalMemStream(string streamName) : m_streamName(streamName), m_mutex(NULL), m_fullSem(NULL), m_emptySem(NULL), m_openingSem(NULL), m_firstElem(0), m_firstFreeElem(0), m_bps(-1), m_maxBufferSize(0), m_readerTimeoutMS(INFINITE), m_writerTimeoutMS(INFINITE), m_readerState(NOT_OPENED), m_writerState(NOT_OPENED) { // Create mutex. m_mutex = CreateMutex (NULL, FALSE, NULL); if (m_mutex == NULL) { throw SWException("Failed to create mutex."); } // There are three counting semaphores. // m_fullSem counts the number of full slots. It is initialized here. // m_emptySem counts the number of empty slots. It will be initialized when we know how many frames we support in a stream. // m_openingSem counts only until 1. It blocks the readerOpen until a writerOpen takes place. m_fullSem = CreateSemaphore (NULL, 0, m_frameArraySize, NULL); if (m_fullSem == NULL) { CloseHandle(m_mutex); m_mutex = NULL; throw SWException("Failed to create full semaphore."); } m_openingSem = CreateSemaphore (NULL, 0, 1, NULL); if (m_openingSem == NULL) { CloseHandle(m_mutex); m_mutex = NULL; CloseHandle(m_fullSem); m_fullSem = NULL; throw SWException("Failed to create opening semaphore."); } } CLocalMemStream::~CLocalMemStream() { if (m_mutex) CloseHandle(m_mutex); if (m_fullSem) CloseHandle(m_fullSem); if (m_emptySem) CloseHandle(m_emptySem); if (m_openingSem) CloseHandle(m_openingSem); for (int i= m_firstElem; i != m_firstFreeElem; i= (i+1) % m_frameArraySize) { m_frameArray[i].setNull(); } } CFramePtr CLocalMemStream::read() { // The following two lines cause the read command to block when the memory streams are paused. WaitForSingleObject(m_pausingMutex, INFINITE); ReleaseMutex(m_pausingMutex); DWORD rc = WaitForSingleObject(m_fullSem, m_readerTimeoutMS); switch (rc) { case WAIT_OBJECT_0: // good break; case WAIT_TIMEOUT: throw SWException("Local memory stream read timed out."); default: throw SWException("Local memory stream read failed taking full semaphore. rc = 0x%x.", rc); } WaitForSingleObject(m_mutex, INFINITE); CFramePtr framePtr = m_frameArray[m_firstElem]; if (framePtr.isNull()) { // This is supposed to happen only if the writer has closed. if (m_writerState != CLOSED) { throw SWException("Error in local memory stream. A null frame encountered unexpectedly."); } } else { m_frameArray[m_firstElem].setNull(); m_firstElem = (m_firstElem + 1) % m_frameArraySize; } ReleaseMutex(m_mutex); ReleaseSemaphore(m_emptySem, 1, NULL); return framePtr; } void CLocalMemStream::write(CFramePtr framePtr) { // The following two lines cause the write command to block when the memory streams are paused. WaitForSingleObject(m_pausingMutex, INFINITE); ReleaseMutex(m_pausingMutex); if (framePtr.isNull()) { throw SWException("Error. 
Attempting to write a null frame."); } DWORD rc = WaitForSingleObject(m_emptySem, m_writerTimeoutMS); switch (rc) { case WAIT_OBJECT_0: // good break; case WAIT_TIMEOUT: throw SWException("Local memory stream write timed out."); default: throw SWException("Local memory stream write failed taking empty semaphore. rc = 0x%x.", rc); } WaitForSingleObject(m_mutex, INFINITE); m_frameArray[m_firstFreeElem] = framePtr; m_firstFreeElem = (m_firstFreeElem + 1) % m_frameArraySize; ReleaseMutex(m_mutex); ReleaseSemaphore(m_fullSem, 1, NULL); } int CLocalMemStream::getNumberOfFrames() { WaitForSingleObject(m_mutex, INFINITE); int s = (m_firstFreeElem + m_frameArraySize - m_firstElem) % m_frameArraySize; ReleaseMutex(m_mutex); return s; } int CLocalMemStream::getMaxBufferSize() { return m_maxBufferSize; } void CLocalMemStream::readerOpen(DWORD readerTimeoutMS) { // If readerOpen() is called before writerOpen() we will be stuck on this semaphore. DWORD rc = WaitForSingleObject(m_openingSem, readerTimeoutMS); switch (rc) { case WAIT_OBJECT_0: // good break; case WAIT_TIMEOUT: throw SWException("Local memory stream opening timed out."); default: throw SWException("Local memory stream read failed taking opening semaphore. rc = 0x%x.", rc); } WaitForSingleObject(m_mutex, INFINITE); if (m_readerState != NOT_OPENED) { ReleaseMutex(m_mutex); throw SWException("Failed to open a local memory stream for reading. Current state %d.", m_readerState); } m_readerTimeoutMS = readerTimeoutMS; m_readerState = OPENED; ReleaseMutex(m_mutex); } void CLocalMemStream::readerClose() { WaitForSingleObject(m_mutex, INFINITE); if (m_readerState != OPENED) { ReleaseMutex(m_mutex); throw SWException("Failed to close a local memory stream for reading. Current state %d.", m_readerState); } m_readerState = CLOSED; ReleaseMutex(m_mutex); } void CLocalMemStream::writerOpen(const VIDEOINFO &vi, const string & codecstr, int bps, int maxBufferSize, DWORD writerTimeoutMS) { WaitForSingleObject(m_mutex, INFINITE); if (m_writerState != NOT_OPENED) { ReleaseMutex(m_mutex); throw SWException("Failed to open a local memory stream for reading. Current state %d.", m_writerState); } m_vi = vi; m_codecstr = codecstr; m_bps = bps; m_maxBufferSize = maxBufferSize; m_writerTimeoutMS = writerTimeoutMS; m_writerState = OPENED; // Fix m_maxBufferSize if necessary. if (m_maxBufferSize < 0 || m_maxBufferSize >= m_frameArraySize) { // The maximum capacity is the array's size minus 1. // The array must never be full so that (m_firstElem == m_firstFreeElem) means an // empty list and will not be ambiguous. m_maxBufferSize = m_frameArraySize - 1; } // Create the empty semaphore. This semaphore counts the number of empty slots. When it reaches 0 // a writer who tries to write a frame blocks. m_emptySem = CreateSemaphore (NULL, m_maxBufferSize, m_frameArraySize, NULL); if (m_emptySem == NULL) { ReleaseMutex(m_mutex); throw SWException("Failed to create full semaphore."); } ReleaseMutex(m_mutex); ReleaseSemaphore(m_openingSem, 1, NULL); // Allow reader to open stream } void CLocalMemStream::writerClose() { WaitForSingleObject(m_mutex, INFINITE); if (m_writerState != OPENED) { ReleaseMutex(m_mutex); throw SWException("Failed to close a local memory stream for reading. Current state %d.", m_writerState); } m_writerState = CLOSED; // This ReleaseSemaphore on the fullSem will cause the reader to read a NULL frame once it is done reading the frames in the queue. // The reader, upon reading such a NULL frame will know that the input is finished. 
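	// (Shutdown handshake as implemented in this file: the producer calls writerOpen(),
	//  write() for each frame and then writerClose(); the consumer calls readerOpen(),
	//  read() until it gets back a null CFramePtr, and then readerClose(). The extra
	//  release of the full semaphore below is what wakes the reader so it can observe
	//  that end-of-stream marker.)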
ReleaseSemaphore(m_fullSem, 1, NULL); ReleaseMutex(m_mutex); } bool CLocalMemStream::didReaderAndWriterClose() { return (m_readerState == CLOSED && m_writerState == CLOSED); } void CLocalMemStream::pauseAllStreams() { // This method should NOT be called twice in a row (without a resumeAllStreams() in between). // Taking m_pausingMutex will cause each call to read() and write() to block. WaitForSingleObject(m_pausingMutex, INFINITE); } void CLocalMemStream::resumeAllStreams() { // Release the read() and write() executions that were blocked. ReleaseMutex(m_pausingMutex); } #else #include "VSLocalMemStream.h" #include "SWException.h" #include <errno.h> #include <sys/sem.h> #include <time.h> #include <sys/time.h> using namespace std; using namespace VideoStream; // Static mutex. Used for pausing all local memory streams. //HANDLE CLocalMemStream::m_pausingMutex = CreateMutex (NULL, FALSE, NULL); pthread_mutex_t CLocalMemStream::m_pausingMutex = create_mutex(); pthread_mutex_t CLocalMemStream::create_mutex() { pthread_mutex_init(&CLocalMemStream::m_pausingMutex, NULL); return CLocalMemStream::m_pausingMutex; } CLocalMemStream::CLocalMemStream(string streamName) : m_streamName(streamName), m_firstElem(0), m_firstFreeElem(0), m_bps(-1), m_maxBufferSize(0), m_readerTimeoutMS(INFINITE), m_writerTimeoutMS( INFINITE), m_readerState(NOT_OPENED), m_writerState( NOT_OPENED) { // Create mutex. //m_mutex = CreateMutex (NULL, FALSE, NULL); int res = pthread_mutex_init(&m_mutex, NULL); if (res != 0) { throw SWException("Failed to create mutex."); } // There are three counting semaphores. // m_fullSem counts the number of full slots. It is initialized here. // m_emptySem counts the number of empty slots. It will be initialized when we know how many frames we support in a stream. // m_openingSem counts only until 1. It blocks the readerOpen until a writerOpen takes place. //m_fullSem = CreateSemaphore (NULL, 0, m_frameArraySize, NULL); //if (m_fullSem == NULL) if (sem_init(&m_fullSem, 0, 0) < 0) { //CloseHandle(m_mutex); pthread_mutex_destroy(&m_mutex); //m_mutex = NULL; throw SWException("Failed to create full semaphore."); } printDebug(m_fullSem, "fullSem", m_streamName, "init"); //m_openingSem = CreateSemaphore (NULL, 0, 1, NULL); //if (m_openingSem == NULL) if (sem_init(&m_openingSem, 0, 1) < 0) { pthread_mutex_destroy(&m_mutex); sem_destroy(&m_fullSem); //CloseHandle(m_mutex); //&m_mutex = NULL; //CloseHandle(m_fullSem); //m_fullSem = NULL; throw SWException("Failed to create opening semaphore."); } printDebug(m_openingSem, "openingSem", m_streamName, "init"); //It blocks the readerOpen until a writerOpen takes place. printDebug(m_openingSem, "openingSem", m_streamName, "wait start"); sem_wait(&m_openingSem); printDebug(m_openingSem, "openingSem", m_streamName, "wait end"); } CLocalMemStream::~CLocalMemStream() { //if (m_mutex) CloseHandle(m_mutex); pthread_mutex_destroy(&m_mutex); sem_destroy(&m_fullSem); sem_destroy(&m_emptySem); sem_destroy(&m_openingSem); //if (m_fullSem) CloseHandle(m_fullSem); //if (m_emptySem) CloseHandle(m_emptySem); //if (m_openingSem) CloseHandle(m_openingSem); for (int i = m_firstElem; i != m_firstFreeElem; i = (i + 1) % m_frameArraySize) { m_frameArray[i].setNull(); } } CFramePtr CLocalMemStream::read() { // The following two lines cause the read command to block when the memory streams are paused. 
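	// (Lock-then-unlock of the shared pausing mutex is the whole pausing mechanism: while
	//  pauseAllStreams() holds the mutex, every read() and write() parks on this lock;
	//  resumeAllStreams() releases it and they all continue.)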
//WaitForSingleObject(m_pausingMutex, INFINITE); //ReleaseMutex(m_pausingMutex); pthread_mutex_lock(&CLocalMemStream::m_pausingMutex); pthread_mutex_unlock(&CLocalMemStream::m_pausingMutex); struct timespec ts; if (clock_gettime(CLOCK_REALTIME, &ts) == -1) { throw SWException("Clock_gettime failed\n."); } ts.tv_sec += m_readerTimeoutMS; //DWORD rc = WaitForSingleObject(m_fullSem, m_readerTimeoutMS); int rc; printDebug(m_fullSem, "fullSem", m_streamName, "wait start"); while ((rc = sem_timedwait(&m_fullSem, &ts)) == -1 && errno == EINTR) continue; if (rc == -1) { if (errno == ETIMEDOUT) { throw SWException("Local memory stream read timed out."); } else { throw SWException( "Local memory stream read failed taking full semaphore. rc = 0x%x.", rc); } } printDebug(m_fullSem, "fullSem", m_streamName, "wait end"); pthread_mutex_lock(&m_mutex); CFramePtr framePtr = m_frameArray[m_firstElem]; if (framePtr.isNull()) { // This is supposed to happen only if the writer has closed. if (m_writerState != CLOSED) { fprintf( stderr, "Error in local memory stream. A null frame encountered unexpectedly. Try to bring elemen %d\n", m_firstElem); throw SWException( "Error in local memory stream. A null frame encountered unexpectedly."); } } else { m_frameArray[m_firstElem].setNull(); m_firstElem = (m_firstElem + 1) % m_frameArraySize; } //ReleaseMutex(m_mutex); pthread_mutex_unlock(&m_mutex); //ReleaseSemaphore(m_emptySem, 1, NULL); printDebug(m_emptySem, "emptySem", m_streamName, "post start"); sem_post(&m_emptySem); printDebug(m_emptySem, "emptySem", m_streamName, "post end"); return framePtr; } void CLocalMemStream::printDebug(sem_t& sem, string semName, string streamName, string mode) { return; int value; sem_getvalue(&sem, &value); unsigned found = streamName.find_last_of("/"); string sName = streamName.substr(found + 1); fprintf(stderr, "%s::%s::%s::%d.\n", sName.c_str(), semName.c_str(), mode.c_str(), value); } void CLocalMemStream::write(CFramePtr framePtr) { // The following two lines cause the write command to block when the memory streams are paused. pthread_mutex_lock(&m_pausingMutex); pthread_mutex_unlock(&m_pausingMutex); if (framePtr.isNull()) { throw SWException("Error. Attempting to write a null frame."); } int rc; struct timespec ts; if (clock_gettime(CLOCK_REALTIME, &ts) == -1) { throw SWException("Clock_gettime failed\n."); } ts.tv_sec += m_writerTimeoutMS; printDebug(m_emptySem, "emptySem", m_streamName, "wait start"); while ((rc = sem_timedwait(&m_emptySem, &ts)) == -1 && errno == EINTR) { // fprintf(stderr, "Waiting for sem :%d", rc); continue; } printDebug(m_emptySem, "emptySem", m_streamName, "wait end"); if (rc == -1) { if (errno == ETIMEDOUT) { // fprintf(stderr, "Time out :%d", rc); throw SWException("Local memory stream read timed out."); } else { // fprintf(stderr, "Local memory stream read failed taking full semaphore :%d", rc); throw SWException( "Local memory stream read failed taking full semaphore. rc = 0x%x.", rc); } } else { //good } pthread_mutex_lock(&m_mutex); /*DWORD rc = WaitForSingleObject(m_emptySem, m_writerTimeoutMS); switch (rc) { case WAIT_OBJECT_0: // good break; case WAIT_TIMEOUT: throw SWException("Local memory stream write timed out."); default: throw SWException("Local memory stream write failed taking empty semaphore. 
rc = 0x%x.", rc); } WaitForSingleObject(m_mutex, INFINITE); */ m_frameArray[m_firstFreeElem] = framePtr; // fprintf(stderr, "Set m_frameArray element : %d", m_firstFreeElem); m_firstFreeElem = (m_firstFreeElem + 1) % m_frameArraySize; pthread_mutex_unlock(&m_mutex); printDebug(m_fullSem, "fullSem", m_streamName, "post start"); sem_post(&m_fullSem); printDebug(m_fullSem, "fullSem", m_streamName, "post end"); } int CLocalMemStream::getNumberOfFrames() { pthread_mutex_lock(&m_mutex); int s = (m_firstFreeElem + m_frameArraySize - m_firstElem) % m_frameArraySize; pthread_mutex_unlock(&m_mutex); return s; } int CLocalMemStream::getMaxBufferSize() { return m_maxBufferSize; } void CLocalMemStream::readerOpen(DWORD readerTimeoutMS) { int rc = 0; struct timespec ts; if (clock_gettime(CLOCK_REALTIME, &ts) == -1) { throw SWException("Clock_gettime failed\n."); } ts.tv_sec += readerTimeoutMS; printDebug(m_openingSem, "openingSem", m_streamName, "wait start"); // If readerOpen() is called before writerOpen() we will be stuck on this semaphore. while ((rc = sem_timedwait(&m_openingSem, &ts)) == -1 && errno == EINTR) { continue; } if (rc == -1) { if (errno == ETIMEDOUT) { throw SWException("Local memory stream read timed out."); } else { throw SWException( "Local memory stream read failed taking full semaphore. rc = 0x%x.", rc); } } printDebug(m_openingSem, "openingSem", m_streamName, "wait end"); pthread_mutex_lock(&m_mutex); if (m_readerState != NOT_OPENED) { pthread_mutex_unlock(&m_mutex); throw SWException( "Failed to open a local memory stream for reading. Current state %d.", m_readerState); } m_readerTimeoutMS = readerTimeoutMS; m_readerState = OPENED; pthread_mutex_unlock(&m_mutex); } void CLocalMemStream::readerClose() { pthread_mutex_lock(&m_mutex); if (m_readerState != OPENED) { pthread_mutex_unlock(&m_mutex); throw SWException( "Failed to close a local memory stream for reading. Current state %d.", m_readerState); } m_readerState = CLOSED; pthread_mutex_unlock(&m_mutex); } void CLocalMemStream::writerOpen(const VIDEOINFO &vi, const string & codecstr, int bps, int maxBufferSize, DWORD writerTimeoutMS) { pthread_mutex_lock(&m_mutex); if (m_writerState != NOT_OPENED) { pthread_mutex_unlock(&m_mutex); throw SWException( "Failed to open a local memory stream for reading. Current state %d.", m_writerState); } m_vi = vi; m_codecstr = codecstr; m_bps = bps; m_maxBufferSize = maxBufferSize; m_writerTimeoutMS = writerTimeoutMS; m_writerState = OPENED; // Fix m_maxBufferSize if necessary. if (m_maxBufferSize < 0 || m_maxBufferSize >= m_frameArraySize) { // The maximum capacity is the array's size minus 1. // The array must never be full so that (m_firstElem == m_firstFreeElem) means an // empty list and will not be ambiguous. m_maxBufferSize = m_frameArraySize - 1; } // Create the empty semaphore. This semaphore counts the number of empty slots. When it reaches 0 // a writer who tries to write a frame blocks. 
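	// (The two counting semaphores bound the ring buffer together: m_emptySem starts at
	//  m_maxBufferSize and m_fullSem at 0; write() takes an empty slot and posts a full one,
	//  read() does the reverse. Capping m_maxBufferSize at m_frameArraySize - 1 above keeps
	//  m_firstElem == m_firstFreeElem unambiguous as "the buffer is empty".)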
if (sem_init(&m_emptySem, 0, m_maxBufferSize) < 0) { //CloseHandle(m_mutex); pthread_mutex_destroy(&m_mutex); //m_mutex = NULL; throw SWException("Failed to create full semaphore."); } printDebug(m_emptySem, "emptySem", m_streamName, "init"); pthread_mutex_unlock(&m_mutex); printDebug(m_openingSem, "openingSem", m_streamName, "post start"); sem_post(&m_openingSem); // Allow reader to open stream printDebug(m_openingSem, "openingSem", m_streamName, "wait end"); } void CLocalMemStream::writerClose() { pthread_mutex_lock(&m_mutex); if (m_writerState != OPENED) { pthread_mutex_unlock(&m_mutex); throw SWException( "Failed to close a local memory stream for reading. Current state %d.", m_writerState); } m_writerState = CLOSED; // This ReleaseSemaphore on the fullSem will cause the reader to read a NULL frame once it is done reading the frames in the queue. // The reader, upon reading such a NULL frame will know that the input is finished. //ReleaseSemaphore(m_fullSem, 1, NULL); printDebug(m_fullSem, "fullSem", m_streamName, "post start"); sem_post(&m_fullSem); printDebug(m_fullSem, "fullSem", m_streamName, "post end"); pthread_mutex_unlock(&m_mutex); } bool CLocalMemStream::didReaderAndWriterClose() { return (m_readerState == CLOSED && m_writerState == CLOSED); } void CLocalMemStream::pauseAllStreams() { // This method should NOT be called twice in a row (without a resumeAllStreams() in between). // Taking m_pausingMutex will cause each call to read() and write() to block. //WaitForSingleObject(m_pausingMutex, INFINITE); pthread_mutex_lock(&m_pausingMutex); } void CLocalMemStream::resumeAllStreams() { // Release the read() and write() executions that were blocked. //ReleaseMutex(m_pausingMutex); pthread_mutex_unlock(&m_pausingMutex); } #endif<file_sep>/SVG2Video/SVG2VideoEngineFactory.h #pragma once #include "EngineFactory.h" class SVG2VideoEngineFactory : public EngineFactory { public: Engine *createEngine(); }; <file_sep>/RenderingManager/RenderingManager/include/BaseFolder.h #pragma once //#include <windows.h> #include <string> using namespace std; extern string g_baseFolder; inline const char *getBaseFolder() { return g_baseFolder.c_str(); } <file_sep>/NewEncoder/VideoEncoder/OutputAvi.cpp #include "stdafx.h" #include "OutputAvi.h" OutputAvi::OutputAvi(const char *path, DWORD length, DWORD rate, DWORD scale, DWORD fourCC, DWORD quality, BITMAPINFOHEADER *bih) { AVISTREAMINFO streamInfo; memset(&streamInfo, 0, sizeof(AVISTREAMINFO)); streamInfo.fccType = streamtypeVIDEO; streamInfo.fccHandler = fourCC; streamInfo.dwQuality = quality; streamInfo.dwScale = scale; streamInfo.dwRate = rate; streamInfo.dwLength = length; streamInfo.dwSuggestedBufferSize = 0; streamInfo.rcFrame.left = 0; streamInfo.rcFrame.top = 0; streamInfo.rcFrame.right = bih->biWidth; streamInfo.rcFrame.bottom = abs(bih->biHeight); Init(path, &streamInfo, bih); } OutputAvi::OutputAvi(const char *path, AVISTREAMINFO *streamInfo, BITMAPINFOHEADER *bih) { Init(path, streamInfo, bih); } void OutputAvi::Init(const char *path, AVISTREAMINFO *streamInfo, BITMAPINFOHEADER *bih) { aviout = new AVIOutputFile(); aviout->initOutputStreams(); AVIStreamHeader_fixed *pOutSI = &aviout->videoOut->streamInfo; pOutSI->fccType = streamInfo->fccType; pOutSI->fccHandler = streamInfo->fccHandler; pOutSI->dwFlags = streamInfo->dwFlags; pOutSI->wPriority = streamInfo->wPriority; pOutSI->wLanguage = streamInfo->wLanguage; pOutSI->dwInitialFrames = streamInfo->dwInitialFrames; pOutSI->dwScale = streamInfo->dwScale; pOutSI->dwRate = streamInfo->dwRate; 
pOutSI->dwStart = streamInfo->dwStart; pOutSI->dwLength = streamInfo->dwLength; pOutSI->dwSuggestedBufferSize = streamInfo->dwSuggestedBufferSize; pOutSI->dwQuality = streamInfo->dwQuality; pOutSI->dwSampleSize = streamInfo->dwSampleSize; pOutSI->rcFrame.left = (SHORT)streamInfo->rcFrame.left; pOutSI->rcFrame.top = (SHORT)streamInfo->rcFrame.top; pOutSI->rcFrame.right = (SHORT)streamInfo->rcFrame.right; pOutSI->rcFrame.bottom = (SHORT)streamInfo->rcFrame.bottom; aviout->videoOut->setCompressed(TRUE); aviout->videoOut->allocFormat(bih->biSize); memcpy(aviout->videoOut->getFormat(), bih, bih->biSize); aviout->disable_os_caching(); aviout->init(path, streamInfo->rcFrame.right, streamInfo->rcFrame.bottom, TRUE, FALSE, FALSE, (512 * 1024), FALSE); } OutputAvi::~OutputAvi(void) { aviout->finalize(); delete aviout; } void OutputAvi::writeData(void* data, int dataSize, bool keyframe) { aviout->videoOut->write(keyframe ? AVIIF_KEYFRAME : 0, (char *)data, dataSize, 1); } <file_sep>/VideoDecoder/VideoDecoder/VideoDecoderEngineFactory.cpp #include "VideoDecoder.h" #include "VideoDecoderEngineFactory.h" Engine *VideoDecoderEngineFactory::createEngine() { return new VideoDecoder(); } void VideoDecoderEngineFactory::init() { VideoDecoder::init(m_commonStorage); // Initialize ffmpeg if needed } <file_sep>/utils/CliManager/CliManager.cpp #include <iostream> #include <sstream> #include <iomanip> #ifndef WIN32 #include <unistd.h> #endif #include "SWException.h" #include "CliManager.h" using namespace std; void CliManager::printHelp() { cout << "Commands:" << endl; map<string,CommandInfo *>::iterator iter; cout << setiosflags(ios::left); for(iter = m_commands.begin(); iter != m_commands.end(); iter++ ) { CommandInfo * cmdInf = iter->second; cout << setw(20) << cmdInf->m_command << cmdInf->m_shortHelp << endl; } cout << resetiosflags(ios::left); } bool CliManager::addCommand(string command, string shortHelp, string longHelp, CommandHandler cmdHandler, void *callbackData) { if (command == "at" || command == "?") { // These are special words which have a meaning for the cli manager. They are not valid command names cerr << "Invalid command " << command << "." << endl; return false; } CommandInfo *cmdInf = new CommandInfo(command, shortHelp, longHelp, cmdHandler, callbackData); if (cmdInf == NULL) { cerr << "Failed to add command " << command << " to CliManager." << endl; return false; } if (m_commands[command] != NULL) { cerr << "Command " << command << " cannot be added twice to CliManager." 
<< endl; delete cmdInf; return false; } m_commands[command] = cmdInf; return true; } static const std::string trim(const std::string& pString, const std::string& pWhitespace = " \t") { const size_t beginStr = pString.find_first_not_of(pWhitespace); if (beginStr == std::string::npos) { // no content return ""; } const size_t endStr = pString.find_last_not_of(pWhitespace); const size_t range = endStr - beginStr + 1; return pString.substr(beginStr, range); } static const std::string reduce(const std::string& pString, const std::string& pFill = " ", const std::string& pWhitespace = " \t") { // trim first std::string result(trim(pString, pWhitespace)); // replace sub ranges size_t beginSpace = result.find_first_of(pWhitespace); while (beginSpace != std::string::npos) { const size_t endSpace = result.find_first_not_of(pWhitespace, beginSpace); const size_t range = endSpace - beginSpace; result.replace(beginSpace, range, pFill); const size_t newStart = beginSpace + pFill.length(); beginSpace = result.find_first_of(pWhitespace, newStart); } return result; } void CliManager::run( istream &is ) { bool exitCli = false; m_startTime = clock(); while (!exitCli) { if (m_showPrompt) cout << "> "; // Read next command line // Read char by char // We may block if file is not full string commandPlusArgs; while ( true ) { int c = is.get(); if ( is.eof() ) { // Line end is not written yet. Wait a while and retry. #ifdef WIN32 Sleep(50); #else usleep(50 * 1000); #endif // Reset stream so we can try to read again is.seekg( 0, ios_base::cur ); is.clear(); // Calculate current time clock_t now = clock(); float timeFromStartInSec = ((float)now - (float)m_startTime) / (float)CLOCKS_PER_SEC; // Check for timeout. if(m_timeOutInSeconds > 0 && timeFromStartInSec > m_timeOutInSeconds) { fprintf(m_captureFile, "at %.3f Exiting due to time out\n", timeFromStartInSec); fflush(m_captureFile); cout << "Exiting due to time out" << endl; exit(-23); } continue; } if ( is.fail() ) { throw SWException("Error reading input" ); } if ( c == '\n' ) break; else commandPlusArgs += (char)c; } // Make sure that there are no trailing or leading spaces and no multiple spacing. commandPlusArgs = trim(reduce(commandPlusArgs)); if (commandPlusArgs.empty()) { continue; } // Get the first word of commandPlusArgs. That's the command. string command = commandPlusArgs.substr(0, commandPlusArgs.find(' ')); // Hanndle some special commands if (command == "?") { printHelp(); continue; } // Calculate current time clock_t now = clock(); float timeFromStartInSec = ((float)now - (float)m_startTime) / (float)CLOCKS_PER_SEC; // A command can be given with a time, for example "at 5.43 command ...". // If the time has arrived, execute the command, otherwise sleep. // In any case, peel off the "at" and the time. 
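		// Example of a hypothetical command script this loop accepts (the command
		// names below are placeholders; the real set comes from addCommand()):
		//
		//   at 0.500 start
		//   at 5.430 pause
		//   ?
		//   somecommand ?
		//   at 9.000 exit
		//
		// Lines without an "at <seconds>" prefix run as soon as they are read,
		// "?" lists the registered commands, and "<command> ?" prints that
		// command's long help.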
if (command == "at") { istringstream iss(commandPlusArgs); string atStr, timeStr, rest; getline( iss, atStr , ' '); if (atStr != "at") { throw SWException("Internal error in CLI manager"); } getline( iss, timeStr , ' '); float commandTimeInSec = (float)atof(timeStr.c_str()); getline( iss, rest); commandPlusArgs = rest; command = commandPlusArgs.substr(0, commandPlusArgs.find(' ')); if (commandTimeInSec > timeFromStartInSec) { #ifdef WIN32 Sleep((DWORD)((commandTimeInSec - timeFromStartInSec) * 1000)); #else usleep((DWORD)((commandTimeInSec - timeFromStartInSec) * 1000000)); #endif } now = clock(); timeFromStartInSec = ((float)now - (float)m_startTime) / (float)CLOCKS_PER_SEC; } CommandInfo *cmdInf = m_commands[command]; if (cmdInf == NULL) { cout << "Unrecognized command " + command << endl; continue; } if (commandPlusArgs == (command + " ?")) { cout << command << endl; cout << cmdInf->m_longHelp << endl; continue; } // Log the command to the capture file if (m_captureFile != NULL) { fprintf(m_captureFile, "at %.3f %s\n", timeFromStartInSec, commandPlusArgs.c_str()); fflush(m_captureFile); } cmdInf->m_cmdHandler(commandPlusArgs, cmdInf->m_callbackData, exitCli); } } CliManager::~CliManager() { map<string,CommandInfo *>::iterator iter; for(iter = m_commands.begin(); iter != m_commands.end(); iter++ ) { if (iter->second != NULL) delete iter->second; } m_commands.clear(); } <file_sep>/utils/Utils/Profiler.h #pragma once #include <stdio.h> #include <iostream> #include <map> class Profiler { private: // The following are used for profiling the rendering time __int64 m_queryPerformanceFreq; FILE * m_profilingFile; double *m_profileTimesNoAlphaMS; double *m_profileTimesWithAlphaMS; double *m_profileTimesTotalMS; __int64 m_initTime; __int64 m_renderStartTime; std::string m_profilingConfigFileName; int m_rate, m_scale; __int64 startTime; __int64 stopTime; int m_startFrame; int m_endFrame; bool withAlfa; public: Profiler(std::string filename, std::string profilingConfigFileName, int rate, int scale, int endFrame); ~Profiler(void); void WriteProfileFile(int startFrame, int lastFrameWritten); bool CalcProfilingLimits( double *limits, int &nlimits ); bool GetDoubleFromMap( const std::map<std::string,double> &keyValues, const std::string &key, double &result, bool mustExist ); void startFrameRendering(int frameNum); void stopFrameRendering(int frameNum); void setStartTime(); void setTotalRenderTime(int frameNum); }; <file_sep>/LightSynthesizer/VideoObject.h #pragma once #ifndef __VIDEO_OBJECT_H__ #define __VIDEO_OBJECT_H__ #include "LightSynthesizer.h" #include "VSVideo.h" #include "Logger.h" #include <string> class VideoObject { private: std::string path; int frameRate; int width; int height; double outputFrameRate; VideoStream::CInputVideoStream *videoStream; int currentFrameNum; double zPos; bool opened; ResizeMethod resizeMethod; Logger & logger; int firstFrame; //first frame this object appeared in int lastFrame; //last frame this object appeared in // temp images for resize void *resizeim_src; void *resizeim_dest; // Current frame for this video VideoStream::CFramePtr curFramePtr; public: ~VideoObject(); VideoObject(Logger & logger); VideoObject(Logger & logger, std::string path, ResizeMethod resizeMethod, double zPos); bool Init(TiXmlElement *elem, ResizeMethod rm ); bool create(); VideoStream::CFramePtr getFramePtr(int frame, int width, int height, bool *rc); void close(); void setZPos(double z) {zPos = z;} double getZPos() const {return zPos;} void updateCurFrame( int framenum, int width, int 
height, bool *rc ); VideoStream::CFramePtr getCurFramePtr() { return curFramePtr;} int getCurFrameNum() { return currentFrameNum;} const char *getPath() const { return path.c_str();} int GetFirstFrame() const { return firstFrame; } //first frame this object appeared in int GetLastFrame() const { return lastFrame; } //last frame this object appeared in void AppearedInFrame( int framenum ); // Mark that object appeared in this frame // Sorting by z-position static bool compareObjsByZ( const VideoObject *o1, const VideoObject *o2 ) { return ( o1->getZPos() < o2->getZPos() ); } // Sorting by first frame static bool compareObjsByFirstFrame( const VideoObject *o1, const VideoObject *o2 ) { return ( o1->GetFirstFrame() < o2->GetFirstFrame() ); } private: VideoStream::CMutableFramePtr resizeFrame(VideoStream::CFramePtr orig, int width, int height); }; #endif<file_sep>/RenderingManager/RenderingManager/TestEngineFactory.h #pragma once //#include <windows.h> #include <string> #include "EngineFactory.h" class TestEngineFactory : public EngineFactory { public: // Return a TestEngine object Engine *createEngine(); }; <file_sep>/Fmod/CMakeLists.txt file(GLOB Fmod_SRC "*.cpp" ) link_directories(fmodapi42416linux64/api/lib/) add_executable(Fmod ${Fmod_SRC}) target_include_directories(Fmod PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/ ${CMAKE_CURRENT_SOURCE_DIR}/Effects ${CMAKE_CURRENT_SOURCE_DIR}/Events ${CMAKE_CURRENT_SOURCE_DIR}/Parser ${CMAKE_CURRENT_SOURCE_DIR}/Presets ${CMAKE_CURRENT_SOURCE_DIR}/fmodapi42416linux64/api/inc/ ${CMAKE_CURRENT_SOURCE_DIR}/../utils/Utils/) target_link_libraries(Fmod LINK_PUBLIC fmodex64 TinyXml) <file_sep>/Fmod/Interpolated.h #ifndef __INTERPOLATED_H__ #define __INTERPOLATED_H__ #pragma once #include "afx.h" class Interpolated { protected: double startTime; double endTime; public: virtual std::string getType() =0; double getStartTime() { return startTime; } void setStartTime(double st) { startTime = st; } double getEndTime() { return endTime; } void setEndTime(double et) { endTime = et; } }; #endif <file_sep>/Fmod/Effects/DspEffect.h #ifndef __DSP_EFFECT_H__ #define __DSP_EFFECT_H__ #include "Effect.h" #pragma once class DspEffect : public Effect { protected: std::vector<FMOD::DSP *> dspList; public: std::vector<FMOD::DSP *> getDspList() { return dspList; } void setDspList(std::vector<FMOD::DSP *> dl) { dspList = dl; } virtual void apply(FMOD::Channel *channel) { for (std::vector<FMOD::DSP *>::iterator it = dspList.begin() ; it != dspList.end() ; ++it) { FMOD::DSP *dsp = *it; channel->addDSP(dsp, NULL); } } virtual void apply(FMOD::ChannelGroup *channelGroup) { for (std::vector<FMOD::DSP *>::iterator it = dspList.begin() ; it != dspList.end() ; ++it) { FMOD::DSP *dsp = *it; channelGroup->addDSP(dsp, NULL); } } }; #endif <file_sep>/VideoStream/VideoStream/VSThreadOutput.h #pragma once #include "VSMedia.h" #include <queue> #ifndef WIN32 #include <pthread.h> #include <semaphore.h> #include "CEventManager.h" #endif namespace VideoStream { class CThreadOutput : public IOutputVideoStream { private: IOutputVideoStream *_pStream; frameWrittenCallback _pFrameCallback; void *_pContext; #ifdef WIN32 HANDLE _thread; HANDLE _frameReady; HANDLE _framesDone; HANDLE _queueBuffer; CRITICAL_SECTION _mutex; DWORD _threadId; bool _shouldStop; std::queue<CFramePtr> _queue; unsigned long ThreadRun(); static unsigned long __stdcall ThreadEntryProc( void* pThis ); #else pthread_t _thread; HANDLE _frameReady; HANDLE _framesDone; sem_t _queueBuffer; CRITICAL_SECTION _mutex; volatile bool _shouldStop; 
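	// These members presumably implement the same scheme as the WIN32 branch
	// above: WriteFrame() enqueues frames into _queue under _mutex, _queueBuffer
	// bounds the queue, and the worker started via ThreadEntryProc()/ThreadRun()
	// drains it into the wrapped _pStream until _shouldStop is set (see
	// VSThreadOutput.cpp for the exact signalling).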
std::queue<CFramePtr> _queue; static void* ThreadEntryProc( void* pThis ); #endif protected: public: #ifndef WIN32 DWORD _threadId; unsigned long ThreadRun(); #endif CThreadOutput (IOutputVideoStream *pStream); virtual ~CThreadOutput(); bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams, int streamId); bool WriteFrame (CFramePtr framePtr); bool DeleteUpToFrame (int frameNum) {return _pStream->DeleteUpToFrame(frameNum);} void Close(); bool IsAsync() {return true;} void SetFrameWrittenCallback (frameWrittenCallback callback, void *pContext) {_pFrameCallback = callback; _pContext = pContext;} bool IsPersistent() {return _pStream->IsPersistent();} }; };<file_sep>/RenderingManager/RenderingManager/EngineFactory.cpp #include "EngineFactory.h" #include "Engine.h" #include "RenderingManager.h" using namespace std; Engine *EngineFactory::createEngine(string executionId) { // Call the inheriting class Engine * engine = createEngine(); if (engine == NULL) return NULL; // Set the engine's type and name. engine->m_engineType = m_engineType; engine->m_executionId = executionId; return engine; } <file_sep>/VideoStream/VideoStream/ShmemMedia.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSShmemMedia.h" #include "VSExceptions.h" #include "VSUtils.h" #else #include <stdio.h> #include <string.h> #include "VSShmemMedia.h" #include "VSExceptions.h" #include "VSUtils.h" #endif using namespace VideoStream; // The compression codec that is in use const DWORD compressionCodec = uncompressed; #ifndef WIN32 AVCodecID compressionCodecId = AV_CODEC_ID_NONE; #endif const bool mediaPersistent = false; static std::string GetFrameStringId (LPCTSTR location, int frameNum) { TCHAR prefix[10+1]; #ifdef WIN32 _stprintf_s (prefix, 10, _T("%d"), frameNum); return GetSyncObjName (prefix, location); #else snprintf (prefix, 10, _T("%d"), frameNum); prefix[10+1] = '\0'; return GetSyncObjName (prefix, location); #endif } static std::string GetHeaderStringId (LPCTSTR location) { return GetSyncObjName (_T("Header"), location); } CInputShmemStream::CInputShmemStream() { _pDecompressor = NULL; } CInputShmemStream::~CInputShmemStream() { if (_pDecompressor != NULL) delete _pDecompressor; } bool CInputShmemStream::Open (LPCTSTR location, MediaLocationParams & mlParams) { try { { // Block for locker: SharedData creation and read (to guarantee that we don't // read from shared data before writer finished writing). 
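		// The inner braces form an RAII scope: sharedDataLocker and headerData
		// below are destroyed at the closing brace, presumably releasing the
		// named lock and the header mapping before the decompressor is created.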
std::string dataName = GetHeaderStringId (location); CSharedDataLocker sharedDataLocker( dataName.c_str() ); CSharedData headerData ( dataName.c_str() , mediaPersistent, 0); headerData.Read (&_vi, sizeof (VIDEOINFO), false); } #ifdef WIN32 _pDecompressor = CVideoDecompressor::Create (compressionCodec, _vi); #else _pDecompressor = CVideoDecompressor::Create (compressionCodec, compressionCodecId, _vi); #endif return true; } catch (CSharedDataIOException) { return false; } } const VIDEOINFO &CInputShmemStream::GetVideoInfo() const { return _vi; } CSharedData *CInputShmemStream::GetCurrentFrameData() { try { std::string frameID = GetFrameStringId (GetLocation(), GetCurrentFrameNum()); CSharedData *frameData = new CSharedData (frameID.c_str(), mediaPersistent, 0); return frameData; } catch (CSharedDataIOException) { return NULL; } } bool CInputShmemStream::FastForward (unsigned int frames) { for (unsigned int i=0; i < frames; i++) { CSharedData *pFrameData = GetCurrentFrameData(); if (pFrameData == NULL) return false; delete pFrameData; FrameWasProcessed(); } return true; } CFramePtr CInputShmemStream::GetNextFrame() { CMutableFramePtr framePtr; CSharedData *pFrameData = GetCurrentFrameData(); if (pFrameData == NULL) return framePtr; if (_pDecompressor == NULL) { framePtr = CMutableFramePtr(_vi.width, _vi.height, _vi.bitCount); pFrameData->Read (framePtr.GetData(), framePtr.GetDataSize(), false); } else { int compressedSize; void *pCompressedData = pFrameData->Read (compressedSize, false); #ifdef WIN32 framePtr = _pDecompressor->DecompressFrame (pCompressedData); #else framePtr = _pDecompressor->DecompressFrame (pCompressedData, compressedSize); #endif free (pCompressedData); } delete pFrameData; FrameWasProcessed(); return framePtr; } void CInputShmemStream::Close() { } COutputShmemStream::COutputShmemStream() { _headerData = NULL; _pCompressor = NULL; } COutputShmemStream::~COutputShmemStream() { if (_headerData != NULL) { delete _headerData; _headerData = NULL; std::vector<CSharedData *>::iterator i; for (i = _framesData.begin(); i != _framesData.end(); i++) { CSharedData *frameData = (*i); if (frameData != NULL) { delete frameData; (*i) = NULL; } } } if (_pCompressor != NULL) delete _pCompressor; } bool COutputShmemStream::Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams) { if (_headerData != NULL) return false; #ifdef WIN32 _pCompressor = CVideoCompressor::Create (compressionCodec, vi); #else _pCompressor = CVideoCompressor::Create (compressionCodec, compressionCodecId, vi); #endif { // Block for locker: SharedData creation and write (to guarantee that reader doesn't // read from shared data before we finished writing). 
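		// Mirror image of CInputShmemStream::Open(): the VIDEOINFO header is
		// written while the named locker is held, so a reader taking the same
		// lock (via GetHeaderStringId(location)) never sees a half-written header.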
std::string dataName = GetHeaderStringId (location); CSharedDataLocker sharedDataLocker( dataName.c_str() ); _headerData = new CSharedData ( dataName.c_str(), mediaPersistent, sizeof (VIDEOINFO)); _headerData->Write (&vi, sizeof (VIDEOINFO), false); } return true; } bool COutputShmemStream::WriteFrame (CFramePtr framePtr) { if (_headerData == NULL) return false; std::string frameID = GetFrameStringId (GetLocation(), GetCurrentFrameNum()); CSharedData *frameData; if (_pCompressor == NULL) { frameData = new CSharedData (frameID.c_str(), mediaPersistent, framePtr.GetDataSize()); frameData->Write (framePtr.GetData(), framePtr.GetDataSize(), false); } else { LONG compressedDataSize; void *pCompressedData = _pCompressor->CompressFrame (framePtr, compressedDataSize); frameData = new CSharedData (frameID.c_str(), mediaPersistent, compressedDataSize + sizeof (int)); frameData->WriteWithSize (pCompressedData, compressedDataSize, false); } _framesData.push_back (frameData); FrameWasProcessed(); return true; } bool COutputShmemStream::DeleteUpToFrame (int frameNum) { std::vector<CSharedData *>::iterator i; for (i = _framesData.begin(); i != _framesData.end() && frameNum >= 0; i++, frameNum--) { CSharedData *frameData = (*i); if (frameData != NULL) { delete frameData; (*i) = NULL; } } return true; } void COutputShmemStream::Close() { } <file_sep>/VideoStream/VideoStream/VSSync.h #pragma once #include "VSSharedData.h" #include "VSISync.h" #include <list> #ifndef WIN32 #include <stdio.h> #include <string.h> #include "CEventManager.h" #endif namespace VideoStream { class CSyncObjects { private: HANDLE _hProduceStartEvent; HANDLE _hProduceEndEvent, _hConsumeEndEvent; HANDLE _hNewBufSpaceEvent; HANDLE _hKillEvent; static HANDLE _hGlobalKillEvent; LPTSTR _location; CSharedData _framesProduced, _nConsumers, _framesConsumed; DWORD _timeoutMS; public: CSyncObjects (LPCTSTR location ); virtual ~CSyncObjects(); static bool AlreadyCreated (LPCTSTR location); std::string GetNewFrameEventName (int consumerNum); std::string GetHandshakeMutexName(); HANDLE ProduceStartEvent(); HANDLE ProduceEndEvent(); HANDLE ConsumeEndEvent() {return _hConsumeEndEvent;} HANDLE NewBufSpaceEvent() {return _hNewBufSpaceEvent;} HANDLE KillEvent() {return _hKillEvent;}; CSharedData &FramesProduced() {return _framesProduced;} CSharedData &FramesConsumed() {return _framesConsumed;} CSharedData &NConsumers() {return _nConsumers;} static HANDLE GlobalKillEvent(); int WaitForObjects (int nObjects, HANDLE objects[], DWORD dwTimeout, bool throwIOnTimeout = true ); void WaitForObject (HANDLE object, DWORD dwTimeout ); static void KillStream (LPCTSTR location); static void KillAllStreams(); }; class CInputSync : public IInputSync { private: CSyncObjects *_pSyncObjs; int _framesConsumed, _framesWaited; HANDLE _hNewFrameEvent; DWORD _timeoutMS; CInputSync (LPCTSTR location, DWORD timeoutMS); void WaitForVideo(); int GetConsumerNumber(); void InitConsumer(); static bool IsVideoBeingProduced (LPCTSTR location); public: virtual ~CInputSync(); static CInputSync *Handshake (LPCTSTR location, bool waitForProducer, DWORD timeoutMS); // throws CSyncTimeoutException, CSyncKillException DWORD getTimeoutMS() { return _timeoutMS; } bool WaitForFrame (HANDLE readEndEventHandle); // throws CSyncTimeoutException, CSyncKillException void FrameWasRead(); void Close(); bool HasProducingEnded( DWORD timeoutMs ); void Sleep( DWORD timeoutMs ); }; class COutputSync : public IOutputSync { private: CSyncObjects *_pSyncObjs; std::list<HANDLE> _hNewFrameEvents; int 
_maxBufferSize, _framesProduced; DWORD _timeoutMS; public: COutputSync (LPCTSTR location, int maxBufferSize, DWORD timeoutMS); virtual ~COutputSync(); void WaitForBufferSpace(); // throws CSyncTimeoutException, CSyncKillException void FrameWasWritten(); int HowManyFramesConsumed(); void Started(); DWORD getTimeoutMS() { return _timeoutMS; } void VideoWasComplete(); void WaitForConsumerEnd(); // throws CSyncTimeoutException, CSyncKillException }; }<file_sep>/NewEncoder/VideoEncoder/CMakeLists.txt #file(GLOB VideoEncoder_SRC # "*.cpp" #) add_library(VideoEncoder avc.cpp FFMpegWriter.cpp MoovHeaderWriter.cpp PlaylistWriter.cpp stdafx.cpp VideoEncoder.cpp VideoEncoderEngineFactory.cpp) add_definitions(-D__STDC_CONSTANT_MACROS) if (UNIX) target_include_directories(VideoEncoder PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../../VideoStream/VideoStream/include ${CMAKE_CURRENT_SOURCE_DIR}/../../RenderingManager/RenderingManager/include ${CMAKE_CURRENT_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/../../third_party/include/ffmpeg/include ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/Utils ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/StackWalker/include ${CMAKE_CURRENT_SOURCE_DIR}/) else() target_include_directories(VideoEncoder PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/../../VideoStream/VideoStream/include ${CMAKE_CURRENT_SOURCE_DIR}/WMFSDK9/include ${CMAKE_CURRENT_SOURCE_DIR}/../../RenderingManager/RenderingManager/include ${CMAKE_CURRENT_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/../../ffmpeg/include ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/Utils ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/StackWalker/include ${CMAKE_CURRENT_SOURCE_DIR}/) endif() #target_link_libraries(CliManager LINK_PUBLIC StackWalker) <file_sep>/Fmod/AudioOrigin.cpp #include "AudioOrigin.h" AudioOrigin::AudioOrigin(void) { } AudioOrigin::~AudioOrigin(void) { } <file_sep>/utils/StackWalker/include/SWException.h #pragma once #include <iostream> #include <exception> #include "StackWalker.h" using namespace std; class SWException: public exception, public StackWalker { private: string m_message; string m_stackTrace; void fillStackTrace(); public: SWException(); SWException(string & message); SWException(const char *format, ...); SWException(const SWException & e); virtual ~SWException() throw() {}; SWException & operator= (const SWException & other); const char* what() const throw() { return m_message.c_str(); } const string getStackTrace() const { return m_stackTrace; } virtual void OnOutput(LPCSTR szText, bool isStack) { StackWalker::OnOutput(szText, isStack); if (isStack) m_stackTrace = m_stackTrace.append(szText); } }; <file_sep>/Fmod/Presets/NarrationPreset.h #ifndef __NARRATION_PRESET_H__ #define __NARRATION_PRESET_H__ #include "Preset.h" #include "../Effects/NormalizationDsp.h" class NarrationPreset : public Preset { public: NarrationPreset() { Preset(); } virtual void create(FMOD::System *fmodSystem) { NormalizationDsp *norm = new NormalizationDsp(); norm->create(fmodSystem); norm->setFadeTime(100.0f); norm->setMaxAmp(10.0f); effectDspList.push_back(norm); volume=0.5f; created=true; } }; #endif <file_sep>/VideoStream/VideoStream/VSCompression.h #pragma once #ifdef WIN32 #include "VSFrame.h" #include <vfw.h> namespace VideoStream { // Compression codecs const DWORD huffyuv = mmioFOURCC ('h', 'f', 'y', 'u'); const DWORD lagarith = mmioFOURCC ('L', 'A', 'G', 'S'); const DWORD xvid = mmioFOURCC ('X', 'V', 'I', 'D'); const DWORD MSRLE = mmioFOURCC ('m', 'r', 'l', 'e'); const DWORD h264 = mmioFOURCC ('x', '2', '6', '4'); const DWORD vp62 = mmioFOURCC 
('v', 'p', '6', '2'); const DWORD vp70 = mmioFOURCC ('v', 'p', '7', '0'); const DWORD uncompressed = 0; class CVideoCompressor { private: COMPVARS _compVars; BITMAPINFO _bmpinfo; HIC _hICcompressor; CVideoCompressor (DWORD compressionCodec, const VIDEOINFO &vi); public: static CVideoCompressor *Create (DWORD compressionCodec, const VIDEOINFO &vi) {return (compressionCodec == uncompressed ? NULL : new CVideoCompressor (compressionCodec, vi));} virtual ~CVideoCompressor(); void *CompressFrame (CFramePtr framePtr, LONG &compSize); BITMAPINFO *GetCompressedBitmapInfo(); }; class CVideoDecompressor { private: BITMAPINFO *_pbiInput, *_pbiOutput; HIC _hICdecompressor; const VIDEOINFO _vi; CVideoDecompressor (DWORD compressionCodec, const VIDEOINFO &vi); public: static CVideoDecompressor *Create (DWORD compressionCodec, const VIDEOINFO &vi) {return (compressionCodec == uncompressed ? NULL : new CVideoDecompressor (compressionCodec, vi));} virtual ~CVideoDecompressor(); CMutableFramePtr DecompressFrame (void *pCompressedData); }; } #else #pragma once #include "VSFrame.h" //#include <vfw.h> #include "pthread.h" #ifndef UINT64_C #define UINT64_C(c) (c ## ULL) #endif extern "C" { #include "libavformat/avformat.h" #include "libswscale/swscale.h" #include "libavutil/imgutils.h" #include "libavutil/opt.h" } namespace VideoStream { // Compression codecs //const DWORD huffyuv = mmioFOURCC('h', 'f', 'y', 'u'); const DWORD huffyuv = mmioFOURCC('H', 'F', 'Y', 'U'); const DWORD lagarith = mmioFOURCC('L', 'A', 'G', 'S'); const DWORD xvid = mmioFOURCC('X', 'V', 'I', 'D'); const DWORD MSRLE = mmioFOURCC('m', 'r', 'l', 'e'); const DWORD h264 = mmioFOURCC('x', '2', '6', '4'); const DWORD vp62 = mmioFOURCC('v', 'p', '6', '2'); const DWORD vp70 = mmioFOURCC('v', 'p', '7', '0'); const DWORD uncompressed = 0; class CVideoCompressor { private: AVCodec *m_codec; AVCodecContext* m_codecContext; AVFrame* m_frame; AVPicture m_dstPicture; AVStream* video_st; const VIDEOINFO _vi; static pthread_mutex_t m_mutex; FILE *f; CVideoCompressor(DWORD compressionCodec, AVCodecID codecId, const VIDEOINFO &vi); public: //static CVideoCompressor *Create(DWORD compressionCodec, const VIDEOINFO &vi) static CVideoCompressor *Create(DWORD compressionCodec, AVCodecID codecId, const VIDEOINFO &vi) { return (compressionCodec == uncompressed || codecId == AV_CODEC_ID_NONE ? NULL : new CVideoCompressor(compressionCodec, codecId, vi)); } virtual ~CVideoCompressor(); void *CompressFrame(CFramePtr framePtr, LONG &compSize); void init(AVFormatContext * oc, AVCodecContext* c); static pthread_mutex_t create_mutex(); static void initFFmpeg(); }; class CVideoDecompressor { private: //BITMAPINFO *_pbiInput, *_pbiOutput; HIC _hICdecompressor; const VIDEOINFO _vi; AVCodec *m_codec; AVCodecContext* m_codecContext; AVFrame* m_frame; AVStream* video_st; CVideoDecompressor(DWORD compressionCodec, AVCodecID codecId, const VIDEOINFO &vi); public: static CVideoDecompressor *Create(DWORD compressionCodec, AVCodecID codecId, const VIDEOINFO &vi) { return (/*compressionCodec == uncompressed ||*/ codecId == AV_CODEC_ID_NONE ? 
NULL : new CVideoDecompressor(compressionCodec, codecId, vi)); } virtual ~CVideoDecompressor(); CMutableFramePtr DecompressFrame(void *pCompressedData, int compressedSize); void open_codec_context(AVFormatContext * ic, VIDEOINFO &vi); }; } #endif<file_sep>/Fmod/Events/SetVolumeEvent.h #ifndef __SET_VOLUME_EVENT_H__ #define __SET_VOLUME_EVENT_H__ #include "SyncPointEventDescriptor.h" class SetVolumeEvent : public SyncPointEventDescriptor { private: double value; public: SetVolumeEvent(double v) { value = v; } int apply(FMOD::Channel *channel) { channel->setVolume((float)value); return 0; } double getValue() { return value; } }; #endif <file_sep>/utils/StackWalker/include/WindowDefinitions.h #ifndef WINDOWDEFINITIONS_H_ #define WINDOWDEFINITIONS_H_ #include <stdint.h> #define far #define __stdcall typedef uint64_t DWORD64, *PDWORD64; typedef uint32_t* LPWORD; typedef uint32_t DWORD; typedef char* LPSTR; typedef const char* LPCSTR; typedef char CHAR; typedef int BOOL; typedef void* HANDLE; typedef HANDLE HMODULE; typedef unsigned char BYTE; typedef char TCHAR, *PTCHAR; typedef void far *LPVOID; typedef void *PVOID; typedef DWORD far *LPDWORD; typedef uint64_t ULONGLONG; #define _tcscat_s strcat #define strcpy_s strcpy #define _snprintf_s snprintf #ifndef TRUE #define TRUE 1 #endif #ifndef FALSE #define FALSE 0 #endif #endif #define ERROR_SUCCESS O #define MAX_PATH 260 <file_sep>/NewEncoder/VideoEncoder/WAVReader.cpp #include "WAVReader.h" #include "SWException.h" #pragma warning ( disable : 4996 ) CWAVReader::CWAVReader(Logger & logger) : m_logger(logger) { m_fw = NULL; m_wav = NULL; } bool CWAVReader::Init( char *file_name ) { int i; unsigned int wstat; char obuff[80]; m_fw = NULL; m_wav = NULL; CHUNK_HDR *chk; int sflag; long int rmore; // set defaults g_wdata_in = NULL; g_num_isamp = 0; g_max_isamp = 0; // allocate wav header m_wav = new WAV_HDR; chk = new CHUNK_HDR; if(m_wav==NULL) throw SWException("cant new headers\n"); if(chk==NULL) throw SWException("cant new headers\n"); /* open wav file */ m_fw = fopen(file_name,"rb"); if(m_fw==NULL) throw SWException("cant open wav file\n"); /* read riff/wav header */ wstat = (int)fread((void *)m_wav,sizeof(WAV_HDR),(size_t)1,m_fw); if(wstat!=1) throw SWException("cant read wav\n"); // check format of header for(i=0;i<4;i++) obuff[i] = m_wav->rID[i]; obuff[4] = 0; if(strcmp(obuff,"RIFF")!=0) throw SWException("bad RIFF format\n"); for(i=0;i<4;i++) obuff[i] = m_wav->wID[i]; obuff[4] = 0; if(strcmp(obuff,"WAVE")!=0) throw SWException("bad WAVE format\n"); for(i=0;i<3;i++) obuff[i] = m_wav->fId[i]; obuff[3] = 0; if(strcmp(obuff,"fmt")!=0) throw SWException("bad fmt format\n"); if(m_wav->wFormatTag!=1) throw SWException("bad wav wFormatTag\n"); if( (m_wav->nBitsPerSample != 16) && (m_wav->nBitsPerSample != 8) ){ throw SWException("bad m_wav nBitsPerSample\n"); } // skip over any remaining portion of wav header rmore = m_wav->pcm_header_len - (sizeof(WAV_HDR) - 20); wstat = fseek(m_fw,rmore,SEEK_CUR); if(wstat!=0) throw SWException("cant seek\n"); // read chunks until a 'data' chunk is found sflag = 1; while(sflag!=0){ // check attempts if(sflag>10) throw SWException("too many chunks\n"); // read chunk header wstat = (int)fread((void *)chk,sizeof(CHUNK_HDR),(size_t)1,m_fw); if(wstat!=1) throw SWException("cant read chunk\n"); // check chunk type for(i=0;i<4;i++) obuff[i] = chk->dId[i]; obuff[4] = 0; if(strcmp(obuff,"data")==0) break; // skip over chunk sflag++; wstat = fseek(m_fw,chk->dLen,SEEK_CUR); if(wstat!=0) throw SWException("cant seek\n"); } /* 
find length of remaining data */ wbuff_len = chk->dLen; // find number of samples g_max_isamp = chk->dLen; g_max_isamp /= m_wav->nBitsPerSample / 8; // save demographics fs_hz = (double) (m_wav->nSamplesPerSec); bits_per_sample = m_wav->nBitsPerSample; num_ch = m_wav->nChannels; m_logger.info("Loaded WAV File: %s\n" " Sample Rate = %1.0lf (Hz)\n" " Number of Samples = %ld\n" " Bits Per Sample = %d\n" " Number of Channels = %d\n\n", file_name, fs_hz, g_max_isamp, bits_per_sample, num_ch); return true; } CWAVReader::~CWAVReader() { // be polite - clean up if(m_wav!=NULL) delete m_wav; if ( m_fw ) fclose(m_fw); } int CWAVReader::Read( unsigned char *bfr, int bfr_size, bool *endofbfr ) { int nread = (int)fread(bfr,1,bfr_size,m_fw); wbuff_len-= nread; *endofbfr = ( wbuff_len <= 0 ); return nread; } <file_sep>/utils/Utils/WindowDefinitions.h #ifndef WINDOWDEFINITIONS_H_ #define WINDOWDEFINITIONS_H_ #include <stdint.h> #include <sys/types.h> #define far #define __stdcall #ifndef INT64_C #define INT64_C(c) (c ## LL) #define UINT64_C(c) (c ## ULL) #endif typedef uint64_t DWORD64, *PDWORD64; typedef uint32_t* LPWORD; typedef uint32_t DWORD; typedef char* LPSTR; typedef const char* LPCSTR; typedef char CHAR; typedef int BOOL; typedef void* HANDLE; typedef HANDLE HMODULE; typedef char TCHAR, *PTCHAR; typedef LPCSTR LPCTSTR; typedef LPSTR LPTSTR; typedef long LONG; typedef void far *LPVOID; typedef void *PVOID; typedef DWORD far *LPDWORD; typedef uint64_t ULONGLONG; typedef unsigned short WORD; typedef unsigned char BYTE; typedef void* HANDLE; typedef HANDLE HIC; struct BITMAPINFOHEADER { DWORD biSize; LONG biWidth; LONG biHeight; WORD biPlanes; WORD biBitCount; DWORD biCompression; DWORD biSizeImage; LONG biXPelsPerMeter; LONG biYPelsPerMeter; DWORD biClrUsed; DWORD biClrImportant; }; struct RGBQUAD { BYTE rgbBlue; BYTE rgbGreen; BYTE rgbRed; BYTE rgbReserved; }; struct BITMAPINFO { BITMAPINFOHEADER bmiHeader; RGBQUAD bmiColors[1]; }; //typedef struct _FILETIME { // DWORD dwLowDateTime; // DWORD dwHighDateTime; //} FILETIME, *PFILETIME, *LPFILETIME; #define _tcscat_s strcat #define strcpy_s strcpy #define _snprintf_s snprintf #ifndef TRUE #define TRUE 1 #endif #ifndef FALSE #define FALSE 0 #endif #endif #define ERROR_SUCCESS O #define MAX_PATH 260 #define __T(x) x #define _T(x) __T(x) #define INFINITE 0xFFFFFFFF //VStream def /* constants for the biCompression field */ #define BI_RGB 0L #define BI_RLE8 1L #define BI_RLE4 2L #define BI_BITFIELDS 3L #define BI_JPEG 4L #define BI_PNG 5L #define MAKEFOURCC(ch0, ch1, ch2, ch3) \ ((DWORD)(BYTE)(ch0) | ((DWORD)(BYTE)(ch1) << 8) | \ ((DWORD)(BYTE)(ch2) << 16) | ((DWORD)(BYTE)(ch3) << 24 )) /* MMIO macros */ //#define mmioFOURCC(ch0, ch1, ch2, ch3) MAKEFOURCC(ch0, ch1, ch2, ch3) typedef ulong DWORD_PTR, *PDWORD_PTR; #define ToHex(n) ((BYTE) (((n) > 9) ? 
((n) - 10 + 'A') : ((n) + '0'))) #define MAKEAVICKID(tcc, stream) \ MAKELONG((ToHex((stream) & 0x0f) << 8) | \ (ToHex(((stream) & 0xf0) >> 4)), tcc) #ifndef mmioFOURCC #define mmioFOURCC( ch0, ch1, ch2, ch3 ) \ ( (DWORD)(BYTE)(ch0) | ( (DWORD)(BYTE)(ch1) << 8 ) | \ ( (DWORD)(BYTE)(ch2) << 16 ) | ( (DWORD)(BYTE)(ch3) << 24 ) ) #endif //#ifndef aviTWOCC //#define aviTWOCC(ch0, ch1) ((WORD)(BYTE)(ch0) | ((WORD)(BYTE)(ch1) << 8)) //#endif /* Basic chunk types */ //#define cktypeDIBbits aviTWOCC('d', 'b') <file_sep>/VideoStream/VideoStream/VSExceptions.h #pragma once namespace VideoStream { class CSyncTimeoutException {}; class CSyncKillException {}; class CSharedDataIOException {}; }; <file_sep>/RenderingManager/RenderingManager/Logger.cpp #include "Logger.h" void Logger::printf_adv(bool printHeader, bool verifyNewLine, DebugLevel dbgLvl, const char *format, ...) { if (getDebugLevel() <= dbgLvl) { va_list argList; va_start(argList, format); vprintf_adv(printHeader, verifyNewLine, dbgLvl, format, argList); va_end(argList); } } void Logger::vprintf(DebugLevel dbgLvl, const char *format, va_list argList) { vprintf_adv(true, true, dbgLvl, format, argList); } void Logger::printf(DebugLevel dbgLvl, const char *format, ...) { if (getDebugLevel() <= dbgLvl) { va_list argList; va_start(argList, format); vprintf(dbgLvl, format, argList); va_end(argList); } } void Logger::trace(const char *format, ...) { if (getDebugLevel() <= DebugLevel_TRACE) { va_list argList; va_start(argList, format); vprintf(DebugLevel_TRACE, format, argList); va_end(argList); } } void Logger::info(const char *format, ...) { if (getDebugLevel() <= DebugLevel_INFO) { va_list argList; va_start(argList, format); vprintf(DebugLevel_INFO, format, argList); va_end(argList); } } void Logger::warning(const char *format, ...) { if (getDebugLevel() <= DebugLevel_WARNING) { va_list argList; va_start(argList, format); vprintf(DebugLevel_WARNING, format, argList); va_end(argList); } } void Logger::error(const char *format, ...) 
{ if (getDebugLevel() <= DebugLevel_ERROR) { va_list argList; va_start(argList, format); vprintf(DebugLevel_ERROR, format, argList); va_end(argList); } } void Logger::v_trace(const char *format, va_list argList) { if (getDebugLevel() <= DebugLevel_TRACE) vprintf(DebugLevel_TRACE, format, argList); } void Logger::v_info(const char *format, va_list argList) { if (getDebugLevel() <= DebugLevel_INFO) vprintf(DebugLevel_INFO, format, argList); } void Logger::v_warning(const char *format, va_list argList) { if (getDebugLevel() <= DebugLevel_WARNING) vprintf(DebugLevel_WARNING, format, argList); } void Logger::v_error(const char *format, va_list argList) { if (getDebugLevel() <= DebugLevel_ERROR) vprintf(DebugLevel_ERROR, format, argList); } <file_sep>/Fmod/AudioPart.h #ifndef __AUDIO_PART_H__ #define __AUDIO_PART_H__ #pragma once #include "MixerEnums.h" #include "Effects/Effect.h" #include "Interpolated.h" class AudioPart { private: std::string id; std::string source; double start; double end; double offset; double paceFactor; AudioType type; AudioOrigin origin; AudioSpeaker speaker; AudioRole role; int channelGroup; std::vector<Effect *> effects; std::vector<Interpolated *> volume; // std::vector<Interpolated *> pan; public: AudioPart(void) { channelGroup = -1; } ~AudioPart(void) { for (std::vector<Effect *>::iterator it = effects.begin(); it != effects.end() ; ++it) { Effect *effect = *it; delete effect; } for (std::vector<Interpolated *>::iterator it = volume.begin(); it != volume.end() ; ++it) { Interpolated *vol = *it; delete vol; } } double getStart() { return start; } void setStart(double s) { start = s; } double getEnd() { return end; } void setEnd(double e) { end = e; } std::string getId() { return id; } void setId(std::string i) { id = i; } std::string getSource() { return source; } void setSource(std::string s) { source = s; } AudioType getType() { return type; } void setType(AudioType t) { type = t; } AudioOrigin getOrigin() { return origin; } void setOrigin(AudioOrigin o) { origin = o; } AudioRole getRole() { return role; } void setRole(AudioRole r) { role = r; } AudioSpeaker getSpeaker() { return speaker; } void setSpeaker(AudioSpeaker s) { speaker = s; } int getChannelGroup() { return channelGroup; } void setChannelGroup(int cg) { channelGroup = cg; }; double getOffset() { return offset; } void setOffset(double o) { offset = o; } void addEffect(Effect *effect) { effects.push_back(effect); } std::vector<Effect *> getEffects() { return effects; } void setEffects(std::vector<Effect *> e) { effects = e; } double getPaceFactor() { return paceFactor; } void setPaceFactor(double pf) { paceFactor = pf; } std::vector<Interpolated *> getVolume() { return volume; } void setVolume(std::vector<Interpolated *> v) { volume = v; } }; #endif <file_sep>/SVG2Video/SVG2VideoArgs.h #ifndef __EVEV_CmdArgs_H #define __EVEV_CmdArgs_H #include <string> #include <vector> // CommandLine parser class SVG2VideoArgs { public: SVG2VideoArgs() { m_outWidth = 100; m_outHeight = 100; m_out_rate = 1; m_out_scale = 1; m_dbg = false; m_minBitDepth = 24; m_seconds = 1; m_svgServerHost = "127.0.0.1:50051"; m_useFrameCache = false; m_timeFromStartToCache = 0; } std::string m_inFile; std::string m_outFile; int m_outWidth; int m_outHeight; int m_out_rate; int m_out_scale; double m_seconds; std::string m_profilingFileName; std::string m_profilingConfigFileName; std::string m_svgServerHost; bool m_dbg; int m_minBitDepth; bool m_useFrameCache; int m_timeFromStartToCache; }; #endif <file_sep>/Fmod/Effects/HiPassDsp.h #ifndef 
__HI_PASS_DSP_H__ #define __HI_PASS_DSP_H__ #include "DspEffect.h" #pragma once class HiPassDsp : public DspEffect { private: FMOD::DSP *dsp; public: void create(FMOD::System *fmodSystem) { fmodSystem->createDSPByType(FMOD_DSP_TYPE_HIGHPASS, &dsp); dspList.push_back(dsp); } void setCutoff(float factor) { dsp->setParameter(FMOD_DSP_HIGHPASS_CUTOFF, factor); } void setResonance(float factor) { dsp->setParameter(FMOD_DSP_HIGHPASS_RESONANCE, factor); } }; #endif <file_sep>/utils/CliManager/CMakeLists.txt file(GLOB CliManager_SRC "*.cpp" ) add_library(CliManager ${CliManager_SRC}) target_include_directories(CliManager PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/ ${CMAKE_CURRENT_SOURCE_DIR}/../StackWalker/include/) #target_link_libraries(CliManager LINK_PUBLIC StackWalker)<file_sep>/Fmod/Effects/Effect.h #ifndef __EFFECT_H__ #define __EFFECT_H__ #pragma once class Effect { public: virtual void apply(FMOD::Channel *channel){} virtual void apply(FMOD::ChannelGroup *channelGroup){} virtual void create(FMOD::System *fmodSystem){} }; #endif<file_sep>/VideoStream/VideoStream/AVIFileMedia.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSAVIFileMedia.h" #include "VSFrame.h" #include "VSLog.h" #include <stdio.h> #include <Share.h> //#include <MMSYSTEM.H> //#include <AVIFmt.h> using namespace VideoStream; // The compression codec that is in use const DWORD defaultCompressionCodec = lagarith; // RIFF constants const DWORD dwRiffId = mmioFOURCC ('R', 'I', 'F', 'F'); const DWORD dwRiffListId = mmioFOURCC ('L', 'I', 'S', 'T'); const DWORD dwFrameChunkId = MAKEAVICKID (cktypeDIBbits, 0); const int riffChunkOverhead = sizeof (DWORD) * 2; const int riffListOverhead = sizeof (DWORD) * 3; struct RIFFHEADER { DWORD riffId; DWORD size; DWORD type; }; /*********************************/ /* BITMAP construction functions */ /*********************************/ static void GetBitmapInfo (BITMAPINFOHEADER &bmphdr, int bitCount, int width, int height) { memset (&bmphdr, 0, sizeof (bmphdr)); bmphdr.biSize = sizeof (BITMAPINFOHEADER); bmphdr.biWidth = width; bmphdr.biHeight = height; bmphdr.biPlanes = 1; bmphdr.biBitCount = bitCount; bmphdr.biCompression = BI_RGB; bmphdr.biSizeImage = CFrame::GetFrameSize (width, height, bitCount); } /*****************/ /* CInputAVIFile */ /*****************/ static bool ReadChunkHeader (FILE *fp, DWORD &chunkId, DWORD &chunkSize, DWORD expectedChunkId) { if (fread (&chunkId, 1, sizeof (DWORD), fp) == sizeof (DWORD)) { if (expectedChunkId != NULL && chunkId != expectedChunkId) return false; return (fread (&chunkSize, 1, sizeof (DWORD), fp) == sizeof (DWORD)); } else return false; } static bool ReadChunkData (FILE *fp, size_t chunkSize, void *pChunk) { if (fread (pChunk, 1, chunkSize, fp) != chunkSize) return false; if (chunkSize%2 == 1) // align to 2-bytes fseek (fp, 1, SEEK_CUR); return true; } static bool ReadChunk (FILE *fp, void *pChunk, DWORD expectedChunkId) { DWORD chunkId, chunkSize; if (ReadChunkHeader (fp, chunkId, chunkSize, expectedChunkId)) return ReadChunkData (fp, chunkSize, pChunk); else return false; } static void *ReadChunk (FILE *fp, DWORD expectedChunkId, DWORD &chunkSize) { DWORD chunkId; if (ReadChunkHeader (fp, chunkId, chunkSize, expectedChunkId)) { void *pChunk = new BYTE[chunkSize]; if (ReadChunkData (fp, chunkSize, pChunk)) return pChunk; } return NULL; } static bool SkipChunk (FILE *fp, DWORD expectedChunkId) { DWORD chunkId, chunkSize; if (ReadChunkHeader (fp, chunkId, chunkSize, expectedChunkId)) { chunkSize = chunkSize + (chunkSize%2); // Align to 2-bytes fseek 
(fp, chunkSize, SEEK_CUR); return true; } else return false; } static bool ReadMainStreamHeader (FILE *fp, unsigned int &dwRate, DWORD &dwCodec) { AVIStreamHeader strhdr; if (!ReadChunk (fp, &strhdr, ckidSTREAMHEADER)) return false; dwRate = strhdr.dwRate; dwCodec = strhdr.fccHandler; return true; } static BITMAPINFO *ReadStreamFormat (FILE *fp) { DWORD chunkSize; return (BITMAPINFO *) ReadChunk (fp, ckidSTREAMFORMAT, chunkSize); } static BITMAPINFO *ReadStreamHeaders (FILE *fp, unsigned int &dwRate, unsigned int &bitCount, unsigned int &width, unsigned int &height, DWORD &dwCodec) { /* * Read the 'strl' RIFF list which contains * a. 'strh' chunk - the video stream header * b. 'strf' chunk - the video stream format */ // Jump over the 'strl' list header fseek (fp, riffListOverhead, SEEK_CUR); // Read the 'strh' chunk if (!ReadMainStreamHeader (fp, dwRate, dwCodec)) return NULL; // Read the 'strf' chunk BITMAPINFO *pbmphdr = ReadStreamFormat (fp); width = pbmphdr->bmiHeader.biWidth; height = pbmphdr->bmiHeader.biHeight; bitCount = pbmphdr->bmiHeader.biBitCount; return pbmphdr; } static BITMAPINFO *ReadAVIHeaders (FILE *fp, VIDEOINFO &vi, DWORD &dwCodec) { /* * Read the 'hdrl' RIFF list which contains * 1. 'avih' chunk - the main AVI header * 2. 'strl' list which contains * a. 'strh' chunk - the video stream header * b. 'strf' chunk - the video stream format */ // Skip over the 'hdrl' list header and the 'avih' chunk fseek (fp, riffListOverhead + riffChunkOverhead + sizeof (MainAVIHeader), SEEK_CUR); // Read the 'strl' list return ReadStreamHeaders (fp, vi.frameRate, vi.bitCount, vi.width, vi.height, dwCodec); } static BITMAPINFO *ReadRIFFHeaders (FILE *fp, VIDEOINFO &vi, DWORD &dwCodec) { // Read all relevant RIFF headers // See: http://msdn2.microsoft.com/en-us/library/ms779636(VS.85).aspx // Skip over the main 'RIFF' header fseek (fp, sizeof (RIFFHEADER), SEEK_SET); // Read the AVI headers return ReadAVIHeaders (fp, vi, dwCodec); } CInputAVIFile::CInputAVIFile() : CBaseInputVideoStream() { _fp = NULL; _pDecompressor = NULL; } bool CInputAVIFile::Open (LPCTSTR location, MediaLocationParams & mlParams) { _fp = _tfsopen (location, _T("rb"), _SH_DENYNO); if (_fp == NULL) return false; // Read the video information from the RIFF headers DWORD dwCodec; BITMAPINFO *pbiInput = ReadRIFFHeaders (_fp, _vi, dwCodec); if (pbiInput == NULL) { fclose (_fp); return false; } free (pbiInput); // Move the stream pointer to the first frame fseek (_fp, AVI_HEADERSIZE + riffListOverhead, SEEK_SET); // Initialize decompression _pDecompressor = CVideoDecompressor::Create (dwCodec, _vi); return true; } void CInputAVIFile::Close() { if (_fp != NULL) { if (_pDecompressor != NULL) delete _pDecompressor; fclose (_fp); _fp = NULL; } } CInputAVIFile::~CInputAVIFile() { Close(); } const VIDEOINFO &CInputAVIFile::GetVideoInfo() const { return _vi; } bool CInputAVIFile::FastForward (unsigned int frames) { for (unsigned int i=0; i < frames; i++) { if (SkipChunk (_fp, dwFrameChunkId) == false) return false; LogTime (_T("Skipped frame")); FrameWasProcessed(); } return true; } CFramePtr CInputAVIFile::GetNextFrame() { CMutableFramePtr framePtr; LogTime (_T("Start reading")); if (_pDecompressor == NULL) // no compression { framePtr = CMutableFramePtr(_vi.width, _vi.height, _vi.bitCount); if (ReadChunk (_fp, framePtr.GetData(), dwFrameChunkId) == false) { // end of file framePtr.setNull(); return framePtr; } } else { // Read compressed frame DWORD dataSize; void *pData = ReadChunk (_fp, dwFrameChunkId, dataSize); if (pData == 
NULL) return framePtr; // end of file LogTime (_T("Start decompress")); // Decompress the frame framePtr = _pDecompressor->DecompressFrame (pData); delete[] pData; } FrameWasProcessed(); return framePtr; } /******************/ /* COutputAVIFile */ /******************/ static void WriteRIFFHeader (FILE *fp, DWORD size) { RIFFHEADER riffHeader; riffHeader.riffId = dwRiffId; riffHeader.size = size; riffHeader.type = formtypeAVI; fwrite (&riffHeader, 1, sizeof (riffHeader), fp); } static void WriteChunkHeader (FILE *fp, DWORD chunkId, DWORD chunkSize) { fwrite (&chunkId, 1, sizeof (DWORD), fp); fwrite (&chunkSize, 1, sizeof (DWORD), fp); } static int WriteChunk (FILE *fp, DWORD chunkId, DWORD chunkSize, const void *pChunk) { WriteChunkHeader (fp, chunkId, chunkSize); fwrite (pChunk, 1, chunkSize, fp); if (chunkSize%2 == 1) { BYTE pad = 0; fwrite (&pad, 1, 1, fp); chunkSize++; } return (chunkSize + riffChunkOverhead); } static void WriteListHeader (FILE *fp, DWORD listSize, DWORD listId) { fwrite (&dwRiffListId, 1, sizeof (DWORD), fp); DWORD realListSize = listSize + sizeof (DWORD); // the listId is counted in the size of the list fwrite (&realListSize, 1, sizeof (DWORD), fp); fwrite (&listId, 1, sizeof (DWORD), fp); } static void WriteMainAVIHeader (FILE *fp, DWORD dwMicroSecPerFrame, int width, int height, int nFrames) { MainAVIHeader header; memset (&header, 0, sizeof (header)); header.dwMicroSecPerFrame = dwMicroSecPerFrame; header.dwFlags = 0x800 /* = AVIF_TRUSTCKTYPE*/ | AVIF_HASINDEX; header.dwStreams = 1; header.dwWidth = width; header.dwHeight = height; header.dwTotalFrames = nFrames; WriteChunk (fp, ckidAVIMAINHDR, sizeof (header), &header); } static void WriteMainStreamHeader (FILE *fp, DWORD dwRate, DWORD dwSuggestedBufferSize, int width, int height, int nFrames, DWORD dwCodec) { AVIStreamHeader strhdr; memset (&strhdr, 0, sizeof (strhdr)); strhdr.fccType = streamtypeVIDEO; strhdr.fccHandler = dwCodec; strhdr.dwScale = 1; strhdr.dwLength = nFrames; strhdr.dwQuality = -1; strhdr.dwRate = dwRate; strhdr.dwSuggestedBufferSize = dwSuggestedBufferSize; //strhdr.dwSampleSize = dwSuggestedBufferSize; SetRect(&strhdr.rcFrame, 0, 0, width, height); WriteChunk (fp, ckidSTREAMHEADER, sizeof (strhdr), &strhdr); } static void WriteStreamFormat (FILE *fp, BITMAPINFO *pbmphdr) { WriteChunk (fp, ckidSTREAMFORMAT, pbmphdr->bmiHeader.biSize, pbmphdr); } static DWORD GetFccHandler (HIC hIC) { ICINFO info; ICGetInfo (hIC, &info, sizeof (info)); return info.fccHandler; } static void WriteStreamHeaders (FILE *fp, DWORD dwRate, int nFrames, BITMAPINFO *pbmphdr) { /* * Write the 'strl' RIFF list which contains * a. 'strh' chunk - the video stream header * b. 'strf' chunk - the video stream format */ // Write the 'strl' list header int streamHeadersSize = sizeof (AVIStreamHeader) + pbmphdr->bmiHeader.biSize + 2*riffChunkOverhead; // 'strh', 'strf' chunks WriteListHeader (fp, streamHeadersSize, listtypeSTREAMHEADER); // Write the 'strh' chunk WriteMainStreamHeader (fp, dwRate, 0, pbmphdr->bmiHeader.biWidth, pbmphdr->bmiHeader.biHeight, nFrames, pbmphdr->bmiHeader.biCompression); // Write the 'strf' chunk WriteStreamFormat (fp, pbmphdr); } static void WriteAVIHeaders (FILE *fp, const VIDEOINFO &vi, int nFrames, BITMAPINFO *pCompBmpHdr) { /* * Write the 'hdrl' RIFF list which contains * 1. 'avih' chunk - the main AVI header * 2. 'strl' list which contains * a. 'strh' chunk - the video stream header * b. 
'strf' chunk - the video stream format */ // Write the 'hdrl' list header int AVIHeadersSize = sizeof (MainAVIHeader) + sizeof (AVIStreamHeader) + pCompBmpHdr->bmiHeader.biSize + riffListOverhead // 'strl' list + 3*riffChunkOverhead; // 'avih', 'strh', 'strf' chunks WriteListHeader (fp, AVIHeadersSize, listtypeAVIHEADER); // Write the 'avih' chunk DWORD dwMicroSecPerFrame = 1000000 / vi.frameRate; WriteMainAVIHeader (fp, dwMicroSecPerFrame, vi.width, vi.height, nFrames); // Write the 'strl' list WriteStreamHeaders (fp, vi.frameRate, nFrames, pCompBmpHdr); } static void WriteJunkChunk (FILE *fp) { int junkPadding = AVI_HEADERSIZE - ftell (fp) - riffChunkOverhead; BYTE junk[AVI_HEADERSIZE]; memset (junk, 0, junkPadding); WriteChunk (fp, ckidAVIPADDING, junkPadding, junk); } static void WriteRIFFHeaders (FILE *fp, const VIDEOINFO &vi, BITMAPINFO *pCompBmpHdr, int movieSize = 0, int nFrames = 0) { // Write all relevant RIFF headers // See: http://msdn2.microsoft.com/en-us/library/ms779636(VS.85).aspx // Write the main 'RIFF' header int fileSize = movieSize + sizeof (DWORD) + AVI_HEADERSIZE + riffChunkOverhead + nFrames * sizeof (AVIINDEXENTRY); WriteRIFFHeader (fp, fileSize); // Write the AVI headers WriteAVIHeaders (fp, vi, nFrames, pCompBmpHdr); // Fill with junk up to 2K WriteJunkChunk (fp); // Start the main movie list WriteListHeader (fp, movieSize, listtypeAVIMOVIE); } COutputAVIFile::COutputAVIFile() : CBaseOutputVideoStream() { _fp = NULL; _pCompressor = NULL; } bool COutputAVIFile::Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams) { // maxBufferSize and timeoutMS are ignored. They are relevant only if sync object is bypassed (which is not the case here). _vi = vi; // Open file _fp = _tfsopen (location, _T("wb"), _SH_DENYWR); if (_fp == NULL) return false; // Initialize compressor DWORD useCodec = GetDefaultCodec(); // Default codec // Override codec if requested so if ( mlParams.codecstr.compare( "h264" ) == 0 ) useCodec = h264; else if ( mlParams.codecstr.compare( "vp6" ) == 0 ) useCodec = vp62; else if ( mlParams.codecstr.compare( "vp7" ) == 0 ) useCodec = vp70; _pCompressor = CVideoCompressor::Create (useCodec, vi); // Write headers BITMAPINFO *pCompBmpHdr = GetOutputBitmapInfo(); WriteRIFFHeaders (_fp, vi, pCompBmpHdr); free (pCompBmpHdr); fflush (_fp); _lastIndex = sizeof (DWORD); return true; } static void WriteAVIIndex (FILE *fp, const IndicesType &index) { WriteChunkHeader (fp, ckidAVINEWINDEX, sizeof (AVIINDEXENTRY) * (int)index.size()); AVIINDEXENTRY idxEntry; idxEntry.ckid = dwFrameChunkId; IndicesType::const_iterator i; for (i = index.begin(); i != index.end(); i++) { const FRAMEINFO &info = (*i); idxEntry.dwChunkLength = info.length; idxEntry.dwChunkOffset = info.offset; idxEntry.dwFlags = (info.isKeyFrame ? 
AVIIF_KEYFRAME : 0); fwrite (&idxEntry, 1, sizeof (idxEntry), fp); } } static void UpdateAVISizes (FILE *fp, const VIDEOINFO &vi, BITMAPINFO *pCompBmpHdr, int movieSize, int nFrames) { fseek (fp, 0, SEEK_SET); WriteRIFFHeaders (fp, vi, pCompBmpHdr, movieSize, nFrames); } static void CloseAVIFile (FILE *fp, const VIDEOINFO &vi, BITMAPINFO *pCompBmpHdr, const IndicesType &index, int movieSize) { WriteAVIIndex (fp, index); UpdateAVISizes (fp, vi, pCompBmpHdr, movieSize, (int)index.size()); fclose (fp); } BITMAPINFO *COutputAVIFile::GetOutputBitmapInfo() { if (_pCompressor == NULL) { BITMAPINFO *pbmpinfo = (BITMAPINFO *) malloc (sizeof (BITMAPINFOHEADER)); GetBitmapInfo (pbmpinfo->bmiHeader, _vi.bitCount, _vi.width, _vi.height); return pbmpinfo; } else return _pCompressor->GetCompressedBitmapInfo(); } void COutputAVIFile::Close() { if (_fp != NULL) { BITMAPINFO *pCompBmpHdr = GetOutputBitmapInfo(); CloseAVIFile (_fp, _vi, pCompBmpHdr, _index, _lastIndex - sizeof (DWORD)); free (pCompBmpHdr); _fp = NULL; if (_pCompressor != NULL) delete _pCompressor; } } COutputAVIFile::~COutputAVIFile() { Close(); } bool COutputAVIFile::WriteFrame (CFramePtr framePtr) { LogTime (_T("Write start")); const void *pData = NULL; DWORD dataSize = 0; BOOL keyFrame = TRUE; if (_pCompressor == NULL) { pData = framePtr.GetData(); dataSize = framePtr.GetDataSize(); } else { // Compress frame LONG compSize = framePtr.GetDataSize(); pData = _pCompressor->CompressFrame (framePtr, compSize); dataSize = compSize; LogTime (_T("Compressed")); } // Write frame int chunkSize = WriteChunk (_fp, dwFrameChunkId, dataSize, pData); fflush (_fp); // Add index for frame FRAMEINFO info; info.isKeyFrame = (keyFrame != FALSE ? true : false); info.length = dataSize; info.offset = _lastIndex; _index.push_back (info); // Update last index offset _lastIndex += chunkSize; FrameWasProcessed(); return true; } DWORD COutputAVIFile::GetDefaultCodec() { return defaultCompressionCodec; } #else #include "VSAVIFileMedia.h" #include "VSFrame.h" #include "VSLog.h" #include <stdio.h> #include <fcntl.h> #include "AVIFmt.h" #include "SWException.h" using namespace VideoStream; // The compression codec that is in use const DWORD defaultCompressionCodec = huffyuv; //lagarith; const AVCodecID defaultCompressionCodecId = AV_CODEC_ID_HUFFYUV; // RIFF constants const DWORD dwRiffId = mmioFOURCC('R', 'I', 'F', 'F'); const DWORD dwRiffListId = mmioFOURCC('L', 'I', 'S', 'T'); const DWORD dwFrameChunkId = MAKEAVICKID(cktypeDIBbits, 0); const int riffChunkOverhead = sizeof(DWORD) * 2; const int riffListOverhead = sizeof(DWORD) * 3; struct RIFFHEADER { DWORD riffId; DWORD size; DWORD type; }; /*********************************/ /* BITMAP construction functions */ /*********************************/ static void GetBitmapInfo(BITMAPINFOHEADER &bmphdr, int bitCount, int width, int height) { memset(&bmphdr, 0, sizeof(bmphdr)); bmphdr.biSize = sizeof(BITMAPINFOHEADER); bmphdr.biWidth = width; bmphdr.biHeight = height; bmphdr.biPlanes = 1; bmphdr.biBitCount = bitCount; bmphdr.biCompression = BI_RGB; bmphdr.biSizeImage = CFrame::GetFrameSize(width, height, bitCount); } /*****************/ /* CInputAVIFile */ /*****************/ static bool ReadChunkHeader(FILE *fp, DWORD &chunkId, DWORD &chunkSize, DWORD expectedChunkId) { if (fread(&chunkId, 1, sizeof(DWORD), fp) == sizeof(DWORD)) { if (expectedChunkId != NULL && chunkId != expectedChunkId) return false; return (fread(&chunkSize, 1, sizeof(DWORD), fp) == sizeof(DWORD)); } else return false; } static bool ReadChunkData(FILE 
*fp, size_t chunkSize, void *pChunk) { if (fread(pChunk, 1, chunkSize, fp) != chunkSize) return false; if (chunkSize % 2 == 1) // align to 2-bytes fseek(fp, 1, SEEK_CUR); return true; } static bool ReadChunk(FILE *fp, void *pChunk, DWORD expectedChunkId) { DWORD chunkId, chunkSize; if (ReadChunkHeader(fp, chunkId, chunkSize, expectedChunkId)) return ReadChunkData(fp, chunkSize, pChunk); else return false; } static void *ReadChunk(FILE *fp, DWORD expectedChunkId, DWORD &chunkSize) { DWORD chunkId; if (ReadChunkHeader(fp, chunkId, chunkSize, expectedChunkId)) { void *pChunk = new BYTE[chunkSize]; if (ReadChunkData(fp, chunkSize, pChunk)) return pChunk; } return NULL; } static bool SkipChunk(FILE *fp, DWORD expectedChunkId) { DWORD chunkId, chunkSize; if (ReadChunkHeader(fp, chunkId, chunkSize, expectedChunkId)) { chunkSize = chunkSize + (chunkSize % 2); // Align to 2-bytes fseek(fp, chunkSize, SEEK_CUR); return true; } else return false; } static bool ReadMainStreamHeader(FILE *fp, unsigned int &dwRate, DWORD &dwCodec) { AVIStreamHeader strhdr; if (!ReadChunk(fp, &strhdr, ckidSTREAMHEADER)) return false; dwRate = strhdr.dwRate; dwCodec = strhdr.fccHandler; return true; } static BITMAPINFO *ReadStreamFormat(FILE *fp) { DWORD chunkSize; return (BITMAPINFO *)ReadChunk(fp, ckidSTREAMFORMAT, chunkSize); } static BITMAPINFO *ReadStreamHeaders(FILE *fp, unsigned int &dwRate, unsigned int &bitCount, unsigned int &width, unsigned int &height, DWORD &dwCodec) { /* * Read the 'strl' RIFF list which contains * a. 'strh' chunk - the video stream header * b. 'strf' chunk - the video stream format */ // Jump over the 'strl' list header fseek(fp, riffListOverhead, SEEK_CUR); // Read the 'strh' chunk if (!ReadMainStreamHeader(fp, dwRate, dwCodec)) return NULL; // Read the 'strf' chunk BITMAPINFO *pbmphdr = ReadStreamFormat(fp); width = pbmphdr->bmiHeader.biWidth; height = pbmphdr->bmiHeader.biHeight; bitCount = pbmphdr->bmiHeader.biBitCount; return pbmphdr; } static BITMAPINFO *ReadAVIHeaders(FILE *fp, VIDEOINFO &vi, DWORD &dwCodec) { /* * Read the 'hdrl' RIFF list which contains * 1. 'avih' chunk - the main AVI header * 2. 'strl' list which contains * a. 'strh' chunk - the video stream header * b. 
'strf' chunk - the video stream format */ // Skip over the 'hdrl' list header and the 'avih' chunk fseek(fp, riffListOverhead + riffChunkOverhead + sizeof(MainAVIHeader), SEEK_CUR); // Read the 'strl' list return ReadStreamHeaders(fp, vi.frameRate, vi.bitCount, vi.width, vi.height, dwCodec); } static BITMAPINFO *ReadRIFFHeaders(FILE *fp, VIDEOINFO &vi, DWORD &dwCodec) { // Read all relevant RIFF headers // See: http://msdn2.microsoft.com/en-us/library/ms779636(VS.85).aspx // Skip over the main 'RIFF' header fseek(fp, sizeof(RIFFHEADER), SEEK_SET); // Read the AVI headers return ReadAVIHeaders(fp, vi, dwCodec); } CInputAVIFile::CInputAVIFile() : CBaseInputVideoStream() { _fp = NULL; _pDecompressor = NULL; ic = NULL; } bool CInputAVIFile::Open(const char* location, MediaLocationParams & mlParams) { AVCodecID codecId = AV_CODEC_ID_HUFFYUV; DWORD dwCodec; //open input file and allcocate format context int err = avformat_open_input(&ic, location, NULL, NULL); if (err < 0) { codecId = AV_CODEC_ID_NONE; dwCodec = uncompressed; fprintf(stderr, "Could not open source file %s\n", location); } // Initialize decompression _pDecompressor = CVideoDecompressor::Create(dwCodec, codecId, _vi); if (_pDecompressor) { _pDecompressor->open_codec_context(ic, _vi); av_dump_format(ic, 0, location, 0); } else { fprintf(stderr, "_pDecompressor == NULL\n"); _fp = fopen(location, _T("rb")); //, _SH_DENYNO); if (_fp == NULL) return false; // Read the video information from the RIFF headers BITMAPINFO *pbiInput = ReadRIFFHeaders(_fp, _vi, dwCodec); if (pbiInput == NULL) { fclose(_fp); return false; } free(pbiInput); // Move the stream pointer to the first frame fseek(_fp, AVI_HEADERSIZE + riffListOverhead, SEEK_SET); } return true; } void CInputAVIFile::Close() { if (_fp != NULL) { if (_pDecompressor != NULL) delete _pDecompressor; fclose(_fp); _fp = NULL; } if (ic) { avformat_close_input(&ic); ic = NULL; } } CInputAVIFile::~CInputAVIFile() { Close(); } const VIDEOINFO &CInputAVIFile::GetVideoInfo() const { return _vi; } bool CInputAVIFile::FastForward(unsigned int frames) { if (_pDecompressor == NULL) { fprintf(stderr, "FastForward : _pDecompressor == NULL\n"); for (unsigned int i = 0; i < frames; i++) { if (SkipChunk(_fp, dwFrameChunkId) == false) return false; LogTime(_T("Skipped frame")); FrameWasProcessed(); } } else { LogTime(_T("Skipped frame")); FrameWasProcessed(); } return true; } CFramePtr CInputAVIFile::GetNextFrame() { CMutableFramePtr framePtr; LogTime(_T("Start reading")); if (_pDecompressor == NULL) // no compression { framePtr = CMutableFramePtr(_vi.width, _vi.height, _vi.bitCount); if (ReadChunk(_fp, framePtr.GetData(), dwFrameChunkId) == false) { // end of file framePtr.setNull(); return framePtr; } } else { // Read compressed frame DWORD dataSize; //void *pData = ReadChunk(_fp, dwFrameChunkId, dataSize); AVPacket pkt; av_init_packet(&pkt); pkt.data = NULL; pkt.size = 0; int ret = av_read_frame(ic, &pkt); if (ret < 0) return framePtr; // end of file LogTime(_T("Start decompress")); dataSize = pkt.size; void* pData = av_malloc(dataSize); memcpy(pData, pkt.data, dataSize); //cpy??? 
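// Note on the copy above: av_read_frame() returns a packet that owns its payload,
// and av_free_packet() below releases pkt.data, so the payload is duplicated into a
// separately allocated buffer first; the decompressor then works on data that
// outlives the packet.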
av_free_packet(&pkt); // Decompress the frame framePtr = _pDecompressor->DecompressFrame(pData, dataSize); delete[] pData; } FrameWasProcessed(); return framePtr; } /******************/ /* COutputAVIFile */ /******************/ static void WriteRIFFHeader(FILE *fp, DWORD size) { RIFFHEADER riffHeader; riffHeader.riffId = dwRiffId; riffHeader.size = size; riffHeader.type = formtypeAVI; fwrite(&riffHeader, 1, sizeof(riffHeader), fp); } static void WriteChunkHeader(FILE *fp, DWORD chunkId, DWORD chunkSize) { fwrite(&chunkId, 1, sizeof(DWORD), fp); fwrite(&chunkSize, 1, sizeof(DWORD), fp); } static int WriteChunk(FILE *fp, DWORD chunkId, DWORD chunkSize, const void *pChunk) { WriteChunkHeader(fp, chunkId, chunkSize); fwrite(pChunk, 1, chunkSize, fp); if (chunkSize % 2 == 1) { BYTE pad = 0; fwrite(&pad, 1, 1, fp); chunkSize++; } return (chunkSize + riffChunkOverhead); } static void WriteListHeader(FILE *fp, DWORD listSize, DWORD listId) { fwrite(&dwRiffListId, 1, sizeof(DWORD), fp); DWORD realListSize = listSize + sizeof(DWORD); // the listId is counted in the size of the list fwrite(&realListSize, 1, sizeof(DWORD), fp); fwrite(&listId, 1, sizeof(DWORD), fp); } static void WriteMainAVIHeader(FILE *fp, DWORD dwMicroSecPerFrame, int width, int height, int nFrames) { MainAVIHeader header; memset(&header, 0, sizeof(header)); header.dwMicroSecPerFrame = dwMicroSecPerFrame; header.dwFlags = 0x800 /* = AVIF_TRUSTCKTYPE*/ | AVIF_HASINDEX; header.dwStreams = 1; header.dwWidth = width; header.dwHeight = height; header.dwTotalFrames = nFrames; WriteChunk(fp, ckidAVIMAINHDR, sizeof(header), &header); } static BOOL SetRect(RECT* rect, int xLeft, int yTop, int xRight, int yBottom) { rect->left = xLeft; rect->top = yTop; rect->right = xRight; rect->bottom = yBottom; } static void WriteMainStreamHeader(FILE *fp, DWORD dwRate, DWORD dwSuggestedBufferSize, int width, int height, int nFrames, DWORD dwCodec) { AVIStreamHeader strhdr; memset(&strhdr, 0, sizeof(strhdr)); strhdr.fccType = streamtypeVIDEO; strhdr.fccHandler = dwCodec; strhdr.dwScale = 1; strhdr.dwLength = nFrames; strhdr.dwQuality = -1; strhdr.dwRate = dwRate; strhdr.dwSuggestedBufferSize = dwSuggestedBufferSize; //strhdr.dwSampleSize = dwSuggestedBufferSize; SetRect(&strhdr.rcFrame, 0, 0, width, height); WriteChunk(fp, ckidSTREAMHEADER, sizeof(strhdr), &strhdr); } static void WriteStreamFormat(FILE *fp, BITMAPINFO *pbmphdr) { WriteChunk(fp, ckidSTREAMFORMAT, pbmphdr->bmiHeader.biSize, pbmphdr); } /*static DWORD GetFccHandler (HIC hIC) { ICINFO info; ICGetInfo (hIC, &info, sizeof (info)); return info.fccHandler; }*/ static void WriteStreamHeaders(FILE *fp, DWORD dwRate, int nFrames, BITMAPINFO *pbmphdr) { /* * Write the 'strl' RIFF list which contains * a. 'strh' chunk - the video stream header * b. 'strf' chunk - the video stream format */ // Write the 'strl' list header int streamHeadersSize = sizeof(AVIStreamHeader) + pbmphdr->bmiHeader.biSize + 2 * riffChunkOverhead; // 'strh', 'strf' chunks WriteListHeader(fp, streamHeadersSize, listtypeSTREAMHEADER); // Write the 'strh' chunk WriteMainStreamHeader(fp, dwRate, 0, pbmphdr->bmiHeader.biWidth, pbmphdr->bmiHeader.biHeight, nFrames, pbmphdr->bmiHeader.biCompression); // Write the 'strf' chunk WriteStreamFormat(fp, pbmphdr); } static void WriteAVIHeaders(FILE *fp, const VIDEOINFO &vi, int nFrames, BITMAPINFO *pCompBmpHdr) { /* * Write the 'hdrl' RIFF list which contains * 1. 'avih' chunk - the main AVI header * 2. 'strl' list which contains * a. 'strh' chunk - the video stream header * b. 
'strf' chunk - the video stream format */ // Write the 'hdrl' list header int AVIHeadersSize = sizeof(MainAVIHeader) + sizeof(AVIStreamHeader) + pCompBmpHdr->bmiHeader.biSize + riffListOverhead // 'strl' list + 3 * riffChunkOverhead; // 'avih', 'strh', 'strf' chunks WriteListHeader(fp, AVIHeadersSize, listtypeAVIHEADER); // Write the 'avih' chunk DWORD dwMicroSecPerFrame = 1000000 / vi.frameRate; WriteMainAVIHeader(fp, dwMicroSecPerFrame, vi.width, vi.height, nFrames); // Write the 'strl' list WriteStreamHeaders(fp, vi.frameRate, nFrames, pCompBmpHdr); } static void WriteJunkChunk(FILE *fp) { int junkPadding = AVI_HEADERSIZE - ftell(fp) - riffChunkOverhead; BYTE junk[AVI_HEADERSIZE]; memset(junk, 0, junkPadding); WriteChunk(fp, ckidAVIPADDING, junkPadding, junk); } static void WriteRIFFHeaders(FILE *fp, const VIDEOINFO &vi, BITMAPINFO *pCompBmpHdr, int movieSize = 0, int nFrames = 0) { // Write all relevant RIFF headers // See: http://msdn2.microsoft.com/en-us/library/ms779636(VS.85).aspx // Write the main 'RIFF' header int fileSize = movieSize + sizeof(DWORD) + AVI_HEADERSIZE + riffChunkOverhead + nFrames * sizeof(AVIINDEXENTRY); WriteRIFFHeader(fp, fileSize); // Write the AVI headers WriteAVIHeaders(fp, vi, nFrames, pCompBmpHdr); // Fill with junk up to 2K WriteJunkChunk(fp); // Start the main movie list WriteListHeader(fp, movieSize, listtypeAVIMOVIE); } COutputAVIFile::COutputAVIFile() : CBaseOutputVideoStream() { // _fp = NULL; oc = NULL; fmt = NULL; _pCompressor = NULL; c = NULL; _fp = NULL; } bool COutputAVIFile::Open(const char* location, const VIDEOINFO &vi, MediaLocationParams & mlParams) { // maxBufferSize and timeoutMS are ignored. They are relevant only if sync object is bypassed (which is not the case here). _vi = vi; // Initialize compressor DWORD useCodec = GetDefaultCodec(); // Default codec AVCodecID codecId = GetDefaultCodecId(); // default // Override codec if requested so if (mlParams.codecstr.compare("h264") == 0) { useCodec = h264; codecId = AV_CODEC_ID_H264; } else if (mlParams.codecstr.compare("vp6") == 0) { useCodec = vp62; codecId = AV_CODEC_ID_VP6F; } else if (mlParams.codecstr.compare("vp7") == 0) { useCodec = vp70; codecId = AV_CODEC_ID_VP9; } // avcodec_register_all(); // av_register_all(); _pCompressor = CVideoCompressor::Create(useCodec, codecId, vi); if (_pCompressor) { int ret = 0; try { fmt = av_guess_format(NULL, location, NULL); //"avi" //allocate the media output context oc = avformat_alloc_context(); if (!oc) { throw SWException("Couldn't deduce output fromant from extention\n"); } oc->oformat = fmt; fmt->video_codec = codecId; _pCompressor->init(oc, c); av_dump_format(oc, 0, location, 1); if (!(fmt->flags & AVFMT_NOFILE)) { ret = avio_open(&oc->pb, location, AVIO_FLAG_WRITE); if (ret < 0) { return false; } } else { return false; } //Write stream header if any ret = avformat_write_header(oc, NULL); if (ret < 0) { throw SWException("Error occurred when opening output file.\n"); } } catch (SWException e) { release(); throw e; } } else { // Open file _fp = fopen(location, "wb"); //, _SH_DENYWR); //_T("wb") if (_fp == NULL) return false; // Write headers BITMAPINFO *pCompBmpHdr = GetOutputBitmapInfo(); if (pCompBmpHdr) { WriteRIFFHeaders(_fp, vi, pCompBmpHdr); free(pCompBmpHdr); fflush(_fp); } } _lastIndex = sizeof(DWORD); return true; } static void WriteAVIIndex(FILE *fp, const IndicesType &index) { WriteChunkHeader(fp, ckidAVINEWINDEX, sizeof(AVIINDEXENTRY) * (int)index.size()); AVIINDEXENTRY idxEntry; idxEntry.ckid = dwFrameChunkId; 
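// Each 'idx1' entry written below records, for one frame chunk, its keyframe flag,
// its offset within the 'movi' list, and its length - the values accumulated in the
// _index vector as frames were written.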
IndicesType::const_iterator i; for (i = index.begin(); i != index.end(); i++) { const FRAMEINFO &info = (*i); idxEntry.dwChunkLength = info.length; idxEntry.dwChunkOffset = info.offset; idxEntry.dwFlags = (info.isKeyFrame ? AVIIF_KEYFRAME : 0); fwrite(&idxEntry, 1, sizeof(idxEntry), fp); } } static void UpdateAVISizes(FILE *fp, const VIDEOINFO &vi, BITMAPINFO *pCompBmpHdr, int movieSize, int nFrames) { fseek(fp, 0, SEEK_SET); WriteRIFFHeaders(fp, vi, pCompBmpHdr, movieSize, nFrames); } static void CloseAVIFile(FILE *fp, const VIDEOINFO &vi, BITMAPINFO *pCompBmpHdr, const IndicesType &index, int movieSize) { // if (pCompBmpHdr) { WriteAVIIndex(fp, index); UpdateAVISizes(fp, vi, pCompBmpHdr, movieSize, (int)index.size()); // } else { // //compressed AVI - gotten from ffmpeg // uint8_t endcode[] = { 0, 0, 1, 0xb7 }; // fwrite(endcode, 1, sizeof(endcode), fp); // } fclose(fp); } BITMAPINFO *COutputAVIFile::GetOutputBitmapInfo() { if (_pCompressor == NULL) { BITMAPINFO *pbmpinfo = (BITMAPINFO *)malloc(sizeof(BITMAPINFOHEADER)); GetBitmapInfo(pbmpinfo->bmiHeader, _vi.bitCount, _vi.width, _vi.height); return pbmpinfo; } else { //Linux - ffmpeg return frame already contains all information return NULL; // } } void COutputAVIFile::release() { try { if (c) { avcodec_close(c); av_free(c); c = NULL; } if (fmt) { if (!(fmt->flags & AVFMT_NOFILE)) { //close the output file if (oc && oc->pb) { avio_close(oc->pb); } } } //free stream if (oc) { avformat_free_context(oc); oc = NULL; } if (_pCompressor != NULL) delete _pCompressor; } catch (std::exception& e) { fprintf(stderr, "Error closing writer %s", e.what()); } } void COutputAVIFile::Close() { fprintf(stderr, "COutputAVIFile::Go to close.\n"); if (_fp != NULL) { BITMAPINFO *pCompBmpHdr = GetOutputBitmapInfo(); CloseAVIFile(_fp, _vi, pCompBmpHdr, _index, _lastIndex - sizeof(DWORD)); if (pCompBmpHdr) { free(pCompBmpHdr); } _fp = NULL; } if (oc) { fprintf(stderr, "av_write_trailer"); av_write_trailer(oc); } fprintf(stderr, "COutputAVIFile::Close.\n"); } COutputAVIFile::~COutputAVIFile() { fprintf(stderr, "COutputAVIFile::~COutputAVIFile\n"); release(); } bool COutputAVIFile::WriteFrame(CFramePtr framePtr) { LogTime(_T("Write start")); const void *pData = NULL; DWORD dataSize = 0; BOOL keyFrame = TRUE; if (_pCompressor == NULL) { pData = framePtr.GetData(); dataSize = framePtr.GetDataSize(); } else { // Compress frame LONG compSize = framePtr.GetDataSize(); pData = _pCompressor->CompressFrame(framePtr, compSize); dataSize = compSize; LogTime(_T("Compressed")); } if (_pCompressor == NULL) { fprintf(stderr, "_pCompressor == NULL\n"); // Write frame int chunkSize = WriteChunk(_fp, dwFrameChunkId, dataSize, pData); fflush(_fp); // Add index for frame FRAMEINFO info; info.isKeyFrame = (keyFrame != FALSE ? true : false); info.length = dataSize; info.offset = _lastIndex; _index.push_back(info); // Update last index offset _lastIndex += chunkSize; } else { AVPacket pkt; av_init_packet(&pkt); pkt.size = dataSize; pkt.data = (uint8_t*)pData; av_write_frame(oc, &pkt); av_free_packet(&pkt); // Add index for frame FRAMEINFO info; info.isKeyFrame = (keyFrame != FALSE ? 
true : false); info.length = dataSize; info.offset = _lastIndex; _index.push_back(info); // Update last index offset _lastIndex += dataSize; } FrameWasProcessed(); return true; } DWORD COutputAVIFile::GetDefaultCodec() { return defaultCompressionCodec; } AVCodecID COutputAVIFile::GetDefaultCodecId() { return defaultCompressionCodecId; } #endif <file_sep>/RenderingManager/RenderingManager/TestEngine.h #pragma once //#include <windows.h> #include <string> #include "Engine.h" class TestEngine : public Engine { public: void executeEngine(int argc, char** argv); }; <file_sep>/Fmod/Effects/PitchShiftDsp.h #ifndef __PITCH_SHIFT_DSP_H__ #define __PITCH_SHIFT_DSP_H__ #include "DspEffect.h" #pragma once class PitchShiftDsp : public DspEffect { protected: FMOD::DSP *dsp; public: void create(FMOD::System *fmodSystem) { fmodSystem->createDSPByType(FMOD_DSP_TYPE_PITCHSHIFT, &dsp); dspList.push_back(dsp); } void setPitch(float factor) { dsp->setParameter(FMOD_DSP_PITCHSHIFT_PITCH, factor); } void setFftSize(float factor) { dsp->setParameter(FMOD_DSP_PITCHSHIFT_FFTSIZE, factor); } void setMaxChannels(float factor) { dsp->setParameter(FMOD_DSP_PITCHSHIFT_MAXCHANNELS, factor); } void setOverlap(float factor) { dsp->setParameter(FMOD_DSP_PITCHSHIFT_OVERLAP, factor); } }; #endif <file_sep>/Fmod/afx.h /* * afx.h * * Created on: Oct 22, 2013 * Author: eranv */ #ifndef AFX_H_ #define AFX_H_ #pragma once #include "targetver.h" #include "stdio.h" #include <vector> #include <string> #include <map> #include <fmod.hpp> #include <fmod_errors.h> #include "tinyxml.h" #define MAX_LEN 256 #endif /* AFX_H_ */ <file_sep>/NewEncoder/VideoEncoder/MoovHeaderWriter.cpp #include "MoovHeaderWriter.h" #include <stdio.h> #include <stdlib.h> #include <inttypes.h> #include <string.h> #include "isom.h" #include "SWException.h" #include <fstream> #include <sstream> #include "avc.h" #include <list> #define DEBUG_HEADER #define UINT32_MAX 0xffffffff #define INT32_MAX 0x7fffffff #define MOV_TKHD_FLAG_ENABLED 0x0001 #define MOV_TKHD_FLAG_IN_MOVIE 0x0002 #define MOV_TKHD_FLAG_IN_PREVIEW 0x0004 #define MOV_TKHD_FLAG_IN_POSTER 0x0008 #define FFMAX(a, b) ((a) > (b) ? 
(a) : (b)) #define FFMAX3(a, b, c) FFMAX(FFMAX(a,b),c) typedef struct DynBuffer { int pos, size, allocated_size; uint8_t *buffer; int io_buffer_size; uint8_t io_buffer[1]; } DynBuffer; static void put_descr(AVIOContext *pb, int tag, unsigned int size) { int i = 3; avio_w8(pb, tag); for (; i > 0; i--) avio_w8(pb, (size >> (7 * i)) | 0x80); avio_w8(pb, size & 0x7F); } static int utf8len(const uint8_t *b) { int len = 0; int val; while (*b) { GET_UTF8(val, *b++, return -1;) len++; } return len; } static av_always_inline void ffio_wfourcc(AVIOContext *pb, const uint8_t *s) { avio_wl32(pb, MKTAG(s[0], s[1], s[2], s[3])); } //static int64_t update_size(AVIOContext *pb, int64_t pos) //{ // int64_t curpos = avio_tell(pb); // avio_seek(pb, pos, SEEK_SET); // avio_wb32(pb, curpos - pos); /* rewrite size */ // avio_seek(pb, curpos, SEEK_SET); // // return curpos - pos; //} static void write_matrix(AVIOContext *pb, int16_t a, int16_t b, int16_t c, int16_t d, int16_t tx, int16_t ty) { avio_wb32(pb, a << 16); /* 16.16 format */ avio_wb32(pb, b << 16); /* 16.16 format */ avio_wb32(pb, 0); /* u in 2.30 format */ avio_wb32(pb, c << 16); /* 16.16 format */ avio_wb32(pb, d << 16); /* 16.16 format */ avio_wb32(pb, 0); /* v in 2.30 format */ avio_wb32(pb, tx << 16); /* 16.16 format */ avio_wb32(pb, ty << 16); /* 16.16 format */ avio_wb32(pb, 1 << 30); /* w in 2.30 format */ } //static int mov_write_int8_metadata(AVFormatContext *s, AVIOContext *pb, // const char *name, const char *tag, // int len) //{ // AVDictionaryEntry *t = NULL; // uint8_t num; // int size = 24 + len; // // if (len != 1 && len != 4) // return -1; // // if (!(t = av_dict_get(s->metadata, tag, NULL, 0))) // return 0; // num = atoi(t->value); // // avio_wb32(pb, size); // ffio_wfourcc(pb, (uint8_t *)name); // avio_wb32(pb, size - 8); // ffio_wfourcc(pb, (uint8_t *)"data"); // avio_wb32(pb, 0x15); // avio_wb32(pb, 0); // if (len==4) avio_wb32(pb, num); // else avio_w8 (pb, num); // // return size; //} /* helper function to write a data tag with the specified string as data */ static int mov_write_string_data_tag(AVIOContext *pb, const char *data, int lang, int long_style) { if (long_style) { int size = 16 + strlen(data); avio_wb32(pb, size); /* size */ ffio_wfourcc(pb, (uint8_t *)"data"); avio_wb32(pb, 1); avio_wb32(pb, 0); avio_write(pb, (uint8_t *)data, strlen(data)); return size; } //else { // if (!lang) // lang = ff_mov_iso639_to_lang("und", 1); // avio_wb16(pb, strlen(data)); /* string length */ // avio_wb16(pb, lang); // avio_write(pb, data, strlen(data)); // return strlen(data) + 4; //} } static int mov_write_string_tag(AVIOContext *pb, const char *name, const char *value, int lang, int long_style) { int string_tag_size = 37; if (value && value[0]) { int64_t pos = avio_tell(pb); avio_wb32(pb, string_tag_size); /* size */ ffio_wfourcc(pb, (uint8_t *)name); mov_write_string_data_tag(pb, value, lang, long_style); //29 //size = update_size(pb, pos); } return string_tag_size; } static const uint8_t *ff_avc_find_startcode_internal(const uint8_t *p, const uint8_t *end) { const uint8_t *a = p + 4 - ((intptr_t)p & 3); for (end -= 3; p < a && p < end; p++) { if (p[0] == 0 && p[1] == 0 && p[2] == 1) return p; } for (end -= 3; p < end; p += 4) { uint32_t x = *(const uint32_t*)p; // if ((x - 0x01000100) & (~x) & 0x80008000) // little endian // if ((x - 0x00010001) & (~x) & 0x00800080) // big endian if ((x - 0x01010101) & (~x) & 0x80808080) { // generic if (p[1] == 0) { if (p[0] == 0 && p[2] == 1) return p; if (p[2] == 0 && p[3] == 1) return p + 1; } 
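// The word test above ((x - 0x01010101) & ~x & 0x80808080) only indicates that some
// byte of the 32-bit word is zero. The branch below handles the case where the zero
// bytes of a 00 00 01 start code fall in the second half of the word: the code may
// begin at p+2 (p[2]==0, p[3]==0, p[4]==1) or at p+3 (p[3]==0, p[4]==0, p[5]==1).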
if (p[3] == 0) { if (p[2] == 0 && p[4] == 1) return p + 2; if (p[4] == 0 && p[5] == 1) return p + 3; } } } for (end += 3; p < end; p++) { if (p[0] == 0 && p[1] == 0 && p[2] == 1) return p; } return end + 3; } const uint8_t * ff_avc_find_startcode(const uint8_t *p, const uint8_t *end) { const uint8_t *out = ff_avc_find_startcode_internal(p, end); if (p<out && out<end && !out[-1]) out--; return out; } int ff_avc_parse_nal_units(AVIOContext *pb, const uint8_t *buf_in, int size) { const uint8_t *p = buf_in; const uint8_t *end = p + size; const uint8_t *nal_start, *nal_end; size = 0; nal_start = ff_avc_find_startcode(p, end); for (;;) { while (nal_start < end && !*(nal_start++)); if (nal_start == end) break; nal_end = ff_avc_find_startcode(nal_start, end); avio_wb32(pb, nal_end - nal_start); avio_write(pb, nal_start, nal_end - nal_start); size += 4 + nal_end - nal_start; nal_start = nal_end; } return size; } int ff_avc_parse_nal_units_buf(const uint8_t *buf_in, uint8_t **buf, int *size) { AVIOContext *pb; int ret = avio_open_dyn_buf(&pb); if (ret < 0) return ret; ff_avc_parse_nal_units(pb, buf_in, *size); av_freep(buf); *size = avio_close_dyn_buf(pb, buf); return 0; } uint32_t calc_sps_size(const uint8_t *data, int len) { uint8_t *buf = NULL, *end, *start; uint32_t sps_size = 0, pps_size = 0; uint8_t *sps = 0, *pps = 0; int ret = ff_avc_parse_nal_units_buf(data, &buf, &len); //if (ret < 0) // return ret; start = buf; end = buf + len; /* look for sps and pps */ while (end - buf > 4) { uint32_t size; uint8_t nal_type; size = FFMIN(AV_RB32(buf), end - buf - 4); buf += 4; nal_type = buf[0] & 0x1f; if (nal_type == 7) { /* SPS */ sps = buf; sps_size = size; } else if (nal_type == 8) { /* PPS */ pps = buf; pps_size = size; } buf += size; } return sps_size; } int ff_mov_iso639_to_lang(const char lang[4], int mp4) { int i, code = 0; ///* old way, only for QT? */ //for (i = 0; lang[0] && !mp4 && i < FF_ARRAY_ELEMS(mov_mdhd_language_map); i++) { // if (!strcmp(lang, mov_mdhd_language_map[i])) // return i; //} /* XXX:can we do that in mov too? 
*/ if (!mp4) return -1; /* handle undefined as such */ if (lang[0] == '\0') lang = "und"; /* 5bit ascii */ for (i = 0; i < 3; i++) { uint8_t c = lang[i]; c -= 0x60; if (c > 0x1f) return -1; code <<= 5; code |= c; } return code; } //static int mov_write_string_metadata(AVFormatContext *s, AVIOContext *pb, // const char *name, const char *tag, // int long_style) //{ // int l, lang = 0, len, len2; // AVDictionaryEntry *t, *t2 = NULL; // char tag2[16]; // // if (!(t = av_dict_get(s->metadata, tag, NULL, 0))) // return 0; // // len = strlen(t->key); // _snprintf(tag2, sizeof(tag2), "%s-", tag); // while ((t2 = av_dict_get(s->metadata, tag2, t2, AV_DICT_IGNORE_SUFFIX))) { // len2 = strlen(t2->key); // if (len2 == len + 4 && !strcmp(t->value, t2->value) // && (l = ff_mov_iso639_to_lang(&t2->key[len2 - 3], 1)) >= 0) { // lang = l; // break; // } // } // return mov_write_string_tag(pb, name, t->value, lang, long_style); //} //static unsigned compute_avg_bitrate(AVStream* st, int size, int duration){ // int timescale = st->time_base.den; ///st->codec->time_base.den; // //this is original code: // //return size * 8 * track->timescale / track->track_duration; // return floor((double) size * 8 * timescale /(double)duration); //} MoovHeaderWriter::MoovHeaderWriter(Logger & logger) : m_logger(logger) { m_vduration =0; m_nvframe = 0; //m_fps = 0; m_vpacket_size = 0; m_voffsets = NULL; m_aduration =0; m_naframe =0; m_apacket_size = 0; m_aoffsets = NULL; m_dataStartPosition = 0; // m_mediaListSize = 0; m_frame_number = 0; m_data_size = 0; } MoovHeaderWriter::~MoovHeaderWriter(void) { } //int ff_write_header(AVFormatContext *s, AVDictionary **options) //{ // int ret = 0; // // if (ret = init_muxer(s, options)) // return ret; // // if (s->oformat->write_header) { // ret = s->oformat->write_header(s); // if (ret >= 0 && s->pb && s->pb->error < 0) // ret = s->pb->error; // if (ret < 0) // return ret; // if (s->flush_packets && s->pb && s->pb->error >= 0 && s->flags & AVFMT_FLAG_FLUSH_PACKETS) // avio_flush(s->pb); // } // // if ((ret = init_pts(s)) < 0) // return ret; // // if (s->avoid_negative_ts < 0) { // av_assert2(s->avoid_negative_ts == AVFMT_AVOID_NEG_TS_AUTO); // if (s->oformat->flags & (AVFMT_TS_NEGATIVE | AVFMT_NOTIMESTAMPS)) { // s->avoid_negative_ts = 0; // } else // s->avoid_negative_ts = AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE; // } // // return 0; //} int MoovHeaderWriter:: mov_write_ftyp_tag(AVIOContext *pb){ int64_t pos = avio_tell(pb); avio_wb32(pb, ftyp_atom_size); /* size */ //8+ bytes file type box = long unsigned offset + long ASCII text string 'ftyp' ffio_wfourcc(pb, (uint8_t *)"ftyp"); //4 bytes major brand = long ASCII text main type string ffio_wfourcc(pb, (uint8_t *)"isom"); //4 bytes major brand version = long unsigned main type revision value int minor = 0x200; avio_wb32(pb, minor); //4+ bytes compatible brands = list of long ASCII text used technology strings ffio_wfourcc(pb, (uint8_t *)"isom"); ffio_wfourcc(pb, (uint8_t *)"iso2"); ffio_wfourcc(pb, (uint8_t *)"avc1"); ffio_wfourcc(pb, (uint8_t *)"mp41"); return ftyp_atom_size; //update_size(pb, pos); } int MoovHeaderWriter :: mov_write_mdat_tag(AVIOContext *pb) { int64_t pos = avio_tell(pb); //avio_seek(pb, pos, SEEK_SET); avio_wb32(pb, 8); // placeholder for extended size field (64 bit) ffio_wfourcc(pb, (uint8_t *)"free"); avio_wb32(pb, m_data_size); /* size placeholder*/ ffio_wfourcc(pb, (uint8_t *)"mdat"); int64_t mdat_pos = avio_tell(pb); return mdat_pos - pos; } int MoovHeaderWriter::mov_write_tkhd_tag(AVIOContext *pb, 
AVFormatContext *s, int i, int64_t duration) { //size 92 /*MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]);*/ AVStream *st = s->nb_streams ? s->streams[i] : NULL; /*int64_t duration = av_rescale_rnd(track->track_duration, MOV_TIMESCALE, track->timescale, AV_ROUND_UP);*/ int version = duration < INT32_MAX ? 0 : 1; int flags = MOV_TKHD_FLAG_IN_MOVIE; flags |= MOV_TKHD_FLAG_ENABLED; int rotation = 0; int group = 0; int track_id = (i + 1); AVMediaType codec_type = st->codec->codec_type; /*if (st) { if (mov->per_stream_grouping) group = st->index; else*/ group = st->codec->codec_type; //} int width = group == 0 ? 640 : 0; int height = group == 0 ? 360 :0; /*if (track->flags & MOV_TRACK_ENABLED) flags |= MOV_TKHD_FLAG_ENABLED;*/ (version == 1) ? avio_wb32(pb, 104) : avio_wb32(pb, tkht_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"tkhd"); avio_w8(pb, version); avio_wb24(pb, flags); avio_wb32(pb, 0); /* creation time */ avio_wb32(pb, 0); /* modification time */ avio_wb32(pb, track_id/*track->track_id*/); /* track-id */ avio_wb32(pb, 0); /* reserved */ (version == 1) ? avio_wb64(pb, duration) : avio_wb32(pb, duration); /*if (!track->entry && mov->mode == MODE_ISM) (version == 1) ? avio_wb64(pb, UINT64_C(0xffffffffffffffff)) : avio_wb32(pb, 0xffffffff); else if (!track->entry) (version == 1) ? avio_wb64(pb, 0) : avio_wb32(pb, 0); else (version == 1) ? avio_wb64(pb, duration) : avio_wb32(pb, duration);*/ avio_wb32(pb, 0); /* reserved */ avio_wb32(pb, 0); /* reserved */ avio_wb16(pb, 0); /* layer */ avio_wb16(pb, group); /* alternate group) */ /* Volume, only for audio */ if (codec_type == AVMEDIA_TYPE_AUDIO) avio_wb16(pb, 0x0100); else avio_wb16(pb, 0); avio_wb16(pb, 0); /* reserved */ /* Matrix structure */ if (st && st->metadata) { AVDictionaryEntry *rot = av_dict_get(st->metadata, "rotate", NULL, 0); rotation = (rot && rot->value) ? atoi(rot->value) : 0; } if (rotation == 90) { write_matrix(pb, 0, 1, -1, 0, height/*track->enc->height*/, 0); } else if (rotation == 180) { write_matrix(pb, -1, 0, 0, -1, width, height /*track->enc->width, track->enc->height*/); } else if (rotation == 270) { write_matrix(pb, 0, -1, 1, 0, 0, width/*track->enc->width*/); } else { write_matrix(pb, 1, 0, 0, 1, 0, 0); } /* Track width and height, for visual only */ if (st && (codec_type == AVMEDIA_TYPE_VIDEO || codec_type == AVMEDIA_TYPE_SUBTITLE)) { double sample_aspect_ratio = av_q2d(st->sample_aspect_ratio); if (!sample_aspect_ratio || /*track->height*/ height!= width/*track->enc->height*/) sample_aspect_ratio = 1; avio_wb32(pb, sample_aspect_ratio * width/*track->enc->width*/ * 0x10000); avio_wb32(pb, height/*track->height*/ * 0x10000); } else { avio_wb32(pb, 0); avio_wb32(pb, 0); } return 0x5c; } // This box seems important for the psp playback ... without it the movie seems to hang int MoovHeaderWriter::mov_write_edts_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration) { /*MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]);*/ AVStream *st = s->nb_streams ? s->streams[i] : NULL; /*int64_t duration = av_rescale_rnd(track->track_duration, MOV_TIMESCALE, track->timescale, AV_ROUND_UP);*/ int version = duration < INT32_MAX ? 0 : 1; int entry_size, entry_count, size; int64_t delay, start_ct = 0; delay = 0; //st->codec->delay; /*av_rescale_rnd(start_dts + start_ct, MOV_TIMESCALE, track->timescale, AV_ROUND_DOWN);*/ version |= delay < INT32_MAX ? 0 : 1; entry_size = (version == 1) ? 
20 : 12; entry_count = 1 + (delay > 0); size =st->codec->codec_type == AVMEDIA_TYPE_VIDEO ? v_edts_atom_size : a_edts_atom_size; //24 + entry_count * entry_size; /* write the atom data */ avio_wb32(pb, size); ffio_wfourcc(pb, (uint8_t *)"edts"); avio_wb32(pb, size - 8); ffio_wfourcc(pb, (uint8_t *)"elst"); avio_w8(pb, version); avio_wb24(pb, 0); /* flags */ avio_wb32(pb, entry_count); if (delay > 0) { /* add an empty edit to delay presentation */ /* In the positive delay case, the delay includes the cts * offset, and the second edit list entry below trims out * the same amount from the actual content. This makes sure * that the offsetted last sample is included in the edit * list duration as well. */ if (version == 1) { avio_wb64(pb, delay); avio_wb64(pb, -1); } else { avio_wb32(pb, delay); avio_wb32(pb, -1); } avio_wb32(pb, 0x00010000); } else { /* Avoid accidentally ending up with start_ct = -1 which has got a * special meaning. Normally start_ct should end up positive or zero * here, but use FFMIN in case dts is a a small positive integer * rounded to 0 when represented in MOV_TIMESCALE units. */ /* av_assert0(av_rescale_rnd(start_dts, MOV_TIMESCALE, track->timescale, AV_ROUND_DOWN) <= 0); start_ct = -FFMIN(start_dts, 0);*/ /* Note, this delay is calculated from the pts of the first sample, * ensuring that we don't reduce the duration for cases with * dts<0 pts=0. */ duration += delay; } /* For fragmented files, we don't know the full length yet. Setting * duration to 0 allows us to only specify the offset, including * the rest of the content (from all future fragments) without specifying * an explicit duration. */ /*if (mov->flags & FF_MOV_FLAG_FRAGMENT) duration = 0;*/ /* duration */ /*if (version == 1) { avio_wb64(pb, duration); avio_wb64(pb, start_ct); } else {*/ avio_wb32(pb, duration); avio_wb32(pb, start_ct); //} avio_wb32(pb, 0x00010000); return size; } int MoovHeaderWriter::getTrackTimescale(AVStream* st){ int track_timescale = 0; if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { track_timescale = st->time_base.den; while(track_timescale < 10000) track_timescale *= 2; } else{ track_timescale = st->codec->sample_rate; } return track_timescale; } int MoovHeaderWriter::mov_write_mdhd_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration) { MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]); AVStream *st = s->nb_streams ? s->streams[i] : NULL; int track_timescale = getTrackTimescale(st) ; // i == 0 ? st->time_base.den : st->codec->time_base.den; //int track_timescale = st->codec->time_base.den /st->codec->time_base.num; bool isVideo = st->codec->codec_type == AVMEDIA_TYPE_VIDEO ? true: false; //4 bytes duration = long unsigned media time length (in media time units double fps = (double) m_options.framerate / (double)m_options.framerate_scale; int64_t track_duration = i == 0 ? track_timescale * m_nvframe/fps : st->codec->frame_size * m_naframe; //TODO //int64_t track_duration = i == 0 ? m_vduration: m_aduration; // 663552; // 231424 av_rescale_rnd(duration, MOV_TIMESCALE, track_timescale, AV_ROUND_UP); // timescale, AV_ROUND_UP); int version = track_duration < INT32_MAX ? 
0 : 1; int track_language = 21956; avio_wb32(pb, 32); /* size */ ffio_wfourcc(pb, (uint8_t *)"mdhd"); avio_w8(pb, version); avio_wb24(pb, 0); /* flags */ avio_wb32(pb, track->time); /* creation time */ avio_wb32(pb, track->time); /* modification time */ avio_wb32(pb, track_timescale); /* time scale (sample rate for audio) */ avio_wb32(pb, track_duration); /* duration */ avio_wb16(pb, track_language); /* language */ avio_wb16(pb, 0); /* reserved (quality) */ return 32; } int MoovHeaderWriter::mov_write_hdlr_tag(AVIOContext *pb, AVFormatContext *s, int i) { AVStream *st = s->nb_streams ? s->streams[i] : NULL; const char *hdlr, *descr = NULL, *hdlr_type = NULL; int64_t pos = avio_tell(pb); hdlr = "dhlr"; hdlr_type = "url "; descr = "DataHandler"; AVMediaType codec_type = st->codec->codec_type; unsigned int codec_tag = st->codec->codec_tag; int mode = MODE_MP4; if (s->nb_streams) { hdlr = "\0\0\0\0"; if (codec_type == AVMEDIA_TYPE_VIDEO) { hdlr_type = "vide"; descr = "VideoHandler"; } else if (codec_type == AVMEDIA_TYPE_AUDIO) { hdlr_type = "soun"; descr = "SoundHandler"; } else { char tag_buf[32]; av_get_codec_tag_string(tag_buf, sizeof(tag_buf), codec_tag); } if (st) { // hdlr.name is used by some players to identify the content title // of the track. So if an alternate handler description is // specified, use it. AVDictionaryEntry *t; t = av_dict_get(st->metadata, "handler", NULL, 0); if (t && utf8len((uint8_t *)t->value)) descr = t->value; } } avio_wb32(pb, hdlr_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"hdlr"); avio_wb32(pb, 0); /* Version & flags */ avio_write(pb, (uint8_t *)hdlr, 4); /* handler */ ffio_wfourcc(pb, (uint8_t *)hdlr_type); /* handler type */ avio_wb32(pb, 0); /* reserved */ avio_wb32(pb, 0); /* reserved */ avio_wb32(pb, 0); /* reserved */ avio_write(pb, (unsigned char *)descr, strlen(descr)); /* handler description */ avio_w8(pb, 0); /* c string */ return hdlr_atom_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_smhd_tag(AVIOContext *pb) { avio_wb32(pb, 16); /* size */ ffio_wfourcc(pb, (uint8_t *)"smhd"); avio_wb32(pb, 0); /* version & flags */ avio_wb16(pb, 0); /* reserved (balance, normally = 0) */ avio_wb16(pb, 0); /* reserved */ return 16; } int MoovHeaderWriter::mov_write_vmhd_tag(AVIOContext *pb) { avio_wb32(pb, 0x14); /* size (always 0x14) */ ffio_wfourcc(pb, (uint8_t *)"vmhd"); avio_wb32(pb, 0x01); /* version & flags */ avio_wb64(pb, 0); /* reserved (graphics mode = copy) */ return 0x14; } int MoovHeaderWriter::mov_write_dref_tag(AVIOContext *pb) { avio_wb32(pb, 28); /* size */ ffio_wfourcc(pb, (uint8_t *)"dref"); avio_wb32(pb, 0); /* version & flags */ avio_wb32(pb, 1); /* entry count */ avio_wb32(pb, 0xc); /* size */ //FIXME add the alis and rsrc atom ffio_wfourcc(pb, (uint8_t *)"url "); avio_wb32(pb, 1); /* version & flags */ return 28; } int MoovHeaderWriter::mov_write_dinf_tag(AVIOContext *pb) { int64_t pos = avio_tell(pb); avio_wb32(pb, dinf_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"dinf"); mov_write_dref_tag(pb); return dinf_atom_size; //update_size(pb, pos); } //void find_compressor(char * compressor_name, int len, MOVTrack *track) //{ // AVDictionaryEntry *encoder; // int xdcam_res = (track->enc->width == 1280 && track->enc->height == 720) // || (track->enc->width == 1440 && track->enc->height == 1080) // || (track->enc->width == 1920 && track->enc->height == 1080); // // if (track->mode == MODE_MOV && // (encoder = av_dict_get(track->st->metadata, "encoder", NULL, 0))) { // av_strlcpy(compressor_name, encoder->value, 
32); // } else if (track->enc->codec_id == AV_CODEC_ID_MPEG2VIDEO && xdcam_res) { // int interlaced = track->enc->field_order > AV_FIELD_PROGRESSIVE; // AVStream *st = track->st; // int rate = av_q2d(find_fps(NULL, st)); // av_strlcatf(compressor_name, len, "XDCAM"); // if (track->enc->pix_fmt == AV_PIX_FMT_YUV422P) { // av_strlcatf(compressor_name, len, " HD422"); // } else if(track->enc->width == 1440) { // av_strlcatf(compressor_name, len, " HD"); // } else // av_strlcatf(compressor_name, len, " EX"); // // av_strlcatf(compressor_name, len, " %d%c", track->enc->height, interlaced ? 'i' : 'p'); // // av_strlcatf(compressor_name, len, "%d", rate * (interlaced + 1)); // } //} int MoovHeaderWriter::mov_write_avcc_tag(AVIOContext *pb, AVFormatContext *s, int i) { int64_t pos = avio_tell(pb); MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]); avio_wb32(pb, avcc_tag_size); ffio_wfourcc(pb, (uint8_t *)"avcC"); AVStream* st = s->streams[i]; int vos_len = st->codec->extradata_size; uint8_t *vos_data = st->codec->extradata; //uint8_t sps [] = {0x67, 0x42, 0xC0, 0x1E, 0xDA, 0x02, 0x80, 0xBF, 0xE5, 0x84, 0x00, 0x00, 0x03, 0x00, 0x04, 0x00, 0x00, 0x03, 0x01, 0xE0, 0x3C, 0x58, 0xBA, 0x80}; //uint8_t pps[] = {0x68, 0xCE, 0x3C, 0x80}; //avio_w8(pb, 1); /* version */ //avio_w8(pb, sps[1]); ///* profile */ //avio_w8(pb, sps[2]); /* profile compat */ //avio_w8(pb, sps[3]); /* level */ //avio_w8(pb, 0xff); /* 6 bits reserved (111111) + 2 bits nal size length - 1 (11) */ //avio_w8(pb, 0xe1); /* 3 bits reserved (111) + 5 bits number of sps (00001) */ //uint32_t sps_size=0x18, pps_size=0x4; //avio_wb16(pb, sps_size); //avio_write(pb, sps, sps_size); //avio_w8(pb, 1); /* number of ps */ //avio_wb16(pb, pps_size); //avio_write(pb, pps, pps_size); pos = avc::ff_isom_write_avcc(pb, vos_data, vos_len); return avcc_tag_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_video_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration) { MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]); AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVMediaType codec_type = st->codec->codec_type; int64_t pos = avio_tell(pb); char compressor_name[32] = { 0 }; int avid = 0; int width = codec_type == 0 ? 640 : 0; int height = codec_type == 0 ? 
360 :0; avio_wb32(pb, video_tag_size); /* size */ avio_wl32(pb, track->tag); // store it byteswapped avio_wb32(pb, 0); /* Reserved */ avio_wb16(pb, 0); /* Reserved */ avio_wb16(pb, 1); /* Data-reference index */ avio_wb16(pb, 0); /* Codec stream version */ avio_wb16(pb, 0); /* Codec stream revision (=0) */ //if (track->mode == MODE_MOV) { // ffio_wfourcc(pb, "FFMP"); /* Vendor */ // if (track->enc->codec_id == AV_CODEC_ID_RAWVIDEO) { // avio_wb32(pb, 0); /* Temporal Quality */ // avio_wb32(pb, 0x400); /* Spatial Quality = lossless*/ // } else { // avio_wb32(pb, 0x200); /* Temporal Quality = normal */ // avio_wb32(pb, 0x200); /* Spatial Quality = normal */ // } //} else { avio_wb32(pb, 0); /* Reserved */ avio_wb32(pb, 0); /* Reserved */ avio_wb32(pb, 0); /* Reserved */ //} avio_wb16(pb, width); /* Video width */ avio_wb16(pb, height); /* Video height */ avio_wb32(pb, 0x00480000); /* Horizontal resolution 72dpi */ avio_wb32(pb, 0x00480000); /* Vertical resolution 72dpi */ avio_wb32(pb, 0); /* Data size (= 0) */ avio_wb16(pb, 1); /* Frame count (= 1) */ /* FIXME not sure, ISO 14496-1 draft where it shall be set to 0 */ //find_compressor(compressor_name, 32, track); avio_w8(pb, strlen(compressor_name)); avio_write(pb, (unsigned char *)compressor_name, 31); if (track->mode == MODE_MOV && track->enc->bits_per_coded_sample) avio_wb16(pb, track->enc->bits_per_coded_sample); else avio_wb16(pb, 0x18); /* Reserved */ avio_wb16(pb, 0xffff); /* Reserved */ /*if (track->tag == MKTAG('m','p','4','v')) mov_write_esds_tag(pb, track); else if (track->enc->codec_id == AV_CODEC_ID_H263) mov_write_d263_tag(pb); else if (track->enc->codec_id == AV_CODEC_ID_AVUI || track->enc->codec_id == AV_CODEC_ID_SVQ3) { mov_write_extradata_tag(pb, track); avio_wb32(pb, 0); } else if (track->enc->codec_id == AV_CODEC_ID_DNXHD) { mov_write_avid_tag(pb, track); avid = 1; } else if (track->enc->codec_id == AV_CODEC_ID_HEVC) mov_write_hvcc_tag(pb, track); else if (track->enc->codec_id == AV_CODEC_ID_H264 && !TAG_IS_AVCI(track->tag)) {*/ mov_write_avcc_tag(pb, s, i); /*if (track->mode == MODE_IPOD) mov_write_uuid_tag_ipod(pb);*/ //} else if (track->enc->codec_id == AV_CODEC_ID_VC1 && track->vos_len > 0) // mov_write_dvc1_tag(pb, track); //else if (track->enc->codec_id == AV_CODEC_ID_VP6F || // track->enc->codec_id == AV_CODEC_ID_VP6A) { // /* Don't write any potential extradata here - the cropping // * is signalled via the normal width/height fields. */ //} else if (track->enc->codec_id == AV_CODEC_ID_R10K) { // if (track->enc->codec_tag == MKTAG('R','1','0','k')) // mov_write_dpxe_tag(pb, track); //} else if (track->vos_len > 0) // mov_write_glbl_tag(pb, track); /*if (track->enc->codec_id != AV_CODEC_ID_H264 && track->enc->codec_id != AV_CODEC_ID_MPEG4 && track->enc->codec_id != AV_CODEC_ID_DNXHD) if (track->enc->field_order != AV_FIELD_UNKNOWN) mov_write_fiel_tag(pb, track);*/ /*if (mov->flags & FF_MOV_FLAG_WRITE_GAMA) { if (track->mode == MODE_MOV) mov_write_gama_tag(pb, track, mov->gamma); else av_log(mov->fc, AV_LOG_WARNING, "Not writing 'gama' atom. Format is not MOV.\n"); } if (mov->flags & FF_MOV_FLAG_WRITE_COLR) { if (track->mode == MODE_MOV || track->mode == MODE_MP4) mov_write_colr_tag(pb, track); else av_log(mov->fc, AV_LOG_WARNING, "Not writing 'colr' atom. 
Format is not MOV or MP4.\n"); }*/ /*if (track->enc->sample_aspect_ratio.den && track->enc->sample_aspect_ratio.num && track->enc->sample_aspect_ratio.den != track->enc->sample_aspect_ratio.num) { mov_write_pasp_tag(pb, track); }*/ /* extra padding for avid stsd */ /* https://developer.apple.com/library/mac/documentation/QuickTime/QTFF/QTFFChap2/qtff2.html#//apple_ref/doc/uid/TP40000939-CH204-61112 */ if (avid) avio_wb32(pb, 0); return video_tag_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_esds_tag(AVIOContext *pb, AVFormatContext *s, int i) // Basic { MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]); AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVCodecID codec_id = st->codec->codec_id; int sample_rate = st->codec->sample_rate; int vos_len = st->codec->extradata_size; uint8_t *vos_data = st->codec->extradata; int64_t pos = avio_tell(pb); int decoder_specific_info_len = vos_len ? 5 + vos_len : 0; unsigned avg_bitrate; int esds_tag_size = (codec_id == AV_CODEC_ID_AAC ? 54 : 47); avio_wb32(pb, esds_tag_size); // size ffio_wfourcc(pb, (uint8_t *)"esds"); avio_wb32(pb, 0); // Version // ES descriptor put_descr(pb, 0x03, 3 + 5+13 + decoder_specific_info_len + 5+1); avio_wb16(pb, track->track_id); avio_w8(pb, 0x00); // flags (= no flags) // DecoderConfig descriptor put_descr(pb, 0x04, 13 + decoder_specific_info_len); // Object type indication if ((codec_id == AV_CODEC_ID_MP2 || codec_id == AV_CODEC_ID_MP3) && sample_rate > 24000) avio_w8(pb, 0x6B); // 11172-3 else { avio_w8(pb, 0x66); /*avio_w8(pb, 0x67); avio_w8(pb, 0x68);*/ //avio_w8(pb, ff_codec_get_tag(ff_mp4_obj_type, codec_id)); //avio_w8(pb, tags); } if (track->enc == NULL) m_logger.info("track->enc == null!!!"); //EAEA - I commeted out the track->enc lines since this causes NPE!!! // the following fields is made of 6 bits to identify the streamtype (4 for video, 5 for audio) // plus 1 bit to indicate upstream and 1 bit set to 1 (reserved) //if (track->enc->codec_id == AV_CODEC_ID_DVD_SUBTITLE) // avio_w8(pb, (0x38 << 2) | 1); // flags (= NeroSubpicStream) //else if (track->enc->codec_type == AVMEDIA_TYPE_AUDIO) avio_w8(pb, 0x15); // flags (= Audiostream) //else // avio_w8(pb, 0x11); // flags (= Visualstream) avio_wb24(pb, st->codec->rc_buffer_size >> 3); // Buffersize DB int track_timescale = getTrackTimescale(st) ;//st->codec->time_base.den; int64_t track_duration = st->codec->frame_size * m_naframe; // 663552; // 231424 av_rescale_rnd(duration, MOV_TIMESCALE, track_timescale, AV_ROUND_UP); // timescale, AV_ROUND_UP); int size = achunkList.size() * m_apacket_size; avg_bitrate = st->codec->bit_rate; //compute_avg_bitrate(st, size, m_aduration); // maxbitrate (FIXME should be max rate in any 1 sec window) avio_wb32(pb, FFMAX3(st->codec->bit_rate, st->codec->rc_max_rate, avg_bitrate)); avio_wb32(pb, avg_bitrate); if (vos_len) { // DecoderSpecific info descriptor put_descr(pb, 0x05, vos_len); avio_write(pb, vos_data, vos_len); } // SL descriptor put_descr(pb, 0x06, 1); avio_w8(pb, 0x02); return esds_tag_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_audio_tag(AVIOContext *pb, AVFormatContext *s, int i) { MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[i]); AVStream *st = s->nb_streams ? 
s->streams[i] : NULL; AVCodecID codec_id = st->codec->codec_id; int64_t pos = avio_tell(pb); int version = 0; uint32_t tag = tag = MKTAG('m','p','4','a'); int sample_rate = st->codec->sample_rate; long asample_size = m_apacket_size; int audio_tag_size= (codec_id == AV_CODEC_ID_AAC ? 90 :80); avio_wb32(pb, audio_tag_size); /* size */ avio_wl32(pb, tag); // store it byteswapped avio_wb32(pb, 0); /* Reserved */ avio_wb16(pb, 0); /* Reserved */ avio_wb16(pb, 1); /* Data-reference index, XXX == 1 */ /* SoundDescription */ avio_wb16(pb, version); /* Version */ avio_wb16(pb, 0); /* Revision level */ avio_wb32(pb, 0); /* Reserved */ avio_wb16(pb, 2); avio_wb16(pb, 16); avio_wb16(pb, 0); avio_wb16(pb, 0); /* packet size (= 0) */ avio_wb16(pb, sample_rate ); avio_wb16(pb, 0); /* Reserved */ mov_write_esds_tag(pb, s, i); return audio_tag_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_stsd_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration) { AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVMediaType codec_type = st->codec->codec_type; int64_t pos = avio_tell(pb); int stsd_atom_size = codec_type==AVMEDIA_TYPE_VIDEO ? v_stsd_atom_size : a_stsd_atom_size; avio_wb32(pb, stsd_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"stsd"); avio_wb32(pb, 0); /* version & flags */ avio_wb32(pb, 1); /* entry count */ if (codec_type == AVMEDIA_TYPE_VIDEO) mov_write_video_tag(pb, s, i, duration); else if (codec_type == AVMEDIA_TYPE_AUDIO) mov_write_audio_tag(pb, s, i); else{ throw SWException ("Invalid codec type passed"); } return stsd_atom_size; //update_size(pb, pos); } /* Time to sample atom */ int MoovHeaderWriter::mov_write_stts_tag(AVIOContext *pb, AVFormatContext *s, int j, int64_t duration) { MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; MOVTrack *track = &(mov->tracks[j]); AVStream *st = s->nb_streams ? s->streams[j] : NULL; MOVStts *stts_entries; uint32_t entries = -1; int i; int track_entry = 0; AVMediaType codec_type = st->codec->codec_type; uint32_t stts_atom_size = codec_type == AVMEDIA_TYPE_VIDEO ? v_stts_atom_size : a_stts_atom_size; if (codec_type == AVMEDIA_TYPE_AUDIO && !track->audio_vbr) { stts_entries = (MOVStts *)av_malloc(sizeof(*stts_entries)); /* one entry */ if (!stts_entries) return AVERROR(ENOMEM); stts_entries[0].count = m_naframe; //track->sample_count; stts_entries[0].duration = st->codec->frame_size; entries = 1; } else { track_entry = 2; entries = 0; stts_entries = track_entry ? (MOVStts *)av_malloc_array(track_entry, sizeof(*stts_entries)) : /* worst case */ NULL; if (track_entry && !stts_entries) return AVERROR(ENOMEM); //we don't have key frames double fps = (double)m_options.framerate/(double)m_options.framerate_scale; stts_entries[entries].duration = st->time_base.den/fps; stts_entries[entries].count = m_nvframe; entries++; /* last one */ stts_entries[entries].count = 1; //track->sample_count; stts_entries[entries].duration = 0; entries++; /* last one */ } //atom_size = 16 + (entries * 8); avio_wb32(pb, stts_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"stts"); avio_wb32(pb, 0); /* version & flags */ avio_wb32(pb, entries); /* entry count */ for (i = 0; i < entries; i++) { avio_wb32(pb, stts_entries[i].count); avio_wb32(pb, stts_entries[i].duration); } av_free(stts_entries); return stts_atom_size; } /* Sync sample atom */ int MoovHeaderWriter::mov_write_stss_tag(AVIOContext *pb) { //MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; //MOVTrack *track = &(mov->tracks[i]); //AVStream *st = s->nb_streams ? 
s->streams[i] : NULL; int64_t curpos, entryPos; int i, index = 0; int64_t pos = avio_tell(pb); avio_wb32(pb, v_stss_atom_size); // size ffio_wfourcc(pb, (uint8_t *)"stss" ); // always MOV_SYNC_SAMPLE avio_wb32(pb, 0); // version & flags entryPos = avio_tell(pb); //4 bytes number of key frames = long unsigned total avio_wb32(pb, 1); // entry count- this is key_int number //4+ bytes key/intra frame location = long unsigned framing time // - key/intra frame location according to sample/framing time for (i = 0; i < 1/*track->entry*/; i++) { avio_wb32(pb, i + 1); index++; } //curpos = avio_tell(pb); //avio_seek(pb, entryPos, SEEK_SET); ////8+ bytes sample/framing to chunk/block box //avio_wb32(pb, index); // rewrite size //avio_seek(pb, curpos, SEEK_SET); return v_stss_atom_size; // update_size(pb, pos); } /* Sample to chunk atom */ int MoovHeaderWriter::mov_write_stsc_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int j){ AVStream *st = s->nb_streams ? s->streams[j] : NULL; int index = 0, oldval = -1, i; int64_t entryPos, curpos; int stsc_atom_size = st->codec->codec_type == AVMEDIA_TYPE_VIDEO? v_stsc_atom_size : a_stsc_atom_size; int64_t pos = avio_tell(pb); avio_wb32(pb, stsc_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"stsc"); avio_wb32(pb, 0); // version & flags entryPos = avio_tell(pb); long entryCount =st->codec->codec_type == AVMEDIA_TYPE_VIDEO ? ventryList.size() : aentryList.size(); avio_wb32(pb, entryCount); // entry count if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO){ list<MOVIentry >::iterator p = ventryList.begin(); while(p != ventryList.end()){ avio_wb32(pb, p->chunkNum); // first chunk = TODO 7 avio_wb32(pb, p->samples_in_chunk); // samples per chunk /TODO 39 avio_wb32(pb, 0x1); // sample description index index++; p++; } } else{ list<MOVIentry >::iterator p = aentryList.begin(); while(p != aentryList.end()){ avio_wb32(pb, p->chunkNum); // first chunk = TODO 7 avio_wb32(pb, p->samples_in_chunk); // samples per chunk /TODO 39 avio_wb32(pb, 0x1); // sample description index index++; p++; } } //curpos = avio_tell(pb); //avio_seek(pb, entryPos, SEEK_SET); //avio_wb32(pb, index); // rewrite size //avio_seek(pb, curpos, SEEK_SET); return stsc_atom_size; //update_size(pb, pos); } /* Sample size atom */ int MoovHeaderWriter::mov_write_stsz_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration) { int equalChunks = 1; int entries = 0, tst = -1, oldtst = -1; AVStream *st = s->nb_streams ? s->streams[i] : NULL; int stsz_atom_size = st->codec->codec_type == AVMEDIA_TYPE_VIDEO ? v_stsz_atom_size : a_stsz_atom_size; int64_t pos = avio_tell(pb); avio_wb32(pb, stsz_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"stsz"); avio_wb32(pb, 0); /* version & flags */ /* Sample size A 32-bit integer specifying the sample size. If all the samples are the same size, this field contains that size value. If this field is set to 0, then the samples have different sizes, and those sizes are stored in the sample size table. */ if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO){ unsigned int sample_size = m_apacket_size; avio_wb32(pb, sample_size); // sample size int sample_count = m_naframe; avio_wb32(pb, sample_count); // sample count } else{ avio_wb32(pb, 0); // sample size /* Number of entries A 32-bit integer containing the count of entries in the sample size table. 
*/ int sample_count = m_nvframe + 1; avio_wb32(pb, sample_count); // sample count //first entry avio_wb32(pb, m_vpacket_size + nal_units_size); //2d entry for (int j = 0; j < m_nvframe; j++) { avio_wb32(pb, m_vpacket_size); } } return stsz_atom_size; //update_size(pb, pos); } /* Chunk offset atom */ int MoovHeaderWriter::mov_write_stco_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i, int64_t duration) { //, int* mediaList) AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVMediaType mediaType = st->codec->codec_type; int stco_atom_size = mediaType == AVMEDIA_TYPE_VIDEO ? v_stco_atom_size: a_stco_atom_size; int64_t pos = avio_tell(pb); avio_wb32(pb, stco_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"stco"); avio_wb32(pb, 0); /* version & flags */ unsigned int chunkCount = mediaType == AVMEDIA_TYPE_VIDEO? vchunkList.size() : achunkList.size(); avio_wb32(pb, chunkCount); /* entry count */ if(mediaType == AVMEDIA_TYPE_VIDEO){ list<MOVIentry >::iterator p = vchunkList.begin(); while(p != vchunkList.end()){ avio_wb32(pb, m_dataStartPosition + p->pos); p++; } } else{ list<MOVIentry>::iterator p = achunkList.begin(); while(p != achunkList.end()){ avio_wb32(pb, m_dataStartPosition + p->pos); p++; } } return stco_atom_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_stbl_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i, int64_t duration) { //, int* mediaList) AVStream *st = s->nb_streams ? s->streams[i] : NULL; int64_t pos = avio_tell(pb); int ret; AVMediaType codec_type = st->codec->codec_type; int stbl_atom_size = codec_type == AVMEDIA_TYPE_VIDEO? v_stbl_atom_size :a_stbl_atom_size; avio_wb32(pb, stbl_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"stbl"); mov_write_stsd_tag(pb, s, i, duration); mov_write_stts_tag(pb, s, i, duration); if (codec_type == AVMEDIA_TYPE_VIDEO) mov_write_stss_tag(pb); mov_write_stsc_tag(pb, s, /*mov, track,*/ i); mov_write_stsz_tag(pb, s, i, duration); mov_write_stco_tag(pb, s, /*mov, track,*/ i, duration) ; return stbl_atom_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_minf_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i, int64_t duration){ //, int* mediaList) int64_t pos = avio_tell(pb); int ret; AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVMediaType codec_type = st->codec->codec_type; int minf_atom_size = codec_type == AVMEDIA_TYPE_VIDEO ? 
v_minf_atom_size : a_minf_atom_size; avio_wb32(pb, minf_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"minf"); if (codec_type == AVMEDIA_TYPE_VIDEO) mov_write_vmhd_tag(pb); else if (codec_type == AVMEDIA_TYPE_AUDIO) mov_write_smhd_tag(pb); mov_write_dinf_tag(pb); if ((ret = mov_write_stbl_tag(pb, s, /*mov, track,*/ i, duration)) < 0) return ret; return minf_atom_size; //update_size(pb, pos); } int MoovHeaderWriter::mov_write_mdia_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i, int64_t duration) { //, int* mediaList) int64_t pos = avio_tell(pb); int ret; int mdia_atom_size = s->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO?v_mdia_atom_size:a_mdia_atom_size; avio_wb32(pb, mdia_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"mdia"); mov_write_mdhd_tag(pb, s, i, duration); mov_write_hdlr_tag(pb, s, i); if ((ret = mov_write_minf_tag(pb, s,/* mov, track,*/ i, duration)) < 0) return ret; return v_mdia_atom_size; //update_size(pb, pos); } //int MoovHeaderWriter::mov_write_track_metadata(AVIOContext *pb, AVStream *st, const char *tag, const char *str) //{ // int64_t pos = avio_tell(pb); // AVDictionaryEntry *t = av_dict_get(st->metadata, str, NULL, 0); // if (!t || !utf8len((uint8_t *)t->value)) // return 0; // // int track_metadata_atom_size = 90; // avio_wb32(pb, track_metadata_atom_size); /* size */ // ffio_wfourcc(pb, (uint8_t *)tag); /* type */ // avio_write(pb, (unsigned char *)t->value, strlen(t->value)); /* UTF8 string value */ // return track_metadata_atom_size; //update_size(pb, pos); //} int MoovHeaderWriter::moov_avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer) { DynBuffer *d; int size; static const char padbuf[FF_INPUT_BUFFER_PADDING_SIZE] = {0}; int padding = 0; if (!s) { *pbuffer = NULL; return 0; } /* don't attempt to pad fixed-size packet buffers */ if (!s->max_packet_size) { avio_write(s, (const unsigned char *)padbuf, sizeof(padbuf)); padding = FF_INPUT_BUFFER_PADDING_SIZE; } avio_flush(s); d = (DynBuffer *)s->opaque; *pbuffer = d->buffer; size = d->size; av_free(d); av_free(s); return size - padding; } //int MoovHeaderWriter::mov_write_track_udta_tag(AVIOContext *pb, AVStream *st) //{ // AVIOContext *pb_buf; // int ret, size; // uint8_t *buf; // // /* if (!st || mov->fc->flags & AVFMT_FLAG_BITEXACT) // return 0;*/ // // ret = avio_open_dyn_buf(&pb_buf); // if (ret < 0) // return ret; // // //if (mov->mode & MODE_MP4) // mov_write_track_metadata(pb_buf, st, "name", "title"); // // // if ((size = moov_avio_close_dyn_buf(pb_buf, &buf)) > 0) { // //size =90; // avio_wb32(pb, size + 8); // ffio_wfourcc(pb, (uint8_t *)"udta"); // avio_write(pb, buf, size); // } // av_free(buf); // // return (size +8); //} int MoovHeaderWriter::mov_write_trak_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i) { //int* mediaList) //MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; //MOVTrack *track = &(mov->tracks[i]); AVStream *st = s->nb_streams ? s->streams[i] : NULL; int64_t duration = st->codec->codec_type == AVMEDIA_TYPE_VIDEO ? m_vduration : m_aduration; int64_t pos = avio_tell(pb); int entry = 0; long chunkCount= 0; int entry_backup = entry; //track->entry; int chunk_backup = 0; /*track->chunkCount;*/ int ret; /* If we want to have an empty moov, but some samples already have been * buffered (delay_moov), pretend that no samples have been written yet. 
*/ if (s->flags & FF_MOV_FLAG_EMPTY_MOOV) chunkCount = entry = 0; //8+ bytes track (element) box = long unsigned offset + long ASCII text string 'trak' int trak_size = st->codec->codec_type == AVMEDIA_TYPE_VIDEO ? v_trak_size : a_trak_size; avio_wb32(pb, trak_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"trak"); mov_write_tkhd_tag(pb, s, i, duration); mov_write_edts_tag(pb, s, i, duration); // PSP Movies and several other cases require edts box if ((ret = mov_write_mdia_tag(pb, s, /*mov, track,*/ i, duration)) < 0) return ret; //mov_write_track_udta_tag(pb, st); return trak_size; //update_size(pb, pos); } int MoovHeaderWriter:: mov_write_mvhd_tag(AVIOContext *pb, AVFormatContext *s, int64_t duration) { //MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; int max_track_id = 2, i; int64_t max_track_len = duration; //duration int version; /*for (i = 0; i < s->nb_streams; i++) { if (mov->tracks[i].entry > 0 && mov->tracks[i].timescale) { int64_t max_track_len_temp = av_rescale_rnd(mov->tracks[i].track_duration, MOV_TIMESCALE, mov->tracks[i].timescale, AV_ROUND_UP); if (max_track_len < max_track_len_temp) max_track_len = max_track_len_temp; if (max_track_id < mov->tracks[i].track_id) max_track_id = mov->tracks[i].track_id; } }*/ /* If using delay_moov, make sure the output is the same as if no * samples had been written yet. */ /* if (mov->flags & FF_MOV_FLAG_EMPTY_MOOV) { max_track_len = 0; max_track_id = 1; }*/ version = max_track_len < UINT32_MAX ? 0 : 1; avio_wb32(pb, version == 1 ? 120 : mvhd_atom_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"mvhd"); avio_w8(pb, version); avio_wb24(pb, 0); /* flags */ // if (version == 1) { //avio_wb64(pb, mov->time); // avio_wb64(pb, mov->time); // } else { // avio_wb32(pb, mov->time); /* creation time */ // avio_wb32(pb, mov->time); /* modification time */ // } avio_wb32(pb, 0); /* creation time */ avio_wb32(pb, 0); /* modification time */ avio_wb32(pb, MOV_TIMESCALE); (version == 1) ? 
avio_wb64(pb, max_track_len) : avio_wb32(pb, max_track_len); /* duration of longest track */ avio_wb32(pb, 0x00010000); /* reserved (preferred rate) 1.0 = normal */ avio_wb16(pb, 0x0100); /* reserved (preferred volume) 1.0 = normal */ avio_wb16(pb, 0); /* reserved */ avio_wb32(pb, 0); /* reserved */ avio_wb32(pb, 0); /* reserved */ /* Matrix structure */ write_matrix(pb, 1, 0, 0, 1, 0, 0); avio_wb32(pb, 0); /* reserved (preview time) */ avio_wb32(pb, 0); /* reserved (preview duration) */ avio_wb32(pb, 0); /* reserved (poster time) */ avio_wb32(pb, 0); /* reserved (selection time) */ avio_wb32(pb, 0); /* reserved (selection duration) */ avio_wb32(pb, 0); /* reserved (current time) */ avio_wb32(pb, max_track_id + 1); /* Next track id */ return 0x6c; } int MoovHeaderWriter::mov_write_itunes_hdlr_tag(AVIOContext *pb, AVFormatContext *s) { avio_wb32(pb, 33); /* size */ ffio_wfourcc(pb, (uint8_t *)"hdlr"); avio_wb32(pb, 0); avio_wb32(pb, 0); ffio_wfourcc(pb, (uint8_t *)"mdir"); ffio_wfourcc(pb, (uint8_t *)"appl"); avio_wb32(pb, 0); avio_wb32(pb, 0); avio_w8(pb, 0); return 33; } /* iTunes meta data list */ int MoovHeaderWriter::mov_write_ilst_tag(AVIOContext *pb, AVFormatContext *s) { MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; int64_t pos = avio_tell(pb); int ilst_tag_size = 45; avio_wb32(pb, ilst_tag_size); /* size */ ffio_wfourcc(pb, (uint8_t *)"ilst"); mov_write_string_tag(pb, "\251too", LIBAVFORMAT_IDENT, 0, 1); //mov_write_tmpo_tag(pb, s); return ilst_tag_size; // update_size(pb, pos); } /* iTunes meta data tag */ int MoovHeaderWriter::mov_write_meta_tag(AVIOContext *pb, AVFormatContext *s) { int size = 90; int64_t pos = avio_tell(pb); avio_wb32(pb, size); /* size */ ffio_wfourcc(pb, (uint8_t *)"meta"); avio_wb32(pb, 0); mov_write_itunes_hdlr_tag(pb, s); mov_write_ilst_tag(pb, s); //size = update_size(pb, pos); return size; } int MoovHeaderWriter::mov_write_udta_tag(AVIOContext *pb, AVFormatContext *s) { AVIOContext *pb_buf; int ret, size; uint8_t *buf; ret = avio_open_dyn_buf(&pb_buf); if (ret < 0) return ret; /* iTunes meta data */ mov_write_meta_tag(pb_buf,s); //} /* if (s->nb_chapters && !(mov->flags & FF_MOV_FLAG_DISABLE_CHPL)) mov_write_chpl_tag(pb_buf, s);*/ if ((size = avio_close_dyn_buf(pb_buf, &buf)) > 0) { avio_wb32(pb, size + 8); ffio_wfourcc(pb, (uint8_t *)"udta"); avio_write(pb, buf, size); } av_free(buf); return 0; } int MoovHeaderWriter::mov_write_moov_tag(AVIOContext *pb, AVFormatContext *s) { //int i; int64_t pos = avio_tell(pb); //8+ bytes movie (presentation) box = long unsigned offset + long ASCII text string 'moov' avio_wb32(pb, moov_atom_size); /* size placeholder*/ ffio_wfourcc(pb, (uint8_t *)"moov"); mov_write_mvhd_tag(pb, s, m_vduration); m_logger.info("mov_write_moov_tag. 
pos = %d \n", pos); for (int i = 0; i < s->nb_streams; i++) { int ret = mov_write_trak_tag(pb, s, /*mov, &(mov->tracks[i]),*/ i); if (ret < 0) return ret; } mov_write_udta_tag(pb, s); return 0; //update_size(pb, pos); } int MoovHeaderWriter::calculateDataStartPosition(uint8_t *vos_data, int vos_len){ int dataStartPosition = 0; ftyp_atom_size = 32; //calculate moov atom size mvhd_atom_size = 108; tkht_atom_size = 92; //trak v int v_stream_count = 1; int v_edls_atom_size = (8 /*edls */ + 8 + v_stream_count * 12); v_edts_atom_size = 8 + v_edls_atom_size; //36 v_stts_atom_size = 8 + 4 + 4 + 2 * 8; // 32 v_stss_atom_size = 20; //will check --- int v_entries = ventryList.size(); //2; v_stsc_atom_size = 8 + 4 + 4 + v_entries * 12; //40 int keyframe = 1; v_stsz_atom_size = 8 + 4 + 4 + 4 + keyframe * 4 + m_nvframe * 4; //1832 dinf_atom_size = 36; //TODO long vchunkCount = vchunkList.size(); // - 1; //(m_naframe == 0) ? ceil((double)m_data_size / (double) (1024 * 1024)) : m_nvframe; v_stco_atom_size = 8 + 4 + 4 + vchunkCount * 4; //(16 + 4*m_nventries) + (16 + 4*m_naentries); //avcc_tag_size = vos_len + 11; avcc_tag_size = calc_sps_size(vos_data, vos_len) + 23; video_tag_size = avcc_tag_size + 86; v_stsd_atom_size = video_tag_size + 16; //if (m_options.stretch_time != 1){ // avcc_tag_size = 46; // video_tag_size = 132; // v_stsd_atom_size = 148; //} //else{ // avcc_tag_size = 47; // video_tag_size = 133; // v_stsd_atom_size = 149; //} v_stbl_atom_size = 8 + v_stsd_atom_size /*stsd*/ + v_stts_atom_size + v_stss_atom_size + v_stsc_atom_size + v_stsz_atom_size + v_stco_atom_size; //3905 v_minf_atom_size = 8 + 20 /*vmhd*/ + dinf_atom_size /*dinf*/+ v_stbl_atom_size; hdlr_atom_size = 45; v_mdia_atom_size = 8 + 32 /*mvhd*/+ hdlr_atom_size /*hdr*/+ v_minf_atom_size; /*5044*/ v_trak_size = 8 + tkht_atom_size + v_edts_atom_size + v_mdia_atom_size; //4190 a_trak_size = 0; if (m_naframe > 0) { //trak a AVCodecID aCodecId = m_options.acodec.compare("aac") == 0 ? AV_CODEC_ID_AAC : AV_CODEC_ID_MP3; int a_stream_count = 1; int a_edls_atom_size = (8 /*edls */ + + 8 + a_stream_count * 12); a_edts_atom_size = 8 + a_edls_atom_size; //36 int a_entries = aentryList.size(); //1 a_stts_atom_size = 8 + 4 + 4 + 1 * 8; // 24 a_stsc_atom_size = 8 + 4 + 4 + a_entries * 12; //28 a_stsz_atom_size = 8 + 4 + 4 + 1 * 4; //20 - we have just 1 entry with the same sample size for audio int achunkCount = achunkList.size();// - 1; a_stco_atom_size = 8 + 4 + 4 + achunkCount * 4; //(16 + 4*m_nventries) + (16 + 4*m_naentries); a_stsd_atom_size = aCodecId == AV_CODEC_ID_AAC ? 
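/* hard-coded sample-description sizes: the AAC entry is presumably larger than the MP3 one
   because it also carries an esds decoder-config box (assumes fixed-size codec extradata) */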
106 : 96; a_stbl_atom_size = 8 + a_stsd_atom_size /*stsb*/ + a_stts_atom_size + a_stsc_atom_size + a_stsz_atom_size + a_stco_atom_size; a_minf_atom_size = 8 + 16 /*smhd*/ + dinf_atom_size /*dinf*/+ a_stbl_atom_size; a_mdia_atom_size = 8 + 32 /*mvhd*/+ hdlr_atom_size /*hdr*/+ a_minf_atom_size; a_trak_size = 8 + tkht_atom_size + a_edts_atom_size + a_mdia_atom_size; } //--track size //------------ int udta_atom_size = 98; moov_atom_size = 8 + mvhd_atom_size + v_trak_size + a_trak_size + udta_atom_size; ; int free_atom_size = 8; dataStartPosition = ftyp_atom_size + moov_atom_size + free_atom_size + 8 /*mdat atom name*/; return dataStartPosition; } int MoovHeaderWriter::fillDebugMediaList(std::list<int>& mediaList){ std::string line; std::ifstream infile("C:/media/test_padding/stream_ids.txt"); while (std::getline(infile, line)) { std::istringstream iss(line); int id; if (!(iss >> id)) { break; } // error mediaList.push_back(id); } return 0; } int MoovHeaderWriter::fillMediaList(int nSamples, CVideoWriterOptions &options, AVFormatContext *s, std::list<int>& mediaList){ int totalAudio =0; bool vhas_more_data = true; bool ahas_more_data = (options.abitrate > 0); int atotal_bytes_read = 0; int vtotal_bytes_read = 0; int samplesRead = 0; double aencoded_secs = 0; double vencoded_secs = 0; bool is_key = false; double asampleRate = 0.; double vsampleRate = 0.; int audioSampleCount = 0; int videoSampleCount = 0; //int aframe_size = 0; //double adelay = 0; for (int i= 0; i< s->nb_streams; i++){ AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVMediaType codec_type = st->codec->codec_type; if ( codec_type == AVMEDIA_TYPE_VIDEO){ //vsampleRate = (double)1 / (double) m_fps; vsampleRate = (double)options.framerate_scale/ (double) options.framerate; //(1/30 10000/305401 } else{ //aframe_size = st->codec->frame_size; asampleRate = (double)(st->codec->frame_size) / (double) st->codec->sample_rate; //1152/44100 //adelay = (double)(st->codec->delay)/(double)st->codec->sample_rate; } } /* * Encode Audio and video */ std::list<int> apacketList; double stretchFactor = options.stretch_time == 0 ? 1 : options.stretch_time; m_logger.info("Fill media list...\n"); while ( (vhas_more_data || ahas_more_data ) ) //&& samplesRead < nSamples { /* * Encode some audio */ while ( ahas_more_data || !apacketList.empty()) { atotal_bytes_read++; //aencoded_secs = ((double)atotal_bytes_read) * (options.stretch_time == 0 ? 
1 : options.stretch_time)/asampleRate; //options.achannels/writer->GetBytesPerSample()/options.asample_rate;
			//aencoded_secs = ((double)atotal_bytes_read) * asampleRate;// - adelay;
			aencoded_secs = (double)atotal_bytes_read * stretchFactor * asampleRate;

			m_logger.trace("samplesRead %d, (a) aencoded_secs=%f, vencoded_secs=%f\n", samplesRead, aencoded_secs, vencoded_secs );

			if ( options.vduration_ms > 0 && aencoded_secs*1000 >= options.vduration_ms )
			{
				// End of audio
				ahas_more_data = false;
			}

			if (atotal_bytes_read > m_naframe){
				//end of audio
				ahas_more_data = false;
			}

			if (ahas_more_data)
			{
				apacketList.push_back((int)AVMEDIA_TYPE_AUDIO); // push (a)
			}

			//write media list
			if (samplesRead > 0 && (vhas_more_data && !apacketList.empty())){
				//video already started
				int sampleType = apacketList.front();
				mediaList.push_back(sampleType);
				m_logger.trace("samplesRead %d, (a) \n", samplesRead);

				//delete packet from list
				apacketList.pop_front();
				samplesRead++;
				audioSampleCount++;
			}

			if ( aencoded_secs-AUDIO_ENCODE_AHEAD_SECS > vencoded_secs && vhas_more_data )
			{
				// audio passed video -> encode some video now
				// (Encode audio slightly before video - to avoid audio problems).
				break;
			}
		} // has audio?

		/*
		 * Encode some video
		 */
		while ( vhas_more_data )
		{
			vtotal_bytes_read++;
			vencoded_secs = ((double)vtotal_bytes_read)*vsampleRate;

			m_logger.trace("samplesRead %d (v) aencoded_secs=%f, vencoded_secs=%f\n", samplesRead, aencoded_secs, vencoded_secs );

			if ( options.vduration_ms > 0 && vencoded_secs*1000 >= options.vduration_ms )
			{
				// End of video
				vhas_more_data = false;
			}

			if (vtotal_bytes_read > m_nvframe){
				//end of video
				vhas_more_data = false;
			}

			mediaList.push_back((int)AVMEDIA_TYPE_VIDEO);
			m_logger.trace("samplesRead %d, (v) \n", samplesRead);
			samplesRead++;
			videoSampleCount++;

			if ( vencoded_secs > aencoded_secs-AUDIO_ENCODE_AHEAD_SECS && (ahas_more_data || !apacketList.empty()) )
			{
				// video passed audio -> encode some audio now
				// (Encode audio slightly before video - to avoid audio problems).
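/* Video has moved ahead of the audio timeline, so drop back to the audio loop;
   this keeps the predicted interleaving order consistent. */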
break; } } // vhas_more_data }// while ( vhas_more_data || ahas_more_data ) if (!apacketList.empty()){ while(apacketList.size() > 0){ //video alread started int sampleType = apacketList.front(); mediaList.push_back(sampleType); m_logger.trace("samplesRead %d, (a) \n", samplesRead); //delete packet from list apacketList.pop_front(); samplesRead++; audioSampleCount++; } } m_logger.trace("Counts = %d, %d \n", audioSampleCount, videoSampleCount); return 0; } void MoovHeaderWriter::build_chunks(MOVTrack *track, long samples_in_chunk, int offset ) { //build clusters int dataStartPosition = m_dataStartPosition; int oldval = -1; int samplesToWrite =samples_in_chunk; int chunkNum =1; track->cluster = (MOVIentry *)av_malloc(track->entry * sizeof(MOVIentry)); int chunkCount = track->chunkCount; for (int j = 0; j < track->entry; j++) { do { if (track->sample_count - samplesToWrite < 0){ samples_in_chunk = samples_in_chunk - (samplesToWrite - track->sample_count); } track->cluster[j].samples_in_chunk = samples_in_chunk; track->cluster[j].chunkNum =0; if (oldval != track->cluster[j].samples_in_chunk) { track->cluster[j].chunkNum = chunkNum; track->cluster[j].pos = dataStartPosition; track->cluster[j].size = samples_in_chunk * offset; track->cluster[j].entries = samples_in_chunk; } samplesToWrite += samples_in_chunk; chunkNum++; dataStartPosition += track->cluster[j].size; oldval= track->cluster[j].samples_in_chunk; chunkCount--; if (track->cluster[j].chunkNum != 0) break; } while (chunkCount > 0); } } int MoovHeaderWriter::buildSampleToChunkEntry(std::list<MOVIentry>& chunkList, std::list<MOVIentry>& entryList){ list<MOVIentry>::iterator it = chunkList.begin(); int oldval = -1; int count = 0; while(it != chunkList.end()){ int sample_in_chunk = it->samples_in_chunk; if (oldval != sample_in_chunk){ MOVIentry* entry = (MOVIentry *)av_malloc(sizeof(MOVIentry)); //one entry entry->samples_in_chunk=sample_in_chunk; entry->chunkNum = it->chunkNum; entryList.push_back(*entry); oldval = sample_in_chunk; } it++; count++; } return 0; } int MoovHeaderWriter::buildEntries(){ buildSampleToChunkEntry(vchunkList, ventryList); buildSampleToChunkEntry(achunkList, aentryList); return 0; } int MoovHeaderWriter::createNewEntry(int dataStartPosition, int packetSize, int chunkNum, std::list<MOVIentry> &list){ MOVIentry* entry = (MOVIentry *)av_malloc(sizeof(MOVIentry)); //one entry entry->samples_in_chunk=1; entry->pos = dataStartPosition; entry->size = packetSize; entry->chunkNum = chunkNum; list.push_back(*entry); return 0; } int MoovHeaderWriter::updateChunkEntry(int dataStartPosition, int packetSize, std::list<MOVIentry> &list) { //take chunk from list MOVIentry entry = list.back(); //remove list.pop_back(); entry.samples_in_chunk ++; entry.size += packetSize; //return to list list.push_back(entry); return 0; } int MoovHeaderWriter::buildChunkOffsets(std::list<int>& mediaList) { //(int *mediaList, int mediaListSize){ int dataStartPosition = 0; int vchunkNumber =1; int achunkNumber =1; int oldval = -1; list<int>::iterator it = mediaList.begin(); while (it != mediaList.end()) { int packet = *it; //mediaList.front(); //[i]; int offset = (packet == (int)AVMEDIA_TYPE_VIDEO)? m_vpacket_size : m_apacket_size; if (oldval != packet){ if (packet == AVMEDIA_TYPE_VIDEO){ createNewEntry(dataStartPosition, offset, vchunkNumber, vchunkList); dataStartPosition += (vchunkNumber == 1) ? 
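/* the first video chunk is assumed to also carry the in-band NAL header bytes (nal_units_size), hence the larger step */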
offset + nal_units_size : offset; vchunkNumber++; } else{ createNewEntry(dataStartPosition, offset, achunkNumber, achunkList); dataStartPosition += offset; achunkNumber++; } oldval = packet; } else{ //update existing entry if (packet == AVMEDIA_TYPE_VIDEO) { updateChunkEntry(dataStartPosition,offset, vchunkList); } else { updateChunkEntry(dataStartPosition,offset, achunkList); } dataStartPosition += offset; } it++; } return 0; } int MoovHeaderWriter::write_header(AVFormatContext *s, AVIOContext* ioContext, CVideoWriterOptions &options, std::list<int>& mediaList){ m_logger.info("Start write move header. pos = 0 \n"); int ret = 0; m_options = options; //m_fps = options.fps; nal_units_size = 1; //int videoDur = options.vduration_ms; m_frame_number = options.frame_number; //videoDur * m_fps / 1000; //453; int track_timescale = 0; //m_mediaListSize = mediaListSize; MOVMuxContext *mov = (MOVMuxContext *)s->priv_data; int vos_len = 0; uint8_t *vos_data; for (int i= 0; i< s->nb_streams; i++){ AVStream *st = s->nb_streams ? s->streams[i] : NULL; AVMediaType codec_type = st->codec->codec_type; if ( codec_type == AVMEDIA_TYPE_VIDEO){ vos_len = st->codec->extradata_size; vos_data = st->codec->extradata; m_nvframe = m_frame_number - 1; //double vduration = (double)(1000 /*ms*/ * m_nvframe) / (double)m_fps; m_vduration = options.vtrack_duration; //ceil(vduration); m_vpacket_size = options.vpacket_size; //m_nventries = m_nvframe; } else{ m_naframe = options.aPacketsNumber; //double aduration = (double) (1000 /*ms*/ * m_naframe * (st->codec->frame_size)) / (double)st->codec->sample_rate; m_aduration = options.atrack_duration; ////ceil(aduration); //15047; m_apacket_size = options.apacket_size; //m_naentries = (m_naframe > m_nvframe) ? m_nvframe -1 : m_naframe; } } //for mdat atom - mdat atom size m_data_size = 8 + m_naframe * m_apacket_size + m_nvframe * m_vpacket_size + (1 * m_vpacket_size + 1); // ??? what is 1 ??? //fill order in which packets will be written ret = fillMediaList(m_frame_number, options, s, mediaList); //ret = fillDebugMediaList(mediaList); //build chunk offset ret = buildChunkOffsets(mediaList); //, mediaListSize); //build entries for sample-to-chunk table ret = buildEntries(); //calculate data position m_dataStartPosition = calculateDataStartPosition(vos_data, vos_len); long offset = ftyp_atom_size; //ftyp /*ftyp*/ + 8 /*free*/; //AVIOContext *pb = s->pb; //set to the end of ftyp atom //avio_seek(ioContext/*s->pb*/, offset, 0); int pos = mov_write_ftyp_tag(ioContext); m_logger.info("mov_write_ftyp_tag written. \n"); //write moov atom pos = mov_write_moov_tag(ioContext/*s->pb*/, s/*, mov*/); m_logger.info("mov_write_moov_tag written. \n"); pos = mov_write_mdat_tag(ioContext/*s->pb*/); m_logger.info("Mdat atom written. 
Pos = %d", pos); avio_flush(ioContext/*s->pb*/); return 0; } int MoovHeaderWriter::debug_header(AVIOContext* pb){ //this is just for debugging int offset = 32 /*offset*/ + 8 /*pos*/; FILE *headerFile = fopen("C:/media/test_padding/moov_header.mp4", "rb"); if (!headerFile) { perror("Failed to open: "); return 1; } fseek(headerFile, 0, SEEK_END); long moov_atom_size = ftell(headerFile) -offset; fseek(headerFile, offset, SEEK_SET); unsigned char *moov_atom = (unsigned char *)malloc(moov_atom_size); if (fread(moov_atom, moov_atom_size, 1, headerFile) != 1) { perror("Read failed: "); return 1; } fclose(headerFile); avio_write(pb, (uint8_t *)moov_atom, moov_atom_size); } <file_sep>/Fmod/Effects/CompressionDsp.h #ifndef __COMPRESSION_DSP_H__ #define __COMPRESSION_DSP_H__ #include "DspEffect.h" #pragma once class CompressionDsp : public DspEffect { private: FMOD::DSP *dsp; public: void create(FMOD::System *fmodSystem) { fmodSystem->createDSPByType(FMOD_DSP_TYPE_COMPRESSOR, &dsp); dspList.push_back(dsp); } void setThreshhold(float factor) { dsp->setParameter(FMOD_DSP_COMPRESSOR_THRESHOLD, factor); } void setAttack(float factor) { dsp->setParameter(FMOD_DSP_COMPRESSOR_ATTACK, factor); } void setRelease(float factor) { dsp->setParameter(FMOD_DSP_COMPRESSOR_RELEASE, factor); } void setGainMakeUp(float factor) { dsp->setParameter(FMOD_DSP_COMPRESSOR_GAINMAKEUP, factor); } }; #endif <file_sep>/NewEncoder/VideoEncoder/VideoEncoder.h #pragma once //#include <windows.h> #include <string> #include <vector> #include "Engine.h" #include "VideoWriter.h" #include "ProgressTracker.h" #include "VSVideo.h" using namespace std; class VideoEncoderInputParameters; class VideoEncoder : public Engine { public: void executeEngine(int argc, char **argv); static void init(map<string, void *> *commonStorage); // Initialize libs (ffmpeg) private: ProgressTracker *progressTracker; bool ReadNumbersList( std::string s, std::vector<int> *nums ); bool parse_args(int argc, char* argv[], VideoEncoderInputParameters & params); CVideoWriter *CreateWriter(VideoEncoderInputParameters & params, ProgressTracker *progressTracker); void ms2indices( const std::vector<int> &ms, std::vector<int> &inds, int framerate ); void cleanup(VideoStream::CInputVideoStream *inputStream, VideoStream::CInputAudioStream *inputAStream, CAudioFrame& afr, CVideoWriter *writer); }; <file_sep>/VideoStream/VideoStream/AVIStdFileMedia.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSAVIStdFileMedia.h" #include "VSFrame.h" #else #include "VSAVIStdFileMedia.h" #include "VSFrame.h" #endif using namespace VideoStream; const DWORD huffyuv = 1970890344; const DWORD divx = 1684633208; const DWORD uncompressed = 541215044; const DWORD MSRLE = 1701605997; const DWORD lagarith = 1936154988; const DWORD fccHandler = divx; /********************/ /* CInputAVIStdFile */ /********************/ //CInputAVIStdFile::CInputAVIStdFile (LPCTSTR location) //{ // AVIFileInit(); // _frameNum = 0; // // // Open AVI file // HRESULT hr; // do { // hr = AVIFileOpen (&_pfile, location, OF_READ | OF_SHARE_DENY_NONE, NULL); // } while (hr != S_OK); // // // Get compressed stream // hr = AVIFileGetStream (_pfile, &_pavi, streamtypeVIDEO, 0); // //} // //CInputAVIStdFile::~CInputAVIStdFile() //{ // HRESULT hr = AVIStreamClose (_pavi); // hr = AVIFileClose (_pfile); // AVIFileExit(); //} // //const VIDEOINFO &CInputAVIStdFile::GetVideoInfo() const //{ // static VIDEOINFO vi; // return vi; //} // //CFramePtr CInputAVIStdFile::GetNextFrame() //{ // CFrame *pFrame = new CFrame(); // 
pFrame->Allocate (640,400,640); // HRESULT hr = AVIStreamRead (_pavi, _frameNum++, 1, pFrame->GetData(), // pFrame->GetDataSize(), NULL, NULL); // return pFrame; //} // /*********************/ /* COutputAVIStdFile */ /*********************/ /*static PAVISTREAM OpenAVIFile (LPCTSTR location, const VIDEOINFO &vi, bool create) { PAVIFILE pfile; PAVISTREAM pavi; PAVISTREAM pavi_comp; int frameRate = vi.frameRate; int width = vi.width; int height = vi.height; int frameSize = CFrame::GetFrameSize (vi.width, vi.height, vi.bitCount); // Open AVI file HRESULT hr = AVIFileOpen (&pfile, location, OF_WRITE | (create ? OF_CREATE : 0) | OF_SHARE_DENY_NONE, NULL); // Create stream AVISTREAMINFO strhdr; memset(&strhdr, 0, sizeof(strhdr)); strhdr.fccType = streamtypeVIDEO;// stream type strhdr.fccHandler = 0; strhdr.dwScale = 1; strhdr.dwLength = 0; strhdr.dwQuality = -1; strhdr.dwRate = frameRate; strhdr.dwSuggestedBufferSize = frameSize; SetRect(&strhdr.rcFrame, 0, 0, width, height); hr = AVIFileCreateStream (pfile, &pavi, &strhdr); hr = AVIFileRelease(pfile); // Create compressed stream AVICOMPRESSOPTIONS opts; memset(&opts, 0, sizeof(opts)); opts.fccHandler = fccHandler; opts.dwFlags = 8; hr = AVIMakeCompressedStream (&pavi_comp, pavi, &opts, NULL); hr = AVIStreamRelease (pavi); // Set stream format BITMAPINFO bmpInfo; BITMAPINFOHEADER *pbmpHeader = &bmpInfo.bmiHeader; pbmpHeader->biSize = sizeof (BITMAPINFOHEADER); pbmpHeader->biWidth = width; pbmpHeader->biHeight = height; pbmpHeader->biPlanes = 1; pbmpHeader->biBitCount = vi.bitCount; pbmpHeader->biCompression = BI_RGB; pbmpHeader->biSizeImage = 0; pbmpHeader->biXPelsPerMeter = 0; pbmpHeader->biYPelsPerMeter = 0; pbmpHeader->biClrUsed = 0; pbmpHeader->biClrImportant = 0; hr = AVIStreamSetFormat (pavi_comp, 0, &bmpInfo, sizeof (BITMAPINFO)); return pavi_comp; } COutputAVIStdFile::COutputAVIStdFile() : CBaseOutputVideoStream() { AVIFileInit(); _frameNum = 0; } bool COutputAVIStdFile::Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams) { _pavi = OpenAVIFile (location, vi, true); return (_pavi == NULL ? false : true); } void COutputAVIStdFile::Close() { if (_pavi != NULL) { HRESULT hr = AVIStreamClose (_pavi); _pavi = NULL; } } COutputAVIStdFile::~COutputAVIStdFile() { Close(); AVIFileExit(); } bool COutputAVIStdFile::WriteFrame (CFramePtr framePtr) { // The frame's data is cast to an (non-const) unsigned char * because that's what the function expects. // In fact it does not alter the data. HRESULT hr = AVIStreamWrite (_pavi, _frameNum++, 1, (unsigned char *)framePtr.GetData(), framePtr.GetDataSize(), AVIIF_KEYFRAME, NULL, NULL); return (hr == S_OK ? 
true : false); } ////////////////////// // CAVIStdFileMedia // ////////////////////// IInputVideoStream *CAVIStdFileMedia::CreateInputVideoStream() { return NULL; } IOutputVideoStream *CAVIStdFileMedia::CreateOutputVideoStream() { return new COutputAVIStdFile(); }*/ <file_sep>/VideoStream/VideoStream/CMakeLists.txt file(GLOB VideoStream_SRC "*.cpp" ) add_library(VideoStream ${VideoStream_SRC}) if(UNIX) target_include_directories(VideoStream PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/ ${CMAKE_CURRENT_SOURCE_DIR}/ ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/Utils/ ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/StackWalker/include/ ${CMAKE_CURRENT_SOURCE_DIR}/../../third_party/include/ffmpeg/include) else() target_include_directories(VideoStream PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/ ${CMAKE_CURRENT_SOURCE_DIR}/ ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/Utils/ ${CMAKE_CURRENT_SOURCE_DIR}/../../utils/StackWalker/include/ ${CMAKE_CURRENT_SOURCE_DIR}/../../ffmpeg/include) endif() #target_link_libraries(CliManager LINK_PUBLIC StackWalker) <file_sep>/VideoStream/VideoStream/ThreadInput.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSThreadInput.h" using namespace VideoStream; unsigned long __stdcall CThreadInput::ThreadEntryProc(void *pThis) { return ((CThreadInput *)pThis)->ThreadRun(); } CThreadInput::CThreadInput (IInputVideoStream *pStream) : _pStream (pStream) { const int maxQueueSize = 30; // limit the queue size to prevent big memory consumption InitializeCriticalSection (&_mutex); _framesDone = CreateEvent (0, FALSE, FALSE, 0); _frameReady = CreateEvent (0, TRUE, FALSE, 0); _readEnd = CreateEvent (0, FALSE, FALSE, 0); _queueBuffer = CreateSemaphore (0, maxQueueSize, maxQueueSize, 0); _shouldStop = false; _eov = false; } CThreadInput::~CThreadInput() { Close(); DeleteCriticalSection(&_mutex); CloseHandle(_framesDone); CloseHandle(_frameReady); CloseHandle(_queueBuffer); delete _pStream; } void CThreadInput::RunThread() { _thread = CreateThread( NULL, // LPSECURITY_ATTRIBUTES lpThreadAttributes, 0, // SIZE_T dwStackSize, ThreadEntryProc, // LPTHREAD_START_ROUTINE lpStartAddress, this, // LPVOID lpParameter, 0, // dwCreationFlags &_threadId); } bool CThreadInput::Open (LPCTSTR location, MediaLocationParams & mlParams, int streamId) { return _pStream->Open (location, mlParams, streamId); } bool CThreadInput::FastForward (unsigned int frames) { for (unsigned int i=0; i < frames; i++) { CFramePtr framePtr = GetNextFrame(); if (framePtr.isNull()) return false; } return true; } CFramePtr CThreadInput::GetNextFrame() { EnterCriticalSection(&_mutex); if (_queue.empty()) { if (_eov == true) { LeaveCriticalSection(&_mutex); return CFramePtr(); } ResetEvent(_frameReady); LeaveCriticalSection(&_mutex); WaitForSingleObject(_frameReady, INFINITE); EnterCriticalSection(&_mutex); if (_queue.empty() && _eov == true) { LeaveCriticalSection(&_mutex); return CFramePtr(); } } CFramePtr framePtr = _queue.front(); _queue.pop(); LeaveCriticalSection(&_mutex); ReleaseSemaphore (_queueBuffer, 1, NULL); return framePtr; } void CThreadInput::Close() { if (_shouldStop == false) { _shouldStop = true; SetEvent (_readEnd); ReleaseSemaphore (_queueBuffer, 1, NULL); WaitForSingleObject(_framesDone, INFINITE); _pStream->Close(); } } unsigned long CThreadInput::ThreadRun() { while (!_shouldStop) { // Wait until the queue is not full WaitForSingleObject(_queueBuffer, INFINITE); if (_shouldStop) break; // Wait for frame to be ready if (_pFrameCallback != NULL) { if (!(*_pFrameCallback)(_pContext, _readEnd)) { _eov = true; 
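/* callback reported no more frames: mark end-of-video and wake any reader blocked in GetNextFrame() */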
SetEvent(_frameReady); break; } } // Read frame CFramePtr framePtr = _pStream->GetNextFrame(); if (framePtr.isNull()) { _eov = true; SetEvent(_frameReady); break; } // Add frame to queue EnterCriticalSection(&_mutex); _queue.push(framePtr); LeaveCriticalSection(&_mutex); // Notify main thread of frame SetEvent(_frameReady); } SetEvent(_framesDone); return 0; } #else #include "VSThreadInput.h" #include <stdio.h> #include <errno.h> using namespace VideoStream; void* CThreadInput::ThreadEntryProc(void *pThis) { CThreadInput* input = (CThreadInput*)pThis; // input->_threadId = pthread_self(); //*g_threadId = pthread_self(); //return ((CThreadInput *)pThis)->ThreadRun(); input->ThreadRun(); } CThreadInput::CThreadInput(IInputVideoStream *pStream) : _pStream(pStream) { const int maxQueueSize = 30; // limit the queue size to prevent big memory consumption InitializeCriticalSection(&_mutex); _framesDone = CreateEvent(0, FALSE, FALSE, 0); _frameReady = CreateEvent(0, TRUE, FALSE, 0); _readEnd = CreateEvent(0, FALSE, FALSE, 0); //_queueBuffer = CreateSemaphore (0, maxQueueSize, maxQueueSize, 0); sem_init(&_queueBuffer, 0, maxQueueSize); _shouldStop = false; _eov = false; } CThreadInput::~CThreadInput() { Close(); DeleteCriticalSection(&_mutex); CloseHandle(_framesDone); CloseHandle(_frameReady); //CloseHandle(_queueBuffer); sem_destroy(&_queueBuffer); delete _pStream; } void CThreadInput::RunThread() { pthread_create(&_thread, NULL, CThreadInput::ThreadEntryProc, this); // _thread = CreateThread( // NULL, // LPSECURITY_ATTRIBUTES lpThreadAttributes, // 0, // SIZE_T dwStackSize, // ThreadEntryProc, // LPTHREAD_START_ROUTINE lpStartAddress, // this, // LPVOID lpParameter, // 0, // dwCreationFlags // &_threadId); } bool CThreadInput::Open(LPCTSTR location, MediaLocationParams & mlParams, int streamId) { return _pStream->Open(location, mlParams, streamId); } bool CThreadInput::FastForward(unsigned int frames) { for (unsigned int i = 0; i < frames; i++) { CFramePtr framePtr = GetNextFrame(); if (framePtr.isNull()) return false; } return true; } CFramePtr CThreadInput::GetNextFrame() { EnterCriticalSection(&_mutex); if (_queue.empty()) { if (_eov == true) { LeaveCriticalSection(&_mutex); return CFramePtr(); } ResetEvent(_frameReady); LeaveCriticalSection(&_mutex); WaitForSingleObject(_frameReady, INFINITE); EnterCriticalSection(&_mutex); if (_queue.empty() && _eov == true) { LeaveCriticalSection(&_mutex); return CFramePtr(); } } CFramePtr framePtr = _queue.front(); _queue.pop(); LeaveCriticalSection(&_mutex); //ReleaseSemaphore (_queueBuffer, 1, NULL); sem_post(&_queueBuffer); return framePtr; } void CThreadInput::Close() { if (_shouldStop == false) { _shouldStop = true; SetEvent(_readEnd); //ReleaseSemaphore (_queueBuffer, 1, NULL); fprintf(stderr, "CThreadInput::Close.\n"); sem_post(&_queueBuffer); WaitForSingleObject(_framesDone, INFINITE); _pStream->Close(); } } unsigned long CThreadInput::ThreadRun() { while (!_shouldStop) { // Wait until the queue is not full int res = 0; fprintf(stderr, "Waiting Wait until the queue is not full.\n"); while (res = sem_wait(&_queueBuffer) == -1 && errno == EINTR) { fprintf(stderr, "Waiting Wait until the queue is not full.\n"); continue; } fprintf(stderr, "Waiting Wait until the queue is not full END.\n"); //WaitForSingleObject(_queueBuffer, INFINITE); if (_shouldStop) break; // Wait for frame to be ready if (_pFrameCallback != NULL) { if (!(*_pFrameCallback)(_pContext, _readEnd)) { _eov = true; SetEvent(_frameReady); break; } } // Read frame CFramePtr framePtr = 
_pStream->GetNextFrame(); if (framePtr.isNull()) { _eov = true; SetEvent(_frameReady); break; } // Add frame to queue EnterCriticalSection(&_mutex); _queue.push(framePtr); LeaveCriticalSection(&_mutex); // Notify main thread of frame SetEvent(_frameReady); } SetEvent(_framesDone); return 0; } #endif <file_sep>/RenderingManager/RenderingManager/DebugLevel.cpp #include "DebugLevel.h" bool debugLevelToString(DebugLevel dbgLvl, string & o_str) { switch(dbgLvl) { case DebugLevel_TRACE: o_str = "TRACE"; return true; case DebugLevel_INFO: o_str = "INFO"; return true; case DebugLevel_WARNING: o_str = "WARNING"; return true; case DebugLevel_ERROR: o_str = "ERROR"; return true; default: o_str = ""; return false; } } bool debugLevelFromString(const string & str, DebugLevel & o_dbgLvl) { #ifdef WIN32 if (!_stricmp(str.c_str(), "TRACE")) { o_dbgLvl = DebugLevel_TRACE; return true; } else if (!_stricmp(str.c_str(), "INFO")) { o_dbgLvl = DebugLevel_INFO; return true; } else if (!_stricmp(str.c_str(), "WARNING")) { o_dbgLvl = DebugLevel_WARNING; return true; } else if (!_stricmp(str.c_str(), "ERROR")) { o_dbgLvl = DebugLevel_ERROR; return true; } else { return false; } #else if (!strcasecmp(str.c_str(), "TRACE")) { o_dbgLvl = DebugLevel_TRACE; return true; } else if (!strcasecmp(str.c_str(), "INFO")) { o_dbgLvl = DebugLevel_INFO; return true; } else if (!strcasecmp(str.c_str(), "WARNING")) { o_dbgLvl = DebugLevel_WARNING; return true; } else if (!strcasecmp(str.c_str(), "ERROR")) { o_dbgLvl = DebugLevel_ERROR; return true; } else { return false; } #endif } <file_sep>/Fmod/Fmod.cpp // Fmod.cpp : Defines the entry point for the console application. // #include "AudioMixer.h" std::vector<string> getArgs(char* argv[], int argc) { std::vector<string> args; for (int i = 1; i < argc; i++) { args.push_back(argv[i]); } return args; } bool parseArgs(vector<string> args, map<string, string> &retMap) { bool foundInput = false; bool foundOutput = false; bool foundLogFile = false; for (unsigned int i = 0; i < args.size(); i++) { if (args[i].substr(0, 1) != "-") { return false; } string var = args[i].substr(1); if (var == "input") { i++; if (i >= args.size()) { return false; } foundInput = true; retMap.insert(pair<string, string> (var, args[i])); } else if (var == "output") { i++; if (i >= args.size()) { return false; } foundOutput = true; retMap.insert(pair<string, string> (var, args[i])); } else if (var == "log") { i++; if (i >= args.size()) { return false; } foundLogFile = true; retMap.insert(pair<string, string> (var, args[i])); } } if (!foundOutput || !foundInput || !foundLogFile) { return false; } return true; } int main(int argc, char** argv) { std::vector<string> args = getArgs(argv, argc); std::map<string, string> argMap; bool rc = parseArgs(args, argMap); if (!rc) { printf( "Usage: Fmod -input DVG_32344Mixer.xml -output DVG_32345.wav -log logFilename.log\n"); return -1; } AudioMixer *mixer = new AudioMixer(); std::string input = argMap["input"]; std::string output = argMap["output"]; std::string log = argMap["log"]; mixer->config(input, output, log); mixer->runService(); delete mixer; } <file_sep>/VideoStream/VideoStream/VSMedia.h #pragma once #include "VSFrame.h" #ifdef WIN32 #include <tchar.h> #include <windows.h> #else #include "WindowDefinitions.h" #endif #include <string> using namespace std; namespace VideoStream { class MediaLocationParams { public: MediaLocationParams() : timeoutMS(600000 /* 10 minutes */), bps(-1), bufSize(-1) {} public: DWORD timeoutMS; string codecstr; int bps; int bufSize; }; class 
IInputVideoStream { public: virtual bool Open (LPCTSTR location, MediaLocationParams & mlParams, int streamId) =0; virtual const VIDEOINFO &GetVideoInfo() const =0; virtual bool FastForward (unsigned int frames) =0; virtual CFramePtr GetNextFrame() =0; virtual void Close() =0; // By default the CInputVideoStreamImpl class handles a stream and a sync object. // In some cases, we want the stream (this class) to handle synchronizing issues and we want to bypass the sync object. virtual bool BypassSyncObject() {return false;} // Asynchronous input virtual bool IsAsync() =0; // if true then there is no need to wait for frame before calling GetNextFrame/FastForward typedef bool (*waitForFrameCallback)(void *pContext, HANDLE readEndEventHandle); virtual void SetWaitForFrameCallback (waitForFrameCallback callback, void *pContext) =0; // only relevant when stream is async virtual void RunThread() =0; virtual ~IInputVideoStream() {}; }; class IOutputVideoStream { public: virtual bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams, int streamId) =0; virtual bool WriteFrame (CFramePtr framePtr) =0; virtual bool DeleteUpToFrame (int frameNum) =0; virtual void Close() =0; // By default the COutputVideoStreamImpl class handles a stream and a sync object. // In some cases, we want the stream (this class) to handle synchronizing issues and we want to bypass the sync object. virtual bool BypassSyncObject() {return false;} // Asynchronous output virtual bool IsAsync() =0; // if true then WriteFrame returns immediately and frame is written in a separate thread typedef void (*frameWrittenCallback)(void *pContext); virtual void SetFrameWrittenCallback (frameWrittenCallback callback, void *pContext) =0; // only relevant when stream is async // Persistency virtual bool IsPersistent() =0; virtual ~IOutputVideoStream() {}; }; class IInputAudioStream { public: virtual bool Open (LPCTSTR location, int streamId) =0; virtual void Close() =0; virtual ~IInputAudioStream() {}; }; class IOutputAudioStream { public: virtual bool Open (LPCTSTR location, int streamId) =0; virtual void Close() =0; virtual ~IOutputAudioStream() {}; }; class IMedia { public: virtual IInputVideoStream *CreateInputVideoStream() =0; virtual IOutputVideoStream *CreateOutputVideoStream() =0; virtual ~IMedia() {}; }; class CMediaFactory { public: static IMedia *GetMedia (LPCTSTR location); static void ParseMediaLocation (LPCTSTR location, std::string *bareLocation, MediaLocationParams *mparms);// evev - move to a different class? private: static LPCTSTR RemoveMediaPrefix (LPCTSTR location); static void ParseMediaLocationParams( MediaLocationParams *mparms, std::string &locationParams ); // evev - move to a different class? }; }<file_sep>/VideoStream/VideoStream/VSWAVFileMedia.h #pragma once #include "VSBaseMedia.h" #include "VSCompression.h" #include "VSISync.h" #include <list> #ifdef WIN32 #include <windows.h> #include <vfw.h> #endif namespace VideoStream { // CInputWAVFile class class CInputWAVFile { private: FILE *_fp; IInputSync *_pSync; MediaLocationParams *_locationParams; std::string _location; typedef struct { DWORD rID; // 'RIFF' int rLen; DWORD wID; // 'WAVE' DWORD fId; // 'fmt ' int pcm_header_len; // varies... 
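/* the remaining fields follow the standard PCM 'fmt ' chunk layout */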
short int wFormatTag; short int nChannels; // 1,2 for stereo data is (l,r) pairs int nSamplesPerSec; int nAvgBytesPerSec; short int nBlockAlign; short int nBitsPerSample; } WAV_HDR; // header of chunk typedef struct { DWORD dId; // 'data' or 'fact' int dLen; } CHUNK_HDR; AUDIOINFO _audioInfo; WAV_HDR _wavHdr; bool ReadWavHeader( bool throwOnErr ); bool ReadUpToData( bool throwOnErr ); public: CInputWAVFile(MediaLocationParams *locationParams ); void SetSync(IInputSync *pSync )// WAV reader needs sync for events (end/kill) { _pSync = pSync; } ~CInputWAVFile(); bool Open (LPCTSTR location, DWORD timeoutMS, bool throwOnErr=true); const AUDIOINFO &GetAudioInfo() const { return _audioInfo; } // Read as many bytes as available (wait for at least one byte) int Read( unsigned char *bfr, int bfr_size, bool *eof ); // Read bytes to fill buffer. int ReadFullBfr( unsigned char *bfr, int bfr_size, bool *eof); void Close(); }; } <file_sep>/NewEncoder/VideoEncoder/VideoEncoder.cpp // VideoEncoder.cpp : Defines the entry point for the console application. // // FFmpegTest.cpp : Defines the entry point for the console application. // #include <stdio.h> #include <stdlib.h> #include <memory.h> #include <string> #include <vector> #include <algorithm> #include <sys/timeb.h> #ifndef WIN32 #include <ctime> #include <unistd.h> #include "WindowDefinitions.h" #include <sys/time.h> #define stricmp strcasecmp #endif using std::string; #include <math.h> #include "SWException.h" #pragma warning (disable : 4996 ) // Encode audio slightly before video (to avoid audio problems). #ifndef AUDIO_ENCODE_AHEAD_SECS #define AUDIO_ENCODE_AHEAD_SECS 0.5 #endif /* * Copyright (c) 2003 <NAME> * Copyright (c) 2007 <NAME> * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include <stdlib.h> #include <stdio.h> #ifdef WIN32 #include <tchar.h> #endif extern "C" { #define __STDC_CONSTANT_MACROS #include "libavformat/avformat.h" } #include "FFMpegWriter.h" //#include "WMVWriter.h" //#include "VFWWriter.h" //#include "WAVReader.h" //#include "VSVideo.h" #include "VideoEncoder.h" class VideoEncoderInputParameters { // Everything is public public: char *invideo_url; char *inaudio_file; int vbitrate, abitrate; char *vcodec; char *progressFileName; char *jobId; double startDelta; char *acodec; char *outname; char *profile; bool vno_rc; int vqf; int vcompress_factor; int vqual2perf_tradeoff; // 100 - highest quality, 0 - Best performance. int vkeyframe_interval; // # of frames between keyframes. int vrcbufms; std::string vconfload; // File to write video codec configuration from std::string vconfstore; // File to write video codec configuration to float vconstantRateFactor; // Rate factor: the smaller it is, the higher the quality. // -1=none (use bitrate), otherwise, bitrate is ignored. 
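/* (as a rule of thumb, x264 CRF values around 18-28 are typical; nothing here validates the range) */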
std::string vprofile; // baseline/main/high (default=baseline) int vlevel; // level (2 digits without period). default=30 int passnum; std::string passlogfile; // Requested key frames std::vector<int> vkeyfrms; // total bitrate to be written in header (to overcome a situation where // resulting bitrate is lower than requested) int br_inhdr; int vlookahead_frames; // Number of frames the encoder will look ahead before encoding (higher value -> higher latency) float vpsy_rd; // Strength of psy. Rate-Distortion for x264 (0 - fastest, lowest quality, >0 - increase) int max_durdiff; // Maximum difference between audio duration and video duration (ms) int max_dur; // Maximum duration (ms) - everything else will be clipped int playlist_segment_dur_ms; float playlist_first_segment_dur_ms; int video_dur; bool fragmented; double stretch_time; int startNumber; int renderedChunk; double notificationInterval; int vpacket_size; int apacket_size; int aPacketsNumber; bool padded; std::string h264Impl; public: VideoEncoderInputParameters() { // Set defaults invideo_url = NULL; inaudio_file = NULL; vbitrate = 256000; abitrate = 256000; vcodec = "";//"flv";//"xvid";//"flv1"; progressFileName = NULL; jobId = NULL; startDelta = 0.0; acodec = "mp3"; outname = NULL; profile = ""; vno_rc = false; // operate ratecontrol. vqf = -1; // no const quantization vcompress_factor //= 50; // Normal = 100; // Highest compression vqual2perf_tradeoff = -1; // 100 - highest quality, 0 - Best performance. vkeyframe_interval = -1; // # of frames between keyframes. vrcbufms = -1; // vconfload; // constructor already gives it a default value // vconfstore; // constructor already gives it a default value vconstantRateFactor = -1; // Rate factor: the smaller it is, the higher the quality. // -1=none (use bitrate), otherwise, bitrate is ignored. std::string vprofile; // baseline/main/high (default=baseline) vlevel = -1; // level (2 digits without period). default=30 passnum = -1; // passlogfile; // constructor already gives it a default value // Requested key frames // vkeyfrms; // constructor already gives it a default value // total bitrate to be written in header (to overcome a situation where // resulting bitrate is lower than requested) br_inhdr = -1; max_durdiff = -1; // Maximum difference between audio duration and video duration (ms) max_dur = -1; // Maximum duration (ms) - everything else will be clipped playlist_segment_dur_ms = 5000; // playlist_first_segment_dur_ms = -1; video_dur = -1; fragmented = false; stretch_time = 1.0; startNumber = 1; vlookahead_frames = -1; // Take ffmpeg default = 40 vpsy_rd = 0; // Strength of psy. 
Rate-Distortion for x264 (0 - fastest, lowest quality, >0 - increase) renderedChunk = -1; notificationInterval = 10.0; // 10 sec vpacket_size = 15000; apacket_size = 418; padded = false; } private: /* * Functions for printing comma separated lists (for vkeyfr) */ void printNumbersList(Logger & logger, DebugLevel dbgLvl, const std::vector<int> &nums ) { std::vector<int>::const_iterator iter; for ( iter = nums.begin(); iter != nums.end(); iter++ ) logger.printf_adv(false, false, dbgLvl, "%d,", *iter); }; public: // Function for printing parameters void print(Logger & logger, DebugLevel dbgLvl) { logger.printf_adv(true, false, dbgLvl, "\n outname =%s\n invideo_url=%s\n inaudio_file=%s\n vbitrate=%d\n " "abitrate=%d\n vcodec=%s\n acodec=%s\n profile=%s\n vqf=%d\n vno_rc=%d\n vcompress_factor=%d\n " "vqual2perf_tradeoff=%d\n vkeyframe_interval=%d\n vrcbuf=%d\n " "vconstantRateFactor=%f\n vprofile=%s\n vlevel=%d\n vlookaheadh_frames=%d\n vpsy_rd=%f\n" "vconfload=%s\n vconfstore=%s\n passlog=%s\n passnum=%d\n br_inhdr=%d\n max_durdiff=%d max_dur=%d playlist_segment_dur=%d playlist_first_segment_dur=%f\n video_dur=%d fragmented=%d\n stretch_time=%f\n vkeyfr=", outname, invideo_url, inaudio_file, vbitrate, abitrate, vcodec, acodec, profile, vqf, vno_rc, vcompress_factor, vqual2perf_tradeoff, vkeyframe_interval, vrcbufms, vconstantRateFactor, vprofile.c_str(), vlevel, vlookahead_frames, vpsy_rd, vconfload.c_str(), vconfstore.c_str(), passlogfile.c_str(), passnum, br_inhdr, max_durdiff, max_dur, playlist_segment_dur_ms,playlist_first_segment_dur_ms, video_dur, fragmented, stretch_time); printNumbersList(logger, dbgLvl, vkeyfrms); logger.printf_adv(false, true, dbgLvl, "\n"); } }; class CProgress { private: #ifdef WIN32 struct _timeb _startTime; struct _timeb _startNotificationTime; #else struct timeval _startTime; #endif FILE * _outputFile; ProgressTracker *_progressTracker; double _startDelta; VideoEncoderInputParameters & _params; int _renderedFrames; public: CProgress(FILE *outputFile, ProgressTracker *progressTracker, double startDelta, VideoEncoderInputParameters & params); void ReportFrame(int iFrame); void NotifyFrame(int iFrame, bool isLast); virtual ~CProgress(); }; // Transfers ownership of progress file CProgress::CProgress(FILE *outputFile, ProgressTracker *progressTracker, double startDelta, VideoEncoderInputParameters & params) : _outputFile(outputFile), _startDelta(startDelta), _params(params), _progressTracker(progressTracker) { #ifdef WIN32 _ftime_s (&_startTime); _ftime_s (&_startNotificationTime); _renderedFrames = 0; /info if (_outputFile != NULL) { char timestr[26]; errno_t err = ctime_s( timestr, 26, & ( _startTime.time ) ); if (err) { fprintf(_outputFile,"Encoding progress for JobID %s, start time <unknown>, which is %.3f seconds after absolute start time\n", _params.jobId, startDelta); } else { fprintf(_outputFile,"Encoding progress for JobID %s, start time %.19s.%hu %.4s, which is %.3f seconds after absolute start time\n", _params.jobId, timestr,_startTime.millitm, &timestr[20],startDelta); } fprintf(_outputFile,"%s,%s,%s,%s\n","JobID","FrameNo","RelativeTimeEncoded","AbsoluteTimeEncoded"); } if (_progressTracker != NULL){ _progressTracker->setEncoderProgress(0, 0, 0); } #else gettimeofday(&_startTime, NULL); if (_outputFile != NULL) { char timestr[26]; timestr[0] = '\0'; time_t now; tm* localtm = gmtime(&now); if (localtm == NULL) { fprintf( _outputFile, "Encoding progress for JobID %s, start time <unknown>, which is %.3f seconds after absolute start time\n", _params.jobId, 
startDelta); } else { //TODO - what is format strftime(timestr, sizeof(timestr), "%Y-%m-%d %H:%M:%S", localtm); fprintf( _outputFile, "Encoding progress for JobID %s, start time %.19s.%hu %.4s, which is %.3f seconds after absolute start time\n", _params.jobId, timestr, localtm->tm_sec, &timestr[20], startDelta); } fprintf(_outputFile, "%s,%s,%s,%s\n", "JobID", "FrameNo", "RelativeTimeEncoded", "AbsoluteTimeEncoded"); } #endif } void CProgress::ReportFrame(int iFrame) { if (_outputFile == NULL) { return; } #ifdef WIN32 _timeb curTime; _ftime_s (&curTime); double elapsedTime = difftime(curTime.time, _startTime.time); elapsedTime += (curTime.millitm - _startTime.millitm) * 0.001; #else struct timeval curTime; gettimeofday(&curTime, NULL); float elapsedTime = curTime.tv_sec - _startTime.tv_sec; elapsedTime += (curTime.tv_usec - _startTime.tv_usec) / 1000000.0; #endif fprintf(_outputFile,"%s,%d,%.3f,%.3f\n", _params.jobId == NULL ? "" : _params.jobId, iFrame, elapsedTime, elapsedTime + _startDelta); } void CProgress::NotifyFrame(int iFrame, bool isLast){ if (_progressTracker == NULL){ return; } #ifdef WIN32 _timeb curTime; _ftime_s (&curTime); double elapsedTime = difftime(curTime.time, _startNotificationTime.time); elapsedTime += (curTime.millitm - _startNotificationTime.millitm) * 0.001; #else struct timeval curTime; gettimeofday(&curTime, NULL); float elapsedTime = curTime.tv_sec - _startTime.tv_sec; elapsedTime += (curTime.tv_usec - _startTime.tv_usec) / 1000000.0; #endif if (elapsedTime < _params.notificationInterval && !isLast){ return; } double averageFramesPerSec = (double)((iFrame - _renderedFrames)/elapsedTime); _progressTracker->setEncoderProgress((iFrame - _renderedFrames), elapsedTime, averageFramesPerSec); _renderedFrames = iFrame; #ifdef WIN32 _startNotificationTime = curTime; #else _startTime = curTime; #endif } CProgress::~CProgress() { if (_outputFile != NULL) { fclose(_outputFile); } if(_progressTracker != NULL){ _progressTracker->flush(); } } // Initialize ffmpeg //static void VideoEncoder::init(map<string, void *> *commonStorage) { CFFMpegWriter::init(commonStorage); } bool VideoEncoder::ReadNumbersList( std::string s, std::vector<int> *nums ) { nums->clear(); int start = 0; int comma = -1; do { #define LIST_SEPARATOR ',' comma = s.find( LIST_SEPARATOR, start ); int num = atoi ( s.c_str() + start ); nums->push_back( num ); start = comma+1; } while ( comma != std::string::npos ); return true; } bool VideoEncoder::parse_args (int argc, char* argv[], VideoEncoderInputParameters & params) { int iarg; if ( argc < 1 ) { throw SWException("Too few arguments\n"); } params.outname = argv[argc-1]; for ( iarg = 0; iarg < argc-1; iarg++ ) { if ( strcmp(argv[iarg], "-iv") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.invideo_url = argv[iarg]; } else { m_logger.error( "Expecting input video url\n"); return false; } } else if (strcmp(argv[iarg], "-progress") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.progressFileName = argv[iarg]; } else { m_logger.error( "Expecting progress filename\n"); return false; } } else if (strcmp(argv[iarg], "-id") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.jobId = argv[iarg]; } else { m_logger.error( "Expecting job identifier\n"); return false; } } else if (strcmp(argv[iarg], "-startDelta") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.startDelta = atof(argv[iarg]); } else { m_logger.error( "Expecting start time delta\n"); return false; } } else if (strcmp(argv[iarg], "-vcodec") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vcodec = argv[iarg]; } else 
{ m_logger.error( "Expecting video codec\n"); return false; } } else if (strcmp(argv[iarg], "-vb") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vbitrate = atoi(argv[iarg]); } else { m_logger.error( "Expecting video bitrate\n"); return false; } } else if (strcmp(argv[iarg], "-vqf") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vqf = atoi(argv[iarg]); } else { m_logger.error( "Expecting video quantization factor\n"); return false; } } else if (strcmp(argv[iarg], "-vcf") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vcompress_factor = atoi(argv[iarg]); } else { m_logger.error( "Expecting video compression factor (0-100)\n"); return false; } } else if (strcmp(argv[iarg], "-vq2perf") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vqual2perf_tradeoff = atoi(argv[iarg]); } else { m_logger.error( "Expecting video quality to performance tradeoff (0-100)\n"); return false; } } else if (strcmp(argv[iarg], "-vkeyfr_int") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vkeyframe_interval = atoi(argv[iarg]); } else { m_logger.error( "Expecting video keyframe interval (in frames)\n"); return false; } } else if (strcmp(argv[iarg], "-vrcbuf") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vrcbufms = atoi(argv[iarg]); } else { m_logger.error( "Expecting video ratecontrol buffer size\n"); return false; } } else if (strcmp(argv[iarg], "-vkeyfr") == 0 ) { iarg++; if ( iarg < argc-1 ) { // Comma separated list ReadNumbersList( argv[iarg], &params.vkeyfrms ); } else { m_logger.error( "Expecting video keyframe in ms\n"); return false; } } else if (strcmp(argv[iarg], "-vno_rc") == 0 ) { params.vno_rc = true; } else if (strcmp(argv[iarg], "-ia") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.inaudio_file = argv[iarg]; } else { m_logger.error( "Expecting input audio file\n"); return false; } } else if (strcmp(argv[iarg], "-vconfload") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vconfload = argv[iarg]; } else { m_logger.error( "Expecting vconfload file name\n"); return false; } } else if (strcmp(argv[iarg], "-vconfstore") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vconfstore = argv[iarg]; } else { m_logger.error( "Expecting vconf store file name\n"); return false; } } else if (strcmp(argv[iarg], "-acodec") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.acodec = argv[iarg]; } else { m_logger.error( "Expecting audio codec\n"); return false; } } else if (strcmp(argv[iarg], "-ab") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.abitrate = atoi(argv[iarg]); } else { m_logger.error( "Expecting audio bitrate\n"); return false; } } else if (strcmp(argv[iarg], "-profile") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.profile = argv[iarg]; } else { m_logger.error( "Expecting profile\n"); return false; } } else if (strcmp(argv[iarg], "-pass") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.passnum = atoi(argv[iarg]); } else { m_logger.error( "Expecting pass number\n"); return false; } } else if (strcmp(argv[iarg], "-passlog") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.passlogfile = argv[iarg]; } else { m_logger.error( "Expecting pass log file name\n"); return false; } } else if (strcmp(argv[iarg], "-br_inhdr") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.br_inhdr = atoi(argv[iarg]); } else { m_logger.error( "Expecting bitrate in header\n"); return false; } } else if (strcmp(argv[iarg], "-max_durdiff") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.max_durdiff = atoi(argv[iarg]); } else { m_logger.error( "Expecting maximum difference between audio duration and video duration (ms)\n"); return false; } } else if 
(strcmp(argv[iarg], "-max_dur") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.max_dur = atoi(argv[iarg]); } else { m_logger.error( "Expecting maximum duration (ms)\n"); return false; } } else if (strcmp(argv[iarg], "-playlist_segment_dur") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.playlist_segment_dur_ms = atoi(argv[iarg]); } else { m_logger.error( "Expecting playlist segment duration (ms)\n"); return false; } } else if (strcmp(argv[iarg], "-playlist_first_segment_dur") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.playlist_first_segment_dur_ms = atoi(argv[iarg]); } else { m_logger.error( "Expecting playlist first segment duration (ms)\n"); return false; } } else if (strcmp(argv[iarg], "-video_dur") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.video_dur = atoi(argv[iarg]); } else { m_logger.error( "Expecting playlist segment duration (ms)\n"); return false; } } else if (strcmp(argv[iarg], "-vprofile") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vprofile = argv[iarg]; } else { m_logger.error( "Expecting video profile\n"); return false; } } else if (strcmp(argv[iarg], "-vlevel") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vlevel = atoi(argv[iarg]); } else { m_logger.error( "Expecting video level\n"); return false; } } else if (strcmp(argv[iarg], "-vlookahead_frames") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vlookahead_frames = atoi(argv[iarg]); } else { m_logger.error( "Expecting video lookahead frames\n"); return false; } } else if (strcmp(argv[iarg], "-vpsy_rd") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vpsy_rd = (float)atof(argv[iarg]); } else { m_logger.error( "Expecting video psy-rd strength\n"); return false; } } else if (strcmp(argv[iarg], "-vconstant_rate") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.vconstantRateFactor = (float)atof(argv[iarg]); } else { m_logger.error( "Expecting video constant rate factor\n"); return false; } } else if (strcmp(argv[iarg], "-fragmented") == 0 ) { params.fragmented = true; } else if (strcmp(argv[iarg], "-stretch_time") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.stretch_time = atof(argv[iarg]); } else { m_logger.error( "Expecting stretch_time\n"); return false; } } else if (strcmp(argv[iarg], "-vframe") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.startNumber = atoi(argv[iarg]); } else { m_logger.error( "Expecting video codec\n"); return false; } } else if (strcmp(argv[iarg], "-rendered_chunk") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.renderedChunk = atoi(argv[iarg]); } else { m_logger.error( "Expecting rendered_chunk identifier\n"); return false; } } else if (strcmp(argv[iarg], "-notification_interval") == 0 ) { iarg++; if ( iarg < argc-1 ) { params.notificationInterval = atof(argv[iarg]); //sec } else { m_logger.error( "Expecting rendered_chunk identifier\n"); return false; } } else if (strcmp(argv[iarg], "-padded") == 0 ) { params.padded = true; //-padded" 15000:418 iarg++; if ( iarg < argc-1 ) { std::string currange = argv[iarg]; int start = atoi( currange.c_str() ); size_t delim = currange.find( ':' ); if ( delim == std::string::npos ) // No delimiter params.vpacket_size = start; else { int end = delim + 1; std::string str = currange.substr(0, delim); params.vpacket_size = atoi(str.c_str()); str = currange.substr(delim + 1); params.apacket_size = atoi(str.c_str()); } } else { m_logger.error( "Expecting padded chunk identifier\n"); return false; } } else if (strcmp(argv[iarg], "-h264_impl") == 0) { iarg++; if (iarg < argc - 1) { params.h264Impl = argv[iarg]; } else { m_logger.error("Expecting h264 
implementation\n"); return false; } } else { m_logger.error( "Unknown option: %s\n", argv[iarg]); return false; } } if ( !params.invideo_url ) { m_logger.error( "Input video url must be specified\n"); return false; } params.print(m_logger, DebugLevel_INFO); return true; } CVideoWriter *VideoEncoder::CreateWriter(VideoEncoderInputParameters & params, ProgressTracker *progressTracker) { // Find file name externsion char *pdot = strrchr( params.outname, '.' ); if ( !pdot ) { m_logger.error( "No filename extension...\n"); return NULL; } if ( strcmp( pdot+1, "flv" ) == 0 ) { if (params.vcodec == NULL || params.vcodec[0] == '\0') params.vcodec = VE_CODEC_VP6; // that's the default for flv... //vcodec = VE_CODEC_FLV; params.fragmented = false; // decline fragmented output return new CFFMpegWriter(m_logger, progressTracker); } else if ( stricmp( pdot+1, "mp4" ) == 0 || stricmp( pdot+1, "3gp" ) == 0 ) { if (params.vcodec == NULL || params.vcodec[0] == '\0') params.vcodec = VE_CODEC_H264; // that's the default //vcodec = VE_CODEC_FLV; return new CFFMpegWriter(m_logger, progressTracker); } else if ( stricmp( pdot+1, "ts" ) == 0 || stricmp( pdot+1, "m3u8" ) == 0 ) { if (params.vcodec == NULL || params.vcodec[0] == '\0') params.vcodec = VE_CODEC_H264; // that's the default for ts... return new CFFMpegWriter(m_logger, progressTracker); } //else if ( stricmp( pdot+1, "avi" ) == 0 ) //{ // if (params.vcodec == NULL || params.vcodec[0] == '\0') // params.vcodec = VE_CODEC_VP6; // that's the default for avi... // return new CVFWWriter(m_logger); // vcodec = VE_CODEC_XVID; // that's the default for avi... // return new CFFMpegWriter; //} else if ( stricmp( pdot+1, "mov" ) == 0 ) { if (params.vcodec == NULL || params.vcodec[0] == '\0') params.vcodec = VE_CODEC_QTRLE; // that's the default for mov... params.acodec = VE_CODEC_PCM_S16LE; // that's the default for mov... return new CFFMpegWriter(m_logger, progressTracker); } else if ( stricmp( pdot+1, "webm" ) == 0 ) { if (params.vcodec == NULL || params.vcodec[0] == '\0') params.vcodec = VE_CODEC_VP8; params.acodec = VE_CODEC_VORBIS; return new CFFMpegWriter(m_logger, progressTracker); } /*else if ( stricmp( pdot+1, "wmv" ) == 0 ) { return new CWMVWriter(m_logger); }*/ else if ( stricmp( pdot+1, "jpeg" ) == 0 || stricmp( pdot+1, "jpg" ) == 0 || stricmp( pdot+1, "png" ) == 0 ) { if (params.vcodec == NULL || params.vcodec[0] == '\0'){ params.vcodec = IE_CODEC_IMAGE2; // that's the default params.startNumber = 1; } return new CFFMpegWriter(m_logger, progressTracker); } throw SWException( "Unknown filename extension: %s\n", pdot+1); return NULL; } void VideoEncoder::executeEngine(int argc, char **argv) { int ifr; VideoEncoderInputParameters params; if ( ! 
parse_args( argc, argv, params) ) { throw SWException("Usage: -args -iv <in_video_url>" "[-vcodec <video_codec>] " "[-vb <video_bitrate>] " "[-vqf <video_quantization_factor>] " "[-vcf vcompress_factor] " "[-vno_rc] " "[-vrcbuf <rc-buffersize-ms>] " "[-vkeyfr <keyframe-times-ms>]" "[-vconfstore <video-codec-conf-store-file> " "[-vconfload <video-codec-conf-load-file>] " "[-vprofile profile] " "[-vlevel level] " "[-vconstant_rate rate] " "[-ia <in_audio_file>] " "[-acodec <audio_codec>] " "[-ab <audio_bitrate>]" "[-passlog <pass-log-file>] " "[-pass <pass-number>] " "[-br_inhdr <bitrate-in-header>] " "[-max_durdiff <maximum-a/v-duration-diff] " "[-max_dur <maximum-output-duration] " "[-playlist_segment_dur <duration-of-playlist-segment>] " "[-profile <profile-path>] " "[-dbg] " "[-progress <progress-file-name>] " "[-fragmented] " "[-stretch_time <stretch>] " "[-rendered_chunk <chunk-size>] " "[-padded <video_framesize_size:audio_frame_size>] " "[-h264Impl <h264 video codec implementation>] " "<outfile>\n"); } // Get progressTracker from common storage map<string, void *>::const_iterator iter = m_commonStorage->find( "progressTracker" ); if ( iter == m_commonStorage->end() ) throw SWException("Missing progressTracker in common storage.\n"); else progressTracker = (ProgressTracker *)iter->second; progressTracker->flush(); FILE * progressFile = NULL; if (params.progressFileName != NULL) { m_logger.error("Opening progress file %s\n", params.progressFileName); progressFile = fopen(params.progressFileName,"w"); if (progressFile == NULL) { throw SWException( "Failed to open progress file for writing: %s\n", params.progressFileName ); } } CProgress progress(progressFile, progressTracker, params.startDelta, params); #ifdef WIN32 DWORD starttime = GetTickCount(); #else struct timeval starttimetv; gettimeofday(&starttimetv, 0); DWORD starttime = starttimetv.tv_sec * 1000 + starttimetv.tv_usec / 1000; #endif m_logger.info("Loading input url\n"); int isleep = 0; VideoStream::EnableLog(m_logger.getDebugLevel() <= DebugLevel_TRACE); VideoStream::CInputVideoStream *inputStream = VideoStream::CInputVideoStream::Create(); if (inputStream->Open (params.invideo_url) == false) { throw SWException( "Failed to open url: %s\n", params.invideo_url); } m_logger.info("Loaded input url\n"); CVideoWriter *writer = CreateWriter(params, progressTracker); if ( !writer ) { throw SWException("CreateWriter failed (possibly due to invalid output file parameters)\n"); } CVideoWriterOptions options; options.name = params.outname; options.profile = params.profile; options.width = inputStream->GetWidth(); options.height = inputStream->GetHeight(); options.vbitrate = params.vbitrate; options.framerate = inputStream->GetFrameRate(); // numenator options.fps = options.framerate; //(int)ceil((double)options.framerate/(double)options.framerate_scale); options.framerate_scale = 1; // denumenator options.vcodec = params.vcodec; options.vbitsperpel = inputStream->GetBitCount(); options.vno_rc = params.vno_rc; // operate ratecontrol? 
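// The writer options below are copied straight from the parsed command-line
// parameters; they control quantization, rate control, keyframes and the
// two-pass log, and are interpreted by the concrete CVideoWriter implementation.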
options.vqf = params.vqf; // const quantization options.vcompress_factor = params.vcompress_factor; options.vqual2perf_tradeoff = params.vqual2perf_tradeoff; options.vkeyframe_interval = params.vkeyframe_interval; options.vrcbuf_ms = params.vrcbufms; options.vconfload = params.vconfload; options.vconfstore = params.vconfstore; options.vconstantRateFactor = params.vconstantRateFactor; options.vprofile = params.vprofile; options.vlevel = params.vlevel; //options.m_bAudio = ( inaudio_file != NULL ); options.abitrate = ( params.inaudio_file != NULL ) ? params.abitrate : 0; options.asample_rate = 44100; options.achannels = 2; options.acodec = params.acodec; options.passlogname = params.passlogfile; options.passnum = params.passnum; // single pass options.br_inhdr = params.br_inhdr; options.playlist_segment_dur_ms = params.playlist_segment_dur_ms; options.playlist_first_segment_dur_ms = params.playlist_first_segment_dur_ms; options.vduration_ms = params.video_dur; options.fragmented = params.fragmented; options.stretch_time = params.stretch_time; options.atempo = (options.stretch_time == 0 ? 1 : 1/options.stretch_time); options.startNumber = params.startNumber; options.vlookahead_frames = params.vlookahead_frames; options.vpsy_rd = params.vpsy_rd; options.renderedChunk = params.renderedChunk; options.padded = params.padded; options.vpacket_size = params.vpacket_size; options.apacket_size = params.apacket_size; char executionId[100]; sprintf(executionId, "%s", getExecutionId().c_str()); options.jobId = (params.jobId == NULL ? executionId : params.jobId); // Translate keyframes from ms to frame index std::vector<int> vkeyfrinds; ms2indices( params.vkeyfrms, vkeyfrinds, options.framerate ); //speedup support if (options.stretch_time != 1){ options.framerate_scale = 10000; // denumenator options.framerate = (int) ((options.framerate / options.stretch_time) * options.framerate_scale + 0.5); // numenator } bool use_padding = false; #ifdef DEBUG_PADDING use_padding=true; #endif if (options.padded || use_padding){ options.vkeyframe_interval = 1000000; // patch to delete key frames m_logger.info("PADDING set vkeyframe_interval to %d\n", options.vkeyframe_interval); } options.h264Impl = params.h264Impl; #define USE_AUDIO_STREAM #ifdef USE_AUDIO_STREAM VideoStream::CInputAudioStream *inputAStream = NULL; #else CWAVReader wavr; #endif if ( options.abitrate > 0 ) { m_logger.info("Loading input audio file: %s\n", params.inaudio_file); int filled_size = 0; #ifdef USE_AUDIO_STREAM inputAStream = VideoStream::CInputAudioStream::Create(); if (inputAStream->Open (params.inaudio_file) == false) { throw SWException( "Failed to open audio file: %s\n", params.inaudio_file); } m_logger.info("Loaded input audio file\n"); VideoStream::AUDIOINFO audioInfo = inputAStream->GetAudioInfo(); options.asample_rate = audioInfo.sampleRate; options.achannels = audioInfo.numChannels; //evev - test format!! 
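// With USE_AUDIO_STREAM the sample rate and channel count come from the opened
// CInputAudioStream; the #else branch below obtains them from the WAV reader instead.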
#else if ( !wavr.Init( inaudio_file ) ) exit(2); options.asample_rate = wavr.SampleRate(); options.achannels = wavr.Channels(); #endif } if ( !writer->Init(options) ) { throw SWException("Init writer failed\n"); } writer->BeginWrite(); // // Init video // m_logger.info("Preparing video\n"); CVideoFrame fr; fr.width = options.width; fr.height = options.height; fr.bytesperpel = inputStream->GetBitCount()/8; m_logger.info("Input parms: %dx%d %d bytes per pixel.\n", fr.width, fr.height, fr.bytesperpel ); double vencoded_secs = 0; ifr = 0; VideoStream::CFramePtr framePtr = inputStream->GetNextFrame(); bool vhas_more_data = ( !framePtr.isNull() ); progress.ReportFrame(ifr); // // Init audio // CAudioFrame afr; afr.bfr = NULL; int atotal_bytes_read = 0; bool ahas_more_data = false; double aencoded_secs = 0; if ( options.abitrate > 0 ) { m_logger.info("Preparing audio\n"); afr.len = writer->GetSamplesPerFrame() * options.achannels * writer->GetBytesPerSample(); afr.bfr = new unsigned char[afr.len]; ahas_more_data = true; } int rendered = 0; bool readyToPlayWritten = false; int totalAudio =0; bool firstTimeAudioRead = true; double stretchFactor = options.stretch_time == 0 ? 1 : (double)options.stretch_time /(double)options.achannels/writer->GetBytesPerSample()/options.asample_rate; /* * Encode Audio and video */ m_logger.info("Encoding...\n"); while ( vhas_more_data || ahas_more_data ) { /* * Encode some audio */ if ( options.abitrate > 0 ) // has audio? { int filled_size = 0; while ( ahas_more_data ) { bool endofdata; #ifdef USE_AUDIO_STREAM int nread = inputAStream->Read( afr.bfr + filled_size, afr.len - filled_size, &endofdata ); if ( nread < 0 ) { throw SWException("Failed to read audio data\n"); } #else int nread = wavr.Read( afr.bfr + filled_size, afr.len - filled_size, &endofdata ); #endif totalAudio +=nread; m_logger.trace("nread=%d, totalAudio=%d endofdata=%s\n", nread, totalAudio, endofdata?"true" : "false"); filled_size += nread; atotal_bytes_read += nread; if ( endofdata && !firstTimeAudioRead ) { memset( afr.bfr + filled_size, 0, afr.len - filled_size ); filled_size = afr.len; ahas_more_data = false; m_logger.trace("endofdata=true - ahas_more_data = false 1"); } if ( filled_size == afr.len ) { if ( !writer->EncodeAFrame( afr ) ) { cleanup(inputStream, inputAStream, afr, writer); throw SWException("Failed to write audio frame\n"); } filled_size = 0; // start re-filling again aencoded_secs = ((double)atotal_bytes_read) * stretchFactor; //(options.stretch_time == 0 ? 1 : options.stretch_time)/options.achannels/writer->GetBytesPerSample()/options.asample_rate; m_logger.trace("(a) aencoded_secs=%f, vencoded_secs=%f\n", aencoded_secs, vencoded_secs ); if ( params.max_dur > 0 && aencoded_secs*1000 >= params.max_dur ) { // End of audio m_logger.info("end of audio"); ahas_more_data = false; } if ( aencoded_secs-AUDIO_ENCODE_AHEAD_SECS > vencoded_secs && vhas_more_data ) { // audio passed video -> encode some video now // (Encode audio slightly before video - to avoid audio problems). m_logger.info("encode some video now"); firstTimeAudioRead = false; break; } } } } // has audio? 
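// Audio is encoded slightly ahead of video (AUDIO_ENCODE_AHEAD_SECS); the audio and
// video loops below alternate, each breaking out once it gets far enough ahead of the other.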
bool produceImage = (options.vcodec == IE_CODEC_IMAGE2); /* * Encode some video */ while ( vhas_more_data ) { m_logger.info("Writing vframe %d\n", ifr); //memset(fr.bfr, (ifr*2)%255, fr.width * fr.height*fr.bytesperpel); fr.bfr = framePtr.GetData(); VideoStream::CFramePtr nextFramePtr = inputStream->GetNextFrame(); fr.is_last = (nextFramePtr.isNull()); // Check if frame is in keyframe list if (options.padded) { fr.is_key = false; } else{ if ( std::find( vkeyfrinds.begin(), vkeyfrinds.end(), ifr ) != vkeyfrinds.end() ) fr.is_key = true; else fr.is_key = false; } if (!fr.is_last) { progress.ReportFrame(ifr+1); } if (!produceImage){ progress.NotifyFrame(ifr +1, fr.is_last); } bool encodeFrame = true; if (produceImage){ if (ifr != options.startNumber){ encodeFrame = false; } } int out_size = 0; if (encodeFrame) { if ( !writer->EncodeVFrame( fr, &out_size ) ) { cleanup(inputStream, inputAStream, afr, writer); throw SWException("Failed to write frame\n"); } rendered = rendered + out_size; //m_logger.trace("options.renderedChunk=%u, out_size=%u, rendered=%u, readyToPlayWritten=%d\n", options.renderedChunk, out_size, rendered, readyToPlayWritten ); if (!produceImage && options.renderedChunk > 0 && rendered >= options.renderedChunk && !readyToPlayWritten){ readyToPlayWritten = true; progressTracker->readyToPlay(rendered); rendered = 0; } } framePtr = nextFramePtr; vhas_more_data = ( !framePtr.isNull() ); //m_logger.trace( "VFrame %d %s\n", ifr, fr.is_key ? "(forced keyframe)" : "" ); if (ifr == 54){ m_logger.trace( "Stop fro debugger"); } ifr++; if (isleep > 0) { #ifdef WIN32 Sleep(isleep); #else usleep(isleep * 1000); #endif } vencoded_secs = ((double)ifr)*options.framerate_scale/options.framerate; m_logger.trace("(v) aencoded_secs=%f, vencoded_secs=%f\n", aencoded_secs, vencoded_secs ); if (produceImage && encodeFrame){ //vhas_more_data = false; } if ( params.max_dur > 0 && vencoded_secs*1000 >= params.max_dur ) { // End of video vhas_more_data = false; } if ( vencoded_secs > aencoded_secs-AUDIO_ENCODE_AHEAD_SECS && ahas_more_data ) { // video passed audio -> encode some audio now // (Encode audio slightly before video - to avoid audio problems). break; } } // vhas_more_data }// while ( vhas_more_data || ahas_more_data ) cleanup(inputStream, inputAStream, afr, writer); #ifdef WIN32 DWORD endtime = GetTickCount(); #else struct timeval endtimetv; gettimeofday(&endtimetv, 0); DWORD endtime = endtimetv.tv_sec * 1000 + endtimetv.tv_usec / 1000; #endif m_logger.info("File %s written. total time: %d ms\n", params.outname, endtime-starttime ); if ( options.abitrate > 0 && params.max_durdiff >= 0 ) { // Check if audio and video duraitons are close enough. 
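// A large mismatch here usually indicates dropped frames or truncated audio upstream,
// so it is treated as a fatal error rather than a warning.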
double durdiffSecs = fabs((vencoded_secs - aencoded_secs)); if ( durdiffSecs*1000 > params.max_durdiff ) { throw SWException( "Difference between audio duration (%gs) and video duration (%gs) is too big (maximum allowed=%dms)\n", aencoded_secs, vencoded_secs, params.max_durdiff); } } return; } void VideoEncoder::cleanup(VideoStream::CInputVideoStream *inputStream, VideoStream::CInputAudioStream *inputAStream, CAudioFrame& afr, CVideoWriter *writer){ progressTracker->flush(); /* * Destory readers */ if (inputStream) { inputStream->Close(); VideoStream::CInputVideoStream::Destroy (inputStream); inputStream = NULL; } #ifdef USE_AUDIO_STREAM if ( inputAStream ) { inputAStream->Close(); VideoStream::CInputAudioStream::Destroy (inputAStream); inputAStream = NULL; } #endif if ( afr.bfr ){ delete [] afr.bfr; afr.bfr = NULL; } // Destroy writer writer->Close(); try { delete writer; writer = NULL; } catch (...) { perror("Delete:\n"); } } // Translate ms to frame index void VideoEncoder::ms2indices( const std::vector<int> &ms, std::vector<int> &inds, int framerate ) { inds.clear(); std::vector<int>::const_iterator iter; for ( iter = ms.begin(); iter != ms.end(); iter++ ) { int ind = ( (*iter)*framerate + 500 ) / 1000; inds.push_back( ind ); } } <file_sep>/LightSynthesizer/LightSynthesizerEngineFactory.cpp //#include "stdafx.h" #include "LightSynthesizerEngineFactory.h" #include "LightSynthesizer.h" Engine *LightSynthesizerEngineFactory::createEngine() { return new LightSynthesizer(); } <file_sep>/RenderingManager/RenderingManager/EngineManager.h #pragma once //#include <windows.h> #include <string> #include <map> #include <iostream> #include <fstream> #include <queue> #include "Engine.h" #include "EngineFactory.h" using namespace std; // Forward declaration class RenderingManager; #define UNLIMITED_EXECUTIONS (0xFFFF) // A big enough number class EngineManager { private: class EngineTypeInfo { public: string m_type; EngineFactory * m_factory; unsigned int m_nRunning; unsigned int m_maxRunning; queue<Engine*> m_pending; EngineTypeInfo(string type, EngineFactory * factory) : m_type(type), m_factory(factory), m_nRunning(0), m_maxRunning(UNLIMITED_EXECUTIONS) {} ~EngineTypeInfo() {}; // TODO: delete the factory?? }; map<string,Engine *> m_engines; // The map's key is the engine's execution ID (not type) so the IDs are expected to be unique accross all types. map<string,EngineTypeInfo *> m_engineTypes; // The map's key is the engine type int m_nEngines; // Counts engines (both running and pending) RenderingManager * m_rndMng; public: EngineManager(); ~EngineManager(); // Should be called right after the creation of EngineManager bool init(RenderingManager * rndMng); RenderingManager * getRenderingManager(){return m_rndMng;}; void addEngine(Engine * engine); void removeEngine(Engine * engine); Engine * getEngineByExecutionId(string executionId); int getNumberOfEngines() {return m_nEngines;}; bool hasExecutionsOfType(string engineType); bool isEngineTypeSupported(string engineType) {return (m_engineTypes[engineType] != NULL);} EngineFactory * getEngineFactory(string engineType) {return isEngineTypeSupported(engineType) ? 
m_engineTypes[engineType]->m_factory : NULL;} void printReport(); void printEngineTypeReport(); void stopAllEngines(Engine *triggeringEngine); void initEngineType(string engineType, EngineFactory * engineFactory); bool hasExecutions(string engineType); void setEngineTypeLimit(string engineType, unsigned int limit); }; <file_sep>/SVG2Video/SVG2VideoEngineFactory.cpp #include "stdafx.h" #include "SVG2VideoEngineFactory.h" #include "SVG2VideoEngine.h" Engine *SVG2VideoEngineFactory::createEngine() { return new SVG2VideoEngine(m_commonStorage); } <file_sep>/Fmod/Effects/LoPassDsp.h #ifndef __LO_PASS_DSP_H__ #define __LO_PASS_DSP_H__ #include "DspEffect.h" #pragma once class LoPassDsp : public DspEffect { private: FMOD::DSP *dsp; public: void create(FMOD::System *fmodSystem) { fmodSystem->createDSPByType(FMOD_DSP_TYPE_LOWPASS, &dsp); dspList.push_back(dsp); } void setCutoff(float factor) { dsp->setParameter(FMOD_DSP_LOWPASS_CUTOFF, factor); } void setResonance(float factor) { dsp->setParameter(FMOD_DSP_LOWPASS_RESONANCE, factor); } }; #endif <file_sep>/VideoStream/VideoStream/VSAVIFileMedia.h #pragma once #include "VSBaseMedia.h" #include "VSCompression.h" #include <list> #ifdef WIN32 #include <windows.h> #include <vfw.h> #else #include <pthread.h> #ifndef UINT64_C #define UINT64_C(c) (c ## ULL) #endif extern "C" { #include "libavformat/avformat.h" #include "libavutil/imgutils.h" #include "libavutil/opt.h" } #endif namespace VideoStream { // CInputAVIFile class class CInputAVIFile : public CBaseInputVideoStream { private: FILE *_fp; VIDEOINFO _vi; CVideoDecompressor *_pDecompressor; #ifndef WIN32 AVFormatContext * ic; #endif public: CInputAVIFile(); ~CInputAVIFile(); #ifdef WIN32 bool Open (LPCTSTR location, MediaLocationParams & mlParams); #else bool Open(const char* location, MediaLocationParams & mlParams); #endif const VIDEOINFO &GetVideoInfo() const; bool FastForward (unsigned int frames); CFramePtr GetNextFrame(); void Close(); }; // Frame index information struct FRAMEINFO { int offset; int length; bool isKeyFrame; }; typedef std::list<FRAMEINFO> IndicesType; // COutputAVIFile class class COutputAVIFile : public CBaseOutputVideoStream<true> { private: IndicesType _index; int _lastIndex; FILE *_fp; VIDEOINFO _vi; CVideoCompressor *_pCompressor; #ifndef WIN32 AVFormatContext * oc; AVOutputFormat *fmt; // AVStream* video_st; AVCodec *codec; AVCodecContext *c; AVCodecID GetDefaultCodecId(); void release(); #endif BITMAPINFO *GetOutputBitmapInfo(); DWORD GetDefaultCodec(); public: COutputAVIFile(); ~COutputAVIFile(); #ifdef WIN32 bool Open (LPCTSTR location, const VIDEOINFO &vi, MediaLocationParams & mlParams); #else bool Open(const char* location, const VIDEOINFO &vi, MediaLocationParams & mlParams); #endif bool WriteFrame (CFramePtr framePtr); void Close(); }; // CAVIFileMedia class class CAVIFileMedia : public CBaseMedia<CInputAVIFile,COutputAVIFile,/*IsInputSync = */true,/*IsOutputSync = */false> { }; }<file_sep>/Fmod/Effects/AudioStretchDsp.h #ifndef __AUDIO_STRETCH_DSP_H__ #define __AUDIO_STRETCH_DSP_H__ #include "PitchShiftDsp.h" #pragma once class AudioStretchDsp : public PitchShiftDsp { private: float factor; public: AudioStretchDsp(){} float getFactor() { return factor; } void setFactor(float f) { factor = f; } void dividePitch(float factor) { float pitch; dsp->getParameter(FMOD_DSP_PITCHSHIFT_PITCH, &pitch, 0, 0); dsp->setParameter(FMOD_DSP_PITCHSHIFT_PITCH, pitch/factor); } void stretch(FMOD::Channel *channel) { float freq; dividePitch(factor); DspEffect::apply(channel); 
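		// Raising the playback frequency by 'factor' speeds the clip up, while the pitch
		// shifter above divides the pitch by the same factor, so the net effect is a time
		// stretch at (approximately) the original pitch.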
channel->getFrequency(&freq); channel->setFrequency(freq * factor); } }; #endif <file_sep>/VideoDecoder/VideoDecoder/FFMpegReader.cpp #include <stdlib.h> #include <stdio.h> #ifndef WIN32 #include <pthread.h> #endif extern "C" { #define __STDC_CONSTANT_MACROS #include "libavformat/avformat.h" #include "libavcodec/avcodec.h" #include "libavdevice/avdevice.h" #include "libswscale/swscale.h" #include "libavutil/opt.h" #include "libavfilter/avfilter.h" #include "libavfilter/avfiltergraph.h" #include "libavcodec/avcodec.h" } #include "FFMpegReader.h" #include "MutexAutoLock.h" #include "SWException.h" #define VE_CODEC_FLV "flv" #define VE_CODEC_XVID "xvid" #define VE_CODEC_MP3 "mp3" #define VE_CODEC_PCM_S16LE "pcm16le" #define MAX(_a,_b) ( (_a)>(_b) ? (_a) : (_b) ) // A global handle to synchronize calls to ffmpeg funcs // This handle is also used by the encoder // it is shared through the commonStorage mechanism #ifdef WIN32 static HANDLE avcodec_lock_mutex = NULL; #else static pthread_mutex_t avcodec_lock_mutex; #endif #define AVCODEC_LOCK_MUTEX_ID "avcodec_lock_mutex" static int lockmgr(void **mtx, enum AVLockOp op) { switch (op) { case AV_LOCK_CREATE: #ifdef WIN32 avcodec_lock_mutex = CreateMutex (NULL, FALSE, NULL); #else pthread_mutex_init(&avcodec_lock_mutex, NULL); #endif *mtx = &avcodec_lock_mutex; if (!*mtx) return 1; return 0; case AV_LOCK_OBTAIN: #ifdef WIN32 return WaitForSingleObject(*mtx, INFINITE); #else return pthread_mutex_lock((pthread_mutex_t *)*mtx); #endif case AV_LOCK_RELEASE: #ifdef WIN32 return ReleaseMutex(*mtx); #else return pthread_mutex_unlock((pthread_mutex_t *)*mtx); #endif case AV_LOCK_DESTROY: #ifdef WIN32 CloseHandle(*mtx); #else pthread_mutex_destroy((pthread_mutex_t *)*mtx); #endif return 0; } return 1; } /* initialize libavcodec, and register all codecs and formats */ //static void CFFMpegReader::init(map<string, void *> *commonStorage) { // Take handle to avcodec mutex // Its existance also indicates that ffmpeg was initialized by another engine (encoder) map<string, void *>::const_iterator iter = commonStorage->find( AVCODEC_LOCK_MUTEX_ID ); if ( iter == commonStorage->end() ) { /*if (av_lockmgr_register(lockmgr)) { av_log(NULL, AV_LOG_FATAL, "Could not initialize lock manager!\n"); throw SWException("Could not initialize lock manager!\n"); }*/ #ifdef WIN32 avcodec_lock_mutex = CreateMutex( NULL, FALSE, NULL ); #else pthread_mutex_init(&avcodec_lock_mutex, NULL); #endif // First time - intialize ffmpeg /* register all the codecs */ avcodec_register_all(); av_register_all(); avfilter_register_all(); // Store for other engines #ifdef WIN32 (*commonStorage)[AVCODEC_LOCK_MUTEX_ID] = avcodec_lock_mutex; #else (*commonStorage)[AVCODEC_LOCK_MUTEX_ID] = &avcodec_lock_mutex; #endif } else { #ifdef WIN32 avcodec_lock_mutex = (HANDLE)iter->second; #else avcodec_lock_mutex = *((pthread_mutex_t*)iter->second); #endif } } CFFMpegReader::CFFMpegReader( Logger &logger ) : m_logger(logger) { m_pAVFormatContext = NULL; m_pVStream = NULL; m_iVStreamIdx = -1; m_pVDecodedFrame = NULL; m_pVOutFrame = NULL; m_pSWSContext = NULL; } CFFMpegReader::~CFFMpegReader() { if (m_pVOutFrame) { //don't free data - allocated outside! 
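		// m_pVOutFrame and its pixel buffer are created locally by alloc_picture(), so
		// both are released here; the decoded frame below carries no separately owned buffer.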
av_free(m_pVOutFrame->data[0]); av_free(m_pVOutFrame); } if (m_pVDecodedFrame) av_free(m_pVDecodedFrame); Close(); } // Open a file bool CFFMpegReader::Open(const char *name, CVideoProperties &properties ) { MutexAutoLock lock(avcodec_lock_mutex); // Lock all function scope to protect ffmpeg vars int err, ret; AVInputFormat *pAVInputFormat = NULL; // get default parameters from command line m_pAVFormatContext = avformat_alloc_context(); if (!m_pAVFormatContext) { throw SWException("Failed allocate AVFormatContext.\n"); } /* open the input file with generic avformat function */ err = avformat_open_input(&m_pAVFormatContext, name, pAVInputFormat, NULL); //&o->g->format_opts if (err < 0) { throw SWException("Could not open %s\n", name ); } ret = avformat_find_stream_info(m_pAVFormatContext, NULL); if (ret < 0) { av_log(NULL, AV_LOG_FATAL, "%s: could not find codec parameters\n", name); avformat_close_input(&m_pAVFormatContext); throw SWException("%s: could not find codec parameters'\n", name); } if ( !InitVideoStream() ) return false; m_nWidth = m_pVStream->codec->width; m_nHeight = m_pVStream->codec->height; properties.width = m_nWidth; properties.height = m_nHeight; if ( (m_pVStream->r_frame_rate.num > 0 && m_pVStream->r_frame_rate.den > 0) && m_pVStream->r_frame_rate.num % m_pVStream->r_frame_rate.den == 0 ) { properties.framerate = m_pVStream->r_frame_rate.num / m_pVStream->r_frame_rate.den; } else { throw SWException( " Invalid frame rate: %d/%d\n", m_pVStream->r_frame_rate.num, m_pVStream->r_frame_rate.den ); } return true; } bool CFFMpegReader::FindVideoStream() { for (int i = 0; i < (int)m_pAVFormatContext->nb_streams; i++) { AVCodecContext *c = m_pAVFormatContext->streams[i]->codec; if (c->codec_type == AVMEDIA_TYPE_VIDEO) { m_iVStreamIdx = i; m_pVStream = m_pAVFormatContext->streams[i]; m_pVCodecContext = c; return true; } } throw SWException("No video stream found\n"); } bool CFFMpegReader::InitVideoStream() { if ( !FindVideoStream() ) return false; AVCodec *codec = avcodec_find_decoder(m_pVCodecContext->codec_id); if (!codec) { throw SWException("No video decoder found\n"); } int rc = avcodec_open2(m_pVCodecContext, codec, NULL); //TODO check opts if (rc < 0) { fprintf(stderr, "FFMPEG reader could not open codec\n"); throw SWException("FFMPEG reader could not open codec\n"); } if (m_pVCodecContext->width <= 0 || m_pVCodecContext->width <= 0) { throw SWException("Invalid dims - can't decode this\n"); } m_pVDecodedFrame = av_frame_alloc(); m_eOutPixFmt = AV_PIX_FMT_BGRA; //PIX_FMT_BGRA; m_pVOutFrame = alloc_picture(m_eOutPixFmt, m_pVCodecContext->width, m_pVCodecContext->height); return true; } // Decode a video frame // framePtr is initialized with the proper width, height etc... but without the data. bool CFFMpegReader::DecodeVFrame(VideoStream::CMutableFramePtr framePtr, bool &a_bEOF) { a_bEOF = false; if (framePtr.GetWidth() != m_nWidth || framePtr.GetHeight() != m_nHeight) { throw SWException("Frame dims (%dx%d) don't match video's (%dx%d)\n", framePtr.GetWidth(), framePtr.GetHeight(), m_nWidth, m_nHeight ); } int pix_fmt; switch (framePtr.GetBitCount()) { case 24: pix_fmt = AV_PIX_FMT_BGR24; break; case 32: pix_fmt = AV_PIX_FMT_BGRA; break; default: throw SWException("Only RGB and RGBA are supported\n"); } av_frame_unref(m_pVDecodedFrame); // Put our buffer in ffmpeg's data struct. (just copy pointers, not data!) 
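	// Decode loop: packets of the selected video stream are fed to avcodec_decode_video2()
	// until a complete picture is produced; the picture is then converted to BGRA with
	// sws_scale() and copied, vertically flipped, into the caller-supplied frame.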
//avpicture_fill((AVPicture*)m_pVOutFrame, video_frame.bfr, pix_fmt, video_frame.width, video_frame.height); int got_picture = 0; while ( !got_picture ) { // Read sample AVPacket packet; av_init_packet(&packet); if (!ReadVSample(packet)) { a_bEOF = true; return false; } m_logger.trace("Got video packet of size %d\n", packet.size ); // Decode sample int len = avcodec_decode_video2(m_pVCodecContext, m_pVDecodedFrame, &got_picture, &packet); if (len < 0) { av_free_packet(&packet); throw SWException("FFMpeg Decoder: Error decoding video frame - len=%d\n", len); } if ( got_picture ) { // Create a Context for scaling and conversion // If context already exists and appropriate (e.g. pix_fmt didn't change) - just use it m_pSWSContext = sws_getCachedContext(m_pSWSContext , m_pVCodecContext->width, m_pVCodecContext->height, m_pVCodecContext->pix_fmt, m_pVCodecContext->width, m_pVCodecContext->height, m_eOutPixFmt, SWS_BILINEAR, 0, 0, 0); // convert color evev - can we avoid this? sws_scale(m_pSWSContext, m_pVDecodedFrame->data, m_pVDecodedFrame->linesize, 0, m_pVCodecContext->height, m_pVOutFrame->data, m_pVOutFrame->linesize); unsigned int lin; // flip vertically unsigned char *src = m_pVOutFrame->data[0]; unsigned char *dst = framePtr.GetData() + framePtr.GetStride() * (framePtr.GetHeight()-1); for ( lin = 0; lin < framePtr.GetHeight(); lin++ ) { memcpy( dst, src, framePtr.GetStride() ); src += m_pVOutFrame->linesize[0]; dst -= framePtr.GetStride(); } //evev - what about cts? } // got_picture = true; av_free_packet(&packet); } return true; } // Read a video sample bool CFFMpegReader::ReadVSample( AVPacket &packet ) { int rc; while ( true ) { // Read sample rc = av_read_frame(m_pAVFormatContext, &packet); if (rc < 0) { // m_logger.error("av_read_frame failed: rc=%d\n", rc ); return false; } if (packet.stream_index == m_iVStreamIdx) // Is this a frame from our stream? 
{ if (packet.dts != AV_NOPTS_VALUE) { AVRational timebase = { 1, AV_TIME_BASE }; packet.dts = av_rescale_q( packet.dts, m_pAVFormatContext->streams[packet.stream_index]->time_base, timebase); } return true; } av_free_packet(&packet); } throw SWException("No video samples found\n"); } // Read a video sample bool CFFMpegReader::Close( ) { if(m_pVCodecContext && m_pVCodecContext->codec) { AvcodecCloseThreadSafe(m_pVCodecContext); // Close codec m_pVCodecContext = NULL; } if (m_pAVFormatContext) { avformat_close_input(&m_pAVFormatContext); m_pAVFormatContext = NULL; } //av_lockmgr_register(NULL); return true; } // Allocate picture and its buffer AVFrame* CFFMpegReader::alloc_picture(AVPixelFormat pix_fmt, int width, int height) { AVFrame *picture; uint8_t *picture_buf; int size; picture = av_frame_alloc(); if (!picture) return NULL; size = avpicture_get_size(pix_fmt, width, height); picture_buf = (uint8_t *)av_malloc(size); if (!picture_buf) { av_free(picture); return NULL; } avpicture_fill((AVPicture *)picture, picture_buf, pix_fmt, width, height); return picture; } void CFFMpegReader::AvcodecCloseThreadSafe(AVCodecContext *avctx) { MutexAutoLock lock(avcodec_lock_mutex); // Lock all function scope to protect ffmpeg vars avcodec_close(avctx); } <file_sep>/RenderingManager/RenderingManager/include/EngineFactory.h #pragma once #include <string> #include <map> #include "Engine.h" #include "RenderingManagerLogger.h" class EngineFactory { private: string m_engineType; friend class EngineManager; protected: RenderingManagerLogger m_logger; map<string, void *> * m_commonStorage; public: Engine *createEngine(string executionId); string getType() {return m_engineType;} // The init function is called at the beginning (even if this engine is never executed) virtual void init() {} protected: virtual Engine *createEngine() = 0; }; <file_sep>/utils/Utils/CMutex.cpp /* * CMutex.cpp * * Created on: Oct 24, 2013 * Author: eranv */ #ifndef WIN32 #include "CMutex.h" #include <sys/sem.h> #include <sys/types.h> #include <sys/stat.h> #include <stdlib.h> #include <unistd.h> #include <sys/time.h> #include <iostream> int CreateNamedMutex(semun& arg, int bInitilaizedOwner, const char* mutexName) { cout << "Mutext name " << mutexName << endl; int mutex = 0; int flag = IPC_CREAT; key_t semKey = 1234567890;//(key_t) atoi(mutexName); cout << "semkey " << semKey << endl; if (semKey == 0) { return FALSE; } cout << "Mutext name1 " << mutexName << endl; flag |= S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP; mutex = (int) semget(semKey, 1, flag); if (mutex < 0) { return FALSE; } cout << "Mutext name2 " << mutexName << endl; arg.val = 1; if (semctl(mutex, 0, SETVAL, arg) == -1) { return FALSE; } cout << "Mutext OK " << mutex << endl; return mutex; } int MutexLock(int& namedMutex, DWORD timeout) { struct sembuf semBuf; semBuf.sem_num = 0; semBuf.sem_op = -1; semBuf.sem_flg = SEM_UNDO; int result; if (timeout == INFINITE) { result = semop(namedMutex, &semBuf, 1); } else { struct timeval now; struct timespec ts; gettimeofday(&now, NULL); ts.tv_sec = now.tv_sec + timeout; ts.tv_nsec = now.tv_usec * 1000; result = semtimedop(namedMutex, &semBuf, 1, &ts); } if (result != 0) { return FALSE; } return TRUE; } int ReleaseMutex(int& namedMutex) { struct sembuf semBuf; semBuf.sem_num = 0; semBuf.sem_op = 1; semBuf.sem_flg = SEM_UNDO; if (semop(namedMutex, &semBuf, 1) != 0) { return FALSE; } return TRUE; } int DeleteMutex(int& namedMutex) { if (semctl(namedMutex, 0, IPC_RMID) == -1) { return FALSE; } return TRUE; } #endif 
<file_sep>/SVG2Video/FrameWriter.h #ifndef __CAVI_FILE_H #define __CAVI_FILE_H #include "VSVideo.h" #include "VSFrame.h" #define TEXT_HEIGHT 20 #define AVIIF_KEYFRAME 0x00000010L // this frame is a key frame. #define BUFSIZE 260 class Logger; class FrameWriter {//: public CObject { public: FrameWriter(const char* lpszFileName, int out_rate, int xdim, int ydim, bool dbg, int minBitDepth, Logger &logger); virtual ~FrameWriter(); // virtual void AddFrame(CBitmap *wbmp, CBitmap *bbmp); virtual void AddFrame(VideoStream::CMutableFramePtr wFramePtr); private: int xDim; int yDim; int m_minBitDepth; VideoStream::COutputVideoStream *_outputStream; //unsigned char table[256*2*256]; //int m_out_rate, m_out_scale; //fix alpha stats int total_handled, total_changed, total_contradicts; Logger &m_logger; }; #endif <file_sep>/Fmod/AudioMixer.h #ifndef __AUDIO_MIXER_H__ #define __AUDIO_MIXER_H__ #include "afx.h" #include "AudioPart.h" #include "Events/SyncPointEventDescriptor.h" #include "Events/SequenceEvent.h" #include "Events/DurationEvent.h" #include "Events/SetVolumeEvent.h" #include "LinearInterpolated.h" #define STEP_INTERVAL 100 class EffectGroup; using namespace FMOD; using namespace std; class AudioMixer { private: std::string inputFilename; std::string outputFilename; std::string log; void loadPresetsToChannelGroups(ChannelGroup *channelGroup, EffectGroup *effectGroup); void setSequenceEvent(FMOD::Sound *clipTimeline, AudioPart *previousAudioPart, std::vector<SyncPointEventDescriptor *> eventList); void unpauseChannels(); void ERRCHECK(FMOD_RESULT result); bool createAndInitFmodSystem(System **fmodSystem); bool associateSimplePartsWithChannelGroups(); bool initializeParentChannelGroup(); bool createEffectGroups(); double calculateClipEndTime(); bool setClipTimeline(Sound **clipTimeline); bool startTimeline(Sound *clipTimeline, Channel **timelineChannel); bool playAudioParts(); bool shutdownFmod(); // used to sort the audio part vector according to start time struct StartTimeAscendingDateSort { bool operator()(AudioPart* first, AudioPart* second) { return first->getStart() < second->getStart(); } } ; public: AudioMixer(); ~AudioMixer(); void config(std::string in, std::string out, std::string log); void runService(); }; #endif <file_sep>/LightSynthesizer/VideoObject.cpp //#include "stdafx.h" #include "VideoObject.h" #include "cv.h" #include "SWException.h" VideoObject::VideoObject(Logger & lgr) : currentFrameNum(-1), opened(false), logger(lgr), resizeim_src(NULL), resizeim_dest(NULL), firstFrame(-1), lastFrame(-1) { videoStream = VideoStream::CInputVideoStream::Create(); } VideoObject::VideoObject(Logger & lgr, std::string pth, ResizeMethod resizeMtd, double z) : path(pth), currentFrameNum(-1), zPos(z), opened(false), resizeMethod(resizeMtd), logger(lgr), resizeim_src(NULL), resizeim_dest(NULL), firstFrame(-1), lastFrame(-1) { videoStream = VideoStream::CInputVideoStream::Create(); } VideoObject::~VideoObject() { VideoStream::CInputVideoStream::Destroy(videoStream); if ( resizeim_src ) cvReleaseImage( (IplImage**)&resizeim_src ); if ( resizeim_dest ) cvReleaseImage( (IplImage**)&resizeim_dest ); } bool VideoObject::Init(TiXmlElement *elem, ResizeMethod rm ) { TiXmlElement *pathXml = elem->FirstChildElement("Path"); if (pathXml != NULL) { path = pathXml->GetText(); } resizeMethod = rm; return true; } bool VideoObject::create() { if (!videoStream->Open(path.c_str())) { return false; } frameRate = videoStream->GetFrameRate(); opened = true; return true; } VideoStream::CFramePtr 
VideoObject::getFramePtr(int frameNumber, int req_width, int req_height, bool *rc) { *rc = true; if (!opened) create(); if (currentFrameNum == frameNumber) { return VideoStream::CFramePtr(); } else { // fast forward the video stream to the requested frame *rc = videoStream->FastForward(frameNumber - currentFrameNum - 1); if (*rc == false) { // print something smart and return something to inform the calling method logger.warning("Fast-Forward to frame #%d failed for %s\n", frameNumber, getPath() ); // Don't fail to allow minor exceeding from video length (probably due to rounding inconsistencies) //return NULL; *rc = true; } } currentFrameNum=frameNumber; // get the frame from the videoStream VideoStream::CFramePtr framePtr = videoStream->GetNextFrame(); if ( ! framePtr.isNull() ) { // Need to resize? if ( framePtr.GetWidth() != req_width || framePtr.GetHeight() != req_height ) { if ( resizeMethod == RESIZE_METHOD_NONE ) { throw SWException("Resize is not allowed for %s expecting(%d X %d) received (%d X %d)\n", getPath(), req_width, req_height, framePtr.GetWidth(), framePtr.GetHeight()); } VideoStream::CFramePtr resized_frame = resizeFrame( framePtr, req_width, req_height ); return resized_frame; } } return framePtr; } void VideoObject::close() { curFramePtr.setNull(); videoStream->Close(); } // Go to specified frame and read it. void VideoObject::updateCurFrame( int framenum, int width, int height, bool *rc ) { VideoStream::CFramePtr framePtr = getFramePtr(framenum, width, height, rc); if (! framePtr.isNull() ) { curFramePtr = framePtr; } } void VideoObject::AppearedInFrame( int framenum ) { if ( firstFrame == -1 || firstFrame > framenum ) firstFrame = framenum; // Remember first frame this object appeared in if ( lastFrame == -1 || lastFrame < framenum ) lastFrame = framenum; // Remember last frame this object appeared in } VideoStream::CMutableFramePtr VideoObject::resizeFrame(VideoStream::CFramePtr orig, int width, int height ) { unsigned int r; // perform resize with ipl images. // re-use images if already allocated. 
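	// The source frame is copied into a cached IplImage, resized with cvResize() using
	// the interpolation chosen by resizeMethod, and the result is copied row by row into
	// a newly allocated frame of the requested size.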
IplImage* srcim = NULL; IplImage* dstim = NULL; if ( resizeim_src ) srcim = (IplImage*)resizeim_src; else { srcim = cvCreateImage( cvSize(orig.GetWidth(), orig.GetHeight()), IPL_DEPTH_8U, orig.GetBitCount()/8 ); resizeim_src = srcim; } if ( resizeim_dest ) dstim = (IplImage*)resizeim_dest; else { dstim = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, orig.GetBitCount()/8 ); resizeim_dest = dstim; } VideoStream::CMutableFramePtr resframePtr(width, height, orig.GetBitCount() ); // Copy src to ipl-image for ( r = 0; r < orig.GetHeight(); r ++ ) { const unsigned char *srcbuf = orig.GetData() + r*orig.GetStride(); unsigned char *dstbuf = &CV_IMAGE_ELEM( srcim, unsigned char, r, 0 ); memcpy( dstbuf, srcbuf, orig.GetWidth()*orig.GetBitCount()/8 ); } // resize int interpolation = CV_INTER_LINEAR; switch ( resizeMethod ) { case RESIZE_METHOD_NN: interpolation = CV_INTER_NN; break; case RESIZE_METHOD_LINEAR: interpolation = CV_INTER_LINEAR; break; case RESIZE_METHOD_CUBIC: interpolation = CV_INTER_CUBIC; break; case RESIZE_METHOD_AREA: interpolation = CV_INTER_AREA; break; } cvResize( srcim, dstim, interpolation ); // Copy result from ipl-image for ( r = 0; r < resframePtr.GetHeight(); r ++ ) { unsigned char *srcbuf = &CV_IMAGE_ELEM( dstim, unsigned char, r, 0 ); unsigned char *dstbuf = resframePtr.GetData() + r*resframePtr.GetStride(); memcpy( dstbuf, srcbuf, resframePtr.GetWidth()*resframePtr.GetBitCount()/8 ); } // Don't release ipl images - for reuse return resframePtr; } <file_sep>/NewEncoder/VideoEncoder/PlaylistWriter.cpp #include <string.h> #include "PlaylistWriter.h" #include <sys/stat.h> #include "SWException.h" #ifndef WIN32 #define stricmp strcasecmp #endif #pragma warning (disable : 4996 ) #define WRITE_ALL_SEGMENTS_IN_ADVANCE CPlaylistWriter::CPlaylistWriter( const char *name, int segment_duration_ms, float first_segment_duration_ms, CVideoWriterOptions &options ) { m_options = &options; m_playlist_name = name; m_segment_duration_ms = segment_duration_ms ; m_first_segment_duration_ms = first_segment_duration_ms ; m_prev_segment_time_ms = 0; m_nsegments_ready = 0; m_nTotalSegments = 0; fpPlaylist = NULL; fpSegmentsList = NULL; fpSegmentsList2 = NULL; } CPlaylistWriter::~CPlaylistWriter() { if ( fpPlaylist ) fclose( fpPlaylist ); if ( fpSegmentsList ) fclose( fpSegmentsList ); if ( fpSegmentsList2 ) fclose( fpSegmentsList2 ); } /* * Start playlist creation * Create and fill playlist file * Create an empty segments file */ bool CPlaylistWriter::Start() { if ( m_options->vduration_ms <= 0 ) { throw SWException("Video duration not specified - can't create playlist\n"); } // Fill playlist. // Number of segments is determined according to video duration if (m_first_segment_duration_ms > 0) m_nTotalSegments = ( m_options->vduration_ms - m_first_segment_duration_ms + m_segment_duration_ms - 100 ) / m_segment_duration_ms + 1; else m_nTotalSegments = ( m_options->vduration_ms + m_segment_duration_ms - 100 ) / m_segment_duration_ms; #ifdef LONG_FIRST_SEGMENTS m_nTotalSegments -= 3;// 3 long first segments #endif /* * Prepare the files. * The encoder first creates the segments file and then the playlist file. 
* The servlet first makes sure that the playlist file exists (which means that the segments file also exists), and * then reads segments and/or playlist (non-blocking) */ if ( !CreateSegmentsFile() ) return false; if ( !CreatePlaylistFile() ) return false; return true; } bool CPlaylistWriter::CreatePlaylistFile() { fpPlaylist = fopen( m_playlist_name, "w" ); if ( !fpPlaylist ) { throw SWException("Failed to open playlist file: %s\n", m_playlist_name ); } fprintf( fpPlaylist, "#EXTM3U\n#EXT-X-TARGETDURATION:%d\n", (m_segment_duration_ms+999)/1000); // Round up (EVEV - ???) int segment_dur_sec = 0; if (m_first_segment_duration_ms > 0) segment_dur_sec = ( m_first_segment_duration_ms + 999 ) / 1000; // Round up else segment_dur_sec = ( m_segment_duration_ms + 999 ) / 1000; // Round up for ( int i = 0; i < m_nTotalSegments; i++ ) { if ( i == m_nTotalSegments-1 ) { // Residual if (m_first_segment_duration_ms > 0) segment_dur_sec = ( m_options->vduration_ms - m_segment_duration_ms*(m_nTotalSegments-2) - m_first_segment_duration_ms + 999 ) / 1000; // Round up else segment_dur_sec = ( m_options->vduration_ms - m_segment_duration_ms*(m_nTotalSegments-1) + 999 ) / 1000; // Round up } else if (i != 0) { segment_dur_sec = ( m_segment_duration_ms + 999 ) / 1000; // Round up } #ifdef LONG_FIRST_SEGMENTS if ( i < 3 ) // 3 long first segments WriteSegment2Playlist( 2*segment_dur_sec, GetSegmentName( i, true ).c_str()); else #endif WriteSegment2Playlist( segment_dur_sec, GetSegmentName( i, true ).c_str()); } WriteEnd2Playlist(); if ( fpPlaylist ) fflush( fpPlaylist ); // Allow reader to read data return true; } bool CPlaylistWriter::CreateSegmentsFile() { const char *segmentsFileName = GetSegmentsFileName(); fpSegmentsList = fopen( segmentsFileName, "w" ); if ( !fpSegmentsList ) { throw SWException("Failed to open segments file: %s\n", segmentsFileName ); } // Write number of segments to segments file fprintf( fpSegmentsList, "nsegments=%d\n", m_nTotalSegments); const char *segmentsFileName2 = GetSegmentsFileName2(); fpSegmentsList2 = fopen(segmentsFileName2, "w"); if (!fpSegmentsList2) { throw SWException("Failed to open new segments file: %s\n", segmentsFileName2); } // Write number of segments to segments file fprintf(fpSegmentsList2, "nsegments=%d;target_duration=%d;vduration_ms=%d;\n", m_nTotalSegments, (m_segment_duration_ms + 999) / 1000, m_options->vduration_ms); return true; } std::string &CPlaylistWriter::GetNextSegmentFullPath() { m_next_segment_name = GetSegmentName(m_nsegments_ready, false); return m_next_segment_name; } // Get segment name based on its 0-based index. 
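// With relativePath=true only the bare segment name used inside the playlist is
// returned (e.g. index 0 -> "1.ts"); otherwise the name is prefixed with the
// directory of the playlist file so the segment is created next to it.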
std::string CPlaylistWriter::GetSegmentName( int index, bool relativePath ) { int prefix_len = 0; const char *filename = NULL; if ( relativePath ) { char name[MAX_PATH]; sprintf(name, "%d.ts", index + 1); return name; } else { filename = m_playlist_name; const char *path_end = NULL; const char *last_slash = strrchr(m_playlist_name, '/'); const char *last_backslash = strrchr(m_playlist_name, '\\'); if (!last_slash) path_end = last_backslash; else if (!last_backslash) path_end = last_slash; else path_end = MAX(last_slash, last_backslash); prefix_len = path_end + 1 - filename; char name[MAX_PATH]; sprintf(name, "%.*s%d.ts", prefix_len, filename, index + 1); return name; } } void CPlaylistWriter::WriteEnd2Playlist( ) { if ( fpPlaylist ) { fprintf( fpPlaylist, "#EXT-X-ENDLIST\n"); } } void CPlaylistWriter::WriteSegment2Playlist( int time_sec, const char *name ) { if ( fpPlaylist ) { fprintf( fpPlaylist, "#EXTINF:%d,\n%s\n", time_sec, name ); } } void CPlaylistWriter::SegmentCreated() { std::string segmentName = GetSegmentName(m_nsegments_ready, true); fprintf( fpSegmentsList, "%s\n", segmentName.c_str()); fflush( fpSegmentsList ); fprintf(fpSegmentsList2, "%s\n", segmentName.c_str()); fflush(fpSegmentsList2); } void CPlaylistWriter::SegmentReady( int end_time_ms, bool last_segment /*= false*/ ) { struct stat filestatus; stat( GetSegmentName(m_nsegments_ready, false).c_str(), &filestatus ); fprintf( fpSegmentsList, "%d\n", filestatus.st_size ); fflush( fpSegmentsList ); //The duration is calculated in the same way that it is calculated in for the playlist int duration = (m_segment_duration_ms + 999) / 1000; if (m_nsegments_ready == 0 && m_first_segment_duration_ms > 0) duration = (m_first_segment_duration_ms + 999) / 1000; if (last_segment) { duration = (m_options->vduration_ms - m_segment_duration_ms*(m_nTotalSegments - 1) + 999) / 1000; } fprintf(fpSegmentsList2, "size=%d;duration=%d;duration_ms=%d;\n", filestatus.st_size, duration, end_time_ms - m_prev_segment_time_ms); fflush(fpSegmentsList2); int time_sec = ( end_time_ms - m_prev_segment_time_ms + 999 ) / 1000; // Round up (EVEV - ???) m_prev_segment_time_ms = end_time_ms; m_nsegments_ready++; } //static bool CPlaylistWriter::IsPlaylist( const char *name ) { #define PLAYLIST_EXT ".m3u8" int len = strlen(name); if ( len >= sizeof(PLAYLIST_EXT) && stricmp(name+len-sizeof(PLAYLIST_EXT)+1, PLAYLIST_EXT) == 0 ) return true; return false; } bool CPlaylistWriter::ShouldStartNewSegment( int time_ms ) { if (m_first_segment_duration_ms > 0 && m_nsegments_ready == 0 && time_ms >= m_first_segment_duration_ms) return true; else if (m_first_segment_duration_ms > 0 && m_nsegments_ready == 0) return false; int segdur = m_segment_duration_ms; #ifdef LONG_FIRST_SEGMENTS ( m_nsegments_ready < 3 ? 
m_segment_duration_ms*2 : m_segment_duration_ms );// 3 long first segments #endif if ( m_nsegments_ready+1 < m_nTotalSegments && time_ms - m_prev_segment_time_ms >= segdur ) return true; return false; } const char *CPlaylistWriter::GetSegmentsFileName() { int prefix_len = GetPlaylistNamePrefixLen(); static char fileName[MAX_PATH]; sprintf(fileName, "%.*s_segments.txt", prefix_len, m_playlist_name); return fileName; } const char *CPlaylistWriter::GetSegmentsFileName2() { int prefix_len = GetPlaylistNamePrefixLen(); static char fileName[MAX_PATH]; sprintf(fileName, "%.*s_segments2.txt", prefix_len, m_playlist_name); return fileName; } int CPlaylistWriter::GetPlaylistNamePrefixLen() { const char *dot = strrchr(m_playlist_name, '.'); if (dot == NULL) return strlen(m_playlist_name); return dot - m_playlist_name; } <file_sep>/LightSynthesizer/LightSynthesizerMain.cpp #include "stdafx.h" #include "windows.h" #include "LightSynthesizer.h" #include "VSVideo.h" #include <stdio.h> #include <string> #include <vector> using namespace std; LightSynthesizerExitCode lightSynthesizer_exitCode = EXITCODE_GENERAL_ERROR; vector<string> getArgs(char *argv[], int argc) { vector<string> args; for (int i = 1; i < argc; i++) { args.push_back(argv[i]); } return args; } bool parseArgs(vector<string> args, map<string, string> &retMap) { bool foundInput = false; for (uint i = 0; i < args.size(); i++) { if (args[i].substr(0, 1) != "-") { return false; } string var = args[i].substr(1); if (var == "input") { i++; if (i >= args.size()) { return false; } foundInput = true; retMap.insert(pair<string, string>(var, args[i])); } else if (var == "bufsize") { i++; if (i >= args.size()) { return false; } retMap.insert(pair<string, string>(var, args[i])); } else if (var == "hide") { retMap.insert(pair<string, string>(var, "true")); } else if (var == "dbg") { retMap.insert(pair<string, string>(var, "true")); } } if (!foundInput) { return false; } return true; } int _tmain(int argc, char* argv[]) { printf("here\n"); vector<string> args = getArgs(argv, argc); map<string, string> argMap; bool rc = parseArgs(args, argMap); if (!rc) { return -1; } if (argMap["dbg"] == "true") { VideoStream::EnableLog(true); } else { VideoStream::EnableLog(false); } int streamBufferSize = -1; if (argMap["bufsize"] != "") { streamBufferSize = atoi(argMap["bufsize"].c_str()); } LightSynthesizer synth; if (!synth.configure(streamBufferSize)) { return -1; } TiXmlDocument *doc = new TiXmlDocument(argMap["input"].c_str()); if (!doc->LoadFile()) { return EXITCODE_PARSING_FAILED; } if (!synth.init(doc)) { return EXITCODE_INIT_FAILED; } if (!synth.doFlow()) { return lightSynthesizer_exitCode; } return EXITCODE_OK; } <file_sep>/utils/Utils/CEventManager.cpp /* * CEventManager.cpp * * Created on: Aug 20, 2013 * Author: eranv */ #ifndef WIN32 #include "CEventManager.h" #include <stdlib.h> #include <sys/time.h> //using namespace linux_syn_like_win; void InitializeCriticalSection(LPCRITICAL_SECTION lpcs) { pthread_mutexattr_t attr; pthread_mutexattr_init(&attr); pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); pthread_mutex_init(&lpcs->mutex, &attr); pthread_mutexattr_destroy(&attr); } void EnterCriticalSection(LPCRITICAL_SECTION lpcs) { pthread_mutex_lock(&lpcs->mutex); } void LeaveCriticalSection(LPCRITICAL_SECTION lpcs) { pthread_mutex_unlock(&lpcs->mutex); } void DeleteCriticalSection(LPCRITICAL_SECTION lpcs) { pthread_mutex_destroy(&lpcs->mutex); } BOOL CloseHandle(HANDLE hObject) { LPOBJECT_HEADER lpHeader = (LPOBJECT_HEADER) hObject; if (EVENT_MAGIC 
== lpHeader->magic) {
		if (0 == --lpHeader->count)
			delete (LPEVENT_OBJECT) hObject;
		return TRUE;
	}
	return FALSE;
}
//
// Event
//
HANDLE CreateEvent(void * ignore, // ignored
		BOOL bManualReset, BOOL bInitialSet, const char * // ignored
		) {
	return (HANDLE) new EVENT_OBJECT(bInitialSet, bManualReset);
}
HANDLE OpenEvent(DWORD dwAccess, BOOL bInheritHandle, const char *) {
	// nope
	return NULL;
}
BOOL SetEvent(HANDLE hObject) {
	LPOBJECT_HEADER lpHeader = (LPOBJECT_HEADER) hObject;
	if (EVENT_MAGIC != lpHeader->magic)
		return FALSE;
	LPEVENT_OBJECT lpObject = (LPEVENT_OBJECT) hObject;
	pthread_mutex_lock(&lpObject->lock);
	lpObject->is_set = true;
	lpObject->is_pulse = false;
	pthread_cond_broadcast(&lpObject->cond);
	pthread_mutex_unlock(&lpObject->lock);
	return TRUE;
}
BOOL PulseEvent(HANDLE hObject) {
	LPOBJECT_HEADER lpHeader = (LPOBJECT_HEADER) hObject;
	if (EVENT_MAGIC != lpHeader->magic)
		return FALSE;
	LPEVENT_OBJECT lpObject = (LPEVENT_OBJECT) hObject;
	pthread_mutex_lock(&lpObject->lock);
	lpObject->is_set = true;
	lpObject->is_pulse = true;
	pthread_cond_signal(&lpObject->cond);
	pthread_mutex_unlock(&lpObject->lock);
	return TRUE;
}
BOOL ResetEvent(HANDLE hObject) {
	LPOBJECT_HEADER lpHeader = (LPOBJECT_HEADER) hObject;
	if (EVENT_MAGIC != lpHeader->magic)
		return FALSE;
	LPEVENT_OBJECT lpObject = (LPEVENT_OBJECT) hObject;
	pthread_mutex_lock(&lpObject->lock);
	lpObject->is_set = false;
	pthread_mutex_unlock(&lpObject->lock);
	return TRUE;
}
DWORD WaitForSingleObject(HANDLE hObject, DWORD dwTime) {
	LPOBJECT_HEADER lpHeader = (LPOBJECT_HEADER) hObject;
	if (EVENT_MAGIC != lpHeader->magic)
		return 0;
	LPEVENT_OBJECT lpObject = (LPEVENT_OBJECT) hObject;
	pthread_mutex_lock(&lpObject->lock);
	if (true != lpObject->is_set) {
		if (dwTime == INFINITE) {
			pthread_cond_wait(&lpObject->cond, &lpObject->lock);
		} else {
			struct timeval now;
			struct timespec timeout;
			gettimeofday(&now, NULL);
			timeout.tv_sec = now.tv_sec + dwTime;
			timeout.tv_nsec = now.tv_usec * 1000;
			pthread_cond_timedwait(&lpObject->cond, &lpObject->lock, &timeout);
		}
	}
	if (false == lpObject->is_manual_reset)
		lpObject->is_set = false;
	pthread_mutex_unlock(&lpObject->lock);
	return 1;
}
void * threadfunc(void* param) {
	ThreadInfo* info = (ThreadInfo*) param;
	// info->lpObject is already an LPEVENT_OBJECT, i.e. a HANDLE, so it is passed as-is.
	HANDLE hObject = (HANDLE) info->lpObject;
	WaitForSingleObject(hObject, info->dwTime);
	return NULL;
}
DWORD WaitForMultipleObjects(DWORD ncount, HANDLE* hObjects, BOOL bWaitAll, DWORD dwTime) {
	pthread_mutex_t mutex;
	pthread_t* threadid;
	ThreadInfo* threadinfo;
	pthread_mutex_init(&mutex, NULL);
	pthread_mutex_lock(&mutex);
	threadid = (pthread_t*) malloc(ncount * sizeof(pthread_t));
	// Each waiter thread needs its own ThreadInfo that stays alive until pthread_join.
	threadinfo = (ThreadInfo*) malloc(ncount * sizeof(ThreadInfo));
	unsigned int i = 0;
	int rc = 0;
	for (i = 0; i < ncount; i++) {
		LPEVENT_OBJECT lpObject = (LPEVENT_OBJECT) hObjects[i];
		threadinfo[i].lpObject = lpObject;
		threadinfo[i].dwTime = dwTime;
		rc = pthread_create(&threadid[i], NULL, threadfunc, &threadinfo[i]);
		if (rc != 0) {
			return WAIT_TIMEOUT;
		}
	}
	if (!bWaitAll) {
		for (i = 0; i < ncount; i++) {
			//wakeup all threads
			LPEVENT_OBJECT lpObject = (LPEVENT_OBJECT) hObjects[i];
			pthread_cond_broadcast(&lpObject->cond);
		}
	}
	for (i = 0; i < ncount; i++) {
		//wait for threads and clean up
		pthread_join(threadid[i], NULL);
	}
	free(threadid);
	free(threadinfo);
	pthread_mutex_unlock(&mutex);
	pthread_mutex_destroy(&mutex);
	// all waiter threads finished (or timed out)
	return WAIT_OBJECT_0;
}
DWORD GetCurrentThreadId() {
	return (DWORD) pthread_self();
}
#endif
<file_sep>/Fmod/LinearInterpolated.h
#ifndef __LINEAR_INTERPOLATED_H__
#define __LINEAR_INTERPOLATED_H__
#pragma once
#include "Interpolated.h"
class LinearInterpolated : public Interpolated {
private:
	double startValue;
	double endValue;
	double step;
public:
	LinearInterpolated() { }
	LinearInterpolated(double
sv, double ev, double st, double et) { startValue = sv; endValue = ev; startTime = st; endTime = et; } ~LinearInterpolated(void) {} double getStartValue() { return startValue; } void setStartValue(double sv) { startValue = sv; } double getEndValue() { return endValue; } void setEndValue(double ev) { endValue = ev; } double getStep() { return step; } void setStep(double s) { step=s; } std::string getType() { return "LinearInterpolated"; } }; #endif <file_sep>/Fmod/Effects/EqualizerDsp.h #ifndef __EQUALIZER_DSP_H__ #define __EQUALIZER_DSP_H__ #include "DspEffect.h" #pragma once class EqualizerDsp : public DspEffect { public: void create(FMOD::System *fmodSystem) { } void addEqBand(FMOD::System *fmodSystem, float center, float bandwidth, float gain) { FMOD::DSP *dsp; fmodSystem->createDSPByType(FMOD_DSP_TYPE_NORMALIZE, &dsp); dsp->setParameter(FMOD_DSP_PARAMEQ_CENTER, center); dsp->setParameter(FMOD_DSP_PARAMEQ_BANDWIDTH, bandwidth); dsp->setParameter(FMOD_DSP_PARAMEQ_GAIN, gain); dspList.push_back(dsp); } }; #endif <file_sep>/utils/Utils/ColorSpace.h #pragma once class ColorSpace { public: static void rgb24_to_yuv420p(unsigned char *lum, unsigned char *cb, unsigned char *cr, const unsigned char *src, int width, int height, int src_stride); static void rgb32_to_yuv420p(unsigned char *lum, unsigned char *cb, unsigned char *cr, const unsigned char *src, int width, int height, int src_stride); static void yuv420p_to_rgb32(const unsigned char *lum, const unsigned char *cb, const unsigned char *cr, int lum_stride, int cb_stride, int cr_stride, unsigned char *dst, int width, int height, int dst_stride ); }; <file_sep>/utils/Utils/CEventManager.h #ifndef __LINUX_SYN_LIKE_WIN__ #define __LINUX_SYN_LIKE_WIN__ #include <pthread.h> #include "WindowDefinitions.h" #include <string> using namespace std; //#include "objbase.h" //namespace linux_syn_like_win //{ #define TRUE 1 #define FALSE 0 #define STATUS_WAIT_0 ((DWORD )0x00000000L) #define STATUS_ABANDONED_WAIT_0 ((DWORD )0x00000080L) #define WAIT_OBJECT_0 ((STATUS_WAIT_0 ) + 0 ) #define WAIT_ABANDONED ((STATUS_ABANDONED_WAIT_0 ) + 0 ) #define WAIT_ABANDONED_0 ((STATUS_ABANDONED_WAIT_0 ) + 0 ) #define WAIT_TIMEOUT 258L #define SYNCHRONIZE (0x00100000L) // // Critical section // typedef struct _CRITICAL_SECTION { pthread_mutex_t mutex; } CRITICAL_SECTION, *LPCRITICAL_SECTION; enum OBJECT_MAGIC { EVENT_MAGIC, NULL_MAGIC }; typedef struct _OBJECT_HEADER { BYTE magic; BYTE count; _OBJECT_HEADER(BYTE _magic) : magic(_magic), count(1) { } } OBJECT_HEADER, *LPOBJECT_HEADER; typedef struct _EVENT_OBJECT { OBJECT_HEADER header; pthread_mutex_t lock; pthread_cond_t cond; bool is_set; bool is_pulse; bool is_manual_reset; _EVENT_OBJECT(bool _is_set, bool _is_manual_reset) : header(EVENT_MAGIC), is_set(_is_set), is_pulse(false), is_manual_reset(_is_manual_reset) { pthread_cond_init(&cond, NULL); pthread_mutex_init(&lock, NULL); } ~_EVENT_OBJECT() { pthread_cond_destroy(&cond); pthread_mutex_destroy(&lock); } } EVENT_OBJECT, *LPEVENT_OBJECT; typedef struct ThreadInfo{ LPEVENT_OBJECT lpObject; int *count; DWORD dwTime; }; void InitializeCriticalSection(LPCRITICAL_SECTION); void EnterCriticalSection(LPCRITICAL_SECTION); void LeaveCriticalSection(LPCRITICAL_SECTION); void DeleteCriticalSection(LPCRITICAL_SECTION); typedef void * HANDLE; typedef void VOID; BOOL CloseHandle(HANDLE hObject); DWORD GetCurrentThreadId(); // // Event // HANDLE CreateEvent(void * skip, BOOL bManualReset, BOOL bInitialSet, const char *); HANDLE OpenEvent(DWORD dwAccess, BOOL bInheritHandle, 
const char *);
BOOL SetEvent(HANDLE hObject);
BOOL PulseEvent(HANDLE hObject);
BOOL ResetEvent(HANDLE hObject);
/*enum WAIT_FOR_CONSTS { INFINITE };*/
DWORD WaitForSingleObject(HANDLE hObject, DWORD dwTime);
DWORD WaitForMultipleObjects(DWORD ncount, HANDLE* hObjects, BOOL bWaitAll, DWORD dwTime);
//}
#endif // __LINUX_SYN_LIKE_WIN__
<file_sep>/utils/StackWalker/SWException.cpp
#include "SWException.h"
#ifndef WIN32
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
#endif
using namespace std;
// default constructor creates an empty SWException (without even a stack trace)
SWException::SWException() { }
SWException::SWException(string & message) : m_message(message) {
	fillStackTrace();
}
SWException::SWException(const char *format, ...) {
	if (format == NULL)
		return;
	va_list argList;
	va_start(argList, format);
	// Find requested buffer size
	int bufferSize = vsnprintf(NULL, 0, format, argList )+1;
	va_end(argList);
	va_start(argList, format);
	char *buffer = (char *)malloc(bufferSize); // Add one for terminating null
	vsnprintf(buffer, bufferSize, format, argList);
	m_message = string(buffer);
	free(buffer);
	va_end(argList);
	fillStackTrace();
}
// Copy constructor - copy only message and stacktrace (don't copy StackWalker!)
SWException::SWException(const SWException & e) : exception(), StackWalker() {
	m_message = e.m_message;
	m_stackTrace = e.m_stackTrace;
}
// Assignment operator
SWException & SWException::operator= (const SWException & other) {
	if (this != &other) // protect against invalid self-assignment
	{
		m_message = other.m_message;
		m_stackTrace = other.m_stackTrace;
	}
	// by convention, always return *this
	return *this;
}
void SWException::fillStackTrace() { // todo ShowCallstack(); }
<file_sep>/utils/Utils/FileUtils.cpp
/*
 * FileUtils.cpp
 *
 *  Created on: Aug 27, 2013
 *      Author: eranv
 */
#ifndef WIN32
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include "FileUtils.h"
bool FileUtils::folderExists(const char* folderName) {
	struct stat st;
	if (stat(folderName, &st) == 0) {
		if ((st.st_mode & S_IFDIR) != 0) {
			return true;
		} else {
			return false;
		}
	} else {
		return false;
	}
}
bool FileUtils::fileExists(const char* filename){
	FILE* file;
	if ((file = fopen(filename, "r")) != NULL){
		fclose(file);
		return true;
	}
	return false;
}
#endif
<file_sep>/Fmod/AudioOrigin.h
#pragma once
class AudioOrigin {
public:
	AudioOrigin(void);
	~AudioOrigin(void);
};
<file_sep>/NewEncoder/VideoEncoder/WAVReader.h
#if !defined(___WAVREADER_H)
#define ___WAVREADER_H
#include <string>
#include "Logger.h"
typedef struct{
	char rID[4];             // 'RIFF'
	long int rLen;
	char wID[4];             // 'WAVE'
	char fId[4];             // 'fmt '
	long int pcm_header_len; // varies...
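	// The remaining fields describe the PCM sample format (format tag, channel
	// count, sample rate, byte rate, block align and bits per sample).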
short int wFormatTag; short int nChannels; // 1,2 for stereo data is (l,r) pairs long int nSamplesPerSec; long int nAvgBytesPerSec; short int nBlockAlign; short int nBitsPerSample; } WAV_HDR; // header of wav file typedef struct{ char dId[4]; // 'data' or 'fact' long int dLen; // unsigned char *data; } CHUNK_HDR; class CWAVReader { private: Logger & m_logger; public: CWAVReader(Logger & logger); bool Init( char *file_name ); virtual ~CWAVReader(); int Read( unsigned char *bfr, int bfr_size, bool *endofbfr ); int SampleRate() { return (int)fs_hz; } int Channels() { return num_ch; } private: double fs_hz; int bits_per_sample; int num_ch; double *g_wdata_in; int g_num_isamp; long int g_max_isamp; FILE *m_fw; int wbuff_len; WAV_HDR *m_wav; }; #endif // !defined <file_sep>/VideoStream/VideoStream/Compression.cpp #ifdef WIN32 #include "StdAfx.h" #include "VSCompression.h" using namespace VideoStream; CVideoCompressor::CVideoCompressor(DWORD compressionCodec, const VIDEOINFO &vi) { BITMAPINFOHEADER &bmphdr = _bmpinfo.bmiHeader; memset (&bmphdr, 0, sizeof (bmphdr)); bmphdr.biSize = sizeof (BITMAPINFOHEADER); bmphdr.biWidth = vi.width; bmphdr.biHeight = vi.height; bmphdr.biPlanes = 1; bmphdr.biBitCount = vi.bitCount; bmphdr.biCompression = BI_RGB; bmphdr.biSizeImage = VideoStream::CFrame::GetFrameSize (vi.width, vi.height, vi.bitCount); #if 0 // Open dialog to choose codec memset(&_compVars,0, sizeof(COMPVARS)); _compVars.cbSize = sizeof(COMPVARS); if(ICCompressorChoose(NULL, ICMF_CHOOSE_DATARATE | ICMF_CHOOSE_KEYFRAME, &bmphdr, NULL, &_compVars, NULL) == FALSE) { //printf("Compressor selection aborted.\n"); } //printf("Codec FCC = 0x%x\n", compvars.fccHandler); _hICcompressor = _compVars.hic; #else _hICcompressor = ICOpen (ICTYPE_VIDEO, compressionCodec, ICMODE_COMPRESS); memset(&_compVars, 0, sizeof(COMPVARS)); _compVars.cbSize = sizeof (COMPVARS); _compVars.dwFlags = ICMF_COMPVARS_VALID; _compVars.hic = _hICcompressor; _compVars.lpbiOut = NULL;//&bmpInfo; _compVars.lKey = 0; _compVars.lQ = ICQUALITY_DEFAULT; #endif //_compVars.lDataRate = 40000; evev-doesn't seem to work for most codecs... _compVars.dwFlags = ICMF_COMPVARS_VALID; ICSeqCompressFrameStart (&_compVars, &_bmpinfo); } CVideoCompressor::~CVideoCompressor() { ICSeqCompressFrameEnd (&_compVars); ICCompressorFree (&_compVars); } BITMAPINFO *CVideoCompressor::GetCompressedBitmapInfo() { int compInfoSize = ICCompressGetFormatSize (_hICcompressor, &_bmpinfo); BITMAPINFO *pbmpCompInfo = (BITMAPINFO *) malloc (compInfoSize); ICCompressGetFormat (_hICcompressor, &_bmpinfo, pbmpCompInfo); return pbmpCompInfo; } void *CVideoCompressor::CompressFrame (CFramePtr framePtr, LONG &compSize) { compSize = framePtr.GetDataSize(); BOOL keyFrame = TRUE; // Cast the (const unsigned char *) into (unsigned char *) because ICSeqCompressFrame, unfortunately, does not provide the const qualifier. // However, it does not alter the data. 
unsigned char *data = (unsigned char *)framePtr.GetData(); void *pCompressedData = ICSeqCompressFrame (&_compVars, 0, data, &keyFrame, &compSize); return pCompressedData; } CVideoDecompressor::CVideoDecompressor(DWORD compressionCodec, const VIDEOINFO &vi) : _vi (vi) { _hICdecompressor = ICOpen (ICTYPE_VIDEO, compressionCodec, ICMODE_DECOMPRESS); _pbiOutput = (BITMAPINFO *) malloc (sizeof (BITMAPINFOHEADER)); BITMAPINFOHEADER &bmphdr2 = _pbiOutput->bmiHeader; memset (&bmphdr2, 0, sizeof (bmphdr2)); bmphdr2.biSize = sizeof (BITMAPINFOHEADER); bmphdr2.biWidth = vi.width; bmphdr2.biHeight = vi.height; bmphdr2.biPlanes = 1; bmphdr2.biBitCount = vi.bitCount; bmphdr2.biCompression = BI_RGB; bmphdr2.biSizeImage = VideoStream::CFrame::GetFrameSize (vi.width, vi.height, vi.bitCount); HIC hICcompressor = ICOpen (ICTYPE_VIDEO, compressionCodec, ICMODE_COMPRESS); int compInfoSize = ICCompressGetFormatSize (hICcompressor, _pbiOutput); _pbiInput = (BITMAPINFO *) malloc (compInfoSize); ICCompressGetFormat (hICcompressor, _pbiOutput, _pbiInput); ICClose (hICcompressor); ICDecompressBegin (_hICdecompressor, _pbiInput, _pbiOutput); } CVideoDecompressor::~CVideoDecompressor() { ICDecompressEnd (_hICdecompressor); ICClose (_hICdecompressor); free (_pbiInput); free (_pbiOutput); } CMutableFramePtr CVideoDecompressor::DecompressFrame (void *pCompressedData) { CMutableFramePtr decompressedFramePtr(_vi.width, _vi.height, _vi.bitCount); ICDecompress (_hICdecompressor, 0, &_pbiInput->bmiHeader, pCompressedData, &_pbiOutput->bmiHeader, decompressedFramePtr.GetData()); return decompressedFramePtr; } #else #include "VSCompression.h" #include <stdlib.h> #include <cstring> #include "SWException.h" #include "ColorSpace.h" using namespace VideoStream; static int g_frame_count = 0; bool g_debugComp = false; pthread_mutex_t CVideoCompressor::m_mutex = create_mutex(); pthread_mutex_t CVideoCompressor::create_mutex() { pthread_mutex_init(&CVideoCompressor::m_mutex, NULL); return CVideoCompressor::m_mutex; } void CVideoCompressor::initFFmpeg() { // First time - intialize ffmpeg avcodec_register_all(); // avdevice_register_all(); // avfilter_register_all(); av_register_all(); } //CVideoCompressor::CVideoCompressor(DWORD compressionCodec, const VIDEOINFO &vi) { CVideoCompressor::CVideoCompressor(DWORD compressionCodec, AVCodecID codecId, const VIDEOINFO &vi) : _vi(vi) { g_frame_count = 0; //init mutex // pthread_mutex_init(&m_mutex, NULL); if (codecId == AV_CODEC_ID_NONE) { return; } } void CVideoCompressor::init(AVFormatContext* oc, AVCodecContext* c) { pthread_mutex_lock(&m_mutex); m_codecContext = c; AVCodecID codecId = oc->oformat->video_codec; m_codec = avcodec_find_encoder(codecId); if (!m_codec) { throw SWException("Codec not found\n"); } video_st = avformat_new_stream(oc, m_codec); if (!video_st) { throw SWException("Failed allocate stream.\n"); } video_st->id = oc->nb_streams - 1; m_codecContext = video_st->codec; // put sample parameters m_codecContext->codec_id = codecId; m_codecContext->codec_type = AVMEDIA_TYPE_VIDEO; m_codecContext->bit_rate = 400000; /* resolution must be a multiple of two */ m_codecContext->gop_size = 10; /* emit one intra frame every ten frames */ m_codecContext->width = _vi.width; m_codecContext->height = _vi.height; // fprintf(stderr, "CVideoCompressor: width %d; height %d\n", m_codecContext->width, m_codecContext->height); /* frames per second */ m_codecContext->time_base = (AVRational) { 1, 25 }; m_codecContext->max_b_frames = 1; if (m_codecContext->codec_id == AV_CODEC_ID_MPEG2VIDEO) { 
m_codecContext->max_b_frames = 2; } if (m_codecContext->codec_id == AV_CODEC_ID_MPEG1VIDEO) { m_codecContext->mb_decision = 2; } if (codecId == AV_CODEC_ID_HUFFYUV) { m_codecContext->pix_fmt = AV_PIX_FMT_BGRA; //PIX_FMT_YUV422P; } else { m_codecContext->pix_fmt = AV_PIX_FMT_YUV420P; } if (oc->oformat->flags & AVFMT_GLOBALHEADER) { m_codecContext->flags != CODEC_FLAG_GLOBAL_HEADER; } if (video_st) { //open video if (avcodec_open2(m_codecContext, m_codec, NULL) < 0) { // fprintf(stderr, "could not open codec\n"); perror("Open \n"); throw SWException("Compressor could not open codec\n"); } //allocate and init te-usable frame m_frame = av_frame_alloc(); if (!m_frame) { throw SWException("could not allocate video frame\n"); } //allocate the encoded raw picture int ret = avpicture_alloc(&m_dstPicture, m_codecContext->pix_fmt, m_codecContext->width, m_codecContext->height); if (ret < 0) { throw SWException("could not allocate picture frame\n"); } *((AVPicture*)m_frame) = m_dstPicture; } pthread_mutex_unlock(&m_mutex); } CVideoCompressor::~CVideoCompressor() { if (m_frame) { // av_freep(&m_frame->data[0]); av_frame_free(&m_frame); m_frame = NULL; } pthread_mutex_destroy(&m_mutex); printf("\n"); } void *CVideoCompressor::CompressFrame(CFramePtr framePtr, LONG &compSize) { int i, ret, got_output; i = g_frame_count++; AVPacket pkt; void * pCompressedData = NULL; av_init_packet(&pkt); pkt.data = NULL; // packet data will be allocated by the encoder pkt.size = 0; fflush(stdout); // fprintf(stderr, "start encoding frame\n"); int width = framePtr.GetWidth(); int height = framePtr.GetHeight(); // Convert color if (m_codecContext->pix_fmt == AV_PIX_FMT_YUV420P || m_codecContext->pix_fmt == AV_PIX_FMT_YUV422P) { int widthbytes = (framePtr.GetBitCount() / 8) * width; // input is flipped vertically unsigned char *src = (unsigned char *)framePtr.GetData() + widthbytes * (height - 1); int stride = -widthbytes; ColorSpace::rgb32_to_yuv420p(m_frame->data[0], m_frame->data[1], m_frame->data[2], src, width, height, stride); } else { avpicture_fill((AVPicture *)m_frame, framePtr.GetData(), m_codecContext->pix_fmt, framePtr.GetWidth(), framePtr.GetHeight()); } m_frame->pict_type = AV_PICTURE_TYPE_I; // : AV_PICTURE_TYPE_NONE; //FF_I_TYPE : 0; // Encode current frame. ret = avcodec_encode_video2(m_codecContext, &pkt, m_frame, &got_output); if (ret < 0) { // fprintf(stderr, "Error encoding frame\n"); throw SWException("Cannot encode frame\n"); } if (ret < 0) { throw SWException("Cannot encode frame.\n"); } if (got_output) { // fprintf(stderr, "Got picture\n"); //copy compressed data to output compSize = pkt.size; // printf("Write frame %3d (size=%5u)\n", i, compSize); pCompressedData = av_malloc(compSize); memcpy(pCompressedData, pkt.data, compSize); //cpy??? 
av_free_packet(&pkt); } /* get the delayed frames */ for (got_output = 1; got_output; i++) { fflush(stdout); ret = avcodec_encode_video2(m_codecContext, &pkt, NULL, &got_output); if (ret < 0) { // fprintf(stderr, "Error encoding frame\n"); throw SWException("Error encoding frame\n"); } if (got_output) { compSize = pkt.size; // printf("Write (delay) frame %3d (size=%5u)\n", i, compSize); pCompressedData = av_malloc(compSize); memcpy(pCompressedData, pkt.data, compSize); av_free_packet(&pkt); } } // printf("Encode video file end s\n"); return pCompressedData; } CVideoDecompressor::CVideoDecompressor(DWORD compressionCodec, AVCodecID codecId, const VIDEOINFO &vi) : _vi(vi) { // avcodec_register_all(); // av_register_all(); // AVCodecID codecId = AV_CODEC_ID_NONE; // if (compressionCodec == h264) { // codecId = AV_CODEC_ID_H264; // } else if (compressionCodec == huffyuv) { // codecId = AV_CODEC_ID_HUFFYUV; // } else if (compressionCodec == lagarith) { // codecId = AV_CODEC_ID_HUFFYUV; // AV_CODEC_ID_LAGARITH; // } else if (compressionCodec == xvid) { // codecId = AV_CODEC_ID_MPEG4; // } else if (compressionCodec == MSRLE) { // codecId = AV_CODEC_ID_MSRLE; // } else if (compressionCodec == vp62) { // codecId = AV_CODEC_ID_VP6; // } else if (compressionCodec == vp70) { // codecId = AV_CODEC_ID_VP9; //???? // } else { // //uncompressed video // return; // } if (codecId == AV_CODEC_ID_NONE) { //uncompressed video return; } // //find video decoder // m_codec = avcodec_find_decoder(codecId); // if (!m_codec) { // throw SWException("Codec not found\n"); // } // // m_codecContext = avcodec_alloc_context3(m_codec); // if (!m_codecContext) { // throw SWException("Could not allocate video codec context\n"); // } // // // if (m_codec->capabilities & CODEC_CAP_TRUNCATED) // m_codecContext->flags |= CODEC_FLAG_TRUNCATED; /* we do not send complete frames */ // // /* For some codecs, such as msmpeg4 and mpeg4, width and height // MUST be initialized there because this information is not // available in the bitstream. 
*/ // // /* open it */ // if (avcodec_open2(m_codecContext, m_codec, NULL) < 0) { // perror("AVCODEC_OPEN\n"); // throw SWException("Could not open codec\n"); // exit(1); // } } void CVideoDecompressor::open_codec_context(AVFormatContext *ic, VIDEOINFO &vi) { int ret = 0; AVMediaType type = AVMEDIA_TYPE_VIDEO; //retrieve stream information ret = avformat_find_stream_info(ic, NULL); if (ret < 0) { throw SWException("Could not find stream information\n"); } ret = av_find_best_stream(ic, type, -1, -1, NULL, 0); if (ret < 0) { throw SWException("Could not find %s stream in input file '%s'\n", av_get_media_type_string(type), ic->filename); } int stream_idx = ret; video_st = ic->streams[stream_idx]; m_codecContext = video_st->codec; m_codec = avcodec_find_decoder(m_codecContext->codec_id); if (!m_codec) { throw SWException("Failed to find %s codec\n", av_get_media_type_string(type)); } //fill video input structure vi.frameRate = m_codecContext->time_base.den; vi.bitCount = 32; //// m_codecContext->bit_rate; vi.width = m_codecContext->width; vi.height = m_codecContext->height; // fprintf(stderr, "CVideoDecompressor: width %d; height %d\n", m_codecContext->width, m_codecContext->height); /* open it */ if (avcodec_open2(m_codecContext, m_codec, NULL) < 0) { perror("Open:\n"); throw SWException("Failed to open %s codec\n", av_get_media_type_string(type)); } //allocate and init te-usable frame m_frame = av_frame_alloc(); if (!m_frame) { throw SWException("could not allocate video frame\n"); } } CVideoDecompressor::~CVideoDecompressor() { if (m_codecContext) { avcodec_close(m_codecContext); av_free(m_codecContext); } if (m_frame) { av_frame_free(&m_frame); } } CMutableFramePtr CVideoDecompressor::DecompressFrame(void *pCompressedData, int compressedSize) { CMutableFramePtr decompressedFramePtr(m_codecContext->width, m_codecContext->height, 32); // CMutableFramePtr decompressedFramePtr(_vi.width, _vi.height, _vi.bitCount); AVPacket avpkt; av_frame_unref(m_frame); av_init_packet(&avpkt); // Read sample avpkt.data = (uint8_t *)pCompressedData; avpkt.size = compressedSize; printf("Read frame %3d (size=%5u)\n", g_frame_count, compressedSize); // Decode sample int got_picture = 0; int len = avcodec_decode_video2(m_codecContext, m_frame, &got_picture, &avpkt); if (len < 0) { av_free_packet(&avpkt); // fprintf(stderr,"FFMpeg Decoder: Error decoding video frame - len=%d\n", len); throw SWException("FFMpeg Decoder: Error decoding video frame - len=%d\n", len); } if (got_picture) { // fprintf(stderr, "video_frame n:%d coded_n:%d \n", g_frame_count, m_frame->coded_picture_number); //, av_ts2timestr(m_frame->pts, m_codecContext->time_base)); unsigned char *src = m_frame->data[0]; unsigned int lin; unsigned char *dst = decompressedFramePtr.GetData(); //unsigned char *dst = decompressedFramePtr.GetData() + decompressedFramePtr.GetStride() * (decompressedFramePtr.GetHeight() - 1); - flip vertically for (lin = 0; lin < decompressedFramePtr.GetHeight(); lin++) { memcpy(dst, src, decompressedFramePtr.GetStride()); src += m_frame->linesize[0]; //dst -= decompressedFramePtr.GetStride(); dst += decompressedFramePtr.GetStride(); } } av_free_packet(&avpkt); g_frame_count++; return decompressedFramePtr; } #endif<file_sep>/utils/CliManager/include/CliManager.h #pragma once #include <map> #include <string> #include <time.h> using namespace std; typedef void (*CommandHandler)(string fullCommand, void *callbackData, bool &exitCli); class CliManager { private: class CommandInfo { public: CommandInfo(string command, string 
shortHelp, string longHelp, CommandHandler cmdHandler, void *callbackData) : m_command(command), m_shortHelp(shortHelp), m_longHelp(longHelp), m_cmdHandler(cmdHandler), m_callbackData(callbackData) {}; string m_command; string m_shortHelp; string m_longHelp; CommandHandler m_cmdHandler; void *m_callbackData; }; bool m_showPrompt; map<string,CommandInfo *> m_commands; clock_t m_startTime; // Time (in clock ticks) from the start of the cli manager. FILE * m_captureFile; float m_timeOutInSeconds; public: CliManager(bool showPrompt) : m_showPrompt(showPrompt), m_captureFile(NULL), m_timeOutInSeconds(0) {}; ~CliManager(); bool addCommand(string command, string shortHelp, string longHelp, CommandHandler cmdHandler, void *callbackData); void setCaptureFile(FILE * captureFile) {m_captureFile = captureFile;} void setTimeOut(float timeOutInSeconds) {m_timeOutInSeconds = timeOutInSeconds;} // Read commands from cin and execute them void run( istream &is ); private: void printHelp(); }; <file_sep>/RenderingManager/RenderingManager/ProgressTracker.cpp #include "ProgressTracker.h" #include "RenderingManager.h" #include "SWException.h" ProgressTracker::ProgressTracker(RenderingManager * rndMng, double notifyPeriod) : m_rndMng(rndMng), m_progress(0), m_outOf(-1), m_notifyPeriod(notifyPeriod), m_lastNotify(-1), m_nextNotify(notifyPeriod) { m_startRenditionTime = 0; if (m_nextNotify > 1) m_nextNotify = 1; if (m_notifyPeriod <= 0 || m_notifyPeriod > 1) { throw SWException("Invalid notify period %lf.", m_notifyPeriod); } } void ProgressTracker::setStartRenditionTime(float startRenditionTime){ m_startRenditionTime = startRenditionTime; } void ProgressTracker::setOutOf(int outOf) { if (m_outOf == outOf) return; if (m_outOf != -1) { throw SWException("Cannot set the progress outOf to %d since it was previously set to %d.", outOf, m_outOf); } m_outOf = outOf; } void ProgressTracker::setProgress(int progress) { if (m_outOf == -1) { throw SWException("Cannot set progress before setting outOf."); } if (progress < m_progress) { throw SWException("Cannot set the progress to %d since it was previously set to %d.", progress, m_progress); } if (progress > m_outOf) { throw SWException("Cannot set the progress to %d since outOf %d.", progress, m_outOf); } m_progress = progress; // Just to make sure... 
if (m_outOf == 0) return; double relativeProgress = (double)m_progress / (double)m_outOf; if (relativeProgress >= m_nextNotify) { m_rndMng->writeProgress(m_progress, m_outOf); m_lastNotify = relativeProgress; m_nextNotify += m_notifyPeriod; if (m_nextNotify > 1) m_nextNotify = 1; } } void ProgressTracker::incProgress() { setProgress(m_progress + 1); } void ProgressTracker::readyToPlay(int renderedChunk){ m_rndMng->writeReadyToPlay(renderedChunk); } void ProgressTracker::totalSleepTime(int totalSleepTime) { m_rndMng->writeTotalSleepTime(totalSleepTime); } void ProgressTracker::sceneRendered(const char* sceneName, int frames, float videoRenditionTime){ clock_t now = clock(); float renditionTime = ((float)now - (float)getStartRenditionTime()) / ((float)CLOCKS_PER_SEC); setStartRenditionTime( (float)now); m_rndMng->sceneRendered(sceneName, frames, renditionTime, videoRenditionTime); } void ProgressTracker::setEncoderProgress(int frames, double timePassed, double averageFramesPerSec){ m_rndMng->setEncoderProgress(frames, timePassed, averageFramesPerSec); } void ProgressTracker::exceedPaddedSize(int codecType, int frameNumber, int frameSize, int maxFrameSize){ m_rndMng->exceedPaddedSize(codecType, frameNumber, frameSize, maxFrameSize); } void ProgressTracker::writeEvent(const char* name, const char* value){ m_rndMng->writeEvent(name, value); } void ProgressTracker::flush() { double relativeProgress = (double)m_progress / (double)m_outOf; if (m_lastNotify == relativeProgress) return; if (m_outOf == -1) return; m_rndMng->writeProgress(m_progress, m_outOf); m_lastNotify = relativeProgress; } <file_sep>/NewEncoder/VideoEncoder/MoovHeaderWriter.h #pragma once extern "C" { #ifndef __STDC_CONSTANT_MACROS # define __STDC_CONSTANT_MACROS #endif #include "libavformat/avformat.h" } #include "Logger.h" #include "VideoWriter.h" #include <list> //#include "avc.h" #define MOV_FRAG_INFO_ALLOC_INCREMENT 64 #define MOV_INDEX_CLUSTER_SIZE 1024 #define MOV_TIMESCALE 1000 #define RTP_MAX_PACKET_SIZE 1450 #define MODE_MP4 0x01 #define MODE_MOV 0x02 #define MODE_3GP 0x04 #define MODE_PSP 0x08 // example working PSP command line: // ffmpeg -i testinput.avi -f psp -r 14.985 -s 320x240 -b 768 -ar 24000 -ab 32 M4V00001.MP4 #define MODE_3G2 0x10 #define MODE_IPOD 0x20 #define MODE_ISM 0x40 #define MODE_F4V 0x80 typedef struct MOVIentry { uint64_t pos; int64_t dts; unsigned int size; unsigned int samples_in_chunk; unsigned int chunkNum; ///< Chunk number if the current entry is a chunk start otherwise 0 unsigned int entries; int cts; #define MOV_SYNC_SAMPLE 0x0001 #define MOV_PARTIAL_SYNC_SAMPLE 0x0002 uint32_t flags; } MOVIentry; typedef struct HintSample { uint8_t *data; int size; int sample_number; int offset; int own_data; } HintSample; typedef struct HintSampleQueue { int size; int len; HintSample *samples; } HintSampleQueue; typedef struct MOVFragmentInfo { int64_t offset; int64_t time; int64_t duration; int64_t tfrf_offset; int size; } MOVFragmentInfo; typedef struct MOVTrack { int mode; int entry; unsigned timescale; uint64_t time; int64_t track_duration; int last_sample_is_subtitle_end; long sample_count; long sample_size; long chunkCount; int has_keyframes; #define MOV_TRACK_CTTS 0x0001 #define MOV_TRACK_STPS 0x0002 #define MOV_TRACK_ENABLED 0x0004 uint32_t flags; #define MOV_TIMECODE_FLAG_DROPFRAME 0x0001 #define MOV_TIMECODE_FLAG_24HOURSMAX 0x0002 #define MOV_TIMECODE_FLAG_ALLOWNEGATIVE 0x0004 uint32_t timecode_flags; int language; int track_id; int tag; ///< stsd fourcc AVStream *st; AVCodecContext *enc; int 
multichannel_as_mono; int vos_len; uint8_t *vos_data; MOVIentry *cluster; unsigned cluster_capacity; int audio_vbr; int height; ///< active picture (w/o VBI) height for D-10/IMX uint32_t tref_tag; int tref_id; ///< trackID of the referenced track int64_t start_dts; int64_t start_cts; int hint_track; ///< the track that hints this track, -1 if no hint track is set int src_track; ///< the track that this hint (or tmcd) track describes AVFormatContext *rtp_ctx; ///< the format context for the hinting rtp muxer uint32_t prev_rtp_ts; int64_t cur_rtp_ts_unwrapped; uint32_t max_packet_size; int64_t default_duration; uint32_t default_sample_flags; uint32_t default_size; HintSampleQueue sample_queue; AVIOContext *mdat_buf; int64_t data_offset; int64_t frag_start; int frag_discont; int nb_frag_info; MOVFragmentInfo *frag_info; unsigned frag_info_capacity; struct { int first_packet_seq; int first_packet_entry; int packet_seq; int packet_entry; int slices; } vc1_info; void *eac3_priv; } MOVTrack; typedef struct MOVMuxContext { const AVClass *av_class; int mode; int64_t time; int nb_streams; int nb_meta_tmcd; ///< number of new created tmcd track based on metadata (aka not data copy) int chapter_track; ///< qt chapter track number int64_t mdat_pos; uint64_t mdat_size; MOVTrack *tracks; int flags; int rtp_flags; int exact; int iods_skip; int iods_video_profile; int iods_audio_profile; int fragments; int max_fragment_duration; int min_fragment_duration; int max_fragment_size; int ism_lookahead; AVIOContext *mdat_buf; int first_trun; int video_track_timescale; int reserved_moov_size; ///< 0 for disabled, -1 for automatic, size otherwise int64_t reserved_moov_pos; char *major_brand; int per_stream_grouping; AVFormatContext *fc; int use_editlist; float gamma; } MOVMuxContext; #define FF_MOV_FLAG_RTP_HINT (1 << 0) #define FF_MOV_FLAG_FRAGMENT (1 << 1) #define FF_MOV_FLAG_EMPTY_MOOV (1 << 2) #define FF_MOV_FLAG_FRAG_KEYFRAME (1 << 3) #define FF_MOV_FLAG_SEPARATE_MOOF (1 << 4) #define FF_MOV_FLAG_FRAG_CUSTOM (1 << 5) #define FF_MOV_FLAG_ISML (1 << 6) #define FF_MOV_FLAG_FASTSTART (1 << 7) #define FF_MOV_FLAG_OMIT_TFHD_OFFSET (1 << 8) #define FF_MOV_FLAG_DISABLE_CHPL (1 << 9) #define FF_MOV_FLAG_DEFAULT_BASE_MOOF (1 << 10) #define FF_MOV_FLAG_DASH (1 << 11) #define FF_MOV_FLAG_FRAG_DISCONT (1 << 12) #define FF_MOV_FLAG_DELAY_MOOV (1 << 13) #define FF_MOV_FLAG_WRITE_COLR (1 << 14) #define FF_MOV_FLAG_WRITE_GAMA (1 << 15) #define AUDIO_ENCODE_AHEAD_SECS 0.5 class MoovHeaderWriter { private : Logger & m_logger; CVideoWriterOptions m_options; int64_t m_vduration; int m_nvframe; //int m_fps; unsigned int m_vpacket_size; int64_t* m_voffsets; int64_t m_aduration; int m_naframe; unsigned int m_apacket_size; int64_t* m_aoffsets; int m_dataStartPosition; int m_frame_number; int64_t m_data_size; std::list<MOVIentry> aentryList; std::list<MOVIentry> ventryList; std::list<MOVIentry> achunkList; std::list<MOVIentry> vchunkList; int getTrackTimescale(AVStream* st); int updateChunkEntry(int dataStartPosition, int packetSize, std::list<MOVIentry> &list); int createNewEntry(int dataStartPosition, int packetSize, int chunkNum, std::list<MOVIentry> &list); int fillDebugMediaList(std::list<int>& mediaList); void build_chunks(MOVTrack *trk, long samples_in_chunk, int offset ); int moov_avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer); int calculateDataStartPosition(uint8_t *vos_data, int vos_len); int fillMediaList(int nSamples, CVideoWriterOptions &options, AVFormatContext *s, std::list<int>& mediaList); int 
buildSampleToChunkEntry(std::list<MOVIentry>& chunkList, std::list<MOVIentry>& entryList); int buildEntries(); int buildChunkOffsets(std::list<int>& mediaList); int mov_write_ilst_tag(AVIOContext *pb, AVFormatContext *s); int mov_write_itunes_hdlr_tag(AVIOContext *pb, AVFormatContext *s); int mov_write_meta_tag(AVIOContext *pb, AVFormatContext *s); int mov_write_udta_tag(AVIOContext *pb, AVFormatContext *s); //a int mov_write_esds_tag(AVIOContext *pb, AVFormatContext *s, int i); int mov_write_audio_tag(AVIOContext *pb, AVFormatContext *s, int i); //v int mov_write_stco_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i, int64_t duration); int mov_write_stsz_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_stsc_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/int i); int mov_write_stss_tag(AVIOContext *pb); int mov_write_stts_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_avcc_tag(AVIOContext *pb, AVFormatContext *s, int i); int mov_write_video_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_stsd_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_stbl_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/int i, int64_t duration); int mov_write_dref_tag(AVIOContext *pb); int mov_write_dinf_tag(AVIOContext *pb); int mov_write_smhd_tag(AVIOContext *pb); int mov_write_vmhd_tag(AVIOContext *pb); int mov_write_minf_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/int i, int64_t duration); int mov_write_hdlr_tag(AVIOContext *pb, AVFormatContext *s, int i); int mov_write_mdhd_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_mdia_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i, int64_t duration); int mov_write_edts_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_tkhd_tag(AVIOContext *pb, AVFormatContext *s, int i, int64_t duration); int mov_write_trak_tag(AVIOContext *pb, AVFormatContext *s, /*MOVMuxContext *mov, MOVTrack *track,*/ int i); int mov_write_mdat_tag(AVIOContext *pb); int mov_write_moov_tag(AVIOContext *pb, AVFormatContext *s); int mov_write_mvhd_tag(AVIOContext *pb, AVFormatContext *s, int64_t duration); int mov_write_ftyp_tag(AVIOContext *pb); int debug_header(AVIOContext* pb); int ftyp_atom_size, moov_atom_size; int mvhd_atom_size; int v_trak_size, a_trak_size; int tkht_atom_size; int v_mdia_atom_size, a_mdia_atom_size; int hdlr_atom_size; int v_minf_atom_size, a_minf_atom_size; int dinf_atom_size; int v_stbl_atom_size, a_stbl_atom_size; int video_tag_size, avcc_tag_size, v_stsd_atom_size, a_stsd_atom_size; int v_stsc_atom_size, a_stsc_atom_size; int v_stsz_atom_size, a_stsz_atom_size; int v_stco_atom_size, a_stco_atom_size; int v_stss_atom_size; int v_stts_atom_size, a_stts_atom_size; int v_edts_atom_size, a_edts_atom_size; int nal_units_size; public: MoovHeaderWriter(Logger & logger); ~MoovHeaderWriter(void); /** * Allocate the stream private data and write the stream header to * an output media file. * * @param s Media file handle, must be allocated with avformat_alloc_context(). * Its oformat field must be set to the desired output format; * Its pb field must be set to an already opened AVIOContext. * @param options An AVDictionary filled with AVFormatContext and muxer-private options. 
* On return this parameter will be destroyed and replaced with a dict containing * options that were not found. May be NULL. * * @return 0 on success, negative AVERROR on failure. * * @see av_opt_find, av_dict_set, avio_open, av_oformat_next. */ int write_header(AVFormatContext *s, AVIOContext* ioContext, CVideoWriterOptions &options, std::list<int>& mediaList); }; <file_sep>/LightSynthesizer/LightSynthesizer.h #ifndef __SYNTHESIZER_H__ #define __SYNTHESIZER_H__ #define TIXML_USE_STL enum ResizeMethod { RESIZE_METHOD_NONE, // Resize is not allowed (error returned) RESIZE_METHOD_NN, RESIZE_METHOD_LINEAR, RESIZE_METHOD_CUBIC, RESIZE_METHOD_AREA, }; //#include <windows.h> #include <map> #include <string> #include "tinyxml.h" #include "VideoObject.h" #include "TargetFrames.h" #include "VSVideo.h" #include "Engine.h" #include "ProgressTracker.h" class AviWriter; class VideoObject; typedef std::map<std::string, VideoObject*> VideoObjectMap; typedef unsigned int uint; using namespace std; class LightSynthesizer : public Engine { private: TiXmlDocument *document; AviWriter *aviWriter; VideoObjectMap objects; ProgressTracker *progressTracker; std::string outputFileName; int framerate; int numFrames; int currentFrameNum=0; // number of frames written int resolutionWidth; int resolutionHeight; ResizeMethod resizeMethod; // One of: none, nn, linear, cubic, area std::string profilingFile; int rpcKeepAhead = 0; int rpcGrace=100; //The number of frames on which we will not sleep, beggining from the start of the video int rpcInterval=0; //The interval in frames on which we will perform the check if we can sleep. The default will be set to the framerate long startTime; long totalSleepTime=0; VideoStream::CFramePtr previousFramePtr; // Previous frame contents (empty in the beginning) bool parseArgs(int argc, char** argv, map<string, string> &retMap); bool readParamsFromXml(const char * documentName); bool readObjects(); bool closeVideoObjects(); bool targetAndSequenceSynched(const TargetFrame *targetFrame, int targetSequence); TiXmlElement * getNextVideoFrameObject(TiXmlElement *targetFrame); VideoObject * getNextVideoObject(TiXmlElement *targetFrame); VideoObject *getVideoObject(const FrameObject *frameObject); void updateCurFrame(VideoObject *videoObject, const FrameObject *frameObject, bool *rc); #ifdef WIN32 int FileTime2Ms(const FILETIME& ft); #endif bool parseResizeMethod(const char *resizeMethodStr); bool configure(int bufsize); VideoStream::CFramePtr render(std::vector<VideoObject *> renderObjs); bool doFlow(); bool handleXml(const char * documentName); bool handleCommands(const char * commandFileName, const char * captureFileName); bool WriteProfilingFile( std::vector<DWORD> frameTimesCumMS ); void AddSceneTicks( std::vector<const VideoObject *> &sortedObjects, const std::vector<DWORD> &frameTimesCumMS, FILE *fp ); bool GetVideoObjectDisplayDetails( const VideoObject &videoObject, int serialNum, char &shortName, std::string &displayName); void GetObjectsByStartTime( std::vector<const VideoObject *> &sortedObjects); void MakeBlackFrame(unsigned char *buf, int nbytes); // CLI command callbacks static void createOutputStream(string fullCommand, void *callbackData, bool & exitCli); static void createInputStream(string fullCommand, void *callbackData, bool & exitCli); static void outputFrame(string fullCommand, void *callbackData, bool & exitCli); static void removeInputStream(string fullCommand, void *callbackData, bool & exitCli); static void endOutputFrames(string fullCommand, void *callbackData, 
			bool & exitCli);

	// Timing functions / members
	DWORD startTimeMS;
	std::vector<DWORD> frameTimesCumMS;
	void timingStart();
	void timingFrame();
	void timingEnd();

public:
	LightSynthesizer();
	virtual ~LightSynthesizer();

	// The entry point from the Rendering Manager.
	void executeEngine(int argc, char** argv);

	// Called just before the engine is killed
	void stopEngine(bool isTriggeringEngine);
};

#endif
<file_sep>/utils/Utils/CMakeLists.txt
#file(GLOB utils_SRC
#	"*.cpp"
#)

add_library(Utils ColorSpace.cpp CEventManager.cpp CMutex.cpp MutexAutioLock.cpp FileUtils.cpp)

target_include_directories(Utils PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/ ${CMAKE_CURRENT_SOURCE_DIR}/../StackWalker/include/)

#target_link_libraries(Utils LINK_PUBLIC StackWalker)
d3803c6cf54039ad1effc9d8e855323e40646104
[ "C", "CMake", "C++" ]
184
C++
eyaladi/renderer-engine
1e623d0e0deafa70612a1a687167bdc0dcc6bb03
60688c24402a3e30960ec40c433737b8bfa5edaa
refs/heads/master
<repo_name>timlinux/django-offline-messages<file_sep>/offline_messages/storage.py # -*- coding: utf-8; mode: python; -*- from django.contrib.messages.storage.session import SessionStorage from offline_messages.models import OfflineMessage class OfflineStorageEngine(SessionStorage): """ Stores messages in the database (offline_messages.OfflineMessage). """ def _get(self, *args, **kwargs): """ Get offline and online messages. """ messages = [] if hasattr(self.request, "user") and self.request.user.is_authenticated(): offline_messages = OfflineMessage.objects.filter(user=self.request.user) if offline_messages: messages.extend(offline_messages) offline_messages.delete() online_messages, all_retrieved = super(OfflineStorageEngine, self)._get(*args, **kwargs) if online_messages: messages.extend(online_messages) return messages, True def _store(self, messages, *args, **kwargs): """ Store messages, but not offline. """ messages = [msg for msg in messages if not isinstance(msg, OfflineMessage)] return super(OfflineStorageEngine, self)._store(messages, *args, **kwargs) <file_sep>/offline_messages/models.py # -*- coding: utf-8; mode: python; -*- from django.db import models from django.contrib.auth.models import User from django.utils.encoding import force_unicode from django.contrib.messages import constants from django.contrib.messages.utils import get_level_tags class OfflineMessage(models.Model): user = models.ForeignKey(User) level = models.IntegerField(default=constants.INFO) message = models.CharField(max_length=200) created = models.DateTimeField(auto_now_add=True) def __unicode__(self): return force_unicode(self.message) @property def tags(self): level_tags = get_level_tags() return force_unicode(level_tags.get(self.level, ''), strings_only=True) <file_sep>/README.rst ========================= Installation Instructions ========================= Make changes to your settings: * Add 'offline_messages' to INSTALLED_APPS * Set MESSAGE_STORAGE to 'offline_messages.storage.OfflineStorageEngine' ========================= Example Usage ========================= You can continue to use the standard Django message system as desired. Messages created like: from django.contrib import messages messages.add_message(request, messages.INFO, 'Hello world.') Will work just fine. 
However, if you'd like to create an offline message, do something like this:

    from offline_messages.utils import create_offline_message, constants
    create_offline_message(User.objects.get(id=1), "Hello there!", level=constants.WARNING)

Or like this:

    from offline_messages.models import OfflineMessage
    OfflineMessage.objects.create(user=User.objects.get(id=1), level=20, message='Hello world.')

Usage example from real life::

    # Iterate through users
    for user in User.objects.all():
        already_notified = OfflineMessage.objects.filter(user=user, message=message).exists()
        if not already_notified:
            create_offline_message(user, message, level=constants.WARNING)
<file_sep>/offline_messages/utils.py
# -*- coding: utf-8; mode: python; -*-

from django.contrib.auth.models import User
from django.contrib.messages import constants

from offline_messages.models import OfflineMessage


def create_offline_message(user, message, level=constants.INFO):
    if not isinstance(user, User):
        user = User.objects.get(username=user)
    # OfflineMessage has no 'tags' column: the tag string is derived from 'level'
    # by the model's read-only 'tags' property, so only real model fields are passed here.
    OfflineMessage.objects.create(user=user, level=level, message=message)
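A minimal sketch of the settings changes described in README.rst above, assuming an otherwise standard Django settings module. The project-level apps listed besides 'offline_messages' are hypothetical placeholders; only 'offline_messages' and the MESSAGE_STORAGE path come from the README::

    # settings.py (sketch; surrounding entries are assumptions, not part of this package)
    INSTALLED_APPS = (
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.messages',
        'offline_messages',          # enables the OfflineMessage model
    )

    # Route messages through the offline-aware storage backend
    MESSAGE_STORAGE = 'offline_messages.storage.OfflineStorageEngine'

With this in place, messages stored via create_offline_message() are delivered on the user's next authenticated request and then removed, as implemented in storage.py above.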
90feb7c33a26fb75ceb1107eb10e3b7ba5669aea
[ "Python", "reStructuredText" ]
4
Python
timlinux/django-offline-messages
6634841a08f380216e0dd46d5c261d584e5f102c
c666b652f236d60c95077bc0d0b0cff63b16c9c8
refs/heads/master
<repo_name>nabievnurlan7/LightProfiler<file_sep>/settings.gradle
include ':LightProfilerLibrary'
include ':app'
rootProject.name = "LightProfilerExample"<file_sep>/app/src/main/java/com/nurlandroid/lightprofilerexample/MainActivity.kt
package com.nurlandroid.lightprofilerexample

import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import com.nurlandroid.lightprofilerlibrary.AppMetricUsageManager

class MainActivity : AppCompatActivity() {

    private lateinit var appMetricUsageManager: AppMetricUsageManager

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        appMetricUsageManager = AppMetricUsageManager(this)
        appMetricUsageManager.startCollect()
    }

    override fun onPause() {
        super.onPause()
        appMetricUsageManager.stopCollect()
    }
}<file_sep>/LightProfilerLibrary/src/main/java/com/nurlandroid/lightprofilerlibrary/CpuUsageExporter.kt
package com.nurlandroid.lightprofilerlibrary

import android.content.Context
import java.io.File
import java.io.FileOutputStream
import java.io.PrintWriter

class CpuUsageExporter(context: Context) : AppMetricExporter {

    private companion object {
        // TODO: move these settings into a shared configuration object
        const val CPU_USAGE_FILENAME = "cpu_usage.txt"
        const val PACKAGE_NAME = "com.nurlandroid.lightprofilerexample"
    }

    private val cpuPrintWriter =
        PrintWriter(FileOutputStream(File(context.externalCacheDir, CPU_USAGE_FILENAME), true), true)

    override fun export() {
        try {
            recordCpu()
        } catch (th: Throwable) {
            // Ignore sampling failures; a missed sample is not fatal.
        }
    }

    override fun close() {
        cpuPrintWriter.close()
    }

    private fun recordCpu() {
        val processLine = readSystemFile("top", "-n", "1").filter { it.contains(PACKAGE_NAME) }
            .flatMap { it.split(" ") }
            .map(String::trim)
            .filter(String::isNotEmpty)

        if (processLine.isNotEmpty()) {
            // `top` prints the CPU percentage in the field right after the process state (S/R/D).
            val index = processLine.indexOfFirst { it == "S" || it == "R" || it == "D" }
            check(index > -1) { "Process state of $PACKAGE_NAME not found in `top` output" }
            cpuPrintWriter.println(processLine[index + 1].toFloat().toInt().toString())
        }
    }

    @Throws(java.lang.Exception::class)
    private fun readSystemFile(vararg pSystemFile: String): List<String> {
        return Runtime.getRuntime()
            .exec(pSystemFile).inputStream.bufferedReader()
            .useLines { it.toList() }
    }
}<file_sep>/LightProfilerLibrary/src/main/java/com/nurlandroid/lightprofilerlibrary/AppMetricExporter.kt
package com.nurlandroid.lightprofilerlibrary

interface AppMetricExporter {
    fun export()
    fun close()
}<file_sep>/LightProfilerLibrary/src/main/java/com/nurlandroid/lightprofilerlibrary/AppMetricUsageManager.kt
package com.nurlandroid.lightprofilerlibrary

import android.content.Context
import android.os.Handler

class AppMetricUsageManager(context: Context) {

    private companion object {
        // Handler.postDelayed() takes milliseconds, so the 1-second interval is converted below.
        const val INTERVAL_TIME_IN_SEC = 1L
        const val INTERVAL_TIME_IN_MS = INTERVAL_TIME_IN_SEC * 1000L
        const val INITIAL_DELAY = 0L
    }

    private val handler: Handler = Handler()
    private val runnable: Runnable = object : Runnable {
        override fun run() {
            exporters.forEach { it.export() }
            // Schedule the next sample one interval (in milliseconds) from now.
            handler.postDelayed(this, INTERVAL_TIME_IN_MS)
        }
    }

    private val exporters = listOf(
        // Only the CPU exporter is implemented in this module; the others are placeholders.
        CpuUsageExporter(context)
        // MemoryUsageExporter(context),
        // BatteryUsageExporter(context),
        // NetworkUsageExporter(context)
    )

    fun startCollect() {
        // Start sampling immediately; the runnable re-schedules itself every interval.
        handler.postDelayed(runnable, INITIAL_DELAY)
    }

    fun stopCollect() {
        exporters.forEach { it.close() }
        handler.removeCallbacks(runnable)
    }
}
4969e748be252e3f4985037bbabe0b941bd4973a
[ "Kotlin", "Gradle" ]
5
Gradle
nabievnurlan7/LightProfiler
c0b47877bf29025c1a30338e3a47034c2fded13e
89ce69f39c05ebe1c09becca3822d84a13ee150b
refs/heads/master
<repo_name>j91106meagher/kube-test<file_sep>/README.md
# kube-test
Repository of various Kubernetes tests.
<file_sep>/oasis-delete.sh
#!/bin/sh
# Tear down the Oasis test resources: delete the services and deployments
# created for the API server, RabbitMQ, the API runner, MySQL and the worker.
kubectl delete deployments oasis-api-server
kubectl delete svc oasis-api-server
kubectl delete svc rabbitmq-service
kubectl delete deployments oasis-rabbit
kubectl delete deployments oasis-api-runner
kubectl delete deployments oasis-mysql
kubectl delete deployments oasis-worker
56625ace8d53bd3e7414349d57f6430d344bb5a9
[ "Markdown", "Shell" ]
2
Markdown
j91106meagher/kube-test
e0e4b0276087f9e02fea502e203ec7f87a43941d
0e86fcbe1381c981b1f3743ce441b35b17ed8506
refs/heads/master
<file_sep># select_router If several routers in a network offer DHCP service, select one <file_sep>#!/usr/bin/env python """ Sends DHCP request and lets user decide which response to use to confugure IP. Assumptions: Network interface is eth0 """ from scapy.all import * import curses from os import system def get_option(screen, options_text, option_list): """ Shows options from `option_list` and lets the user pick one. Args: screen (_curses.curses window): Window object that represents the screen. options_text (str): Options caption. option_list (list<str>): List of options to be presented to the user. Returns: int: Index of the selected option. Default is 0. On error -1. """ if len(option_list)<1: return -1 active_element=0 while True: screen.clear() screen.border(0) screen.addstr(2, 2, options_text, curses.A_BOLD) for i, option in enumerate(option_list): attribute=curses.A_REVERSE if i==active_element else curses.A_NORMAL screen.addstr(4+i, 4, option, attribute) screen.refresh() pressed_key=screen.getch() if pressed_key==ord(' ') or pressed_key==curses.KEY_ENTER or pressed_key==ord('\n'): return active_element elif pressed_key==curses.KEY_DOWN: active_element=(active_element+1)%len(option_list) elif pressed_key==curses.KEY_UP: active_element=(active_element+len(option_list)-1)%len(option_list) elif pressed_key==27: return 0 def get_dhcp_responses(attempts, timeout): """ Send `attempts` number of dhcp requests and returns response data. Args: attempts (int): Number of times the dhcp packet should be sent. timeout (int): Timeout for response. Returns: str: Source MAC address from response. str: Source IP address from response. str: Destination IP address from response. str: Yiaddr in BOOTP header. str: Router IP option in DHCP package. str: Name server IP option in DHCP package. str: Broadcast address option in DHCP package. str: Subnet mask option in DHCP package. str: Domain option in DHCP package. """ def get_field(options, field): filtered=filter(lambda x: x[0]==field, options) return filtered[0][1] if len(filtered)>0 else "" responses=list() for _ in xrange(attempts): try: conf.checkIPaddr = False fam,hw = get_if_raw_hwaddr(conf.iface) dhcp_discover = Ether(dst="ff:ff:ff:ff:ff:ff")/IP(src="0.0.0.0",dst="255.255.255.255")/UDP(sport=68,dport=67)/BOOTP(chaddr=hw)/DHCP(options=[("message-type","discover"),"end"]) ans, unans = srp(dhcp_discover, multi=True, timeout=timeout) except: pass for p in ans: response=( p[1][Ether].src, p[1][IP].src, p[1][IP].dst, ans[0][1][BOOTP].yiaddr, get_field(p[1][DHCP].options, "router"), get_field(p[1][DHCP].options, "name_server"), get_field(p[1][DHCP].options, "broadcast_address"), get_field(p[1][DHCP].options, "subnet_mask"), get_field(p[1][DHCP].options, "domain"), ) if not response in responses: responses.append(response) return responses def display_dhcp_response_and_commands(screen, dhcp_response, commands): """ Displays dhcp response data and commands to be executed. Gives the user a [cancel|accept] option. Args: screen (_curses.curses window): Window object that represents the screen. dhcp_response (str,str,str,str,str,str,str,str,str): DHCP response data. commands: (list<str>): List of commands to be executed. Returns: bool: True if user picked `accept`, False if user picked `cancel`. Default is False (if escape is pressed). 
""" ret=False while True: screen.clear() screen.border(0) screen.addstr(2, 2, "DHCP Response: ", curses.A_BOLD) screen.addstr(4+0, 4, "MAC Source Address:\t\t"+dhcp_response[0]) screen.addstr(4+1, 4, "Source IP Address:\t\t"+dhcp_response[1]) screen.addstr(4+2, 4, "Destination IP Address:\t"+dhcp_response[2]) screen.addstr(4+3, 4, "BOOTP Your Ip:\t\t"+dhcp_response[3]) screen.addstr(4+4, 4, "Router IP Address:\t\t"+dhcp_response[4]) screen.addstr(4+5, 4, "Name Server Address:\t"+dhcp_response[5]) screen.addstr(4+6, 4, "Broadcast Address:\t\t"+dhcp_response[6]) screen.addstr(4+7, 4, "Subnet Mask:\t\t"+dhcp_response[7]) screen.addstr(4+8, 4, "Domain:\t\t\t"+dhcp_response[8]) screen.addstr(14, 2, "Commands: ", curses.A_BOLD) for i, command in enumerate(commands): screen.addstr(16+i, 4, command) screen.addstr(18+len(commands), 6, "CANCEL", curses.A_REVERSE if ret==False else curses.A_NORMAL) screen.addstr(18+len(commands), 15, "ACCEPT", curses.A_REVERSE if ret==True else curses.A_NORMAL) screen.refresh() pressed_key=screen.getch() if pressed_key==ord(' ') or pressed_key==curses.KEY_ENTER or pressed_key==ord('\n'): return ret elif pressed_key==curses.KEY_LEFT or pressed_key==curses.KEY_RIGHT: ret=not ret elif pressed_key==27: return False def display_messages(screen, messages): """ Displays messages on screen until a key is pressed. Args: screen (_curses.curses window): Window object that represents the screen. messages: (list<str>): List of messages to be displayed. Returns: None """ screen.clear() screen.border(0) for i,message in enumerate(messages): screen.addstr(2+i, 2, message, curses.A_NORMAL) screen.getch() def configure_ip_according_to_dhcp_response(screen, dhcp_response): """ Sets IP address, subnet mask, broadcast address, default gateway, and name server according to `dhcp_response`. Only if uses agrees. Args: screen (_curses.curses window): Window object that represents the screen. dhcp_response (str,str,str,str,str,str,str,str,str): DHCP response data. Returns: bool: True if ip gets configured. False otherwise (user stops it or there is an error). 
""" def get_mask_bits(mask): return sum(map(lambda x: bin(int(x)).count("1"), mask.split("."))) commands=[ "ip r flush all", "ip a flush dev eth0", "ip n flush all", "ip n flush nud all", "ip n replace %s lladdr %s dev eth0"%(dhcp_response[1], dhcp_response[0]), "ip a add %s/%i brd %s dev eth0"%(dhcp_response[2], get_mask_bits(dhcp_response[7]), dhcp_response[6]), "ip r add default via %s"%dhcp_response[4], "echo \"nameserver %s\" > /etc/resolv.conf"%dhcp_response[5], "iptables -F", "iptables -t nat -F", "iptables -P OUTPUT ACCEPT", "iptables -P INPUT DROP", "iptables -A INPUT --in-interface lo -j ACCEPT", "iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT", ] ok=display_dhcp_response_and_commands(screen, dhcp_response, commands) if not ok: # display_messages(screen, ["No changes were made.", "Press any key to continue."]) return False for command in commands: ret=system(command) if ret!=0: display_messages(screen, ["Error when executing [%s]."%command, "Press any key to continue."]) return False display_messages(screen, ["Commands executed !!!", "Press any key to exit."]) return True def main(screen): curses.curs_set(0) dhcp_responses=get_dhcp_responses(2, 2) while True: option=get_option(screen, "Select router: ", ["None"]+list(map(lambda x: "%s %s" % x[:2], dhcp_responses))) if option<=0: display_messages(screen, ["Press any key to exit."]) break if configure_ip_according_to_dhcp_response(screen, dhcp_responses[option-1]): break curses.wrapper(main)
78cbd069593a23298400b5f98f5c801dd830dd95
[ "Markdown", "Python" ]
2
Markdown
tonimctoni/select_router
038a13151efdc4f3d37a86855e150afe9a1d4577
b936478b9f42b43f49e279630bc7ae6f7d2d6ab1
refs/heads/master
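As a worked illustration of the address arithmetic used by `configure_ip_according_to_dhcp_response` in the select_router script above, the following self-contained sketch recomputes the prefix length and formats two of the command strings for a hypothetical DHCP offer. All addresses below are made up for the example; the real script derives them from the captured response:

    #!/usr/bin/env python
    # Standalone sketch: how a dotted-quad subnet mask becomes a prefix length
    # and how the ip(8) command strings above are assembled. Values are hypothetical.

    def get_mask_bits(mask):
        # Same idea as the helper above: count the set bits in each octet.
        return sum(map(lambda x: bin(int(x)).count("1"), mask.split(".")))

    if __name__ == "__main__":
        dst_ip, mask, brd, gateway = "192.168.1.23", "255.255.255.0", "192.168.1.255", "192.168.1.1"
        print(get_mask_bits(mask))  # 24
        print("ip a add %s/%i brd %s dev eth0" % (dst_ip, get_mask_bits(mask), brd))
        print("ip r add default via %s" % gateway)

The design choice here is that the prefix length is obtained purely by counting set bits per octet, so 255.255.255.0 maps to /24 without any lookup table.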
<repo_name>sanchezmaxar/DiagnosticadorMedico<file_sep>/Experimento1/webDownloader.py
import nltk
import sys
from urllib.request import urlopen
from bs4 import BeautifulSoup
import os

def gethref(texto):
    buf=""
    cond=False
    salida=[]
    for lineas in texto:
        print(lineas.get("href"))
        salida.append(lineas.get("href"))  # collect each href so the caller receives the full list
    return salida

dirBase="./paginasBase/"
l=ord("P")
while l<=ord("Z"):
    url = dirBase+str(chr(l))+".html"
    html_doc = open(url,"r").read()
    soup = BeautifulSoup(html_doc, 'html.parser')
    link = soup.find_all('a')
    # print(link)
    print (gethref(link))
    input("borrar")
    os.system("clear")
    l+=1
    print("------------------------------------------------------Aqui-----------------")
    # print(soup.get_text())<file_sep>/Experimento4/readme.md
# Experiments 4 and 5
These experiments use tf-idf to obtain the vectors that describe the documents.
## Experiment 4
In this experiment the document vectors are obtained. Each vector has n dimensions, each one representing the importance of a word. The value of a dimension is the word count passed through a sigmoid function, multiplied by the word's tf-idf value relative to the maximum tf-idf over all words. This guarantees that the value stays between 0 and 1 and depends on the importance of the word with respect to the document where that word is most important; this is applied uniformly, so the result is not biased toward any particular outcome.
## Experiment 5
This experiment vectorizes the documents in the traditional way, assigning the tf-idf value of each word directly to the dimension that represents it.<file_sep>/Experimento2/getInfoFromPages.py
import nltk
import sys
from urllib.request import urlopen
from bs4 import BeautifulSoup
import os
from utilidades import *
import csv
import time

csvfile=open("datos.csv","w",newline='')
dataWriter=csv.writer(csvfile, delimiter=' ',quotechar='|', quoting=csv.QUOTE_MINIMAL)

listaDeSecciones=["Síntomas"] # the information we want to extract; must match the Spanish section titles on the pages
dirBase="../"
# dirBase=""
for nombre in open(dirBase+"htmls.txt","r").read().splitlines():
    auxrow=[nombre]
    fhtml=open(dirBase+nombre).read()
    soup = BeautifulSoup(fhtml,'html.parser')
    secciones = soup.find_all('p',attrs={'data-adhere':'true'})
    acum=""
    for s in secciones:
        acum+=s.get_text()
    auxrow.append(noCaracteresEspeciales(acum))
    links = soup.find_all('a',attrs={'class':'subnav__list__item__link'})
    acum=""
    for l in links:
        if l.text.strip() in listaDeSecciones:
            aux=(l.get("href"))
            os.system("wget "+aux)
            time.sleep(10)
            secciones = soup.find_all('p',attrs={'data-adhere':'true'})
            for s in secciones:
                acum+=s.get_text()
            auxrow.append(noCaracteresEspeciales(acum))
    dataWriter.writerow(auxrow)
<file_sep>/Experimento1/utilidades.py
from unicodedata import normalize
import re

def noCaracteresEspeciales(texto):
    # -> NFD and strip diacritics
    texto = re.sub(
            r"([^n\u0300-\u036f]|n(?!\u0303(?![\u0300-\u036f])))[\u0300-\u036f]+", r"\1",
            normalize( "NFD", texto), 0, re.I
        )
    # -> NFC
    texto=re.sub('\W',' ',texto)
    texto = normalize( 'NFC', texto)
    # print(normalize('NFKC',texto))
    return texto.lower()

def textoALista(texto):
    listaaux=texto.split()
    listaaux.sort()
    lista=[]
    for i in listaaux:
        if i not in lista:
            lista.append(i)
    return lista<file_sep>/Experimento2/paginasEspecificas/preeclampsia.html
<!doctype html>
<!--[if lt IE 7]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 lt-ie9 lt-ie8 lt-ie7"><![endif]-->
<!--[if IE 7]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 lt-ie9 lt-ie8 ie7"><![endif]-->
<!--[if IE 8]><html lang="es" dir="ltr" id="sprout"
class="no-js lt-ie10 lt-ie9 ie8"><![endif]--> <!--[if IE 9]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 ie9"><![endif]--> <!--[if gt IE 9]><!--><html lang="es" dir="ltr" id="sprout" class="no-js"><!--<![endif]--> <head> <meta charset="utf-8"/> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/> <meta name="HandheldFriendly" content="True"/> <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no"/> <meta http-equiv="cleartype" content="on"/> <title>Preeclampsia - Onmeda.es</title> <!-- fugly script to sort things for windows-phone-8: see here: http://mattstow.com/responsive-design-in-ie10-on-windows-phone-8.html --> <script type="text/javascript"> (function() { if ("-ms-user-select" in document.documentElement.style && navigator.userAgent.match(/IEMobile\/10\.0/)) { var msViewportStyle = document.createElement("style"); msViewportStyle.appendChild(document.createTextNode("@-ms-viewport{width:auto!important}")); document.getElementsByTagName("head")[0].appendChild(msViewportStyle); } })(); </script> <!--[if lte IE 9]><link rel="stylesheet" href="https://i.onmeda.de/core/assets/css/app-ie.css"/><![endif]--> <!--[if gt IE 9]><!--><link rel="stylesheet" href="https://i.onmeda.de/core/assets/css/app.css"/><!--<![endif]--> <link rel="stylesheet" href="https://i.onmeda.de/shariff/complete.css"/> <script src="https://i.onmeda.de/core/assets/scripts/libs/modernizr/modernizr.js"></script> <meta name="author" content="<EMAIL> GmbH, 51149 Koeln, Germany"/> <meta name="language" content="es"/> <meta name="robots" content="index, follow, noodp"/> <meta name="description" content="La preeclampsia es una alteraci&oacute;n que se presenta a partir de la semana 20 de embarazo. Cursa con hipertensi&oacute;n arterial (valores superiores a 140/90"/> <meta name="keywords" content="preeclampsia, hipertensión en embarazo, tensión alta en embarazo, eclampsia"/> <link rel="icon" href="https://i.onmeda.de/nav/favicon_neu.ico" type="image/ico"/> <meta property="og:site_name" content="onmeda.es"/> <meta property="og:type" content="article"/> <meta property="og:title" content="Preeclampsia - Onmeda.es"/> <meta property="og:description" content="La preeclampsia es una alteraci&oacute;n que se presenta a partir de la semana 20 de embarazo. 
<div class="content-header"> <h1 class="content-header__title"> Preeclampsia </h1> <p class="content-header__meta"> <span class="content-header__meta__item span content-header__meta__item--author"><strong>Autor: <a href="/equipo.html" target="_new"><NAME></a></strong></span> <span class="content-header__meta__item span content-header__meta__item--author"><strong>Revisión médica: Dr.
<NAME></strong> <span class="content-header__meta__item span content-header__meta__item--datestamp">(21 de Octubre de 2016)</span> </p> </div> <div class="content-image content-image--full"> <div class="content-image__image"> <img src="https://i.onmeda.de/nwl/blindfisch.gif" data-lazyload="https://i.onmeda.de/es/geburt_einleiten-870x435.jpg" width="870" height="435"/><span class="image-copyright">© Jupiterimages/Wavebreak Media</span> </div> </div> <nav class="subnav float--left js-fix-it is--active" data-toggles="true" data-openicon="+" data-closedicon="-" data-avoid=".footer-global__legal" data-fixed-title="Indice"> <div class="subnav__title"> <h2 class="subnav__title__text"><span class="subnav__title__label">Indice</span></h2> </div> <ul class="subnav__list"> <li class="subnav__list__item is--active"><a href="https://www.onmeda.es/enfermedades/preeclampsia.html" class="subnav__list__item__link">Preeclampsia</a></li><li class="subnav__list__item"><a href="https://www.onmeda.es/enfermedades/preeclampsia-mas-informacion-18128-2.html" class="subnav__list__item__link">Más información</a></li> </ul> </nav> <p data-adhere="true" class="lead">La preeclampsia es una alteración que se presenta a partir de la <a href="https://www.onmeda.es/embarazo/semana_embarazo_20.html">semana 20 de embarazo</a>. Cursa con <a href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">hipertensión arterial</a> (valores superiores a 140/90 mmHg) y proteínas en la orina. También puede aparecer <a href="https://www.onmeda.es/sintomas/edema.html">edema</a>, aunque no es necesario para hacer el diagnóstico.</p> <p data-adhere="true">Hasta el momento, se desconoce cuál es la causa que da origen a esta enfermedad, que puede llegar a ser mortal en sus formas más graves: <strong>eclampsia y síndrome de Hellp.</strong> Sin embargo, se cree que factores hereditarios o algunas <a href="https://www.onmeda.es/enfermedades/enfermedades_autoinmunes.html">enfermedades autoinmunes</a> están detrás de la preeclampsia.</p> <p data-adhere="true">Algunos de los <strong>factores de riesgo de la preeclampsia</strong> son: haberla padecido en embarazos anteriores, primer embarazo, gestación múltiple, sobrepeso, tener una enfermedad crónica y repetir embarazo con un intervalo menor de dos años.</p> <p data-adhere="true">La preeclampsia puede cursar sin síntomas en su manifestación más leve, aunque en la forma más severa de la enfermedad pueden observarse <a href="https://www.onmeda.es/9711.html">cefaleas</a>, disminución de la frecuencia urinaria, <a href="https://www.onmeda.es/embarazo/nauseas_embarazo.html">náuseas</a> y <a href="https://www.onmeda.es/sintomas/vomitos.html">vómitos</a>, alteraciones visuales y auditivas, y <a href="https://www.onmeda.es/sintomas/dolor_abdominal.html">dolor abdominal</a>, entre otros. En su presentación más severa provoca también convulsiones y coma.</p> <p data-adhere="true">Para hacer el <strong>diagnóstico de la preeclampsia,</strong> el médico valorará los datos de presión arterial de la madre y las <a href="https://www.onmeda.es/valores_analitica/">analíticas de sangre y orina</a>, además de la exploración física, ya que un aumento exagerado de peso (más de 1 kg en una semana) es señal de alarma.</p> <p data-adhere="true">La preeclampsia no se cura hasta que se produce el <a href="https://www.onmeda.es/parto/">parto</a>, por eso, cuando la situación es grave, hay que adelantar el nacimiento del bebé para que tanto él como su madre no corran más riesgos. 
Antes del parto, la embarazada puede tratarse con distintos <strong>medicamentos antihipertensivos,</strong> así como con otras medidas, entre las que puede estar el reposo. En los casos más graves se requiere hospitalización.</p> <p data-adhere="true">La preeclampsia puede provocar complicaciones muy graves, tanto en la madre como en su hijo. Así, puede haber crecimiento intrauterino retardado y prematuridad. La preeclampsia también aumenta el riesgo de <strong>desprendimiento prematuro de placenta,</strong> <a href="https://www.onmeda.es/sistema_cardiovascular/">accidentes cerebrovasculares</a> en la madre, hemorragias, <a href="https://www.onmeda.es/enfermedades/enfermedades_rinones.html">problemas renales</a>, e incluso la muerte.</p> <p data-adhere="true">Las <strong>medidas preventivas ante la preeclampsia</strong> pasan por llevar un estricto control médico del embarazo y por monitorizar adecuadamente a las mujeres con riesgo aumentado de padecer la enfermedad.</p> <p data-adhere="true">Se estima que entre el 5 y el 12% de las mujeres tendrá preeclampsia durante el embarazo. La incidencia en España es más baja, ya que está en torno al 2%.</p> <h2 data-adhere="true">Definición</h2> <p data-adhere="true">La preeclampsia es una <strong>patología exclusiva del embarazo.</strong> Se presenta a partir de la semana 20 y hasta el día 30 posparto, y se caracteriza por la aparición de <a href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">hipertensión arterial</a> y proteinuria (proteínas en la orina). La preeclampsia también puede ir acompañada de <a href="https://www.onmeda.es/sintomas/edema.html">edema</a>, aunque no es una condición necesaria para diagnosticar la enfermedad.</p> <ul> <li><strong>Hipertensión arterial.</strong> Se considera que hay hipertensión cuando hay una elevación sostenida de la presión arterial. Así, los valores de presión sistólica estarían por encima de 140 mmHg, mientras que los de la presión diastólica se situarían superando los 90 mmHg.</li> <li><strong>Proteinuria.</strong> Es la presencia de proteínas en la orina, detectables mediante analítica. Para hablar de proteinuria debe haber una excreción urinaria de proteínas mayor de 30 mg/dl en tiras reactivas o bien de 300 mg/dl si se hace un análisis de orina de 24 horas.</li> <li><strong>Edema.</strong> Se trata de la hinchazón de los tejidos blandos provocada por la acumulación del líquido intersticial (el líquido que rodea a las células).</li> </ul> <p data-adhere="true">La preeclampsia (conocida también como <strong>toxemia o gestosis</strong>) es una de las complicaciones más graves del <a href="https://www.onmeda.es/embarazo/">embarazo</a>, pues el estado de salud de madre e hijo se puede ver gravemente comprometido si no se pone el tratamiento adecuado. Así, supone un riesgo de daño cerebral o neurológico, de <a href="https://www.onmeda.es/enfermedades/enfermedades_rinones.html">alteraciones renales</a> y <a href="https://www.onmeda.es/enfermedades_vias_respiratorias/enfermedades_vias_respiratorias_a_z.html">alteraciones pulmonares</a> y de trastornos en la coagulación sanguínea, entre otros, que pueden ser fatales.</p> <p data-adhere="true">De hecho, la preeclampsia es una de las cuatro causas que conllevan mortalidad materna, tanto en los países desarrollados como en los que están en vías de desarrollo.
No obstante, la mayoría de estas muertes se podría evitar con adecuadas medidas de prevención y seguimiento del embarazo.</p> <p data-adhere="true">Tres de cada cuatro trastornos hipertensivos originados en el embarazo están provocados por la preeclampsia y la eclampsia (una forma mucho más grave de preeclampsia), mientras que el restante 30% deriva de pacientes con problemas de hipertensión previa a estos nueve meses.</p> <p data-adhere="true">Estos son los diferentes <strong>niveles de gravedad de la preeclampsia:</strong></p> <ul> <li><strong>Preeclampsia leve.</strong> La presión arterial se sitúa en valores a partir de 140/90 mmHg, o se ha constatado una elevación de 30 mmHG en la presión sistólica y de 15 mmHG en la presión diastólica (conociendo previamente los valores basales). La proteinuria es de más de 300 mg en 24 horas, y no hay vasoespasmo (contracción de los vasos sanguíneos).</li> <li><strong>Preeclampsia severa.</strong> La presión arterial es de 160/110 mmHg o superior, la proteinuria es mayor de 5 gramos en 24 horas y hay otros síntomas como edema, <a href="https://www.onmeda.es/9711.html">cefalea</a>, <a href="https://www.onmeda.es/enfermedades/acufenos.html">acúfenos</a> y fosfenos (sensación de ver manchas luminosas).</li> <li><strong>Preeclampsia sobreañadida.</strong> En el caso de que la paciente tuviera problemas de <a href="https://www.onmeda.es/anatomia/anatomia_rinon.html">riñón </a>o de hipertensión anteriores al embarazo, se habla de preeclampsia sobreañadida.</li> <li><strong>Preeclampsia recurrente.</strong> Se denomina así a la preeclampsia que aparece en la misma embarazada en dos gestaciones distintas.</li> </ul> <p data-adhere="true"></p> <p data-adhere="true">Se estima que entre el 5 y el 12% de las gestantes desarrollará preeclampsia. Las cifras de incidencia en España son bajas, pues solo un 1-2% de las mujeres tiene preeclampsia en los meses de gestación. La tasa española es considerablemente inferior a la de otros países del ámbito anglosajón y a la de otros muchos en vías de desarrollo. Así, por ejemplo, según datos de la Organización Mundial de la Salud (OMS), en África y en Asia casi una décima parte de las defunciones maternas están provocadas por la preeclampsia.</p> <h2 data-adhere="true">Causas</h2> <p data-adhere="true">Hasta el momento no se ha descubierto la causa exacta que da origen a la preeclampsia. Se han estudiado sus factores de riesgo y se sabe cómo se desarrolla, pero no se ha llegado al fondo de la enfermedad para identificar las claves que la precipitan. Durante el <a href="https://www.onmeda.es/embarazo/">embarazo</a>, el organismo materno sufre una <strong>vasodilatación</strong>, pues las necesidades sanguíneas se multiplican debido a las demandas del feto. En una gestación sin complicaciones, las <strong>arterias espirales uterinas</strong> se irían sustituyendo por <strong>células trofoblásticas</strong> (las que dan lugar a la placenta). Este mecanismo asegura la vasodilatación que permite aumentar considerablemente el caudal sanguíneo que llega al feto y a la placenta. Pero con la preeclampsia, este proceso no se desarrolla con normalidad, dando lugar a una <strong>isquemia placentaria</strong> (deficiente aporte sanguíneo a la placenta). 
Por ello, en una gestante que sufra preeclampsia se observa <strong>vasoconstricción generalizada</strong> en lugar de la esperable vasodilatación.</p> <p data-adhere="true">Se cree que, tras la preeclampsia, pueden estar distintos trastornos:</p> <ul> <li>Alteraciones autoinmunes (<a href="https://www.onmeda.es/enfermedades/lupus.html">lupus</a>, <a href="https://www.onmeda.es/enfermedades/miastenia_gravis.html">miastenia</a> o <a href="https://www.onmeda.es/enfermedades/esclerosis_multiple.html">esclerosis</a>)</li> <li>Problemas vasculares.</li> <li>Factores hereditarios (tanto por vía materna como por vía paterna).</li> <li>Dieta desequilibrada con escasez de <a href="https://www.onmeda.es/nutrientes/calcio.html">calcio</a>.</li> </ul> <div class="anchor"><a id="factores_de_riesgo"></a></div> <h3>Factores de riesgo</h3> <p data-adhere="true">Como el origen de la preeclampsia no está claro, no se puede hacer una prevención total de la enfermedad, por lo que los esfuerzos médicos se centran en evitar los factores de riesgo que se han relacionado con esta patología.</p> <p data-adhere="true">Entre <strong>los factores de riesgo de la preeclampsia</strong> están:</p> <ul> <li><strong>Primer embarazo.</strong> La mujer está más predispuesta a sufrir preeclampsia en su primer embarazo, lo que se ha relacionado con los factores inmunológicos que parecen estar detrás de esta alteración.</li> <li><strong>Embarazos múltiples.</strong></li> <li><strong>Preeclampsia o eclampsia en anteriores embarazos.</strong></li> <li><strong>Tener dos embarazos en menos de dos años,</strong> o bien que entre las dos gestaciones haya un intervalo muy largo.</li> <li><strong><a href="https://www.onmeda.es/11288.html">Infección recurrente de las vías urinarias</a> o <a href="https://www.onmeda.es/enfermedades/enfermedades_rinones.html">alteraciones renales</a>.</strong></li> <li><strong>Antecedentes de enfermedades crónicas</strong> como <a href="https://www.onmeda.es/enfermedades/diabetes.html">diabetes</a> o <a href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">hipertensión arterial</a>.</li> <li><strong><a href="https://www.onmeda.es/enfermedades/obesidad.html">Obesidad</a></strong> (con un <a href="https://www.onmeda.es/test/imc_calculadora.html">Índice de Masa Corporal</a> mayor a 30) o, por el contrario, <strong>desnutrición.</strong></li> <li><strong>Edad de la madre. 
</strong>Quedarse embarazada con más de 35 años eleva también el riesgo de preeclampsia, al igual que hacerlo con menos de 18 años.</li> <li><strong>Donación de ovocitos.</strong> Al ser un proceso que afecta a la placentación, cuando en reproducción asistida se recurre a la donación de ovocitos hay más posibilidades de que la madre sufra preeclampsia.</li> <li><strong>Síndrome de ovario poliquístico.</strong> Según algunos estudios, tener el <a href="https://www.onmeda.es/enfermedades/sop_sindrome_del_ovario_poliquistico.html">síndrome de ovario poliquístico</a> aumenta en un 45% las probabilidades de que la embarazada padezca preeclampsia.</li> <li><strong>Factores emocionales.</strong> Se cree que la <a href="https://www.onmeda.es/enfermedades/depresion.html">depresión</a> en el embarazo, un índice elevado de estrés u otros acontecimientos que alteren el estado anímico de la madre pueden aumentar el riesgo de preeclampsia.</li> </ul> <h2 data-adhere="true">Síntomas</h2> <p data-adhere="true">La preeclampsia es una enfermedad que, en sus fases iniciales, puede cursar sin síntomas alarmantes. Muchas embarazadas que tienen preeclampsia no son conscientes de ella, ya que no se encuentran mal.</p> <p data-adhere="true">Por este motivo, en todas las consultas obstétricas de seguimiento del <a href="https://www.onmeda.es/embarazo/">embarazo</a>, el médico comprueba los <a href="https://www.onmeda.es/exploracion_tratamiento/medicion_tension_arterial.html">valores de tensión arterial</a> de la madre para cerciorarse de que están dentro de los límites normales. En caso contrario, si hubiera <a href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">hipertensión</a>, tendría que descartarse la presencia de preeclampsia con chequeos adicionales.</p> <p data-adhere="true">Hay que tener en cuenta que la preeclampsia y sus síntomas aparecen <strong>a partir de la semana 20 de embarazo</strong> (aunque es más frecuente que lo haga al final del mismo) y que la enfermedad se puede desarrollar hasta 30 días después del <a href="https://www.onmeda.es/parto/">parto</a>. Ello debe ser tenido en cuenta para mantener las revisiones de la gestante, tomando las cifras de tensión hasta pasada la época de riesgo.</p> <p data-adhere="true">Hay síntomas de la preeclampsia que pasan inadvertidos, a no ser que se realicen chequeos, pero hay otros que pueden observarse en el aspecto físico.</p> <p data-adhere="true">Los <strong>síntomas no evidentes de la preeclampsia</strong> son los siguientes:</p> <ul> <li><strong>Tensión arterial elevada. </strong>La presión arterial sistólica estaría entre 140 mmHg si se trata de una preeclampsia leve, y superaría el valor de 160 mmHg en el caso de una preeclampsia grave.</li> <li><strong>Proteinuria (presencia de proteínas en la orina).</strong> La cantidad de proteínas en la orina sería de 300 mg o más en 24 horas (en el caso de preeclampsia leve) y 5 g o más en orina de 24 horas (si se trata de preeclampsia grave).</li> <li><strong>Trombocitopenia (disminución de plaquetas en sangre).</strong> Se presenta en ocasiones, tanto para la preeclampsia leve como para la grave.</li> <li><strong>Hemólisis intravascular</strong> (destrucción de los <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">glóbulos rojos</a> que ocurre dentro de los <a href="https://www.onmeda.es/anatomia/anatomia_vasos_sanguineos.html">vasos sanguíneos</a>). 
No se produce si la preeclampsia es leve, pero sí puede darse si la preeclampsia es grave.</li> </ul> <p data-adhere="true">Los <strong>síntomas más evidentes de la preeclampsia</strong> son:</p> <ul> <li><strong><a href="https://www.onmeda.es/sintomas/edema.html">Edema</a> o hinchazón.</strong> Aunque en el embarazo es normal que ciertas partes del cuerpo de hinchen (como los pies), el edema que alerta de una posible preeclampsia afecta a las manos y a la cara, fundamentalmente. Hay que fijarse, sobre todo, en la zona ocular y bajo los ojos, pues si muestra hinchazón repentina, hay que descartar que anuncie una preeclampsia.</li> <li><strong>Mucho aumento de peso en poco tiempo.</strong> Cuando la embarazada gana más de 1 kg a la semana hay que investigar si hay preeclampsia.</li> <li><strong><a href="https://www.onmeda.es/sintomas/dolor_abdominal.html">Dolor abdominal</a>,</strong> sobre todo localizado en el hemiabdomen derecho, bajo el arco costal inferior. No aparece si la preeclampsia es leve, pero sí cuando es aguda, en relación con el posible aumento hepático. El dolor se puede confundir con <a href="https://www.onmeda.es/enfermedades/hiperacidez.html">acidez gástrica</a> o molestias procedentes de un trastorno biliar, entre otros.</li> <li><strong>Dolor en el hombro derecho,</strong> que puede ser reflejo del dolor abdominal. Es uno de los síntomas más típicos y curiosos de la preeclampsia. Toda embarazada que note dolor en su hombro derecho debe consultarlo con el médico para descartar la presencia de esta enfermedad.</li> <li><strong>Baja producción de <a href="https://www.onmeda.es/exploracion_tratamiento/valores_orina.html">orina</a> (oliguria).</strong> Cuando hay preeclampsia, la embarazada baja la producción de orina, por lo que las micciones son menos frecuentes. Este síntoma se presenta de forma muy leve si la enfermedad no reviste demasiada gravedad, pero si es una preeclampsia grave, será más evidente. En parte es debido a los trastornos vasculares y el líquido intersticial que se acumula en forma de edema.</li> <li><strong><a href="https://www.onmeda.es/9711.html">Dolores de cabeza</a> (cefaleas).</strong> Son dolores que no desaparecen ni siquiera tomando medicación. Se presentan tanto en la preeclampsia leve como en la más grave.</li> <li><strong>Sensaciones auditivas y visuales.</strong> Oír ruidos (<a href="https://www.onmeda.es/enfermedades/acufenos.html">acúfenos</a>), ver destellos, tener doble visión o dejar de ver con nitidez son algunos de los síntomas de la preeclampsia. Se hacen presentes y recurrentes en el caso de la preeclampsia grave, pues en la leve es muy raro que aparezcan.</li> <li><strong><a href="https://www.onmeda.es/embarazo/nauseas_embarazo.html">Náuseas</a> y <a href="https://www.onmeda.es/sintomas/vomitos.html">vómitos</a> </strong>al final del embarazo. Las náuseas y los vómitos que no revisten gravedad suelen ceder en el <a href="https://www.onmeda.es/embarazo/semana_embarazo_1.html">primer trimestre</a> del embarazo. 
Por eso, la aparición de náuseas y vómitos al final del embarazo se considera una señal de alarma de una posible preeclampsia grave, ya que, en este caso, estamos ante vómitos de origen central por afectación del sistema nervioso.</li> <li>Puede cursar con <strong>otros síntomas de afectación neurológica</strong> como <a href="https://www.onmeda.es/enfermedades/vertigos.html">síndrome vertiginoso</a>, somnoliencia, <a href="https://www.onmeda.es/sintomas/nerviosismo.html">irritabilidad</a>, excitabilidad y desorientación, entre otros.</li> </ul> <p data-adhere="true">Ante cualquiera de estos síntomas hay que consultar cuanto antes al médico o acudir a un Servicio de Urgencias.</p> <h2 data-adhere="true">Diagnóstico</h2> <p data-adhere="true">Para establecer el <strong>diagnóstico de preeclampsia</strong> hay que tener en cuenta que se trata de una alteración que no solo afecta a la placenta sino a muchos otros órganos de la madre, como el <a href="https://www.onmeda.es/anatomia/anatomia_rinon.html">riñón</a>, el hígado y el <a href="https://www.onmeda.es/anatomia/anatomia_corazon.html">corazón</a>, al estar plenamente implicado el sistema cardiovascular.</p> <p data-adhere="true">Una de las claves para diagnosticar la preeclampsia son los <strong><a href="https://www.onmeda.es/exploracion_tratamiento/medicion_tension_arterial.html">valores de la tensión arterial</a>.</strong> Cuando una embarazada que previamente no tenía problemas de <a href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">hipertensión</a> muestra unos valores de presión arterial superiores a 140/90 mmHg se habla de preeclampsia.</p> <p data-adhere="true">Para confirmar el diagnóstico de la hipertensión arterial se precisa hallar en dos ocasiones separadas al menos 6 horas una de otra, las cifras de 140/90 mm Hg, o un aumento de TAS (Tensión arterial sistólica) de al menos 30 mm Hg, o un aumento de TAD (Tensión Arterial diastólica) de al menos 15 mm Hg.</p> <p data-adhere="true">Se define una preeclampsia grave si se superan en dos ocasiones, separadas al menos de 6 horas una de otra, cifras de TA de 160/110 mm Hg, o tensión arterial diastólica mayor o igual a 120 mmHg aunque sea en una ocasión o incremento de la tensión arterial sistólica de 60 mmHg o de la tensión arterial diastólica en 30 mmHg sobre la tensión basal.</p> <p data-adhere="true">El obstetra hace un examen físico en cada una de las citas para observar si hay o no <a href="https://www.onmeda.es/sintomas/edema.html">edema</a>, además de vigilar la <strong>ganancia de peso de la embarazada</strong> (que no ha de ser superior a 1 kg en una semana, ni muy brusca en 2 o 3 días).</p> <p data-adhere="true"></p> <p data-adhere="true">Además, el médico controlará, mediante un <a href="https://www.onmeda.es/exploracion_tratamiento/valores_orina.html">análisis de orina</a>, si hay o no <strong>proteinuria</strong> (presencia de proteínas en la orina), teniendo en cuenta que los valores patológicos están por encima de 300 mg/24 horas.</p> <p data-adhere="true">Mediante las <strong>analíticas de sangre rutinarias del <a href="https://www.onmeda.es/embarazo/">embarazo</a></strong>, el médico puede valorar también si las enzimas del hígado registran valores alterados y si hay trombocitopenia (en este caso, el número de <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">plaquetas</a> sería inferior a 100.000). 
También es importante analizar si los <strong>factores de coagulación</strong> presentan algún problema.</p> <p data-adhere="true">La <strong>evolución del feto</strong> también puede ayudar a diagnosticar una preeclampsia. Así, las ecografías periódicas permiten al especialista <strong>examinar el estado de la placenta.</strong> Pero, además, la preeclampsia se puede manifestar con <strong>crecimiento intrauterino retardado (CIR)</strong> y con pérdida de bienestar fetal (también conocida como sufrimiento fetal). El CIR no sucede en todos los casos de preeclampsia leve, pero sí con mucha frecuencia cuando la patología es grave. Con respecto a la <strong>pérdida de bienestar fetal,</strong> se puede observar en algunos casos, independientemente de la gravedad de la preeclampsia.</p> <p data-adhere="true">Para hacer un <strong>diagnóstico precoz de la preeclampsia</strong> en aquellas mujeres con más riesgo se puede optar por realizar una <a href="https://www.onmeda.es/exploracion_tratamiento/ecografia.html">exploración Doppler</a> de las arterias maternas, haciendo especial hincapié en la uterina.</p> <p data-adhere="true">Es muy importante tener en cuenta los <strong>criterios de gravedad de una preeclampsia:</strong></p> <ul> <li>Tensión arterial sistólica mayor o igual a 160 mmHg o tensión arterial diastólica mayor o igual a 110 mmHg, registradas en dos ocasiones con intervalos no menores de 6 horas y en estado de reposo; o tensión arterial diastólica mayor o igual a 120 mmHg aunque sea en una ocasión; o incremento de la tensión arterial sistólica de 60 mmHg o de la tensión arterial diastólica en 30 mmHg sobre la tensión basal.</li> <li>Proteinuria mayor o igual a 500 mg en 24 horas.</li> <li>Disfunción hepática o <a href="https://www.onmeda.es/enfermedades/enfermedades_rinones.html">disfunción renal severa</a>.</li> <li>Trastornos cerebrales o visuales.</li> <li>Dolor epigástrico.</li> <li><a href="https://www.onmeda.es/enfermedades/edema_pulmonar.html">Edema pulmonar</a> o cianosis.</li> <li>Trombocitopenia de 100.000 plaquetas o menos.</li> <li>Ácido úrico mayor o igual a 7 mg/dl.</li> <li>Aparición de crecimiento intrauterino retardado y/o oligoamnios.</li> </ul> <h2 data-adhere="true">Tratamiento</h2> <p data-adhere="true">La preeclampsia solo cede cuando la madre da a luz al bebé. Hasta ese momento puede ser tratada farmacológicamente para que no derive en complicaciones más peligrosas, pero no remitirá definitivamente hasta que nazca el niño y concluya el embarazo.</p> <p data-adhere="true">Habitualmente, si se pasa de la <a href="https://www.onmeda.es/embarazo/semana_embarazo_37.html">semana 37 de embarazo</a> y se detecta la preeclampsia, los médicos deciden concluir la gestación para evitar riesgos mayores. A estas alturas, el feto está, por lo general, perfectamente formado. 
En el caso de que los <a href="https://www.onmeda.es/anatomia/anatomia_pulmon.html">pulmones</a> no se hayan desarrollado del todo (es el órgano que más tarda en hacerlo), se administran corticoides a la madre para acelerar la <strong>maduración pulmonar del bebé.</strong></p> <p data-adhere="true">Después, cuando se comprueba que los pulmones del feto están en perfecto estado, se induce el <a href="https://www.onmeda.es/parto/">parto</a> o se hace una <a href="https://www.onmeda.es/parto/cesarea.html">cesárea</a>, dependiendo del criterio obstétrico.</p> <p data-adhere="true">Si la preeclampsia es leve y se detecta antes del octavo mes, el obstetra puede recomendar varias medidas:</p> <ul> <li><strong>Reposo en cama.</strong> No está probado que la preeclampsia mejore con esta indicación, pero muchos médicos la aconsejan. La embarazada deberá permanecer en cama durante la mayor parte del día, preferiblemente echada sobre el lado izquierdo. La razón de que deba colocarse en esta postura es que así no presiona la vena cava inferior, que es la que retorna la sangre desde la zona inferior del cuerpo hacia el <a href="https://www.onmeda.es/anatomia/anatomia_corazon.html">corazón</a>.</li> <li><strong>Medicamentos antihipertensivos.</strong> No se utilizan siempre, sino en aquellas situaciones en que los <a href="https://www.onmeda.es/exploracion_tratamiento/medicion_tension_arterial.html">valores de la tensión arterial</a> resulten peligrosos para madre e hijo y sea necesario bajarlos.</li> <li><strong>Dieta baja en sal.</strong> La sal eleva la presión arterial, por lo que la <a href="https://www.onmeda.es/embarazo/alimentacion_embarazo.html">dieta de la embarazada</a> con preeclampsia leve debe ser hiposódica. Sin embargo, no se debe suprimir la sal del todo, ya que la preeclampsia conlleva hemoconcentración (concentración de la sangre), que se agudiza por la pérdida de agua y de sal.</li> </ul> <div class="anchor"><a id="fin_del_parto"></a></div> <h3>Fin del parto</h3> <p data-adhere="true">En todo caso, y si la <strong>preeclampsia leve</strong> se va controlando bien, la gestación debe concluir al llegar a la <a href="https://www.onmeda.es/embarazo/semana_embarazo_40.html">semana 40</a>, sin sobrepasar ese umbral.</p> <p data-adhere="true">Cuando estamos ante una <strong>preeclampsia grave,</strong> el parto suele provocarse a partir de la <a href="https://www.onmeda.es/embarazo/semana_embarazo_32.html">semana 32 de embarazo</a>, o incluso antes si fuera necesario. El problema mayor de la prematuridad del bebé es la inmadurez pulmonar, que se resuelve mediante la administración de corticoides.</p> <p data-adhere="true">Ante una preeclampsia grave, la madre siempre deberá tomar <strong>tratamiento antihipertensivo</strong> para estabilizar su tensión arterial. Estos fármacos suelen administrarse por vía intravenosa. 
Ahora bien, no conviene hacer bajar los valores de presión arterial más de 140/90 mmHg, pues de ese modo la <strong>perfusión placentaria</strong> (el flujo sanguíneo que llega a la placenta) disminuiría de forma nociva para el feto.</p> <p data-adhere="true">A la embarazada se le puede prescribir también <strong>sulfato de magnesio</strong> con objeto de prevenir el riesgo de eclampsia (una complicación de la preeclampsia que cursa con convulsiones y puede ser fatal) y de hiperreflexia (respuesta exagerada del organismo ante estímulos).</p> <p data-adhere="true">El bienestar fetal ha de ser vigilado mediante <a href="https://www.onmeda.es/exploracion_tratamiento/ecografia.html">ecografía</a> y otras técnicas de diagnóstico por imagen, como el Doppler, que permite visualizar los vasos sanguíneos del bebé y su posible afectación.</p> <p data-adhere="true">Como la madre debe ser estrechamente vigilada por el médico, en muchos casos se requiere hospitalización. El parto se programará dependiendo del grado de afectación de madre e hijo.</p> <ul> <li><strong>Si el embarazo está a término,</strong> hay que estabilizar médicamente a la madre primero y después provocar el parto.</li> <li><strong>Si es una gestación de más de 32 semanas,</strong> será la madurez pulmonar la que indique el momento de finalizar el embarazo, aunque si la salud de madre e hijo lo requiere, el parto se puede indicar en cualquier momento.</li> <li><strong>Si la mujer cuenta con menos de 32 semanas de embarazo,</strong> habrá que valorar cuidadosamente el estado del feto, teniendo en cuenta su peso y el desarrollo pulmonar que haya alcanzado. Así, el embarazo puede interrumpirse en cuanto se estime que es más peligroso para madre e hijo continuar con la gestación que asumir los riesgos derivados de la prematuridad del niño.</li> </ul> <h2 data-adhere="true">Evolución</h2> <p data-adhere="true">La preeclampsia se resuelve cuando la mujer da a luz; sin embargo, algunos de sus síntomas pueden prolongarse durante varias semanas en el posparto; en especial, la hipertensión arterial, que se puede descompensar aún más en los días justamente posteriores al nacimiento del bebé.</p> <p data-adhere="true">Durante el curso de la enfermedad, la preeclampsia puede evolucionar a otras manifestaciones aún más graves: la eclampsia y el síndrome de Hellp.</p> <ul> <li><strong>Eclampsia.</strong> Sucede cuando la preeclampsia deriva en convulsiones y/o estado de coma de la embarazada. Es una situación muy grave que puede ser fatal tanto para la madre como para el niño, y que se presenta a partir de la <a href="https://www.onmeda.es/embarazo/semana_embarazo_20.html">semana 20 de embarazo</a>, durante el parto o tras dar a luz. 
Hay inminencia de eclampsia cuando en la gestante se registran unos <a href="https://www.onmeda.es/exploracion_tratamiento/medicion_tension_arterial.html">valores de presión arterial</a> superiores a 185/115 mmHg, proteinuria mayor a 10 gramos, hiperreflexia generalizada, <a href="https://www.onmeda.es/enfermedades_oculares/">pérdida parcial o total de la visión</a> y dolor en el epigastrio que irradia hacia atrás por el abdomen en forma de cinturón (o barra).</li> <li><strong>Síndrome de Hellp.</strong> El síndrome de Hellp se caracteriza por hemólisis (destrucción de los <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">glóbulos rojos</a>), elevación de las <a href="https://www.onmeda.es/exploracion_tratamiento/enzimas.html">enzimas</a> hepáticas y bajo recuento de <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">plaquetas</a>. Su nombre es la abreviatura en inglés de hemólisis (HE); elevación de las enzimas (EL) y bajo recuento de plaquetas (LP). Es un episodio muy grave, que puede manifestarse incluso cuando no se había diagnosticado la preeclampsia, y que, si no se trata, puede ocasionar la muerte. Sus síntomas son parecidos a los de una virasis, una <a href="https://www.onmeda.es/enfermedades/hepatitis.html">hepatitis</a> o una <a href="https://www.onmeda.es/enfermedades/gastroenteritis.html">gastroenteritis</a>, por lo que puede pasar inadvertida en un principio, ya que cursa con malestar general y <a href="https://www.onmeda.es/sintomas/dolor_abdominal.html">dolor abdominal</a> (en el epigastrio y en el hipocondrio derecho). Un 15% de las mujeres con preeclampsia pasarán a desarrollar esta forma severa de la enfermedad mediante el síndrome de Hellp.</li> </ul> <p data-adhere="true">Además, es posible que surjan otras <strong>complicaciones de la preeclampsia:</strong></p> <ul> <li><strong>Desprendimiento prematuro de placenta.</strong> El desprendimiento de placenta es una situación muy grave que obliga a concluir inmediatamente el embarazo mediante una <a href="https://www.onmeda.es/parto/cesarea.html">cesárea</a>. Supone la separación de la placenta de la zona en la que se inserta, y es más frecuente cuando hay síndrome de Hellp.</li> <li><strong>Problemas renales.</strong> La preeclampsia puede conllevar una <a href="https://www.onmeda.es/enfermedades/insuficiencia_renal_aguda.html">insuficiencia renal aguda</a>. Así, la alteración renal más característica dependiente de la preeclampsia es la glomérulo endoteliosis, por la que se eliminan proteínas plasmáticas, dando lugar a la proteinuria.</li> <li><strong>Edema pulmonar.</strong> El pulmón también se puede ver afectado por la preeclampsia en forma de <a href="https://www.onmeda.es/enfermedades/edema_pulmonar.html">edema pulmonar agudo</a>. Es una complicación más propia de la preeclampsia grave y de la eclampsia, y puede ocasionar la muerte en el periodo posparto.</li> <li><strong>Hemorragia cerebral</strong> u otros <a href="https://www.onmeda.es/sistema_cardiovascular/">accidentes cerebrovasculares</a>. La <a href="https://www.onmeda.es/enfermedades/lesiones_en_la_cabeza.html">hemorragia cerebral</a> es la principal causa de la muerte en pacientes con preeclampsia o con eclampsia (un 60% de las defunciones por este motivo se producen por una hemorragia cerebral). 
La hemorragia cerebral producida por preeclampsia suele presentarse con convulsiones y coma.</li> <li><strong>Edema cerebral.</strong> La hipertensión arterial propia de la preeclampsia eleva la presión intracraneal, lo que produce extravasación de líquidos, generándose el edema cerebral.</li> <li><strong>Ruptura hepática.</strong> Afortunadamente es una complicación rara de la preeclampsia y de la eclampsia, dado su elevado índice de muerte. Se manifiesta con dolor epigástrico o en el cuadrante superior derecho que se irradia al hombro. Se trata de un dolor muy intenso que no cesa y que aumenta con la <a href="https://www.onmeda.es/anatomia/anatomia_pulmon.html">respiración</a> y con todas las situaciones que incrementan la presión intraabdominal. Varios días antes ofrece síntomas como <a href="https://www.onmeda.es/embarazo/nauseas_embarazo.html">náuseas</a> y <a href="https://www.onmeda.es/sintomas/vomitos.html">vómitos</a>, <a href="https://www.onmeda.es/sintomas/palpitaciones.html">taquicardias</a>, <a href="https://www.onmeda.es/sintomas/ictericia.html">ictericia </a>y palidez.</li> <li><strong>Alteraciones en la coagulación.</strong> El metabolismo de los factores de coagulación de la sangre puede verse alterado con la preeclampsia, formándose <a href="https://www.onmeda.es/enfermedades/trombosis.html">trombos</a> y dando lugar a hemorragias.</li> </ul> <div class="anchor"><a id="efectos_sobre_el_feto"></a></div> <h3>Efectos sobre el feto</h3> <p data-adhere="true">La preeclampsia también afecta al estado del feto, ya que el flujo sanguíneo, que es el que provee al bebé de nutrientes y oxígeno, se ve alterado. Los <strong>efectos adversos de la preeclampsia sobre el niño</strong> serán más graves cuanto más tiempo se prolongue el cuadro de preeclampsia y cuanto más severo sea.</p> <p data-adhere="true">El feto puede sufrir crecimiento intrauterino retardado (CIR), también conocido como <strong>retraso en el crecimiento uterino.</strong> Esta alteración conlleva que el feto no se desarrolle según los parámetros saludables que le corresponden de acuerdo con su edad gestacional y, en los casos más graves, puede ocasionarle la muerte. El CIR puede observarse hasta en un 31% de las gestaciones que cursan con preeclampsia.</p> <p data-adhere="true">También se produce <strong>oligoamnios</strong> (disminución de la cantidad de líquido amniótico), que implica, igualmente, la posibilidad de alteraciones en el crecimiento fetal. Además, la presencia de oligoamnios dificulta el <a href="https://www.onmeda.es/parto/">parto</a> y eleva la posibilidad de tener complicaciones con el cordón umbilical.</p> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 4040, ["12","300"], 'Publicidad');</script> <p data-adhere="true">Además, actualmente, la preeclampsia es una de los factores que más influye en la <strong>prematuridad,</strong> tanto espontánea como inducida, con los problemas que esto conlleva para el niño de bajo peso al nacer y alteraciones de todo tipo: respiratorias, metabólicas, cardiacas, inmunitarias, digestivas, oftalmológicas, auditivas... De hecho, el parto de un embarazo con preeclampsia debería suceder en una maternidad que cuente con Servicio de Neonatología por los posibles cuidados específicos que precisará el niño nada más nacer.</p> <p data-adhere="true">La preeclampsia también produce un <strong>aumento de la mortalidad perinatal.</strong> Ocurre entre un 3,5 y un 35 %. 
Esto guarda relación con las cifras de tensión arterial de la madre, la gravedad de la proteinuria y los valores de ácido úrico. Las causas más frecuentes son la placenta previa, asfixia fetal y otras complicaciones derivadas de la inmadurez y la prematuridad.</p> <h2 data-adhere="true">Prevención</h2> <p data-adhere="true">Las mujeres que han sufrido preeclampsia en un <a href="https://www.onmeda.es/embarazo/">embarazo</a> tienen más riesgo de que la enfermedad se vuelva a presentar en sucesivas gestaciones. Por ello, la prevención es clave.</p> <p data-adhere="true">Entre las <strong>medidas para prevenir la preeclampsia</strong> están:</p> <ul> <li><strong>No volverse a quedar embarazada en menos de dos años</strong> desde la anterior gestación.</li> <li>Hacer <strong><a href="https://www.onmeda.es/embarazo/diagnostico_prenatal.html">seguimiento médico del embarazo</a></strong>. En cada cita, el obstetra mide la tensión arterial de la madre, revisa su peso y solicita las pruebas analíticas necesarias de aquellos parámetros cuya alteración puede hacer sospechar que la mujer padece preeclampsia. Por ello, no hay que saltarse ninguna prueba prenatal.</li> <li>Si el <strong>consumo de calcio</strong> es insuficiente, se puede administrar a la madre un suplemento de <a href="https://www.onmeda.es/nutrientes/calcio.html">calcio</a>, especialmente si es población de riesgo.</li> <li>También el <strong>ácido acetilsalicílico</strong> en dosis bajas puede prevenir la aparición de la preeclampsia en mujeres predispuestas a padecerla.</li> <li>En embarazadas con preeclampsia grave, los médicos suelen prescribir <strong>sulfato de magnesio</strong> para evitar que evolucione a una eclampsia.</li> </ul>
</body>
</html>
<file_sep>/Experimento2/diagnosticador.py
from gensim.models.keyedvectors import KeyedVectors
import sys
import csv

import numpy as np
from scipy import spatial

from utilidades import textoALista, noCaracteresEspeciales


def printD(di):
    """Print a dictionary sorted by value, highest similarity first."""
    s = [(k, di[k]) for k in sorted(di, key=di.get, reverse=True)]
    print("\n".join(map(lambda x: " -> ".join(map(str, x)), s)))


def vectorPromedioDeTexto(words, model, num_features):
    # Adapted from https://datascience.stackexchange.com/questions/23969/sentence-similarity-prediction
    # Returns the average embedding of the words that exist in the model's vocabulary.
    featureVec = np.zeros((num_features,), dtype="float32")
    nwords = 0
    for word in words:
        try:
            featureVec = np.add(featureVec, model[word])
            nwords = nwords + 1
        except KeyError:
            pass  # ignore words that are not in the embedding vocabulary
    if nwords > 0:
        featureVec = np.divide(featureVec, nwords)
    return featureVec


def predecirSimilaridad(vector1, vector2):
    # Cosine similarity: 1 means same direction, 0 means orthogonal.
    return 1 - spatial.distance.cosine(vector1, vector2)


# Spanish fastText embeddings (SBWC); only the most frequent words are loaded to save memory.
wordvectors_file_vec = 'fasttext-sbwc.3.6.e20.vec.gz'
cantidad = 100000
num_features = 300
wordvectors = KeyedVectors.load_word2vec_format(wordvectors_file_vec, limit=cantidad)

# Build one average vector per disease from datos.csv
# (space-delimited rows quoted with '|': first field disease name, second field description).
corpus = {}
with open('datos.csv', "r", newline='') as csvfile:
    spamreader = csv.reader(csvfile, delimiter=' ', quotechar='|')
    for row in spamreader:
        corpus[row[0]] = vectorPromedioDeTexto(textoALista(row[1]), wordvectors, num_features)
        # print(corpus[row[0]])

# Vectorise the symptoms file given on the command line.
with open(sys.argv[1], "r") as f:
    sintomas = vectorPromedioDeTexto(textoALista(noCaracteresEspeciales(f.read())), wordvectors, num_features)

# Rank every disease by cosine similarity against the symptom vector.
vectorDeSimilaridades = {}
for nombre, enfermedad in corpus.items():
    vectorDeSimilaridades[nombre] = predecirSimilaridad(enfermedad, sintomas)

printD(vectorDeSimilaridades)
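

# A minimal, self-contained sketch of the same approach used above (average the
# word embeddings of a text, then rank candidate diseases by cosine similarity).
# The tiny embedding table and its values are invented for illustration only, so
# this helper runs without 'fasttext-sbwc.3.6.e20.vec.gz'; the script itself
# never calls it.
def _ejemploJuguete():
    toy_model = {
        "fiebre": np.array([1.0, 0.1, 0.0]),
        "tos": np.array([0.9, 0.2, 0.1]),
        "mareo": np.array([0.1, 1.0, 0.2]),
        "nausea": np.array([0.0, 0.9, 0.3]),
    }

    def promedio(palabras):
        # Same averaging as vectorPromedioDeTexto, restricted to known words.
        vecs = [toy_model[p] for p in palabras if p in toy_model]
        return np.mean(vecs, axis=0) if vecs else np.zeros(3)

    corpus_juguete = {
        "gripe": promedio(["fiebre", "tos"]),
        "vertigo": promedio(["mareo", "nausea"]),
    }
    sintomas_juguete = promedio(["fiebre", "tos", "tos"])

    # Rank exactly as predecirSimilaridad/printD do above; "gripe" comes first
    # because its words overlap with the symptom text.
    return sorted(
        ((nombre, predecirSimilaridad(v, sintomas_juguete))
         for nombre, v in corpus_juguete.items()),
        key=lambda x: x[1],
        reverse=True,
    )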
<file_sep>/Experimento4/main5.py
import sys
import traceback
import unicodedata
import pickle
import time
from math import exp, log

import numpy as np
from scipy.spatial import distance

# Code used to clean the corpus (one-off, interactive; kept commented out, not reviewed):
# with open(sys.argv[1], "r") as f:
#     enf = map(lambda x: x.split("|", 2), f.read().splitlines())
# salida = open(archPickle, "w")
# for i in enf:
#     if len(i) > 1:
#         if len(i[1]) < 500:
#             mantener = input(i)
#             if mantener != "n":
#                 salida.write("|".join(i[:2]) + "\n")
#         else:
#             print("keeping ", i)
#             salida.write("|".join(i[:2]) + "\n")


def tf(doc, p):
    """Term frequency of word p in the tokenised document doc."""
    return doc.count(p) / len(doc)


def idf(docs, p, th):
    """Inverse document frequency (base 10) of p; 0 if p appears in fewer than th documents."""
    cont = 0
    for d in docs:
        if p in d:
            cont += 1
    if cont < th:
        return 0
    return log(len(docs) / cont, 10)


def splitInv(cad, caracteres, stopwords):
    """Tokenise cad keeping only runs of characters in `caracteres`,
    dropping stopwords and crudely stemming trailing 's', 'a', 'o'."""
    arreglo = []
    aux = ""
    cad = cad.lower()
    for c in cad:
        if c not in caracteres:
            if aux.rstrip('sao') != "" and aux not in stopwords:
                arreglo.append(aux.rstrip('sao'))
            aux = ""
        else:
            aux += c
    return arreglo


def trnDocs(rawdocs, stopwords):
    """Normalise (NFKD) and tokenise every raw document."""
    docs = {}
    for e, d in rawdocs.items():
        docs[e] = splitInv(unicodedata.normalize('NFKD', d), "qwertyuiopasdfghjklzxcvbnm", stopwords)
    return docs


def getVoc(docs, th=1, th2=0.1):
    """Build the vocabulary as (word, idf) pairs, keeping only words whose idf
    is at least th2 and which appear in at least th documents."""
    vocab = []
    for d in docs:
        for p in d:
            if p not in [v[0] for v in vocab]:
                aux = idf(docs, p, th)
                if aux >= th2:
                    vocab.append((p, aux))
    return vocab


def leer(corpus):
    """Yield (name, text) pairs from a 'name|text' file."""
    with open(corpus, "r") as f:
        for line in f:
            yield line.split("|", 1)


def doc2Vec(doc, vocab, funcion):
    """Turn a document (raw string or already-tokenised list) into a
    tf * funcion(idf) vector over the vocabulary."""
    with open("../Experimento3/stopwords-es-master/stopwords-es.txt", "r") as f:
        stopwords = f.read().splitlines()
    try:
        palabras = splitInv(unicodedata.normalize('NFKD', doc), "qwertyuiopasdfghjklzxcvbnm", stopwords)
    except Exception as e:
        # normalize() fails when doc is already a token list; use it as-is.
        print(e, end="\r")
        palabras = doc
    vector = []
    for key, value in vocab:
        vector.append(tf(palabras, key) * funcion(value))
    return np.array(vector).reshape(1, -1)


def crearVectores(archCorpus, archPickle, funcion):
    """Read the corpus, build the vocabulary and the document vectors,
    and pickle them to archPickle."""
    with open("../Experimento3/stopwords-es-master/stopwords-es.txt", "r") as f:
        stopwords = f.read().splitlines()
    ti = time.time()
    corpus = dict(leer(archCorpus))
    print("Corpus read")
    docs = trnDocs(corpus, stopwords)
    print("Documents normalised")
    vocab = getVoc(docs.values(), 2, 0.1)
    print("Vocabulary built")
    vectores = [(n, doc2Vec(d, vocab, funcion)) for n, d in docs.items()]
    print("Vectors built")
    print("Elapsed time:", time.time() - ti)
    pickle.dump([vectores, vocab], open(archPickle, "wb"))
    return [vectores, vocab]


def leerVectores(archPickle):
    """Load previously pickled [vectores, vocab]."""
    return pickle.load(open(archPickle, "rb"))


def predecir(archSintomas, vectores, vocab, funcion):
    """Vectorise the symptoms file and rank diseases by cosine distance (smallest first)."""
    sintomas = open(archSintomas, "r").read()
    vector = doc2Vec(sintomas, vocab, funcion)
    docs = []
    for n, v in vectores:
        # Other metrics tried during experimentation: wminkowski, matching,
        # braycurtis, canberra, chebyshev, jaccard, correlation, sqeuclidean,
        # cityblock, euclidean.
        docs.append([n, distance.cdist(vector, v, 'cosine')])
    docs.sort(key=lambda x: x[1])
    return docs


# Sigmoid-shaped weighting of the idf values: rare words (high idf) keep a weight
# close to 1, very common words (idf near 0) get a weight close to 0.
funcion = lambda x: 1 / (1 + exp(5 * (1 - x)))

try:
    # Fast path: sys.argv[1] is a pickle with the vectors, sys.argv[2] the symptoms file.
    vectores, vocab = leerVectores(sys.argv[1])
    ti = time.time()
    dists = predecir(sys.argv[2], vectores, vocab, funcion)
    print("Elapsed time:", time.time() - ti)
except Exception as e:
    print(e)
    try:
        # Slow path: sys.argv[1] is the corpus, sys.argv[2] where to save the vectors,
        # sys.argv[3] the symptoms file.
        vectores, vocab = crearVectores(sys.argv[1], sys.argv[2], funcion)
        dists = predecir(sys.argv[3], vectores, vocab, funcion)
    except Exception as ex:
        print(ex)
        traceback.print_exc(file=sys.stdout)
        print("You must pass either the vectors file and the symptoms file, or the corpus, the path where the vectors will be saved and the symptoms file")
        print("example:\tpython3 main5.py datosLimpios.csv vectores.pkl misSintomas.txt\n\t\tpython3 main5.py vectores.pkl misSintomas.txt")
        exit()

for d in dists:
    input(d)
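

# A small, self-contained sketch of the weighting scheme used above: every
# vocabulary word contributes tf(word) * funcion(idf(word)), where funcion is
# the sigmoid 1 / (1 + exp(5 * (1 - idf))), so very common words are almost
# silenced while rarer words keep their weight. The three toy documents below
# are invented for illustration only and the helper is never called by the
# script itself.
def _esbozoPesosTfIdf():
    toy_docs = [
        ["fiebre", "tos", "dolor"],
        ["fiebre", "mareo"],
        ["tos", "tos", "fiebre"],
    ]
    consulta = ["fiebre", "tos"]

    pesos = {}
    for palabra in set(consulta):
        # tf(), idf() and funcion are the definitions above; th=1 keeps every word.
        pesos[palabra] = tf(consulta, palabra) * funcion(idf(toy_docs, palabra, 1))
    # "fiebre" appears in all three documents (idf = 0), so its weight ends up
    # smaller than the weight of "tos", which appears in only two of them.
    return pesos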
<file_sep>/Experimento2/paginasEspecificas/rotura_menisco.html
<!doctype html>
<html lang="es" dir="ltr" id="sprout" class="no-js">
<head>
<meta charset="utf-8"/>
<title>Rotura de menisco : Incidencia - Onmeda.es</title>
<meta name="description" content="El menisco es el disco cartilaginoso de la articulaci&oacute;n de la rodilla en forma de media luna. 
En cada rodilla hay un menisco interno y otro"/>
<meta name="keywords" content="rotura menisco, rotura rodilla, rotura menisco tratamiento, rotura menisco curacion, rotura menisco operacion, rotura menisco ejercicios, rotura menisco sintomas"/>
<link rel="canonical" href="https://www.onmeda.es/enfermedades/rotura_menisco.html"/>
</head>
<body>
<section class="l-content group" id="sprout_content"> <div class="l-col l-col--primary" id="sprout_primary">
<div class="content-header"> <h1 class="content-header__title"> Rotura de menisco </h1> <p class="content-header__meta"> <span class="content-header__meta__item span content-header__meta__item--author"><strong>Autor: <a href="/equipo.html" target="_new"> <NAME></a></strong></span> <span class="content-header__meta__item span content-header__meta__item--author"><strong>Revisión médica: <NAME></strong> <span class="content-header__meta__item span content-header__meta__item--datestamp">(13 de Diciembre de 2016)</span> </p> </div>
<div class="content-image content-image--full"> <div class="content-image__image"> <img src="https://i.onmeda.de/nwl/blindfisch.gif" data-lazyload="https://i.onmeda.de/es/meniskusriss-870x435.jpg" width="870" height="435"/><span class="image-copyright">© Jupiterimages/Stockbyte</span> </div> </div>
<nav class="subnav float--left js-fix-it is--active" data-toggles="true" data-openicon="+" data-closedicon="-" data-avoid=".footer-global__legal" data-fixed-title="Indice"> <div class="subnav__title"> <h2
class="subnav__title__text"><span class="subnav__title__label">Indice</span></h2> </div> <ul class="subnav__list"> <li class="subnav__list__item is--active"><a href="https://www.onmeda.es/enfermedades/rotura_menisco.html" class="subnav__list__item__link">Rotura de menisco</a></li><li class="subnav__list__item"><a href="https://www.onmeda.es/enfermedades/rotura_menisco-mas-informacion-6130-2.html" class="subnav__list__item__link">Más información</a></li> </ul> </nav> <p data-adhere="true" class="lead">El menisco es el disco cartilaginoso de la articulación de la rodilla en forma de media luna. En cada rodilla hay un menisco interno y otro externo. La rotura de menisco es el doble de frecuente en los hombres que en las mujeres.</p> <p data-adhere="true">La <strong>rotura de menisco</strong> puede producirse por una <a href="https://www.onmeda.es/enfermedades/lesiones.html">lesión</a> o por <a href="https://www.onmeda.es/enfermedades/artrosis.html">desgaste articular</a>. Las malformaciones congénitas de los discos de cartílago (por ejemplo, el menisco discorde) también pueden ser la <strong>causa</strong> de una rotura de menisco.</p> <p data-adhere="true">El <strong>tratamiento</strong> puede ser tanto conservador como quirúrgico. Este último se puede hacer por artroscopia, que consiste en la <strong>extirpación</strong> o <strong>sutura del menisco</strong>.</p> <h3>Incidencia</h3> <p data-adhere="true">La <strong>rotura de menisco</strong> afecta con mayor frecuencia al tercio posterior del <strong>menisco interno</strong>; ahí se producen más de la mitad de las roturas. El <strong>menisco externo</strong> resulta bastante menos afectado, ya que tiene más movilidad que el interno, que está soldado al ligamento interno.</p> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 4040, ["12","300"], 'Publicidad');</script> <p data-adhere="true">Las <strong>roturas de menisco</strong> en las que se lesionan simultáneamente los <strong>ligamentos</strong> estabilizadores de la <strong>rodilla</strong> corresponden a la <strong>rotura del menisco interno</strong>. Afecta sobre todo a personas jóvenes y activas. Las causas son el <a href="https://www.onmeda.es/deporte/">deporte</a> excesivo y los accidentes. La rotura de menisco que no provoca <strong>lesiones</strong> simultáneas de los <strong>ligamentos</strong> se da en edades más avanzadas. Estas roturas suelen ser más complicadas, ya que van acompañadas del <strong>desgaste del menisco</strong> propio de la edad.</p> <p data-adhere="true">Esta lesión afecta el doble de ocasiones a los hombres que a las mujeres. Esto puede deberse a que los hombres practican más a menudo deportes como el fútbol y ejercen trabajos en los que se producen <strong>lesiones de menisco</strong> constantes, como obreros o jardineros.</p> <h2 data-adhere="true">Definición</h2> <p data-adhere="true">La <strong>rotura de menisco</strong> se define como una lesión en los discos del <strong>cartílago</strong> de la <strong>rodilla</strong> (<strong>menisco</strong>). La rotura de menisco se produce bien como consecuencia de un trauma (por ejemplo, un accidente), por el desgaste del tejido cartilaginoso o por una combinación de ambos. 
Las roturas se dividen según su localización (tercio del menisco anterior, medio o posterior) o su forma (vertical, horizontal, transversal, en asa de cubo, lobular.</p> <h3 data-adhere="true">El menisco</h3> <p data-adhere="true">El menisco es un disco de cartílago de la <strong>articulación de la rodilla</strong> en forma de media luna. En cada rodilla hay un <strong>menisco interno</strong> y otro externo. Están situados entre la cabeza de la tibia y el final del fémur. Los <strong>meniscos</strong> proporcionan una combinación perfecta en la articulación de la rodilla para que las superficies de las articulaciones del fémur y de la tibia encajen a pesar de su forma diferente. Junto con el cartílago de las superficies de las articulaciones, los meniscos compensan las superficies irregulares. Para ello, los meniscos absorben aproximadamente un tercio del peso y lo dividen en partes iguales sobre la articulación de la rodilla. Ejercen de distribuidores del peso, de frenos, de amortiguadores y de estabilizadores.</p> <h2 data-adhere="true">Causas</h2> <p data-adhere="true">Las <strong>causas</strong> de la <strong>rotura de menisco</strong> suelen ser lesiones o daños que se producen debidos al desgaste. Las malformaciones congénitas de los discos de cartílago (por ejemplo, el menisco discorde) también pueden ser la causa de una rotura de menisco.</p> <p data-adhere="true">Las <strong>lesiones de menisco</strong> agudas se producen principalmente en los <a href="https://www.onmeda.es/deporte/">deportes</a> en los que se realizan giros repentinos y paradas bruscas, tales como el tenis, el fútbol o el esquí. La combinación de la tensión y la rotación producen fuerzas de cizallamiento en las articulaciones. Estas conducen a menudo a una combinación de <strong>rotura de menisco</strong> y <strong><a href="https://www.onmeda.es/enfermedades/rotura_ligamentos.html">rotura de ligamentos</a></strong>. Asimismo, la actividad deportiva regular como el <em><a href="https://www.onmeda.es/deporte/correr.html">footing</a></em> sobrecarga con el paso de los años el tejido cartilaginoso y puede derivar en una lesión del menisco.</p> <p data-adhere="true">La <strong>rotura de menisco</strong> se puede deber también a otras <strong>causas</strong>. El tejido cartilaginoso, al contrario que otros tipos de tejido, solo se puede nutrir indirectamente a través del líquido sinovial, por lo que los cartílagos tienen una capacidad muy limitada para regenerarse. Por lo tanto, con la edad el menisco se torna frágil y pierde resistencia. A lo largo de los años se van formando finos huecos (fisuras) que pueden desencadenar en una rotura si se les somete a cargas continuas. Este tipo de <strong>desgastes</strong> y roturas <strong>de menisco</strong> pueden aparecer ya a edades tempranas, por ejemplo si se somete a esfuerzos por actividades que le exigen estar de rodillas durante mucho tiempo.</p> <p data-adhere="true">Las posiciones incorrectas de las piernas también pueden causar o favorecer una <strong>lesión del menisco</strong>. 
En las personas con las piernas arqueadas, el <strong>menisco interno</strong> resulta casi siempre afectado, mientras que en las piernas en X, el daño es mayor en el <strong>menisco externo</strong>.</p> <h2 data-adhere="true">Síntomas</h2> <p data-adhere="true">Si la <strong>rotura de menisco</strong> se debe a un accidente, los <strong>síntomas</strong> más característicos son <a href="https://www.onmeda.es/enfermedades/dolor.html">dolores</a> agudos e <strong>hinchazón</strong> en la articulación de la <strong>rodilla</strong>. Si la rotura de menisco se origina por el desgaste, los síntomas son: <strong>dolores</strong> en aumento en la <strong>rodilla</strong>, que aparecen sobre todo tras el esfuerzo físico, y una sensación de inestabilidad. Las partes desgarradas del menisco actúan con frecuencia como cuerpos extraños en la articulación. Si se interponen entre las superficies de las articulaciones, pueden conducir a limitaciones en el movimiento y la capacidad de esfuerzo muy dolorosas. A veces pueden escucharse ruidos sordos y chasquidos. En algunos casos, la articulación llega a bloquearse por completo.</p> <p data-adhere="true">Como los <strong>meniscos lesionados</strong> ya no pueden cumplir su misión como amortiguadores, los cartílagos de las articulaciones soportan una carga mayor. Por este aumento en la carga, estos se lesionan y liberan determinadas sustancias, <a href="https://www.onmeda.es/enfermedades/artritis.html">la articulación se inflama</a> y se produce un derrame en la articulación, casi siempre muy doloroso. La rodilla se hincha y se sobrecalienta de repente.</p> <h2 data-adhere="true">Diagnóstico</h2> <p data-adhere="true">Para poder establecer el <strong>diagnóstico</strong> de la <strong>rotura de menisco</strong>, puede ser muy útil tener información sobre esfuerzos continuados o accidentes previos, además de la descripción de las molestias agudas. Mediante la manipulación controlada y unas pruebas especiales, el médico puede descubrir rápidamente la rotura de menisco y las posibles <strong>lesiones</strong> secundarias de la <strong>rodilla</strong> y con ello asegurar el diagnóstico.</p> <p data-adhere="true">En caso de <strong>derrame articular en la rodilla</strong>, una punción articular y el examen del líquido que se extraiga pueden proporcionar más información al respecto. En una radiografía el médico puede descartar las lesiones óseas y asegurar los daños ocasionados por el desgaste. Si tras estos estudios no queda claro si se trata de una <strong>rotura de menisco</strong>, la <a href="https://www.onmeda.es/exploracion_tratamiento/trm.html">tomografía por resonancia magnética</a> (TRM) o la artroscopia son dos métodos bastante fiables para establecer un <strong>diagnóstico</strong> inequívoco.</p> <h2 data-adhere="true">Tratamiento</h2> <p data-adhere="true">En caso de <strong>rotura de menisco</strong>, el objetivo del <strong>tratamiento</strong> es disminuir el <a href="https://www.onmeda.es/enfermedades/dolor.html">dolor</a> o conseguir que desaparezca por completo. Además, debe servir para restablecer la movilidad y función de la <strong>articulación de la rodilla</strong>.</p> <p data-adhere="true">Básicamente se pueden distinguir dos tipos de procedimientos que pueden emplearse en el <strong>tratamiento</strong> de la <strong>rotura de menisco</strong>: el tratamiento conservador sin cirugía y el <strong>tratamiento quirúrgico</strong>. 
Las roturas pequeñas y estables pueden tratarse de forma conservadora (por ejemplo, con terapia de movimiento y medicamentos), aunque en la mayoría de las <strong>lesiones de menisco</strong> es necesaria la <strong>cirugía</strong>. Debido a su importante función de amortiguación y apoyo, se intenta conservar la mayor parte posible del <strong>menisco</strong>.</p> <h3 data-adhere="true">Tratamiento conservador</h3> <p data-adhere="true">Si el médico realiza un <strong>tratamiento conservador</strong> para la <strong>rotura de menisco</strong>, en primer lugar es necesario conocer la evolución natural de la enfermedad. Para ello, el médico explica de qué manera influye el comportamiento de las articulaciones en la vida diaria (por ejemplo, evitar ponerse en cuclillas o practicar deportes que impliquen cambios bruscos de dirección). Si se realiza fisioterapia regularmente, los afectados pueden fortalecer una musculatura débil (sobre todo mediante el ejercicio en la musculatura de los muslos). La electroterapia puede apoyar este tratamiento. También puede aliviar algo de peso en las rodillas con el uso de muletas. Si además de la rotura de menisco padece <a href="https://www.onmeda.es/enfermedades/artritis.html">artritis</a>, vel dolor se alivia enfriando la articulación.</p> <p data-adhere="true">En las lesiones tales como la <strong>rotura de menisco</strong>, el <strong>tratamiento</strong> con medicamentos va dirigido al dolor y las inflamaciones. Se pueden utilizar antiinflamatorios sin cortisona (antirreumáticos no esteroideos). En casos aislados, si las molestias son muy fuertes, pueden inyectarse anestésicos locales y antiinflamatorios (por ejemplo, cortisona) directamente sobre la <strong>articulación de la rodilla</strong>.</p> <h3 data-adhere="true">Tratamiento quirúrgico</h3> <p data-adhere="true">Mientras que hasta hace 20 años los cirujanos y los ortopedas tenían que operar muy a menudo las <strong>roturas de menisco</strong>, en la actualidad la mayoría de los casos pueden tratarse mediante una <strong>artroscopia</strong> con anestesia general o local. Es posible incluso realizar las intervenciones de forma ambulatoria, aunque en algunos casos es necesaria la hospitalización. La estancia en el hospital en estos casos no suele durar más de una semana. La cirugía abierta solo es necesaria si, además de la rotura de menisco, resultan también afectados los huesos o los ligamentos.</p> <p data-adhere="true">En el <strong>tratamiento quirúrgico</strong> se puede distinguir entre la extracción parcial del menisco (más común) y la <strong>sutura de menisco</strong>.</p> <p data-adhere="true">En la extracción parcial del menisco se extraen todas las partes del menisco destrozadas y dañadas. Si existe inestabilidad en la rodilla o una mala alineación de las piernas, el cirujano puede emplear ligamentos artificiales o corregir los huesos quirúrgicamente (<strong>osteotomía</strong>). 
Ya que, sobre todo en los jóvenes afectados, la eliminación total del menisco puede conllevar a una <a href="https://www.onmeda.es/enfermedades/artrosis.html">artrosis</a> temprana, en casos excepcionales puede realizarse un <strong><em>implante</em></strong> <em>de colágeno</em> de parte del <strong>menisco</strong>.</p> <p data-adhere="true"><script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 4040, ["12","300"], 'Publicidad');</script></p> <p data-adhere="true">Con la <strong>sutura de menisco</strong> es posible conservar todo el tejido cartilaginoso mediante la restauración del menisco. Tras suavizar los bordes de la rotura, el cirujano sutura el menisco y lo restaura a su forma original. Este método se recomienda en pacientes jóvenes, ya que en estos casos el menisco no tiene ningún desgaste significativo.</p> <p data-adhere="true">En muchos casos se emplean ambos tratamientos combinados (la extracción parcial del menisco y la <strong>sutura de menisco</strong>) para recuperar la mayor cantidad posible de tejido cartilaginoso sano.</p> <h2>Evolución</h2> <p data-adhere="true">Aparte de los riesgos generales que suponen la cirugía y la anestesia, es muy poco probable que surjan <strong>complicaciones</strong> en la evolución de los casos de <strong>rotura de menisco</strong>. Durante la intervención pueden aparecer en casos excepcionales lesiones en los cartílagos, los ligamentos, las mucosas o los nervios. Aunque las infecciones son poco frecuentes, pueden derivar en complicaciones graves como rigidez en las articulaciones, por lo que es importante seguir el <strong>tratamiento</strong> correspondiente (por ejemplo, con antibióticos).</p> <h3 data-adhere="true">Complicaciones</h3> <p data-adhere="true">Aparte de los riesgos generales que suponen la cirugía y la anestesia, es muy poco probable que surjan <strong>complicaciones</strong> en la evolución de los casos de <strong>rotura de menisco</strong>. Durante la intervención pueden aparecer en casos excepcionales lesiones en los cartílagos, los ligamentos, las mucosas o los nervios. Aunque las infecciones son poco frecuentes, pueden derivar en complicaciones graves como rigidez en las articulaciones, por lo que es importante seguir el <strong>tratamiento</strong> correspondiente (por ejemplo, con antibióticos).</p> <h3 data-adhere="true">Pronóstico</h3> <p data-adhere="true">Si la <strong>ruptura de menisco</strong> es por una <a href="https://www.onmeda.es/enfermedades/lesiones.html">lesión</a>, el <strong>pronóstico</strong> es favorable si la ruptura se produce en una zona del menisco cercana a la base y bien irrigada. Las manifestaciones de desgaste mejoran sin necesidad de <strong>tratamiento</strong>. Si la rotura de menisco no se trata a tiempo, puede derivar en pérdida de líquido articular, otras lesiones en los cartílagos por obstrucción o <a href="https://www.onmeda.es/enfermedades/artrosis.html">artrosis</a>. Una rotura parcial puede evolucionar a una <strong>rotura</strong> completa del <strong>menisco</strong>. Tras un tratamiento con sutura de menisco, las posibilidades de curación van del 30 al 100%, aunque siempre existe el riesgo de que se produzca una nueva rotura. La extirpación parcial puede ocasionar una sobrecarga del cartílago articular y reducción de la estabilidad. El riesgo de padecer artrosis en la rodilla afectada va del 50 al 80%. 
Además, en los casos de <strong>rotura de menisco</strong>, el <strong>pronóstico</strong> y la posibilidad de curación dependen en gran medida de otras lesiones relacionadas, por ejemplo, en los ligamentos.</p> <h3 data-adhere="true">Atención postoperatoria</h3> <p data-adhere="true">Tras una <strong>operación de rotura de menisco</strong>, los afectados deben ir ejercitando la rodilla poco a poco para que vaya realizando las actividades diarias. Dependiendo del procedimiento quirúrgico y la gravedad de la <strong>rotura de menisco</strong>, se debe aliviar el peso en la rodilla durante un tiempo tras la intervención mediante el uso de muletas. Podrá volver a practicar deporte con normalidad después de 4 a 6 semanas de la extirpación parcial, y en el caso de una sutura de menisco, de 3 a 9 meses después. La fisioterapia puede entrenar la musculatura, con lo que mejora la orientación muscular de la articulación. Además, las posibilidades de curación aumentan con la ayuda de ejercicios de coordinación o la electroterapia.</p> <h2 data-adhere="true">Prevención</h2> <p data-adhere="true">La <strong>rotura de menisco</strong> puede prevenirse si se evitan determinados movimientos como ponerse en cuclillas bruscamente o los deportes que sobrecargan las articulaciones (fútbol, balonmano, tenis o esquí, por ejemplo). Es muy recomendable calentar antes de practicar estos deportes y colocarse los protectores adecuados. Se puede conservar la movilidad de la rodilla (e incluso mejorarla) con la práctica de deportes de bajo impacto en las articulaciones tales como el <a href="https://www.onmeda.es/deporte/ciclismo.html">ciclismo</a>, la <a href="https://www.onmeda.es/deporte/natacion.html">natación</a> o determinados ejercicios gimnásticos.</p> <p data-adhere="true">En las actividades diarias que requieran estar mucho tiempo sentado es importante intentar cambiar de posición muy a menudo. 
Ante una <strong>lesión en el menisco</strong>, es aconsejable disminuir el <a href="https://www.onmeda.es/enfermedades/obesidad.html">sobrepeso</a>.</p><nav class="pagination"><strong class="pagination__prev is--disabled"><span>&nbsp;</span></strong><ul class="pagination__pages"><li><a href="https://www.onmeda.es/enfermedades/rotura_menisco.html"class="is--active">1</a></li><li><a href="https://www.onmeda.es/enfermedades/rotura_menisco-mas-informacion-6130-2.html">2</a></li></ul><a href="https://www.onmeda.es/enfermedades/rotura_menisco-mas-informacion-6130-2.html" title="weiter" class="pagination__next"><span>&nbsp;</span></a></nav> <div id="taboola-below-article-thumbnails-1"></div> <script type="text/javascript"> window._taboola = window._taboola || []; _taboola.push({ mode: 'thumbnails-a', container: 'taboola-below-article-thumbnails-1', placement: 'Below Article Thumbnails 1', target_type: 'mix' }); </script> <div id="taboola-below-article-thumbnails"></div> <script type="text/javascript"> window._taboola = window._taboola || []; _taboola.push({ mode: 'organic-thumbnails-a', container: 'taboola-below-article-thumbnails', placement: 'Below Article Thumbnails', target_type: 'mix' }); </script> <div class="content-teaser"> <div class="grid"> <div class="grid__item is--center-ad"> </div> </div> </div> </div> <div class="l-col l-col--secondary" id="sprout_secondary"> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 12, [], 'Publicidad');</script> <span data-sidebarname="Sidebar Enfermedades"></span><h2 class="sidebar__title"><a href="https://www.onmeda.es/enfermedades/enfermedades_frecuentes.html">Enfermedades frecuentes</a></h2><ul class="linklist"><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/asma.html">Asma</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/diabetes.html">Diabetes Mellitus</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/psoriasis.html">Psoriasis</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">Hipertensión arterial</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/obesidad.html">Obesidad</a></li></ul><hr class="divider"/><div class="promo-item-featured" data-clickable="true"> <div class="promo-item__thumb"> <a href="https://www.facebook.com/Onmeda.es/" class="promo-item__link"> <img src="https://i.onmeda.de/es/facebook-onmeda.jpg" alt="Facebook de Onmeda.es"/> </a> </div> <!-- /.promo-item__thumb --> <div class="promo-item__content"> <h2 class="promo-item__title"><a href="https://www.facebook.com/Onmeda.es/" class="promo-item__link">¡Nuestra página de Facebook!</a></h2><p>¡Síguenos también en Facebook y descubrirás mucha más información interesante sobre salud!</p> </div> <!-- /.promo-item__content --> </div> <h2 class="sidebar__title">Los foros más visitados</h2><!-- linklist-sitebar --> <ul class="linklist-sitebar"> <li><h3><a href="https://www.onmeda.es/foros/nutrición-y-dietas">Foro de Nutrición</a></h3><p>Alimentación sana, recomendaciones para perder peso, dietas peligrosas. Sal de dudas con nuestra experta. </p></li><li><h3><a href="https://www.onmeda.es/foros/sexualidad">Foro de Sexualidad</a></h3><p>Tener una vida sexual satisfactoria es fundamental para tener una buena vida de pareja. 
¿Tienes alguna pregunta?</p></li><li><h3><a href="https://www.onmeda.es/foros/otorrinolaringología">Foro de Otorrinolaringología</a></h3><p>Otitis media, una amigdalitis o una afonía son enfermedades otorrinolaringológicas. Consulta con nuestro experto.</p></li></ul> <!-- / linklist --> <p class="link-more"><a href="https://www.onmeda.es/foros/ ">Todos los foros</a></p><p class="link-more"><a href="https://www.onmeda.es/foros/register?urlpath=aHR0cDovL3d3dy5vbm1lZGEuZXMvZm9yb3Mv">¡Regístrate y participa!</a></p><div class="promo-item-featured" data-clickable="true"> <div class="promo-item__thumb"> <a href="https://www.instagram.com/onmeda/" class="promo-item__link"> <img src="https://i.onmeda.de/es/instagram-onmeda.jpg" alt="Instagram Onmeda"/> </a> </div> <!-- /.promo-item__thumb --> <div class="promo-item__content"> <h2 class="promo-item__title"><a href="https://www.instagram.com/onmeda/" class="promo-item__link">¡Síguenos en Instagram!</a></h2><p>¡En nuestras redes sociales descubrirás muchos más consejos y cosas curiosas sobre salud. ¡Te sorprenderás!</p> </div> <!-- /.promo-item__content --> </div> </div> <div class="l-clear"></div> </section> <div class="footer" style="margin: 10px;"> <hr class="divider" /> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 3243, ["4040"], 'Publicidad');</script> <hr class="divider" /> <section class="footer-global__socialmedia"> <div class="grid-2up"> <article class="grid__item footer-global__socialmedia--text"> <a href="http://www.facebook.com/Onmeda.es" target="_blank"><span data-site-icon="L"></span>Síguenos en Facebook</a> </article> <article class="grid__item footer-global__socialmedia--text"> <a href="https://twitter.com/Onmeda_es" target="_blank"><span data-site-icon="t"></span>Síguenos en Twitter</a> </article> </div> </section> <!-- /.footer-global__logo-items --> <section class="footer-global__logo-items"> </section> <!-- /.footer-global__logo-items --> <section class="footer-global__logo-items"> <div class="grid-2up"> <article class="grid__item footer-global__logo-item--text"> <a rel="nofollow" onclick="window.open(this.href); return false;" href="https://wma.ssl.comb.es/seal.php?INTWMA=2435&amp;idi=esp" target="_blank"> <!-- <img src="https://wma.comb.es/imglogo.php?INTWMA=2435&amp;size=small" border="0" alt="Web Médica Acreditada. Ver más información" longdesc="http://wma.comb.es/esp/presentacio.htm" title="Web Médica Acreditada. Ver más información"> --> </a> <p> Onmeda cumple y respeta las reglas de conducta ética y deontológica deseables para la comunidad médica. 
</p> </article> <!-- /.footer-global__logo-item --> <article class="grid__item footer-global__logo-item--text"> <a rel="nofollow" onclick="window.open(this.href); return false;" href="http://www.portalesmedicos.com/web_interes_sanitario/120202.htm" target="_blank"> <img border="0" alt="web interes sanitario" src="https://i.onmeda.de/es/logo_wis_80x36.jpg" title="web interes sanitario"> </a> <p>Onmeda está certificada como web de interés sanitario porque cumple sus principios generales y el código ético</p> </article> <!-- /.footer-global__logo-item --> </div> <div class="grid-2up"> <article class="grid__item footer-global__logo-item--text"> <a onclick="window.open(this.href); return false;" href="https://www.healthonnet.org/HONcode/Spanish/?HONConduct435283"> <img alt="Nosotros subscribimos Los Principios del código HONcode de la Fundación Salud en la Red" title="Nosotros subscribimos Los Principios del código HONcode de la Fundación Salud en la Red" style="border:0px; width: 49px; height: 72px; float: left;" src="https://www.honcode.ch/HONcode/Seal/HONConduct435283_s.gif"></a> <p>Nosotros subscribimos los <a onclick="window.open(this.href); return false;" href="http://www.healthonnet.org/HONcode/Spanish/"> Principios del código HONcode</a>. <br><a onclick="window.open(this.href); return false;" href="https://www.healthonnet.org/HONcode/Spanish/?HONConduct435283">Compruébelo aquí.</a> </p> </article> <!-- /.footer-global__logo-item --> <article class="grid__item footer-global__logo-item--text"> <a rel="nofollow" onclick="window.open(this.href); return false;" href="http://www.medicina21.com/doc2.php?op=sello_mostrar2&idsello=964" target="_blank"><img src="https://www.medicina21.com/images/sello_m21.gif" alt="Sello de calidad Medicina XXI" border="0"></a> <p>Onmeda respeta el interés general y el derecho a desarrollar la sociedad de la información en el ámbito de la salud.</p> </article> <!-- /.footer-global__logo-item --> </div> </section> <!-- /.footer-global__logo-items --> <section class="footer-global__disclaimer"> <h2>Aviso legal:</h2> <p>© Copyright © 2018 Enfemenino.com - Todos los contenidos publicados en el portal de salud y medicina <a href="/">Onmeda.es</a> tienen un carácter puramente informativo y no sustituyen en ningún caso la consulta médica o el asesoramiento profesional. Onmeda.es tampoco sugiere el autodiagnóstico y la automedicación. En caso de duda consulta nuestra sección de <a id="HAFTUNG_DISCLAIMER" href="/exencion_de_responsabilidad.html" rel="nofollow">exención de responsabilidad</a>. 
</p> </section> <!-- /.footer-global__disclaimer --> <section class="footer-global__link-items"> <div class="grid"> <nav class="grid__item footer-global__link-item footer-global__link-item--col"> <ul> <li><a href="https://www.onmeda.es/sobre_nosotros.html">Sobre Nosotros</a></li> <li><a href="https://www.onmeda.es/contacto.html">Contacto</a></li> <li><a href="https://www.onmeda.es/sitemap.html">Mapa del sitio</a></li> </ul> </nav> <!-- /.footer-global__link-item--col --> <nav class="grid__item footer-global__link-item footer-global__link-item--col"> <ul> <li><a rel="nofollow" href="https://www.onmeda.es/exencion_de_responsabilidad.html" id="DATENSCHUTZ">Exención de responsabilidad</a></li> <li><a rel="nofollow" href="https://www.onmeda.es/equipo.html" id="IMPRESSUM">Equipo</a></li> </ul> </nav> <!-- /.footer-global__link-item--col --> <nav class="grid__item footer-global__link-item footer-global__link-item--col"> </nav> <!-- /.footer-global__link-item--col --> </div> </section> <!-- /.footer-global__link-items --> <!-- <section class="footer-global__legal"> <ul> <li><a target="_blank" href="http://www.enfemenino.com/"><img border="0" alt="" src="https://i.onmeda.de/ts/logo-enfemenino2014.png"></a></li> </ul> <p>© 2018 Enfemenino.com</p> </section>--> <!-- /.footer-global__legal --> </div> </div> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script> <script>window.jQuery || document.write('<script src="https://i.onmeda.de/core/assets/scripts/libs/jquery/jquery-1.9.1.min.js"><\/script>')</script> <!-- sprout's core modules and js --> <script src="https://i.onmeda.de/javascripts/app-core.js" type="text/javascript"></script> <!-- sprouts instance's/local modules and js --> <script src="https://i.onmeda.de/javascripts/app-local.js" type="text/javascript"></script> <script src="https://i.onmeda.de/shariff/complete.js"></script> <script> onmeda.callback._trackers.ga = new onmeda.tracker.ga(); onmeda.callback._trackers.ga.set_account('UA-115586-14'); onmeda.callback._trackers.ga.enable_v2(); </script> <script type="text/javascript">onmeda.set('client_device', 'DESKTOP');</script> <div id ="wallpaper"></div> <script type="text/javascript" language="javascript">try { af_reftrack('onmeda.es'); } catch (e) { }</script> <script type="text/javascript"> (function() { var hm = document.createElement('script'); hm.type ='text/javascript'; hm.async = true; hm.src = ('++u-heatmap-it+log-js').replace(/[+]/g,'/').replace(/-/g,'.'); var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(hm, s); })(); </script> <script> setTimeout(function(){ var v=document.visibilityState; if(v && v!='visible')document.location=document.location.href; },300000) </script> <script type="text/javascript"> function _eStat_Whap_loaded_func(){ eStatWhap.serial("800000206973"); eStatWhap.send(); } (function() { var myscript = document.createElement('script'); myscript.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'w.estat.com/js/whap.js'; myscript.setAttribute('async', 'true'); var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(myscript, s); })(); </script> <!-- Quantcast Tag --> <script type="text/javascript"> var _qevents = _qevents || []; (function() { var elem = document.createElement('script'); elem.src = (document.location.protocol == "https:" ? "https://secure" : "http://edge") + ".quantserve.com/quant.js"; elem.async = true; elem.type = "text/javascript"; var scpt = document.getElementsByTagName('script')[0]; scpt.parentNode.insertBefore(elem, scpt); })(); _qevents.push({ qacct:"p-_ttKUxC0CDauZ" }); </script> <noscript> <div style="display:none;"> <img src="//pixel.quantserve.com/pixel/p-_ttKUxC0CDauZ.gif" border="0" height="1" width="1" alt="Quantcast"/> </div> </noscript> <!-- End Quantcast tag --> <script type="text/javascript"> window._taboola = window._taboola || []; _taboola.push({article: 'auto'}); !function (e, f, u, i) { if (!document.getElementById(i)) { e.async = 1; e.src = u; e.id = i; f.parentNode.insertBefore(e, f); } }(document.createElement('script'), document.getElementsByTagName('script')[0], 'https://cdn.taboola.com/libtrc/onmeda-onmedaes/loader.js', 'tb_loader_script'); if (window.performance && typeof window.performance.mark == 'function') { window.performance.mark('tbl_ic'); } window._taboola = window._taboola || []; _taboola.push({flush: true}); $.getScript('https://www.aufeminin.com/reloaded/static/Front/Vendor/auf/cmp/cmp.js', function() { }); </script> </body> </html>
<file_sep>/Experimento2/getAllPages.py
import time
import os

# Download every page listed in paginas.txt, skipping links whose file
# (the URL basename) already exists locally; wait 10 seconds between
# downloads to avoid hammering the server.
with open("paginas.txt", "r") as f:
    links = f.readlines()

for l in links:
    try:
        # Opening the file succeeds only if this page was already fetched.
        open(l[:-1].rsplit("/", 1)[1], "r")
    except:
        os.system("wget " + l)
        time.sleep(10)
<file_sep>/Experimento3/readme.md
# Experimento 3

This experiment uses neural networks through the gensim toolkit. However, since the documents are not balanced (there is not the same number of documents for each topic), its performance is poor; moreover, as the corpus grows, the number of training epochs must grow as well.
<file_sep>/README.md
# Diagnosticador médico en español

This project proposes a system that analyzes the texts related to a disease (in this case, its symptoms) and, given a user's text describing their symptoms, predicts which disease has the closest matching description.

## Corpus

The corpora were obtained from web pages that describe the symptoms of diseases; it was possible to collect up to 491 diseases with their symptoms (latest version in Experimento4/datosLimpios.csv). The methods used were Gensim, tf-idf and the SBWC word embeddings (http://crscardellino.github.io/SBWCE/).

## Conclusions

Overall the experiments were successful. One thing to note, however, is that the symptoms need to be described at length (a description of 50 or more words works best), and the system does not recognize which diseases are more common, so a flu and a cancer will look equally likely to it.
It is important to make clear that this project is purely academic; it should be used with discretion and its results should not be trusted too much, since it is an experiment rather than a finished product.
<file_sep>/Experimento1/getWebPages.py
import time
import os

# Download the A-Z disease index pages from cuidateplus.marca.com,
# waiting 10 seconds between requests. Written for Python 2 (unichr);
# under Python 3, chr() would be used instead.
l = ord("A")
while l <= ord("Z"):
    dir = ("https://cuidateplus.marca.com/enfermedades/%1s.html" % (str(unichr(l))))
    os.system("wget " + dir)
    time.sleep(10)
    l += 1
<file_sep>/Experimento3/main2.py
import time
import gensim
import os
import collections
import smart_open
import random
import csv
import unicodedata


def read_corpus(fname, tokens_only=False):
    """Yield one (tagged) document per disease from an 'enfermedad|sintomas' file."""
    global etq
    # ignorar=open("stopwords-es-master/stopwords-es.txt","r").read().splitlines()
    ignorar = []
    with open(fname, "r") as f:
        lineas = map(lambda x: [i for i in x.split("|", 1)], f.read().splitlines())
        ind = 0
        for row in lineas:
            if len(row) > 1 and len(row[1]) != 0:
                aux = unicodedata.normalize('NFKD', row[1]).lower()
                for i in ignorar:
                    aux = aux.replace(" " + i + " ", " ")
                # Keep only descriptions longer than 30 words.
                if len(aux.split()) > 30:
                    if tokens_only:
                        yield gensim.utils.simple_preprocess(aux)
                    else:
                        yield gensim.models.doc2vec.TaggedDocument(gensim.utils.simple_preprocess(aux), [ind])
                    etq[ind] = row[0]
                    # input(ind)
                    ind += 1


def splitInv(cad, caracteres, stopwords):
    """Tokenize cad, keeping only runs of the allowed characters and dropping stopwords."""
    arreglo = []
    aux = ""
    cad = cad.lower() + "."
    for c in cad:
        if c not in caracteres:
            if aux != "" and aux not in stopwords:
                arreglo.append(aux)
            aux = ""
        else:
            aux += c
    return arreglo


etq = {}
# stopwords=open("stopwords-es-master/stopwords-es.txt","r").read().splitlines()
stopwords = []

train_corpus = list(read_corpus("datosLimpios.csv"))
model = gensim.models.doc2vec.Doc2Vec(
    vector_size=100,
    min_count=2,
    window=4,
    # dm=1,
    dm_concat=1,
    # dbow_words=1,
    epochs=475,
    workers=16,
    # ns_exponent=0
)
# train_corpus = list(read_corpus("../datos.csv"))
# model = gensim.models.doc2vec.Doc2Vec(
#     vector_size=100,
#     min_count=5,
#     window=2,
#     dm=0,
#     dm_concat=1,
#     # dbow_words=0,
#     epochs=300,
#     workers=16,
#     ns_exponent=1.125
# )
model.build_vocab(train_corpus)
model.train(train_corpus, total_examples=model.corpus_count, epochs=model.epochs)

# Infer a vector for each symptom file and print the most similar diseases.
texto = splitInv(open("misSintomas.txt").read(), "qwertyuiopasdfghjklzxcvbnm", stopwords)
print(texto)
vector = model.infer_vector(texto)
sim = model.docvecs.most_similar([vector], topn=4)
for i in sim:
    print("Documento Similar %s %s: " % (etq[i[0]], i[1]))

# -
texto = splitInv(open("misSintomas2.txt").read(), "qwertyuiopasdfghjklzxcvbnm", stopwords)
print(texto)
vector = model.infer_vector(texto)
sim = model.docvecs.most_similar([vector], topn=4)
for i in sim:
    print("Documento Similar %s %s: " % (etq[i[0]], i[1]))

# -
texto = splitInv(open("misSintomas3.txt").read(), "qwertyuiopasdfghjklzxcvbnm", stopwords)
print(texto)
vector = model.infer_vector(texto)
sim = model.docvecs.most_similar([vector], topn=4)
for i in sim:
    print("Documento Similar %s %s: " % (etq[i[0]], i[1]))
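
# --------------------------------------------------------------------------
# Illustrative helper (not part of the original script): a hedged sketch of
# how the query steps above could be wrapped into a single reusable call.
# It assumes the globals built earlier in this file (model, etq, stopwords)
# and only repackages the existing infer_vector / most_similar flow.
def diagnosticar(ruta_sintomas, topn=4):
    """Return the topn (enfermedad, similitud) pairs for a symptom text file."""
    texto = splitInv(open(ruta_sintomas).read(), "qwertyuiopasdfghjklzxcvbnm", stopwords)
    vector = model.infer_vector(texto)
    return [(etq[doc_id], score) for doc_id, score in model.docvecs.most_similar([vector], topn=topn)]

# Example usage (uncomment to run):
# for enfermedad, similitud in diagnosticar("misSintomas.txt"):
#     print(enfermedad, similitud)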
<file_sep>/Experimento2/paginasEspecificas/piedras_en_la_vesicula.html
<!doctype html> <!--[if lt IE 7]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 lt-ie9 lt-ie8 lt-ie7"><![endif]--> <!--[if IE 7]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 lt-ie9 lt-ie8 ie7"><![endif]--> <!--[if IE 8]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 lt-ie9 ie8"><![endif]--> <!--[if IE 9]><html lang="es" dir="ltr" id="sprout" class="no-js lt-ie10 ie9"><![endif]--> <!--[if gt IE 9]><!--><html lang="es" dir="ltr" id="sprout" class="no-js"><!--<![endif]--> <head> <meta charset="utf-8"/> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/> <meta name="HandheldFriendly" content="True"/> <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no"/> <meta http-equiv="cleartype" content="on"/> <title>Piedras en la vesícula (cólico biliar, litiasis biliar) - Onmeda.es</title> <!-- fugly script to sort things for windows-phone-8: see here: http://mattstow.com/responsive-design-in-ie10-on-windows-phone-8.html --> <script type="text/javascript"> (function() { if ("-ms-user-select" in document.documentElement.style && navigator.userAgent.match(/IEMobile\/10\.0/)) { var msViewportStyle = document.createElement("style"); msViewportStyle.appendChild(document.createTextNode("@-ms-viewport{width:auto!important}")); document.getElementsByTagName("head")[0].appendChild(msViewportStyle); } })(); </script> <!--[if lte IE 9]><link rel="stylesheet" href="https://i.onmeda.de/core/assets/css/app-ie.css"/><![endif]--> <!--[if gt IE 9]><!--><link rel="stylesheet" href="https://i.onmeda.de/core/assets/css/app.css"/><!--<![endif]--> <link rel="stylesheet" href="https://i.onmeda.de/shariff/complete.css"/> <script src="https://i.onmeda.de/core/assets/scripts/libs/modernizr/modernizr.js"></script> <meta name="author" content="gofeminin.de GmbH, 51149 Koeln, Germany"/> <meta name="language" content="es"/> <meta name="robots" content="index, follow, noodp"/> <meta name="description" content="Las piedras en la ves&iacute;cula (c&oacute;lico biliar, litiasis biliar) son causadas por el dep&oacute;sito progresivo de materias s&oacute;lidas como colesterol o"/> <meta name="keywords" content="piedras en la vesicula, litiasis biliar, colelitiasis, vesicula biliar, causas litiasis biliar, tratamiento litiasis biliar, sintomas litiasis biliar, prevencion litiasis biliar,"/> <link rel="icon" href="https://i.onmeda.de/nav/favicon_neu.ico" type="image/ico"/> <meta property="og:site_name" content="onmeda.es"/> <meta property="og:type" content="article"/> <meta property="og:title" content="Piedras en la vesícula (cólico biliar, litiasis biliar) - Onmeda.es"/> <meta property="og:description" content="Las piedras en la ves&iacute;cula (c&oacute;lico biliar, litiasis biliar) son causadas por el dep&oacute;sito progresivo de materias s&oacute;lidas como colesterol o"/> <meta property="og:image" content="https://i.onmeda.de/es/gallensteine-870x435.jpg"/> <meta property="og:url" content="enfermedades/piedras_en_la_vesicula"/> <meta property="fb:pages" content="342661132432481"/> <meta property="article:published_time" content="2016-12-15T00:00:00+01:00"/> <meta property="article:author" content=" <NAME>"/> <meta property="url:home" content="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula.html"/> <script type="text/javascript" src="https://i.onmeda.de/onmeda_ads_2018.js"></script> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create','UA-115586-14','auto'); ga('require','displayfeatures'); ga('send','pageview'); ga('set','anonymizeIp',true); </script> <script type="text/javascript" src="https://script.ioam.de/iam.js"></script> <!-- Google Tag Manager --> <script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var
f=d.getElementsByTagName(s)[0],j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src='https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);})(window,document,'script','dataLayer','GTM-KH5LF8B'); </script> <!-- End Google Tag Manager --> <link rel="next" href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula-mas-informacion-1344-2.html"/><link rel="canonical" href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula.html"/><link rel="amphtml" href="https://www.onmeda.es/amp/enfermedades/piedras_en_la_vesicula.html"/> <script> var _prum = [['id', '56727a59abe53d726f7ece44'], ['mark', 'firstbyte', (new Date()).getTime()]]; (function() { var s = document.getElementsByTagName('script')[0] , p = document.createElement('script'); p.async = 'async'; p.src = '//rum-static.pingdom.net/prum.min.js'; s.parentNode.insertBefore(p, s); })(); </script> <script src="https://i.onmeda.de/nav/visitor_api_v1.6.js"></script> <script src="https://i.onmeda.de/nav/dil_v6.4.js"></script> <script> var af_consentInternetTargeting = true; if (typeof(window.__cmp) !== 'undefined') { var customCookieData = JSON.parse(window.__cmp.getCustomConsentBeforeLoad()); if (customCookieData !== null && typeof(customCookieData.internet_targeting) !== 'undefined') { af_consentInternetTargeting = customCookieData.internet_targeting; } } var omf_datalayer = [{ 'siteInfo': { 'sid': 60391, 'platform': 'onmeda.es', 'uaid': 'UA-115586-14', 'lang': 'es', 'geo': 'es' }, 'pageInfo': { 'contentType': 'Hard-Inhalt', 'ops': 'none', 'section': 'enfermedades', 'additional': 'Enfermedades', 'thematic': 'piedras_en_la_vesicula.html' }, 'userInfo': { 'du': 'es', 'logged': 0, 'device': 'desktop', 'hashedUserId': '', 'referer': '0', 'consentInternetTargeting': (af_consentInternetTargeting ? 
1 : 0) }, 'campaignInfo': { 'utmMedium': '', 'utmSource': '', 'utmCampaign': '' } }]; var partnerDIL = DIL.create({ partner : 'aufeminin', uuidCookie : { name : 'aam_uuid', days : 30 }, visitorService: { namespace: '44326DF2572396FB7F000101@AdobeOrg' } }); if (partnerDIL) { partnerDIL.api.signals({ c_sid:omf_datalayer[0].siteInfo.sid, c_platform:omf_datalayer[0].siteInfo.platform, c_uaid:omf_datalayer[0].siteInfo.uaid, c_lang:omf_datalayer[0].siteInfo.lang, c_geo:omf_datalayer[0].siteInfo.geo, c_contentType:omf_datalayer[0].pageInfo.contentType, c_ops:omf_datalayer[0].pageInfo.ops, c_section:omf_datalayer[0].pageInfo.section, c_additional:omf_datalayer[0].pageInfo.additional, c_thematic:omf_datalayer[0].pageInfo.thematic, c_userInfo:omf_datalayer[0].userInfo.du, c_device:omf_datalayer[0].userInfo.device, c_logged:omf_datalayer[0].userInfo.logged, c_hashedUserId:omf_datalayer[0].userInfo.hashedUserId, c_referer:omf_datalayer[0].userInfo.referer, c_utmMedium:omf_datalayer[0].campaignInfo.utmMedium, c_utmSource:omf_datalayer[0].campaignInfo.utmSource, c_utmCampaign:omf_datalayer[0].campaignInfo.utmCampaign, c_share:omf_datalayer[0].userInfo.consentInternetTargeting }); } var af_dataLayer = omf_datalayer; </script> </head> <script type="text/javascript"> (function(window, document) { if (!window.__cmp ) { window.__cmp = (function() { var listen = window.attachEvent || window.addEventListener; listen('message', function(event) { window.__cmp.receiveMessage(event); }, false); function addLocatorFrame() { if (!window.frames['__cmpLocator']) { if (document.body) { var frame = document.createElement('iframe'); frame.style.display = 'none'; frame.name = '__cmpLocator'; document.body.appendChild(frame); } else { setTimeout(addLocatorFrame, 5); } } } addLocatorFrame(); var commandQueue = []; var cmp = function(command, parameter, callback) { if (command === 'ping') { if (callback) { callback({ gdprAppliesGlobally: !!(window.__cmp && window.__cmp.config && window.__cmp.config.storeConsentGlobally), cmpLoaded: false }); } } else { commandQueue.push({ command: command, parameter: parameter, callback: callback }); } } cmp.commandQueue = commandQueue; cmp.receiveMessage = function(event) { var data = event && event.data && event.data.__cmpCall; if (data) { commandQueue.push({ callId: data.callId, command: data.command, parameter: data.parameter, event: event }); } }; cmp.getConsentBeforeLoad = function() { var nameEQ = 'af_eu_consent' + "="; var ca = document.cookie.split(';'); console.log(ca); for(var i=0;i < ca.length;i++) { var c = ca[i]; while (c.charAt(0) === ' ') { c = c.substring(1,c.length); } if (c.indexOf(nameEQ) === 0) { return c.substring(nameEQ.length,c.length); } } return null; } cmp.config = { forceLocale: 'es', acceptOnScroll: false, logoUrl: 'https://i.onmeda.de/nav/logo-es.png', cguLink: 'https://www.onmeda.es', minimumVendorList : [], minimumVendorListTime : 259200000, minimumVendorListCheck : false, siteId : 37 } return cmp; }()); } })(window, document); </script> <body> <!-- Google Tag Manager (noscript) --> <noscript><iframe src="https://www.googletagmanager.com/ns.html?id=GTM-KH5LF8B"height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript> <!-- End Google Tag Manager (noscript) --> <!-- <script type="text/javascript" src="https://static.afcdn.com/world/aufeminin.js?v=201711151857"></script> --> <script type="text/javascript"> var sas_networkId = 1; var sas_domain = 'https://ww1.smartadserver.com'; var sas_siteId = 60391; var sas_pageId = 476834; var 
<div class="doc"> <section class="l-content group" id="sprout_content"> <div class="l-col l-col--primary" id="sprout_primary"> <div class="content-header"> <h1 class="content-header__title"> Piedras en la vesícula (cólico biliar, litiasis biliar) </h1> <p class="content-header__meta"> <span class="content-header__meta__item span content-header__meta__item--author"><strong>Autor: <a href="/equipo.html" target="_new"> Redacción Onmeda</a></strong></span> <span class="content-header__meta__item span content-header__meta__item--author"><strong>Revisión médica: Dra.
<NAME></strong> <span class="content-header__meta__item span content-header__meta__item--datestamp">(15 de Diciembre de 2016)</span> </p> </div> <div class="content-image content-image--full"> <div class="content-image__image"> <img src="https://i.onmeda.de/nwl/blindfisch.gif" data-lazyload="https://i.onmeda.de/es/gallensteine-870x435.jpg" width="870" height="435"/><span class="image-copyright">© Jupiterimages/iStockphoto</span> </div> </div> <nav class="subnav float--left js-fix-it is--active" data-toggles="true" data-openicon="+" data-closedicon="-" data-avoid=".footer-global__legal" data-fixed-title="Indice"> <div class="subnav__title"> <h2 class="subnav__title__text"><span class="subnav__title__label">Indice</span></h2> </div> <ul class="subnav__list"> <li class="subnav__list__item is--active"><a href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula.html" class="subnav__list__item__link">Piedras en la vesícula</a></li><li class="subnav__list__item"><a href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula-mas-informacion-1344-2.html" class="subnav__list__item__link">Más información</a></li> </ul> </nav> <p data-adhere="true" class="lead">Las piedras en la vesícula (cólico biliar, litiasis biliar) son causadas por el depósito progresivo de materias sólidas como colesterol o proteínas en la vesícula biliar o en el conducto biliar. El cólico biliar (colelitiasis del griego <em>chole</em> = vesícula biliar, <em>lithos</em> = cálculos) es la dolencia más frecuente y peligrosa de la vesícula y del conducto biliar.</p> <p data-adhere="true">Casi todas las enfermedades de los conductos biliares están relacionadas con la aparición de cálculos en su sistema de drenaje. En los países occidentales industrializados, entre el 10 y el 15% de las personas adultas padecen esta dolencia, aunque sólo el 25% de los afectados manifiestan su clínica. Esta patología es más frecuente en mujeres que en hombres. Entre los factores de riesgo se encuentran, principalmente, la obesidad y los altos índices de albúmina.</p> <p data-adhere="true">El síntoma más característico de las <strong>piedras en la vesícula</strong> es un dolor agudo en la región derecha del epigastrio llamado cólico hepático. Este síntoma puede acompañarse de <a href="https://www.onmeda.es/sintomas/nauseas.html">náuseas</a>. El diagnóstico de la <strong>litiasis biliar</strong> se realiza a partir de la clínica que refiere el paciente y pruebas de imagen como la <a href="https://www.onmeda.es/exploracion_tratamiento/ecografia.html">ecografía</a>.</p> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 4040, ["12","300"], 'Publicidad');</script> <p data-adhere="true">Cuando los cálculos están situados en la vesícula biliar, lo más común es proceder directamente a su extirpación (colecistectomía). Si, por lo contrario, los cálculos se localizan en el conducto biliar, el médico procede a eliminarlos mediante endoscopia. Normalmente, el tratamiento suele realizarse cuando la litiasis biliar es clínicamente sintomática.<br /></p> <h2 data-adhere="true">Definición</h2> <p data-adhere="true">Los <strong>cálculos biliares</strong> del <strong>cólico biliar</strong> son cristales presentes tanto en la <strong>vesícula biliar</strong> (<strong>colecistolitiasis</strong>) como en los conductos biliares (<strong>colangiolitiasis</strong>), o en el conducto colédoco, o en vía biliar principal de los ductos biliares del intestino (<strong>coledocolitiasis</strong>). 
Estos elementos se forman cuando los diversos componentes de la bilis están desequilibrados unos de otros y cuando aquéllos que son difíciles de disolver se engrumecen se vuelven más densos.</p> <p data-adhere="true">Los cálculos biliares tienen diferentes tamaños. Miden desde unos pocos milímetros (también llamado lodo barro biliar) hasta algunos centímetros. Pueden aparecer aislados o en conjunto.</p> <p data-adhere="true">Según su composición, se diferencian dos clases principales: los de colesterol, compuestos en su mayor parte por esta sustancia, y los pigmentarios.<br /><section class="carousel carousel--standard carousel--sidebar carousel--external" data-width="580" data-height="435" data-external="/-galleries/gallery/get_json/286" data-page="de" data-clickable="true" data-callback="onmeda.callback.carousel({ga: 'enfermedades/piedras_en_la_vesicula-gally-00286.html'})"> <div class="carousel__slides"></div> </section>  </p> <h3>Historia</h3> <p data-adhere="true">La <strong>vesícula biliar</strong> ha desempeñado un papel importante desde la antigüedad hasta el siglo XVIII. Según la vieja teoría de Empédocles de Agrigento, los cuatro humores (aire, agua, fuego, tierra) correspondían a cuatro flujos corporales: <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">sangre</a>, flema, bilis, bilis negra. Galeno de Pérgamo (129-199), uno de los médicos más importantes de la Antigüedad, describió en su concepto humoral patológico que la causa de todas las enfermedades residía en el desequilibrio de los flujos corporales. Por lo tanto, la salud dependía del equilibrio perfecto (eucrasia, del griego <em>eu</em> = bueno, <em>krásis</em> = mezcla), y la enfermedad dependía del desequilibrio (discrasia, del griego <strong>dys</strong> = defectuoso). Galeno describió los cuatro humores siguientes, de acuerdo con el elemento que estuviera desproporcionado: sanguíneo (sangre), flemático (flema), colérico (bilis), melancólico (bilis negra). La asociación de la bilis con el fuego daba lugar a la interpretación colérica e irascible de la persona. A día de hoy, aún existen expresiones como bilioso; para denominar a alguien mordaz o irascible.</p> <p data-adhere="true">La <strong>litiasis biliar</strong> (<strong>colelitiasis</strong>) comenzó a investigarse en el año 1892 con la obra de <NAME>. Naunyn reconoció que los diversos compuestos de la bilis solidifican y bloqueaban el flujo de la bilis y que, consecuentemente, aparecían las molestias, como los dolores en la región derecha del epigastrio, las <a href="https://www.onmeda.es/sintomas/nauseas.html">náuseas</a>, los <a href="https://www.onmeda.es/sintomas/gases.html">gases</a> y la <a href="https://www.onmeda.es/sintomas/sensacion_plenitud.html">sensación de saciedad</a>.</p> <h3 data-adhere="true">Anatomía de la vesícula biliar</h3> <p data-adhere="true">La <strong>vesícula biliar</strong> (<em>vesica biliaris</em>) es una bolsa en forma de pera que recibe todos los <strong>conductos biliares</strong> situados fuera del hígado (extrahepáticos) que unen el hígado con el duodeno. Los conductos biliares derecho e izquierdo procedentes del hígado se juntan y se funden en un solo conducto biliar común (ductus hepaticus communis). La vesícula biliar se sitúa en la parte inferior del hígado. Su vía saliente, el conducto cístico (<em>ductus cysticus</em>), une la vesícula biliar con ambos conductos hepáticos. 
Con la unión de los conductos hepáticos y del conducto cístico se forma el ducto conducto colédoco (<em>ductus choledochus</em>), el cual, junto con el conducto pancreático, desemboca en el duodeno.</p> <p data-adhere="true">Diariamente, el hígado segrega entre 500 y 600 mililitros de bilis hepática de color amarillento. La bilis se hace más espesa en la <strong>vesícula biliar</strong>, y se convierte en una bilis verdosa. Según su peso, los componentes de la bilis son los siguientes:</p> <ul> <li>Agua (82 %).</li> <li>Ácidos biliares. Desempeñan un papel fundamental en el metabolismo lipídico (12 %).</li> <li>Colorantes biliares (sobre todo, la bilirrubina) que resultan de la degradación de la hemoglobina.</li> <li><a href="https://www.onmeda.es/nutrientes/colesterol.html">Colesterol</a>.</li> <li>Productos de desintegración del metabolismo.</li> <li>Sales.</li> </ul> <p data-adhere="true">La función principal de la bilis es ayudar a digerir las grasas por medio de sus componentes. Además, ayuda a excretar las sustancias nocivas y los subproductos del cuerpo.</p> <p data-adhere="true">En la vesícula biliar se almacena la bilis formada en el hígado. Este órgano mide, aproximadamente, 9 centímetros y almacena una media de 50 mililitros de bilis. Con la deshidratación, la bilis de la <strong>vesícula biliar</strong> se concentra. Al ingerir alimentos, ésta se libera por la contracción de la musculatura de la vesícula biliar, y fluye a través de los ductos conductos biliares. Aproximadamente, el 95% de los ácidos biliares se vuelven a absorber en el intestino y regresan al hígado. Aquí, se vuelven a emplear en la producción de más bilis.</p> <h3 data-adhere="true">Tipos de cálculos biliares</h3> <p data-adhere="true">Los <strong>cálculos biliares</strong> suelen aparecen por causa de alguna alteración metabólica de la bilirrubina, del colesterol o de algún ácido biliar. Los cálculos biliares se pueden diferenciar en tres <strong>tipos</strong> según su composición:</p> <ul> <li><strong>Cálculo de colesterol</strong>: los cálculos de colesterol están formados por <a href="https://www.onmeda.es/nutrientes/colesterol.html">colesterol</a> en más de un 70%. En los países industrializados constituyen el tipo de <strong>litiasis biliar</strong> más frecuente (cerca del 70%). Estos cálculos son de color amarillo y pueden llegar a ser tan grandes como un hueso de cereza. Su aparición está relacionada con la alimentación.</li> <li><strong>Cálculos pigmentarios</strong>: cerca del 20% de los cálculos se componen de sales cálcicas de pigmentos biliares y de bilirrubina. Su núcleo suele contener colesterol, en el que se depositan pigmentos biliares (bilirrubina) en forma de bilirrubinato de calcio. Normalmente, suelen ser tan grandes como un grano de arena y aparecen en grandes cantidades. Su color es marrón o negro y su consistencia es blanda. A menudo, los <strong>cálculos biliares</strong> marrones están relacionados con sobreinfecciones en la <strong>vesícula biliar</strong> y en los <strong>conductos biliares</strong>. Los cálculos pigmentarios no están relacionados con los hábitos <a href="https://www.onmeda.es/nutricion/">alimenticios</a>.</li> <li><strong>Cálculos mixtos</strong>: tanto los cálculos de colesterol como los pigmentarios pueden llegar a calcificarse durante los procesos inflamatorios. El resultado es una mezcla de colesterol, pigmentos y calcio. Los cálculos mixtos están compuestos por la suma de colesterol, pigmentos biliares y calcio. 
En estos cálculos, el contenido de colesterol se sitúa entre el 30 y el 70%. Frecuentemente, estos cálculos pueden tener distintos colores. Aproximadamente, en el 10% de los pacientes los <strong>cálculos biliares</strong> se calcifican en su evolución.</li> </ul> <h3>Incidencia</h3> <p data-adhere="true">Entre el 10 y el 15% de la población de los países industrializados desarrolla cálculos biliares alguna vez en la vida. Sin embargo, sólo cerca del 25% de estos pacientes muestra síntomas de la enfermedad y, por ese motivo, la patología suele estar infra diagnosticada.</p> <h2 data-adhere="true">Causas</h2> <p data-adhere="true">Las <strong>causas de las piedras en la vesícula</strong> son numerosas. La formación de <strong>cálculos de colesterol</strong> depende de muchos factores, tanto internos (endógenos) como externos (exógenos). La condición para que aparezcan estos cálculos es la saturación de la bilis por el <a href="https://www.onmeda.es/nutrientes/colesterol.html">colesterol</a>. Normalmente, el colesterol no se disuelve, por lo que siempre tiene que estar rodeado de suficiente ácido biliar. En general, en la bilis, la relación de ácido biliar y colesterol es de 20:1. Cuando esta proporción se sitúa por debajo de 13:1, el colesterol ya no queda disuelto y se satura. Adicionalmente, la alta concentración de flemas (mucinas), producidas en el conducto biliar, colabora a que el colesterol en la bilis se densifique. De este modo, se crean microcristales, los cuales pueden convertirse en <strong>cálculos biliares</strong>.</p> <p data-adhere="true">Los motivos que alteran la relación entre ácidos biliares y colesterol son numerosos. Dicha alteración puede darse cuando los valores de colesterol son altos, cuando la concentración del ácido biliar es baja o cuando la motilidad de la <strong>vesícula biliar</strong> está dañada. Por otro lado, los cálculos pigmentarios tienen su origen en una sobrecarga de bilirrubina en el cuerpo, por ejemplo, en el caso de hemólisis crónica, en la que se produce un exceso de bilirrubina, porque se están degradando muchos glóbulos rojos.</p> <p data-adhere="true">Existen más factores que desempeñan un papel importante en la creación de <strong>cálculos biliares</strong>. Estos factores se describen en la llamada "regla de las 5F": mujer (<strong>f</strong>emale), mayor de cuarenta años (<strong>f</strong>orty), muchos hijos (<strong>f</strong>ertile), sobrepeso (<strong>f</strong>at) y pelo claro (tipo nórdico, <strong>f</strong>air).</p> <h3 data-adhere="true">Factores genéticos</h3> <p data-adhere="true">Cuando las <strong>piedras en la vesícula</strong> (<strong>litiasis biliar</strong>, <strong>cólico biliar</strong>) se dan en repetidas ocasiones en una misma familia se habla del factor genético (predisposición genética). Se desconocen los factores que contribuyen a esta disposición. Las personas con alteraciones del metabolismo lipídico heredadas genéticamente, cuyo cuerpo contiene demasiado <a href="https://www.onmeda.es/nutrientes/colesterol.html">colesterol</a>, tanto procedente de la alimentación como del propio cuerpo, corren un alto riesgo de formar <strong>cálculos biliares</strong>.</p> <h4>Género</h4> <p data-adhere="true">Las <strong>piedras en la vesícula</strong> (<strong>litiasis biliar</strong>, <strong>cólico biliar</strong>) afectan tres veces más a las <strong>mujeres</strong> que a los hombres. 
Además, los <strong><a href="https://www.onmeda.es/embarazo/">embarazos</a></strong> aumentan el <strong>riesgo</strong> de desarrollar <strong>cálculos biliares</strong>. Posiblemente, también haya una relación entre la ingesta de estrógenos después de la <strong><a href="https://www.onmeda.es/enfermedades/menopausia.html">menopausia</a></strong> con el desarrollo de piedras en la vesícula. Durante el embarazo, entre el 20 y el 30% de las mujeres forman pequeños cálculos. Estos no suelen ser molestos y en un 5 a 12% de las mujeres embarazadas desaparecen por sí solos después del parto.</p> <h4>Edad</h4> <p data-adhere="true">La <strong>edad</strong> es un <strong>factor de riesgo</strong> para desarrollar <strong>cálculos biliares</strong> (litiasis biliar, cólico biliar). Aproximadamente, un 50% de las mujeres de 70 años padece <strong>litiasis biliar</strong>. Esta patología afecta anualmente a entre un 2,5 y un 4 % de las personas mayores de cuarenta años. Dado que los cálculos crecen un promedio de, aproximadamente, 2,6 milímetros al año, pueden transcurrir hasta diez años hasta que el paciente manifieste síntomas dolorosos.</p> <h3 data-adhere="true">Trastornos metabólicos</h3> <p data-adhere="true">Los <strong>trastornos metabólicos</strong>, como la <a href="https://www.onmeda.es/enfermedades/diabetes.html">diabetes mellitus</a> o el trastorno del metabolismo lipídico (hipertrigliceridemia), son factores de <strong>riesgo</strong> para el desarrollo de <strong>cálculos biliares</strong>, ya que alteran las concentraciones normales de los compuestos de la <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">sangre</a> y de la bilis.</p> <h4 data-adhere="true">Alimentacion</h4> <p data-adhere="true">La <strong><a href="https://www.onmeda.es/nutricion/">alimentación</a></strong> rica en colesterol aumenta el índice de colesterol en la sangre y favorece la formación de <strong>cálculos biliares de colesterol</strong>. Indirectamente, la alimentación pobre en fibras vegetales aumenta el <strong>riesgo</strong> de que se formen <strong>cálculos biliares</strong> y dificulta el paso de los alimentos en el intestino, y que la sangre absorba más grasas y, por consiguiente, que haya un mayor índice de colesterol en sangre.</p> <p data-adhere="true">Por razones inexplicables, el consumo de azúcares muy refinados como la sacarosa (azúcar de uso doméstico) aumenta el riesgo de formación de cálculos biliares. Sin embargo, el café posiblemente rebaja el riesgo de que se forme una litiasis biliar.</p> <p data-adhere="true">En cualquier caso, en un 10 a un 20 % de los casos, los responsables de la formación de <strong>cálculos biliares</strong> son la alimentación hipocalórica y/o una pérdida de peso a corto plazo. La nutrición parenteral representa también otro factor de riesgo.</p> <h4 data-adhere="true">Sobrepeso</h4> <p data-adhere="true">Actualmente, en los países industrializados, el <strong><a href="https://www.onmeda.es/enfermedades/obesidad.html">sobrepeso</a></strong> se considera uno de los mayores factores de riesgo en la formación de <strong>litiasis biliar</strong>. El aumento de dietas pobres en fibras vegetales y rica en alimentos con altos contenidos en colesterol potencia el riesgo. 
Un peso un 20% más alto de lo normal duplica el <strong>riesgo</strong> de desarrollar piedras en la vesícula.</p> <h3 data-adhere="true">Carencia de ácidos biliares</h3> <p data-adhere="true">La <strong>litiasis biliar</strong> también se puede desarrollar por una <strong>carencia de ácidos biliares</strong> o una reducción en la formación de los mismos. Dichos ácidos mantienen el colesterol diluido y evitan que éste se sature. Una vez digeridas las grasas, el intestino reabsorbe los <strong>ácidos biliares</strong>. Únicamente, se pierde el 5% en las heces y el 95% restante regresa al hígado. De este modo, los ácidos biliares circulan en la llamada circulación enterohepática, entre el intestino y el hígado. Las enfermedades inflamatorias del intestino, por ejemplo, la <a href="https://www.onmeda.es/enfermedades/enfermedad_crohn.html">enfermedad de Crohn</a> o las extirpaciones parciales del intestino, dificultan que el intestino reabsorba los ácidos biliares, de modo que éstos se pierden en la defecación con las heces. Dado que el hígado no puede contrarrestar esta pérdida, se disminuye el índice de los ácidos biliares de la bilis.</p> <h4 data-adhere="true">Exceso de bilirrubina</h4> <p data-adhere="true">Normalmente, los cálculos pigmentarios se forman por el <strong>exceso de bilirrubina</strong> en la bilis. Este colorante biliar de color rojo-anaranjado, resultante de la degradación de la hemoglobina de los glóbulos rojos, se expulsa a través de la bilis al intestino. Esto ocurre en todas las enfermedades en las que haya una ruptura intensa de glóbulos rojos e incitan la producción de la <strong>bilirrubina</strong>. Asimismo, las enfermedades hepáticas, congénitas o heredadas, provocan un trastorno metabólico de la bilirrubina, cuya consecuencia es la formación de cálculos pigmentarios con lo que como esta no se metaboliza aumenta su concentración en sangre y en los jugos biliares y favorece el <strong>riesgo</strong> de <strong>litiasis biliar</strong>. De igual modo, las patologías que obstruyen el paso de la bilis en la <strong>vesícula biliar</strong>, o aquellas infecciones de los conductos biliares por bacterias o parásitos, representan <strong>factores de riesgo</strong> en la formación de <strong>cálculos biliares</strong>.</p> <h2 data-adhere="true">Síntomas</h2> <p data-adhere="true">En un 75% de los pacientes, la <strong>litiasis biliar</strong> es asintomática. Las molestias que pueden aparecer suelen ser poco específicas. Sobre todo, después de comidas copiosas y muy grasas, se manifiestan dolores en la región derecha del epigastrio, a menudo, acompañados por <a href="https://www.onmeda.es/sintomas/sensacion_plenitud.html">sensación de saciedad</a>, <a href="https://www.onmeda.es/sintomas/gases.html">gases</a> y <a href="https://www.onmeda.es/sintomas/nauseas.html">náuseas</a>. Cuando se trata de una ligera irritación de la <strong>vesícula biliar</strong>, las molestias suelen ser similares a las de una <a href="https://www.onmeda.es/enfermedades/gastritis.html">gastritis</a>, por lo que pueden confundirse. Ante mayor irritación, pueden presentarse dolores de <a href="https://www.onmeda.es/anatomia/anatomia_espalda.html">espalda</a> y en el hombro derecho.</p> <p data-adhere="true">Los <strong>síntomas</strong> fuertes aparecen cuando los cálculos comienzan a desplazarse. Este movimiento provoca una inflamación u obstrucción de los conductos biliares. 
Cuando un <strong>cálculo biliar</strong> bloquea el conducto cístico o el conducto biliar común (conducto colédoco), puede darse el llamado <strong>cólico biliar</strong> (del griego <em>kolike</em> = dolor del colon). En este caso, la musculatura de la pared del conducto biliar se contrae para poder seguir transportando el cálculo. Por el aumento de la presión en el conducto biliar aparecen <a href="https://www.onmeda.es/enfermedades/dolor.html">dolores</a>, caracterizados por violentos retortijones, sobre todo en la región derecha del epigastrio, que luego se sienten en la espalda y en el hombro derecho y que el paciente suele describir como insoportable. El dolor suele ser intermitente, con periodos muy dolorosos y periodos de menos dolor. El cólico biliar puede durar muy intensamente más de 30 minutos y prolongarse hasta cinco horas, acompañado por náuseas, eructos y <a href="https://www.onmeda.es/sintomas/vomitos.html">vómitos</a>.</p> <p data-adhere="true">Los potenciales factores <strong>desencadenantes</strong> de la <strong>litiasis biliar</strong> son los siguientes:</p> <ul> <li>Alimentos ricos en grasas</li> <li>Comidas copiosas después de mucho tiempo en ayuno</li> <li>Comidas más abundante del habitual</li> </ul> <p data-adhere="true">A veces, aparece una ligera <a href="https://www.onmeda.es/sintomas/ictericia.html">ictericia</a> (color amarillento de las mucosas, la piel y de las uñas). La defecación del paciente suele ser incolora, dado que los colorantes biliares no pueden pasar por los conductos biliares. Además, en estos casos, los pacientes suelen padecer también las llamadas diarreas grasas, ya que por la falta de bilis, no llega a eliminarse la grasa. En la diarrea grasa, la grasa se defeca en grandes cantidades concentradas y líquidas, que, en frío, se solidifican.</p> <h2 data-adhere="true">Diagnóstico</h2> <p data-adhere="true">En el <strong>diagnóstico de la litiasis biliar</strong>, tan solo las molestias del paciente y su historial médico (intolerancia a las comidas grasas, posibles <strong>cólicos biliares</strong>) apuntan a esta enfermedad. A menudo, en el examen clínico, se manifiesta un dolor intenso en la vesícula biliar. Cuando el médico sospecha que puede haber una obstrucción, suele pedir una analítica sanguínea, ya que ciertos parámetros de la sangre se alteran en caso de enfermedad.</p> <p data-adhere="true">Normalmente, cuando los <strong>cálculos biliares</strong> obstruyen la salida de la <strong>vesícula biliar</strong>, éstos pueden reconocerse claramente en una <strong><a href="https://www.onmeda.es/exploracion_tratamiento/ecografia.html">ecografía</a></strong>. Sin embargo, si los cálculos están situados en los conductos biliares intrahepáticos, la sensibilidad de la ecografía no suele ser suficiente para distinguirlos. Lo cálculos son visibles cuando miden al menos 5 milímetros de diámetro. Por este motivo, es frecuente que el <strong>diagnóstico</strong> se determine mediante endoscopia. Es decir, mediante un contraste del conducto biliar defectuoso, en el que previamente se hubiera administrado un producto de contraste. La <strong>endoscopia</strong> se suele llevar a cabo a la vez que un <strong>control de radiografía</strong> (colangiopancreatografía retrógrada endoscópica, denominada <strong>CPRE</strong>).</p> <p data-adhere="true">Otro método seguro para comprobar la existencia de <strong>cálculos biliares</strong> en la vesícula biliar es la llamada colecistografía oral. 
Aquí, los pacientes deben ingerir un producto de <strong>contraste</strong>, antes de someterse a una <strong>radiografía de la vesícula biliar</strong>. En un reconocimiento radioscópico sin producto de contraste sólo llegan a reconocerse los cálculos calcáreos. En un reconocimiento radioscópico con toma de producto de contraste, el médico puede verificar si la salida de la vesícula biliar está obstruida y si es necesario proceder a una <strong>extirpación</strong> de la <strong>vesícula biliar</strong> posterior.</p> <h2 data-adhere="true">Tratamiento</h2> <p data-adhere="true">El <strong>tratamiento</strong> habitual para las <strong>piedras en la vesícula</strong> es <strong>retirar la vesícula</strong> biliar (<strong>colecistectomía</strong>). El <strong>tratamiento</strong> de elección suele ser una <strong>laparoscopia</strong> realizada con la ayuda de un endoscopio, de este modo se consigue evitar una intervención quirúrgica abierta. La <strong>operación</strong> abierta de la <strong>vesícula biliar</strong> ha sido sustituida en los últimos años por este tratamiento poco aparatoso, ya que exige menor tiempo de recuperación. Con la extirpación de la vesícula biliar, la enfermedad se cura prácticamente del todo. Únicamente, en pocos casos, aparecen nuevos <strong>cálculos biliares</strong> (recidivas).</p> <p data-adhere="true">En cualquier caso, sólo las personas que experimentan molestias reciben un <strong>tratamiento</strong>. No tiene ningún sentido médico <strong>extirpar la vesícula biliar</strong> preventivamente. La extirpación apenas altera la calidad de vida del paciente. Aunque la vesícula esté ausente, siempre queda algo de bilis en el intestino. Los pacientes que han sido operadas de la vesícula biliar deben llevar una dieta pobre en grasas.</p> <p data-adhere="true">En la mayoría de los casos, el médico retira los <strong>cálculos biliares</strong> después de un <strong>diagnóstico</strong> de contraste por radiografía (ERCP). A través del endoscopio se introduce una cestita por el conducto biliar, se agarra el cálculo y se extrae.</p> <p data-adhere="true">Por lo general, los métodos de <strong>tratamiento sin operación</strong> son menos exitosos y menos frecuentes. El <strong>tratamiento</strong> medicamentoso con ácidos biliares, mediante el cual, los <strong>cálculos</strong> se pueden <strong>disolver</strong> después de algunos meses, suele presentar recidivas.</p> <p data-adhere="true"></p> <p data-adhere="true">En un 50% de los casos, en un plazo de cinco años, las recidivas vuelven a aparecer también con los métodos más modernos, tales como la <strong>disolución de cálculos</strong> mediante <strong>metil terbutílico</strong> (<strong>MTBE</strong>) o la litotripsia extracorpórea por ondas sonoras.</p> <p data-adhere="true">La ingesta de alimentos pobres en grasas puede resultar de gran ayuda. Estas medidas dietéticas pueden ayudar al paciente a atenuar los <strong>síntomas de los cálculos biliares</strong>. Sin embargo, no remplazan al <strong>tratamiento</strong> curativo. 
Por lo general, una vez extraída la vesícula biliar, no suelen haber demasiadas restricciones en la <a href="https://www.onmeda.es/nutricion/">alimentación</a>.</p> <p data-adhere="true">En el caso del <strong>cólico biliar</strong> agudo, algunos analgésicos y fármacos antiespasmódicos (por ejemplo, butilescopolamina) pueden disminuir las molestias.</p> <h2 data-adhere="true">Evolución</h2> <p data-adhere="true">En su <strong>evolución</strong>, los <strong>cálculos biliares</strong> pueden dar lugar a <strong>complicaciones</strong>. Cuando el conducto de la vesícula biliar está obstruido por cálculos biliares, la complicación más frecuente es la <strong>inflamación de la vesícula</strong> biliar (<strong><a href="https://www.onmeda.es/enfermedades/colecistitis.html">colecistitis</a></strong>). Ésta aparece, cuando el cálculo bloquea la salida de bilis por la vesícula biliar de la vesícula biliar y ésta, altamente concentrada, no puede seguir su flujo. La bilis provoca entonces daños en la mucosa de la vesícula biliar. Dada la posición de la bilis, puede sobre infectarse con bacterias y desencadenarse una infección de la vesícula. Aparte de los <a href="https://www.onmeda.es/enfermedades/dolor.html">dolor</a> cólico, aparecería <a href="https://www.onmeda.es/sintomas/fiebre.html">fiebre</a>. A veces, la vesícula biliar inflamada puede incluso llegar a palparse a través de la pared abdominal siendo esta palpación dolorosa para el paciente.</p> <p data-adhere="true">En los casos de una grave <strong>evolución</strong> de la <strong>inflamación de la vesícula</strong> biliar, se pueden crear abscesos con pus, denominados empiemas vesiculares. En estos casos, el peligro es que los <strong>cálculos biliares</strong> atraviesen la pared de la vesícula biliar hacia en intestino delgado o, a veces, hacia la cavidad abdominal. Cuando los cálculos más grandes llegan a la altura del intestino pueden llegar incluso a obstruir el intestino delgado (íleo biliar). Asimismo, la pared vesicular puede llegar a dañarse aún más por la gangrena de ésta la vesícula biliar, es decir, estaríamos ante una infección grave de la vesícula biliar. La excesiva extensión de la vesícula biliar puede provocar un desgaste del tejido que la rodea. Éste se daña y puede llegar a destruirse.</p> <p data-adhere="true">Cuando un cálculo llega a introducirse en el conducto biliar (ducto colédoco) y bloquearlo, puede provocar una llamada <strong>ictericia obstructiva</strong> (del griego = <a href="https://www.onmeda.es/sintomas/ictericia.html">ictericia</a>). En estos casos, los colorantes biliares que ya no pueden fluir libremente hacia el intestino se concentran en grandes cantidades en la <a href="https://www.onmeda.es/valores_analitica/componentes_de_la_sangre.html">sangre</a> y tiñen los tejidos corporales de tono amarillento. Esta ictericia puede apreciarse sobre todo en la dermis (esclera, la parte blanca del <a href="https://www.onmeda.es/anatomia/anatomia_ojo.html">ojo</a>). 
Más allá, la obstrucción del conducto biliar puede acarrear una grave inflamación de los conductos biliares (colangitis) o una inflamación aguda del páncreas (<a href="https://www.onmeda.es/enfermedades/pancreatitis.html">pancreatitis</a>).</p> <p data-adhere="true">Las recurrentes inflamaciones crónicas de la vesícula biliar por <strong>litiasis biliar</strong> pueden provocar una calcificación de la pared de la vesícula biliar (<strong>vesícula de porcelana</strong>) y contribuir a que se creen <strong><a href="https://www.onmeda.es/enfermedades/colangiocarcinoma.html">tumores de la vesícula biliar</a></strong>.<span><span><br /></span></span></p> <h2 data-adhere="true">Prevención</h2> <p data-adhere="true"><span><span>La </span><strong>litiasis biliar</strong><span> no se puede </span><strong>prevenir</strong><span>. Sin embargo, llevar una </span><a href="https://www.onmeda.es/nutricion/">alimentación</a><span> saludable baja en grasas y rica en fibra, frutas y verduras es imprescindible. Además, se recomienda realizar periódicamente un </span><strong>control del <a href="https://www.onmeda.es/nutrientes/colesterol.html">colesterol</a></strong><span>. Es importante evitar el </span><strong><a href="https://www.onmeda.es/enfermedades/obesidad.html">sobrepeso</a></strong><span> y controlar correctamente otros trastornos metabólicos, como la </span><strong><a href="https://www.onmeda.es/enfermedades/diabetes.html">diabetes mellitus</a></strong><span>. Debe evitarse la dieta absoluta, así como la nutrición artificial (parental) a largo plazo.</span></span></p><nav class="pagination"><strong class="pagination__prev is--disabled"><span>&nbsp;</span></strong><ul class="pagination__pages"><li><a href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula.html"class="is--active">1</a></li><li><a href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula-mas-informacion-1344-2.html">2</a></li></ul><a href="https://www.onmeda.es/enfermedades/piedras_en_la_vesicula-mas-informacion-1344-2.html" title="weiter" class="pagination__next"><span>&nbsp;</span></a></nav> <div id="taboola-below-article-thumbnails-1"></div> <script type="text/javascript"> window._taboola = window._taboola || []; _taboola.push({ mode: 'thumbnails-a', container: 'taboola-below-article-thumbnails-1', placement: 'Below Article Thumbnails 1', target_type: 'mix' }); </script> <div id="taboola-below-article-thumbnails"></div> <script type="text/javascript"> window._taboola = window._taboola || []; _taboola.push({ mode: 'organic-thumbnails-a', container: 'taboola-below-article-thumbnails', placement: 'Below Article Thumbnails', target_type: 'mix' }); </script> <div class="content-teaser"> <div class="grid"> <div class="grid__item is--center-ad"> </div> </div> </div> </div> <div class="l-col l-col--secondary" id="sprout_secondary"> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 12, [], 'Publicidad');</script> <span data-sidebarname="Sidebar Enfermedades"></span><h2 class="sidebar__title"><a href="https://www.onmeda.es/enfermedades/enfermedades_frecuentes.html">Enfermedades frecuentes</a></h2><ul class="linklist"><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/asma.html">Asma</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/diabetes.html">Diabetes Mellitus</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/psoriasis.html">Psoriasis</a></li><li class="linklist__item"><a 
href="https://www.onmeda.es/enfermedades/hipertension_arterial.html">Hipertensión arterial</a></li><li class="linklist__item"><a href="https://www.onmeda.es/enfermedades/obesidad.html">Obesidad</a></li></ul><hr class="divider"/><div class="promo-item-featured" data-clickable="true"> <div class="promo-item__thumb"> <a href="https://www.facebook.com/Onmeda.es/" class="promo-item__link"> <img src="https://i.onmeda.de/es/facebook-onmeda.jpg" alt="Facebook de Onmeda.es"/> </a> </div> <!-- /.promo-item__thumb --> <div class="promo-item__content"> <h2 class="promo-item__title"><a href="https://www.facebook.com/Onmeda.es/" class="promo-item__link">¡Nuestra página de Facebook!</a></h2><p>¡Síguenos también en Facebook y descubrirás mucha más información interesante sobre salud!</p> </div> <!-- /.promo-item__content --> </div> <h2 class="sidebar__title">Los foros más visitados</h2><!-- linklist-sitebar --> <ul class="linklist-sitebar"> <li><h3><a href="https://www.onmeda.es/foros/nutrición-y-dietas">Foro de Nutrición</a></h3><p>Alimentación sana, recomendaciones para perder peso, dietas peligrosas. Sal de dudas con nuestra experta. </p></li><li><h3><a href="https://www.onmeda.es/foros/sexualidad">Foro de Sexualidad</a></h3><p>Tener una vida sexual satisfactoria es fundamental para tener una buena vida de pareja. ¿Tienes alguna pregunta?</p></li><li><h3><a href="https://www.onmeda.es/foros/otorrinolaringología">Foro de Otorrinolaringología</a></h3><p>Otitis media, una amigdalitis o una afonía son enfermedades otorrinolaringológicas. Consulta con nuestro experto.</p></li></ul> <!-- / linklist --> <p class="link-more"><a href="https://www.onmeda.es/foros/ ">Todos los foros</a></p><p class="link-more"><a href="https://www.onmeda.es/foros/register?urlpath=aHR0cDovL3d3dy5vbm1lZGEuZXMvZm9yb3Mv">¡Regístrate y participa!</a></p><div class="promo-item-featured" data-clickable="true"> <div class="promo-item__thumb"> <a href="https://www.instagram.com/onmeda/" class="promo-item__link"> <img src="https://i.onmeda.de/es/instagram-onmeda.jpg" alt="Instagram Onmeda"/> </a> </div> <!-- /.promo-item__thumb --> <div class="promo-item__content"> <h2 class="promo-item__title"><a href="https://www.instagram.com/onmeda/" class="promo-item__link">¡Síguenos en Instagram!</a></h2><p>¡En nuestras redes sociales descubrirás muchos más consejos y cosas curiosas sobre salud. 
¡Te sorprenderás!</p> </div> <!-- /.promo-item__content --> </div> </div> <div class="l-clear"></div> </section> <div class="footer" style="margin: 10px;"> <hr class="divider" /> <script type="text/javascript">onmeda_ads.docwrite_formatid_if_resolution_gt_or_eq(851, 3243, ["4040"], 'Publicidad');</script> <hr class="divider" /> <section class="footer-global__socialmedia"> <div class="grid-2up"> <article class="grid__item footer-global__socialmedia--text"> <a href="http://www.facebook.com/Onmeda.es" target="_blank"><span data-site-icon="L"></span>Síguenos en Facebook</a> </article> <article class="grid__item footer-global__socialmedia--text"> <a href="https://twitter.com/Onmeda_es" target="_blank"><span data-site-icon="t"></span>Síguenos en Twitter</a> </article> </div> </section> <!-- /.footer-global__logo-items --> <section class="footer-global__logo-items"> </section> <!-- /.footer-global__logo-items --> <section class="footer-global__logo-items"> <div class="grid-2up"> <article class="grid__item footer-global__logo-item--text"> <a rel="nofollow" onclick="window.open(this.href); return false;" href="https://wma.ssl.comb.es/seal.php?INTWMA=2435&amp;idi=esp" target="_blank"> <!-- <img src="https://wma.comb.es/imglogo.php?INTWMA=2435&amp;size=small" border="0" alt="Web Médica Acreditada. Ver más información" longdesc="http://wma.comb.es/esp/presentacio.htm" title="Web Médica Acreditada. Ver más información"> --> </a> <p> Onmeda cumple y respeta las reglas de conducta ética y deontológica deseables para la comunidad médica. </p> </article> <!-- /.footer-global__logo-item --> <article class="grid__item footer-global__logo-item--text"> <a rel="nofollow" onclick="window.open(this.href); return false;" href="http://www.portalesmedicos.com/web_interes_sanitario/120202.htm" target="_blank"> <img border="0" alt="web interes sanitario" src="https://i.onmeda.de/es/logo_wis_80x36.jpg" title="web interes sanitario"> </a> <p>Onmeda está certificada como web de interés sanitario porque cumple sus principios generales y el código ético</p> </article> <!-- /.footer-global__logo-item --> </div> <div class="grid-2up"> <article class="grid__item footer-global__logo-item--text"> <a onclick="window.open(this.href); return false;" href="https://www.healthonnet.org/HONcode/Spanish/?HONConduct435283"> <img alt="Nosotros subscribimos Los Principios del código HONcode de la Fundación Salud en la Red" title="Nosotros subscribimos Los Principios del código HONcode de la Fundación Salud en la Red" style="border:0px; width: 49px; height: 72px; float: left;" src="https://www.honcode.ch/HONcode/Seal/HONConduct435283_s.gif"></a> <p>Nosotros subscribimos los <a onclick="window.open(this.href); return false;" href="http://www.healthonnet.org/HONcode/Spanish/"> Principios del código HONcode</a>. 
<br><a onclick="window.open(this.href); return false;" href="https://www.healthonnet.org/HONcode/Spanish/?HONConduct435283">Compruébelo aquí.</a> </p> </article> <!-- /.footer-global__logo-item --> <article class="grid__item footer-global__logo-item--text"> <a rel="nofollow" onclick="window.open(this.href); return false;" href="http://www.medicina21.com/doc2.php?op=sello_mostrar2&idsello=964" target="_blank"><img src="https://www.medicina21.com/images/sello_m21.gif" alt="Sello de calidad Medicina XXI" border="0"></a> <p>Onmeda respeta el interés general y el derecho a desarrollar la sociedad de la información en el ámbito de la salud.</p> </article> <!-- /.footer-global__logo-item --> </div> </section> <!-- /.footer-global__logo-items --> <section class="footer-global__disclaimer"> <h2>Aviso legal:</h2> <p>© Copyright © 2018 Enfemenino.com - Todos los contenidos publicados en el portal de salud y medicina <a href="/">Onmeda.es</a> tienen un carácter puramente informativo y no sustituyen en ningún caso la consulta médica o el asesoramiento profesional. Onmeda.es tampoco sugiere el autodiagnóstico y la automedicación. En caso de duda consulta nuestra sección de <a id="HAFTUNG_DISCLAIMER" href="/exencion_de_responsabilidad.html" rel="nofollow">exención de responsabilidad</a>. </p> </section> <!-- /.footer-global__disclaimer --> <section class="footer-global__link-items"> <div class="grid"> <nav class="grid__item footer-global__link-item footer-global__link-item--col"> <ul> <li><a href="https://www.onmeda.es/sobre_nosotros.html">Sobre Nosotros</a></li> <li><a href="https://www.onmeda.es/contacto.html">Contacto</a></li> <li><a href="https://www.onmeda.es/sitemap.html">Mapa del sitio</a></li> </ul> </nav> <!-- /.footer-global__link-item--col --> <nav class="grid__item footer-global__link-item footer-global__link-item--col"> <ul> <li><a rel="nofollow" href="https://www.onmeda.es/exencion_de_responsabilidad.html" id="DATENSCHUTZ">Exención de responsabilidad</a></li> <li><a rel="nofollow" href="https://www.onmeda.es/equipo.html" id="IMPRESSUM">Equipo</a></li> </ul> </nav> <!-- /.footer-global__link-item--col --> <nav class="grid__item footer-global__link-item footer-global__link-item--col"> </nav> <!-- /.footer-global__link-item--col --> </div> </section> <!-- /.footer-global__link-items --> <!-- <section class="footer-global__legal"> <ul> <li><a target="_blank" href="http://www.enfemenino.com/"><img border="0" alt="" src="https://i.onmeda.de/ts/logo-enfemenino2014.png"></a></li> </ul> <p>© 2018 Enfemenino.com</p> </section>--> <!-- /.footer-global__legal --> </div> </div> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script> <script>window.jQuery || document.write('<script src="https://i.onmeda.de/core/assets/scripts/libs/jquery/jquery-1.9.1.min.js"><\/script>')</script> <!-- sprout's core modules and js --> <script src="https://i.onmeda.de/javascripts/app-core.js" type="text/javascript"></script> <!-- sprouts instance's/local modules and js --> <script src="https://i.onmeda.de/javascripts/app-local.js" type="text/javascript"></script> <script src="https://i.onmeda.de/shariff/complete.js"></script> <script> onmeda.callback._trackers.ga = new onmeda.tracker.ga(); onmeda.callback._trackers.ga.set_account('UA-115586-14'); onmeda.callback._trackers.ga.enable_v2(); </script> <script type="text/javascript">onmeda.set('client_device', 'DESKTOP');</script> <div id ="wallpaper"></div> <script type="text/javascript" language="javascript">try { af_reftrack('onmeda.es'); } catch 
(e) { }</script> <script type="text/javascript"> (function() { var hm = document.createElement('script'); hm.type ='text/javascript'; hm.async = true; hm.src = ('++u-heatmap-it+log-js').replace(/[+]/g,'/').replace(/-/g,'.'); var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(hm, s); })(); </script> <script> setTimeout(function(){ var v=document.visibilityState; if(v && v!='visible')document.location=document.location.href; },300000) </script> <script type="text/javascript"> function _eStat_Whap_loaded_func(){ eStatWhap.serial("800000206973"); eStatWhap.send(); } (function() { var myscript = document.createElement('script'); myscript.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'w.estat.com/js/whap.js'; myscript.setAttribute('async', 'true'); var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(myscript, s); })(); </script> <!-- Quantcast Tag --> <script type="text/javascript"> var _qevents = _qevents || []; (function() { var elem = document.createElement('script'); elem.src = (document.location.protocol == "https:" ? "https://secure" : "http://edge") + ".quantserve.com/quant.js"; elem.async = true; elem.type = "text/javascript"; var scpt = document.getElementsByTagName('script')[0]; scpt.parentNode.insertBefore(elem, scpt); })(); _qevents.push({ qacct:"p-_ttKUxC0CDauZ" }); </script> <noscript> <div style="display:none;"> <img src="//pixel.quantserve.com/pixel/p-_ttKUxC0CDauZ.gif" border="0" height="1" width="1" alt="Quantcast"/> </div> </noscript> <!-- End Quantcast tag --> <script type="text/javascript"> window._taboola = window._taboola || []; _taboola.push({article: 'auto'}); !function (e, f, u, i) { if (!document.getElementById(i)) { e.async = 1; e.src = u; e.id = i; f.parentNode.insertBefore(e, f); } }(document.createElement('script'), document.getElementsByTagName('script')[0], 'https://cdn.taboola.com/libtrc/onmeda-onmedaes/loader.js', 'tb_loader_script'); if (window.performance && typeof window.performance.mark == 'function') { window.performance.mark('tbl_ic'); } window._taboola = window._taboola || []; _taboola.push({flush: true}); $.getScript('https://www.aufeminin.com/reloaded/static/Front/Vendor/auf/cmp/cmp.js', function() { }); </script> </body> </html> <file_sep>/Experimento1/getAllPages.py import time import os with open("paginas.txt","r") as f: links=f.readlines() for l in links: os.system( "wget "+l) time.sleep(20) <file_sep>/Experimento2/webDownloader.py import nltk import sys from urllib.request import urlopen from bs4 import BeautifulSoup import os def gethref(texto,lim1,lim2): cond=False salida=[] for lineas in texto: aux=lineas.get("href") if aux==lim1: cond=True elif cond: if aux==lim2: return salida else: salida.append(aux) return salida archivo=open(sys.argv[1],"w") limites=["https://www.onmeda.es/enfermedades/enfermedades__xyz.html","https://www.onmeda.es/enfermedades/enfermedades_frecuentes.html"] dirBase="./paginasBase/enfermedades__" letras=["az"]+[chr(i) for i in range(ord("b"),ord("w")+1)]+["xyz"] for l in letras: url = dirBase+l+".html" html_doc = open(url,"r").read() soup = BeautifulSoup(html_doc, 'html.parser') link = soup.find_all('a') archivo.write( "\n".join(gethref(link,limites[0],limites[1]))) <file_sep>/Experimento4/main4.py #!/usr/bin/env python # coding: utf-8 def leer(corpus): with open(corpus,"r") as f: for line in f: yield line.split("|",1) def getData(coo_matrix,feature_names): puntos=[] nombres=[] for score,idx in zip(coo_matrix.data,coo_matrix.col): 
aux=feature_names[idx].rstrip('sao') if aux not in nombres: puntos.append(score) nombres.append(aux) elif score>puntos[nombres.index(aux)]: #aqui podemos ver que tomo el tf más grande de todos #principal diferencia con el experimento 5 puntos[nombres.index(aux)]=score tup=zip(nombres,puntos) return sorted(tup,key=lambda x: x[1],reverse=True) import unicodedata import numpy as np def doc2Vec(doc,vocab,funcion): palabras=list(map(lambda x: unicodedata.normalize('NFKD',x).rstrip('sao'),doc.split())) vector=[] for key,value in vocab.items(): vector.append(funcion(palabras.count(key))*value) return np.array(vector).reshape(1,-1) from sklearn.feature_extraction.text import TfidfVectorizer import pickle import time from math import exp def crearVectores(archCorpus,archPickle,funcion): # se usa un set de palabras que deben detenerse, lo encontré en este repo. # https://github.com/stopwords-iso/stopwords-es with open("../stopwords-es-master/stopwords-es.txt","r") as f: stopwords=f.read().splitlines() corpus=list(leer(archCorpus)) # leemos el corpus vectorizador=TfidfVectorizer(smooth_idf=True, #aplicamos un suavizado del idf, no afecta mucho al resultado use_idf=True, # usamos idf stop_words=stopwords, # le damos el set de palabras a deterner min_df=2, # queremos que las palabras aparezcan al menos en dos documentos #si cambiamos esto a 1, pone muchas palabras que solo hacen el programa mas lento strip_accents='ascii') # le aplica una normalización a las palabras palabras=vectorizador.fit_transform(map(lambda x:x[1],corpus)) #aqui se aplica el tf-idf feature_names=vectorizador.get_feature_names() #se obtine el vocab keywords=dict(getData(palabras.tocoo(),feature_names)) # obtenemos el vocab relacionado con sus tf-idf vectores={} cont=0 total=len(corpus) ti=time.time() for c in corpus: #para cada documentos c en el corpus vectores[c[0]]=doc2Vec(c[1],keywords,funcion) # se transforma a un vector print("llevo ",cont," de ",total,end="\r") # contamos cuantos lleva cont+=1 print("Me tarde : ",time.time()-ti) pickle.dump([vectores,keywords],open(archPickle,"wb")) #lo guardamos en un pickle para despues return [vectores,keywords] #regresamos los vectores y el vocab from scipy.spatial import distance def predecir(archSintomas,vectores,vocab,funcion): sintomas=open(archSintomas,"r").read() vector=doc2Vec(sintomas,vocab,funcion) docs=[] for n,v in vectores.items(): # docs.append([n,distance.cdist( vector,v, 'wminkowski',w=np.random.rand(v.shape[0]))]) # docs.append([n,distance.cdist( vector,v, 'matching')]) # docs.append([n,distance.cdist( vector,v, 'braycurtis')]) # docs.append([n,distance.cdist( vector,v, 'canberra')]) # docs.append([n,distance.cdist( vector,v, 'chebyshev')]) # docs.append([n,distance.cdist( vector,v, 'jaccard')]) # docs.append([n,distance.cdist( vector,v, 'correlation')]) # docs.append([n,distance.cdist( vector,v, 'sqeuclidean')]) # docs.append([n,distance.cdist( vector,v, 'cityblock')]) docs.append([n,distance.cdist( vector,v, 'cosine')]) #es la que mejor sirve # docs.append([n,distance.cdist( vector,v, 'euclidean')]) docs.sort(key=lambda x:x[1]) return docs import sys funcion=lambda x:1/(1+exp(5*(1-x))) try: vectores,vocab=pickle.load(open(sys.argv[1],"rb")) ti=time.time() dists=predecir(sys.argv[2],vectores,vocab,funcion) print("Me tarde : ",time.time()-ti) except Exception as e: print(e) try: vectores,vocab=crearVectores(sys.argv[1],sys.argv[2],funcion) dists=predecir(sys.argv[3],vectores,vocab,funcion) except Exception as ex: print(ex) traceback.print_exc(file=sys.stdout) 
print("Debes dar un archivo donde esten los vectores y los sintomas o el corpus y donde quieres guardar los vectores y los sintomas") print("ejemplo:\tpython3 main.py datosLimpios.csv vectores.pkl misSintomas.txt\n\t\tpython3 main.py vectores.pkl misSintomas.txt" ) exit() for i in range(10): print(dists[i]) <file_sep>/Experimento2/utilidades.py from unicodedata import normalize import re def noCaracteresEspeciales(texto): # -> NFD y eliminar diacríticos # -> NFC texto=re.sub('\W',' ',texto) # texto = normalize( 'NFC', texto) # print(normalize('NFKC',texto)) return texto.lower() def textoALista(texto): listaaux=texto.split() listaaux.sort() lista=[] for i in listaaux: if i not in lista: lista.append(i) return lista print( noCaracteresEspeciales("holá , como estas"))<file_sep>/Experimento1/prueba.py di={1:2,3:2,32:1} def printD(di): s = [(k, di[k]) for k in sorted(di, key=di.get, reverse=True)] print("\n".join(map(lambda x:" -> ".join(map(str,x)),s))) printD(di)<file_sep>/Experimento2/getWebPages.py # Programa actualizado para e experimento import time import os import sys l=["az"]+[chr(i) for i in range(ord("b"),ord("w")+1)]+["xyz"] for i in l: dir=("https://www.onmeda.es/enfermedades/enfermedades__%s.html"%i) # print ("wget "+dir+ " -o ./paginasBase/") os.system("wget "+dir) time.sleep(10) <file_sep>/Experimento1/getInfoFromPages.py import nltk import sys from urllib.request import urlopen from bs4 import BeautifulSoup import os from utilidades import * import csv csvfile=open("datos.csv","w",newline='') dataWriter=csv.writer(csvfile, delimiter=' ',quotechar='|', quoting=csv.QUOTE_MINIMAL) listaDeSecciones=["Síntomas"] #la informacion que deseamos sacar dirBase="./paginasEspecificas/" # dirBase="" for nombre in open(dirBase+"htmls.txt","r").read().splitlines(): # input("hola") # print(nombre) fhtml=open(dirBase+nombre).read() soup = BeautifulSoup(fhtml,'html.parser') secciones = soup.find_all('h2',attrs={'class':'c'}) ant=0 act=0 for s in secciones: if ant==0: pass else: if s.text.strip() in listaDeSecciones: html_aux=fhtml[ant:act] temp_soup=BeautifulSoup(html_aux,'html.parser') # print(temp_soup.get_text()) dataWriter.writerow([nombre,noCaracteresEspeciales(temp_soup.get_text())]) ant,act=act,fhtml.find(str(s)) <file_sep>/Experimento1/readme.md # Experimento 1 Este experimento se hizo usando directamente los embedding ya generados a partir del corpus http://crscardellino.github.io/SBWCE/ su desempeño es relativamente bueno.<file_sep>/Experimento2/README.md # Experimento 2 En el experimento 2 no hay mucho que decir, en general se intentaron usar los embeddings para predecir que enfermedad es, el mayor cambio es la obtención del segundo corpus, que es más grande, se obtuvo de manera similar que en el experimento 1
ad335203fa7f2c1a0842f30e51e61844ce370dc2
[ "Markdown", "Python", "HTML" ]
23
Python
sanchezmaxar/DiagnosticadorMedico
fc57d481c4df6591f126f4b4e13c6df23e074c66
320c3c47c2019652bd7a141e87f0d2e363493d9e
refs/heads/master
<repo_name>ziad-abdo/n_queens<file_sep>/n_queens.rb BOARD_A = [ ["Q....", "..Q..", "....Q", ".Q...", "...Q."], ## valid ["Q....", "..Q..", "....Q", ".....", "....."], ## valid [".Q...", "...Q.", "Q....", "..Q..", "....Q"], ## valid [".Q...", "....Q", "Q....", "..Q..", "....Q"], ## invalid ["Q...", "..Q.", "....", "...."] ## valid ] # [Q . . . .] # [. . Q . .], # [. . . . Q], # [. . . . .], # [. . . . .] # [0, 7, 14, 16, 21, 23] # [Q . . . .] # [. . Q . .] # [. . . . Q] # [. Q . . .] # [. . . Q .] # cd = (column - @column).abs # rd = (row - @row).abs # return true if cd == rd # 0 1 2 3 4 # 0 [. Q . . .] 2 -1 =1, 1-0 = 0 # 1 [. . Q . .] # 2 [Q . . . .] # 3 [. . Q . .] # 4 [. . . . Q] # [. . Q . .] # [Q . . . .] # [. . . Q .] # [. Q . . .] # [. . . . Q] unrecorded # [1,4,0,2,4] # [1, 9, 10, 17, 24] # [. Q . . .] 1 diagonals = 5, 7, 13, 19 # [. . . . Q] 9 diagonals = 3, 13, 17, 21 # [Q . . . .] 10 diagonals = 2, 6, 16, 22 # [. . Q . .] 17 diagonals = 21, 23, 11, 13, 5, 9, # [. . . . Q] 24 diagonals = 18, 12, 6, 0 def old_valid_board?(ary) queen_placements = ary.map{|x| x.index("Q")} ## could check duplicates here for fast return board_as_string = ary.join('').split('') invalid_spaces = [] nqueens = ary.size queen_placements.each_with_index do |q, idx| next if q == nil board_max = nqueens*nqueens -1 qplace = idx*nqueens + q horizontal = (idx*nqueens)...(idx*nqueens+nqueens) horizontal = horizontal.to_a horizontal.delete(q+(idx*nqueens)) invalid_spaces = invalid_spaces + horizontal vertical = [] nqueens.times{|i| vertical << (q + nqueens*i) } vertical.delete(q+(idx*nqueens)) invalid_spaces = invalid_spaces + vertical diagonal = [] go_left = (1..q).to_a go_left.each do |i| top_left = qplace - (nqueens+1)*i bottom_left = qplace + (nqueens-1)*i diagonal << top_left if top_left >= 0 diagonal << bottom_left if bottom_left <= board_max end go_right = (1..(nqueens-(q+1))) go_right.each do |i| top_right = qplace - (nqueens-1)*i bottom_right = qplace + (nqueens+1)*i diagonal << top_right if top_right >= 0 diagonal << bottom_right if bottom_right <= board_max end invalid_spaces = invalid_spaces + diagonal end queens_numerical_placement = queen_placements.compact.each_with_index.map{|x,i| i*nqueens + x} invalid_spaces.each do |s| if queens_numerical_placement.include?(s) return false end end return true end def valid_board?(board) twod_board = board.map{ |x| x.split("") } twod_board.each_with_index do |row, idx| curr_col = row.index("Q") next if curr_col == nil twod_board.each_with_index do |other_row, other_idx| next if idx == other_idx other_col = other_row.index("Q") next if other_col == nil cd = (other_col - curr_col).abs rd = (other_idx - idx).abs return false if cd == rd end end true end def solve_n_queens(n) board_results = [] ## place queen on first point of row ## if bad placement, remove and place in next spot ## if good placement try the same with the next row ## once queen is in valid spot do the same with the next row of the board place_queen_on_row = lambda do |row, board, cols_used| if board.all?{|row| row.include?("Q") } board_results = board_results + [board.clone] return end board.size.times do |i| next if cols_used.include?(i) split_row = board[row].split('') split_row[i] = "Q" board[row] = split_row.join('') if valid_board?(board) cols_used << i place_queen_on_row.call(row+1, board, cols_used) end cols_used.delete(i) board[row] = "."*board.size end end base_board = [] n.times{ base_board << "." 
* n }
  place_queen_on_row.call(0, base_board, [])
  board_results
end

def solve_with_time(n)
  start = Time.now()
  solve_n_queens(n)
  stop = Time.now()
  puts (stop - start)
end
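The comments in n_queens.rb spell out the search strategy: place a queen row by row, skip columns that are already taken, and undo a placement as soon as it cannot lead to a full board. The same backtracking idea, sketched in Python for brevity (the function name and the demo call are illustrative):

```python
# Row-by-row backtracking sketch of the strategy described in n_queens.rb:
# try each free column on the current row, recurse to the next row, and
# undo the placement when the branch fails.
def solve_n_queens(n):
    solutions, cols, board = [], set(), []   # board[row] = queen's column

    def safe(row, col):
        # diagonal clash when |column difference| equals |row difference|
        return all(abs(col - c) != row - r for r, c in enumerate(board))

    def place(row):
        if row == n:
            solutions.append(board.copy())
            return
        for col in range(n):
            if col in cols or not safe(row, col):
                continue
            board.append(col)
            cols.add(col)
            place(row + 1)
            board.pop()
            cols.remove(col)

    place(0)
    return solutions

print(len(solve_n_queens(5)))   # 10 distinct placements on a 5x5 board
```

The Ruby version records each completed board as an array of strings and checks diagonals with the same |column difference| == |row difference| test used in valid_board?.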
5cb9052de64cda30dbb60293602339765ee71ecf
[ "Ruby" ]
1
Ruby
ziad-abdo/n_queens
99e306c2831f358677738dc52fa713d414063a03
5b0a6fc6f0bbbc3ac45fb81f28547208e7c8a256
refs/heads/master
<file_sep>gapminder= read.csv("data/gapminder-FiveYearData.csv") source(gapminder) source("scripts/DF-gapminder.R") str(gapminder) head(gapminder) colnames(gapminder) dim(gapminder) names(DF-gapminder)= c("País", "Año" , "Pobación", "Continente", "Expect", "PBI") gapminder names(gapminder)= c("País", "Año" , "Pobación", "Continente", "Expect", "PBI") head (gapminder) x <- c(5.4, 6.2, 7.1, 4.8, 7.5) names(x) <- c('a', 'b', 'c', 'd', 'e') x X[c(-1,-5)] x x(-1,-5) x[-1,-5] x[c(-1, -5)] x[c(2, 3, 4)] x[-c(1, 5)] head(gapminder) gapminder[c([1],"argentina")] gapminder=="Argentina", TRUE gapminder[which(names(País) == "Argentina")] x <- c(5.4, 6.2, 7.1, 4.8, 7.5) names(x) <- c('a', 'b', 'c', 'd', 'e') print(x) x[-which(names(x) == "g")] names(x) =="g" gapminder[which(gapminder$País) == "Argentina"] gapminder[which(names(gapminder) == c("País", "Argentina")] gapminder[gapminder$País = "Argentina",] x [x > 4 & x < 7] x47 = x[x > 4 & x < 7] x47 levels (x47) if (any (gapminder$Año == 2012)){ print("hay 2002 loco") } else print("no hay che") for(i in seq(from= 1, to= 10, by= 2-1)){print (i)} MatrixSalida= matrix(nrow =5, ncol=5) vector_j = c("a", "b", "c", "d", "e") for (i in 1:5){ for(j in 1:5){ valor_j_temp =vector_j[j] salida_temp =paste(i, valor_j_temp) MatrixSalida[j,i] = salida_temp } } MatrixSalida vector_salida = as.vector(MatrixSalida) vector_salida vector_salida2 <- vector(mode = "character") for (i in 1:5){ for (j in c('a', 'b', 'c', 'd', 'e')){ salida_temporal <- paste(i, j) vector_salida2 <- c(vector_salida2, salida_temporal) } } vector_salida2 vector_salida gapminder[gapminder$Continente=="Asia",mean "Expect", ] mean(gapminder$Expect[gapminder$Continente=="Asia"]) media_asia= mean(gapminder$Expect[gapminder$Continente=="Asia"]) media_america = mean(gapminder$Expect[gapminder$Continente=="Americas"]) media_africa = mean(gapminder$Expect[gapminder$Continente=="Africa"]) media_europa = mean(gapminder$Expect[gapminder$Continente=="Europa"]) media_america continentes= levels(gapminder$Continente) for (continente in continentes){ media= mean(gapminder$Expect[gapminder$Continente== continente]) if (media > 70) { print(paste("en",continente,"lamedia es mayor a 70")) } else {print(paste("en",continente,"lamedia es menor a 70")) } } paises= levels(gapminder$País) for (paises2 in paises){ media= mean(gapminder$Expect[gapminder$País== paises2]) if (media < 50) { print(paste("En",paises2, "la media es menor a 50")) } else if (media > 50 & media < 70) { print(paste("En",paises2, "la media es mayor a 50 y menor que 70")) } else print(paste("En",paises2, "la media es mayor a 70")) } library(ggplot2) ggplot(gapminder,aes(x=PBI, y=Expect)) + geom_point() head (gapminder) ggplot(data = gapminder, aes(x = Año, y = Expect,by = País)) + geom_line() ejercicio2 + geom_line(aes(color=Continente, by=País)) ejercicio2 + scale_x_log10() ejercicio2 =ggplot(data = gapminder, aes(x = Año, y = Expect,by = País)) + geom_line ggplot(gapminder,aes(x=PBI, y=Expect)) + geom_point()+scale_x_log10() ggplot(gapminder,aes(x=PBI, y=Expect)) + geom_point(alpha = 0.1,color="red")+scale_x_log10() ggplot(gapminder,aes(x=PBI, y=Expect)) + geom_point(alpha = 0.1,color="red")+scale_x_log10()+geom_smooth()+geom_smooth(method = "lm",color = "green") ?loess ggplot(gapminder,aes(x=PBI, y=Expect, color = Continente)) + geom_point(size = 2,alpha = 0.5)+scale_x_log10()+geom_smooth()+geom_smooth(method = "lm") ggplot(gapminder,aes(x=PBI, y=Expect)) + geom_point(alpha = 0.1,color="red")+scale_x_log10()+geom_smooth()+geom_smooth(method = 
"lm",color = "green") starts.with <- substr(gapminder$País, start = 1, stop = 1) az.País <- gapminder[starts.with %in% c("A", "Z"), ] ggplot(data = az.País, aes(x = Año, y = Expect, color = Continente)) + geom_line() + facet_wrap( ~ País) ggplot(data = az.País, aes(x = Año, y = Expect, color = Continente)) + geom_line() + facet_wrap( ~ País) + xlab("Year") + ylab("Life expectancy") + ggtitle("Figura 1") + scale_colour_discrete( name = "Continente") + theme(strip.text = element_text(size = 13)) + theme_dark() ggplot(data = az.País, aes(x = Año, y = Expect, color = Continente)) + geom_line() + facet_wrap( ~ País) + xlab("Year") + ylab("Life expectancy") + ggtitle("Figura 1") + scale_colour_manual( name = "Continente", values = c(Africa = "red", Americas = "blue", Asia = "yellow", Oceania = "orange", Europa = "purple"))+ theme(axis.text.x= element_text(angle = 90, hjust = 1)) pop= (gapminder$Pobación/1000000) pop gapminder = cbind(gapminder,pop) head (gapminder) ggplot(gapminder,aes(x=Año, y=pop, color = Continente)) + geom_point() head(gapminder) m= matrix(1:12, nrow = 3, ncol =4) m m2= m*5 m2 m3=m%*% 5 m^-1 m* c(1,0,-1) m> c(0,20) m *c(1,0,-1,2) vector1 = 1:100 vector2 = 1:10000 vectorsuma= sum (vector1**-2) vectrosuma vectorsuma vecsum2= sum (vector2**-2) vecsum2 ddply( .data = calcPBI(gapminder), .variables = "continent", .fun = function(x) mean(x$gdp) ) <file_sep>Paste() ?paste() paste ("juan", c('juan'sep = ) ??csv readcsv ??read.csv() read.csv(file= "data/gapminder-FiveYearData.csv") read.csv(file="data/datos-felinos.csv") gatos = read.csv(file="data/datos-felinos.csv") ls(gatos) InsectSprays gatos gatos$pelaje typeof(gatos$peso) read.csv("data/datos-felinos2.csv") gastos2 =(read.csv("data/datos-felinos2.csv")) gatos2 gatos2 gastos2 gatos2=gastos2 gatos2 gatos2$peso typeof(gatos2$peso) mi_vector = vector(length = 3) mi_vector otro_vector <- vector(mode = 'character', length = 3) otro_vector str(otro_vector) str(gatos) str(gatos2) edad = c(2,3,5) edad cbind(gatos, edad) gatos3 = cbind(gatos, edad) nueva_fila = list("carey", 3.3, TRUE,9) nueva_filarbind rbind(gatos, nueva_fila) levels(gatos$pelaje) rbind(gatos ,nueva_fila) as.numeric(character_vector) character_vector = c("0", "2", "3") character_vector as.numeric(character_vector) letters vector1_26 = (1:26) vector1_26 vector1_26 = vector1_26*2 names(vector1_26)= c(letters) vector1_26 str(gatos$pelaje) gatos ?read.csv default.stringsAsFactors() gatos = read.csv("data/datos-felinos.csv", stringsAsFactors= FALSE) typeof(gatos$pelaje) class(gatos) as.list(gatos) gatos(1) gatos[1] gatos[[1]] gatoa$pelaje gatos$pelaje gatos["pelaje"] gatos[1,1] gatos[,1] gatos[1, ] gatos["pelaje"] gatos matrix_example = matrix(0, ncol = 6, nrow = 3) metrix_example matrix_example = matrix(0, ncol = 6, nrow = 3) metrix_example trinity= matrix(1:50, nrow = 10, ncol = 5) trinity ?matrix list? 
?list ej6=list(data_type = c("integer", "double", "logical","complex", "character"), data_structures= c("vector", "matrix", "list", "data.freame")) ej6 ej7 = matrix(c(4, 9, 10, 1, 5, 7), nrow = 2) ej7 gatos2 gatos3 gatos levels levels(gatos$pelaje) nueva_fila = list("carey", 3.3, TRUE, 9) gatos = rbind(gatos, nueva_fila) gatos gatos$edad = c(4,5,8,9) gatos gatos$vidas=c(7,5,8,6) gatos ?edit gatos ls gatos[,-1] gatos[-1,] nuevafila3 = list("persa", 3.3 , TRUE,9,3) gatos = rbind(gatos,nuevafila3) gatos na.omit=gatos gatos gatos = na.omit(gatos) gatos df <- data.frame(Nombre = c('Ramiro',"Eugenia"), Apellido = c("López","Raffi"), Suerte = c("48","7"), stringsAsFactors = FALSE) df df$recreo df df$recreo = c("si","si") df cbind(df, recreo=c("si", "si")) df levels(df) agrgar = list("Paula","Sotelano", "4", "claramente") rbind (df,agrgar) df = rbind (df,agrgar) df gapminder = read.csv("data/gapminder-FiveYearData.csv") gapminder install.packages("readxl") library (readxl) str(gapminder) length(gapminder) nrow(gapminder) names(gapminder) = c("Pais", "Años", "Poblacion", "Continente", "Expectativas de vida", "PBI") head(gapminder) str(gapminder) typeof(gapminder$Poblacion)gapminder agrgar2 = list("Erika","Bedoya", "7", "si") df= rbind (df, agrgar2) df ??"%IN%" set.speed(1) morfeo = matrix(rnorm(6*4), ncol=4, nrow= 6) morfeo morfeo [5,2] morfeo[, c(3, 4)] xlist <- list(a = "UNTDF", b = 1:10, data = head(iris)) xlist[1] xlist[2] xlist[3] xlis xlist xlist["a"] xlist[[1]] gapminder[["País"]][["Argentina"]] gapminder [gapminder$País=="Argentina",] gapminder [8:10] gapminder [5] gapminder [1] xlist[["b"]][[2]] gapminder[["País"]][["Argentina"]] mod <- aov(Pobación ~ Expect, data=gapminder) mod attr(mod) gapminder[gapminder$Año == 1957,] head (gapminder[, -c(1:4)]) gapminder View(gapminder) library(plyr) <file_sep>mi_suma <- function(a, b) { suma <- a + b return(suma) } mi_suma(a= 5, b=4) kelvin_a_fahr <- function(temp) { fahr <- (temp - 273.15) * (9/5) + 32 return(fahr) } celsius_a_kelvin <- function(temp){ kelvin <- temp + 273.15 return(kelvin) } temp = 15 kelvin_a_fahr(temp) celsius_a_kelvin(450) celsius_a_Fahr = function (temp) { CelFahr= (temp) * (9/5) +32 return(CelFahr) } Cel_a_Fahr = function (temp){ primera= celsius_a_kelvin(temp) segunda= kelvin_a_fahr (primera) return(segunda) } Cel_a_Fahr (20) gapminder head (gapminder) calPBI= function (dat, Año = NULL, País,) if(!is.null(Año)){ data = dat } calcPBI <- function(dat, Año=NULL, País=NULL) { if(!is.null(Año)) { dat <- dat[dat$Año %in% Año, ] } if (!is.null(País)) { dat <- dat[dat$País %in% País,] } gdp <- dat$Pobación * dat$PBI new <- cbind(dat, gdp = gdp) return(new) } new head (calcPBI) calcPBI(head(gapminder)) head(calcPBI(gapminder, Año = 2007)) head(calcPBI(gapminder, País = "Argentina")) head(calcPBI(gapminder, País = "New Zealand", Año = 1987)) head(calcPBI(gapminder, País = "New Zealand", Año = c(1952,1987))) vallar(salio) vallar salio #' Title #' #' @param texto #' @param envoltura #' #' @return #' @export #' #' @examples vallar= function(texto, envoltura = "***") { texto = paste0(envoltura, paste(texto, collapse =" "),envoltura) return (texto)} vallar(mejores_practicas) vallar(mejores_practicas) library(ggplot2) pdf("Life_Exp_vs_time.pdf", width=12, height=12, onefile = TRUE) print(ggplot(data=gapminder, aes(x=Año, y=Expect, colour=País)) + geom_line()+ facet_grid(continente ~ .) 
dev.off() pdf("Mi_grafico.pdf", width = 12, height = 8, onefile = TRUE) for(continente in levels(gapminder$Continente)){ print(ggplot(data= gapminder[gapminder$Continente == continente, ], aes(x=Año, y=Expect, colour=País)) + geom_line()) } dev.off() pdf("Life_Exp_vs_time.pdf", width=12, height=12, onefile = TRUE) ggplot(data=gapminder, aes(x=Año, y=Expect, colour=País)) + geom_line() + facet_grid(Continente ~ .)+ theme(legend.position = "none") # You then have to make sure to turn off the pdf device! dev.off() head (gapminder-aus.csv) a1990_subset = gapminder[gapmider$Año >= 1990,] write.table(aust_subset, file = "cleaned-data/gapminder-aus.csv", sep = ",", quote = FALSE, row.names = FALSE ) write.table(1990_subset, file = "cleaned-data/gapminder-1990.csv",gapminder$Año == 1990, sep = ",", quote = FALSE, row.names = FALSE ) write.table(1990_subset, file = "cleaned-data/gapminder-1990.csv", sep = ",", quote = FALSE, row.names = FALSE ) conPBI = calcPBI(gapminder) ddply( .data = conPBI, .variables = "Continente", .fun = function(dat) mean(dat$PBI) ) names(gapminder). conPBI = calcPBI(gapminder) ddply( .data = conPBI, .variables = c( "Continente", "Año"), .fun = function(dat) mean(dat$PBI)) conPBI = calcPBI(gapminder) MediaVida= ddply( .data = conPBI, .variables = c( "Continente"), .fun = function(dat) mean(dat$Expect)) MediaVida MediaVidaAño1= ddply( .data = conPBI[conPBI$Año == 1952 ,], .variables = c( "Continente"), .fun = function(dat) mean(dat$Expect)) MediaVidaAño1 "Año" == 2007 MediaVidaAño2= ddply( .data = conPBI[conPBI$Año == 2007 ,], .variables = c( "Continente"), .fun = function(dat) mean(dat$Expect)) MediaVidaAño2 MediaVidaAño3 REsta= cbind(MediaVidaAño1, anio_2007= MediaVidaAño2$V1,resta=MediaVidaAño3) names REsta REsta names names(Resta) lebels(REsta) REsta names(REsta) names(REsta)[2]= "anio_1952" REsta ddply(gapminder, .(Continente), summarise, Expect = mean(Expect), sd_Exoect = sd(Expect)) library(dplyr) install.packages('dplyr') Africa gdp_bycontinents <- gapminder %>% group_by(Continente) %>% summarize(mean_PBI = mean(PBI)) gdp_bycontinents mediaIspa <- gapminder %>% group_by(País) %>% summarize(mean_Expect = mean(Expect)) mediaIspa[which.min(mediaIspa$mean_Expect),] mediaIspa[which.max(mediaIspa$mean_Expect),] pipa MediaVidaAño2002= ddply( .data = gapminder[gapminder$Año == 2002 ,], .variables = c( "País"), .fun = function(dat) mean(dat$Expect)) MediaVidaAño2002 ############# clase del viernes############ install.packages("tidyr") library("tidyr") str (gapminder) library(ggplot2) library(GGally) install.packages("GGally") gap_wide <- read.csv("http://bit.ly/gap_wide", stringsAsFactors = FALSE) str(gap_wide) View(gap_wide) gap_long <- gap_wide %>% gather(obstype_year,obs_values, starts_with('pop'), starts_with('lifeExp'), starts_with('gdpPercap')) str(gap_long) library("dplyr") View (gap_long) gap_long <- gap_wide %>% gather(obstype_year,obs_values,-continent,-country) str(gap_long) View(gap_long) gap_long <- gap_long %>% separate(obstype_year, into = c('obs_type','year'), sep = "_") gap_long$year <- as.integer(gap_long$year) View (gap_long) mediaIspa <- gap_long %>% group_by(continent, obs_type) %>% summarize(mean_Expect = mean(obs_values)) mediaIspa[which.min(mediaIspa$mean_Expect),] mediaIspa[which.max(mediaIspa$mean_Expect),] head (gap_long) gap_normal <- gap_long %>% spread(obs_type, obs_values) dim(gap_normal) dim(gapminder) head (gapminder, gap_normal) head(gap_normal) names(gapminder) names(gap_normal) gap_normal <- gap_normal[,names(gapminder)] 
all.equal(gap_normal,gapminder) gap_normal <- gap_normal[,names(gapminder)] all.equal(gap_normal,gapminder) gap_temp <- gap_long %>% unite(var_ID, continent, country, sep = "_") View(gap_temp) gap_temp <- gap_long %>% unite(ID_var,continent,country,sep = "_") %>% unite(var_names, obs_type, year, sep = "_") str(gap_temp) View(gap_temp) gap_wide_new <- gap_long %>% unite(ID_var, continent, country, sep = "_") %>% unite(var_names, obs_type, year, sep = "_") %>% spread(var_names, obs_values) gap_wide_new <- gap_long %>% unite(ID_var, continent, country, sep = "_") %>% unite(var_names, obs_type, year, sep = "_") %>% spread(var_names, obs_values) View(gap_wide_new) gap_ridiculamente_ancho <- gap_long %>% unite(var_names, country, obs_type, year, sep = "_") %>% spread(var_names, obs_values) dim(gap_ridiculamente_ancho) View(gap_long) install.packages(c("jsonlite", "base64enc")) <file_sep>--- title: "RxRMark" output: html_document --- **negrita** __negrita__ ## Haaaaaaaa #### mira vos!!
bb60f495be7a430b7709c7701a6af53c9f9021c3
[ "R", "RMarkdown" ]
4
R
orimaramiro/RXR
8128cdc6d2051e49ba76ed30543a9ca8aa04e844
aa678ba64d9d1ddb11ecf13ee9b95216e8211d2b
refs/heads/master
<file_sep>// // Created by hilla on 4/23/19. // // <NAME> // 208953083 #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <sys/wait.h> #include <ctype.h> #include <fcntl.h> #include <string.h> #define PRINT_ERROR_EXIT write(2, "Error in system call\n", strlen("Error in system call\n")); exit(-1); #define FORK_ERROR perror("fork error - system call failure"); #define EXPECTED_NUM_OF_COMMAND_ARGS 3 #define CLOSE_FILES close(fd[0]); close(fd[1]); typedef struct readDetails { ssize_t read_char; char buffer; } readDetails; //checks if files are the same int same_files(char *argv1, char *argv2); //checks if files are similar int similar_files(char *argv1, char *argv2); void skip_until_letter(int *fd, readDetails *details); void try_2_open(char *file_name, int *fd, int place); /** * @param argc num of args * @param argv the args * @return 1 - identical, 2 - different, 3 - similar, -1 - stderr */ int main(int argc, char *argv[]) { if (argc != EXPECTED_NUM_OF_COMMAND_ARGS) { //illegal number of arguments PRINT_ERROR_EXIT } if (same_files(argv[1], argv[2])) { //same files! return 1; } else if (similar_files(argv[1], argv[2])) { //similar files! return 3; } // neither return 2; } /** * check if two files are the same - make use of the execvp command * @param argv1 * @param argv2 * @return 1/0 */ int same_files(char *argv1, char *argv2) { // param list for execvp char *cmp_command[EXPECTED_NUM_OF_COMMAND_ARGS + 1] = {"cmp", argv1, argv2, NULL}; int cmp_result = -1; pid_t pid; if ((pid = fork()) == -1) FORK_ERROR else if (pid == 0) { //child process // use execvp - to execute the cmp command execvp(cmp_command[0], cmp_command); PRINT_ERROR_EXIT //only if execvp fails - shouldn't get here } else { // parent wait(&cmp_result); } //files same - cmp returns 0, otherwise returns 1 if (!cmp_result) { return 1; } return 0; } /** * check if teo files are similar * @param argv1 * @param argv2 * @return 1/0 */ int similar_files(char *argv1, char *argv2) { if (same_files(argv1, argv2)) return 1; // file descriptor int fd[2]; /** * try to gain access to files if fails - exits * else fd will hold the places to open */ try_2_open(argv1, fd, 0); try_2_open(argv2, fd, 1); readDetails readDetails1; readDetails readDetails2; while (1) { //skip whitespaces - buffer should hole the next letter to compare skip_until_letter(fd, &readDetails1); skip_until_letter(fd, &readDetails2); if (readDetails1.read_char == 0 && readDetails2.read_char == 0) { //end of both files with no difference - close and similar CLOSE_FILES return 1; } else if (readDetails1.read_char != readDetails2.read_char) { // case that one file is finished but the other is not - CLOSE_FILES return 0; } /** * compare lower case of the letter-if its not a letter - there will be no harm */ if (tolower(readDetails1.buffer) == tolower(readDetails2.buffer)) { continue; } else { // char is different, close and not similar CLOSE_FILES return 0; } } } /** * skip until reach of actual letter * update the details of the file with the read info * @param fd * @param details */ void skip_until_letter(int *fd, readDetails *details) { char file_buffer; ssize_t read_char; do { read_char = read(fd[0], &file_buffer, 1); if (read_char == -1) { PRINT_ERROR_EXIT } if (file_buffer != '\n' && file_buffer != ' ') break; } while (read_char != 0); details->buffer = file_buffer; details->read_char = read_char; } /** * tries to open file * @param file_name * @param fd * @param place */ void try_2_open(char *file_name, int *fd, int place) { if (access(file_name, F_OK) == 
0) { // place in 0 fd[place] = open(file_name, O_RDONLY); } else { PRINT_ERROR_EXIT } } <file_sep># Automated-testing-system an automated testing system to compile run and compare c files ex31 has the comparing logic so it needs to be compiled befor use and get the name = cmp.out ex31 gets two paths of files and check for comparsent. ex32 defines the testing system : get a confogoration file which defines 3 paths: students directory,input,and right output the program will then search for each student a c file compile and run it with the input and then using the cmp.out check with the right output. the reuslts are written to a reasults.csv file. #files #system calls #threads <file_sep>// // Created by hilla on 4/29/19. // /* * <NAME> * 208953083 */ #include <dirent.h> #include <fcntl.h> #include <unistd.h> #include <stdlib.h> #include <wait.h> #include <stdio.h> #include <memory.h> #include <sys/stat.h> //TODO ex31 check for similar bad //TODO make sure no items left behind //TODO arrange #define FILE_DEAFULT "hello.out" #define MU_OUTPUT "my output.txt" #define RUN "./hello.out" #define EXPECTED_NUM_OF_COMMAND_ARGS 2 #define MAX_LINE_LENGTH 150 #define MAX_FILE_LENGTH 3*150 #define LS_COMMAND "/bin/ls" #define PRINT_ERROR_EXIT write(2, "Error in system call\n", strlen("Error in system call\n")); exit(-1); #define GREAT_JOB 100 #define SIMILAR 80 #define DIFFERENRT 60 #define TIMEOUT 40 #define COMPILE_ERROR 20 #define NO_FILES 0 //struct with the confo details typedef struct confDetails { char directory_path[MAX_LINE_LENGTH];//the folder with the user names folders char input_path[MAX_LINE_LENGTH]; //the input path char right_output_path[MAX_LINE_LENGTH]; //the output path } confDetails; //struct with data for execute typedef struct ToExe { int status; char userName[MAX_LINE_LENGTH]; char path[MAX_LINE_LENGTH]; char name[MAX_LINE_LENGTH]; } ToExe; //find c file in folder struct ToExe FindCfile(char *name, char *father); // execute (a.out) void ExecuteC(ToExe *exe, int fd_result, confDetails *confDetails1); // write on result file the correct result void WriteResult(int fd, char *name, int grade, int key); // return reason for grade by some key char *RetReason(int key); //read a file into array void readfile(char *fileName, char *lines) { int fd; ssize_t ret; char buf[MAX_LINE_LENGTH * 3]; if ((fd = open(fileName, O_RDONLY)) < 0) { PRINT_ERROR_EXIT } else { while ((ret = read(fd, buf, sizeof(buf) - 1)) > 0) { buf[ret] = 0x00; if (strcmp(buf, "\n") == 0) { //empty line continue; } strcpy(lines, buf); } close(fd); } } //read conf details void readLines(char *file, confDetails *confDetails1) { //TODO validate exictence of 3 lines! 
char lines[MAX_FILE_LENGTH]; readfile(file, lines); char *split = strtok(lines, "\n"); stpcpy(confDetails1->directory_path, split); split = strtok(NULL, "\n"); stpcpy(confDetails1->input_path, split); split = strtok(NULL, "\n"); stpcpy(confDetails1->right_output_path, split); } //get number of files int getNumberofFiles(char *path) { int file_count = 0; DIR *dirp; struct dirent *entry; if ((dirp = opendir(path)) == NULL) { PRINT_ERROR_EXIT } while ((entry = readdir(dirp)) != NULL) { if (entry->d_type == DT_DIR) { /* If the entry is a directory */ file_count++; } } closedir(dirp); return file_count - 2; } //get users char *getUsers(char *directory_path) { char *fileName = "users.txt"; int fd; //new file descriptor //open txt file - users list if ((fd = open(fileName, O_CREAT | O_TRUNC | O_RDWR, 0777)) < 0)//0644 { PRINT_ERROR_EXIT } dup2(fd, 1); int status; pid_t pid = fork(); if (pid == 0) { execl(LS_COMMAND, LS_COMMAND, directory_path, NULL);//ls confDetails.directory_path //case of fail: PRINT_ERROR_EXIT } else { wait(&status); //return to the regular state of printing int one = dup(1); int two = dup(fd); dup2(one, 1); dup2(two, fd); if (close(fd) == -1) { PRINT_ERROR_EXIT //close failed } return fileName; } } //find a c file for specific user ToExe FindCfile(char *name, char *father) { char *path = malloc(strlen(father) + strlen(name) + 10); strcpy(path, father); strcat(path, "/"); strcat(path, name); DIR *dirp; struct dirent *entry; if ((dirp = opendir(path)) == NULL) { PRINT_ERROR_EXIT } struct ToExe toExe; toExe.name[0] = '\0'; toExe.path[0] = '\0'; toExe.userName[0] = '\0'; toExe.status = 1; strcat(toExe.userName, name); while ((entry = readdir(dirp)) != NULL) { char *dot = strrchr(entry->d_name, '.'); if (dot && strcmp(dot, ".c") == 0) { //its a C file! char copmlete[strlen(path) + strlen(entry->d_name) + 10]; copmlete[0] = '\0'; strcpy(copmlete, path); strcat(copmlete, "/"); strcat(copmlete, entry->d_name); toExe.path[0] = '\0'; strcpy(toExe.path, copmlete); strcat(toExe.name, entry->d_name); closedir(dirp); return toExe; } struct stat chackStat; if (S_ISDIR(chackStat.st_mode)) { /* If the entry is a directory */ return FindCfile(entry->d_name, father); } } //NO_C_FILE toExe.status = -1; closedir(dirp); return toExe; } //compile void compile(ToExe *toExe) { if (toExe->status == -1) { return; } int status; int pid = fork(); if (pid == -1) { PRINT_ERROR_EXIT; } if (pid == 0) { //compiling the c file - giving it the user name char *gcc[] = {"gcc", "-o", FILE_DEAFULT, toExe->path, NULL}; if (execvp("gcc", gcc) < 0) { PRINT_ERROR_EXIT//execlp FAILED } } else { wait(&status); int ret = WEXITSTATUS(status);//the return from gcc if (ret == 1) {//gcc failed! 
//COMPILATION_ERROR toExe->status = -2; return; } } } // write on result file the correct result void WriteResult(int fd, char *name, int grade, int key) { if (grade < 0) {//not allow negative grade grade = 0; } char srtGrade[4]; sprintf(srtGrade, "%d", grade); srtGrade[3] = '\0'; char data[26 + strlen(name)]; data[0] = '\0'; strcpy(data, name); strcat(data, ","); strcat(data, srtGrade); strcat(data, ","); //the reason text strcat(data, RetReason(key)); //write the data on result file int a = (int) write(fd, &data, strlen(data)); if (a == -1) { PRINT_ERROR_EXIT //WRITE FAILED } } // return reason for grade by some key char *RetReason(int key) { switch (key) { case 1: return "NO_C_FILE\n\0"; case 2: return "COMPILATION_ERROR\n\0"; case 3: return "TIMEOUT\n\0"; case 4: return "BAD_OUTPUT\n\0"; case 5: return "SIMILLAR_OUTPUT\n\0"; case 6: return "GREAT_JOB\n\0"; default: break; } return ""; } //cmp void Compare(confDetails *confDetails1, int fd_result, char *name) { int pid = fork(); int status = 0; int key = 0; int grade = 0; if (pid == -1) { unlink(MU_OUTPUT); PRINT_ERROR_EXIT; } if (pid == 0) { int a = execl("cmp.out","./cmp.out", MU_OUTPUT, confDetails1->right_output_path,NULL); //char *args[] = {"cmp.out","./cmp.out",MU_OUTPUT, confDetails1->right_output_path, NULL}; if (a < 0) { unlink(MU_OUTPUT); PRINT_ERROR_EXIT //A.OUT FAILED } } else { wait(&status); // unlink(myOut_f); int ret = WEXITSTATUS(status);//the return value from comparing switch (ret) { case 3: key = 4; grade = DIFFERENRT; //different break; case 2: key = 5; grade = SIMILAR; //similar break; case 1: key = 6; grade = GREAT_JOB; //same break; default: key = 4; grade = DIFFERENRT; //different break; //PRINT_ERROR_EXIT } WriteResult(fd_result, name, grade, key); unlink(MU_OUTPUT); } } //run int run(int fd_in, int fd_myOut, int fd_result, char *name) { int status = 0; int save_fd0 = dup(0); int save_fd1 = dup(1); int save_fd_in = dup(fd_in); int save_fd_out = dup(fd_myOut); //to print output in myOutput file && read from input file dup2(fd_in, 0); dup2(fd_myOut, 1); pid_t pid = fork(); if (pid < 0) { unlink(MU_OUTPUT); PRINT_ERROR_EXIT } if (pid == 0) { //execute the c file char *args[] = {RUN, NULL}; if (execvp(RUN, args) < 0) { unlink(MU_OUTPUT); PRINT_ERROR_EXIT //A.OUT FAILED } } else { //check if the program run above 5 seconds sleep(5); int f = waitpid(pid, &status, WNOHANG); //return to the regular state of printing dup2(save_fd1, 1); dup2(save_fd0, 0); dup2(save_fd_in, fd_in); dup2(save_fd_out, fd_myOut); if (f == 0) {//program NOT finish in 5 seconds //TIMEOUT kill(pid, SIGSTOP); unlink(MU_OUTPUT); WriteResult(fd_result, name, TIMEOUT, 3); return -1; } //close input file if (close(fd_in) == -1) { unlink(MU_OUTPUT); PRINT_ERROR_EXIT //CLOSE FAILED } lseek(fd_myOut, 0, SEEK_SET); } return 0; } //execute (a.out) void ExecuteC(ToExe *exe, int fd_result, confDetails *confDetails1) { lseek(fd_result, 0, SEEK_END); if (exe->status < 0) { if (exe->status == -1) {//NO_C_FILE WriteResult(fd_result, exe->userName, NO_FILES, 1); } else if (exe->status == -2) {//COMPILATION_ERROR WriteResult(fd_result, exe->userName, COMPILE_ERROR, 2); } } else { int fd_in; //input - file descriptor int fd_myOut;//my output - file descriptor //open input file if ((fd_in = open(confDetails1->input_path, O_RDWR, 0444)) < 0)//0644 { PRINT_ERROR_EXIT //OPEN FAILED } //open my output file if ((fd_myOut = open(MU_OUTPUT, O_CREAT | O_TRUNC | O_RDWR, 0666)) < 0)//0644 { PRINT_ERROR_EXIT //OPEN FAILED } if (run(fd_in, fd_myOut, fd_result, exe->userName) 
== -1) { //time out return; } Compare(confDetails1, fd_result, exe->userName); } } //handle each user directory void handleUsers(char *users, confDetails *confDetails, int fd) { char delim[] = "\n"; char *ptr = strtok(users, delim); while (ptr != NULL) { //ptr now holds a user name //looking for c file ToExe exe = FindCfile(ptr, confDetails->directory_path); //compile compile(&exe); //execute it ExecuteC(&exe, fd, confDetails);//run the c file if (exe.status > 0) { unlink(FILE_DEAFULT); } ptr = strtok(NULL, delim); } } int main(int argc, char *argv[]) { // confo details confDetails confDetails; readLines(argv[1], &confDetails); //users details char *usersFile = getUsers(confDetails.directory_path); int numberOfUsers = getNumberofFiles(confDetails.directory_path); char users[MAX_LINE_LENGTH * numberOfUsers]; readfile(usersFile, users); //after read delete users.txt unlink(usersFile); //open the result file int fd_result; if ((fd_result = open("results.csv", O_CREAT | O_TRUNC | O_RDWR, 0666)) < 0) { PRINT_ERROR_EXIT //open failed; } lseek(fd_result, 0, SEEK_SET); handleUsers(users, &confDetails, fd_result); return 0; }
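The README in this repository lays out the intended flow: compile ex31.c into cmp.out, then run the grader built from ex32.c with a configuration file whose three lines give the students directory, the input file and the expected output, with grades written to results.csv. A rough driver for that flow, sketched in Python; the binary name ex32.out, the file conf.txt and the sample file names are assumptions, not taken from the repository:

```python
# Illustrative driver for the workflow described in the README.
# Paths and the grader binary name (ex32.out) are hypothetical.
import subprocess

# Build the comparison tool (must be named cmp.out, since ex32.c execs "./cmp.out").
subprocess.run(["gcc", "ex31.c", "-o", "cmp.out"], check=True)
subprocess.run(["gcc", "ex32.c", "-o", "ex32.out"], check=True)

# cmp.out exit codes, as documented at the top of ex31.c:
# 1 = identical, 2 = different, 3 = similar (same up to whitespace and case).
rc = subprocess.run(["./cmp.out", "expected_output.txt", "student_output.txt"]).returncode
print({1: "identical", 2: "different", 3: "similar"}.get(rc, "error"))

# conf.txt holds three lines: students directory, input path, expected output path.
subprocess.run(["./ex32.out", "conf.txt"], check=True)
# ex32 writes one "name,grade,reason" row per student to results.csv.
```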
c5f6e9fbedce85e3e562c7dbfd26d54829f61b22
[ "Markdown", "C" ]
3
C
hillahalevi/Automated-testing-system
3d8010711765e5fc40ded795f8bea0cd84c5a9be
56dbbe200d18cdc109cad9cc471ed3e1db720c25
refs/heads/master
<file_sep># <NAME> Small website project. <file_sep>-- MySQL Script generated by MySQL Workbench -- dim. 30 sept. 2018 20:27:16 CEST -- Model: New Model Version: 1.0 -- MySQL Workbench Forward Engineering SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0; SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0; SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES'; -- ----------------------------------------------------- -- Schema mydb -- ----------------------------------------------------- -- ----------------------------------------------------- -- Schema mydb -- ----------------------------------------------------- CREATE SCHEMA IF NOT EXISTS `mydb` DEFAULT CHARACTER SET utf8 ; USE `mydb` ; -- ----------------------------------------------------- -- Table `mydb`.`organization` -- ----------------------------------------------------- DROP TABLE IF EXISTS `mydb`.`organization` ; CREATE TABLE IF NOT EXISTS `mydb`.`organization` ( `id` INT NOT NULL AUTO_INCREMENT, `name` VARCHAR(128) NULL, `short_intro` TINYTEXT NULL, `intro` TEXT(65535) NULL, `transport_access` TINYTEXT NULL, `address1_details` VARCHAR(128) NULL, `address2_number_town` VARCHAR(128) NULL, `address3_city_state` VARCHAR(128) NULL, `address4_zip_country` VARCHAR(128) NULL, `since_year` VARCHAR(45) NULL, `students_nb` VARCHAR(16) NULL, `students_nations` VARCHAR(128) NULL, `students_by_class` VARCHAR(16) NULL, `study_penetration_rate` VARCHAR(16) NULL, `housing_fees` VARCHAR(128) NULL, `additionnal_infos` TINYTEXT NULL, `map_shortcode` TEXT(65535) NULL, `feedback1` TINYTEXT NULL, `feedback2` TINYTEXT NULL, `feedback3` TINYTEXT NULL, `type` ENUM('school', 'housing', 'spot') NULL, `residents_nb` VARCHAR(16) NULL, `residents_nations` VARCHAR(128) NULL, `residents_by_room` VARCHAR(16) NULL, `rental_fee` VARCHAR(128) NULL, PRIMARY KEY (`id`)) ENGINE = InnoDB; -- ----------------------------------------------------- -- Table `mydb`.`user` -- ----------------------------------------------------- DROP TABLE IF EXISTS `mydb`.`user` ; CREATE TABLE IF NOT EXISTS `mydb`.`user` ( `id` INT NOT NULL AUTO_INCREMENT, `email` VARCHAR(255) NOT NULL, `passwd_hash` VARCHAR(128) NOT NULL, `passwd_salt` VARCHAR(128) NOT NULL, `role` ENUM('admin', 'nonadmin') NULL, `name` VARCHAR(128) NULL, PRIMARY KEY (`id`)) ENGINE = InnoDB; -- ----------------------------------------------------- -- Table `mydb`.`cursus` -- ----------------------------------------------------- DROP TABLE IF EXISTS `mydb`.`cursus` ; CREATE TABLE IF NOT EXISTS `mydb`.`cursus` ( `duration` VARCHAR(45) NULL, `selection_fee` INT NULL, `admission_fee` INT NULL, `lesson_fee` INT NULL, `other_fee` INT NULL, `organization_id` INT NOT NULL, `purpose` TINYTEXT NULL, PRIMARY KEY (`organization_id`), INDEX `fk_cursus_organization_idx` (`organization_id` ASC), CONSTRAINT `fk_cursus_organization` FOREIGN KEY (`organization_id`) REFERENCES `mydb`.`organization` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION) ENGINE = InnoDB; -- ----------------------------------------------------- -- Table `mydb`.`news` -- ----------------------------------------------------- DROP TABLE IF EXISTS `mydb`.`news` ; CREATE TABLE IF NOT EXISTS `mydb`.`news` ( `id` INT NOT NULL AUTO_INCREMENT, `keywords` TINYTEXT NULL, `title` VARCHAR(128) NULL, `content` TEXT(65535) NULL, `user_id` INT NOT NULL, PRIMARY KEY (`id`), INDEX `fk_news_user1_idx` (`user_id` ASC), CONSTRAINT `fk_news_user1` FOREIGN KEY (`user_id`) REFERENCES `mydb`.`user` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION) 
ENGINE = InnoDB; -- ----------------------------------------------------- -- Table `mydb`.`faq` -- ----------------------------------------------------- DROP TABLE IF EXISTS `mydb`.`faq` ; CREATE TABLE IF NOT EXISTS `mydb`.`faq` ( `id` INT NOT NULL AUTO_INCREMENT, `question` VARCHAR(128) NULL, `answer` TEXT(65535) NULL, `keywords` TINYTEXT NULL, PRIMARY KEY (`id`)) ENGINE = InnoDB; -- ----------------------------------------------------- -- Table `mydb`.`photo` -- ----------------------------------------------------- DROP TABLE IF EXISTS `mydb`.`photo` ; CREATE TABLE IF NOT EXISTS `mydb`.`photo` ( `id` INT NOT NULL AUTO_INCREMENT, `alt_text` VARCHAR(64) NOT NULL, `url` TINYTEXT NULL, `keywords` TINYTEXT NULL, `organization_id` INT NOT NULL, PRIMARY KEY (`id`, `organization_id`), INDEX `fk_photo_organization1_idx` (`organization_id` ASC), CONSTRAINT `fk_photo_organization1` FOREIGN KEY (`organization_id`) REFERENCES `mydb`.`organization` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION) ENGINE = InnoDB; SET SQL_MODE=@OLD_SQL_MODE; SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS; SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
e28ca4c9e7ba71f3d67c8c7393c760000a547177
[ "Markdown", "SQL" ]
2
Markdown
LordGeck/nihongo
5a81f16bffc651e3f84374e2691467116a25a068
54a1e6a74a9ab82a74ec31b182e271c89c206a7b
refs/heads/master
<repo_name>JLemmetti/leffat<file_sep>/app/models/user.js import DS from 'ember-data'; export default DS.Model.extend({ movies: DS.hasMany({inverse: null}), name: DS.attr('string'), email: DS.attr('string') }); <file_sep>/app/components/edit-movie-item.js import Component from '@ember/component'; export default Component.extend({ tagName: '', actions: { setRating(movie, rating) { this.sendAction('setRating', movie, rating); }, saveMovie(movie) { this.sendAction('saveMovie', movie); }, deleteMovie(movie) { this.sendAction('deleteMovie', movie); }, cancelEdit(movie) { this.sendAction('cancelEdit', movie); } } }); <file_sep>/app/components/movie-item.js import Component from '@ember/component'; export default Component.extend({ tagName: '', actions: { editMovie (movie) { this.sendAction('editMovie', movie); } } }); <file_sep>/app/controllers/users/user.js import { filterBy, sort } from '@ember/object/computed'; import Controller from '@ember/controller'; export default Controller.extend({ movies: filterBy('model.movies', 'isNew', false), newMovies: filterBy('model.movies', 'isNew', true), sortProperties: ['watchDate:desc'], sortedMovies: sort('movies', 'sortProperties'), actions: { addNewMovie() { let movie = this.store.createRecord('movie'); let user = this.store.findRecord('user', this.get('session.uid')).then(user => { user.get('movies').pushObject(movie); }); }, setRating(movie, rating) { movie.set('rating', rating); }, saveMovie(movie) { movie.set('editing', false); movie.save().then(movie => { let user = this.store.findRecord('user', this.get('session.uid')).then(user => { user.get('movies').pushObject(movie); user.save(); }); }); }, editMovie(movie) { movie.set('editing', true); }, cancelEdit(movie) { if (movie.get('isNew')) { movie.deleteRecord(); } else { movie.set('editing', false); movie.rollbackAttributes(); } }, deleteMovie(movie) { movie.deleteRecord(); movie.save(); } } });
6f7e7213c9b304ab9959e9402a4ed89657991d2f
[ "JavaScript" ]
4
JavaScript
JLemmetti/leffat
ecc2e9d16c4cc7f39d9050e02d31e096485ced84
3796d3432d4203d30254e484488e5b607b7f05dd
refs/heads/master
<file_sep>#include <iostream> #include "server.h" using namespace std; server::server(io_service& service): sock_(service), is_started_(false) { } bool server::started() { return is_started_; } void server::stop() { cout << "stop. is_started: " << is_started_ << endl; if (started()) { is_started_ = false; sock_.close(); } } network::socket& server::sock() { return sock_; } server_ptr server::new_(io_service& service) { server_ptr new_(new server(service)); return new_; } void server::start() { is_started_ = true; read(); } void server::read() { cout << "read" << endl; if (started()) { async_read(sock_, boost::asio::buffer(read_buffer_), MEM_FN2(on_read_progress, _1, _2), MEM_FN2(on_read_completed,_1,_2)); // sock_.async_read_some(boost::asio::buffer(read_buffer_), MEM_FN2(on_read_completed,_1,_2)); } } size_t server::on_read_progress(const bs::error_code& err, size_t bytes) { cout << "on_read_progress. Bytes: " << bytes << ", err:" << err << endl; if (!err) { const bool found = std::find(read_buffer_, read_buffer_ + bytes, '\n') < read_buffer_ + bytes; return found ? 0 : 1; } return 0; } void server::on_read_completed(const bs::error_code& err, size_t bytes) { cout << "on_read_completed. Bytes: " << bytes << ", err:" << err << endl; if (!err) { const std::string msg(read_buffer_, bytes); write(msg + "\n"); } cout << "on_read_completed. before stop" << endl; stop(); } void server::write(const string& msg) { cout << "write. isStarted:" << is_started_ << endl; if (started()) { std::copy(msg.begin(), msg.end(), write_buffer_); sock_.async_write_some(boost::asio::buffer(write_buffer_, msg.size()), MEM_FN2(on_write,_1,_2)); } } void server::on_write(const bs::error_code& err, size_t bytes) { cout << "on_write. Bytes: " << bytes << ", err:" << err << endl; read(); } <file_sep>#ifndef CSERVER_H #define CSERVER_H #include <functional> #include <boost/asio.hpp> #define MEM_FN(x) std::bind(&server::x, shared_from_this()) #define MEM_FN1(x,y) std::bind(&server::x, shared_from_this(),y) #define MEM_FN2(x,y,z) std::bind(&server::x, shared_from_this(),y,z) class server; using server_ptr = std::shared_ptr<server>; using io_service = boost::asio::io_service; using network = boost::asio::ip::tcp; using std::placeholders::_1; using std::placeholders::_2; namespace bs = boost::system; constexpr uint msg_size = 1024; class server : public std::enable_shared_from_this<server>, boost::noncopyable { public: static server_ptr new_(io_service& service); void start(); void stop(); bool started(); void read(); void write(const std::string& msg); void on_read_completed(const bs::error_code& err, size_t bytes); void on_write(const bs::error_code& err, size_t bytes); size_t on_read_progress(const bs::error_code& err, size_t bytes); network::socket& sock(); private: server(io_service& service); network::socket sock_; char read_buffer_[msg_size]; char write_buffer_[msg_size]; bool is_started_; }; #endif // CSERVER_H <file_sep>#include <iostream> #include "server.h" void handle_accept(network::acceptor& acceptor, io_service& service, server_ptr client, const bs::error_code& err); int main() { io_service service; network::acceptor acceptor(service, network::endpoint(network::tcp::v4(), 2345)); server_ptr client = server::new_(service); acceptor.async_accept(client->sock(), std::bind( handle_accept, std::ref(acceptor), std::ref(service), client, _1) ); service.run(); } void handle_accept(network::acceptor& acceptor, io_service& service, server_ptr client, const bs::error_code& err) { client->start(); server_ptr new_client = 
server::new_(service); acceptor.async_accept(new_client->sock(), std::bind(handle_accept, std::ref(acceptor), std::ref(service), new_client, _1) ); }
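The server above accepts connections on TCP port 2345, reads until it sees a newline, echoes the message back with a trailing newline and then shuts the connection down. A quick way to poke at it once it is running locally, sketched as a small Python client (host, message text and buffer size are arbitrary choices):

```python
# Minimal test client for the Boost.Asio echo server above, assumed to be
# listening on 127.0.0.1:2345. Sends one newline-terminated message and
# reads back whatever the server echoes before closing.
import socket

with socket.create_connection(("127.0.0.1", 2345)) as sock:
    sock.sendall(b"hello asio\n")
    reply = b""
    while not reply.endswith(b"\n"):
        chunk = sock.recv(1024)
        if not chunk:        # server closed the connection
            break
        reply += chunk
    print(reply.decode(errors="replace").strip())   # expect: hello asio
```

Note that on_read_completed() calls stop() right after queuing the asynchronous write, so depending on timing the echoed reply can be truncated or lost; the client above exercises the intended request/reply exchange rather than guaranteed behaviour.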
e3d0aa8a28dc54b009d089ff98e35d36a7fc3c6a
[ "C++" ]
3
C++
kolisergej/boost_demos
2b0d20f2593b1d8806766f118e2cbec00f06f9af
002d6d16f1d3e0feb1ad6b5a2fa54f8108d4f7fe
refs/heads/master
<file_sep><?php require_once 'ahp.php'; session_start(); extract($_POST); if(isset($_SESSION["arreglo"])){ $arreglo = $_SESSION['arreglo']; } $objetivo = $arreglo[0]; $criterios = $arreglo[1]; $alternativas = $arreglo[2]; $ahp = new AHP($objetivo); foreach($criterios as $criterio){ $ahp->addCriterio($criterio); if(isset($_SESSION[$criterio])){ $subcriterios = array_keys($_SESSION[$criterio]); foreach($subcriterios as $subcriterio){ $ahp->addCriterio($subcriterio,$criterio); } } } foreach($alternativas as $alternativa){ $ahp->addAlternativa($alternativa); } $matricesDeComparacion = array(); $matrizDePrioridad = $ahp->getMatrizDePrioridad(); // MATRIX DE COMPARACION foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); foreach($subcriterios as $subcriterio => $prioridad){ $matricesDeComparacion[$subcriterio] = $ahp->getMatrizAlternativa(); } } else{ $matricesDeComparacion[$criterio] = $ahp->getMatrizAlternativa(); } } // MATRIZ DE PRIORIDAD PARA LOS SUBCRITERIOS $matrizDePrioridadSubcriterios = array(); foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); $matrizDePrioridadSubcriterios[$criterio] = $ahp->getMatrizDePrioridad($criterio); } } foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); foreach($subcriterios as $subcriterio => $prioridad){ $matricesDeComparacion[$subcriterio] = $ahp->getMatrizAlternativa(); } } else{ $matricesDeComparacion[$criterio] = $ahp->getMatrizAlternativa(); } } foreach($_POST as $key => $value){ if(substr_count($key,'*')==1){ $criterio1 = substr($key,0,(strpos($key,'*'))); $criterio2 = substr($key,strpos($key,'*')+1); if(substr_count($value,'/')==1){ $value = 1 / substr($value,2); } // echo floatval($value); // echo "<br>"; if(isset($matrizDePrioridad[$criterio1][$criterio2])){ $matrizDePrioridad[$criterio1][$criterio2] = $value; $matrizDePrioridad[$criterio2][$criterio1] = 1 / $value; } } if(substr_count($key,'#')==1){ $criterioPadre = substr($key,0,strpos($key,'#')); $subcriterio1 = substr($key,strpos($key,'#')+1,strpos($key,'*')-10); $subcriterio2 = substr($key,strpos($key,'*')+1); if(substr_count($value,'/')==1){ $value = 1 / substr($value,2); } if(isset($matrizDePrioridadSubcriterios[$criterioPadre][$subcriterio1][$subcriterio2])){ $matrizDePrioridadSubcriterios[$criterioPadre][$subcriterio1][$subcriterio2] = $value; $matrizDePrioridadSubcriterios[$criterioPadre][$subcriterio2][$subcriterio1] = 1 / $value; } } if(substr_count($key,'-')==1){ $criterio =substr($key,0,strpos($key,'-')); $alternativa1 = substr(substr($key,0,(strpos($key,'*'))),strpos($key,'-')+1); $alternativa2 = substr($key,strpos($key,'*')+1); if(substr_count($value,'/')==1){ $value = 1 / substr($value,2); } if(isset($matricesDeComparacion[$criterio][$alternativa1][$alternativa2])){ $matricesDeComparacion[$criterio][$alternativa1][$alternativa2] = $value; $matricesDeComparacion[$criterio][$alternativa2][$alternativa1] = 1 / $value; } } } // echo '<pre>'; print_r($matrizDePrioridadSubcriterios); echo '</pre>'; // echo '<pre>'; print_r($matrizDePrioridad); echo '</pre>'; // echo '<pre>'; print_r($matricesDeComparacion); echo '</pre>'; // echo "<br><br>"; // echo '<pre>'; print_r($ahp->getVectorPromedio($matrizDePrioridad)); echo '</pre>'; // foreach($criterios as $criterio){ // echo "<br><br>"; // echo "Comparacion de alternativas por criterio: $criterio"; // echo '<pre>'; 
print_r($ahp->getVectorPromedio($matricesDeComparacion[$criterio],1)); echo '</pre>'; // } $vectorFinal = $ahp->getVectorFinal(); $vectorCriterio = $ahp->getVectorPromedio($matrizDePrioridad); foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $vectorSubcriterio[$criterio] = $ahp->getVectorPromedioSubcriterio($matrizDePrioridadSubcriterios[$criterio],$criterio); foreach($vectorSubcriterio[$criterio] as $posicion => $valor){ $vectorSubcriterio[$criterio][$posicion] = $valor * $vectorCriterio[$criterio]; // LO QUE HAGO ACA ES PRIORIDAD LOCAL * PRIORIDAD DEL PADRE = PRIORIDAD GLOBAL. } } } // print_r($vectorSubcriterio); // print_r($vectorCriterio); // $vectorResultado = $ahp->getVectorResultado(); $vectorAlternativasC = array(); $todosLosCriterios = $criterios; foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); $todosLosCriterios = array_merge(array_values($todosLosCriterios),array_keys($subcriterios)); } } foreach($todosLosCriterios as $criterio){ if(!($ahp->esPadre($criterio))){ $vectorAlternativasC[$criterio] = $ahp->getVectorPromedio($matricesDeComparacion[$criterio],1); } } // print_r($vectorAlternativasC); // foreach($alternativas as $alternativa){ // foreach($todosLosCriterios as $criterio){ // if(!($ahp->esPadre($criterio))){ // $producto = 0; // if($ahp->esHijo($criterio)){ // echo $vectorSubcriterio[$criterio]; // $producto = $vectorSubcriterio[$criterio] * $vectorAlternativasC[$criterio][$alternativa]; // } // else{ // $producto = $vectorCriterio[$criterio] * $vectorAlternativasC[$criterio][$alternativa]; // } // $vectorFinal[$alternativa] = $vectorFinal[$alternativa] + $producto; // } // } // } foreach($alternativas as $alternativa){ foreach($todosLosCriterios as $criterio){ if(!($ahp->esPadre($criterio))){ $producto = 0; if($ahp->esHijo($criterio)){ $producto = $vectorSubcriterio[$ahp->getPadre($criterio)][$criterio] * $vectorAlternativasC[$criterio][$alternativa]; } else{ $producto = $vectorCriterio[$criterio] * $vectorAlternativasC[$criterio][$alternativa]; } $vectorFinal[$alternativa] = $vectorFinal[$alternativa] + $producto; } } } // echo "<br><br>"; // echo '<pre>'; print_r($vectorFinal); echo '</pre>'; ?> <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Bienvenido/a</title> <meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0"> <link href="https://fonts.googleapis.com/css?family=Oswald:400,700" rel="stylesheet"> <link rel="stylesheet" type="text/css" href="css/estilos.css"> </head> <body class="body-generar-comparaciones"> <div id="particles-js"></div> <header class="header-barra"> <div class="contenedor"> <div class="menu-barra"> <nav class="enlaces"> <a href="./index.php">Inicio</a> <a href="#">Trabajos</a> <a href="#">Contacto</a> </nav> </div> </div> </header> <div class="contenedor-resultados"> <div class="contenedor"> <h1>Resultados</h1> <h2>De acuerdo a los criterios elegidos y las puntuaciones establecidas, los resultados son los siguientes:</h2> <div class="caja-resultados"> <div class="titulo-alternativa"> <h4>Alternativas</h4> </div> <div class="titulo-puntuacion"> <h4>Puntuacion</h4> </div> <?php foreach($alternativas as $alternativa){ ?> <?php if($vectorFinal[$alternativa] == max($vectorFinal)){ $mejorAlternativa = $alternativa; } ?> <div class="resultado"> <div class="alternativa"> <?php echo $alternativa ?> </div> <div class="numero"> <?php echo $vectorFinal[$alternativa] ?> </div> </div> 
<?php } ?> </div> <h2>Como se puede observar, la mejor alternativa es: <span><?php echo $mejorAlternativa ?></span></h2> <p>Con aproximadamente el <span><?php echo round(max($vectorFinal),4)*100 ?>%</span>, se convierte en la mejor opcion para el problema planteado.</p> <?php session_destroy(); ?> </div> </div> <footer> <div class="contenedor"> <div class="menu-foot"> <nav> <a href="#">Inicio</a> <a href="#">Trabajos</a> <a href="#">Contacto</a> </nav> </div> <div class="copyright"> <h1 class="titulo">Copyright - <NAME> @2017</h1> </div> </div> </footer> <script type="text/javascript" src="js/jquery-3.1.1.min.js"></script> <script type="text/javascript" src="js/particles.js"></script> <script type="text/javascript" src="js/particulas.js"></script> </body> </html><file_sep><?php extract($_POST); foreach($_POST as $key => $value){ if(substr_count($key,'*')==1){ $criterio1 = substr($key,0,(strpos($key,'*'))); $criterio2 = substr($key,strpos($key,'*')+1); if(isset($matrizDePrioridad[$criterio1][$criterio2])){ $matrizDePrioridad[$criterio1][$criterio2] = $value; $matrizDePrioridad[$criterio2][$criterio1] = 1 / $value; } } if(substr_count($key,'-')==1){ $criterio =substr($key,0,strpos($key,'-')); $alternativa1 = substr(substr($key,0,(strpos($key,'*'))),strpos($key,'-')+1); $alternativa2 = substr($key,strpos($key,'*')+1); if(isset($matricesDeComparacion[$criterio][$alternativa1][$alternativa2])){ $matricesDeComparacion[$criterio][$alternativa1][$alternativa2] = $value; $matricesDeComparacion[$criterio][$alternativa2][$alternativa1] = 1 / $value; } } } echo '<pre>'; print_r($matrizDePrioridad); echo '</pre>'; echo '<pre>'; print_r($matricesDeComparacion); echo '</pre>'; echo "<br><br>"; echo '<pre>'; print_r($ahp->getVectorPromedio($matrizDePrioridad)); echo '</pre>'; foreach($criterios as $criterio){ echo "<br><br>"; echo "Comparacion de alternativas por criterio: $criterio"; echo '<pre>'; print_r($ahp->getVectorPromedio($matricesDeComparacion[$criterio],1)); echo '</pre>'; } $vectorFinal = $ahp->getVectorFinal(); $vectorCriterio = $ahp->getVectorPromedio($matrizDePrioridad); $vectorResultado = $ahp->getVectorResultado(); $vectorAlternativasC = array(); foreach($criterios as $criterio){ $vectorAlternativasC[$criterio] = $ahp->getVectorPromedio($matricesDeComparacion[$criterio],1); } foreach($alternativas as $alternativa){ foreach($criterios as $criterio){ $producto = 0; $producto = $vectorCriterio[$criterio] * $vectorAlternativasC[$criterio][$alternativa]; $vectorFinal[$alternativa] = $vectorFinal[$alternativa] + $producto; } } echo "<br><br>"; echo '<pre>'; print_r($vectorFinal); echo '</pre>'; ?><file_sep><?php class AHP{ private static $numAlternativas = 0; private $objetivo; private $criterios = array(); private $alternativas = array(); public function __construct($objetivo = ''){ $this->objetivo = $objetivo; } public function setCriterios($criterios,$padre = null){ if ($padre != null) { if (isset($this->criterios[$padre])) { $this->criterios[$padre]['subcriterios'] = array(); } } else { $this->criterios = array(); } foreach ($criterios as $criterio) { if(!$this->addCriterio($criterio, $padre)) { return false; } } return true; } public function getSubCriterios($padre){ if($padre != null){ if(isset($this->criterios[$padre])){ return $this->criteria[$padre]['subcriteria']; } else{ return false; } } } public function existePadre($padre){ return isset($this->criterios[$padre]); } public function esPadre($criterio){ return isset($this->criteria[$criterio]['subcriteria']); } public function 
getPadre($criterioHijo){ foreach(array_keys($this->criterios) as $criterio){ if($this->esPadre($criterio)){ $subcriterios = $this->getSubCriterios($criterio); if(in_array($criterioHijo,array_keys($subcriterios))){ return $criterio; } } } return false; } public function esHijo($criterioHijo){ foreach(array_keys($this->criterios) as $criterio){ if($this->esPadre($criterio)){ $subcriterios = $this->getSubCriterios($criterio); if(in_array($criterioHijo,array_keys($subcriterios))){ return true; } } } return false; } public function getSubCriteriosPosition($padrePosition){ if($padrePosition != null){ if(isset($this->criterios[$padrePosition])){ return $this->criteria[$padrePosition]['subcriteria']; } else{ return false; } } } public function addCriterio($criterio, $padre = null) { if ($padre != null) { if (isset($this->criterios[$padre])) { $this->criteria[$padre]['subcriteria'][$criterio]['prioridades'] = array( 'local' => 0, 'global' => 0 ); } else { return false; } } else { $this->criterios[$criterio]['prioridades'] = array( 'local' => 0, 'global' => 0 ); } return true; } public function addAlternativa($alternativa){ $this->alternativas[AHP::$numAlternativas] = $alternativa; AHP::$numAlternativas++; } public function setAlternativas($alternativas){ $this->alternativas = $alternativas; AHP::$numAlternativas = count($alternativas); } public function getAlternativas(){ return $this->alternativas; } public function getObjetivo(){ return $this->objetivo; } public function getCriterios(){ return $this->criterios; } public function getMatrizDePrioridad($criterio = null) { $matrix = array(); $criterios = ($criterio != null) ? array_keys($this->getSubCriterios($criterio)) : array_keys($this->criterios); foreach ($criterios as $criterioFila) { foreach ($criterios as $criterioColumna) { if ($criterioFila != $criterioColumna) { if (!isset($matrix[$criterioColumna][$criterioFila])) { $matrix[$criterioFila][$criterioColumna] = 0; } } else{ if (!isset($matrix[$criterioColumna][$criterioFila])) { $matrix[$criterioFila][$criterioColumna] = 1; } } } } return $matrix; } public function getMatrizAlternativa(){ $matrix = array(); $alternativas = array_values($this->alternativas); foreach($alternativas as $alternativaFila ){ foreach($alternativas as $alternativaColumna){ if($alternativaFila != $alternativaColumna){ if(!isset($matrix[$alternativaColumna][$alternativaFila])){ $matrix[$alternativaFila][$alternativaColumna] = 0; } } else{ if(!isset($matrix[$alternativaColumna][$alternativaFila])){ $matrix[$alternativaFila][$alternativaColumna] = 1; } } } } return $matrix; } public function getVectorPromedioSubcriterio($matriz,$criterioPadre,$alternativas=0){ $matrizAux = $matriz; $vectorPromedio = array(); // if($alternativas==0){ $sumaColumna = array(); $subcriterios = array_keys($this->getSubCriterios($criterioPadre)); foreach ($subcriterios as $criterioColumna){ $suma = 0; foreach($subcriterios as $criterioFila){ $suma= $suma + $matrizAux[$criterioFila][$criterioColumna]; } $sumaColumna[$criterioColumna] = $suma; } foreach($subcriterios as $criterioFila){ foreach($subcriterios as $criterioColumna){ $matrizAux[$criterioFila][$criterioColumna] = $matrizAux[$criterioFila][$criterioColumna] / $sumaColumna[$criterioColumna]; } } foreach($subcriterios as $criterioFila){ $vectorPromedio[$criterioFila] = 0; foreach($subcriterios as $criterioColumna){ $vectorPromedio[$criterioFila] = $vectorPromedio[$criterioFila] + $matrizAux[$criterioFila][$criterioColumna]; } $vectorPromedio[$criterioFila] = 
$vectorPromedio[$criterioFila] / count($subcriterios); } // } // else{ // $sumaColumna = array(); // $alternativas = array_values($this->alternativas); // foreach ($alternativas as $alternativaColumna){ // $suma = 0; // foreach($alternativas as $alternativaFila){ // $suma = $suma + $matrizAux[$alternativaFila][$alternativaColumna]; // } // $sumaColumna[$alternativaColumna] = $suma; // } // foreach($alternativas as $alternativaFila){ // foreach ($alternativas as $alternativaColumna) { // $matrizAux[$alternativaFila][$alternativaColumna] = $matrizAux[$alternativaFila][$alternativaColumna] / $sumaColumna[$alternativaColumna]; // } // } // foreach($alternativas as $alternativaFila){ // $vectorPromedio[$alternativaFila] = 0; // foreach ($alternativas as $alternativaColumna) { // $vectorPromedio[$alternativaFila] = $vectorPromedio[$alternativaFila] + $matrizAux[$alternativaFila][$alternativaColumna]; // } // $vectorPromedio[$alternativaFila] = $vectorPromedio[$alternativaFila] / count($alternativas); // } // } return $vectorPromedio; } public function getVectorPromedio($matriz,$alternativas=0){ $matrizAux = $matriz; $vectorPromedio = array(); if($alternativas==0){ $sumaColumna = array(); $criterios = array_keys($this->criterios); foreach ($criterios as $criterioColumna){ $suma = 0; foreach($criterios as $criterioFila){ $suma= $suma + $matrizAux[$criterioFila][$criterioColumna]; } $sumaColumna[$criterioColumna] = $suma; } foreach($criterios as $criterioFila){ foreach($criterios as $criterioColumna){ $matrizAux[$criterioFila][$criterioColumna] = $matrizAux[$criterioFila][$criterioColumna] / $sumaColumna[$criterioColumna]; } } foreach($criterios as $criterioFila){ $vectorPromedio[$criterioFila] = 0; foreach($criterios as $criterioColumna){ $vectorPromedio[$criterioFila] = $vectorPromedio[$criterioFila] + $matrizAux[$criterioFila][$criterioColumna]; } $vectorPromedio[$criterioFila] = $vectorPromedio[$criterioFila] / count($criterios); } } else{ $sumaColumna = array(); $alternativas = array_values($this->alternativas); foreach ($alternativas as $alternativaColumna){ $suma = 0; foreach($alternativas as $alternativaFila){ $suma = $suma + $matrizAux[$alternativaFila][$alternativaColumna]; } $sumaColumna[$alternativaColumna] = $suma; } foreach($alternativas as $alternativaFila){ foreach ($alternativas as $alternativaColumna) { $matrizAux[$alternativaFila][$alternativaColumna] = $matrizAux[$alternativaFila][$alternativaColumna] / $sumaColumna[$alternativaColumna]; } } foreach($alternativas as $alternativaFila){ $vectorPromedio[$alternativaFila] = 0; foreach ($alternativas as $alternativaColumna) { $vectorPromedio[$alternativaFila] = $vectorPromedio[$alternativaFila] + $matrizAux[$alternativaFila][$alternativaColumna]; } $vectorPromedio[$alternativaFila] = $vectorPromedio[$alternativaFila] / count($alternativas); } } return $vectorPromedio; } public function getVectorResultado(){ $vectorResultado = array(); foreach( $this->getAlternativas() as $alternativa){ $vectorResultado[$alternativa] = 0; } return $vectorResultado; } public function getVectorFinal(){ $vectorFinal = array(); foreach( $this->getAlternativas() as $alternativa){ $vectorFinal[$alternativa] = 0; } return $vectorFinal; } } ?><file_sep><!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Bienvenido/a</title> <meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0"> <link href="https://fonts.googleapis.com/css?family=Oswald:400,700" rel="stylesheet"> <link 
rel="stylesheet" type="text/css" href="css/estilos.css"> <link rel="stylesheet" type="text/css" href="style.css"> <link rel="stylesheet" type="text/css" href="css/animate.css"> </head> <body> <?php session_start(); session_destroy(); ?> <div id="particles-js"></div> <header> <div class="contenedor"> <div class="menu"> <div class="logo animated delay-01 bounceInDown">LogotipoHerramienta</div> <nav class="enlaces"> <a href="#" class="active animated delay-02 bounceInDown">Inicio</a> <a href="#" class="animated delay-03 bounceInDown">Trabajos</a> <a href="#" class="animated delay-04 bounceInDown">Contacto</a> </nav> </div> <div class="texto"> <h2 class="titulo animated delay-07 bounceInLeft">Bienvenido Usuario,</h2> <p class="descripcion animated delay-07 bounceInLeft">Esta herramienta está destinada a facilitar el proceso de toma de decisiones.</p> <button class="boton-go animated delay-08 bounceInDown" onclick="comenzar();">¡Haz Click para Comenzar!</button> </div> </div> </header> <div class="container-criterios"> <div class="contenedor"> <div class="titulo"> <h1>¡Primer Paso!</h1> <h2>Ingresa los Criterios y Sub-Criterios a Tener en Cuenta</h2> </div> <div class="contenedor-form" id="contenedor-criterios"> <form action="generar-comparaciones.php" class="formulario-ahp" name="formulario_ahp" method="post"> <div class="input-group" id="input1"> <input type="text" name="objetivo" id="objetivo"> <label for="objetivo" class="label">Objetivo</label> </div> <div class="input-group" id="input2"> <input type="text" name="criterio1" id="criterio1"> <label for="criterio1" class="label">Criterio 1</label> <div class="agrega-subCriterio agrega-subCriterio1" onclick="agregaSubCriterio(1);"><span class="icon-plus"></div> <div id="subcriterios"></div> </div> <div class="input-group" id="criterios2"> <input type="text" name="criterio2" id="criterio2"> <label for="criterio2" class="label">Criterio 2</label> <div class="agrega-subCriterio agrega-subCriterio2" onclick="agregaSubCriterio(2);"><span class="icon-plus"></div> <div id="subcriterios"></div> </div> <div class="input-group" id="criterios3"> <input type="text" name="criterio3" id="criterio3"> <label for="criterio3" class="label">Criterio 3</label> <div class="agrega-subCriterio agrega-subCriterio3" onclick="agregaSubCriterio(3);"><span class="icon-plus"></div> <div id="subcriterios"></div> </div> <div class="input-group" id="input5"> <input type="text" name="alternativa1" id="alternativa1"> <label for="alternativa1" class="label">Alternativa 1</label> </div> <div class="input-group" id="alternativas2"> <input type="text" name="alternativa2" id="alternativa2"> <label for="alternativa2" class="label">Alternativa 2</label> </div> <div class="input-group" id="alternativas3"> <input type="text" name="alternativa3" id="alternativa3"> <label for="alternativa3" class="label">Alternativa 3</label> </div> <div class="submit-group"> <input type="submit" name="pair-waise" value="Comparaciones Pair-Waise"> <input type="submit" name="matrices" value="Comparaciones con Matrices"> </div> </form> <div class="contenedor-botones"> <button class="btn-alternativa" id="boton1"><span class="icon-plus"></span> Alternativa</button> <button class="btn-criterios" id="boton2"><span class="icon-plus"></span> Criterio</button> <button class="btn-alternativa rojo" id="boton3"><span class="icon-minus"></span> Alternativa</button> <button class="btn-criterios rojo" id="boton4"><span class="icon-minus"></span> Criterio</button> </div> </div> </div> </div> <footer> <div 
class="contenedor"> <div class="menu-foot"> <nav> <a href="#">Inicio</a> <a href="#">Trabajos</a> <a href="#">Contacto</a> </nav> </div> <div class="copyright"> <h1 class="titulo">Copyright - <NAME> @2017</h1> </div> </div> </footer> <script type="text/javascript" src="js/jquery-3.1.1.min.js"></script> <script type="text/javascript" src="js/jquery.waypoints.min.js"></script> <script type="text/javascript" src="js/particles.js"></script> <script type="text/javascript" src="js/particulas.js"></script> <script type="text/javascript" src="js/funciones.js"></script> <script type="text/javascript" src="js/efectos.js"></script> <script src="js/formulario.js"></script> <script src="js/vis.min.js"></script> <script type="text/javascript"> function comenzar(){ $('html, body').animate({ scrollTop: 1080 },1000); } </script> </body> </html><file_sep><!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Bienvenido/a</title> <meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0"> <link href="https://fonts.googleapis.com/css?family=Oswald:400,700" rel="stylesheet"> <link rel="stylesheet" type="text/css" href="css/estilos.css"> <link rel="stylesheet" type="text/css" href="style.css"> </head> <body class="body-generar-comparaciones"> <div id="particles-js"></div> <!-- <header class="header-barra"> <div class="contenedor"> <div class="menu-barra"> <nav class="enlaces"> <a href="#" class="active animated delay-02 bounceInDown">Comparaciones Pair-Waise</a> <a href="#" class="animated delay-03 bounceInDown">Comparaciones por Matrices</a> </nav> </div> </div> </header> --> <?php extract($_POST); session_start(); include "ahp.php"; $ahp = new AHP($objetivo); $nroCriterios=1; foreach($_POST as $key => $value){ if(substr_count($key,'criterio')==1){ if(count($value)>0){ $ahp->addCriterio($value); $arreglo_criterios_index_num[$nroCriterios] = $value; $nroCriterios++; } } else if(substr_count($key,'sub')==1){ $padreNombre = $arreglo_criterios_index_num[substr($key,(strpos($key,'sub')+3),1)]; if($ahp->existePadre($padreNombre)){ $ahp->addCriterio($value,$padreNombre); } } else if(substr_count($key,'alternativa')==1){ if(count($value)>0){ $ahp->addAlternativa($value); } } } $matricesDeComparacion = array(); $criterios = array_keys($ahp->getCriterios()); $alternativas = $ahp->getAlternativas(); $matrizDePrioridad = $ahp->getMatrizDePrioridad(); foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); foreach($subcriterios as $subcriterio => $prioridad){ $matricesDeComparacion[$subcriterio] = $ahp->getMatrizAlternativa(); } } else{ $matricesDeComparacion[$criterio] = $ahp->getMatrizAlternativa(); } } if(!isset($_SESSION["arreglo"])){ $_SESSION['arreglo'] = array($objetivo,$criterios,$alternativas); foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $_SESSION[$criterio] = $ahp->getSubCriterios($criterio); } } } if(isset($_POST['pair-waise'])){ ?> <div class="comparaciones"> <div class="contenedor"> <form action="generar-resultados.php" method="post"> <div class="criterios-criterios"> <?php $contador = 1; foreach ($criterios as $fila){ foreach($criterios as $columna){ if($fila != $columna){ if(isset($matrizDePrioridad[$fila][$columna])){ ?> <div class="group"> <?php echo "<h1>".$fila." vs ".$columna."</h1>" ?> <?php echo "<p> Acorde a la evaluacion que desea realizar. ¿Cuanto considera mas importante el criterio ".$fila." en comparacion al criterio ".$columna. "? 
</p>" ?> <div class="input-group radio"> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "mas9".$contador ?>" value="9"> <label class="mas9" for="<?php echo "mas9".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "mas7".$contador ?>" value="7"> <label class="mas7" for="<?php echo "mas7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "mas5".$contador ?>" value="5"> <label class="mas5" for="<?php echo "mas5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "mas3".$contador ?>" value="3"> <label class="mas3" for="<?php echo "mas3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "mas1".$contador ?>" value="1" checked> <label class="mas0" for="<?php echo "mas1".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "menos3".$contador ?>" value="1/3"> <label class="menos3" for="<?php echo "menos3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "menos5".$contador ?>" value="1/5"> <label class="menos5" for="<?php echo "menos5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "menos7".$contador ?>" value="1/7"> <label class="menos7" for="<?php echo "menos7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$fila*$columna" ?>" id="<?php echo "menos9".$contador ?>" value="1/9"> <label class="menos9" for="<?php echo "menos9".$contador ?>"></label> </div> <div class="contenedor-numeros"> <img src="img/numeros.png"> </div> <!-- <p class="numeros"><span class="mas9">+9</span><span class="mas8">+8</span><span class="mas7">+7</span><span class="mas6">+6</span> <span class="mas5">+5</span><span class="mas4">+4</span><span class="mas3">+3</span><span class="mas2">+2</span> <span class="mas1">+1</span><span class="mas0">+0</span><span class="menos1">-1</span><span class="menos2">-2</span> <span class="menos3">-3</span><span class="menos4">-4</span><span class="menos5">-5</span><span class="menos6">-6</span> <span class="menos7">-7</span><span class="menos8">-8</span><span class="menos9">-9</span></p> --> </div> <?php $contador++; } } } } ?> </div> <div class="criterios-criterios subcriterios"> <?php foreach ($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); // $contador=1; $matrizdePrioridadAux = $ahp->getMatrizDePrioridad($criterio); foreach($subcriterios as $fila => $prioridad){ foreach($subcriterios as $columna => $prioridad){ if(isset($matrizdePrioridadAux[$fila][$columna])){ if($fila!=$columna){?> <div class="group"> <?php echo "<h1>".$fila." vs ".$columna."</h1>" ?> <?php echo "<p> Acorde a la evaluacion que desea realizar. ¿Cuanto considera mas importante el subcriterio ".$fila." en comparacion al subcriterio ".$columna. "? 
</p>" ?> <div class="input-group radio"> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "mas9".$contador ?>" value="9"> <label class="mas9" for="<?php echo "mas9".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "mas7".$contador ?>" value="7"> <label class="mas7" for="<?php echo "mas7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "mas5".$contador ?>" value="5"> <label class="mas5" for="<?php echo "mas5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "mas3".$contador ?>" value="3"> <label class="mas3" for="<?php echo "mas3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "mas1".$contador ?>" value="1" checked> <label class="mas0" for="<?php echo "mas1".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "menos3".$contador ?>" value="1/3"> <label class="menos3" for="<?php echo "menos3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "menos5".$contador ?>" value="1/5"> <label class="menos5" for="<?php echo "menos5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "menos7".$contador ?>" value="1/7"> <label class="menos7" for="<?php echo "menos7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio#$fila*$columna" ?>" id="<?php echo "menos9".$contador ?>" value="1/9"> <label class="menos9" for="<?php echo "menos9".$contador ?>"></label> </div> <div class="contenedor-numeros"> <img src="img/numeros.png"> </div> </div> <?php $contador++;} } } } } }?> </div> <?php foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = $ahp->getSubCriterios($criterio); foreach($subcriterios as $subcriterio => $prioridad){ foreach($alternativas as $fila){ foreach($alternativas as $columna){ if($fila != $columna){ if(isset($matricesDeComparacion[$subcriterio][$fila][$columna])){ ?> <div class="criterios-criterios-alternativa"> <div class="group"> <?php echo "<h1>".$fila." vs ".$columna."</h1>" ?> <?php echo "<h2>Respecto a ".$subcriterio." (Criterio Padre: ".$criterio.")</h2>" ?> <?php echo "<p>Cuanto considera mas importante la alternativa ".$fila." con respecto a la alternativa ".$columna. 
" teniendo en cuenta el subcriterio ".$subcriterio."</p> "?> <div class="input-group radio"> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "mas9".$contador ?>" value="9"> <label class="mas9" for="<?php echo "mas9".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "mas7".$contador ?>" value="7"> <label class="mas7" for="<?php echo "mas7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "mas5".$contador ?>" value="5"> <label class="mas5" for="<?php echo "mas5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "mas3".$contador ?>" value="3"> <label class="mas3" for="<?php echo "mas3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "mas1".$contador ?>" value="1" checked> <label class="mas0" for="<?php echo "mas1".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "menos3".$contador ?>" value="1/3"> <label class="menos3" for="<?php echo "menos3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "menos5".$contador ?>" value="1/5"> <label class="menos5" for="<?php echo "menos5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$subcriterio-$fila*$columna" ?>" id="<?php echo "menos7".$contador ?>" value="1/7"> <label class="menos7" for="<?php echo "menos7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "menos9".$contador ?>" value="1/9"> <label class="menos9" for="<?php echo "menos9".$contador ?>"></label> </div> <div class="contenedor-numeros"> <img src="img/numeros.png"> </div> </div> </div> <?php $contador++; } } } } } } foreach($alternativas as $fila){ foreach($alternativas as $columna){ if($fila != $columna){ if(isset($matricesDeComparacion[$criterio][$fila][$columna])){ ?> <div class="criterios-criterios-alternativa"> <div class="group"> <?php echo "<h1>".$fila." vs ".$columna."</h1>" ?> <?php echo "<h2>Respecto a ".$criterio."</h2>" ?> <?php echo "<p>Cuanto considera mas importante la alternativa ".$fila." con respecto a la alternativa ".$columna. 
" teniendo en cuenta el criterio ".$criterio."</p> "?> <div class="input-group radio"> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "mas9".$contador ?>" value="9"> <label class="mas9" for="<?php echo "mas9".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "mas7".$contador ?>" value="7"> <label class="mas7" for="<?php echo "mas7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "mas5".$contador ?>" value="5"> <label class="mas5" for="<?php echo "mas5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "mas3".$contador ?>" value="3"> <label class="mas3" for="<?php echo "mas3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "mas1".$contador ?>" value="1" checked> <label class="mas0" for="<?php echo "mas1".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "menos3".$contador ?>" value="1/3"> <label class="menos3" for="<?php echo "menos3".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "menos5".$contador ?>" value="1/5"> <label class="menos5" for="<?php echo "menos5".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "menos7".$contador ?>" value="1/7"> <label class="menos7" for="<?php echo "menos7".$contador ?>"></label> <input type="radio" onclick="semovio()" name="<?php echo "$criterio-$fila*$columna" ?>" id="<?php echo "menos9".$contador ?>" value="1/9"> <label class="menos9" for="<?php echo "menos9".$contador ?>"></label> </div> <div class="contenedor-numeros"> <img src="img/numeros.png"> </div> </div> </div> <?php $contador++; } } } } } ?> <input type="submit" name="accion" value="Guardar"> </form> </div> </div> <?php }else{ ?> <div class="comparaciones-matrices"> <div class="contenedor"> <form action="generar-resultados.php" class="matrices" name="matrices" method="post"> <div class="comparacion-m"> <h1 class="titulo-comparacion-m">Criterios vs Criterios</h1> <p class="descripcion-comparacion-m">En esta matriz se guardaran los resultados, segun tus preferencias.</p> <div class="criterios-criterios-matrix"> <TABLE> <?php for($fila=0;$fila<=count($criterios);$fila++){ echo "<tr>"; for($columna=0;$columna<=count($criterios);$columna++){ if($fila==0 and $columna==0){ echo "<th class=\"primero\"></th>"; } else if($fila==0 and $columna>0){ $index = $columna -1; echo "<th>$criterios[$index]</th>"; } else if($fila>0 and $columna==0){ $index = $fila -1; echo "<th>$criterios[$index]</th>"; } else if($fila == $columna){ echo "<td class=\"diagonal\"><input type=\"text\" value=\"1\" name=\"$fila"."$columna\" disabled></td>"; } else{ $indexFila = $fila-1; $indexColumna = $columna-1; echo "<td><input onblur=\"cambiaValor($fila,$columna)\" type=\"text\" value=\"1\" name=\"$criterios[$indexFila]"."*"."$criterios[$indexColumna]\" id=\"$fila"."$columna\"></td>"; } } echo "</tr>"; }?> </TABLE> <button>Continuar <span class="icon-arrow-right"></span></button> </div> </div> <?php foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ ?> <div class="comparacion-m"> <h1 class="titulo-comparacion-m">Subcriterios vs 
Subcriterios</h1> <p class="descripcion-comparacion-m">Criterio Padre: <?php echo $criterio; ?></p> <div class="criterios-criterios-matrix subcriterio"> <?php $subcriterios = array_keys($ahp->getSubCriterios($criterio)); ?> <TABLE> <?php for($fila=0;$fila<=count($subcriterios);$fila++){ echo "<tr>"; for($columna=0;$columna<=count($subcriterios);$columna++){ if($fila==0 and $columna==0){ echo "<th class=\"primero\"></th>"; } else if($fila==0 and $columna>0){ $index = $columna -1; echo "<th>$subcriterios[$index]</th>"; } else if($fila>0 and $columna==0){ $index = $fila -1; echo "<th>$subcriterios[$index]</th>"; } else if($fila == $columna){ echo "<td class=\"diagonal\"><input type=\"text\" value=\"1\" name=\"$fila"."$columna\" disabled></td>"; } else{ $indexFila = $fila-1; $indexColumna = $columna-1; echo "<td><input onblur=\"cambiaValorSubcriterio($fila,$columna,'$criterio')\" type=\"text\" value=\"1\" name=\"$criterio#"."$subcriterios[$indexFila]"."*"."$subcriterios[$indexColumna]\" id=\"subcriterio"."$criterio"."$fila"."$columna\"></td>"; } } echo "</tr>"; }?> </TABLE> </div></div> <?php } } ?> <div class="comparacion-m-many"> <h1 class="titulo-comparacion-m">Comparación entre Alternativas</h1> <?php foreach($criterios as $criterio){ if($ahp->esPadre($criterio)){ $subcriterios = array_keys($ahp->getSubCriterios($criterio)); foreach($subcriterios as $subcriterio){ ?> <p class="descripcion-comparacion-m">Comparacion respecto al criterio: <?php echo $subcriterio; ?> (Criterio Padre: <?php echo $criterio; ?>) </p> <div class="alternativas-alternativas-matrix"> <?php echo "<table>"; for($fila=0;$fila<=count($alternativas);$fila++){ echo "<tr>"; for($columna=0;$columna<=count($alternativas);$columna++){ if($fila==0 and $columna==0){ echo "<th class=\"primero\"></th>"; } else if($fila==0 and $columna>0){ $index = $columna -1; echo "<th>$alternativas[$index]</th>"; } else if($fila>0 and $columna==0){ $index = $fila -1; echo "<th>$alternativas[$index]</th>"; } else if($fila==$columna){ echo "<td class=\"diagonal\"><input type=\"text\" value=\"1\" name=\"$subcriterio"."$fila"."$columna\" disabled></td>"; } else{ $indexColumna = $columna -1; $indexFila = $fila -1; echo "<td><input onblur=\"cambiaValorAlt($fila,$columna,'$subcriterio')\" type=\"text\" value=\"1\" name=\"$subcriterio"."-"."$alternativas[$indexFila]"."*"."$alternativas[$indexColumna]\" id=\"$subcriterio"."$fila"."$columna\" ></td>"; } } echo "</tr>"; } echo "</table>"; ?> </div> <?php } } else{ ?> <p class="descripcion-comparacion-m">Comparacion respecto al criterio: <?php echo $criterio; ?> </p> <div class="alternativas-alternativas-matrix"> <?php echo "<table>"; for($fila=0;$fila<=count($alternativas);$fila++){ echo "<tr>"; for($columna=0;$columna<=count($alternativas);$columna++){ if($fila==0 and $columna==0){ echo "<th class=\"primero\"></th>"; } else if($fila==0 and $columna>0){ $index = $columna -1; echo "<th>$alternativas[$index]</th>"; } else if($fila>0 and $columna==0){ $index = $fila -1; echo "<th>$alternativas[$index]</th>"; } else if($fila==$columna){ echo "<td class=\"diagonal\"><input type=\"text\" value=\"1\" name=\"$criterio"."$fila"."$columna\" disabled></td>"; } else{ $indexColumna = $columna -1; $indexFila = $fila -1; echo "<td><input onblur=\"cambiaValorAlt($fila,$columna,'$criterio')\" type=\"text\" value=\"1\" name=\"$criterio"."-"."$alternativas[$indexFila]"."*"."$alternativas[$indexColumna]\" id=\"$criterio"."$fila"."$columna\" ></td>"; } } echo "</tr>"; } echo "</table>"; ?> </div> <?php } } ?> </div> </form> 
</div> </div> <?php } ?> <script type="text/javascript" src="js/jquery-3.1.1.min.js"></script> <script type="text/javascript" src="js/particles.js"></script> <script type="text/javascript" src="js/particulas.js"></script> <script type="text/javascript" src="js/control-matrices.js"></script> <script type="text/javascript"> function semovio(){ $('html, body').animate({ scrollTop: $(document).scrollTop() + 768 },500); } </script> </body> </html><file_sep>$("#contenedor-criterios").waypoint(function() { $("#input1").addClass("animated delay-01 bounceInLeft") $("#input2").addClass("animated delay-02 bounceInLeft") $("#criterios2").addClass("animated delay-03 bounceInLeft") $("#criterios3").addClass("animated delay-04 bounceInLeft") $("#input5").addClass("animated delay-05 bounceInLeft") $("#alternativas2").addClass("animated delay-06 bounceInLeft") $("#alternativas3").addClass("animated delay-07 bounceInLeft") $("#boton1").addClass("animated delay-01 bounceInRight") $("#boton2").addClass("animated delay-02 bounceInRight") $("#boton3").addClass("animated delay-03 bounceInRight") $("#boton4").addClass("animated delay-04 bounceInRight") }, { offset: '100%'});
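The priority step used throughout this tool (getVectorPromedio and getVectorPromedioSubcriterio) is the usual AHP column-normalization approximation: divide every entry of the pairwise matrix by its column sum, then average each row; generar-resultados.php then combines the results as weight-times-score sums. A minimal sketch of that arithmetic in Python — the 3x3 matrix is hypothetical example data, not values taken from the repo:

# Sketch of the column-normalize / row-average step behind getVectorPromedio().
matrix = [
    [1.0, 3.0, 5.0],    # criterio 1 compared against criterios 1..3
    [1/3, 1.0, 2.0],    # criterio 2
    [1/5, 1/2, 1.0],    # criterio 3
]
n = len(matrix)
col_sums = [sum(row[j] for row in matrix) for j in range(n)]

# Each entry divided by its column sum, then each row averaged -> local priority.
weights = [sum(matrix[i][j] / col_sums[j] for j in range(n)) / n for i in range(n)]

print(weights)       # approximate priorities, roughly [0.65, 0.23, 0.12]
print(sum(weights))  # ~1.0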
ebd09e15b80439ca9f3189571a1ccf85c93de527
[ "JavaScript", "PHP" ]
6
PHP
mariniayrtond/toolahp
86a996c641620ffe646cd0c3410cb4080c88c665
6d3e80e410efd3769915febe8a2f2132740ff857
refs/heads/master
<repo_name>flyberson/progeksamentest2<file_sep>/php/index.php <?php session_start(); include 'requireLogin.php'; ?> <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Title</title> <script src="javascript/js1.js"></script> <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script> </head> <body> <nav> <a href=php/admin.php>admin</a> | <a href="php/startsiden.php">startsiden</a> | <a href="index.html">index</a> | </nav> <form action="php/insert.php" method="get" id="form1"> <p>Title</p> <input id="title" type="text" name="title"> <p>url</p> <input id="url" type="text" name="url"> <p>Kategori</p> <input id="Kategori" type="text" name="kategori"> <p>Submit</p> <input type="submit"> <?php include 'databaseconnection.php';?> </form> </body> </html><file_sep>/javascript/js1.js $(document).ready(function () { //start(); }) function start() { var text = $("#title").val(); var text = $("#url").val(); var text = $("#Kategori").val(); }<file_sep>/php/admin.php <html> <header><title>This is title</title> <link rel="stylesheet" href="../css/styles.css"></header> <body> <?php if ($_GET["password"]!="12"){ exit("du skal indtase password"); } $int = 1; include 'databaseconnection.php'; if ($result->num_rows > 0) { // output data of each row echo "<table> <tr> <td> Title </td> <td> url </td> <td>kategori</td> <td>dato</td> <td>delete</td> </tr>"; while ($row = $result->fetch_assoc()) { echo "<tr>"; echo "<td>"; echo $row["title"]; echo "</td>"; echo "<td>"; echo $row["url"]; echo "</td>"; echo "<td>"; echo $row["kategori"]; echo "</td>"; echo "<td>"; echo $row["date"]; echo "</td>"; echo "<td>"; echo "<a href=delete.php?delete=".$row["id"]."> delete</a>"; echo "</td>"; $int++; } echo "</table>"; } ?> </body> </html><file_sep>/php/delete.php <html> <header><title>This is title</title> <link rel="stylesheet" href="../css/styles.css"></header> <body> <?php $delete= $_GET["delete"]; $servername = "localhost"; $username = "root"; $password = "<PASSWORD>"; $dbname = "startsiden"; // Create connection $conn = new mysqli($servername, $username, $password,$dbname); // Check connection if ($conn->connect_error) { die("Connection failed: " . $conn->connect_error); } echo "Connected successfully"; if ($conn->connect_error) { die("Connection failed: " . $conn->connect_error); } $sql = "DELETE FROM linklist WHERE id=".$delete; $result = $conn->query($sql); if ($conn->query($sql) === TRUE) { echo "Record deleted successfully"; } else { echo "Error deleting record: " . 
$conn->error; } ?> <form action="admin.php"> <input type="submit" value="return" /> </form> </body> </html> <file_sep>/php/getmicrolink.php <?php // debugging curl example /* try { $ch = curl_init('https://api.microlink.io?url=https://vimeo.com/188175573'); if (FALSE === $ch) throw new Exception('failed to initialize'); curl_setopt($ch,CURLOPT_RETURNTRANSFER,1); curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false); $content = curl_exec($ch); if (FALSE === $content) throw new Exception(curl_error($ch), curl_errno($ch)); // ...process $content now } catch(Exception $e) { trigger_error(sprintf( 'Curl failed with error #%d: %s', $e->getCode(), $e->getMessage()), E_USER_ERROR); } echo $content; ?>*/ function curlmicro($url){ $ch = curl_init('https://api.microlink.io?url='.$url); curl_setopt($ch,CURLOPT_RETURNTRANSFER,1); curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false); $result = curl_exec($ch); $resultj = json_decode($result); $imgurl= "https://cdn.browshot.com/static/images/not-found.png"; // object json $object->key->key->value; if(isset($resultj->data->logo->url)){ $imgurl = $resultj->data->logo->url;} return $imgurl; } <file_sep>/php/createTable.php <?php //header example $sql = "create table if not EXISTS linklist (title VARCHAR(30),url VARCHAR(30),kategori VARCHAR (30),dato DATE )"; $conn->query($sql); $sql = "INSERT INTO linklist (title, url, kategori,dato) VALUES ($titledata, $urldata, $kategoridata,CURRENT_DATE())"; $conn->query($sql); header("Location:http://localhost:63342/progeksamentest2/php/startsiden.php"); exit;<file_sep>/php/api.php <?php include 'databaseconnection.php'; $array = array(); $count=0; $sql = "SELECT * FROM linklist"; $result = $conn->query($sql); while ($row = $result->fetch_assoc()) { if(!isset($myObj)){ $myObj= new stdClass();} $myObj->title=$row["title"]; $myObj->url=$row["url"]; $myObj->kategori=$row["kategori"]; $myObj->date=$row["dato"]; $myJSON = json_encode($myObj); $array[$count]=$myJSON; //"Title: ".$row["title"]."Kategori: ".$row["kategori"]."url: ".$row["url"]."date: ".$row["date"]; $count++; } $rand = rand(0,count($array)-1); echo $array[$rand]; //echo $array[1]; ?><file_sep>/php/createDatabase.php <?php try{ $conn = new mysqli($servername, $username, $password); $sql = "Create Database if not Exists startsiden;"; if ($conn->query($sql) === TRUE) { echo "Database created successfully <br>"; } else { echo "Database not created: " . 
$conn->error; } $conn->close(); $conn = new mysqli($servername, $username, $password,$dbname); }catch (mysqli_sql_exception $e){ debug_print_backtrace(); } <file_sep>/php/startsiden.php <html> <header><title>This is title</title> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous"> <link rel="stylesheet" href="https://cdn.datatables.net/1.10.16/css/jquery.dataTables.min.css"> <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script> <script src="https://cdn.datatables.net/1.10.16/js/jquery.dataTables.min.js"></script> <script src="https://cdn.datatables.net/1.10.16/js/dataTables.bootstrap4.min.js"></script> <script src="/javascript/js3.js" ></script> <body> <h1>Startsiden</h1> <button onclick="getpoke()"></button> <?php include 'databaseconnection.php'; include 'getmicrolink.php'; $sql = "SELECT * FROM linklist"; $result = $conn->query($sql); if ($result->num_rows > 0) { // output data of each row ?> <table id='table1' class='table table-striped table-hover table-bordered' width="100%" cellspacing=""> <thead> <tr> <td> Title </td> <td> url </td> <td>kategori</td> <td>dato</td> </tr></thead> <tbody> <?php while ($row = $result->fetch_assoc()) { $output =curlmicro($row["url"]); echo "<tr>"; echo "<td>"; echo $row["title"]; echo "</td>"; echo "<td>"; echo $row["url"]; echo "</td>"; echo "<td>"; echo $row["kategori"]; echo "</td>"; echo "<td>"; echo $row["dato"]; echo "</td>"; echo "<td>"; echo "<img src='$output' width='120' height='80'> </img>"; echo "</td>"; echo "</tr>"; } } ?> </tbody> </table> <script>$('#table1').DataTable();</script> </body> </html><file_sep>/javascript/js3.js function getpoke(){ $('#table1 tr').each(function() { var userinput = $(this).find("td").eq(0).html(); }); alert("started"); alert(userinput); var Json= "https://api.microlink.io?url="+userinput; $.getJSON(Json,function (data) { //alert(JSON.stringify(data,null," ")); var pokename = data.name; var poketype = data.types[0].name; alert(pokename+poketype) $("#pokeholder").append(pokename+poketype); }); } $(document).ready(function() { $('#table1').DataTable(); } );<file_sep>/php/insert.php <?php include 'databaseconnection.php'; $titledata ="'".$_GET["title"]."'"; $urldata="'".$_GET["url"]."'"; $kategoridata = "'".$_GET["kategori"]."'"; $sql = "INSERT INTO linklist (title, url, kategori,dato) VALUES ($titledata, $urldata, $kategoridata,CURRENT_DATE())"; if ($conn->query($sql) === TRUE) { echo "New record created successfully"; } else { include 'createTable.php'; if ( $conn->query($sql)==false){ echo "Error: " . $sql . "<br>" . $conn->error;} } ?>
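curlmicro() in getmicrolink.php asks api.microlink.io for metadata about a link and falls back to a placeholder image when no logo URL comes back. A rough Python equivalent of that lookup — the requests library is assumed to be available, and the response shape is inferred only from the PHP accessor $resultj->data->logo->url:

# Rough equivalent of curlmicro(): fetch metadata for a URL and return the
# site's logo URL, or the same fallback image the PHP code uses.
import requests

FALLBACK = "https://cdn.browshot.com/static/images/not-found.png"

def get_logo_url(url):
    resp = requests.get("https://api.microlink.io", params={"url": url}, timeout=10)
    payload = resp.json()
    logo = (payload.get("data") or {}).get("logo") or {}
    return logo.get("url", FALLBACK)

print(get_logo_url("https://vimeo.com/188175573"))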
855cc442a18bcec57efcdcc3c3f5a6a692ebf8f0
[ "JavaScript", "PHP" ]
11
PHP
flyberson/progeksamentest2
10e7711ec19d9bfc30cdb904bd695bc96a03f866
936fe50d42e19ba6b3e8bb2d197f03c9e29fcc63
refs/heads/master
<file_sep>module.exports = { metadata: { family: "btleHeartRateMonitor", plugin: "btleHeartRateMonitor", label: "BTLE Heart Rate Monitor", manufacturer: "Generic", tangible: true, discoverable: true, state: [{ id: "heartRate", label: "Heart Rate", type: { id: "number" }, min: 0, max: 250 }, { id: "bodyLocation", label: "Body Location", type: { id: "string" } }, { id: "batteryLevel", label: "Battery Level", type: { id: "number" }, min: 0, max: 100 }], actorTypes: [], sensorTypes: [], services: [], configuration: [{ id: "criticalThresholdLow", label: "Critical Threshold (Low)", type: { id: "number" }, defaultValue: 40 }, { id: "warningThresholdLow", label: "Warning Threshold (Low)", type: { id: "number" }, defaultValue: 50 }, { id: "warningThresholdHigh", label: "Warning Threshold (High)", type: { id: "number" }, defaultValue: 170 }, { id: "criticalThresholdHigh", label: "Critical Threshold High", type: { id: "number" }, defaultValue: 180 }] }, create: function (device) { return new BtleHeartRateMonitor(); }, discovery: function (options) { var discovery = new BtleHeartRateMonitorDiscovery(); discovery.options = options; return discovery; } }; var q = require('q'); var noble; var GUID_PATTERN = /([a-f0-9]{8})-?([a-f0-9]{4})-?([a-f0-9]{4})-?([a-f0-9]{4})-?([a-f0-9]{12})/; var GUID_REPLACEMENT = "$1-$2-$3-$4-$5"; var HEART_RATE_SERVICE_UUID = "f000aa10-0451-4000-b000-000000000000"; // TODO Correct values var HEART_RATE_DATA_UUID = /^f000aa11-0451-4000-b000-000000000000$/; // TODO Correct values var HEART_RATE_CONFIGURATION_UUID = "f000aa12-0451-4000-b000-000000000000"; // TODO Correct values var HEART_RATE_PERIOD_UUID = "f000aa13-0451-4000-b000-000000000000"; // TODO Correct values var HR_CHARACTERISTIC_BODY_SENSOR_UUID = '2a38'; var HR_CHARACTERISTIC_HEART_RATE_UUID = '2a37'; var BATTERY_LEVEL_SERVICE_UUID = "f000aa10-0451-4000-b000-000000000000"; // TODO Correct values var BATTERY_LEVEL_DATA_UUID = /^f000aa11-0451-4000-b000-000000000000$/; // TODO Correct values var BATTERY_LEVEL_CONF_UUID = "f000aa12-0451-4000-b000-000000000000"; // TODO Correct values var BATTERY_LEVEL_PERIOD_UUID = "f000aa13-0451-4000-b000-000000000000"; // TODO Correct values var BATTERY_SERVICE_UUID = '180f'; var BATTERY_CHARACTERISTIC_LEVEL_UUID = '2a19'; var HEART_RATE_VALUE_FORMAT = 1; var BODY_LOCATION_STRINGS = ['Other', 'Chest', 'Wrist', 'Finger', 'Hand', 'Ear Lobe', 'Foot']; function BtleHeartRateMonitorDiscovery() { /** * * @param options */ BtleHeartRateMonitorDiscovery.prototype.start = function () { if (this.isSimulated()) { } else { if (!noble) { noble = require('noble'); } noble.on('discover', function (peripheral) { if (peripheral.advertisement.localName && peripheral.advertisement.localName.indexOf('@@@@') === 0) { console.log("Found Heart Rate Monitor " + peripheral.advertisement.localName); console.log("\tUUID " + peripheral.uuid); if (peripheral.uuid) { console.log("GUID " + peripheral.uuid.replace(GUID_PATTERN, GUID_REPLACEMENT)); } var heartRateMonitor = new BtleHeartRateMonitor(); heartRateMonitor.peripheral = peripheral; heartRateMonitor.uuid = peripheral.uuid; this.advertiseDevice(heartRateMonitor); } }.bind(this)); noble.startScanning(); } }; /** * * @param options */ BtleHeartRateMonitorDiscovery.prototype.stop = function () { if (this.isSimulated()) { } else { noble.stopScanning(); } }; } /** * */ function BtleHeartRateMonitor() { /** * */ BtleHeartRateMonitor.prototype.start = function () { var deferred = q.defer(); this.operationalState = { status: 'PENDING', message: 'Waiting for 
initialization...' }; this.publishOperationalStateChange(); this.state = { heartRate: null, bodyLocation: null, batteryLevel: null }; if (this.isSimulated()) { setInterval(function () { this.state.heartRate = 120 + Math.floor((Math.random() * 20)); this.state.batteryLevel = 30; this.state.bodyLocation = "Wrist"; this.publishStateChange(); }.bind(this), 1000); this.operationalState = { status: 'OK', message: 'BTLE Heart Rate Monitor successfully initialized' } this.publishOperationalStateChange(); deferred.resolve(); } else { if (!noble) { noble = require('noble'); } if (this.peripheral) { this.connect(); deferred.resolve(); } else { console.log("Start Heart Rate Monitor Discovery"); noble.on('discover', function (peripheral) { console.log(peripheral.advertisement.localName); if (peripheral.advertisement.localName && peripheral.uuid === this.uuid) { console.log("\tFound configured Heart Rate Monitor " + peripheral.advertisement.localName); console.log("\tUUID " + peripheral.uuid); if (peripheral.uuid) { console.log("\tGUID " + peripheral.uuid.replace(GUID_PATTERN, GUID_REPLACEMENT)); } this.peripheral = peripheral; noble.stopScanning(); this.connect(); } }.bind(this)); noble.startScanning(); console.log("\tScanning started"); deferred.resolve(); } } return deferred.promise; }; /** * */ BtleHeartRateMonitor.prototype.connect = function () { this.peripheral.connect(function (error) { if (error) { this.operationalState = { status: 'ERROR', message: 'BTLE Heart Rate Monitor connection error' } this.publishOperationalStateChange(); console.log(error); } else { peripheral.discoverAllServicesAndCharacteristics(function (error, services, characteristics) { for (var n = 0; n < services.length; ++n) { var guid = services[n].uuid.replace(Constants.GUID_PATTERN, Constants.GUID_REPLACEMENT); switch (guid) { case HEART_RATE_SERVICE_UUID: this.heartRateService = services[n]; break; case BATTERY_LEVEL_SERVICE_UUID: this.heartRateService = services[n]; break; } } for (var n = 0; n < characteristics.length; ++n) { var guid = characteristics[n].uuid.replace(Constants.GUID_PATTERN, Constants.GUID_REPLACEMENT); switch (guid) { case HEART_RATE_DATA_UUID: this.heartRateDataCharacteristic = characteristics[n]; break; case BATTERY_LEVEL_DATA_UUID: this.batteryLevelCharacteristic = characteristics[n]; break; case HR_CHARACTERISTIC_BODY_SENSOR_UUID: this.bodyLocationCharacteristic = characteristics[n]; break; } } this.heartRateDataCharacteristic.notify(true); this.heartRateDataCharacteristic.read(function (error, data) { if (error) { } var flags = data.readUInt8(0); if (!((flags & HEART_RATE_VALUE_FORMAT) != HEART_RATE_VALUE_FORMAT)) { return; } this.state.heartRate = data.readUInt8(1); this.getBodySensorData(function (error, bodyLocation) { if (error) { console.error(error); } else { this.state.bodyLocation = bodyLocation; this.publishStateChange(); } }.bind(this)); }); this.batteryLevelCharacteristic.notify(true) this.batteryLevelCharacteristic.read(function (error, data) { if (error) { console.error(error); } else { this.state.batteryLevel = data.readUInt8(0); this.publishStateChange(); } }); }.bind(this)); this.operationalState = { status: 'OK', message: 'BTLE Heart Rate Monitor successfully initialized' } this.publishOperationalStateChange(); } }.bind(this)); }; BtleHeartRateMonitor.prototype.getBodySensorData = function (callback) { this.bodyLocationCharacteristic.read(function (error, data) { if (error) return callback(error); callback(null, this.bodyLocationToString(data.readUInt8(0))); }); }; 
BtleHeartRateMonitor.prototype.bodyLocationToString = function (location) { if (!BODY_LOCATION_STRINGS[location]) { return BODY_LOCATION_STRINGS[0]; } return BODY_LOCATION_STRINGS[location]; }; /** * */ BtleHeartRateMonitor.prototype.setState = function (state) { this.state = state; }; /** * */ BtleHeartRateMonitor.prototype.getState = function () { return this.state; }; } <file_sep>module.exports = { label: "Room 22", id: "room22", devices: [{ id: "heartMonitorJenn", label: "Heart Monitor Jenn", plugin: "btle-heart-rate-monitor/btleHeartRateMonitor", configuration: {simulated: true}, actors: [], sensors: [] }, { id: "heartMonitorFrank", label: "Heart Monitor Frank", plugin: "btle-heart-rate-monitor/btleHeartRateMonitor", configuration: {simulated: true}, actors: [], sensors: [] }], services: [], eventProcessors: [] }; <file_sep># thing-it-device-btle-heart-rate-monitor [![NPM](https://nodei.co/npm/thing-it-device-btle-heart-rate-monitor.png)](https://nodei.co/npm/thing-it-device-btle-heart-rate-monitor/) [![NPM](https://nodei.co/npm-dl/thing-it-device-btle-heart-rate-monitor.png)](https://nodei.co/npm/thing-it-device-btle-heart-rate-monitor/) [thing-it-node] Device Plugin for Heart Rate Monitors supporting the GATT protocol. This allows you to * control Bluetooth LE Heart Rate Monitor devices over the Internet, * define complex services, event processing, storyboards and Jobs combining Bluetooth LE Heart Rate Monitor devices with other Devices, Sensors and Actors. by means of [thing-it-node](https://github.com/marcgille/thing-it-node) and [thing-it.com](http://www.thing-it.com). ## Installation After you have * [set up](http://www.thing-it.com/thing-it/index.html?document=gettingStarted#/documentationPanel) your [thing-it] Node Box and * configured or copied a [thing-it] Mesh with a BTLE Heart Rate Monitor Device, no further configuration is required. # User Interface The Heart Rate Monitor appears as follows in the [thing-it] Mobile UI: <img src="./documentation/images/group-with-heart-rate.png">
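The characteristic read callback above checks a flags byte, reads a one-byte heart-rate value at offset 1, and maps the body-sensor-location byte through BODY_LOCATION_STRINGS. A small decoding sketch in Python, assuming only the byte layout the JavaScript itself accesses (flags at offset 0, 8-bit BPM at offset 1); the buffers here are made-up sample data:

# Decode a heart-rate measurement the way the plugin's read callback does:
# flags byte first, then an 8-bit BPM value when the 16-bit-format bit is clear.
HEART_RATE_VALUE_FORMAT = 0x01
BODY_LOCATION_STRINGS = ['Other', 'Chest', 'Wrist', 'Finger', 'Hand', 'Ear Lobe', 'Foot']

def decode_heart_rate(buf):
    flags = buf[0]
    if flags & HEART_RATE_VALUE_FORMAT:
        return None   # 16-bit value indicated; the plugin skips this case
    return buf[1]     # 8-bit beats per minute

def body_location(code):
    return BODY_LOCATION_STRINGS[code] if code < len(BODY_LOCATION_STRINGS) else BODY_LOCATION_STRINGS[0]

print(decode_heart_rate(bytes([0x00, 72])))   # -> 72
print(body_location(2))                       # -> 'Wrist'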
d023cad6afdb2efbc6c93fe06eeb66a14eca146f
[ "JavaScript", "Markdown" ]
3
JavaScript
marcgille/thing-it-device-btle-heart-rate-monitor
43fb226791241cbd147a1cfb542a49315c056c72
329e09383ced34ab433f37108a195e38d25eaf93
refs/heads/master
<repo_name>Wanchunwei/HTTPserver<file_sep>/WebServer.java // import java.net.*; import java.nio.*; import java.nio.file.*; import java.io.*; import java.util.*; import java.text.*; public class WebServer { public static void main(String[] args) { // dummy value that is overwritten below int port = 8080; try { port = Integer.parseInt(args[0]); } catch (Exception e) { System.out.println("Usage: java WebServer <port> "); System.exit(0); } WebServer serverInstance = new WebServer(); serverInstance.start(port); } private void start(int port){ System.out.println("Starting server on port " + port); // NEEDS IMPLEMENTATION // You have to understand how sockets work and how to program // them in Java. // A good starting point is the socket tutorial from Oracle // http://docs.oracle.com/javase/tutorial/networking/sockets/ // But there are a billion other resources on the Internet. // // Hints // 1. You should set up the socket(s) and then call handleClientSocket. try{ ServerSocket serverSocket = new ServerSocket(port); while(true){ try{ Socket client = serverSocket.accept(); handleClientSocket(client); //client.close(); }catch(Exception e){ e.printStackTrace(); continue; } } }catch(Exception e){ e.printStackTrace(); System.exit(0); } //HTTP1.0 } /** * Handles requests sent by a client * @param client Socket that handles the client connection */ private void handleClientSocket(Socket client) { // NEEDS IMPLEMENTATION // This function is supposed to handle the request // Things to do: // (1) Read the request from the socket // (2) Parse the request and set variables of // the HttpRequest class (at the end of the file!) // (3) Form a response using formHttpResponse. // (4) Send a response using sendHttpResponse. // // A BufferedReader might be useful here, but you can also // solve this in many other ways. try{ InputStream input = client.getInputStream(); InputStreamReader reader = new InputStreamReader(input); BufferedReader bf = new BufferedReader(reader); String s = null; int timeout = 2000; while((s = bf.readLine()) != null){ //System.out.println("This is parse!"); String[] spliteds = s.split(" "); if(spliteds[0].equals("GET")){ HttpRequest clientRequest = new HttpRequest(); clientRequest.parse(s); sendHttpResponse(client, formHttpResponse(clientRequest)); if(clientRequest.getProtocal().equals("HTTP/1.0")){ client.close(); break; }else{ client.setSoTimeout(timeout); } } } } catch(Exception e){ e.printStackTrace(); System.out.println("Cannot get request..."); } } /** * Sends a response back to the client * @param client Socket that handles the client connection * @param response the response that should be send to the client */ private void sendHttpResponse(Socket client, byte[] response) { // NEEDS IMPLEMENTATION try{ OutputStream responseOutput = client.getOutputStream(); responseOutput.write(response); }catch(Exception e){ e.printStackTrace(); } } /** * Form a response to an HttpRequest * @param request the HTTP request * @return a byte[] that contains the data that should be send to the client */ private byte[] formHttpResponse(HttpRequest request) { // NEEDS IMPLEMENTATION // Make sure you follow the (modified) HTTP specification // in the assignment regarding header fields and newlines // You might want to use the concatenate method, // but you do not have to. // If you want to you can use a StringBuilder here // but it is possible to solve this in multiple different ways. 
//System.out.println("I am here!"); byte[] headerInByte; byte[] blank = new byte[1024]; FileInputStream fis = null; try{ File file = new File(request.getFilePath()); if(file.exists()){ String responseHeader = request.getProtocal() + " " + "200 OK\r\nContent-Length: " + file.length() + "\r\n" + "Last-Modified: " + getFileTime(request) + "GMT" + "\r\n\r\n"; headerInByte = responseHeader.getBytes(); fis = new FileInputStream(file); byte[] fileInByte = new byte[fis.available()]; fis.read(fileInByte); fis.close(); byte[] responseInByte = concatenate(headerInByte, fileInByte); return responseInByte; }else{ String error = request.getProtocal() + " 404 Not Found\r\nContent-Length: 0\r\n\r\n"; headerInByte = error.getBytes(); return headerInByte; } } catch (Exception e){ System.out.println(e.toString()); } return blank; } private String getFileTime(HttpRequest request) throws Exception{ String requestURL = request.getFilePath(); File requestFile = new File(requestURL); long time = requestFile.lastModified(); Date date = new Date(); date.setTime(time); SimpleDateFormat formatter = new SimpleDateFormat("EEE, dd MM yyyy HH:mm:ss "); return formatter.format(date); } /** * Concatenates 2 byte[] into a single byte[] * This is a function provided for your convenience. * @param buffer1 a byte array * @param buffer2 another byte array * @return concatenation of the 2 buffers */ private byte[] concatenate(byte[] buffer1, byte[] buffer2) { byte[] returnBuffer = new byte[buffer1.length + buffer2.length]; System.arraycopy(buffer1, 0, returnBuffer, 0, buffer1.length); System.arraycopy(buffer2, 0, returnBuffer, buffer1.length, buffer2.length); return returnBuffer; } } class HttpRequest { // NEEDS IMPLEMENTATION // This class should represent a HTTP request. // Feel free to add more attributes if needed. private String filePath; String getFilePath() { return "." + requestURL; } private String method; private String protocal; private String requestURL; private void setMethod(String d){ method = d; } private void setProtocal(String d){ protocal = d; } private void setRequestURL(String d){ requestURL = d; } void parse(String s){ String[] splitString = s.split(" "); setMethod(splitString[0]); setRequestURL(splitString[1]); setProtocal(splitString[2]); } String getMethod(){ return method; } String getRequestURL(){ return requestURL; } String getProtocal(){ return protocal; } // NEEDS IMPLEMENTATION // If you add more private variables, add your getter methods here }
ffbe855b8c2c85941b122b846238d2a842f20bca
[ "Java" ]
1
Java
Wanchunwei/HTTPserver
246101afbb3926cb957f0c9ab4c983187d3327ef
8beebb11cda85a1d3dbd82da9ce6fc263037f497
refs/heads/main
<repo_name>TwT-Teaker99/Virus_File_Infected_Sample<file_sep>/ClearFile.cpp #include <stdio.h> #include <windows.h> using namespace std; DWORD Align(DWORD size, DWORD align); bool DelSection(char* filepath); char filepath[] = "D:\\gen1.exe"; //file path to be infected BYTE secName[8] = ".viral"; //Name of section infected int main(int argc, char* argv[]) { DelSection(filepath); return 0; } //Function: rounding size according to alignment DWORD Align(DWORD size, DWORD align) { if (!(size % align)) return size; else return (size / align + 1) * align; } //Function: add blank section to PE file bool DelSection(char* filepath) { HANDLE file = CreateFileA(filepath, GENERIC_READ | GENERIC_WRITE, 0, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL); if (file == INVALID_HANDLE_VALUE) { MessageBoxA(NULL, "Error opening file", NULL, MB_OK); return false; } DWORD fileSize = GetFileSize(file, NULL); BYTE* pByte = new BYTE[fileSize]; DWORD dw; ReadFile(file, pByte, fileSize, &dw, NULL); PIMAGE_DOS_HEADER dosHeader = (PIMAGE_DOS_HEADER)pByte; if (dosHeader->e_magic != IMAGE_DOS_SIGNATURE) { MessageBoxA(NULL, "Not PE file", NULL, MB_OK); return false; } PIMAGE_FILE_HEADER fileHeader = (PIMAGE_FILE_HEADER)(pByte + dosHeader->e_lfanew + sizeof(DWORD)); PIMAGE_OPTIONAL_HEADER OptionHeader = (PIMAGE_OPTIONAL_HEADER)(pByte + dosHeader->e_lfanew + sizeof(DWORD) + sizeof(IMAGE_FILE_HEADER)); //check file 32bit if (OptionHeader->Magic != IMAGE_NT_OPTIONAL_HDR32_MAGIC) { MessageBoxA(NULL, "Not PE file 32bit!", NULL, MB_OK); return FALSE; } PIMAGE_SECTION_HEADER sectionHeader = (PIMAGE_SECTION_HEADER)(pByte + dosHeader->e_lfanew + sizeof(IMAGE_NT_HEADERS)); WORD last = fileHeader->NumberOfSections - 1; fileSize -= sectionHeader[last].SizeOfRawData; //check name of last section for (int i = 0; i < 8; i++) { if (sectionHeader[last].Name[i] != secName[i]) { MessageBoxA(NULL, "Not file injected!", NULL, MB_OK); return false; } } //read OEP save in shellcode //0xA89 offset save OEP SetFilePointer(file, sectionHeader[last].PointerToRawData + 0xA89, NULL, FILE_BEGIN); BYTE* pTemp = new BYTE; ReadFile(file, pTemp, 4, &dw, 0); SetFilePointer(file, fileSize, NULL, FILE_BEGIN); //truncate file size if (SetEndOfFile(file) == 0) { MessageBoxA(NULL, "False!", NULL, MB_OK); return FALSE; } //fix SizeOfImage, NumOfSec OptionHeader->SizeOfImage -= Align(sectionHeader[last].Misc.VirtualSize, OptionHeader->SectionAlignment); fileHeader->NumberOfSections -= 1; //clear sectionHeader memset(&sectionHeader[last], 0, sizeof(IMAGE_SECTION_HEADER)); //write file SetFilePointer(file, 0, NULL, FILE_BEGIN); WriteFile(file, pByte, fileSize, &dw, NULL); //write override OEP SetFilePointer(file, dosHeader->e_lfanew + 0x28, NULL, FILE_BEGIN); WriteFile(file, pTemp, 4, &dw, 0); CloseHandle(file); return true; }
f4884d0a881c98cac247824c863d4b02a8570141
[ "C++" ]
1
C++
TwT-Teaker99/Virus_File_Infected_Sample
96a03ec530c8dcdcfb8147a61da103c471d77204
a59413203fb5f2c2ddf1be54bd313a0325914a5e
refs/heads/master
<repo_name>tomerun/icfpc2020<file_sep>/README.md # ICFP Programming Contest 2020 team name : tomerun * src/main.cr * ship AI * rotates around avoiding collision with the planet * it didn't attack by shoot nor denotate because I couldn't realize their command spec * src/defs.cr, src/parser.cr, src/reducer.cr * galaxy evaluator and visualizer * just output the screen to an image file * I couldn't create a clickable UI * src/modem.cr * converter between cons list <-> modulated binary form <-> Crystal Array This problem was too hard for single person team without knowledges of functional programming languages. I miserably spent first two days for implementing galaxy evaluator. That's why I could only create trivial AI. Anyway I appreciate organizers' effort to administrate this challenging competition without any severe trouble. <file_sep>/build.sh #!/bin/sh shards build app
1a34cdb7dd952ee0361bbf561641ca10ea31a050
[ "Markdown", "Shell" ]
2
Markdown
tomerun/icfpc2020
bdec220fc754063caf744736faafae9b2ba0f03a
ef2baeffa120da85c95b2bfea1a85fafad090d59
refs/heads/master
<repo_name>mertcerciler/Supervised-Learning<file_sep>/README.md # Supervised-Learning ANN with linear regressor and ANN with single hidden layer is implemented and the dataset is applied to both models. * Sum of Squared errors is used as loss function. * Sigmoid activation function is used to define the hidden units. * Stochastic learning algorithm is used. Different parameters are used to find the best model that gives the lowest error result. The resulting configuration is: * ANN used: ANN for linear regression gives more accurate results. * Learning rate: 0.0005. * I initialized weights with random numbers between (0,1) for both models. * I used different numbers of epochs with different parameters. The number is 100 in this configuration. * The stop value is 0.00001. Even the epoch does not reach its max, if the step size is lower than the stop value, learning is terminated. * Normalization highly affects the error value since the difference of maximum and minimum values of the dataset is very high. If the normalization applied with respect to whole dataset, the numbers are decreased hugely. That makes the error value and the data points lower. However, I don’t think the ratio between the data points and the error values are also decreased, it is used for just playing with lower numbers. Since, learning process can be observed with bigger numbers, I choose not to apply the normalization. * **Training loss (averaged value over training instances): 1586** * **Test loss (averaged value over test instances): 1275** The plots are drawn below for this configuration, for both training and test datasets. <img src="/img/training_lr.png"> <img src="/img/test_lr.png"> While finding the best configuration, ANN with single hidden layer with 2,4,8,16 and 32 hidden units are also applied to the model and following plots are drawn for the training dataset. <img src="/img/training_2.png"> <img src="/img/traning_4.png"> <img src="/img/traning_8.png"> <img src="/img/traning_16.png"> <img src="/img/traning_32.png"> Below table shows the training loss, test loss, training loss over test loss and standard deviation. <img src ="/img/table.png"> <file_sep>/src/ann.py import numpy as np import matplotlib.pyplot as plt #below functions are functions that is used both ann algorithms or functions that is used by main #sigmoid function. def sigmoid(x): return 1/(1 + np.exp(-x)) #derivative of sigmoid function. def sigmoid_derivative(x): return sigmoid(x) * (1-sigmoid(x)) #function for reading and seperating datasets into inputs and labels. def reading_datasets(f): x = [] y = [] for line in f: is_x = True x_index = '' y_index = '' for ch in line: if (ch == '\t'): is_x = False elif(ch == '\n'): x_index = float(x_index) x.append(x_index) y_index = float(y_index) y.append(y_index) break if is_x: x_index += ch else: y_index += ch x = np.array(x) y = np.array(y) x = x.reshape(x.shape[0],1) y = y.reshape(y.shape[0], 1) return x, y #returns loss of the output, with the sum of squares loss function. def loss(output_set, label_set): loss = np.square(output_set - label_set) return sum(loss)/output_set.shape[0] def standard_dev(output_set, label_set): loss = np.square(output_set - label_set) return np.std(loss) #returns the normalized dataset. def normalization(x_train, y_train, x_test, y_test): #first, concatenate all datasets to a single dataset in order to make normalization according to whole dataset. 
all_dataset = np.concatenate((x_train, y_train, x_test, y_test), axis=0) norm_x_train = x_train norm_y_train = y_train norm_x_test = x_test norm_y_test = y_test #normalize x_train for row in range(0, x_train.shape[0]): norm_x_train[row] = (x_train[row] - all_dataset.min()) / (all_dataset.max() - all_dataset.min()) #normalize y_train for row in range(0, y_train.shape[0]): norm_y_train[row] = (y_train[row] - all_dataset.min()) / (all_dataset.max() - all_dataset.min()) #normalize x_test for row in range(0, x_test.shape[0]): norm_x_test[row] = (x_test[row] - all_dataset.min()) / (all_dataset.max() - all_dataset.min()) #normalize y_test for row in range(0, y_test.shape[0]): norm_y_test[row] = (y_test[row] - all_dataset.min()) / (all_dataset.max() - all_dataset.min()) return norm_x_train, norm_y_train, norm_x_test, norm_y_test def plot(x_points, real_values, predictions, title, xlabel, ylabel): fig = plt.figure() ax1 = fig.add_subplot(111) ax1.scatter(x_points, real_values, s=10, c='b', marker="s", label="Real Values") ax1.scatter(x_points, predictions, s=10, c='r', marker="s", label="Predictions") plt.legend(loc='upper left') plt.title(title) plt.xlabel(xlabel) plt.ylabel(ylabel) #This is the class for ann with single hidden layer. class ann_with_shl: def __init__(self, x, y, units, max_epoch, learning_rate, stop_value): self.input = x self.weights1 = np.random.rand(self.input.shape[1],units) self.weights2 = np.random.rand(units,1) self.y = y self.output = np.zeros(self.y.shape) self.max_epoch = max_epoch self.step_size1 = 1 self.step_size2 = 1 self.learning_rate = learning_rate self.training_output = np.zeros(self.output.shape[0]) self.stop_value = stop_value #feedforward algorithm. def feedforward(self): self.layer1 = sigmoid(np.dot(self.input, self.weights1)) self.output = np.dot(self.layer1, self.weights2) #backpropagation algorithm. def backpropagation(self): loss_derivative = 2*((self.output-self.y)) sgm_derivative = sigmoid_derivative(self.layer1) #chain rule. gradient_hidden2output = np.dot(self.layer1.T, loss_derivative) gradient_input2hidden = np.dot(self.input.T, np.dot(loss_derivative, self.weights2.T) * sgm_derivative) #calculating step size with multiplying gradient with learning rate. self.step_size2 = gradient_hidden2output * -1*self.learning_rate self.step_size1 = gradient_input2hidden * -1*self.learning_rate #print('stepsize1:', self.step_size1.shape) #updating the weights. self.weights2 = self.step_size2 + self.weights2 self.weigths1 = self.step_size1 + self.weights1 #fitting the dataset to ann with single layer with given conditions. def fit(self): epoch = 1 error_training = np.zeros(self.max_epoch) while (epoch <= self.max_epoch or (self.step_size1.any() <= self.stop_value and self.step_size2.any() <= self.stop_value)): self.feedforward() if epoch == self.max_epoch: self.training_output = self.output self.backpropagation() loss_fitting = loss(self.output, self.y) error_training[epoch-1] = loss_fitting epoch += 1 stdev = standard_dev(self.output, self.y) return stdev, self.training_output, error_training #predicting the given dataset and calculating the error def predict(self, input_set, label_set): hidden_layer = sigmoid(np.dot(input_set, self.weights1)) prediction = np.dot(hidden_layer, self.weights2) loss_prediction = loss(prediction, label_set) error_test = loss_prediction return prediction, error_test #This is the class for ann with linear regressor. 
class ann_with_lr: def __init__(self, x, y, max_epoch, learning_rate, stop_value): self.input = x self.weights = np.random.rand(self.input.shape[1],1) self.y = y self.max_epoch = max_epoch self.learning_rate = learning_rate self.step_size = 1 self.stop_value = stop_value self.output = np.zeros(self.y.shape) self.training_output = np.zeros(self.output.shape[0]) #feedforward algorithm. def feedforward(self): self.output = np.dot(self.input, self.weights) #backpropagation algorithm. def backpropagation(self): loss_derivative = 2*((self.output - self.y)) #chain rule. gradient = np.dot(self.input.T, loss_derivative) #calculating step size with multiplying gradient with learning rate. self.step_size = gradient * -1*self.learning_rate #updating the weights. self.weights = self.step_size + self.weights #fitting the dataset to ann with single layer with given conditions. def fit(self): epoch = 1 error_training = np.zeros(self.max_epoch) while (epoch <= self.max_epoch or (self.step_size.any() <= self.stop_value)): self.feedforward() if epoch == self.max_epoch: self.training_output = self.output self.backpropagation() loss_fitting = loss(self.output, self.y) error_training[epoch-1] = loss_fitting epoch +=1 stdev = standard_dev(self.output, self.y) return stdev, self.training_output, error_training #predicting the given dataset and calculating the error def predict(self, input_set, label_set): prediction = np.dot(input_set, self.weights) loss_prediction = loss(prediction, label_set) error_test = loss_prediction return prediction, error_test #main (we are initilazing and calling the classes below). #reading datasets and seperate them into inputs and outputs. f_training = open("../data/train1.txt", "r") f_test = open("../data/test1.txt", "r") x_train, y_train = reading_datasets(f_training) x_test, y_test = reading_datasets(f_test) #Normalize the whole dataset. Normalization do not be applied since learning process can be observed if the numbers are bigger. #If it is wanted to normalize the dataset, below line should be commented out. #x_train, y_train, x_test, y_test = normalization(x_train, y_train, x_test, y_test) #training the dataset with ann single hidden layer with 2 hidden units. ann_shl_2 = ann_with_shl(x = x_train, y = y_train, max_epoch = 100, units = 2, learning_rate = 0.025, stop_value = 0.0001) stdev_2, training_output_shl_2, error_training_shl_2 = ann_shl_2.fit() prediction_shl_2, error_test_shl_2 = ann_shl_2.predict(x_test, y_test) #training the dataset with ann single hidden layer with 4 hidden units. ann_shl_4 = ann_with_shl(x = x_train, y = y_train, max_epoch = 100, units = 4, learning_rate = 0.01, stop_value = 0.0001) stdev_4, training_output_shl_4, error_training_shl_4 = ann_shl_4.fit() prediction_shl_4, error_test_shl_4 = ann_shl_4.predict(x_test, y_test) #training the dataset with ann single hidden layer with 8 hidden units. ann_shl_8 = ann_with_shl(x = x_train, y = y_train, max_epoch = 100, units = 8, learning_rate = 0.001, stop_value = 0.0001) stdev_8, training_output_shl_8, error_training_shl_8 = ann_shl_8.fit() prediction_shl_8, error_test_shl_8 = ann_shl_8.predict(x_test, y_test) #training the dataset with ann single hidden layer with 16 hidden units. 
ann_shl_16 = ann_with_shl(x = x_train, y = y_train, max_epoch = 100, units = 16, learning_rate = 0.0015, stop_value = 0.0001) stdev_16, training_output_shl_16, error_training_shl_16 = ann_shl_16.fit() prediction_shl_16, error_test_shl_16 = ann_shl_16.predict(x_test, y_test) #training the dataset with ann single hidden layer with 32 hidden units. ann_shl_32 = ann_with_shl(x = x_train, y = y_train, max_epoch = 100, units = 32, learning_rate = 0.001, stop_value = 0.0001) stdev_32, training_output_shl_32, error_training_shl_32 = ann_shl_32.fit() prediction_shl_32, error_test_shl_32 = ann_shl_32.predict(x_test, y_test) #training the dataset with ann linear regressor ann_lr = ann_with_lr(x = x_train, y = y_train, max_epoch = 100, learning_rate = 0.0005, stop_value = 0.00001) stdev_lr, training_output_lr, error_training_lr =ann_lr.fit() prediction_lr, error_test_lr = ann_lr.predict(x_test, y_test) #plotting the predictions vs labels. x_points_test = np.zeros(y_test.shape[0]) for i in range(0, y_test.shape[0]): x_points_test[i] = i x_points_train = np.zeros(y_train.shape[0]) for i in range(0, y_test.shape[0]): x_points_train[i] = i plot(x_points=x_points_test, real_values=y_test, predictions=prediction_lr, title='Test predictions vs test labels for linear regressor', xlabel='Input points', ylabel='Output points') plot(x_points=x_points_train, real_values=y_train, predictions=training_output_lr, title='Training outputs vs traning labels for linear regressor', xlabel='Input points', ylabel='Output points') plot(x_points=x_points_train, real_values=y_train, predictions=training_output_shl_2, title='Training outputs vs traning labels for ann with shl with 2 hidden units', xlabel='Input points', ylabel='Output points') plot(x_points=x_points_train, real_values=y_train, predictions=training_output_shl_4, title='Training outputs vs traning labels for ann with shl with 4 hidden units', xlabel='Input points', ylabel='Output points') plot(x_points=x_points_train, real_values=y_train, predictions=training_output_shl_8, title='Training outputs vs traning labels for ann with shl with 8 hidden units', xlabel='Input points', ylabel='Output points') plot(x_points=x_points_train, real_values=y_train, predictions=training_output_shl_16, title='Training outputs vs training labels for ann with shl with 16 hidden units', xlabel='Input points', ylabel='Output points') plot(x_points=x_points_train, real_values=y_train, predictions=training_output_shl_32, title='Training outputs vs traning labels for ann with shl with 32 hidden units', xlabel='Input points', ylabel='Output points')
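As a compact restatement of the update rule the README above describes — sum-of-squared-errors loss, sigmoid hidden units, and a learning-rate-scaled gradient step — here is an independent sketch; the variable names and shapes are mine, not taken from ann.py:

```python
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def sse_gradient_step(x, y, w1, w2, lr):
    """One gradient step for a single-hidden-layer net with a linear output.

    x: (n, d) inputs, y: (n, 1) targets, w1: (d, h), w2: (h, 1).
    """
    h = sigmoid(x @ w1)                               # hidden activations
    out = h @ w2                                      # linear output layer
    d_out = 2.0 * (out - y)                           # derivative of sum of squared errors
    grad_w2 = h.T @ d_out
    grad_w1 = x.T @ ((d_out @ w2.T) * h * (1.0 - h))  # sigmoid'(z) = s(z) * (1 - s(z))
    return w1 - lr * grad_w1, w2 - lr * grad_w2
```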
5d60b05107429ab53ee885a0a7b88ac5a65a9f6a
[ "Markdown", "Python" ]
2
Markdown
mertcerciler/Supervised-Learning
803e815c7538847fa1d09af00a98866572175dc7
eb0fc975e82a5b32a478a2bd1a481343e6b64cb9
refs/heads/master
<file_sep>def find_element_index(array, value_to_find) array.length.times do |count| if array[count] == value_to_find return count end end nil end #Another way #def find_element_index(array, value_to_find) #array.index(value_to_find) #end def find_max_value(array) array.max end def find_min_value(array) array.min end
fdf3db55349f523a95417679d519431d34922f2f
[ "Ruby" ]
1
Ruby
mariiamii/programming-univbasics-4-array-concept-review-lab-nyc-web-010620
543071629b00d7ebb64ff00e5699da6374866cfb
75d71f8e53cca5706bea961dd560cad4c80fc137
refs/heads/master
<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jun 18 16:10:23 2019 @author: kartiktanksali """ #String Permutation def create_hashmap(string): hash_map = {} for i in string: if i in hash_map: hash_map[i] += 1 else: hash_map[i] = 1 return hash_map str1 = input("Enter string 1: ") str2 = input("Enter string 2: ") str1_map = create_hashmap(str1) str2_map = create_hashmap(str2) if str1_map == str2_map: print("One is permutation of another") else: print("They are not")<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Apr 28 12:13:05 2019 @author: kartiktanksali """ str1 = "abc" str2 = "racecar" def check_palindrome(string): st1 = string st2 = string[::-1] if st1 == st2: l = int(len(st1)) h = int(len(st1)/2) if h%2==0: print(f"({st1[0:h]})({st1[h:l]})") else: print(f"({st1[0:h]}){st1[h]}({st1[h+1:l]})") print("Palindrome") else: print("Not a palindrome") check_palindrome(str1) check_palindrome(str2) <file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Apr 28 11:37:29 2019 @author: kartiktanksali """ def for_sum(lst): sums = 0 for i in lst: sums += i return sums def while_sum(lst): sums = 0 i=0 while(i<len(lst)): sums += lst[i] i+=1 return sums lst = [1,2,3,4,10] res = for_sum(lst) print("Sum using for loop ",res) res1 = while_sum(lst) print("Sum using for loop ",res1)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Apr 28 12:04:27 2019 @author: kartiktanksali """ list1 = ["a","b","c"] list2 = [1,2,3] def combine(list1, list2): lst = [] len1 = len(list1) len2 = len(list2) for index in range( max(len1, len2) ): if index+1 <= len1: lst += [list1[index]] if index+1 <= len2: lst += [list2[index]] return lst res = combine(list1,list2) print(res)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Fri Mar 15 16:24:40 2019 @author: kartiktanksali """ from collections import Counter string = input() dic = Counter(string) dic_hard = {"(":")","[":"]","{":"}"} for k in dic.keys(): if k in dic_hard: if dic[k]==dic[dic_hard[k]]: print("No issues with :",k,dic_hard[k]) else: print("Issues with :",k,dic_hard[k])<file_sep>from collections import Counter def Solution(A): mode = Counter(A) m = mode.most_common(1)[0][0] count=0 if m==0: rep = 1 else: rep = 0 for i in range(len(A)): if A[i]!=m: A[i]=rep count+=1 return count A = [1,1,0,0,0,0,1,1,0] res = Solution(A) print(res) <file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Mar 17 17:27:45 2019 @author: kartiktanksali """ #Balacing the paranthesis using Stack paranthesis_dict = {"(":")","[":"]","{":"}"} def BalanceIt(string): stack = [] top=-1 for i in string: if i in paranthesis_dict.keys(): stack.append(i) top+=1 print(stack) elif i in paranthesis_dict.values(): if (len(stack)>0) and (i == paranthesis_dict[stack[top]]): stack.pop() top-=1 print(stack) else: return False if len(stack)==0: return True string = input("Enter a expression: ") res = BalanceIt(string) print("Balanced" if res==True else "Unbalanced")<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Jun 23 18:03:31 2019 @author: kartiktanksali """ #Find the squares import math def CountSquares(arr): new_lst = [] for i in arr: temp = i.split(" ") start = int(temp[0]) end = int(temp[1]) new_lst.append((math.floor(math.sqrt(end)) - math.ceil(math.sqrt(start)) + 1)) return new_lst lst = ["3 9","17 24"] res = CountSquares(lst) print(res) <file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jun 18 16:36:20 2019 
@author: kartiktanksali """ #Check if all the characters of a string are unique def StrUnique(string): if len(string) > 256: return "Not Unique" else: lst = [False] * 128 for i in range(len(string)): val = ord(string[i]) if lst[val]: return "Not Unique" lst[val] = True return "Unique" string = input("Enter a string: ") res = StrUnique(string) print(res)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jun 18 16:54:07 2019 @author: kartiktanksali """ #URLify def URLify(string): return string.replace(" ","%20") string = input("Enter a string: ") res = URLify(string) print(res)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- #Missing words def MissingWords(s,t): new_s = s.split(" ") new_t = t.split(" ") lst = [] for i in range(len(new_s)): flag = 0 for j in range(len(new_t)): if new_s[i] == new_t[j]: flag = 1 if flag == 0: lst.append(new_s[i]) return lst s = "I use hackerrank to improve my programming" t = "programming hackerrank" res = MissingWords(s,t) print(res)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Apr 28 11:43:02 2019 @author: kartiktanksali """ import math def isDivisible(number1,number2): n1 = number1 n2 = number2 for i in range(number2): if number1 == 0: print(n1," Divided by ",n2," is ",i) break elif number1 < 0: print(n1," Divided by ",n2," is ",math.floor(n1/n2)," with a remainded of ",n1%n2) break number1 -= number2 number1 = 6 number2 = 4 res = isDivisible(number1,number2) <file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Jun 23 16:40:11 2019 @author: kartiktanksali """ import math def arrangeCoins(n): lst = [] for i in n: k = -0.5 + math.sqrt(0.25 + 2 * i) lst.append(int(math.floor(k))) for i in lst: print(i) n = [2,5,8] print(arrangeCoins(n))<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Jun 23 17:17:04 2019 @author: kartiktanksali """ #Sum of odd integers def sumOdd(n): for i in n: print(int((i+1)/2)**2) lst = [3,1,2,3] sumOdd(lst)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Mar 26 19:18:20 2019 @author: kartiktanksali """ #lst = [1,2,3] # #sub_lst = [] ## #def subarraySum(nums, k): # res = 0 # count=0 # for i in range(len(nums)): # count=0 # for j in range(i, len(nums)): # count+=1 # if sum(nums[i:j+1]) == k: # res += 1 # sub_lst.append(count) # return max(sub_lst) # #res = subarraySum(lst,5) #print(res) #sub_lst = [] #def printSubArrays(arr, start, end): # # # Stop if we have reached the end of the array # if end == len(arr): # return # # # Increment the end point and start from 0 # elif start > end: # return printSubArrays(arr, 0, end + 1) # # # Print the subarray and increment the starting # # point # else: # sub_lst.append(arr[start:end + 1]) # return printSubArrays(arr, start + 1, end) # ## Driver code #arr = [1, 2, 3, 4] #printSubArrays(arr, 0, 0) # #for i in sub_lst: # if sum(i)==5: # print(i) #def sums(x): # x+=x # # #lst = [1,2,3,4,5] # #sub_lst = [] #temp_lst = [] #for i in range(len(lst)): # temp_lst=[] # for j in range(i,len(lst)): # temp_lst.append(lst[j]) # s = map(sums,temp_lst) # if s==5: # sub_lst.append(temp_lst) # # #print(sub_lst) #nums = [1,2,3,4,5,6] #k=6 #h = {0: 1} #s = 0 #c = 0 #count=0 #for i in nums: # s += i # if s - k in h: # c += h[s - k] # count+=1 # if s in h: # h[s] += 1 # else: # h[s] = 1 #print(c) #print(count) def atMostSum(arr,k): _sum = 0 cnt = 0 maxcnt = 0 for i in range(len(arr)): if ((_sum + arr[i]) <= k): _sum += arr[i] cnt += 1 elif(sum != 0): _sum = _sum - arr[i - cnt] + arr[i] 
maxcnt = max(cnt, maxcnt) return maxcnt arr = [1,2,3,4,5] k = 6 res = atMostSum(arr,k) print(res) <file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Wed Jun 19 21:25:34 2019 @author: kartiktanksali """ #Kadane's Algorithm lst = [-2,3,2,-1] max_sum_global = max_sum_current = lst[0] for i in range(1,len(lst)): max_sum_current = max(lst[i],lst[i]+max_sum_current) if max_sum_current > max_sum_global: max_sum_global = max_sum_current print(max_sum_global) <file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sun Mar 17 19:08:19 2019 @author: kartiktanksali """ def reverse(x): reverse = 0 n=x if x<0: x = x*-1 while (x > 0): lastDigit = x % 10 reverse = (reverse * 10) + lastDigit x = x//10 if n<0: reverse = reverse*-1 if -2147483648 < reverse < 2147483647: return reverse else: return 0 x = int(input()) res = reverse(x) print(res)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Jun 18 13:23:09 2019 @author: kartiktanksali """ #Array Rotation lst = [1,2,3,4,5,6,7] nr = int(input("Enter the number of rotations: ")) for i in range(nr): temp = lst[0] for j in range(len(lst)-1): lst[j] = lst[j+1] lst[len(lst)-1] = temp print(lst)<file_sep>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Mar 26 19:12:15 2019 @author: kartiktanksali """ def quarter(string): if 1<=q<=3: return string[0:4]+"Q1" elif 4<=q<=6: return string[0:4]+"Q2" elif 7<=q<=9: return string[0:4]+"Q3" elif 10<=q<=12: return string[0:4]+"Q4" string = "2015-05-19" q = string[5]+string[6] q = int(q) res = quarter(string) print(res)
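The Kadane's-algorithm snippet a few files above tracks the best sum ending at the current index alongside the best sum seen so far; here it is wrapped as a reusable function for clarity (my restatement, not one of the repo's files):

```python
def max_subarray_sum(nums):
    """Kadane's algorithm: maximum contiguous subarray sum in O(n)."""
    best = current = nums[0]
    for x in nums[1:]:
        current = max(x, current + x)   # either extend the running sum or restart at x
        best = max(best, current)
    return best

print(max_subarray_sum([-2, 3, 2, -1]))  # 5, matching the inline example above
```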
ed009e3d2e12e4e88ae445173c61b191fd19137a
[ "Python" ]
19
Python
kartiktanksali/Coding_Questions
7a49b4d3de3554e2b2af336881c4d3ea6ab0fe51
d0687e11ec3f5b3d1f757ddbe9be585c2a4eca29
refs/heads/master
<repo_name>soofaloofa/nats<file_sep>/README.md # nats A command-line interface to [gnatsd](nats.io). nats is a command-line utility for sending and receiving messages via i a gnatsd cluster. It is a thin wrapper around the [NATS Go client](https://github.com/nats-io/nats). ## Install ```bash $ go get github.com/soofaloofa/nats ``` ## Usage Publishing to a subject ```bash $ nats pub subject test [#1] Published on [subject] : 'test' sending [#2] Published on [subject] : 'sending' messages [#3] Published on [subject] : 'messages' ``` Subscribing on a subject ```bash $ nats sub subject Listening on [subject] [#1] Received on [subject]: 'test' [#2] Received on [subject]: 'sending' [#3] Received on [subject]: 'messages' ``` ## Configuration Use the `-s` flag to publish or subscribe to a different server. ```bash $ nats -s tls://192.168.1.45:4222 pub subject ``` comma separate multiple servers ```bash $ nats -s nats://192.168.1.45:4222,nats://192.168.1.46:4222 pub subject ``` <file_sep>/main.go package main import ( "bufio" "fmt" "os" "strings" "time" "github.com/nats-io/nats" "github.com/urfave/cli" ) var ( url string // NATS server url reply string // Publish reply-to subject ) func main() { app := cli.NewApp() app.Name = "nats" app.Usage = "a nats.io CLI" app.Version = "0.0.1" app.Compiled = time.Now() app.Authors = []cli.Author{ cli.Author{ Name: "<NAME>", Email: "<EMAIL>", }, } app.Copyright = "(c) 2016 <NAME>" // TODO: Add additional config options as needed app.Flags = []cli.Flag{ cli.StringFlag{ Name: "server, s", Usage: "NATS server url and port. Separate multiple servers with a comma.", Value: nats.DefaultURL, EnvVar: "NATS_SERVER", Destination: &url, }, } // Pub/Sub commands app.Commands = []cli.Command{ cli.Command{ Name: "pub", Aliases: []string{"a"}, Usage: "publish messages to a subject", Action: pub, }, cli.Command{ Name: "sub", Aliases: []string{"s"}, Usage: "subscribe to a subject", Action: sub, }, } app.Run(os.Args) } // splitFirstWord splits the input string at the first word func splitFirstWord(s string) (string, string) { for i := range s { // If we encounter a space, reduce the count. if s[i] == ' ' { return s[0:i], s[i:] } } // Return the entire string. return s, "" } func pubUsage() string { return "Usage: <subject<message>" } func pub(c *cli.Context) error { nc, err := nats.Connect(url) if err != nil { return cli.NewExitError(fmt.Sprintf("Can't connect to %s: %v", url, err), 1) } defer nc.Close() if len(c.Args()) != 1 { return cli.NewExitError("Usage: nats pub <subject>", 1) } subj, i := c.Args()[0], 0 scanner := bufio.NewScanner(os.Stdin) for scanner.Scan() { i++ msg := strings.TrimSpace(scanner.Text()) if msg == "" { continue } msg = strings.TrimSpace(msg) nc.Publish(subj, []byte(msg)) nc.Flush() fmt.Printf("[#%d] Published on [%s] : '%s'\n", i, subj, msg) } return nil } func sub(c *cli.Context) error { nc, err := nats.Connect(nats.DefaultURL) if err != nil { return cli.NewExitError(fmt.Sprintf("Can't connect to %s: %v", nats.DefaultURL, err), 1) } defer nc.Close() if len(c.Args()) < 1 || len(c.Args()) > 2 { return cli.NewExitError("Usage: nats sub <subject> [queue group]", 1) } subj, i := c.Args()[0], 0 var queue string if len(c.Args()) == 2 { queue = c.Args()[2] } nc.QueueSubscribe(subj, queue, func(msg *nats.Msg) { i++ fmt.Printf("[#%d] Received on [%s]: '%s'\n", i, msg.Subject, string(msg.Data)) }) fmt.Printf("Listening on [%s]\n", subj) // Loop until CTRL-D scanner := bufio.NewScanner(os.Stdin) for scanner.Scan() { } return nil }
43c541c9e37772b0467cc7ddb46f8dc505a0f8a6
[ "Markdown", "Go" ]
2
Markdown
soofaloofa/nats
7e0ee8c752b66a4583897e6a2b17dccef88022d4
8873c426ed838c7a4d15500e9e368527960e114f
refs/heads/master
<file_sep>package com.example.homework07; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.Map; public class ChatObject { public String sender, tripId, messagedId, message, sentTime,isImageSent; // int isImageSent; // public Date sentTime; public ChatObject(String sender, String tripId, String messagedId, String message, String isImageSent, String sentTime) { this.sender = sender; this.tripId = tripId; this.messagedId = messagedId; this.message = message; this.sentTime = sentTime; this.isImageSent = isImageSent; } public ChatObject(Map<String,String> hashMap) { this.sender=(String)hashMap.get("sender"); this.tripId=(String)hashMap.get("tripId"); this.messagedId=(String)hashMap.get("messagedId"); this.message=(String)hashMap.get("message"); this.isImageSent= (String) hashMap.get("isImageSent"); this.sentTime=(String)hashMap.get("sentTime"); } } <file_sep>package com.example.homework07; import android.annotation.SuppressLint; import android.content.res.Resources; import android.net.Uri; import android.util.Log; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.constraintlayout.widget.ConstraintLayout; import androidx.constraintlayout.widget.ConstraintSet; import androidx.recyclerview.widget.RecyclerView; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.android.gms.tasks.Task; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.DocumentSnapshot; import com.google.firebase.firestore.EventListener; import com.google.firebase.firestore.FirebaseFirestore; import com.google.firebase.firestore.FirebaseFirestoreException; import com.google.firebase.storage.FirebaseStorage; import com.google.firebase.storage.StorageReference; import com.squareup.picasso.Picasso; import java.util.ArrayList; import java.util.Map; public class ChatAdapter extends RecyclerView.Adapter<ChatAdapter.ViewHolder> { ArrayList<ChatObject> tdata; FirebaseStorage storage; public ChatAdapter(ArrayList<ChatObject> tdata) { this.tdata = tdata; } @NonNull @Override public ChatAdapter.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { View view= LayoutInflater.from(parent.getContext()) .inflate(R.layout.chat_item, parent, false); ChatAdapter.ViewHolder viewholder=new ChatAdapter.ViewHolder(view); return viewholder; } @Override public void onBindViewHolder(@NonNull final ChatAdapter.ViewHolder holder, int position) { final ChatObject chat = tdata.get(position); holder.tv_messages.setText(chat.message); holder.tv_msgDetails.setText(chat.sender +", "+chat.sentTime); holder.iv_picture.setVisibility(View.INVISIBLE); if(chat.isImageSent.equals("1")){ holder.iv_picture.setVisibility(View.VISIBLE); holder.tv_messages.setVisibility(View.INVISIBLE); storage = FirebaseStorage.getInstance(); StorageReference listRef = storage.getReference().child(chat.message); listRef.getDownloadUrl().addOnSuccessListener(new OnSuccessListener<Uri>() { @Override public void onSuccess(Uri uri) { Log.e("chat image+", "uri: " + uri.toString()); 
Picasso.get().load(uri.toString()).into(holder.iv_picture); } }) .addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { Log.d("demo...", "Image chat view failue"); } }); } Resources res = holder.itemView.getContext().getResources(); int color = res.getColor(R.color.listColor); if(chat.sender.equals(MainActivity.loggedInUserName)) { holder.tv_messages.setGravity(Gravity.RIGHT); holder.tv_msgDetails.setGravity(Gravity.RIGHT); // holder.tv_messages.setBackgroundColor(color); // holder.tv_messages.setWidth(ViewGroup.LayoutParams.WRAP_CONTENT); } holder.chat = chat; } @Override public int getItemCount() { return tdata.size(); } public static class ViewHolder extends RecyclerView.ViewHolder{ TextView tv_messages; TextView tv_msgDetails; ChatObject chat; ImageView iv_picture; ConstraintLayout constraintLayout; ArrayList<ChatObject> chatsFromDB=new ArrayList<>(); LinearLayout linearLayout; // ImageView imgMsg; public ViewHolder(@NonNull final View itemView) { super(itemView); this.chat = chat; tv_messages=itemView.findViewById(R.id.tv_message_chat); tv_msgDetails=itemView.findViewById(R.id.tv_msgDetails); iv_picture=itemView.findViewById(R.id.imgChat_chat); final FirebaseFirestore db; db=FirebaseFirestore.getInstance(); constraintLayout = itemView.findViewById(R.id.layout_listItem); // linearLayout=itemView.findViewById(R.id.linear_layout_chat); itemView.setOnLongClickListener(new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { final String tripid=chat.tripId; final String msgId=chat.messagedId; if(MainActivity.loggedInUserName.equals(chat.sender)) { DocumentReference docRef = db.collection("Chats").document(tripid); docRef.get().addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() { @Override public void onComplete(@NonNull Task<DocumentSnapshot> task) { if (task.isSuccessful()) { DocumentSnapshot snapshot= task.getResult(); if (snapshot.exists()) { if (snapshot != null && snapshot.exists()) { // Log.d("trips", "Current data: " + snapshot.getData()); Chats chat = new Chats(snapshot.getData()); chatsFromDB.clear(); for(int i = 0; i<chat.chatList.size(); i++){ Map<String,String> map = (Map<String, String>) chat.chatList.get(i); if(map.get("messagedId").equals(msgId)) { // chat.chatList.remove(i); } else { chatsFromDB.add(new ChatObject(map)); } } db.collection("Chats").document(tripid) .delete() .addOnSuccessListener(new OnSuccessListener<Void>() { @Override public void onSuccess(Void aVoid) { Chats chat = new Chats(chatsFromDB); db.collection("Chats").document(tripid) .set(chat.ToHashMap()) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if(task.isSuccessful()){ Log.d("chats", "chat added successfully"); } else{ Log.d("trip", task.getException().toString()); } } }); } }) .addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { Log.w("demo", "Error deleting document", e); } }); } } else { Log.d("trips", "Current data: null"); } } } }); } return false; } }); } } } <file_sep>package com.example.homework07; import android.graphics.Bitmap; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; public class Trips { String title; String latitude; String longitude; String coverPhoto; String createdBy; String imgUrl; // Bitmap byteImage; ArrayList<String> members = new ArrayList<String>(); // public Trips(String title, String createdBy, Bitmap byteImage) { // this.title = title; // 
this.createdBy = createdBy; // this.byteImage = byteImage; // } public Trips(String title, String latitude, String longitude, String coverPhoto, ArrayList<String> members, String createdBy) { this.title = title; this.latitude = latitude; this.longitude = longitude; this.coverPhoto = coverPhoto; this.members = members; this.createdBy=createdBy; } public Trips(String title, String createdBy, String coverPhoto,String imgUrl) { this.title = title; this.coverPhoto = coverPhoto; this.createdBy = createdBy; this.imgUrl=imgUrl; } public Trips(Map<String,Object>hashMap) { this.title=(String)hashMap.get("title"); this.createdBy=(String)hashMap.get("admin"); this.coverPhoto=(String)hashMap.get("coverPhoto"); this.latitude=(String)hashMap.get("latitude"); this.longitude=(String)hashMap.get("longitude"); this.members= (ArrayList<String>) hashMap.get("members"); } public Map<String,Object> tripsToHashMap() { HashMap<String,Object> hashTrips = new HashMap<String, Object>(); hashTrips.put("title",this.title); hashTrips.put("latitude",this.latitude); hashTrips.put("longitude",this.longitude); hashTrips.put("coverPhoto",this.coverPhoto); hashTrips.put("members",this.members); hashTrips.put("admin",this.createdBy); return hashTrips; } } <file_sep>package com.example.homework07; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.ImageView; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.android.gms.tasks.Task; import com.google.firebase.firestore.FirebaseFirestore; import com.google.firebase.firestore.QueryDocumentSnapshot; import com.google.firebase.firestore.QuerySnapshot; import com.google.firebase.storage.FirebaseStorage; import com.google.firebase.storage.ListResult; import com.google.firebase.storage.StorageReference; import java.util.ArrayList; public class TripActivity extends AppCompatActivity { ImageView imgUser; ImageView imgAddTrip; private RecyclerView rv_tripItem; private RecyclerView.Adapter tripAdapter; private RecyclerView.LayoutManager layoutManager; private ImageView btn_view_Users; FirebaseStorage storage; StorageReference storageReference; // public static int flagEdit=0; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_trip); setTitle("Trips"); final ArrayList<Trips>tripItemArrayList=new ArrayList<Trips>(); imgUser = findViewById(R.id.imgViewUserPage); rv_tripItem=findViewById(R.id.rv_tripItem); imgAddTrip = findViewById(R.id.imgAddTrip); btn_view_Users=findViewById(R.id.btn_viewUsers); rv_tripItem.setHasFixedSize(true); storage = FirebaseStorage.getInstance(); storageReference = storage.getReference(); layoutManager = new LinearLayoutManager(this); rv_tripItem.setLayoutManager(layoutManager); FirebaseFirestore db; db=FirebaseFirestore.getInstance(); btn_view_Users.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent i=new Intent(TripActivity.this, ViewUsersActivity.class); startActivity(i); } }); imgUser.setOnClickListener(new 
View.OnClickListener() { @Override public void onClick(View v) { // flagEdit=1; Intent intent = new Intent(TripActivity.this, UserActivity.class); startActivity(intent); } }); imgAddTrip.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(TripActivity.this, AddTripActivity.class); startActivity(intent); } }); db.collection("Trips") .get() .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() { @Override public void onComplete(@NonNull Task<QuerySnapshot> task) { if (task.isSuccessful()) { for (QueryDocumentSnapshot document : task.getResult()) { final String titleItem= (String) document.getData().get("title"); final String adminItem= (String)document.getData().get("admin"); final String coverPhotoItem= (String)document.getData().get("coverPhoto"); StorageReference listRef = storage.getReference().child("images/"+titleItem); listRef.getDownloadUrl().addOnSuccessListener(new OnSuccessListener<Uri>() { @Override public void onSuccess(Uri uri) { Log.e("Tuts+", "uri: " + uri.toString()); Trips t=new Trips(titleItem,adminItem,coverPhotoItem,uri.toString()); tripItemArrayList.add(t); tripAdapter.notifyDataSetChanged(); } }); } tripAdapter=new TripAdapter(tripItemArrayList); rv_tripItem.setAdapter(tripAdapter); } else { Log.d("tag", "Error getting documents: ", task.getException()); } } }); } } <file_sep>package com.example.homework07; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.app.AppCompatActivity; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; import android.app.ProgressDialog; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.text.format.DateFormat; import android.text.format.DateUtils; import android.util.Log; import android.view.View; import android.widget.EditText; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.android.gms.tasks.Task; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.DocumentSnapshot; import com.google.firebase.firestore.EventListener; import com.google.firebase.firestore.FirebaseFirestore; import com.google.firebase.firestore.FirebaseFirestoreException; import com.google.firebase.storage.FirebaseStorage; import com.google.firebase.storage.OnProgressListener; import com.google.firebase.storage.StorageReference; import com.google.firebase.storage.UploadTask; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.Map; import java.util.UUID; public class ChatRoomActivity extends AppCompatActivity{ RecyclerView chatRecylerView; ImageView iv_sendButton; ImageView iv_openGallery; ImageView iv_chatDelete; EditText et_messages; FirebaseFirestore db; private RecyclerView rv_chat; private RecyclerView.Adapter chatAdapter; private RecyclerView.LayoutManager layoutManager; String tripTitle = ""; FirebaseStorage storage; StorageReference storageReference; ArrayList<ChatObject> chatsFromDB; public static final int PICK_IMAGE = 1; private Uri filePath; // Boolean isImageSent = false; String 
imageUrl = ""; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_chat_room); if(getIntent().getExtras()!=null){ tripTitle = getIntent().getExtras().getString("tripTitle"); setTitle("Chat Room : "+tripTitle); } chatRecylerView = findViewById(R.id.recylerViewChat); iv_sendButton = findViewById(R.id.imgViewSend_chat); iv_openGallery = findViewById(R.id.imgViewGallery_chat); et_messages = findViewById(R.id.editTextTypeMessage_chat); iv_chatDelete = findViewById(R.id.imageViewChatDelete); rv_chat = findViewById(R.id.recylerViewChat); layoutManager = new LinearLayoutManager(this); rv_chat.setLayoutManager(layoutManager); chatsFromDB = new ArrayList<>(); db = FirebaseFirestore.getInstance(); storage = FirebaseStorage.getInstance(); storageReference = storage.getReference(); final DocumentReference docRef = db.collection("Chats").document(tripTitle); docRef.addSnapshotListener(new EventListener<DocumentSnapshot>() { @Override public void onEvent(@Nullable DocumentSnapshot snapshot, @Nullable FirebaseFirestoreException e) { if (e != null) { Log.w("trips", "Listen failed.", e); return; } if (snapshot != null && snapshot.exists()) { Log.d("trips", "Current data: " + snapshot.getData()); Chats chat = new Chats(snapshot.getData()); chatsFromDB.clear(); for(int i = 0; i<chat.chatList.size(); i++){ Map<String,String> map = (Map<String, String>) chat.chatList.get(i); chatsFromDB.add(new ChatObject(map)); } chatAdapter = new ChatAdapter(chatsFromDB); rv_chat.setAdapter(chatAdapter); rv_chat.scrollToPosition(chatsFromDB.size()-1); } else { Log.d("trips", "Current data: null"); } } }); iv_sendButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { String msg = et_messages.getText().toString(); if(msg.equals("")){ Toast.makeText(ChatRoomActivity.this, "Enter a message", Toast.LENGTH_SHORT).show(); } else{ sendMessage("0"); } } }); iv_openGallery.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(); intent.setType("image/*"); intent.setAction(Intent.ACTION_GET_CONTENT); startActivityForResult(Intent.createChooser(intent, "Select Picture"), PICK_IMAGE); } }); iv_chatDelete.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(ChatRoomActivity.this, TripActivity.class); startActivity(intent); finish(); } }); } public static String formatDateTimeFromDate(String mDateFormat, Date date) { if (date == null) { return null; } return DateFormat.format(mDateFormat, date).toString(); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == PICK_IMAGE) { filePath = data.getData(); try { Bitmap bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), filePath); // isImageSent = true; sendMessage("1"); // uploadImage(); // iv_displayPhoto.setImageBitmap(bitmap); } catch (IOException e){ e.printStackTrace(); } } } //uploading the image to firebase storage with progress bar display private void uploadImage(String msgID) { if(filePath != null) { final ProgressDialog progressDialog = new ProgressDialog(this); progressDialog.setTitle("Sending Image..."); progressDialog.show(); imageUrl = "chats/"+ msgID; StorageReference ref = storageReference.child(imageUrl); ref.putFile(filePath) .addOnSuccessListener(new OnSuccessListener<UploadTask.TaskSnapshot>() { @Override public void 
onSuccess(UploadTask.TaskSnapshot taskSnapshot) { progressDialog.dismiss(); } }) .addOnFailureListener(new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { progressDialog.dismiss(); } }) .addOnProgressListener(new OnProgressListener<UploadTask.TaskSnapshot>() { @Override public void onProgress(UploadTask.TaskSnapshot taskSnapshot) { double progress = (100.0*taskSnapshot.getBytesTransferred()/taskSnapshot .getTotalByteCount()); progressDialog.setMessage("Uploaded "+(int)progress+"%"); } }); } } public void sendMessage(String isImageSent){ String message = et_messages.getText().toString(); String msgID = UUID.randomUUID().toString(); String DATE_FORMAT = "MM/dd HH:mm"; // String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss"; String currentTime = formatDateTimeFromDate(DATE_FORMAT, Calendar.getInstance().getTime()); if(isImageSent.equals("0")){ chatsFromDB.add(new ChatObject(MainActivity.loggedInUserName, tripTitle, msgID, message, "0", currentTime)); Chats chat = new Chats(chatsFromDB); db.collection("Chats").document(tripTitle) .set(chat.ToHashMap()) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if(task.isSuccessful()){ Log.d("chats", "chat added successfully"); // chatAdapter = new ChatAdapter(chatsFromDB); // rv_chat.setAdapter(chatAdapter); et_messages.setText(""); } else{ Log.d("trip", task.getException().toString()); } } }); } else{ uploadImage(msgID); chatsFromDB.add(new ChatObject(MainActivity.loggedInUserName, tripTitle, msgID, imageUrl, "1", currentTime)); Chats chat = new Chats(chatsFromDB); db.collection("Chats").document(tripTitle) .set(chat.ToHashMap()) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if(task.isSuccessful()){ Log.d("chats", "chat added successfully"); // chatAdapter = new ChatAdapter(chatsFromDB); // rv_chat.setAdapter(chatAdapter); et_messages.setText(""); } else{ Log.d("trip", task.getException().toString()); } } }); } } } <file_sep>package com.example.homework07; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import android.content.Intent; import android.os.Bundle; import android.text.Editable; import android.text.TextWatcher; import android.util.Log; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.ImageView; import android.widget.ListView; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.Task; import com.google.firebase.firestore.FirebaseFirestore; import com.google.firebase.firestore.QueryDocumentSnapshot; import com.google.firebase.firestore.QuerySnapshot; import java.util.ArrayList; public class ViewUsersActivity extends AppCompatActivity { public ArrayAdapter<User> adapter; private EditText et_searchUsers_viewUsers; private ImageView iv_cancel_viewUsers; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_view_users); final ListView lv_viewUser; final FirebaseFirestore db; db=FirebaseFirestore.getInstance(); lv_viewUser=findViewById(R.id.lv_userView); iv_cancel_viewUsers=findViewById(R.id.iv_cancel_viewUsers); et_searchUsers_viewUsers=findViewById(R.id.et_searchUser_viewUser); et_searchUsers_viewUsers.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } 
@Override public void onTextChanged(CharSequence s, int start, int before, int count) { } @Override public void afterTextChanged(Editable s) { final String searchedName=et_searchUsers_viewUsers.getText().toString(); final ArrayList<User> UserList=new ArrayList<User>(); db.collection("User") .get() .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() { @Override public void onComplete(@NonNull Task<QuerySnapshot> task) { if (task.isSuccessful()) { for (QueryDocumentSnapshot document : task.getResult()) { User u=new User(document.getData()); if(u.userName.contains(searchedName)) { if(!u.userName.equals(MainActivity.loggedInUserName)) { UserList.add(u); } } } adapter=new ArrayAdapter<User>(getBaseContext(),android.R.layout.simple_list_item_1,UserList); lv_viewUser.setAdapter(adapter); } else { Log.d("demo", "Error getting documents: ", task.getException()); } } }); } }); if(getIntent()!=null) { final ArrayList<User> UserList=new ArrayList<User>(); db.collection("User") .get() .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() { @Override public void onComplete(@NonNull Task<QuerySnapshot> task) { if (task.isSuccessful()) { for (QueryDocumentSnapshot document : task.getResult()) { User u=new User(document.getData()); if(!u.userName.equals(MainActivity.loggedInUserName)) { UserList.add(u); } } adapter=new ArrayAdapter<User>(getBaseContext(),android.R.layout.simple_list_item_1,UserList); lv_viewUser.setAdapter(adapter); } else { Log.d("demo", "Error getting documents: ", task.getException()); } } }); lv_viewUser.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { User i=adapter.getItem(position); Intent intent=new Intent(ViewUsersActivity.this,FriendsDetailsActivity.class); String firstName=i.firstName.toString(); String lastName=i.lastName.toString(); String userName=i.userName.toString(); String Image=i.imgUrl.toString(); String gender=i.gender.toString(); intent.putExtra("firstName",firstName); intent.putExtra("lastName",lastName); intent.putExtra("userName",userName); intent.putExtra("Image",Image); intent.putExtra("gender",gender); startActivity(intent); } }); iv_cancel_viewUsers.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { finish(); } }); } } }
937010c78c22c2e65f77367a61285972f4d2557f
[ "Java" ]
6
Java
deepakVeerapandian/Homework07
3224af7322d43d48aba188f4fe1c65a06bd1332f
a7f625a8a8a44cd23407e414a363bfe4336afd95
refs/heads/master
<repo_name>TsungYuChien/Chata<file_sep>/src/main/java/yzujbs/chata/RegisterPage.java package yzujbs.chata; import android.content.Intent; import android.graphics.Color; import android.graphics.drawable.GradientDrawable; import android.support.annotation.NonNull; import android.support.v7.app.ActionBar; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.text.InputType; import android.text.method.PasswordTransformationMethod; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.Task; import com.google.firebase.auth.AuthResult; import com.google.firebase.auth.FirebaseAuth; public class RegisterPage extends AppCompatActivity { LinearLayout ll; Button btnRegister; EditText edtMail,edtPassword,edtcheckpd; private FirebaseAuth mAuth; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_register_page); ActionBar actionBar = getSupportActionBar(); //hide the action bar actionBar.hide(); //hide the action bar initialLayout(); } void initialLayout(){ //-------mail------------- edtMail=new EditText(this); edtMail=findViewById(R.id.edtMail); //--------Password----------- edtPassword=new EditText(this); edtPassword=findViewById(R.id.edtPassword); //--------Password Check---------- edtcheckpd=new EditText(this); edtcheckpd=findViewById(R.id.edtCheckpd); //-------button----------- btnRegister=new Button(this); btnRegister=findViewById(R.id.btnRegister); btnRegister.setOnClickListener(new Button.OnClickListener(){ @Override public void onClick(View v) { mAuth = FirebaseAuth.getInstance(); mAuth.createUserWithEmailAndPassword(edtMail.getText().toString(), edtPassword.getText().toString()) .addOnCompleteListener(RegisterPage.this, new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { Intent it =new Intent(RegisterPage.this,FillresumePage.class); startActivity(it); Toast.makeText(RegisterPage.this, "success", Toast.LENGTH_SHORT).show(); } else { Toast.makeText(RegisterPage.this, task.getException().getMessage(), Toast.LENGTH_SHORT).show(); } } }); } }); } } <file_sep>/README.md # Chata 第二屆元智創意app競賽 - Chata <file_sep>/src/main/java/yzujbs/chata/ListTalent.java package yzujbs.chata; import android.graphics.Color; import android.support.annotation.NonNull; import android.support.v7.app.ActionBar; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.Menu; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.WindowManager; import android.widget.RelativeLayout; import android.widget.TextView; import yzujbs.chata.R; public class ListTalent extends AppCompatActivity { private RecyclerView blogList; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,WindowManager.LayoutParams.FLAG_FULLSCREEN); setContentView(R.layout.activity_list_talent); initial(); //---------------Action 
Bar----------------- // TextView tv=new TextView(getApplicationContext()); // RelativeLayout.LayoutParams txtview=new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT); // tv.setLayoutParams(txtview); // tv.setText("ChaTa"); // tv.setTextSize(35); // tv.setTextColor(Color.rgb(255,253,247)); // // getSupportActionBar().setDisplayOptions(ActionBar.DISPLAY_SHOW_CUSTOM); // getSupportActionBar().setCustomView(tv); } void initial(){ blogList=findViewById(R.id.blog_list); blogList.setHasFixedSize(true); blogList.setLayoutManager(new LinearLayoutManager(this)); } @Override protected void onStart() { super.onStart(); } public static class BlogViewHolder extends RecyclerView.ViewHolder{ View view; public BlogViewHolder(@NonNull View itemView) { super(itemView); view=itemView; } public void setTitle(String title){ TextView talentTitle=(TextView)view.findViewById(R.id.TalentTitle); talentTitle.setText(title); } public void setLocal(String local){ TextView talentLocal=(TextView)view.findViewById(R.id.TalentLocal); talentLocal.setText(local); } public void setTalent(String talent){ TextView talentTalent=(TextView)view.findViewById(R.id.TalentTalent); talentTalent.setText(talent); } public void setLearn(String learn){ TextView talentLearn=(TextView)view.findViewById(R.id.TalentLearn); talentLearn.setText(learn); } } @Override public boolean onCreateOptionsMenu(Menu menu) { return super.onCreateOptionsMenu(menu); } }
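ListTalent above initializes a RecyclerView and defines BlogViewHolder, but no adapter is ever attached. The sketch below shows one plausible way to wire it up; the Talent model class, the R.layout.talent_row row layout, and the field names are assumptions and are not part of this repository.

package yzujbs.chata;

import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import java.util.List;

// Hypothetical adapter binding a list of talent entries to ListTalent.BlogViewHolder.
public class TalentAdapter extends RecyclerView.Adapter<ListTalent.BlogViewHolder> {

    // Hypothetical plain model; the real project presumably fills these from Firebase.
    public static class Talent {
        public String title, local, talent, learn;
    }

    private final List<Talent> items;

    public TalentAdapter(List<Talent> items) {
        this.items = items;
    }

    @NonNull
    @Override
    public ListTalent.BlogViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        // R.layout.talent_row is an assumed row layout containing the four TextViews
        // (TalentTitle, TalentLocal, TalentTalent, TalentLearn) that BlogViewHolder looks up.
        View row = LayoutInflater.from(parent.getContext())
                .inflate(R.layout.talent_row, parent, false);
        return new ListTalent.BlogViewHolder(row);
    }

    @Override
    public void onBindViewHolder(@NonNull ListTalent.BlogViewHolder holder, int position) {
        Talent t = items.get(position);
        holder.setTitle(t.title);
        holder.setLocal(t.local);
        holder.setTalent(t.talent);
        holder.setLearn(t.learn);
    }

    @Override
    public int getItemCount() {
        return items.size();
    }
}

With something like this in place, a call such as blogList.setAdapter(new TalentAdapter(talents)); inside ListTalent.initial() would actually populate the list.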
844873cf11ba34a206a3bea87d894cc52bbbfd16
[ "Markdown", "Java" ]
3
Java
TsungYuChien/Chata
10cebad3e8355d0d76e19cfe2881b5e5ae737c8f
07a6240b0a9f200f19be6edb77ac05d80b427adc
refs/heads/master
<file_sep>(function init(){ var sliders = []; var layers = []; var layer; var canvas, ctx; var layerSettings = { url: "", name: "", run: "", value: null, top: 0, left: 0 }; window.jraw = function(id){ canvas = document.getElementById(id); ctx = canvas.getContext('2d'); this.data; this.buffer; var slidersCount = 0; this.newLayer = function(settings){ var layer = new Layer(settings); //console.log(layer) layers[settings.name] = layer; layers[settings.name]["name"] = settings.name; return layer; }; this.setActiveLayer = function(name){ layer = layers[name]; return this; }; this.attachSlider = function(sliderId, attr){ this.sliders[slidersCount] = document.getElementById(sliderId); var slider = this.sliders[slidersCount]; slider.ready = this.ready; slider.init = function(){ attr.ready(); }; slider.onchange = function(){ if(slider.ready){ attr.setValue(slider.value); attr.run(); } }; slidersCount++; return slider; }; this.paint = function(){ var interval; var width = 0, height = 0; interval = setInterval(function(){ for(var r in layers){ if(!layers[r].ready){ return; } } for(var i in layers){ var img = new Image(); img.src = layers[i].getImage(); if(img.width > width){ canvas.width = width = img.width; } if(img.height > height){ canvas.height = height = img.height; } //console.log(layers[i].name) ctx.drawImage(img, layers[i].left, layers[i].top, layers[i].width, layers[i].height); } clearInterval(interval); }, 10); }; this.applyFilter = function(){ ctx.putImageData(this.data, 0, 0); for(var i in this.sliders){ this.sliders[i].init(); } }; this.cancelFilter = function(){ ctx.putImageData(this.data, 0, 0); for(var i in this.sliders){ this.sliders[i].init(); } }; }; function Layer(s){ this.settings = defaultOptions(layerSettings, s); this.canvas = document.createElement('canvas'); this.ctx = this.canvas.getContext('2d'); this.ready = false; this.loaded = false; this.name = this.settings.name; this.width = 0, // image width this.height = 0, // image height this.left = this.settings.left, // image main canvas left offset this.top = this.settings.top // image main canvas top offset ; var img = new Image(); img.src = this.settings.url; var self = this; img.onload = function(){ self.canvas.width = self.width = img.width; self.canvas.height = self.height = img.height; self.ctx.drawImage(img, 0, 0, img.width, img.height); self.data = self.ctx.getImageData(0, 0, img.width, img.height); self.loaded = true; if(self.settings.run !== ""){ self.run(self.settings.run, self.settings.value); } /*for(var i in self.sliders){ self.sliders[i].ready = self.ready; self.sliders[i].init(); }*/ }; this.run = function(attr, value){ var interval; interval = setInterval(function(){ if(!self.loaded){ return; } attr.setLayer(self); attr.ready(); attr.setValue(value); attr.run(); self.ready = true; clearInterval(interval); }, 10); }; this.getImage = function(){ return this.canvas.toDataURL(); }; } window.Color = function(){ }; Color.RGBtoHSB = function(r, g, b){ r /= 255, g /= 255, b /= 255; var max = Math.max(r, g, b), min = Math.min(r, g, b); var h, s, l = (max + min) / 2; if(max === min){ h = s = 0; // achromatic }else{ var d = max - min; s = l > 0.5 ? d / (2 - max - min) : d / (max + min); switch(max){ case r: h = (g - b) / d + (g < b ? 
6 : 0); break; case g: h = (b - r) / d + 2; break; case b: h = (r - g) / d + 4; break; } h /= 6; } return [h, s, l]; }; Color.HSBtoRGB = function(h, s, l){ var r, g, b; if(s === 0){ r = g = b = l; // achromatic }else{ function hue2rgb(p, q, t){ if(t < 0) t += 1; if(t > 1) t -= 1; if(t < 1 / 6) return p + (q - p) * 6 * t; if(t < 1 / 2) return q; if(t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6; return p; } var q = l < 0.5 ? l * (1 + s) : l + s - l * s; var p = 2 * l - q; r = hue2rgb(p, q, h + 1 / 3); g = hue2rgb(p, q, h); b = hue2rgb(p, q, h - 1 / 3); } return [r * 255, g * 255, b * 255]; }; function defaultOptions(defaults, replacements){ var items = {}; for(var i in defaults){ items[i] = defaults[i]; } for(var i in replacements){ items[i] = replacements[i]; } return items; } })();<file_sep>#jraw-0.0.1-alpha# jraw (Draw with a "j" instead of a "d") is a JavaScript Library that uses a JavaScript Canvas for each layer to manipulate images in memory and display them on one main canvas on the page. You can attach inputs such as text fields or range sliders to an action like Brightness or Contrast. ##Currently Supported## ###Adjustments### - Brightness - Contrast - Hue - Saturation ##Things To Change (TODO)## - ~~Needs to be able to create and adjust layers individually. jraw should be the main class which can initiate a layer class. The layer class will then allow for image manipulation just on that layer.~~ - Adjustments - ~~Brightness~~ - ~~Contrast~~ - ~~Hue~~ - ~~Saturation~~ - Grayscale - Invert - Threshold - Color Adjustments (RGB) - Shadows - Highlights - Midtones - Filters - Transform - Rotate - Scale - Crop - Skew - Flip Vertical - Flip Horizontal - Blending - Disolve - Darken - Multiply - Color Burn - Linear Burn - Lighten - Screen - Color Dodge - Linear Dodge - Overlay - Soft Light - Hard Light - Vivid Light - Linear Light - Pin Light - Hard Mix - Difference - Exclusion - Hue - Saturation - Color - Luminosity <file_sep>function Contrast(j){ this.data; this.jraw = j; this.amount = 0; this.cwImage; this.ready = function(){ this.data = this.jraw.ctx.getImageData(0, 0, this.jraw.canvas.width, this.jraw.canvas.height); this.cwImage = this.jraw.ctx.getImageData(0, 0, this.jraw.canvas.width, this.jraw.canvas.height); }; this.setValue = function(amount){ this.amount = parseInt(amount); }; this.run = function(){ var value = (255.0 + this.amount) / 255.0; value *= value; for(var i = 0; i < this.cwImage.data.length; i++){ var r = this.cwImage.data[i]; var g = this.cwImage.data[i+1]; var b = this.cwImage.data[i+2]; var red = r / 255.0; var green = g / 255.0; var blue = b / 255.0; red = (((red - 0.5) * value) + 0.5) * 255.0; green = (((green - 0.5) * value) + 0.5) * 255.0; blue = (((blue - 0.5) * value) + 0.5) * 255.0; var iR = red; iR = iR > 255 ? 255 : iR; iR = iR < 0 ? 0 : iR; var iG = green; iG = iG > 255 ? 255 : iG; iG = iG < 0 ? 0 : iG; var iB = blue; iB = iB > 255 ? 255 : iB; iB = iB < 0 ? 
0 : iB; this.data.data[i] = iR; this.data.data[i+1] = iG; this.data.data[i+2] = iB; i += 3; } this.jraw.ctx.putImageData(this.data, 0, 0); } }<file_sep>function Brightness(){ this.data; this.cwImage; this.jraw; this.amount = 0; this.ready = function(){ this.data = this.jraw.ctx.getImageData(0, 0, this.jraw.canvas.width, this.jraw.canvas.height); this.cwImage = this.jraw.ctx.getImageData(0, 0, this.jraw.canvas.width, this.jraw.canvas.height); }; this.setLayer = function(layer){ this.jraw = layer; }; this.setValue = function(amount){ this.amount = parseInt(amount); }; this.run = function(){ for(var i = 0; i < this.cwImage.data.length; i++){ var red = this.cwImage.data[i]; var green = this.cwImage.data[i + 1]; var blue = this.cwImage.data[i + 2]; red += this.amount; if(red > 255){ red = 255; }else if(red < 0){ red = 0; } green += this.amount; if(green > 255){ green = 255; }else if(green < 0){ green = 0; } blue += this.amount; if(blue > 255){ blue = 255; }else if(blue < 0){ blue = 0; } this.data.data[i] = red; this.data.data[i+1] = green; this.data.data[i+2] = blue; i += 3; } this.jraw.ctx.putImageData(this.data, 0, 0); }; }
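The jraw README describes attaching layers and adjustments such as Brightness to a main canvas. Pieced together from jraw.js and Brightness.js above, a plausible usage sketch looks like the following; the canvas id, image path and adjustment value are placeholders, and the snippet assumes both scripts have already been loaded on the page.

// Assumes an element <canvas id="mainCanvas"> exists and that jraw.js and Brightness.js
// (shown above) are loaded. 'photo.jpg' and the value 40 are placeholders.
var editor = new jraw('mainCanvas');

// Create a layer and run a Brightness adjustment of +40 once the image has loaded;
// the run/value settings mirror what Layer's img.onload handler expects.
var photo = editor.newLayer({
  url: 'photo.jpg',
  name: 'photo',
  run: new Brightness(),
  value: 40,
  top: 0,
  left: 0
});

// Composite every ready layer onto the main canvas.
editor.paint();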
02d6fec3bdbe5cb74d4f0c20e29303e77c37c2aa
[ "JavaScript", "Markdown" ]
4
JavaScript
TheColorRed/jraw
fd5994743f47e50359015255c0da141caf4a6e92
02f3fd3c3f03e82b79f6b42764363da954697f28
refs/heads/master
<repo_name>skeletalbassman/pytix<file_sep>/wrappers/trello.py '''wrapper class for Trello REST API''' import requests import yaml import datetime BASE = "https://api.trello.com/1/" class Trello(): def __init__(self, project=None, username=None, password=None): self._key = None self._token = None self._authorize() if project: self._board = self.setProject(project) else: try: with open("projects.yaml", "r") as f: data = f.read() boards = yaml.load(data) self._board = boards["trello"] except IOError: print "If you have not previously set a Trello board as your current project, you must\nspecify a board name." board_name = raw_input("Board name: ") self._board = self.setProject(board_name) def _authorize(self): try: with open("credentials.yaml", "r") as f: data = f.read() creds = yaml.load(data) except IOError: creds = {} if not "trello" in creds: print "Your API key was not found on file." print "Navigate to the following link to obtain your API key\nand paste it into the terminal below. Make sure you are logged into Trello before following the link." print "Link: https://trello.com/app-key" key = raw_input("API key: ") print "\nNow please follow the link below and click 'Allow'." print "Copy and paste the resulting token back into the terminal. Pytix will\ncache this key and token for future use. This is a one-time procedure." print "https://trello.com/1/authorize?expiration=never&scope=read%2Cwrite&name=pytix&key={}&response_type=token".format(key) token = raw_input("API token: ") self._key = key self._token = token new_creds = {} new_creds["key"] = key new_creds["token"] = token creds["trello"] = new_creds with open("credentials.yaml", "w") as f: f.write(yaml.dump(creds)) def _getCreds(self): with open("credentials.yaml", "r") as f: data = f.read() creds = yaml.load(data) key = creds["trello"]["key"] token = creds["trello"]["token"] return key, token def setProject(self, proj_name): key, token = self._getCreds() url = BASE + "members/me?&boards=all&key={0}&token={1}".format(key, token) response = requests.get(url) boards = response.json()["boards"] for board in boards: print board if board["name"] == proj_name: self._board = board["id"] try: with open("projects.yaml", "r") as f: data = f.read() projs = yaml.load(data) except IOError: projs = {} projs["trello"] = board["id"] with open("projects.yaml", "w") as f: f.write(yaml.dump(projs)) return board["id"] def getProject(self): key, token = self._getCreds() board = self._board url = BASE + "boards/{0}?lists=open&cards=open&key={1}&token={2}".format(board, key, token) response = requests.get(url) #TODO deal with the response here #what do we want to show the user about the board? json = response.json() lists = json["lists"] cards = json["cards"] list_stats = {} max_length = 0 for item in lists: cur_length = len(item["name"]) if cur_length > max_length: max_length = cur_length list_stats[item["id"]] = { "name": item["name"], "no. of cards": 0 } for card in cards: list_stats[card["idList"]]["no. of cards"] += 1 left_side = " List Name " right_side = " No. of Cards ".format("no. of cards") if len(left_side)-2 > max_length: max_length = len(left_side)-2 print "\n"+json["name"] print "\nStatistics:" print "-"*(19+max_length) print "|{0:{1}}|{2}|".format(left_side, max_length+2, right_side) print "-"*(19+max_length) for key in list_stats: name = " {} ".format(list_stats[key]["name"]) num = " {} ".format(str(list_stats[key]["no. 
of cards"])) print "|{0:{1}}|{2:14}|".format( name, max_length+2, num) print "-"*(19+max_length) def getList(self, name): key, token = self._getCreds() board = self._board url = BASE + "boards/{0}?lists=open&key={1}&token={2}".format(board, key, token) response = requests.get(url) json = response.json() for item in json["lists"]: if item["name"] == name: list_id = item["id"] if list_id: url = BASE + "lists/{0}?cards=open&key={1}&token={2}".format(list_id, key, token) response = requests.get(url) json = response.json() cards = {} max_name_len = 0 max_id_len = 0 for card in json["cards"]: if len(card["name"]) > max_name_len: max_name_len = len(card["name"]) if len(card["id"]) > max_id_len: max_id_len = len(card["id"]) cards[card["id"]] = { "name": card["name"], "id": card["id"] } left_side = " Card Name " right_side = " Card ID " if len(left_side)-2 > max_name_len: max_name_len = len(left_side)-2 if len(right_side)-2 > max_id_len: max_id_len = len(right_side)-2 print "\n"+json["name"] print "-"*(7+max_id_len+max_name_len) print "|{0:{1}}|{2:{3}}|".format(left_side, max_name_len+2, right_side, max_id_len+2) print "-"*(7+max_id_len+max_name_len) for key in cards: name = " {} ".format(cards[key]["name"]) ID = " {} ".format(cards[key]["id"]) print "|{0:{1}}|{2:{3}}|".format( name, max_name_len+2, ID, max_id_len+2) print "-"*(7+max_id_len+max_name_len) else: print "List not found. Check your spelling." def getTask(self, name=None, ID=None): if not name and not ID: print "You must specify either a card name or a card ID." return None key, token = self._getCreds() board = self._board url = BASE + "boards/{0}?cards=open&key={1}&token={2}".format(board, key, token) response = requests.get(url) json = response.json() card_id = None if ID: card_id = ID else: for card in json["cards"]: if card["name"] == name: card_id = card["id"] if card_id: url = BASE + "cards/{0}?actions=commentCard&key={1}&token={2}".format(card_id, key, token) response = requests.get(url) json = response.json() comments = {} max_name_len = 0 max_text_len = 0 max_date_len = 0 for comment in json["actions"]: if len(comment["memberCreator"]["username"])-2 > max_name_len: max_name_len = len(comment["memberCreator"]["username"]) if len(comment["data"]["text"])-2 > max_text_len: max_text_len = len(comment["data"]["text"]) date = comment["date"].split("T")[0] if len(date)-2 > max_date_len: max_date_len = len(date) comments[comment["id"]] = { "username": comment["memberCreator"]["username"], "text": comment["data"]["text"], "date": date } name = json["name"] name_label = " Username " text_label = " Comment Text " date_label = " Date " if len(name_label)-2 > max_name_len: max_name_len = len(name_label)-2 if len(text_label)-2 > max_text_len: max_text_len = len(text_label)-2 print "\n"+name print "-"*(10+max_text_len+max_name_len+max_date_len) print "|{0:{1}}|{2:{3}}|{4:{5}}|".format(name_label, max_name_len+2, text_label, max_text_len+2, date_label, max_date_len+2) print "-"*(10+max_text_len+max_name_len+max_date_len) #TODO need to handle comments where overall table width > 80 chars for key in comments: name = " {} ".format(comments[key]["username"]) text = " {} ".format(comments[key]["text"]) date = " {} ".format(comments[key]["date"]) print "|{0:{1}}|{2:{3}}|{4:{5}}|".format( name, max_name_len+2, text, max_text_len+2, date, max_date_len+2) print "-"*(10+max_text_len+max_name_len+max_date_len) else: print "Card not found. Check your spelling." 
def moveTask(self, name, from_list, to_list): key, token = self._getCreds() board = self._board board_url = BASE + "boards/{0}?lists=open&key={1}&token={2}".format(board, key, token) response = requests.get(board_url) json = response.json() from_id = to_id = None for item in json["lists"]: if item["name"] == from_list: from_id = item["id"] elif item["name"] == to_list: to_id = item["id"] if not from_id: print "Source board not found." return None if not to_id: print "Destination board not found." return None url1 = BASE + "lists/{0}?cards=open&key={1}&token={2}".format(from_id, key, token) response = requests.get(url1) json = response.json() card_id = None for card in json["cards"]: if card["name"] == name: card_id = card["id"] if not card_id: print "Card not found." return None url = BASE + "cards/{0}?idList={1}&pos=bottom&key={2}&token={3}".format(card_id, to_id, key, token) response = requests.put(url) json = response.json() print "'{0}' moved to list '{1}'".format(json["name"], to_list) def addTask(self, name, to_list): key, token = self._getCreds() board = self._board board_url = BASE + "boards/{0}?lists=open&key={1}&token={2}".format(board, key, token) response = requests.get(board_url) json = response.json() to_id = None for item in json["lists"]: if item["name"] == to_list: to_id = item["id"] if not to_id: print "Destination list not found." return None url = BASE + "cards?name={0}&idList={1}&due=null&key={2}&token={3}".format(name, to_id, key, token) response = requests.post(url, data={}) json = response.json() print "'{0}' added to list '{1}'".format(json["name"], to_list) def commentTask(self, name, text): if not name and not ID: print "You must specify either a card name or a card ID." return None key, token = self._getCreds() board = self._board url = BASE + "boards/{0}?cards=open&key={1}&token={2}".format(board, key, token) response = requests.get(url) json = response.json() card_id = None for card in json["cards"]: if card["name"] == name: card_id = card["id"] if not card_id: print "Card not found." return None url = BASE + "cards/{0}/actions/comments?key={1}&token={2}".format(card_id, key, token) data = { "text": text } response = requests.post(url, data=data) json = response.json() if text == json["display"]["entities"]["comment"]["text"]: print "Comment added successfully." else: print "There was an error in processing your comment." def deleteTask(self, name): if not name and not ID: print "You must specify either a card name or a card ID." return None key, token = self._getCreds() board = self._board url = BASE + "boards/{0}?cards=open&key={1}&token={2}".format(board, key, token) response = requests.get(url) json = response.json() card_id = None for card in json["cards"]: if card["name"] == name: card_id = card["id"] if not card_id: print "Card not found." return None url = BASE + "cards/{0}?key={1}&token={2}".format(card_id, key, token) response = requests.delete(url, data={}) json = response.json() if "_value" in json: if json["_value"] == None: print "Card deleted successfully." else: print "Card could not be deleted." if __name__ == "__main__": trello = Trello() #trello.getList("Current Sprint") trello.deleteTask("Test Card")<file_sep>/README.md # pytix Python ticket management command line tool
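trello.py above doubles as a library and a script; a minimal sketch of how the wrapper is meant to be driven interactively is shown below. It is Python 2 like the rest of the repo, assumes the wrappers package is importable from the project root, and the board, list and card names are placeholders.

# Minimal usage sketch for the Trello wrapper above (Python 2, like the source).
# "My Board", "Backlog", "Current Sprint" and the card name are placeholder values.
from wrappers.trello import Trello

board = Trello(project="My Board")   # prompts for the API key/token on first run

board.getProject()                    # prints per-list card counts for the board
board.getList("Current Sprint")       # prints the cards in one list

board.addTask("Write docs", "Backlog")
board.moveTask("Write docs", "Backlog", "Current Sprint")
board.commentTask("Write docs", "Started drafting the README")
board.getTask(name="Write docs")      # prints the card's comments
board.deleteTask("Write docs")

As in the wrapper itself, credentials and the active board id end up cached in credentials.yaml and projects.yaml, so the prompts only appear once.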
9587ae1f6d5cf2598a9b8d98b8dda3671805546f
[ "Markdown", "Python" ]
2
Python
skeletalbassman/pytix
02f8cb6f256068eaf9f12ad1f370e91301cb2889
818d1a444398246971bccea6fe15dcce86918063
refs/heads/master
<file_sep>class WordDef attr_reader :value def initialize(value, language) @value = value @language = language end end class Word < WordDef end class Definition < WordDef end <file_sep>Dictionary Purpose: Completed as part of the Epicodus curriculum. Introduction to object oriented design. * DONE allows users to add words to the dictionary. * DONE allows users to add definitions to words. * DONE Supports multiple langugaes. <file_sep># require 'word_def' class Term @@all_terms = [] attr_reader :words, :definitions, :id def initialize(id) @words = [] @definitions = [] @id = id end def Term.all_terms @@all_terms end def Term.search(word) desired_term = "" @@all_terms.each do |term| term.words.each do |current_word| if current_word.value == word desired_term = term end end end desired_term end def save @@all_terms << self end def Term.create(word, definition, language='English') term = Term.new(@@all_terms.length + 1) term.words << Word.new(word, language) term.definitions << Definition.new(definition, language) term.save term end def Term.clear @@all_terms = [] end def set_id(id) @id = id end def add_word(new_word, language) @words << Word.new(new_word, language) end def add_definition(new_definition, language) @definitions << Definition.new(new_definition, language) end def remove_word(index) @words.delete_at(index) end def remove_definition(index) @definitions.delete_at(index) end def remove @@all_terms.delete_at(@id - 1) @@all_terms.each_with_index do |term, index| term.set_id(index + 1) end end end <file_sep>require 'rspec' require 'term' require './lib/word_def' describe Term do before do Term.clear end describe 'initialize' do it 'initializes the term with a word and definition' do test_term = Term.new(1) test_term.should be_an_instance_of Term end end describe '#save' do it'saves every instance of the Term class to the all array' do test_term = Term.new(1).save Term.all_terms.length.should eq 1 end end describe '.create' do it 'creates an initialized instance of Term' do test_term = Term.create('koala','the original drug addicts') test_term.should be_an_instance_of Term end it 'creates an initialized instance of Term' do test_term = Term.create('koala','the original drug addicts') Term.all_terms.length.should eq 1 end end describe '#remove' do it 'removes a term from the all_terms array' do test_term = Term.create('lion', 'animal that eats zebras') test_term.remove Term.all_terms.length.should eq 0 end end describe'#set_id' do it 'resets the id for each term' do test_term = Term.create('panda', 'the animal with the most youtube hits for sneezing') test_term.set_id(5) test_term.id.should eq 5 end end describe '#add_word' do it 'allows a user to edit the word associated with a definition' do test_term = Term.create('cow', 'the subject of the infamous game "cow tipping" in rural areas of the United States') test_word = Word.new('pig') test_term.add_word(test_word) test_term.words.length.should eq 2 end end describe '#add_definition' do it 'allows a user to update the definition of a word' do test_term = Term.create('shark', 'related to Bruce the shark known to chant "fish are friends not food" in finding Nemo') test_definition = Definition.new("but they are so scary") test_term.add_definition(test_definition) test_term.definitions.length.should eq 2 end end describe '.search' do it 'allows a user to search all the all of the terms for the desired word' do test_term = Term.create('lioness', 'the lions wife') test_term1 = Term.create('cricket', 'the animal that makes the sound that lulls 
you to sleep at night') Term.search('cricket').should eq test_term1 end end end <file_sep>require './lib/term' require './lib/word_def' @current_term def main_menu puts "press n if you want to add a new term to your fantastic animal dictionary" puts "press l to list the animals in your fantastic animal dictionary" puts "press s to search for an animal" puts "press x to exit this fantastic dictionary" main_choice = gets.chomp case main_choice when 'n' add_term when 'l' list_terms when 's' search_terms when 'x' puts "You have terminated the dictionary." exit else "that was not a valid choice" main_menu end end def add_term puts "enter the language of the term you want to add" language = gets.chomp puts "please enter the animal you want to define" current_animal = gets.chomp puts "please enter the definition you want to add" current_definition = gets.chomp current_term = Term.create(current_animal, current_definition, language) puts "the definition for #{current_animal} was succesfully created" main_menu end def list_terms Term.all_terms.each do |term| puts "#{term.id}. #{term.words[0].value} - #{term.definitions[0].value}" end edit_menu end def edit_menu puts "Enter the id of a term to edit it" puts "Press 'm' to return to the main menu" user_choice = gets.chomp if user_choice == 'm' main_menu elsif !user_choice.match(/\d/).nil? @current_term = Term.all_terms[user_choice.to_i - 1] edit_term else puts "That choice was not valid. Idiot." idiot_menu end end def edit_term puts "press r to remove the term" puts "press w to list all of the words associated with the term: here you can remove, edit, or ....ADDDDD a term" puts "press d to list all of the definitions associated with the term: here! you can remove, edit or add a definition:-)" puts "press m to return to the main menu" edit_term_choice = gets.chomp case edit_term_choice when 'r' @current_term.remove when 'w' edit_words when 'd' edit_definitions when 'm' main_menu else puts "that was not a valid input...please learn how to read and think" edit_term end end def list_words @current_term.words.each_with_index do |word, index| puts "#{index +1}. #{word.value} #{word.language}" end end def list_definitions @current_term.definitions.each_with_index do |definition, index| puts "#{index +1}. #{definition.value} #{definition.language}" end end def search_terms puts "enter the word that you want to search" desired_word = gets.chomp desired_term = Term.search(desired_word) p desired_term @current_term = desired_term list_words list_definitions puts "Press s to search for another word" puts "Press m to return to the main menu" user_choice = gets.chomp case user_choice when 's' puts "Enter a word to search for" word_choice = gets.chomp search_terms(word_choice) when 'm' main_menu else "You are a dumbass" search_terms(word) end end def edit_words list_words puts "Enter the number of a word to change or remove it" puts "Press m to go back to the main menu" puts "press a to add a new word associated with the current definition" user_choice = gets.chomp if user_choice == 'm' main_menu elsif user_choice == 'a' puts "what language is the word you want to add in?" language = gets.chomp puts "please enter the word that you want to add" new_word = gets.chomp @current_term.add_word(new_word, language) elsif !user_choice.match(/\d/).nil? 
puts "Press r to remove the word from the term" puts "Enter an updated word to change word" edit_choice = gets.chomp case edit_choice when 'r' @current_term.remove_word(user_choice.to_i - 1) else @current_term.remove_word(user_choice.to_i - 1) @current_term.add_word(edit_choice) end else "Seriously? You haven't gotten this yet??" edit_words end main_menu end def edit_definitions list_definitions puts "Enter the number of a definition to change or remove it" puts "Press m to go back to the main menu" puts "Press a to add a new definition to a term" user_choice = gets.chomp if user_choice == 'm' main_menu elsif user_choice == 'a' puts "please enter the language of the definition" language = gets.chomp puts "pleae enter the new definition you want added to the term" new_definition = gets.chomp @current_term.add_definition(new_definition, language) elsif !user_choice.match(/\d/).nil? puts "Press r to remove the definition from the term" puts "Enter an updated definition to change definition" edit_choice = gets.chomp case edit_choice when 'r' @current_term.remove_definition(user_choice.to_i - 1) else @current_term.remove_definition(user_choice.to_i - 1) @current_term.add_definition(edit_choice) end else "Seriously? You haven't gotten this yet??" edit_definitions end main_menu end def idiot_menu puts"holy crap why do you keep getting this wrong" puts "press a if you want your punishment in lashes" puts "press b if you would prefer chinese water torutre" puts "press c if old fasion flogging is your style" idiot_choice = gets.chomp case idiot_choice when 'a' puts "you are still an idiot" edit_menu when 'b' puts "your are an even larger idiot...who wants chinese water torutre" edit_menu when 'c' puts "flogging is the worst choice, have you watched game of thrones?" edit_menu end end main_menu <file_sep>require 'rspec' require 'word_def' describe WordDef do it 'creates an instance of WordDef' do test_instance = WordDef.new("this value") test_instance.should be_an_instance_of WordDef end it 'should initialize the instance of WordDef with the passed value equal to the instance value' do test_instance = WordDef.new("this value") test_instance.value.should eq "this value" end end describe Word do it 'creates an instance of Word' do test_instance = Word.new("this value") test_instance.should be_an_instance_of Word end it 'should initialize the instance of Word with the passed value equal to the instance value' do test_instance = Word.new("this value") test_instance.value.should eq "this value" end end describe Definition do it 'creates an instance of Definition' do test_instance = Definition.new("this value") test_instance.should be_an_instance_of Definition end it 'should initialize the instance of Definition with the passed value equal to the instance value' do test_instance = Definition.new("this value") test_instance.value.should eq "this value" end end
ebb2c1415a042e41e6b983d606b0143b4e811db5
[ "Markdown", "Ruby" ]
6
Ruby
BordRGal32/Dictionary
ba6654e545603af059f7d2fc08bde87ddf0f73ef
913def149d4bdb4074d1a5ed7a2f8eb96873e172
refs/heads/master
<file_sep># WeatherApp_NodeJS Weather App using NodeJS ## Content: Callbacks APi Requests <file_sep>const request = require('postman-request'); // const forecast = (latitude, longitude, callback) => { // const url = // 'http://api.weatherstack.com/current?access_key=<KEY>&query=' + // latitude + // ',' + // longitude + // '&units=m'; // request({ url: url, json: true }, (error, response) => { // if (error) { // callback('Unable to connect to weather service.', undefined); // } else if (response.body.error) { // callback('Unable to find location', undefined); // } else { // callback(undefined, 'It is currently ' + // response.body.current.temperature + // ' degrees. It feels like ' + // response.body.current.feelslike + // ' degrees outside' // ); // } // }); const forecast = (latitude, longitude, callback) => { const url = 'http://api.weatherstack.com/current?access_key=<KEY>&query=' + latitude + ',' + longitude + '&units=m'; request({ url, json: true }, (error, {body}) => { if (error) { callback('Unable to connect to to weather service', undefined); } else if (body.error) { callback('Unable find weather location', undefined); } else { callback( undefined, 'It is currently ' + body.current.temperature + ' degrees. It feels like ' + body.current.feelslike + ' degrees outside' ); } }); }; module.exports = forecast; // ! bug fixed down know the problem was<file_sep>const geoCode = require('./shared/geocode'); const forcast = require('./shared/forcast'); const address = process.argv[2]; if (!address) { console.log('Please provide a valid address'); } else { geoCode(address, (error, { latitude, longitude, location }) => { if (error) { return console.log(error); } forcast(latitude, longitude, (error, forecastData) => { if (error) { return console.log(error); } console.log('Local: ', location); console.log('Data: ', forecastData); }); }); }
5bd26e5fd10226b63d1d5912ef89e337ea9393ae
[ "Markdown", "JavaScript" ]
3
Markdown
rickcarvlh/WeatherApp_NodeJS
c0f55ff9dc8402d2c6de25cfcfef029ebb195d4e
2e3903a5c2aa032613c9001c442d5919d5b17ed3
refs/heads/master
<file_sep>const express =require('express'); const multer =require('multer'); const ejs =require('ejs'); const path=require('path'); //Init app const app=express(); const port=3000; app.listen(port,()=> console.log(`Server started on port ${port}`));
aec5003098067b6894028188ed4e8ec80a4fb679
[ "JavaScript" ]
1
JavaScript
kirikaran/File_Upload_Using_NodeJS
1ff97a60c7088b74e7562ba1614b8eeea58653d1
3aecf3b5305f0c0612da5f605c7e815f99cb9a36
refs/heads/master
<file_sep>#pragma once #include <boost/fusion/container/map.hpp> #include <boost/fusion/include/at_key.hpp> #include <boost/type_index.hpp> #include "object_pool.hpp" namespace variant3 { namespace detail { template <typename T> struct get_resource; template <typename ResourceType, typename Constructor> struct get_resource<object_pool<ResourceType, Constructor>> { using type = ResourceType; }; } template <typename T> using get_resource_t = typename detail::get_resource<T>::type; template <typename... ObjectPools> class multi_object_pool { public: multi_object_pool(ObjectPools &&... pools) : map_(boost::fusion::make_pair<get_resource_t<ObjectPools>>( std::move(pools))...) {} template <typename T> std::shared_ptr<handle<T>> get_resource() { return boost::fusion::at_key<T>(map_).get_resource(); } private: using map_type = boost::fusion::map< boost::fusion::pair<get_resource_t<ObjectPools>, ObjectPools>...>; map_type map_; }; template <typename... ObjectPools> multi_object_pool<ObjectPools...> make_multi_object_pool(ObjectPools &&... object_pools) { return multi_object_pool<ObjectPools...>(std::move(object_pools)...); } } <file_sep>#include "variant2/object_pool.hpp" #include "gtest/gtest.h" #include <memory> class resource { public: resource(std::string name = "default") : name_(name) { std::cout << "resource created: " << name_ << std::endl; } ~resource() { std::cout << "resource freed: " << name_ << std::endl; } private: std::string name_; }; TEST(variant1, all) { std::cout << "main" << std::endl; auto my_pool = variant2::object_pool<resource>(); auto h1 = my_pool.get_resource(); auto h2 = my_pool.get_resource(); { auto h3 = my_pool.get_resource(); } auto h4 = my_pool.get_resource(); std::cout << "----" << std::endl; { std::cout << "----" << std::endl; auto my_pool2 = variant2::object_pool<resource>(); h1 = my_pool2.get_resource(); auto h5 = my_pool2.get_resource(); std::cout << "----" << std::endl; } std::cout << "----" << std::endl; } <file_sep>#include "multi_object_pool/multi_object_pool.hpp" #include "gtest/gtest.h" #include <memory> class resource1 { public: resource1(std::string name = "default") : name_(name) { std::cout << "resource1 created: " << name_ << std::endl; } ~resource1() { std::cout << "resource1 freed: " << name_ << std::endl; } private: std::string name_; }; class resource2 { public: resource2(std::string name = "default") : name_(name) { std::cout << "resource2 created: " << name_ << std::endl; } ~resource2() { std::cout << "resource2 freed: " << name_ << std::endl; } private: std::string name_; }; TEST(multi_object_pool, all) { auto my_multi_pool = variant2::multi_object_pool<resource1, resource2>(); auto res1 = my_multi_pool.get_resource<resource1>(); auto res2 = my_multi_pool.get_resource<resource2>(); } <file_sep>#pragma once #include <iostream> #include <memory> #include <map> #include "handle.hpp" namespace variant2 { template <typename ResourceType> class object_pool { public: std::shared_ptr<handle<ResourceType>> make_handle(const std::shared_ptr<ResourceType> &resource) { auto new_handle = std::make_shared<handle<ResourceType>>(resource); objects_[resource] = std::weak_ptr<handle<ResourceType>>(new_handle); return new_handle; } std::shared_ptr<handle<ResourceType>> allocate_resource() { auto new_resource = std::make_shared<ResourceType>(); auto new_handle = std::make_shared<handle<ResourceType>>(new_resource); objects_.emplace(std::make_pair(new_resource, new_handle)); return new_handle; } std::shared_ptr<handle<ResourceType>> get_resource() { for (const auto 
&o : objects_) { if (o.second.expired()) // this handle is free { return make_handle(o.first); } } return allocate_resource(); } object_pool() {} ~object_pool() {} size_t size() const { return objects_.size(); } private: std::map<std::shared_ptr<ResourceType>, std::weak_ptr<handle<ResourceType>>> objects_; }; } <file_sep>#pragma once #include <iostream> #include <memory> namespace variant2 { template <typename ResourceType> class handle { public: handle(std::shared_ptr<ResourceType> res) : resource_(res) {} std::shared_ptr<ResourceType> get() { return resource_; } private: std::shared_ptr<ResourceType> resource_; }; } <file_sep>#include <iostream> #include <memory> #include <boost/optional.hpp> namespace variant3 { class handle_manager { public: boost::optional<size_t> get_id() { if (free_ids.size() > 0) { size_t id = free_ids.back(); free_ids.pop_back(); return id; } else return boost::none; } void release_id(std::size_t id) { std::cout << "releasing handle " << id << std::endl; free_ids.push_back(id); } private: std::vector<size_t> free_ids; }; template <typename ResourceType> class handle { public: handle(std::size_t id, std::shared_ptr<ResourceType> res, std::shared_ptr<handle_manager> handle_manager) : id_(id), resource_(res), handle_manager_(handle_manager) { std::cout << "creating a handle" << std::endl; } ~handle() { if (!handle_manager_.expired()) { auto lock_handle_manager = handle_manager_.lock(); lock_handle_manager->release_id(id_); } } // TODO this should probably be a shared_ptr operator ResourceType() { return *resource_; } std::shared_ptr<ResourceType> get() { return resource_; } private: std::size_t id_; std::shared_ptr<ResourceType> resource_; std::weak_ptr<handle_manager> handle_manager_; }; } <file_sep>#include "variant1/object_pool.hpp" #include "gtest/gtest.h" #include <memory> class resource { public: resource(std::string name = "default") : name_(name) { std::cout << "resource created: " << name_ << std::endl; } ~resource() { std::cout << "resource freed: " << name_ << std::endl; } private: std::string name_; }; TEST(variant1, all) { std::cout << "main" << std::endl; auto my_pool = std::make_shared<variant1::object_pool<resource>>( "pool1"); // if we want to use // shared_from_this() on the pool, // we need a shared_ptr to the pool auto h1 = my_pool->get_resource(); auto h2 = my_pool->get_resource(); { auto h3 = my_pool->get_resource(); } auto h4 = my_pool->get_resource(); std::cout << "----" << std::endl; { auto my_pool2 = std::make_shared<variant1::object_pool<resource>>("pool2"); h1 = my_pool2->get_resource(); auto h5 = my_pool2->get_resource(); } std::cout << "----" << std::endl; } <file_sep>#include <iostream> #include <memory> #include <vector> #include "handle.hpp" namespace variant3 { template <typename T, typename... CtorArgs> class object_constructor { public: object_constructor(CtorArgs &&... args) : args_{args...} {} std::shared_ptr<T> create_object() { return invoke(std::make_index_sequence<sizeof...(CtorArgs)>()); } private: std::tuple<CtorArgs...> args_; template <size_t... Ints> std::shared_ptr<T> invoke(std::integer_sequence<size_t, Ints...>) { return std::make_shared<T>(std::get<Ints>(args_)...); } }; template <typename T, typename... CtorArgs> object_constructor<T, CtorArgs...> make_object_constructor(CtorArgs &&... 
args) { return object_constructor<T, CtorArgs...>(args...); } template <typename ResourceType, typename Constructor = object_constructor<ResourceType>> class object_pool { public: size_t allocate_resource() { resources_.emplace_back(constructor_.create_object()); return last_id(); } std::shared_ptr<handle<ResourceType>> get_resource() { auto id = handle_manager_->get_id(); if (!id) id = allocate_resource(); std::cout << "pool_size = " << resources_.size() << std::endl; std::cout << "passing handle to resource id = " << id.value() << std::endl; return std::make_shared<handle<ResourceType>>( id.value(), resources_[id.value()], handle_manager_); } object_pool(object_pool &&other) = default; object_pool(const object_pool &other) = default; // template <typename... Args> // object_pool(Args &&... args) // : constructor_(args...), // handle_manager_(std::make_shared<handle_manager>()) {} // TODO this should be only enabled if Constructor has no arguments object_pool() : constructor_(), handle_manager_(std::make_shared<handle_manager>()) {} object_pool(const Constructor &constructor) : constructor_(constructor), handle_manager_(std::make_shared<handle_manager>()) {} ~object_pool() {} private: Constructor constructor_; std::shared_ptr<handle_manager> handle_manager_; std::vector<std::shared_ptr<ResourceType>> resources_; // TODO test copying an object pool. size_t last_id() { return resources_.size() - 1; } }; template <typename ResourceType, typename... CtorArgs> auto make_object_pool(CtorArgs &&... args) -> object_pool<ResourceType, decltype(make_object_constructor<ResourceType>(args...))> { return object_pool< ResourceType, decltype(make_object_constructor<ResourceType>(args...))>( make_object_constructor<ResourceType>(args...)); } } <file_sep>include( ../cmake/googletest-download/googletest-download.cmake) add_executable( test_example test_example.cc ) target_link_libraries( test_example gtest gtest_main ) add_executable( test_variant1 variant1/test_variant1.cc ) target_link_libraries( test_variant1 gtest gtest_main ) add_executable( test_variant2 variant2/test_variant2.cc ) target_link_libraries( test_variant2 gtest gtest_main ) add_executable( test_save_ctor save_ctor/test_save_ctor.cc ) target_link_libraries( test_save_ctor gtest gtest_main ) find_package(Boost 1.58 REQUIRED) add_executable( test_multi_object_pool multi_object_pool/test_multi_object_pool.cc ) target_link_libraries( test_multi_object_pool gtest gtest_main ) target_include_directories( test_multi_object_pool PRIVATE ${Boost_INCLUDE_DIR} ) add_executable( test_variant3 variant3/test_variant3.cc ) target_link_libraries( test_variant3 gtest gtest_main ) target_include_directories( test_variant3 PRIVATE ${Boost_INCLUDE_DIR} ) add_executable( test_variant3_multi variant3/test_multi_object_pool.cc ) target_link_libraries( test_variant3_multi gtest gtest_main ) target_include_directories( test_variant3_multi PRIVATE ${Boost_INCLUDE_DIR} ) set_target_properties(test_variant3 test_variant3_multi PROPERTIES CXX_STANDARD 14)<file_sep>Variant 1 ========= - Pool keeps list of free IDs - Handle calls back to pool on destruction and frees ID - New resource is created if needed TODO: - How to create a resource with non-default ctor? - Multi-Pool (one object to store multiple resource types)? - Circular dependency between Pool and Handle..<file_sep>Variant 2 ========= - Pool keeps a map of resources and weak_ptrs to handles - On resource request checks all weak_ptrs if the handle was released (O(N) in number of resources...) 
- New resource is created if needed TODO: - How to create a resource with non-default ctor? - Multi-Pool (one object to store multiple resource types)? <file_sep>#include "variant3/multi_object_pool.hpp" #include "gtest/gtest.h" #include <memory> class resource1 { public: resource1(std::string name = "default") : name_(name) { std::cout << "resource1 created: " << name_ << std::endl; } ~resource1() { std::cout << "resource1 freed: " << name_ << std::endl; } private: std::string name_; }; class resource2 { public: resource2(std::string name = "default") : name_(name) { std::cout << "resource2 created: " << name_ << std::endl; } ~resource2() { std::cout << "resource2 freed: " << name_ << std::endl; } private: std::string name_; }; TEST(multi_object_pool, get_resource) { auto pool = variant3::make_object_pool<resource1>("my resource1"); static_assert(std::is_same<variant3::get_resource_t<decltype(pool)>, resource1>::value, "error"); } TEST(multi_object_pool, all) { auto my_multi_pool = variant3::make_multi_object_pool( variant3::make_object_pool<resource1>("my resource1"), variant3::make_object_pool<resource2>("my resource2")); auto res1 = my_multi_pool.get_resource<resource1>(); auto res2 = my_multi_pool.get_resource<resource2>(); } <file_sep>#include "variant3/object_pool.hpp" #include "gtest/gtest.h" #include <memory> class resource { public: resource(std::string name = "default") : name_(name) { std::cout << "resource created: " << name_ << std::endl; } ~resource() { std::cout << "resource freed: " << name_ << std::endl; } private: std::string name_; }; TEST(variant3, all) { std::cout << "main" << std::endl; auto my_pool = variant3::object_pool<resource>(); auto h0 = my_pool.get_resource(); auto h1 = my_pool.get_resource(); { auto h2 = my_pool.get_resource(); } auto h3 = my_pool.get_resource(); std::cout << "----" << std::endl; { auto my_pool2 = variant3::object_pool<resource>(); h0 = my_pool2.get_resource(); auto h4 = my_pool2.get_resource(); std::cout << "----" << std::endl; } std::cout << "----" << std::endl; } TEST(variant3, with_string_ctor) { std::cout << "main" << std::endl; auto my_pool = variant3::make_object_pool<resource>("test"); auto h0 = my_pool.get_resource(); auto h1 = my_pool.get_resource(); { auto h2 = my_pool.get_resource(); } auto h3 = my_pool.get_resource(); } TEST(variant3, implicit_conversion) { auto my_pool = variant3::make_object_pool<resource>("test"); resource res = *my_pool.get_resource(); // TODO don't use shard_ptr for // handle then we can implicitly // convert } <file_sep>#include <iostream> #include <memory> namespace variant1 { template <typename T> class object_pool; template <typename ResourceType> class handle { public: handle(std::size_t id, std::shared_ptr<ResourceType> res, std::shared_ptr<object_pool<ResourceType>> pool) : id_(id), resource_(res), pool_(pool) { std::cout << "creating a handle" << std::endl; } ~handle() { if (!pool_.expired()) { auto pool = pool_.lock(); pool->release(id_); } } ResourceType get() { return *resource_; } private: std::size_t id_; std::shared_ptr<ResourceType> resource_; std::weak_ptr<object_pool<ResourceType>> pool_; }; } <file_sep>#include "variant2/object_pool.hpp" #include "gtest/gtest.h" #include <iostream> class class_with_strings { public: class_with_strings(std::string name1, std::string name2) : name1_(name1), name2_(name2) {} std::string name1_; std::string name2_; }; template <typename... T> struct arg_wrapper { std::tuple<T...> args; }; template <typename T, typename... 
CtorArgs> struct ctor_forwarder { ctor_forwarder(CtorArgs &&... args) : args_{args...} {} T create_object() { return invoke(std::make_index_sequence<sizeof...(CtorArgs)>()); } std::tuple<CtorArgs...> args_; template <size_t... Ints> T invoke(std::integer_sequence<size_t, Ints...>) { return T{std::get<Ints>(args_)...}; } }; template <typename T, typename... CtorArgs> ctor_forwarder<T, CtorArgs...> make_ctor_forwarder(CtorArgs &&... args) { return ctor_forwarder<T, CtorArgs...>(args...); } TEST(save_ctor, all) { auto test = make_ctor_forwarder<class_with_strings>("bla1", "bla2"); auto o1 = test.create_object(); std::cout << o1.name1_ << std::endl; std::cout << o1.name2_ << std::endl; } <file_sep>#include <iostream> #include <memory> #include <vector> #include "handle.hpp" namespace variant1 { template <typename ResourceType> class object_pool : public std::enable_shared_from_this<object_pool<ResourceType>> { public: void allocate_resource() { resources_.emplace_back(std::make_shared<ResourceType>(name_)); available_resources.emplace_back(last_id()); } std::shared_ptr<handle<ResourceType>> get_resource() { if (available_resources.size() == 0) allocate_resource(); size_t id = available_resources.back(); available_resources.pop_back(); std::cout << "pool_size = " << resources_.size() << std::endl; std::cout << "passing handle to resource id = " << id << std::endl; return std::make_shared<handle<ResourceType>>(id, resources_[id], this->shared_from_this()); } void release(std::size_t id) { std::cout << "releasing handle " << id << " of pool " << name_ << std::endl; available_resources.push_back(id); } object_pool(std::string name) : name_(name) { std::cout << "pool created: " << name_ << std::endl; } ~object_pool() { std::cout << "pool freed: " << name_ << std::endl; } private: std::string name_; std::vector<std::shared_ptr<ResourceType>> resources_; std::vector<size_t> available_resources; size_t last_id() { return resources_.size() - 1; } }; } <file_sep>#include "gtest/gtest.h" TEST( MyExample, FirstTest ) { ASSERT_TRUE(true); } <file_sep>project(memory_pool CXX) cmake_minimum_required(VERSION 3.5) include_directories("src") add_subdirectory( test ) <file_sep>#pragma once #include <boost/fusion/container/map.hpp> #include <boost/fusion/include/at_key.hpp> #include <boost/type_index.hpp> #include "../variant2/object_pool.hpp" namespace variant2 { template <typename... ResourceTypes> class multi_object_pool { public: multi_object_pool() : map_(boost::fusion::make_pair<ResourceTypes>( object_pool<ResourceTypes>())...) {} template <typename T> std::shared_ptr<handle<T>> get_resource() { return boost::fusion::at_key<T>(map_).get_resource(); } private: using map_type = boost::fusion::map< boost::fusion::pair<ResourceTypes, object_pool<ResourceTypes>>...>; map_type map_; }; }
28e186bb4391b592e71bd85648c01baea25e4449
[ "Markdown", "CMake", "C++" ]
19
C++
havogt/memory_pool
3c2fd62c7b4a2c6c1c061618d90f4f23eb38742a
3b4cc281a363d244ce137286a7a757b8333d5aff
refs/heads/master
<repo_name>plxzzlx/AndCtrl<file_sep>/Desktop/AndCtr/AndCtr/NetworkServer.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Net.Sockets; using System.Net; using System.Threading; using System.Windows.Controls; using System.ComponentModel; namespace AndCtr { public class NetworkServer : INotifyPropertyChanged { Socket serverSocket; Thread myThread; Thread receiveThread; bool IsSeverConnected = false; String IP = null; int Port; private String output; public String Output { get { return output; } set { output = value; OnPropertyChanged("Output"); } } public NetworkServer(String IP,int Port) { this.IP = IP; this.Port = Port; } private bool SetUpServer() { try { //服务器IP地址 IPAddress ip = IPAddress.Parse(IP); serverSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); //绑定IP地址:端口 serverSocket.Bind(new IPEndPoint(ip, Port)); //设定最多10个排队连接请求 serverSocket.Listen(1); Output += "Start Suscessfully!\n" + "Server IP=" + IP + ", Port=" + Port + "\n"; myThread = new Thread(ListenClientConnect); IsSeverConnected = true; } catch (System.Exception ex) { IsSeverConnected = false; Console.WriteLine(ex.ToString()); } return IsSeverConnected; } public void Start() { if(!IsSeverConnected && SetUpServer()) myThread.Start(); } public void Stop() { if (IsSeverConnected) { serverSocket.Close(); myThread.Abort(); if (receiveThread != null) { receiveThread.Abort(); } } } /// <summary> /// 监听客户端连接 /// </summary> private void ListenClientConnect() { while (true) { try { Socket clientSocket = serverSocket.Accept(); Output += "Accept a client! IP=" + clientSocket.RemoteEndPoint.ToString() + "\n"; Output += "Client: " + "Server Say Hello!\n"; clientSocket.Send(Encoding.ASCII.GetBytes("Server Say Hello!")); receiveThread = new Thread(ReceiveMessage); receiveThread.Start(clientSocket); } catch (System.Exception ex) { Output += ex.ToString(); } } } /// <summary> /// 接收消息 /// </summary> /// <param name="clientSocket"></param> private void ReceiveMessage(object clientSocket) { byte[] result = new byte[1024]; Socket myClientSocket = (Socket)clientSocket; MouseAction mAction = new MouseAction(); while (true) { try { int receiveNumber = myClientSocket.Receive(result); String Event = Encoding.ASCII.GetString(result, 0, receiveNumber); Output = "Client: " + Event + "\n"; mAction.OnEvent(Event); myClientSocket.Send(Encoding.ASCII.GetBytes("Sever Recive : " + Event)); } catch (Exception ex) { Console.WriteLine(ex.Message); myClientSocket.Shutdown(SocketShutdown.Both); myClientSocket.Close(); Output = "断开连接!"; break; } } } public event PropertyChangedEventHandler PropertyChanged; public void OnPropertyChanged(string propertyName) { if (PropertyChanged != null) { PropertyChanged(this, new PropertyChangedEventArgs(propertyName)); } } } } <file_sep>/Desktop/AndCtr/AndCtr/WndCursor.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Runtime.InteropServices; namespace AndCtr { public class WndCursor { [Flags] public enum MouseEventFlag : uint { Move = 0x0001, LeftDown = 0x0002, LeftUp = 0x0004, RightDown = 0x0008, RightUp = 0x0010, MiddleDown = 0x0020, MiddleUp = 0x0040, XDown = 0x0080, XUp = 0x0100, Wheel = 0x0800, VirtualDesk = 0x4000, Absolute = 0x8000 } public struct POINT { public int X; public int Y; public POINT(int x, int y) { this.X = x; this.Y = y; } } [DllImport("user32.dll")] public static extern bool SetCursorPos(int X, int Y); [DllImport("User32")] public extern static bool GetCursorPos(ref POINT 
lpPoint); [DllImport("user32.dll")] public static extern void mouse_event(MouseEventFlag flags, int dx, int dy, uint data, UIntPtr extraInfo); } public class WndCursorController { /// <summary> /// 设置鼠标于屏幕的绝对位置,以左上角为(0,0)点。 /// </summary> /// <param name="x">x坐标</param> /// <param name="y">y坐标</param> public void SetCursorPos(int x, int y) { WndCursor.SetCursorPos(x, y); } /// <summary> /// 获取鼠标在屏幕的绝对位置,左上角为(0,0)点 /// </summary> /// <param name="x">x坐标</param> /// <param name="y">y坐标</param> public static void GetCursorPos(ref int x, ref int y) { WndCursor.POINT p = new WndCursor.POINT(0,0); WndCursor.GetCursorPos(ref p); x = p.X; y = p.Y; } /// <summary> /// 触发鼠标(左键)按下事件 /// </summary> public void onLeftMouseDown() { WndCursor.mouse_event(WndCursor.MouseEventFlag.LeftDown, 0, 0, 0, UIntPtr.Zero); } /// <summary> /// 触发鼠标(左键)弹起事件 /// </summary> public void onLeftMouseUp() { WndCursor.mouse_event(WndCursor.MouseEventFlag.LeftUp, 0, 0, 0, UIntPtr.Zero); } /// <summary> /// 触发鼠标(右键)按下事件 /// </summary> public void onRightMouseDown() { WndCursor.mouse_event(WndCursor.MouseEventFlag.RightDown, 0, 0, 0, UIntPtr.Zero); } /// <summary> /// 触发鼠标(右键)弹起事件 /// </summary> public void onRightMouseUp() { WndCursor.mouse_event(WndCursor.MouseEventFlag.RightUp, 0, 0, 0, UIntPtr.Zero); } /// <summary> /// 触发鼠标(左键)单击事件 /// </summary> public void onLeftMouseClick() { WndCursor.mouse_event(WndCursor.MouseEventFlag.LeftDown, 0, 0, 0, UIntPtr.Zero); WndCursor.mouse_event(WndCursor.MouseEventFlag.LeftUp, 0, 0, 0, UIntPtr.Zero); } /// <summary> /// 触发鼠标(右键)单击事件 /// </summary> public void onRightMouseClick() { WndCursor.mouse_event(WndCursor.MouseEventFlag.RightDown, 0, 0, 0, UIntPtr.Zero); WndCursor.mouse_event(WndCursor.MouseEventFlag.RightUp, 0, 0, 0, UIntPtr.Zero); } } } <file_sep>/Desktop/AndCtr/AndCtr/MouseAction.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace AndCtr { public class MouseAction { WndCursorController CurCtr = null; delegate void MouseEventDelegate(String Event); public MouseAction() { CurCtr = new WndCursorController(); } private void onMouseAction(String Event) { String[] CmdStr = Event.Split('\n'); String[] str = CmdStr[0].Split(' '); switch (str[0]) { case "Set": int x0 = 0, y0 = 0; WndCursorController.GetCursorPos(ref x0, ref y0); int x = int.Parse(str[1]); int y = int.Parse(str[2]); x = x0 + x; y = y0 + y; CurCtr.SetCursorPos(x, y); break; case "LDown": CurCtr.onLeftMouseDown(); break; case "LUp": CurCtr.onLeftMouseUp(); break; case "LClick": CurCtr.onLeftMouseClick(); break; case "RDown": CurCtr.onRightMouseDown(); break; case "RUp": CurCtr.onRightMouseUp(); break; case "RClick": CurCtr.onRightMouseClick(); break; default: Console.WriteLine("Wrong Event String"); break; } } public void OnEvent(String Event) { MouseEventDelegate Dl = new MouseEventDelegate(onMouseAction); Dl(Event); } } } <file_sep>/README.md AndCtrl ======= 手机控制PC的程序 <file_sep>/Desktop/AndCtr/AndCtr/MainWindow.xaml.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows; using System.Windows.Controls; using System.Windows.Data; using System.Windows.Documents; using System.Windows.Input; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Navigation; using System.Windows.Shapes; using System.Net; using System.Windows.Forms; namespace AndCtr { /// <summary> /// MainWindow.xaml 的交互逻辑 /// </summary> public partial class MainWindow : Window { NetworkServer 
server; String IP; int Port; WindowState ws; WindowState wsl; NotifyIcon notifyIcon; public MainWindow() { InitializeComponent(); wsl = WindowState; InitIcon(); InitIpConfig(); } private void InitIpConfig() { IPHostEntry IpEntry = Dns.GetHostEntry(Dns.GetHostName()); foreach (IPAddress ip in IpEntry.AddressList) { if (ip.IsIPv6LinkLocal || ip.IsIPv6Teredo) continue; txt_IP.Text = ip.ToString(); txt_IP_List.AppendText(ip.ToString() + "\n"); } } private void InitIcon() { this.notifyIcon = new NotifyIcon(); this.notifyIcon.BalloonTipText = "AndCtr已经启动!"; //设置程序启动时显示的文本 this.notifyIcon.Text = "AndCtr"; //最小化到托盘时,鼠标点击时显示的文本 this.notifyIcon.Icon = new System.Drawing.Icon("desktop.ico"); //程序图标 this.notifyIcon.Visible = true; notifyIcon.MouseDoubleClick += OnNotifyIconDoubleClick; //this.notifyIcon.ShowBalloonTip(1000); } private void btn_Start_Click(object sender, RoutedEventArgs e) { try { IP = txt_IP.Text; Port = int.Parse(txt_Port.Text); server = new NetworkServer(IP, Port); server.Start(); } catch (System.Exception ex) { System.Windows.MessageBox.Show(ex.ToString()); } txt_Output.DataContext = server; } private void OnNotifyIconDoubleClick(object sender, EventArgs e) { this.Show(); WindowState = wsl; } private void Window_StateChanged(object sender, EventArgs e) { ws = WindowState; if (ws == WindowState.Minimized) this.Hide(); } private void Window_Closed(object sender, EventArgs e) { if (server != null) server.Stop(); this.notifyIcon.Visible = false; } } }
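The README (in Chinese; roughly "a program for controlling a PC from a mobile phone") and MouseAction.OnEvent above define a simple newline-terminated text protocol: "Set dx dy" for relative cursor movement plus "LDown", "LUp", "LClick", "RDown", "RUp" and "RClick" for button events. A throwaway desktop test client is sketched below; the IP address and port are placeholders for whatever the AndCtr window shows.

// Hypothetical test client for NetworkServer/MouseAction above.
// The IP and port are placeholders; use the values shown in the AndCtr window.
using System;
using System.Net.Sockets;
using System.Text;

class TestClient
{
    static void Main()
    {
        using (var client = new TcpClient("192.168.1.10", 8888))
        using (var stream = client.GetStream())
        {
            // Commands understood by MouseAction.OnEvent:
            //   "Set dx dy" - move the cursor relative to its current position
            //   "LDown"/"LUp"/"LClick" and "RDown"/"RUp"/"RClick" - button events
            Send(stream, "Set 25 -10\n");
            Send(stream, "LClick\n");
        }
    }

    static void Send(NetworkStream stream, string command)
    {
        byte[] bytes = Encoding.ASCII.GetBytes(command);
        stream.Write(bytes, 0, bytes.Length);
        Console.WriteLine("sent: " + command.Trim());
    }
}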
0f947138e6861b0a28282cab1ccec231dced2721
[ "Markdown", "C#" ]
5
C#
plxzzlx/AndCtrl
d94f42c4bd32b0cc15125f6a59a10c2326045982
043f964be110cecb3bfa8258340ee1d7044eb4db
refs/heads/master
<file_sep><?php use Illuminate\Support\Facades\Route; use App\Http\Controllers\InicioController; use App\Http\Controllers\NombreController; use App\Http\Controllers\ServiciosController; use App\Http\Controllers\ContactanosController; use App\Http\Controllers\InformacionController; /* |-------------------------------------------------------------------------- | Web Routes |-------------------------------------------------------------------------- | | Here is where you can register web routes for your application. These | routes are loaded by the RouteServiceProvider within a group which | contains the "web" middleware group. Now create something great! | */ //Route::get('/', function () { // return view('welcome'); //}); Route::get('/inicio/{servi?}', [InicioController::class, 'show']); Route::get('/nombre/{servi?}', [NombreController::class, 'show']); Route::get('/servicios/{servi?}', [ServiciosController::class, 'show']); Route::get('/Contactanos/{contac?}', [ContactanosController::class, 'show']); Route::get('/informacion/{inf?}', [InformacionController::class, 'show']);
a205d56d0d96cdb4629a3c0608a928706f421209
[ "PHP" ]
1
PHP
iruzts/examen
cffe66fa6e8de4ae7a90646eb020c0445439be39
af734a9587c8f00e51da3ea72d0ea6aa2c42220e
refs/heads/master
<repo_name>szazdenes/DistanceCounter<file_sep>/mainwindow.cpp #include "mainwindow.h" #include "ui_mainwindow.h" MainWindow::MainWindow(QWidget *parent) : QMainWindow(parent), ui(new Ui::MainWindow) { ui->setupUi(this); connect(ui->distanceMeasurementWidget, &DistanceMeasurementForm::signalClearDistanceDataList, ui->analizationWidget, &AnalizationForm::slotClearDistanceData); connect(ui->distanceMeasurementWidget, &DistanceMeasurementForm::signalSendDistance, ui->analizationWidget, &AnalizationForm::slotAddDistanceData); connect(ui->distanceMeasurementWidget, &DistanceMeasurementForm::signalSendArea, ui->analizationWidget, &AnalizationForm::slotAddAreaData); } MainWindow::~MainWindow() { delete ui; } <file_sep>/calibrationdialog.h #ifndef CALIBRATIONDIALOG_H #define CALIBRATIONDIALOG_H #include <QDialog> namespace Ui { class CalibrationDialog; } class CalibrationDialog : public QDialog { Q_OBJECT public: explicit CalibrationDialog(QWidget *parent = nullptr, double pixels = 1); ~CalibrationDialog(); signals: void signalSendCalibrationValue(double calib); private slots: void on_pushButton_clicked(); private: Ui::CalibrationDialog *ui; double calibration; }; #endif // CALIBRATIONDIALOG_H <file_sep>/distancemeasurementform.h #ifndef DISTANCEMEASUREMENTFORM_H #define DISTANCEMEASUREMENTFORM_H #include <QWidget> #include <QFileDialog> #include <QGraphicsScene> #include <QtMath> #include <QToolTip> #include "calibrationdialog.h" namespace Ui { class DistanceMeasurementForm; } class DistanceMeasurementForm : public QWidget { Q_OBJECT public: explicit DistanceMeasurementForm(QWidget *parent = nullptr); ~DistanceMeasurementForm(); signals: void signalClearDistanceDataList(); void signalSendDistance(double distance); void signalSendArea(double area); private slots: void on_pushButton_clicked(); void slotMouseMoved(QPointF pos); void slotMouseButtonPressed(QPointF pos); void slotMouseButtonReleased(QPointF pos); void slotSetCalibrationValue(double calib); void slotWheelUp(); void slotWheelDown(); void on_clearPushButton_clicked(); void on_savePushButton_clicked(); void on_calibrationPushButton_clicked(); void on_fitPushButton_clicked(); void on_originalPushButton_clicked(); void on_distanceRadioButton_toggled(bool checked); void on_areaRadioButton_toggled(bool checked); void on_areaPushButton_clicked(); private: Ui::DistanceMeasurementForm *ui; QImage loadImage, mask, image, originalImage; QGraphicsScene scene; QPointF startPosition; bool mousePressed; double calibValue; bool calibration; double zoom; double pensize; QCursor cursor; void refreshMask(); void refreshImage(); void refreshLine(QPointF &startPos, QPointF &endPos); void refreshArea(QPointF &pos); double calculateDistance(QPointF &startPos, QPointF &endPos); double calculateArea(QImage &image); void setCursorImage(double size); }; #endif // DISTANCEMEASUREMENTFORM_H <file_sep>/analizationform.h #ifndef ANALIZATIONFORM_H #define ANALIZATIONFORM_H #include <QWidget> #include <QtMath> #include <QFileDialog> #include <QTextStream> #include <QDebug> namespace Ui { class AnalizationForm; } class AnalizationForm : public QWidget { Q_OBJECT public: explicit AnalizationForm(QWidget *parent = nullptr); ~AnalizationForm(); public slots: void slotAddDistanceData(double data); void slotAddAreaData(double area); void slotClearDistanceData(); private slots: void on_clearPushButton_clicked(); void on_exportPushButton_clicked(); private: Ui::AnalizationForm *ui; void calculateAveStD(); double getAverage(QList<double> &list); double getStd(QList<double> 
&list, double average); void refreshTableWidget(); void exportTableData(QString exportFilename); void tableSetHeader(); QList<double> distanceDataList; QList<double> areaDataList; }; #endif // ANALIZATIONFORM_H <file_sep>/calibrationdialog.cpp #include "calibrationdialog.h" #include "ui_calibrationdialog.h" CalibrationDialog::CalibrationDialog(QWidget *parent, double pixels) : QDialog(parent), ui(new Ui::CalibrationDialog) { ui->setupUi(this); calibration = pixels; ui->measuredLineEdit->setText(QString::number(pixels)); } CalibrationDialog::~CalibrationDialog() { delete ui; } void CalibrationDialog::on_pushButton_clicked() { if(!ui->referenceLineEdit->text().isEmpty()){ calibration = ui->referenceLineEdit->text().toDouble()/calibration; emit signalSendCalibrationValue(calibration); this->accept(); } } <file_sep>/distancemeasurementform.cpp #include "distancemeasurementform.h" #include "ui_distancemeasurementform.h" DistanceMeasurementForm::DistanceMeasurementForm(QWidget *parent) : QWidget(parent), ui(new Ui::DistanceMeasurementForm) { ui->setupUi(this); connect(ui->graphicsView, &ImageGraphicsViewForm::signalMouseMoved, this, &DistanceMeasurementForm::slotMouseMoved); connect(ui->graphicsView, &ImageGraphicsViewForm::signalLeftButtonPressed, this, &DistanceMeasurementForm::slotMouseButtonPressed); connect(ui->graphicsView, &ImageGraphicsViewForm::signalLeftButtonReleased, this, &DistanceMeasurementForm::slotMouseButtonReleased); connect(ui->graphicsView, &ImageGraphicsViewForm::signalWheelUp, this, &DistanceMeasurementForm::slotWheelUp); connect(ui->graphicsView, &ImageGraphicsViewForm::signalWheelDown, this, &DistanceMeasurementForm::slotWheelDown); ui->graphicsView->setScene(&scene); ui->graphicsView->setTransformationAnchor(QGraphicsView::AnchorViewCenter); mousePressed = false; calibration = false; calibValue = 1; zoom = 1; pensize = 50; ui->clearPushButton->setDisabled(true); ui->pushButton->setDisabled(true); ui->savePushButton->setDisabled(true); ui->distanceRadioButton->setChecked(true); ui->areaRadioButton->setDisabled(true); } DistanceMeasurementForm::~DistanceMeasurementForm() { delete ui; } void DistanceMeasurementForm::on_pushButton_clicked() { emit signalClearDistanceDataList(); QString imagePath = QFileDialog::getOpenFileName(this, "Open image", "../"); loadImage = QImage(imagePath); image = QImage(loadImage); originalImage = QImage(loadImage); mask = QImage(loadImage.width(), loadImage.height(), QImage::Format_ARGB32_Premultiplied); refreshMask(); refreshImage(); } void DistanceMeasurementForm::slotMouseMoved(QPointF pos) { if(mousePressed){ if(ui->distanceRadioButton->isChecked()){ refreshMask(); refreshLine(startPosition, pos); refreshImage(); double distance = calculateDistance(startPosition, pos); QToolTip::showText(QCursor::pos(), QString::number(distance) + " mm", ui->graphicsView); } if(ui->areaRadioButton->isChecked()){ refreshArea(pos); refreshImage(); } } else return; } void DistanceMeasurementForm::slotMouseButtonPressed(QPointF pos) { mousePressed = true; startPosition = pos; } void DistanceMeasurementForm::slotMouseButtonReleased(QPointF pos) { mousePressed = false; if(ui->distanceRadioButton->isChecked()){ refreshMask(); refreshLine(startPosition, pos); refreshImage(); double distance = calculateDistance(startPosition, pos); if(calibration) emit signalSendDistance(distance); if(!calibration){ CalibrationDialog calibDialog(this, distance); connect(&calibDialog, &CalibrationDialog::signalSendCalibrationValue, this, 
&DistanceMeasurementForm::slotSetCalibrationValue); calibDialog.exec(); } } } void DistanceMeasurementForm::slotSetCalibrationValue(double calib) { calibValue = calib; calibration = true; ui->pushButton->setEnabled(true); ui->clearPushButton->setEnabled(true); ui->savePushButton->setEnabled(true); ui->areaRadioButton->setEnabled(true); } void DistanceMeasurementForm::slotWheelUp() { pensize += 5; setCursorImage(pensize); } void DistanceMeasurementForm::slotWheelDown() { pensize -= 5; setCursorImage(pensize); } void DistanceMeasurementForm::refreshMask() { mask.fill(Qt::transparent); } void DistanceMeasurementForm::refreshImage() { QPainter painter(&image); QPainter painter2(&loadImage); if(mousePressed){ painter.setOpacity(0.5); painter.drawImage(0, 0, loadImage); painter.drawImage(0, 0, mask); painter.end(); scene.clear(); scene.addPixmap(QPixmap::fromImage(image)); } else{ painter2.drawImage(0,0, image); painter2.end(); scene.clear(); scene.addPixmap(QPixmap::fromImage(image)); } } void DistanceMeasurementForm::refreshLine(QPointF &startPos, QPointF &endPos) { QPainter painter(&mask); QPen pen; pen.setColor(Qt::red); pen.setWidth(2); painter.setPen(pen); painter.drawLine(startPos, endPos); painter.end(); } void DistanceMeasurementForm::refreshArea(QPointF &pos) { QPainter painter(&mask); QBrush brush; brush.setColor(Qt::yellow); brush.setStyle(Qt::SolidPattern); painter.setCompositionMode(QPainter::CompositionMode_Source); // painter.setOpacity(0.5); painter.setBrush(brush); painter.setPen(Qt::NoPen); painter.drawEllipse(pos.x() - pensize/zoom/2.0, pos.y() - pensize/zoom/2.0, pensize/zoom, pensize/zoom); painter.end(); } double DistanceMeasurementForm::calculateDistance(QPointF &startPos, QPointF &endPos) { double result = calibValue * qSqrt((endPos.x()-startPos.x())*(endPos.x()-startPos.x())+(endPos.y()-startPos.y())*(endPos.y()-startPos.y())); return result; } double DistanceMeasurementForm::calculateArea(QImage &image) { double area = 0; for(int i = 0; i <= image.height(); i++){ for (int j = 0; j <= image.width(); j++){ if(image.pixelColor(i, j) == QColor(Qt::yellow)) area++; } } area *= calibValue; return area; } void DistanceMeasurementForm::setCursorImage(double size) { if(ui->areaRadioButton->isChecked()){ QImage cursorImage = QImage(size, size, QImage::Format_ARGB32); QPainter cursorPainter(&cursorImage); cursorPainter.setCompositionMode(QPainter::CompositionMode_Source); cursorPainter.fillRect(cursorImage.rect(), Qt::transparent); cursorPainter.setPen(Qt::yellow); cursorPainter.drawEllipse(0, 0, size, size); cursorPainter.end(); cursor = QCursor(QPixmap::fromImage(cursorImage)); ui->graphicsView->setCursor(cursor); } else return; } void DistanceMeasurementForm::on_clearPushButton_clicked() { refreshMask(); image = originalImage; refreshImage(); } void DistanceMeasurementForm::on_savePushButton_clicked() { if(!image.isNull()){ QString saveName = QFileDialog::getSaveFileName(this, "Save image", "../"); if(!saveName.endsWith(".png")) saveName.append(".png"); image.save(saveName); } } void DistanceMeasurementForm::on_calibrationPushButton_clicked() { calibration = false; calibValue = 1; QString imagePath = QFileDialog::getOpenFileName(this, "Open image", "../"); loadImage = QImage(imagePath); image = QImage(loadImage); originalImage = QImage(loadImage); mask = QImage(loadImage.width(), loadImage.height(), QImage::Format_ARGB32_Premultiplied); refreshMask(); refreshImage(); } void DistanceMeasurementForm::on_fitPushButton_clicked() { ui->graphicsView->scale(1.0/zoom, 1.0/zoom); 
QRectF imageRect = image.rect(); QRectF rect = ui->graphicsView->viewport()->rect(); double fitSize = qMin<double>(rect.width() / imageRect.width(), rect.height() / imageRect.height()); ui->graphicsView->scale(fitSize, fitSize); zoom = fitSize; } void DistanceMeasurementForm::on_originalPushButton_clicked() { ui->graphicsView->scale(1.0/zoom, 1.0/zoom); zoom = 1; } void DistanceMeasurementForm::on_distanceRadioButton_toggled(bool checked) { if(checked) ui->graphicsView->setCursor(QCursor()); } void DistanceMeasurementForm::on_areaRadioButton_toggled(bool checked) { if(checked) setCursorImage(pensize); } void DistanceMeasurementForm::on_areaPushButton_clicked() { double measuredArea = calculateArea(mask); emit signalSendArea(measuredArea); } <file_sep>/imagegraphicsviewform.cpp #include "imagegraphicsviewform.h" #include "ui_imagegraphicsviewform.h" ImageGraphicsViewForm::ImageGraphicsViewForm(QWidget *parent) : QGraphicsView(parent), ui(new Ui::ImageGraphicsViewForm) { ui->setupUi(this); } ImageGraphicsViewForm::~ImageGraphicsViewForm() { delete ui; } void ImageGraphicsViewForm::mousePressEvent(QMouseEvent *event) { if(event->button() == Qt::LeftButton) emit signalLeftButtonPressed(mapToScene(event->pos())); else QGraphicsView::mousePressEvent(event); } void ImageGraphicsViewForm::mouseReleaseEvent(QMouseEvent *event) { if(event->button() == Qt::LeftButton) emit signalLeftButtonReleased(mapToScene(event->pos())); else QGraphicsView::mouseReleaseEvent(event); } void ImageGraphicsViewForm::mouseMoveEvent(QMouseEvent *event) { emit signalMouseMoved(mapToScene(event->pos())); } void ImageGraphicsViewForm::wheelEvent(QWheelEvent *event) { if(event->delta() > 0) emit signalWheelUp(); if(event->delta() < 0) emit signalWheelDown(); else QGraphicsView::wheelEvent(event); } <file_sep>/imagegraphicsviewform.h #ifndef IMAGEGRAPHICSVIEWFORM_H #define IMAGEGRAPHICSVIEWFORM_H #include <QWidget> #include <QGraphicsView> #include <QMouseEvent> namespace Ui { class ImageGraphicsViewForm; } class ImageGraphicsViewForm : public QGraphicsView { Q_OBJECT public: explicit ImageGraphicsViewForm(QWidget *parent = nullptr); ~ImageGraphicsViewForm(); void mousePressEvent(QMouseEvent* event); void mouseReleaseEvent(QMouseEvent* event); void mouseMoveEvent(QMouseEvent *event); void wheelEvent(QWheelEvent *event); signals: void signalLeftButtonPressed(QPointF pos); void signalLeftButtonReleased(QPointF pos); void signalMouseMoved(QPointF pos); void signalWheelUp(); void signalWheelDown(); private: Ui::ImageGraphicsViewForm *ui; }; #endif // IMAGEGRAPHICSVIEWFORM_H <file_sep>/analizationform.cpp #include "analizationform.h" #include "ui_analizationform.h" AnalizationForm::AnalizationForm(QWidget *parent) : QWidget(parent), ui(new Ui::AnalizationForm) { ui->setupUi(this); distanceDataList.clear(); areaDataList.clear(); tableSetHeader(); } AnalizationForm::~AnalizationForm() { delete ui; } void AnalizationForm::slotAddDistanceData(double data) { distanceDataList.append(data); refreshTableWidget(); } void AnalizationForm::slotAddAreaData(double area) { areaDataList.append(area); if(!areaDataList.isEmpty()){ ui->tableWidget->clear(); ui->tableWidget->setRowCount(0); tableSetHeader(); foreach(double currentItem, areaDataList){ ui->tableWidget->insertRow(ui->tableWidget->rowCount()); ui->tableWidget->setItem(ui->tableWidget->rowCount()-1, 0, new QTableWidgetItem(QString::number(currentItem))); } } else return; } void AnalizationForm::slotClearDistanceData() { distanceDataList.clear(); areaDataList.clear(); 
ui->tableWidget->clear(); ui->tableWidget->setRowCount(0); tableSetHeader(); } void AnalizationForm::calculateAveStD() { if(ui->tableWidget->rowCount() > 0){ int rows = ui->tableWidget->rowCount(); QList<double> tableValues; for(int i = 0; i < rows; i++){ tableValues.append(ui->tableWidget->item(i, 0)->text().toDouble()); // qDebug() << ui->tableWidget->item(i, 0)->text(); } double average = getAverage(tableValues); double StD = getStd(tableValues, average); ui->tableWidget->insertRow(ui->tableWidget->rowCount()); ui->tableWidget->setVerticalHeaderItem(ui->tableWidget->rowCount()-1, new QTableWidgetItem(QString("Average"))); ui->tableWidget->setItem(ui->tableWidget->rowCount()-1, 0, new QTableWidgetItem(QString::number(average))); ui->tableWidget->insertRow(ui->tableWidget->rowCount()); ui->tableWidget->setVerticalHeaderItem(ui->tableWidget->rowCount()-1, new QTableWidgetItem(QString("StD"))); ui->tableWidget->setItem(ui->tableWidget->rowCount()-1, 0, new QTableWidgetItem(QString::number(StD))); } else return; } double AnalizationForm::getAverage(QList<double> &list) { double result = 0; for (int i = 0; i < list.size(); i++){ result += (double)list.at(i) / (double)list.size(); // qDebug() << QString::number(list.at(i)); } return result; } double AnalizationForm::getStd(QList<double> &list, double average) { double result = 0; // double average = getAverage(list); for (int i = 0; i < list.size(); i++) result += (((double)list.at(i) - average)*((double)list.at(i) - average)) / (double)list.size(); result = qSqrt(result); return result; } void AnalizationForm::refreshTableWidget() { if(!distanceDataList.isEmpty()){ ui->tableWidget->clear(); ui->tableWidget->setRowCount(0); tableSetHeader(); foreach(double currentItem, distanceDataList){ ui->tableWidget->insertRow(ui->tableWidget->rowCount()); ui->tableWidget->setItem(ui->tableWidget->rowCount()-1, 0, new QTableWidgetItem(QString::number(currentItem))); } calculateAveStD(); } else return; } void AnalizationForm::exportTableData(QString exportFilename) { QFile exportFile(exportFilename); if(!exportFile.fileName().endsWith(".csv")) exportFile.setFileName(exportFile.fileName() + ".csv"); if(!exportFile.open(QIODevice::WriteOnly | QIODevice::Text)) { qDebug("baj"); return; } QTextStream out(&exportFile); out << "#" << ui->tableWidget->horizontalHeaderItem(0)->text() << "\n"; for(int i = 0; i < ui->tableWidget->rowCount()-2; i++){ out << ui->tableWidget->item(i, 0)->text() << "\n"; } out << "Average:" << "\t" << ui->tableWidget->item(ui->tableWidget->rowCount()-2, 0)->text() << "\n"; out << "StD:" << "\t" << ui->tableWidget->item(ui->tableWidget->rowCount()-1, 0)->text(); exportFile.close(); } void AnalizationForm::tableSetHeader() { ui->tableWidget->horizontalHeader()->setSectionResizeMode(QHeaderView::Stretch); ui->tableWidget->setHorizontalHeaderLabels(QStringList() << "Distance (mm)/Area (mm2)"); } void AnalizationForm::on_clearPushButton_clicked() { slotClearDistanceData(); } void AnalizationForm::on_exportPushButton_clicked() { QString outFileName = QFileDialog::getSaveFileName(this, "Save table", "../"); exportTableData(outFileName); }
7d15760258d8888a4d569c4f0a6f74451f504c95
[ "C++" ]
9
C++
szazdenes/DistanceCounter
2ccf3bb5daf9b5c878bfe419d07bcaf0f6c01900
6c2ac9cb9965aa072a58b339924d90d0c761ab09
refs/heads/master
<file_sep>package org.ioarmband.android.connection; import java.io.IOException; import org.ioarmband.net.connection.StreamedConnection; import org.ioarmband.net.connection.manager.ConnectionManager; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothSocket; import android.util.Log; public class BluetoothAndroidConnectionManager extends ConnectionManager { private static BluetoothAndroidConnectionManager instance = null; private BluetoothSocket bluetoothSocket; private BluetoothAndroidConnectionManager() { super(); } public static synchronized BluetoothAndroidConnectionManager getInstance() { if(instance == null) { instance = new BluetoothAndroidConnectionManager(); } return instance; } @Override public void LauchDiscovery() { BluetoothAdapter mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); if (mBluetoothAdapter == null) { // Device does not support Bluetooth } if (mBluetoothAdapter.isEnabled()) { BluetoothAndroidDiscoveryManager androidDiscoveryManager = new BluetoothAndroidDiscoveryManager(); androidDiscoveryManager.startdiscoveryDevice(); } } @Override public void closeConnectionComplementary() { Log.d("BluetoothConnectionManager", "BluetoothConnectionManager close"); try { if (bluetoothSocket != null) { bluetoothSocket.close(); } } catch (IOException e) { e.printStackTrace(); } bluetoothSocket = null; } @Override public void newConnectionComplementary(Object bluetoothSocket) { Log.d("BluetoothConnectionManager", "newConnectionComplementary"); this.bluetoothSocket = (BluetoothSocket) bluetoothSocket; try { streamConnection = new StreamedConnection(this.bluetoothSocket.getInputStream(), this.bluetoothSocket.getOutputStream()); streamConnection.addConnectionListener(connection); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
c6421461c8b398629e67c7a6fd61996fb27aa04e
[ "Java" ]
1
Java
ioArmband/ioarmband_android_service
6fadd25233be58fba34aeaf754287daf7239d55a
20e71cb2e2a974c7512b60ed2e0eba3ffd96e160
refs/heads/master
<file_sep>// https://github.com/sermonis/three-geojson-geometry/blob/master/src/index.js#L167 function toSphereCoordinates ( lat, lng, radius ) { const phi = ( 90 - lat ) * Math.PI / 180; const theta = ( 180 - lng ) * Math.PI / 180; const x = radius * Math.sin( phi ) * Math.cos( theta ); const y = radius * Math.cos( phi ); const z = radius * Math.sin( phi ) * Math.sin( theta ); return { x, y, z }; } function returnCurveCoordinates ( latitudeA, longitudeA, latitudeB, longitudeB, size ) { const start = toSphereCoordinates( latitudeA, longitudeA, size ); const end = toSphereCoordinates( latitudeB, longitudeB, size ); const midPointX = ( start.x + end.x ) / 2; const midPointY = ( start.y + end.y ) / 2; const midPointZ = ( start.z + end.z ) / 2; let distance = Math.pow( end.x - start.x, 2 ); distance += Math.pow( end.y - start.y, 2 ); distance += Math.pow( end.z - start.z, 2 ); distance = Math.sqrt( distance ); let multipleVal = Math.pow( midPointX, 2 ); multipleVal += Math.pow( midPointY, 2 ); multipleVal += Math.pow( midPointZ, 2 ); multipleVal = Math.pow( distance, 2 ) / multipleVal; multipleVal = multipleVal * 0.7; let midX = midPointX + multipleVal * midPointX; let midY = midPointY + multipleVal * midPointY; let midZ = midPointZ + multipleVal * midPointZ; return { start: { x: start.x, y: start.y, z: start.z, }, mid: { x: midX, y: midY, z: midZ, }, end: { x: end.x, y: end.y, z: end.z, }, }; }; const GLOBE_RADIUS = 200; const CURVE_MIN_ALTITUDE = 20; const CURVE_MAX_ALTITUDE = 200; const DEGREE_TO_RADIAN = Math.PI / 180; function clamp ( num, min, max ) { return num <= min ? min : ( num >= max ? max : num ); } function coordinateToPosition ( lat, lng, radius ) { const { x, y, z } = toSphereCoordinates( lat, lng, radius ); return new THREE.Vector3( x, y, z ); } function getSplineFromCoords ( latitudeA, longitudeA, latitudeB, longitudeB, size ) { const start = coordinateToPosition( latitudeA, longitudeA, size ); const end = coordinateToPosition( latitudeB, longitudeB, size ); // altitude const altitude = clamp( start.distanceTo( end ) * .45, CURVE_MIN_ALTITUDE, CURVE_MAX_ALTITUDE ); // 2 control points const interpolate = d3.geoInterpolate( [ longitudeA, latitudeA ], [ longitudeB, latitudeB ] ); const midCoord1 = interpolate( 0.25 ); const midCoord2 = interpolate( 0.75 ); const mid1 = coordinateToPosition( midCoord1[ 1 ], midCoord1[ 0 ], GLOBE_RADIUS + altitude ); const mid2 = coordinateToPosition( midCoord2[ 1 ], midCoord2[ 0 ], GLOBE_RADIUS + altitude ); return { start, end, mid1, mid2 }; } <file_sep># Globe - [Demo](https://sermonis.github.io/three-globe-particles/) Particles globe with countries in Three.js. 
- Three.js - d3.js ### Preview --- ![Preview](./previews/preview.png) ## Author [<NAME>](https://github.com/nenadV91) ## License Licensed under MIT <file_sep>class Markers { constructor ( countries, { markerRadius = 2 } = {} ) { this.countries = countries; this.radius = config.sizes.globe + config.sizes.globe * config.scale.markers; groups.markers = new THREE.Group(); groups.markers.name = 'GlobeMarkers'; this.markerGeometry = new THREE.SphereGeometry( markerRadius, 15, 15 ); this.markerMaterial = new THREE.MeshBasicMaterial(); this.markerMaterial.transparent = true; this.markerMaterial.opacity = 0.8; this.create(); } create () { for ( let i = 0; i < this.countries.length; i++ ) { const country = this.countries[ i ]; if ( country.latitude && country.longitude ) { const lat = +country.latitude; const lng = +country.longitude; const coords = toSphereCoordinates( lat, lng, this.radius ); const marker = new Marker( this.markerMaterial, this.markerGeometry, country.name, coords ); elements.markers.push( marker ); } } } }; <file_sep>// Select countries. const SouthAmerica = [ 'Ecuador', 'Colombia', 'Paraguay', 'Uruguay', 'Guyana', 'Venezuela, RB', 'Peru', 'Panama', 'Cuba', ]; const NorthAmerica = [ 'Mexico', 'United States', 'Greenland', 'Iceland', ]; const Europe = [ 'Norway', 'Greece', 'Serbia', 'Croatia', 'Spain', 'Portugal', 'Germany', 'Italy', 'Ukraine', 'Denmark', 'Romania', ]; const Africa = [ 'Chad', 'Nigeria', 'Namibia', 'Zambia', 'South Sudan', 'Somalia', 'Uganda', 'Kenya', 'Malawi', 'Comoros', 'Madagascar', 'Ethiopia', 'Yemen, Rep.', 'Sudan', ]; const Asia = [ 'Pakistan', 'India', 'Nepal', 'Kazakhstan', 'Maldives', 'Sri Lanka', 'Mongolia', 'Thailand', 'Lao PDR', 'Cambodia', 'Vietnam', 'Singapore', 'Indonesia', ]; const Rest = [ 'New Caledonia', 'New Zealand', 'Tonga', 'Fiji', 'Nauru', 'Solomon Islands', 'Kiribati', 'Tuvalu', ] const selected = [ ...Asia, ...Africa, ...Europe, ...NorthAmerica, ...SouthAmerica, ...Rest, ]; function selectCountries ( list, countries ) { return list.map( name => { const country = countries.find( c => c.name === name ); const { latitude, longitude } = country; return { name, latitude, longitude }; } ); } // Connections. 
const connections = { 'Colombia': [ 'Ecuador', 'Cuba', 'Mexico', 'Peru', 'Venezuela, RB', 'Guyana', 'United States', ], 'South Sudan': [ 'Nigeria', 'Sudan', 'Kenya', 'Uganda', 'Zambia', 'Malawi', 'Ethiopia', 'Somalia', 'Madagascar', 'Yemen, Rep.', ], 'India': [ 'Pakistan', 'Kazakhstan', 'Maldives', 'Sri Lanka', 'Vietnam', 'Thailand', ], 'Thailand': [ 'Singapore', 'Indonesia', 'Nepal', 'Vietnam', 'Sri Lanka', 'Cambodia', 'Pakistan' ], 'Panama': [ 'Cuba', 'Mexico', 'Ecuador', 'Colombia', 'Peru', 'Venezuela, RB', 'United States' ], 'Fiji': [ 'Tuvalu', 'Nauru', 'Kiribati', 'Tonga', 'New Caledonia', 'New Zealand' ], }; function getCountry ( name, countries ) { return countries.find( c => c.name === name ); } function getCountries ( object, countries ) { return Object.keys( object ).reduce( ( r, e ) => { r[ e ] = object[ e ].map( c => getCountry( c, countries ) ); return r; }, {} ); } <file_sep>class Globe { constructor ( radius ) { this.radius = config.sizes.globe; this.geometry = new THREE.SphereGeometry( this.radius, 64, 64 ); // this.geometry.rotateY( THREE.Math.degToRad( -90 ) ); groups.globe = new THREE.Group(); groups.globe.name = 'Globe'; this.initGlobe(); this.initAtmosphere(); return groups.globe; } initGlobe () { const scale = config.scale.globeScale; this.globeMaterial = this.createGlobeMaterial(); this.globe = new THREE.Mesh( this.geometry, this.globeMaterial ); // this.globe.rotateY( THREE.Math.degToRad( -90 ) ); this.globe.scale.set( scale, scale, scale ); elements.globe = this.globe; groups.map = new THREE.Group(); groups.map.name = 'Map'; groups.map.add( this.globe ); groups.globe.add( groups.map ); } initAtmosphere () { this.atmosphereMaterial = this.createGlobeAtmosphere(); this.atmosphere = new THREE.Mesh( this.geometry, this.atmosphereMaterial ) this.atmosphere.scale.set(1.2, 1.2, 1.2); elements.atmosphere = this.atmosphere; groups.atmosphere = new THREE.Group(); groups.atmosphere.name = 'Atmosphere'; groups.atmosphere.add( this.atmosphere ); groups.globe.add( groups.atmosphere ); } createGlobeMaterial () { const texture = loader.load( './assets/textures/earth_dark.jpg' ); const shaderMaterial = new THREE.ShaderMaterial( { uniforms: { texture: { value: texture } }, vertexShader: shaders.globe.vertexShader, fragmentShader: shaders.globe.fragmentShader, blending: THREE.AdditiveBlending, transparent: true, } ); const normalMaterial = new THREE.MeshBasicMaterial( { blending: THREE.AdditiveBlending, transparent: true, } ); return shaderMaterial; } createGlobeAtmosphere () { return new THREE.ShaderMaterial( { vertexShader: shaders.atmosphere.vertexShader, fragmentShader: shaders.atmosphere.fragmentShader, blending: THREE.AdditiveBlending, side: THREE.BackSide, transparent: true, uniforms: {} } ); } }; <file_sep>class Lines { constructor () { this.countries = Object.keys( data.connections ); this.total = this.countries.length; this.group = groups.lines = new THREE.Group(); this.group.name = 'Lines'; this.create(); this.animate(); this.createDots(); } changeCountry () { countries.index++; if ( countries.index >= this.total ) { countries.index = 0; } if ( countries.selected ) { countries.selected.visible = false; } this.select(); groups.lineDots.children = []; elements.lineDots = []; this.createDots(); } createDots () { const lineDots = new Dots(); groups.globe.add( groups.lineDots ); } animate () { if( !countries.selected ) { this.select(); } this.interval = setInterval( () => this.changeCountry(), countries.interval ); } select () { const next = this.countries[ countries.index ]; 
const selected = groups.lines.getObjectByName( next ); countries.selected = selected; countries.selected.visible = true; } create () { const { connections, countries } = data; for (let i in connections ) { const start = getCountry( i, countries ); const group = new THREE.Group(); group.name = i; for ( let j in connections[ i ] ) { const end = connections[ i ][ j ]; const line = new Line( start, end ); elements.lines.push( line.mesh ); group.add( line.mesh ); } group.visible = false; groups.lines.add( group ); } } }; class Line { constructor ( start, end ) { const { globe } = config.sizes; const { markers } = config.scale; this.start = start; this.end = end; this.radius = globe + globe * markers; this.curve = this.createCurve(); this.geometry = new THREE.BufferGeometry().setFromPoints( this.curve.getPoints( 200 ) ); this.material = this.createMaterial(); this.line = new MeshLine(); this.line.setGeometry( this.geometry ); this.mesh = new THREE.Mesh( this.line.geometry, this.material ); this.mesh._path = this.geometry.vertices; } createCurve () { const { start, end, mid1, mid2 } = getSplineFromCoords( this.start.latitude, this.start.longitude, this.end.latitude, this.end.longitude, this.radius ); return new THREE.CubicBezierCurve3( start, mid1, mid2, end ); } createMaterial () { return new MeshLineMaterial( { color: config.colors.globeLines, transparent: true, opacity: 0.45 } ); } }; <file_sep>class Marker { constructor ( material, geometry, label, cords, { textColor = 'white', pointColor = config.colors.globeMarkerColor, glowColor = config.colors.globeMarkerGlow } = {} ) { this.material = material; this.geometry = geometry; this.labelText = label; this.cords = cords; this.isAnimating = false; this.textColor = textColor; this.pointColor = new THREE.Color( pointColor ); this.glowColor = new THREE.Color( glowColor ); this.group = new THREE.Group(); this.group.name = 'Marker'; this.createLabel(); this.createPoint(); this.createGlow(); this.setPosition(); groups.markers.add( this.group ); } createLabel () { const text = this.createText(); const texture = new THREE.Texture( text ); texture.minFilter = THREE.LinearFilter; textures.markerLabels.push( texture ); const material = new THREE.SpriteMaterial(); material.map = texture; material.depthTest = false; material.useScreenCoordinates = false; this.label = new THREE.Sprite( material ); this.label.scale.set( 40, 20, 1 ); this.label.center.x = 0.25; this.label.translateY( 2 ); this.group.add( this.label ); elements.markerLabel.push( this.label ); } createPoint () { this.point = new THREE.Mesh( this.geometry, this.material ); this.point.material.color.set( this.pointColor ); this.group.add( this.point ); elements.markerPoint.push( this.point ); } createGlow () { this.glow = new THREE.Mesh( this.geometry, this.material.clone() ); this.glow.material.color.set( this.glowColor ); this.glow.material.opacity = 0.6; this.group.add( this.glow ); elements.markerPoint.push( this.glow ); } animateGlow () { if ( !this.isAnimating ) { if ( Math.random() > 0.99 ) { this.isAnimating = true; } } else if ( this.isAnimating ) { this.glow.scale.x += 0.025; this.glow.scale.y += 0.025; this.glow.scale.z += 0.025; this.glow.material.opacity -= 0.005; if ( this.glow.scale.x >= 4 ) { this.glow.scale.x = 1; this.glow.scale.y = 1; this.glow.scale.z = 1; this.glow.material.opacity = 0.6; this.glow.isAnimating = false; } } } setPosition () { const { x, y, z } = this.cords; this.group.position.set( -x, y, -z ); } createText () { const element = document.createElement( 'canvas' ); 
const canvas = new fabric.Canvas( element ); const text = new fabric.Text( this.labelText, { left: 0, top: 0, fill: this.textColor, fontFamily: 'Open Sans', } ); canvas.add( text ); return element; } }; <file_sep>class GeoJSON { constructor ( json ) { this.json = json; this.radius = config.sizes.globe; // groups.contours = new THREE.Group(); groups.contours.name = 'geoJSON'; groups.contours.rotateY( THREE.Math.degToRad( 90 ) ); // this.initGeoJson(); return groups.contours; } initGeoJson () { // TODO: d3 geometry creation. // https://github.com/sermonis/three-geojson-geometry // https://github.com/sermonis/three-geojson-geometry/blob/master/src/index.js const lineObjs = [ // new THREE.LineSegments( // // https://github.com/d3/d3-geo/blob/main/README.md#geoGraticule10 // new THREE.GeoJsonGeometry(d3.geoGraticule10(), alt), // new THREE.LineBasicMaterial({ color: 'white', opacity: 0.04, transparent: true }) // ) ]; const materials = [ // https://threejs.org/examples/#webgl_lines_fat // https://github.com/mrdoob/three.js/blob/master/examples/webgl_lines_fat.html // new THREE.LineBasicMaterial( { color: 'blue', linewidth: 3, } ), // outer ring // new THREE.LineBasicMaterial( { color: 0xffaa00, linewidth: 3, transparent: true, opacity: 0 } ), // outer ring new THREE.LineBasicMaterial( { color: 0xffaa00, linewidth: 3, transparent: true, opacity: 0 } ), // outer ring // new THREE.MeshBasicMaterial( { color: 0xffaa00, linewidth: 3, } ), // outer ring // new THREE.LineDashedMaterial( { color: 0xffaa00, dashSize: 3, gapSize: 1 } ), // outer ring new THREE.LineBasicMaterial( { color: 'green', linewidth: 3, } ), // inner holes ]; // const vectorArray = []; this.json.features.forEach( ( { properties, geometry } ) => { // console.log( geometry ); // console.log( new THREE.GeoJsonGeometry( geometry, this.radius ) ); // console.log( new THREE.GeoJsonGeometry( geometry, this.radius ).toJSON() ); // const g = new THREE.GeoJsonGeometry( geometry, this.radius ); // console.log( g.getAttribute( 'position' ) ); // console.log( g.toNonIndexed() ); // g.toNonIndexed() // const vertices = g.getAttribute( 'position' ); // console.log( new THREE.BufferAttribute( vertices, 3 ) ); // console.log( new THREE.BufferAttribute( g.getAttribute( 'position' ) ) ); // console.log( g.getAttribute( 'position' ) ); // console.log( g.toNonIndexed() ); // const s = new THREE.ShapeGeometry(); // g.computeVertexNormals(); // console.log( s.fromGeometry(g.toNonIndexed()) ); // fromGeometry // THREE.GeometryUtils.center(geometry); // copyArray: ƒ (a) // copyAt: ƒ (a,b,c) // copyColorsArray: ƒ (a) // copyIndicesArray: ƒ () // copyVector2sArray: ƒ (a) // copyVector3sArray: ƒ (a) // copyVector4sArray: // const position = g.attributes.position; // const vector = new THREE.Vector3(); // // const vector = new THREE.Vector2(); // // // vector.fromBufferAttribute(position, i); // vector.fromBufferAttribute(position, 0); // 0 -? 
// // for ( let i = 0, length = position.length; i < length; i ) { // // // vectorArray.push( vector.fromBufferAttribute( position, i ) ); // vectorArray.push( vector.fromBufferAttribute( position, i ) ); // // }; // gjg.computeVertexNormals(); // console.log( position.length, position ); // console.log( vector ); // const shapeGeometry = new THREE.ShapeGeometry(); // console.log( shapeGeometry.fromGeometry(gjg) ); // fromDirectGeometry: ƒ (a) // fromGeometry: ƒ (a) lineObjs.push( new THREE.LineSegments( // lineObjs.push( new THREE.Mesh( new THREE.GeoJsonGeometry( geometry, this.radius ), materials ) ); } ); // console.log( vectorArray ); // console.log( lineObjs ); lineObjs.forEach( obj => groups.contours.add( obj ) ); // lineObjs.forEach( obj => { // // // obj.rotateY( THREE.Math.degToRad( -90 ) ); // obj.computeLineDistances(); // groups.contours.add( obj ); // // } ); } };
dfc15a00e546f2e8648f5e2da520038269671d64
[ "JavaScript", "Markdown" ]
8
JavaScript
sermonis/three-globe-particles
9487c7caed34c589564a2bebaf9ac1736e634b27
5bc97419eb6e408353da9d9ee15f51f2b96d07c3
refs/heads/master
<file_sep>#!/usr/bin/env python ''' This module prepares the images for captioning. ''' import pandas as pd import pickle as pickle from keras.preprocessing import image from vgg16 import VGG16 import numpy as np from keras.applications.imagenet_utils import preprocess_input counter = 0 def load_image(path): img = image.load_img(path, target_size=(224,224)) x = image.img_to_array(img) x = np.expand_dims(x, axis=0) x = preprocess_input(x) return np.asarray(x) def load_encoding_model(): model = VGG16(weights='imagenet', include_top=True, input_shape = (224, 224, 3)) return model def get_encoding(model, img): global counter counter += 1 image = load_image('./data2/'+str(img)) pred = model.predict(image) pred = np.reshape(pred, pred.shape[1]) print "Encoding image: "+str(counter) print pred.shape return pred def get_img_list(img_region_path): imgs_df = pd.read_csv(img_region_path) file_names = [] for i in range(0,imgs_df.shape[0]): file_names.append(imgs_df['image_name'].iloc[i]) return file_names def encode_imgs(test_imgs): encoded_images = {} encoding_model = load_encoding_model() for img in test_imgs: encoded_images[img] = get_encoding(encoding_model, img) with open( "encoded_images.p", "wb" ) as pickle_f: pickle.dump( encoded_images, pickle_f ) def main(): tst_imgs = get_img_list('./data2/img_regions_cats_1.csv') encode_imgs(tst_imgs) if __name__ == "__main__": main()<file_sep>#!/usr/bin/env python ''' This program measures similarity between two captions using word2vec embeddings @author : <NAME> @date : 14-Nov-2017 ''' import numpy as np import math from nltk.corpus import stopwords from gensim.models import KeyedVectors class CaptionVector: def __init__(self, caption, embeddings): self.embedding_model = embeddings self.vector = self.caption_to_vector(caption) def caption_vec_based_on_avg_vecs(self, vec_set, ignore = []): if len(ignore) == 0: return np.mean(vec_set, axis = 0) else: return np.dot(np.transpose(vec_set), ignore) / sum(ignore) def caption_to_vector(self, caption): cached_stop_words = stopwords.words("english") caption = caption.lower() words_in_caption = [word for word in caption.split() if word not in cached_stop_words] vec_set = [] for word in words_in_caption: try: word_vecs=self.embedding_model[word] vec_set.append(word_vecs) except: pass return self.caption_vec_based_on_avg_vecs(vec_set) def calc_cosine_sim(self, other_caption_vec): cosine_similarity = np.dot(self.vector, other_caption_vec) / (np.linalg.norm(self.vector) * np.linalg.norm(other_caption_vec)) try: if math.isnan(cosine_similarity): cosine_similarity=0 except: cosine_similarity=0 return cosine_similarity class CaptionSimilarity: word_embeddings = 'embeddings/GoogleNews-vectors-negative300.bin' def __init__(self): print("Loading embeddings vector...") self.embedding_model = KeyedVectors.load_word2vec_format(self.word_embeddings, binary=True) def caption_similarity(self, caption1, caption2): cap_vec_1 = CaptionVector(caption1, self.embedding_model) cap_vec_2 = CaptionVector(caption2, self.embedding_model) similarityScore = cap_vec_1.calc_cosine_sim(cap_vec_2.vector) return similarityScore def main(): cs = CaptionSimilarity() cap1 = "The brown cat with furry hair" cap2 = "The cat in brown color is sitting on table" print(cs.caption_similarity(cap1, cap2)) cap1 = "The brown cat with furry hair" cap2 = "The dog in brown color is sitting on table" print(cs.caption_similarity(cap1, cap2)) if __name__ == "__main__": main()<file_sep>#!/usr/bin/env python """ This program creates caption similarity matrix, by 
comparing each caption with all others. @author : <NAME> @date : 14 Nov 2017 """ import numpy as np import pandas as pd from caption_similarity import CaptionSimilarity class ImgCap2Vecs: def __init__(self): self.cs = CaptionSimilarity() self.cd = {} def create_cap_dict(self, caption_file): imgs_df = pd.read_csv(caption_file) for i in range(0,imgs_df.shape[0]): self.cd[imgs_df['image_name'].iloc[i]] = imgs_df['caption'].iloc[i] def img_caption_similarity_matrix(self, caption_file, csv_file): self.create_cap_dict(caption_file) file_names = [] features = [] for key_base,value_base in self.cd.items(): similarity_vec = [] for key_comp,value_comp in self.cd.items(): if key_base != key_comp: similarity_vec.append(self.cs.caption_similarity(value_base, value_comp)) else: similarity_vec.append(0) file_names.append(key_base) features.append(similarity_vec) df = pd.DataFrame(features) df.columns = file_names df.index = file_names df.index.name = "image_name" df.to_csv(csv_file) def main(): icv = ImgCap2Vecs() icv.img_caption_similarity_matrix("./data2/img_cap_human.txt","./data2/cap_cat_human.csv") if __name__ == "__main__": main()<file_sep># imageSearchImproved This is the improvement over my imageSearch repo. - Under imageSearch, the improvement was the introduction of bounding boxes to improve image search - In this repo, I take it a step further by first captioning the images and then using caption similarity to improve the search. ## Image Captioning The quality of the image captions is the determining factor in getting this search to work, so I ran the following experiments. I used the code in the repo - https://github.com/anuragmishracse/caption_generator to help caption the images. I ran this code on - Flickr8k dataset - Here I got poor results, as there is no cat class, hence my captions have no mention of cats - PascalSentences dataset - http://vision.cs.uiuc.edu/pascal-sentences/ , I got somewhat better results, but still not up to the mark Then, to prove the point that this works, - I hand-captioned 100-odd images and included the cat and dog breed names in the captions, and it worked really well. - I further realized I can use a model to classify these images into breeds by training on the following dataset - http://www.robots.ox.ac.uk/~vgg/data/pets/ - Then I can augment the PascalSentences dataset captions with the class labels of the above model to match the performance on the hand-captioned images. ## Combining the Bounding Box and Caption approach Finally, I combined both approaches by taking a simple mean of their similarity scores. A demo of all three approaches can be seen in the Jupyter notebook "Search Image.ipynb" (short illustrative sketches of the caption-averaging, score-combination and top-N lookup ideas are appended after search.py below). ## Files The following class takes the idea of word similarity using word2vec and extends it to caption similarity by averaging the word vectors to get caption vectors. Then I use cosine similarity between the two captions to see how close they are... > python caption_similarity.py The following file runs in Python 2.7 and prepares my images for the caption generator repo code listed below > python encode_img_for_cap.py Run the Python 2.7 code in the repo https://github.com/anuragmishracse/caption_generator to train your captioning model and then caption your images using it. > caption_generator.py: The base script that contains functions for model creation, batch data generation etc. > prepare_dataset.py: Prepares the dataset for training. Changes have to be made to this script if a new dataset is to be used. > train_model.py: Module for training the caption generator.
> test_model.py: Run it to caption your images This program creates caption similarity matrix, by comparing each caption with all others. It takes as input the caption generated in above step and outputs a csv file containing similarity scores for between all images. > python img_cap_vecs.py sample output as below image_name | Cat_001.jpg | Cat_002.jpg | Cat_003.jpg ---------- | ----------- | ----------- | ----------- **Cat_001.jpg** | 0.0 | 0.5622765421867371 | 0.591035008430481 **Cat_002.jpg** | 0.5622765421867371 | 0.0 | 0.5671480298042297 **Cat_003.jpg** | 0.591035008430481 | 0.5671480298042297 | 0.0 The following file as all the functions to perform image search > python search.py - img_search_normal("Cat_008.jpg") - Does a naive vec to vec similarity - img_search_frcnn("Cat_008.jpg") - Improved image search using bounding boxes (Faster RNN) - img_search_caption("Cat_008.jpg") - Further improvement using caption similarity (Above listed method) - img_search_combined("Cat_008.jpg") - Combination of bounding boxes and captions search. unzip the imgdata.zip to get all images and run "Search Image.ipynb" to see the results. <file_sep>#!/usr/bin/env python """ This program is a helper that loads all the images that need to be searched """ from keras.applications import inception_v3, resnet50 from keras.preprocessing import image from keras.models import Model import matplotlib.image as mpimg import matplotlib.pyplot as plt from scipy.spatial import distance import pandas as pd import numpy as np import os import glob from os import walk topn = 3 class ImgSearch: _model = None _search_path = './data2' _img_cols_px = 224 _img_rows_px = 224 _img_layers = 3 _img_features = None _search_scores = None _file_names = None _match_count = 0 def __init__(self): #self._load_inception3() #self._load_resnet50() pass self.df = pd.read_csv("./data2/cap_cat_human.csv", index_col="image_name") def _load_inception3(self): base_model = inception_v3.InceptionV3(weights='imagenet', include_top=True) #base_model.summary() layer_out = base_model.get_layer('avg_pool') self._model = Model(inputs=base_model.input, outputs=layer_out.output) def _load_resnet50(self): base_model = resnet50.ResNet50(weights='imagenet', include_top=True) #base_model.summary() layer_out = base_model.get_layer('avg_pool') self._model = Model(inputs=base_model.input, outputs=layer_out.output) def image_to_vector(self, image_name): img_path = os.path.join(self._search_path,image_name) img = image.load_img(img_path, target_size=( self._img_cols_px, self._img_rows_px)) img = image.img_to_array(img) img = img.reshape(1, self._img_cols_px, self._img_rows_px, self._img_layers) preds = self._model.predict(img) return preds def create_img_vectors(self, location): f = [] file_names = [] for( dirpath, dirnames, filenames ) in walk(self._search_path): f.extend(filenames) for file in f: if file.endswith(".jpg"): file_names.append(file.replace(".npy","")) break file_names = np.asarray(file_names) np.save(location + ".label", file_names) features = [] for file_name in file_names: print(file_name) img_vec = self.image_to_vector(file_name) img_vecs = np.array(img_vec) features.append(img_vecs) features = np.asarray(features) np.save(location + ".npy", features) def show_image(self, name, label): img_path = os.path.join(self._search_path, name) img= mpimg.imread(img_path) plt.imshow(img) plt.title(label) plt.suptitle(name) plt.show() def load_imgs_vectors(self, location): if self._file_names is None: self._file_names = np.load(location + ".label.npy") 
self._img_features = np.load(location + ".npy") def img_search_similarity_matrix(self, csv_file): file_names = [] features = [] img_nos = 0 for img_file in self._file_names: base_img_vec, indx = self.load_img_from_vec(img_file) similarity_vec = [] vec_nos = 0 for img_vec in self._img_features: if img_nos != vec_nos: score_img = 1 - np.linalg.norm(img_vec - base_img_vec)/100 #score_img = 1 - distance.cosine(img_vec, base_img_vec) score_cap = self.img_similarity_score_based_on_caption(self._file_names[vec_nos],self._file_names[img_nos]) score = 0.6*score_img + 0.4*score_cap print(score_img, score_cap, score) else: score = 0 similarity_vec.append(score) vec_nos = vec_nos + 1 file_names.append(img_file) features.append(similarity_vec) img_nos = img_nos + 1 df = pd.DataFrame(features) df.columns = file_names df.index = file_names df.index.name = "image_name" df.to_csv(csv_file) def search_img(self, comparison_image): search_scores = [] for feat in self._img_features : score = np.linalg.norm(feat - comparison_image) #score = 1 - distance.cosine(comparison_image, feat) search_scores.append(score) lowest = sorted(search_scores, key=float, reverse=False) self._search_scores = search_scores return lowest def increment_count(self,base_class, match_class): if self.get_class(base_class) == self.get_class(match_class): self._match_count = self._match_count + 1 def show_img_with_score(self, score, base, flist): search_index = self._search_scores.index(score) fname = self._file_names[search_index] if fname not in flist and fname != base: flist.append(self._file_names[search_index]) self.show_image(self._file_names[search_index], "Similarity Score : " + str(score)) self.increment_count(base, self._file_names[search_index]) print("Search Score = " + str(score)) return flist def load_img_from_vec(self, image_name): fname = self._file_names.tolist() indx = fname.index(image_name) return self._img_features[indx], indx def get_class(self, carname): return carname.split("_0")[0] def img_similarity_score_based_on_caption(self, img1, img2): return self.df.loc[img1][img2] def img_search_with_text(self, img_name): #df = pd.read_csv("./data2/cap_cat_human_1.csv", index_col="image_name") nlargest = topn order = np.argsort(-self.df.values, axis=1)[:, :nlargest] result = pd.DataFrame(self.df.columns[order], columns=['top{}'.format(i) for i in range(1, nlargest+1)], index=self.df.index) return result.ix[img_name] def get_img_caption_vector(self, img_name): df = pd.read_csv("./data2/cap_cat_human.csv", index_col="image_name") return df[img_name] def imgs_to_npy(): query = ImgSearch() query.create_img_vectors('./data2/cat_resnet50') def img_search_common(search_img_name, img_vec_path): query = ImgSearch() query.load_imgs_vectors(img_vec_path) search_img,indx = query.load_img_from_vec(search_img_name) query.show_image(search_img_name, "Base Image") search_scores = query.search_img(search_img) print(len(search_scores)) flist= [] res = 1 while (1): flist = query.show_img_with_score(search_scores[res], search_img_name, flist) if len(flist) >= topn: break res = res + 1 def img_search_frcnn(search_img_name): img_search_common(search_img_name, './data2/reg_cat_2') def img_search_normal(search_img_name): img_search_common(search_img_name, './data2/cat_resnet50') def img_search_caption(img_name): query = ImgSearch() query.show_image(img_name, "Base Image") results = query.img_search_with_text(img_name) for img_name in results: query.show_image(img_name, img_name) def img_search_combined(img_name): query = ImgSearch() 
query.show_image(img_name, "Base Image") df = pd.read_csv("./data2/img_vec_mat.csv", index_col="image_name") nlargest = topn order = np.argsort(-df.values, axis=1)[:, :nlargest] result = pd.DataFrame(df.columns[order], columns=['top{}'.format(i) for i in range(1, nlargest+1)], index=df.index) for img_name in result.ix[img_name]: if img_name.count('.') > 1: img_name = img_name.rsplit('.',1)[0] query.show_image(img_name, img_name) def main(): #img_search_caption("Cat_001.jpg") ImgSrch = ImgSearch() #print(ImgSrch.img_similarity_score_based_on_caption("Cat_001.jpg","Cat_003.jpg")) ImgSrch.load_imgs_vectors('./data2/reg_cat_2') ImgSrch.img_search_similarity_matrix("./data2/img_vec_mat.csv") #print(ImgSrch.get_img_caption_vector('Cat_008.jpg')) #img_search_frcnn("Cat_008.jpg") if __name__ == "__main__": main()
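For illustration, here is a minimal, self-contained sketch of the caption-similarity idea described in the README above: average the word vectors of a caption's tokens into a single caption vector, then compare two captions with cosine similarity. The tiny `toy_embeddings` dictionary, its 3-dimensional vectors and the small `STOP_WORDS` set are invented stand-ins for the GoogleNews word2vec model and the NLTK stop-word list that caption_similarity.py actually uses; nothing below is part of the original repository.

import numpy as np

# Invented 3-d stand-ins for the 300-d GoogleNews word2vec vectors.
toy_embeddings = {
    "cat":   np.array([0.9, 0.1, 0.0]),
    "dog":   np.array([0.7, 0.3, 0.1]),
    "brown": np.array([0.2, 0.8, 0.1]),
    "table": np.array([0.1, 0.2, 0.9]),
}

# Small stand-in for the English stop-word list.
STOP_WORDS = {"the", "a", "an", "is", "on", "in", "with"}


def caption_to_vector(caption, embeddings):
    """Average the vectors of the known, non-stop-word tokens of a caption."""
    tokens = [w for w in caption.lower().split() if w not in STOP_WORDS]
    vecs = [embeddings[w] for w in tokens if w in embeddings]
    return np.mean(vecs, axis=0) if vecs else np.zeros(3)


def cosine_similarity(a, b):
    """Cosine similarity between two caption vectors (0.0 if either is zero)."""
    denom = np.linalg.norm(a) * np.linalg.norm(b)
    return float(np.dot(a, b) / denom) if denom else 0.0


if __name__ == "__main__":
    v1 = caption_to_vector("The brown cat on the table", toy_embeddings)
    v2 = caption_to_vector("A brown dog on a table", toy_embeddings)
    print(cosine_similarity(v1, v2))  # high, but below 1.0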
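The score combination from the "Combining the Bounding Box and Caption approach" section can be sketched in a few lines. One detail to note: the README describes a simple mean of the two similarity scores, while img_search_similarity_matrix() in search.py above uses a 0.6 / 0.4 weighting; both variants are shown. The numeric scores in the usage example are fabricated purely for illustration.

def combine_scores(image_score, caption_score, image_weight=0.5):
    """Blend the bounding-box (image-vector) similarity with the caption similarity."""
    return image_weight * image_score + (1.0 - image_weight) * caption_score


if __name__ == "__main__":
    img_sim, cap_sim = 0.72, 0.58                 # e.g. one cell from each similarity matrix
    print(combine_scores(img_sim, cap_sim))        # simple mean, as described in the README
    print(combine_scores(img_sim, cap_sim, 0.6))   # the 0.6/0.4 weighting used in search.py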
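Finally, a small sketch of how the pairwise similarity matrix (the CSV written by img_cap_vecs.py and read back by search.py) is turned into the "top N most similar images" result shown in the README's sample-output table. The 3x3 data frame below is fabricated; only the argsort-based lookup mirrors what img_search_with_text() does above.

import numpy as np
import pandas as pd


def top_n_similar(df, image_name, n=3):
    """Return the n column labels with the highest similarity scores for one row."""
    row = df.loc[image_name]
    order = np.argsort(-row.values)[:n]
    return list(row.index[order])


if __name__ == "__main__":
    names = ["Cat_001.jpg", "Cat_002.jpg", "Cat_003.jpg"]
    df = pd.DataFrame(
        [[0.00, 0.56, 0.59],
         [0.56, 0.00, 0.57],
         [0.59, 0.57, 0.00]],
        index=names, columns=names,
    )
    print(top_n_similar(df, "Cat_001.jpg", n=2))  # ['Cat_003.jpg', 'Cat_002.jpg']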
2bf6e34b7eade059f43915b2896755c0074db38a
[ "Markdown", "Python" ]
5
Python
arorahardeep/imageSearchImproved
8e1d2e225693be106540fc71b4e3391775fb0a6c
5cf4ffe3f9006cc4243ec9f38273149324778b30
refs/heads/master
<file_sep><?php namespace Quiklyn; class OpaqueCollectionException extends \RuntimeException { }<file_sep><?php namespace Quiklyn; use MessagePack\Packer; use MessagePack\Unpacker; use PDO; /** * Class OpaqueCollection * @package Quiklyn */ class OpaqueCollection { const COMPRESSION_GZIP = 'Z'; const COMPRESSION_NONE = 'N'; const FORMAT_JSON = 'J'; const FORMAT_MSGPACK = 'M'; const FORMAT_RAW = 'R'; private static $packer; private static $unpacker; private $conn; private $table; private $format; private $compression; /** * OpaqueCollection constructor. * @param $conn PDO * @param string $table * @param string $format * @param string $compression */ public function __construct($conn, $table, $format = self::FORMAT_MSGPACK, $compression = self::COMPRESSION_NONE) { $this->conn = $conn; $this->table = self::quoteIdentifier($table); $this->format = $format; $this->compression = $compression; } public static function quoteIdentifier($name) { return '`' . str_replace('`', '``', $name) . '`'; } public function initialize() { if ($this->conn->exec($this->getCreateTableSql()) === false) { $this->throwError('failed to create the ' . $this->table . ' table', $this->conn->errorInfo()); } } /** * @return string */ public function getCreateTableSql() { return 'CREATE TABLE IF NOT EXISTS ' . $this->table . ' ( id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, key_id BINARY(16) NOT NULL, value MEDIUMBLOB NOT NULL, version BIGINT UNSIGNED NOT NULL, -- optimistic updates format CHAR(1) NOT NULL DEFAULT ' . $this->conn->quote(self::FORMAT_RAW) . ', compression CHAR(1) NOT NULL DEFAULT ' . $this->conn->quote(self::COMPRESSION_NONE) . ', is_deleted TINYINT(1) NOT NULL DEFAULT 0, -- logical deletion to address fragmentation created_dt BIGINT UNSIGNED NOT NULL, -- milliseconds since epoch updated_dt BIGINT UNSIGNED NOT NULL, -- milliseconds since epoch PRIMARY KEY (`id`), UNIQUE KEY (`key_id`), INDEX (`updated_dt`) ) ENGINE = InnoDB ROW_FORMAT = DYNAMIC DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci;'; } private function throwError($message, $errorInfo) { $errorMessage = $message; if ($errorInfo) { $errorMessage .= "\n" . 'SQLSTATE[' . $errorInfo[0] . '] ' . $errorInfo[1] . ' ' . $errorInfo[2]; } throw new OpaqueCollectionException($errorMessage); } public function getItem($key) { $sth = $this->conn->prepare( 'SELECT id, hex(key_id) AS `key`, value, version, format, compression, from_unixtime(created_dt * 0.001) AS created_dt, from_unixtime(updated_dt * 0.001) AS updated_dt FROM ' . $this->table . ' WHERE key_id = unhex(:key) AND is_deleted = 0;' ); $sth->bindParam('key', $key, PDO::PARAM_STR); if ($sth->execute() === false) { $this->throwError('failed to get item ' . 
$key, $sth->errorInfo()); } $result = $sth->fetch(PDO::FETCH_ASSOC); if (!$result) { return null; } $item = [ 'id' => $result['id'], 'key' => $result['key'], 'value' => self::decodeValue($result['value'], $result['format'], $result['compression']), 'version' => $result['version'], 'updated' => $result['updated_dt'], 'created' => $result['created_dt'] ]; return $item; } public static function decodeValue($value, $format, $compression) { switch ($compression) { case self::COMPRESSION_GZIP: $value = gzdecode($value); break; } switch ($format) { case self::FORMAT_JSON: $value = json_decode($value, true); if ($value === null) { throw new OpaqueCollectionException('failed to decode json value'); } break; case self::FORMAT_MSGPACK: $value = self::getUnpacker()->unpack($value); break; } return $value; } /** * @return Unpacker */ private static function getUnpacker(): Unpacker { if (self::$unpacker === null) { self::$unpacker = new Unpacker(); } return self::$unpacker; } public function putItem($value) { $value = $this->encodeValue($value); if ($this->conn->exec('SET @uuid := cast(uuid() AS BINARY(36))') === false) { $this->throwError('failed to generate uuid', $this->conn->errorInfo()); }; $sth = $this->conn->prepare( 'INSERT INTO ' . $this->table . ' ( key_id, value, version, format, compression, created_dt, updated_dt ) VALUES ( unhex(concat(substr(@uuid, 15, 4), substr(@uuid, 10, 4), substr(@uuid, 1, 8), substr(@uuid, 20, 4), substr(@uuid, 25))), :value, 1, :format, :compression, round(unix_timestamp(now(3)) * 1000), round(unix_timestamp(now(3)) * 1000) )' ); $sth->bindParam('value', $value, PDO::PARAM_LOB); $sth->bindParam('format', $this->format, PDO::PARAM_STR); $sth->bindParam('compression', $this->compression, PDO::PARAM_STR); if ($sth->execute() === false) { $this->throwError('failed to put item', $sth->errorInfo()); } $id = $this->conn->lastInsertId(); $sth = $this->conn->prepare('SELECT hex(key_id) FROM ' . $this->table . ' WHERE id = :id'); $sth->bindParam('id', $id); if ($sth->execute() === false) { $this->throwError('failed to get last insert id', $sth->errorInfo()); } return $sth->fetchColumn(); } private function encodeValue($value) { switch ($this->format) { case self::FORMAT_JSON: $value = json_encode($value); if ($value === null) { throw new OpaqueCollectionException('failed to encode json value'); } break; case self::FORMAT_MSGPACK: $value = self::getPacker()->pack($value); break; } switch ($this->compression) { case self::COMPRESSION_GZIP: $value = gzencode($value); break; } return $value; } /** * @return Packer */ private static function getPacker(): Packer { if (self::$packer === null) { self::$packer = new Packer(); } return self::$packer; } public function updateItem($key, $value, $version) { $value = $this->encodeValue($value); $sth = $this->conn->prepare( 'UPDATE ' . $this->table . ' SET value = :value, version = version + 1, format = :format, compression = :compression, updated_dt = round(unix_timestamp(now(3)) * 1000) WHERE key_id = unhex(:key_id) AND version = :version' ); $sth->bindParam('value', $value, PDO::PARAM_LOB); $sth->bindParam('format', $this->format, PDO::PARAM_STR); $sth->bindParam('compression', $this->compression, PDO::PARAM_STR); $sth->bindParam('key_id', $key, PDO::PARAM_STR); $sth->bindParam('version', $version, PDO::PARAM_INT); if ($sth->execute() === false) { $this->throwError('failed to update version ' . $version . ' of item ' . $key, $sth->errorInfo()); } if ($sth->rowCount() < 1) { $this->throwError('version ' . $version . ' of item ' . $key . 
' not found', $sth->errorInfo()); } } public function deleteItem($key) { $sth = $this->conn->prepare( 'UPDATE ' . $this->table . ' SET is_deleted = 1 WHERE key_id = unhex(:key)' ); $sth->bindParam('key', $key, PDO::PARAM_STR); if ($sth->execute() === false) { $this->throwError('failed to delete item ' . $key, $sth->errorInfo()); } if ($sth->rowCount() < 1) { $this->throwError('item ' . $key . ' not found', $sth->errorInfo()); } } } <file_sep>## OpaqueCollection Unstructured key-value storage for MySQL. <file_sep>COVERAGE_DIR = ./coverage PHPUNIT ?= ./vendor/bin/phpunit VENDOR_DIR = ./vendor .PHONY: check check: $(VENDOR_DIR) $(PHPUNIT) .PHONY: clean clean: rm -rf $(COVERAGE_DIR) .PHONY: coverage coverage: $(VENDOR_DIR) $(PHPUNIT) --coverage-html $(COVERAGE_DIR) $(VENDOR_DIR): composer.json composer install -n touch $(VENDOR_DIR) <file_sep><?php namespace Tests; use PDO; use PHPUnit\Framework\TestCase; use Quiklyn\OpaqueCollection; use Quiklyn\OpaqueCollectionException; /** * @covers \Quiklyn\OpaqueCollection */ final class OpaqueCollectionTest extends TestCase { protected static $pdo; /** * @var OpaqueCollection */ private $collection; private $table; public static function setUpBeforeClass() { $dbSettings = [ 'host' => getenv('DB_HOST') ?: '127.0.0.1', 'port' => getenv('DB_PORT') ?: '3306', 'database' => getenv('DB_DATABASE') ?: 'unittest', 'charset' => getenv('DB_CHARSET') ?: 'utf8', 'username' => getenv('DB_USERNAME') ?: 'root', 'password' => getenv('DB_PASSWORD') ?: '<PASSWORD>', ]; self::$pdo = new PDO( "mysql:host=$dbSettings[host];charset=$dbSettings[charset]", $dbSettings['username'], $dbSettings['password'] ); $databaseIdentifier = '`' . str_replace('`', '``', $dbSettings['database']) . '`'; $affectedRows = self::$pdo->exec( "CREATE DATABASE IF NOT EXISTS $databaseIdentifier DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci; USE $databaseIdentifier;" ); if ($affectedRows === null) { throw new \Exception('Failed to create database: ' . 
join(' ', self::$pdo->errorInfo())); } } public function testInitializeError() { $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['quote', 'errorInfo', 'exec']) ->getMock(); $connMock->expects($this->once()) ->method('exec')->willReturn(false); $connMock->expects($this->atLeastOnce()) ->method('quote')->willReturn(null); $connMock->expects($this->atLeastOnce()) ->method('errorInfo')->willReturn([0, 0, '']); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $this->collection->initialize(); } public function testGetItemNull() { $key = '00000000000000000000000000000000'; $item = $this->collection->getItem($key); $this->assertNull($item['value']); } public function testGetItemInvalidKey() { $key = 'lorem'; $item = $this->collection->getItem($key); $this->assertNull($item['value']); } public function testGetItemError() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute']) ->getMock(); $sthMock->expects($this->once()) ->method('execute') ->willReturn(false); $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare']) ->getMock(); $connMock->expects($this->once()) ->method('prepare')->willReturn($sthMock); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $this->collection->getItem('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'); } public function testGetItemJsonError() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute', 'fetch']) ->getMock(); $sthMock->expects($this->once()) ->method('execute') ->willReturn(true); $sthMock->expects($this->once()) ->method('fetch') ->willReturn([ 'id' => 1, 'key' => '<KEY>', 'compression' => OpaqueCollection::COMPRESSION_NONE, 'format' => OpaqueCollection::FORMAT_JSON, 'value' => '{"invalid": }' ]); $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare']) ->getMock(); $connMock->expects($this->once()) ->method('prepare')->willReturn($sthMock); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $this->collection->getItem('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'); } public function testPutItem() { $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $key = $this->collection->putItem($value); $item = $this->collection->getItem($key); $this->assertNotEmpty($key); $this->assertNotEmpty($item); $this->assertEquals($value, $item['value']); } public function testPutItemError() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute']) ->getMock(); $sthMock->expects($this->once()) ->method('execute') ->willReturn(false); $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare', 'exec']) ->getMock(); $connMock->expects($this->once()) ->method('prepare')->willReturn($sthMock); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $this->collection->putItem($value); } public function testPutItemLastInsertIdError() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute']) ->getMock(); $sthMock->expects($this->exactly(2)) ->method('execute') ->willReturnOnConsecutiveCalls(true, false); $connMock = 
$this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare', 'lastInsertId', 'exec']) ->getMock(); $connMock->expects($this->exactly(2)) ->method('prepare')->willReturn($sthMock); $connMock->expects($this->once()) ->method('lastInsertId') ->willReturn(1); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $this->collection->putItem($value); } public function testPutItemUuidError() { $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['exec', 'errorInfo']) ->getMock(); $connMock->expects($this->once()) ->method('exec')->willReturn(false); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $this->collection->putItem($value); } public function testUpdateItem() { $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $updatedValue = ['text' => 'Howdy', 'html' => '<div class="greeting">Howdy</div>']; $key = $this->collection->putItem($value); $item = $this->collection->getItem($key); $this->collection->updateItem($key, $updatedValue, $item['version']); $item = $this->collection->getItem($key); $this->assertNotEmpty($key); $this->assertNotEmpty($item); $this->assertEquals($updatedValue, $item['value']); } public function testUpdateItemJson() { $this->collection = new OpaqueCollection( self::$pdo, $this->table, OpaqueCollection::FORMAT_JSON ); $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $updatedValue = ['text' => 'Howdy', 'html' => '<div class="greeting">Howdy</div>']; $key = $this->collection->putItem($value); $item = $this->collection->getItem($key); $this->collection->updateItem($key, $updatedValue, $item['version']); $item = $this->collection->getItem($key); $this->assertNotEmpty($key); $this->assertNotEmpty($item); $this->assertEquals($updatedValue, $item['value']); } public function testUpdateItemGzip() { $this->collection = new OpaqueCollection( self::$pdo, $this->table, OpaqueCollection::FORMAT_MSGPACK, OpaqueCollection::COMPRESSION_GZIP ); $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $updatedValue = ['text' => 'Howdy', 'html' => '<div class="greeting">Howdy</div>']; $key = $this->collection->putItem($value); $item = $this->collection->getItem($key); $this->collection->updateItem($key, $updatedValue, $item['version']); $item = $this->collection->getItem($key); $this->assertNotEmpty($key); $this->assertNotEmpty($item); $this->assertEquals($updatedValue, $item['value']); } public function testUpdateItemError() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute', 'rowCount']) ->getMock(); $sthMock->method('execute') ->willReturn(false); $sthMock->expects($this->never()) ->method('rowCount'); $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare']) ->getMock(); $connMock->expects($this->once()) ->method('prepare')->willReturn($sthMock); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $key = '00000000000000000000000000000000'; $item = ['version' => 1]; $updatedValue = ['text' => 'Howdy', 'html' => '<div class="greeting">Howdy</div>']; $this->collection->updateItem($key, $updatedValue, 
$item['version']); } public function testUpdateItemConflict() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute', 'bindParam', 'rowCount']) ->getMock(); $sthMock->method('execute') ->willReturn(true); $sthMock->expects($this->atLeastOnce())->method('rowCount') ->willReturn(0); $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare']) ->getMock(); $connMock->expects($this->once()) ->method('prepare')->willReturn($sthMock); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $key = '00000000000000000000000000000000'; $item = ['version' => 1]; $updatedValue = ['text' => 'Howdy', 'html' => '<div class="greeting">Howdy</div>']; $this->collection->updateItem($key, $updatedValue, $item['version']); } public function testDeleteItem() { $value = ['text' => 'Hello', 'html' => '<div class="greeting">Hello</div>']; $key = $this->collection->putItem($value); $this->collection->deleteItem($key); $item = $this->collection->getItem($key); $this->assertNotEmpty($key); $this->assertNull($item); } public function testDeleteItemNotFound() { $this->expectException(OpaqueCollectionException::class); $this->collection->deleteItem('keynotfound'); } public function testDeleteItemError() { $sthMock = $this->getMockBuilder(\PDOStatement::class) ->setMethods(['execute', 'rowCount']) ->getMock(); $sthMock->expects($this->once())->method('execute') ->willReturn(false); $sthMock->expects($this->never())->method('rowCount'); $connMock = $this->getMockBuilder(\PDO::class) ->disableOriginalConstructor() ->setMethods(['prepare']) ->getMock(); $connMock->expects($this->once()) ->method('prepare')->willReturn($sthMock); $this->expectException(OpaqueCollectionException::class); $this->collection = new OpaqueCollection($connMock, $this->table); $this->collection->deleteItem('keynotfound'); } protected function setUp() { $this->table = 'items'; $this->collection = new OpaqueCollection(self::$pdo, $this->table); $this->collection->initialize(); } }
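The repository's README stops at a one-line description, so the following is a minimal usage sketch of `Quiklyn\OpaqueCollection`, pieced together from the class and its PHPUnit tests above; the DSN, credentials, autoload path and sample payloads are placeholders, not part of the library.

```php
<?php
// Minimal sketch, assuming a Composer autoloader and a reachable MySQL server.
require __DIR__ . '/vendor/autoload.php';

use Quiklyn\OpaqueCollection;

$pdo = new PDO('mysql:host=127.0.0.1;dbname=example;charset=utf8', 'user', 'secret');

// Optional 3rd/4th arguments pick the encoding, as exercised in the tests:
// FORMAT_JSON or FORMAT_MSGPACK, optionally combined with COMPRESSION_GZIP.
$collection = new OpaqueCollection($pdo, 'items', OpaqueCollection::FORMAT_JSON);
$collection->initialize();                              // prepare the backing table

$key  = $collection->putItem(['text' => 'Hello']);      // returns the generated hex key
$item = $collection->getItem($key);                     // ['value' => ..., 'version' => ..., ...]

// Updates are optimistic: the stored version must still match, otherwise an
// OpaqueCollectionException is thrown.
$collection->updateItem($key, ['text' => 'Howdy'], $item['version']);

$collection->deleteItem($key);                          // soft delete (sets is_deleted = 1)
```

The `version` column is what keeps concurrent writers safe here: `updateItem` checks and bumps it inside the same UPDATE, so a stale writer fails instead of silently overwriting.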
0d11bdd9e12e1eb43fbde8c05e580ba7ae62760a
[ "Markdown", "Makefile", "PHP" ]
5
PHP
callemo/opaque-collection
cf807acd93fcbc6bb9ca76c89380ac88ce0c83cf
c65979f2debd184fc1a2ff0979bdce4a7fb1ab3d
refs/heads/master
<repo_name>lakrisen/TEST<file_sep>/Folder/main.c #include <stdio.h> #include <stdlib.h> #include <string.h> #include <stdbool.h> #include <avr/io.h> #include <util/delay.h> #include <avr/interrupt.h> #include <avr/pgmspace.h> #include "uart.h" #include "line_buffer.h" #include "circ_buffer.h" #include "encoder.h" #include "controller.h" //#include "motor.h" #include "sysid.h" #include "motor_test.h" static LB_T lb; // TODO: Replace these with receive buffer declaration static volatile char ch; static volatile bool ch_waiting; int main(void) { float x = 0.0; float y = 0.0; float xy = 0.0; float theta = 0; //actual pendulumn angle float vel = 0; //actual cart veloctiy float velref = 0; //desired cart velocity float ctrl = 0; //output control signal float ctrl_temp = 0; int log_count = 0; float time = 0; int motor_dir = 0; //Initialise the things lb_init(&lb); DDRD |= (1<<6); uart_init(); // init USART sei(); // enable interrupts enc_init(); ctrl_init(); PWM_init(); //motor_test(); printf_P(PSTR("Hello World\n")); //motor_init(); sysid_init(); // Wait a second at startup _delay_ms(1000); // send initial string printf_P(PSTR("Mr. Bond, what would you like to do today?!\n")); //Infinite loop for (;/*ever*/;) { /* if (event_count > 0) // Operation pending, do it right now (high priority) { event_count = 0; // Reset counter if(log_count>0){ printf_P(PSTR("%f ,%f\n"),time, adc_read()); // TODO: Handle sampling and sending of one line of data // Note: If we have sent the number requested samples, // disable the data logging time +=0.01; log_count--; if(log_count ==0){TIMSK &=~(1<<OCIE0); time = 0;} } if (event_count > 0) { // If we ever get here, the sampling/sending step above is taking // too long to fit in one sampling interval! printf_P(PSTR("ERROR: Sampling interval too long\n")); // TODO: Print an error message and disable the data logging } } else // No pending operation, do low priority tasks { // TODO: Handle serial command processing here */ // Do some pretend processing //_delay_ms(250); while (uart_avail()) { char c = uart_getc(); // TODO: Transfer character to line buffer and handle overflow. //lb_append(&lb, c); // TODO: Process command if line buffer is ready ... if (lb_append(&lb, c) == LB_BUFFER_FULL) { lb_init(&lb); // Clear line printf_P(PSTR("\nMax line length exceeded\n")); } } // Process command if line buffer is terminated by a line feed or carriage return if (lb_line_ready(&lb)) { // Note: The following is a terrible way to process strings from the user // See recommendations section of the lab guide for a better way to // handle commands with arguments, which scales well to a large // number of commands. if (!strncmp_P(lb_gets(&lb), PSTR("help"), 4)) //use lbgets to read data from the buffer and compare { printf_P(PSTR( "MCHA3000 Serial help.\n" "Instruction Table.\n" "x = (set value of x)\n" "x? (print value of x)\n" "count? 
(query encoder count)\n" "reset (reset encoder count)\n" )); } /* else if (!strncmp_P(lb_gets(&lb), PSTR("logv0 "), 6)) //Set the X value { log_count = atof(lb_gets_at(&lb, 6)); TIMSK |= 1<<OCIE0; //enable interrupt on compare printf_P(PSTR("Time (sec), Voltage (V)\n")); //printf_P(PSTR("x set to %f\n"), x); }*/ else if (!strncmp_P(lb_gets(&lb), PSTR("x="), 2)) //Set the X value { x = atof(lb_gets_at(&lb, 2)); //printf_P(PSTR("x set to %f\n"), x); } else if (!strncmp_P(lb_gets(&lb), PSTR("x?"), 2)) //query the X value { printf_P(PSTR("x is %f\n"), x); } else if (!strncmp_P(lb_gets(&lb), PSTR("y="), 2)) //set the value of Y value { y = atof(lb_gets_at(&lb, 2)); //printf_P(PSTR("y set to %f\n"), y); } else if (!strncmp_P(lb_gets(&lb), PSTR("xy?"), 3)) //set the value of XxY value { xy = x*y; printf_P(PSTR("xy is %f\n"), xy); } else if (!strncmp_P(lb_gets(&lb), PSTR("theta="), 6)) //Set the actual theta value { theta = atof(lb_gets_at(&lb, 6)); } else if (!strncmp_P(lb_gets(&lb), PSTR("vel="), 4)) //Set the actual cart velocity value { vel = atof(lb_gets_at(&lb, 4)); } else if (!strncmp_P(lb_gets(&lb), PSTR("velref="), 7)) //Set the requested value for velocity { velref = atof(lb_gets_at(&lb, 7)); } else if (!strncmp_P(lb_gets(&lb), PSTR("ctrl?"), 5)) //compute the control { ctrl_temp = velocity_controller(velref-vel); //printf_P(PSTR("%g\n"), ctrl_temp); ctrl = angle_controller(ctrl_temp-theta); printf_P(PSTR("%g\n"), ctrl); } else if (!strncmp_P(lb_gets(&lb), PSTR("count?"), 6)) //query the count { printf_P(PSTR("count1 = %d\n"), enc1_read()); printf_P(PSTR("count2 = %d\n"), enc2_read()); } else if (!strncmp_P(lb_gets(&lb), PSTR("reset"), 5)) //reset the encoder { enc_reset(); printf_P(PSTR("count1 = %d\n"), enc1_read()); printf_P(PSTR("count2 = %d\n"), enc2_read()); } else if (!strncmp_P(lb_gets(&lb), PSTR("motor "), 5)) //display atmega pin configuration { //printf_P(PSTR("current = %f\n"), motor_current()); //printf_P(PSTR("adc = %d\n"), test()); printf_P(PSTR("Motor Command Accepted\n")); motor_dir = atof(lb_gets_at(&lb, 5)); motor_test(motor_dir); } else if (!strncmp_P(lb_gets(&lb), PSTR("pin"), 5)) //display atmega pin configuration { printf_P(PSTR( "Pin Configuration.\n" " Status LED: (XCK/T0)PB0 <-> 1 +---\\_/---+40 <-- PA0(ADC0) :Angle potentiometer\n" " (T1)PB1 <-> 2 | |39 <-- PA1(ADC1) :Z accelerometer\n" " (INT2/AIN0)PB2 <-> 3 | A |38 <-- PA2(ADC2) :X accelerometer\n" " (OC0/AIN1)PB3 <-> 4 | T |37 <-- PA3(ADC3) :Y gyro\n" " (!SS)PB4 <-> 5 | M |36 <-- PA4(ADC4) :Y gyro (4.5x sens.)\n" " (MOSI)PB5 <-> 6 | E |35 <-> PA5(ADC5)\n" " (MISO)PB6 <-> 7 | L |34 <-> PA6(ADC6)\n" " (SCK)PB7 <-> 8 | |33 <-- PA7(ADC7) :Gyro reference\n" " !RESET --> 9 | A |32 <-- AREF :3.3V\n" " VCC --- 10| T |31 --- GND\n" " GND --- 11| m |30 --- AVCC\n" " XTAL2 <-- 12| e |29 <-> PC7(TOSC2)\n" " XTAL1 --> 13| g |28 <-> PC6(TOSC1)\n" " (RXD)PD0 --> 14| a |27 <-> PC5(TDI)\n" " (TXD)PD1 <-- 15| 3 |26 <-> PC4(TDO)\n" " E-Stop: (INT0)PD2 --> 16| 2 |25 <-> PC3(TMS)\n" " Limit switch: (INT1)PD3 --> 17| |24 <-> PC2(TCK)\n" " (OC1B)PD4 <-> 18| |23 <-> PC1(SDA)\n" " (OC1A)PD5 <-> 19| |22 --> PC0(SCL) :SyRen 10 S2 (dir.)\n" " Az IMU5: (ICP1)PD6 <-- 20+---------+21 --> PD7(OC2) :SyRen 10 S1 (speed)\n" )); } else { printf_P(PSTR("Unknown command: \"%s\"\n"), lb_gets(&lb)); } // Reset line buffer lb_init(&lb); } } return 0; }
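The comment inside main() above defers to the lab guide for a command handler that "scales well to a large number of commands" without showing one. A common shape for that idea is a table of command keywords and handler functions, sketched below; the handler names and table layout are illustrative only, and plain strncmp/strlen are used instead of the AVR strncmp_P/PSTR flash variants to keep the sketch portable.

```c
#include <stdio.h>
#include <string.h>

/* One entry per serial command: keyword plus a handler that receives the
   remainder of the line (its arguments), mirroring the "count?", "reset"
   and "motor <dir>" commands handled by the if/else chain above. */
typedef void (*cmd_handler_t)(const char *args);

typedef struct {
    const char   *name;
    cmd_handler_t handler;
} command_t;

static void cmd_count(const char *args) { (void)args; /* print encoder counts here */ }
static void cmd_reset(const char *args) { (void)args; /* reset encoder counts here */ }
static void cmd_motor(const char *args) { (void)args; /* parse motor direction from args here */ }

static const command_t commands[] = {
    { "count?", cmd_count },
    { "reset",  cmd_reset },
    { "motor",  cmd_motor },
};

/* Compare the start of the line against each keyword and dispatch. */
static void process_line(const char *line)
{
    for (size_t i = 0; i < sizeof commands / sizeof commands[0]; i++) {
        size_t len = strlen(commands[i].name);
        if (strncmp(line, commands[i].name, len) == 0) {
            commands[i].handler(line + len);
            return;
        }
    }
    printf("Unknown command: \"%s\"\n", line);
}
```

Adding a new command then becomes a single table entry rather than another else-if branch in main().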
69ed1d15ec8a543133284b3c9a4818fbea79d89d
[ "C" ]
1
C
lakrisen/TEST
34b70568d55ae16832ae1f7c94606a047d5efddd
9e3ff75e53ae1216e5829cef5de498b602ff0596
refs/heads/master
<repo_name>JohnnyLeWebDeveloper/Project-Spotify<file_sep>/assets/js/script.js const currentPlaylist = array(); const audioElement; function Audio() { // Keeps track of Currently Playing Songs this.currentlyPlaying; // Contain Audio Object this.audio = document.createElement('audio'); // Function that is called when we want to setup a new song this.setTrack = function(src) { this.audio.src = src; } } <file_sep>/assets/sql files/genres.sql /* Create Genres Table */ CREATE TABLE `project_spotify`.`genres` ( `id` INT NOT NULL AUTO_INCREMENT, `name` VARCHAR(255) NOT NULL, PRIMARY KEY (`id`) ) ENGINE = InnoDB; /* INSERT INTO Statements */ INSERT INTO `genres` (`id`, `name`) VALUES (NULL, 'House'), (NULL, 'Pop'), (NULL, 'Rock'), (NULL, 'Hip-Hop'), (NULL, 'Rap'), (NULL, 'Classical'), (NULL, 'Jazz'), (NULL, 'Electronic Dance'), (NULL, 'Popular'), (NULL, 'Punk Rock'), (NULL, 'Heavy Metal'), (NULL, 'Techno'), (NULL, 'Disco'), (NULL, 'Dance'), (NULL, 'Indie Rock'); <file_sep>/assets/sql files/Songs.sql /* Create Songs Table */ CREATE TABLE `project_spotify`.`Songs` ( `id` INT NOT NULL AUTO_INCREMENT, `title` VARCHAR(255) NOT NULL, `artist` INT NOT NULL, `album` INT NOT NULL, `genre` INT NOT NULL, `duration` VARCHAR(8) NOT NULL, `path` VARCHAR(500) NOT NULL, `albumOrder` INT NOT NULL, `plays` INT NOT NULL, PRIMARY KEY (`id`) ) ENGINE = InnoDB; /* Song 1 */ INSERT INTO `songs` ( `id`, `title`, `artist`, `album`, `genre`, `duration`, `path`, `albumOrder`, `plays` ) VALUES ( NULL, '<NAME> - So Excited (Calippo Remix)', '1', '1', '1', '3:25', 'assets/music/selected01.mp3', '1', '0' ) /* Song 2 */ INSERT INTO `songs` ( `id`, `title`, `artist`, `album`, `genre`, `duration`, `path`, `albumOrder`, `plays` ) VALUES ( NULL, 'CID - Bad For Me (ft. Sizzy Rocket)', '2', '2', '1', '2:33', 'assets/music/selected02.mp3', '2', '0' ) /* Song 3 */ INSERT INTO `songs` ( `id`, `title`, `artist`, `album`, `genre`, `duration`, `path`, `albumOrder`, `plays` ) VALUES ( NULL, '<NAME> - Right Amount Of Wrong (TRU Concept Remix)', '3', '3', '1', '3:19', 'assets/music/selected03.mp3', '3', '0' ) /* Song 4 */ INSERT INTO `songs` ( `id`, `title`, `artist`, `album`, `genre`, `duration`, `path`, `albumOrder`, `plays` ) VALUES ( NULL, 'Anto & <NAME> - With You', '4', '4', '1', '2:42', 'assets/music/selected04.mp3', '4', '0' ) <file_sep>/register.php <?php include ("includes/config.php"); include ("includes/classes/Account.php"); include ("includes/classes/Constants.php"); $account = new Account($con); include ("includes/handlers/register-handler.php"); include ("includes/handlers/login-handler.php"); function getInputValue($name) { if (isset($_POST[$name])) { echo $_POST[$name]; } } if (isset($_POST['registerButton'])) { echo '<script> $(document).ready(function() { $("#loginForm").hide(); $("#registerForm").show(); }); </script>'; } else { echo '<script> $(document).ready(function() { $("#loginForm").show(); $("#registerForm").hide(); }); </script>'; } ?> <!DOCTYPE html> <html> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width"> <title>Welcome to Project Spotify!</title> <!--Font Awesome 5--> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.10/css/all.css" integrity="<KEY>" crossorigin="anonymous"> <link href="css/register.css" rel="stylesheet" type="text/css" /> <!--jQuery--> <script src="https://code.jquery.com/jquery-3.3.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script> </head> <body> <div id="background"> <div id="loginContainer"> <div id="inputContainer"> <form 
action="register.php" id="loginForm" method="POST"> <h2>Login to your account</h2> <p> <?php echo $account->getError(Constants::$loginFailed); ?> <label for="loginUsername">Username</label> <input type="text" id="loginUsername" name="loginUsername" placeholder="User Name" value="<?php getInputValue('loginUsername') ?>" required> </p> <p> <label for="loginPassword">Password</label> <input type="<PASSWORD>" id="loginPassword" name="loginPassword" placeholder="<PASSWORD>" required> </p> <button type="submit" name="loginButton">Log In</button> <div class="hasAccountText"> <span id="hideLogin"> Don't have an account yet? Signup here. </span> </div> </form> <form action="register.php" id="registerForm" method="POST"> <h2>Create your free account</h2> <p> <?php echo $account->getError(Constants::$usernameCharacters); ?> <?php echo $account->getError(Constants::$usernameTaken); ?> <label for="username">Username</label> <input type="text" id="username" name="username" placeholder="<NAME>" value="<?php getInputValue('username') ?>" required> </p> <p> <?php echo $account->getError(Constants::$firstNameCharacters); ?> <label for="firstName">First Name</label> <input type="text" id="firstName" name="firstName" placeholder="First Name" value="<?php getInputValue('firstName') ?>" required> </p> <p> <?php echo $account->getError(Constants::$lastNameCharacters); ?> <label for="lastName">Last Name</label> <input type="text" id="lastName" name="lastName" placeholder="Last Name" value="<?php getInputValue('lastName') ?>" required> </p> <p> <?php echo $account->getError(Constants::$emailsDoNotMatch); ?> <?php echo $account->getError(Constants::$emailInvalid); ?> <?php echo $account->getError(Constants::$emailTaken); ?> <label for="email">Email</label> <input type="email" id="email" name="email" placeholder="Email" value="<?php getInputValue('email') ?>" required> </p> <p> <label for="email2">Confirm email</label> <input type="email" id="email2" name="email2" placeholder="Confirm email" value="<?php getInputValue('email2') ?>" required> </p> <p> <?php echo $account->getError(Constants::$passwordsDoNotMatch); ?> <?php echo $account->getError(Constants::$passwordNotAlphanumeric); ?> <?php echo $account->getError(Constants::$passwordCharacters); ?> <label for="password">Password</label> <input type="<PASSWORD>" id="password" name="password" placeholder="Enter password" required> </p> <p> <label for="password2">Confirm password</label> <input type="<PASSWORD>" id="password2" name="password2" placeholder="Enter <PASSWORD>" required> </p> <button type="submit" name="registerButton">SIGN UP</button> <div class="hasAccountText"> <span id="hideRegister"> Already have an account? Log in here. </span> </div> </form> </div> <div id="loginText"> <h1>Music for everyone.</h1> <h2>What's on Spotify?</h2> <h3>Music</h3> <p class="music"> There are millions of songs on Spotify. Play your favorites, discover new tracks, and build the perfect collection. </p> <h3>Playlists</h3> <p class="music"> You’ll find readymade playlists to match your mood, put together by music fans and experts. </p> <h3>New Releases</h3> <p class="music"> Hear this week’s latest singles and albums, and check out what’s hot in the Top 50. 
</p> </div> </div> <script src="js/register.js"></script> <script src="js/script.js"></script> </body> </html> <file_sep>/assets/sql files/albums.sql /* Create Album Table */ CREATE TABLE `project_spotify`.`albums` ( `id` INT NOT NULL AUTO_INCREMENT, `title` VARCHAR(255) NOT NULL, `artist` INT NOT NULL, `genre` INT NOT NULL, `artworkPath` VARCHAR(500) NOT NULL, PRIMARY KEY (`id`) ) ENGINE = InnoDB; /* Single INSERT INTO Statement */ INSERT INTO `albums` ( `id`, `title`, `artist`, `genre`, `artworkPath` ) VALUES ( NULL, 'So Excited (Calippo Remix)', '1', '1', 'assets/images/artwork/selected01.JPG' ); /* INSERT INTO Statements */ INSERT INTO `albums` ( `id`, `title`, `artist`, `genre`, `artworkPath` ) VALUES ( NULL, 'So Excited (Calippo Remix)', '1', '1', 'assets/images/artwork/selected01.JPG' ), ( NULL, 'Bad For Me (ft. Sizzy Rocket)', '2', '1', 'assets/images/artwork/selected02.JPG' ), ( NULL, 'Right Amount Of Wrong (TRU Concept Remix)', '3', '1', 'assets/images/artwork/selected03.JPG' ), ( NULL, 'With You', '4', '1', 'assets/images/artwork/selected04.JPG' ); <file_sep>/assets/sql files/artists.sql /* Create Artists Table */ CREATE TABLE `project_spotify`.`artists` ( `id` INT NOT NULL AUTO_INCREMENT, `name` VARCHAR(255) NOT NULL, PRIMARY KEY (`id`) ) ENGINE = InnoDB; /* INSERT INTO Statements */ INSERT INTO `artists` (`id`, `name`) VALUES (NULL, '<NAME>') INSERT INTO `artists` (`id`, `name`) VALUES (NULL, 'CID') INSERT INTO `artists` (`id`, `name`) VALUES (NULL, '<NAME>') INSERT INTO `artists` (`id`, `name`) VALUES (NULL, 'Anto & <NAME>') <file_sep>/includes/config.php <?php ob_start(); session_start(); $timezone = date_default_timezone_set("America/New_York"); $con = mysqli_connect("localhost", "root", "root", project_spotify); if(mysqli_connect_error()) { echo "Failed to connect: " . mysqli_connect_error(); } ?> <file_sep>/assets/sql files/users.sql SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET time_zone = "+00:00"; -- -- Database: `project_spotify` -- -- -------------------------------------------------------- -- -- Table structure for table `users` -- CREATE TABLE `users` ( `id` int(11) NOT NULL, `username` varchar(255) NOT NULL, `firstName` varchar(255) NOT NULL, `lastName` varchar(255) NOT NULL, `email` varchar(255) NOT NULL, `password` varchar(255) NOT NULL, `signUpDate` datetime NOT NULL, `profilePic` varchar(500) NOT NULL ) ENGINE = InnoDB DEFAULT CHARSET = utf8; -- -- -- Indexes for dumped tables -- -- -- Indexes for table `users` -- ALTER TABLE `users` ADD PRIMARY KEY (`id`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `users` -- ALTER TABLE `users` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT = 2;
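script.js at the top of this project declares currentPlaylist and audioElement but never wires the Audio wrapper into the page (as written, `const audioElement;` would need an initial value before the script even parses). Below is a minimal sketch of how the wrapper could be driven; the playTrack helper is hypothetical, and the hard-coded path simply matches one of the rows seeded by assets/sql files/Songs.sql.

```javascript
// Sketch only: playTrack is a hypothetical helper, not part of script.js.
var audioElement = new Audio(); // script.js's Audio() wrapper (it shadows the built-in constructor)

function playTrack(path) {
  audioElement.setTrack(path);          // points the wrapper's <audio> element at the file
  audioElement.currentlyPlaying = path; // the wrapper only stores this value, nothing more
  audioElement.audio.play();            // standard HTMLMediaElement playback
}

playTrack('assets/music/selected01.mp3');
```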
974a45cb45a9be8657ef52429029237d8999f537
[ "JavaScript", "SQL", "PHP" ]
8
JavaScript
JohnnyLeWebDeveloper/Project-Spotify
0f52d77fdb8839c5806f6e245ab078153b78cce6
1d7d1bfd9c2cd868cb7dc20685e9a5a65e785a39
refs/heads/master
<file_sep>#include <iostream>
#include <cmath>
using namespace std;

int main()
{
    int a, b;     // a caps the position index; b is the budget of allowed digit changes
    char n[100];  // digit string, rewritten in place
    cin >> a >> b;
    cin >> n;

    // Force the leading digit to '1' and every following visited digit to '0'.
    // A position that already holds the target digit does not spend the budget:
    // b is incremented instead, letting the loop reach one more position.
    int i = 0;
    while (i < b && i < a)
    {
        if (i == 0 && n[i] == '1') b++;
        else if (i == 0 && n[i] != '1') n[i] = '1';
        else if (n[i] == '0') b++;
        else n[i] = '0';
        i++;
    }

    // Example trace: a = 5, b = 2, n = "11111" -> "10011" (two digits changed).
    cout << n;
    return 0;
}
63107aedb1cc3ffd014c2ed11334725dadabcf5f
[ "C++" ]
1
C++
rishabhamu/2019PGCACA40
989c98e1d7a818f755a29953abe21b10c4d0c2a3
838d4c776c9872fe850ca95820dd08a366827227
refs/heads/master
<repo_name>zhublik/fit_parser<file_sep>/fit_parser.gemspec # coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'fit_parser/version' Gem::Specification.new do |spec| spec.name = 'fit_parser' spec.version = FitParser::VERSION spec.authors = ['<NAME>'] spec.email = ['<EMAIL>'] spec.summary = '' spec.description = 'handles FIT files' spec.homepage = 'https://github.com/zhublik/fit_parser' spec.license = 'MIT' spec.files = Dir['{lib,spec}/**/*'] spec.executables = spec.files.grep(%r{^bin/[^.]+$}) { |f| File.basename(f) } spec.require_paths = ['lib'] spec.add_runtime_dependency 'bindata', '2.2.0' spec.add_runtime_dependency 'activesupport' spec.add_development_dependency 'bundler', '~> 1.10' spec.add_development_dependency 'rake', '~> 13.0' spec.add_development_dependency 'rspec' spec.add_development_dependency 'rspec-its' spec.add_development_dependency 'warder' spec.add_development_dependency 'pry' spec.add_development_dependency 'simplecov' end <file_sep>/spec/file/record_spec.rb require 'spec_helper' describe FitParser::File::Record do describe '#read' do context 'given a sample definition record' do it 'works' do record = described_class.new({}, {}) file = example_file('record/definition_record') record.read(file) expect(record.header).to be_a(FitParser::File::RecordHeader) expect(record.content).to be_a(FitParser::File::Definition) end end context 'given a sample data record with a string non null terminated' do context 'string length is equal to field size' do it 'works' do record = described_class.new({}, {}) record.read(example_file('record/definition_record_2.fit')) definitions = record.definitions file = example_file('record/data_record_2.fit') record = described_class.new(definitions, {}).read(file) expect(record.header).to be_a(FitParser::File::RecordHeader) expect(record.content.raw_version).to eql(250) expect(record.content.raw_part_number).to eql('123-A1234-00') end end context 'string length is smaller than field size' do it 'works' do record = described_class.new({}, {}) record.read(example_file('record/definition_record_2.fit')) definitions = record.definitions file = example_file('record/data_record_2bis.fit') record = described_class.new(definitions, {}).read(file) expect(record.header).to be_a(FitParser::File::RecordHeader) expect(record.content.raw_version).to eql(251) expect(record.content.version).to eql(2.51) expect(record.content.raw_part_number).to eql('123-A1234') expect(record.content.part_number).to eql('123-A1234') end end end end end <file_sep>/spec/fit_parser_spec.rb require 'spec_helper' describe FitParser do describe 'VERSION' do subject{ FitParser::VERSION } it { is_expected.to be_a(String) } it { is_expected.to match(/\d{1,2}\.\d{1,2}\.\d{1,2}/) } end describe '.load_file' do it 'works with threads' do threads = [] %w( be470628-c34a-4189-aae3-42bef36436ce.fit fc84b277-68af-4d63-ac8d-fb8e162ab2a2.fit ).each do |file| threads << Thread.new(file) do |el| data = FitParser.load_file("spec/support/examples/file/#{el}") expect(data.records).to_not be_nil end end threads.each { |thread| thread.join } end it 'works with threads' do threads = [] %w(3863374146 3110334490).each do |file| threads << Thread.new(file) do |el| data = FitParser.load_file("spec/support/examples/file/#{el}") expect(data.records).to_not be_nil end end threads.each { |thread| thread.join } end # sometimes doesn't work because of multithreads issues # it 'works with threads' do # threads = [] # %w( # 
f334a45f-cd2b-40d8-9c46-b4c34fe9e9dd. # 598363e9-3121-4f24-8b6f-b1368a269e8f. # 7edbeca2-0c62-42b6-9e7a-e869b1e5ccd0. # dbcdf1a1-3ce0-4e71-a409-bec64c6043b0. # fcc17445-79ec-43bc-9f7f-ea9324992cd2. # ).each do |file| # threads << Thread.new(file) do |el| # data = FitParser.load_file("spec/support/examples/file/#{el}") # expect(data.records).to_not be_nil # end # end # threads.each { |thread| thread.join } # end end it 'works without threads' do path = 'spec/support/examples/file/fc84b277-68af-4d63-ac8d-fb8e162ab2a2.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end context 'with IQ datafields' do it 'works 1375670253.fit' do path = 'spec/support/examples/1375670253.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 1379311720.fit' do path = 'spec/support/examples/1379311720.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 6AUI5200.FIT' do path = 'spec/support/examples/6AUI5200.FIT' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 1426768070-2.fit' do path = 'spec/support/examples/1426768070-2.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end end it 'works' do %w( f334a45f-cd2b-40d8-9c46-b4c34fe9e9dd. 598363e9-3121-4f24-8b6f-b1368a269e8f. 7edbeca2-0c62-42b6-9e7a-e869b1e5ccd0. dbcdf1a1-3ce0-4e71-a409-bec64c6043b0. fcc17445-79ec-43bc-9f7f-ea9324992cd2. ).each do |el| data = FitParser.load_file("spec/support/examples/file/#{el}") expect(data.records).to_not be_nil end end it 'works 7439451200' do path = 'spec/support/examples/file/7439451200' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 31675356730' do path = 'spec/support/examples/file/31675356730' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works for suunto' do Dir.glob('spec/support/examples/suunto-app/*.fit').each do |f| data = FitParser.load_file(f) expect(data.records).to_not be_nil end end it 'works 7420309810' do path = 'spec/support/examples/file/7420309810' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 7348726805' do path = 'spec/support/examples/file/7348726805' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 16050359900.fit' do path = 'spec/support/examples/file/16050359900.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 18119560227.fit' do path = 'spec/support/examples/file/18119560227.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 19205208205.fit' do path = 'spec/support/examples/file/19205208205.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 23489915119.fit' do path = 'spec/support/examples/file/23489915119.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end it 'works 24093026216.fit' do path = 'spec/support/examples/file/24093026216.fit' data = FitParser.load_file(path) expect(data.records).to_not be_nil end end <file_sep>/README.md # FitParser [![Build Status](https://travis-ci.com/dimameshcharakou/fit_parser.svg?branch=master)](https://travis-ci.com/dimameshcharakou/fit_parser) ## Example usage ```ruby require 'fit_parser' fit_file = FitParser.load_file(ARGV[0]) records = fit_file.records.select{ |r| r.content.record_type != :definition }.map{ |r| r.content } ``` <file_sep>/lib/fit_parser/file/definition.rb module FitParser class File class Definition < BinData::Record class DevField < 
BinData::Record uint8 :field_number uint8 :field_size uint8 :developer_data_index attr_accessor :base_type_number, :name, :scale def raw_name "raw_#{name}" end def dyn_data nil end def type case base_type_number when 0 # enum build_int_type 8, false when 1 # sint8 build_int_type 8, true when 2 # uint8 build_int_type 8, false when 131 # sint16 build_int_type 16, true when 132 # uint16 build_int_type 16, false when 133 # sint32 build_int_type 32, true when 134 # uint32 build_int_type 32, false when 7 # string # some cases found where string has the max field length # and is therefore not null terminated @length = 1 'string' when 136 # float32 @length = 4 'float' when 137 # float64 @length = 8 'double' when 10 # uint8z build_int_type 8, false when 139 # uint16z build_int_type 16, false when 140 # uint32z build_int_type 32, false when 13 # array of bytes build_int_type 8, false when 142 # sint64 build_int_type 64, true when 143 # uint64 build_int_type 64, false when 144 # uint64z build_int_type 64, false else fail "Can't map base_type_number #{base_type_number} to a data type" end end alias :real_type :type # return the length in byte of the given type def length @length end private def build_int_type(length, signed) # @length is in byte not in bits, so divide by 8 @length = length / 8 (signed ? '' : 'u') << 'int' << length.to_s end end class Field < BinData::Record hide :reserved_bits uint8 :field_definition_number uint8 :field_size bit1 :endian_ability bit2 :reserved_bits bit5 :base_type_number def data @data ||= Definitions.get_field(parent.parent.global_message_number.snapshot, field_definition_number.snapshot) || { name: "field_#{field_definition_number.snapshot}", scale: nil } end def dyn_data @dyn_data ||= Definitions.get_dynamic_fields(parent.parent.global_message_number.snapshot, field_definition_number.snapshot) end def name data[:name] end def raw_name "raw_#{name}" end def scale data[:scale] end def real_type data[:type] end def type case base_type_number.snapshot when 0 # enum build_int_type 8, false when 1 build_int_type 8, true when 2 build_int_type 8, false when 3 build_int_type 16, true when 4 build_int_type 16, false when 5 build_int_type 32, true when 6 build_int_type 32, false when 7 # some cases found where string has the max field length # and is therefore not null terminated @length = 1 'string' when 8 @length = 4 'float' when 9 @length = 8 'double' when 10 # uint8z build_int_type 8, false when 11 # uint16z build_int_type 16, false when 12 # uint32z build_int_type 32, false when 13 # array of bytes build_int_type 8, false when 14 # sint64 build_int_type 64, true when 15 # uint64 build_int_type 64, false when 16 # uint64z build_int_type 64, false else fail "Can't map base_type_number #{base_type_number} to a data type" end end # field_size is in byte def size field_size end # return the length in byte of the given type def length @length end private def build_int_type(length, signed) # @length is in byte not in bits, so divide by 8 @length = length / 8 (signed ? '' : 'u') << 'int' << length.to_s end end skip length: 1 bit8 :architecture choice :global_message_number, selection: :architecture do uint16le 0 uint16be 1 end bit8 :field_count array :fields_arr, type: Field, initial_length: :field_count bit8 :dev_field_count, :onlyif => :dev_data_flag? array :dev_fields_arr, type: DevField, initial_length: :dev_field_count, :onlyif => :dev_data_flag? 
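      # The declarations above mirror the layout of a FIT definition message:
      # `architecture` selects the byte order, `global_message_number` is then
      # read with the matching endianness, and `fields_arr` holds one Field per
      # declared field. `dev_fields_arr` (developer data fields) is only parsed
      # when a dev_data_flag attribute was passed to Definition.read -- see
      # dev_data_flag? below, which checks Definition.attributes[:dev_data_flag].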
def self.attributes @attributes ||= Hash.new { |h, k| h[k] = {} } end def self.attributes=(val) @attributes = val end def self.read(io, attrs = {}) self.attributes = attrs super(io) end def endianness architecture.snapshot == 0 ? :little : :big end def record_type :definition end def dev_data_flag? dev_data_flag = Definition.attributes[:dev_data_flag] return true if dev_data_flag && dev_data_flag == 1 false end end end end <file_sep>/lib/bindata/dsl_field_validator.rb # Override DSLFieldValidator#ensure_valid_name from bindata/lib/dsl.rb # gem bindata # because we should not raise error on duplicate name module BinData module DSLMixin class DSLFieldValidator def ensure_valid_name(name) if name and not option?(:fieldnames_are_values) if malformed_name?(name) # raise NameError.new("", name), "field '#{name}' is an illegal fieldname" end if duplicate_name?(name) # raise SyntaxError, "duplicate field '#{name}'" end if name_shadows_method?(name) raise NameError.new("", name), "field '#{name}' shadows an existing method" end if name_is_reserved?(name) raise NameError.new("", name), "field '#{name}' is a reserved name" end end end end end end <file_sep>/lib/fit_parser/file/types.rb module FitParser class File module Types def self.types=(value) @types = value end def self.types @types ||= {} end def self.add_type(name, type, option = {}) Types.types[name] = option.merge(basic_type: type) end def self.get_type_definition(name) return Types.types[name] if Types.types.has_key?(name) nil end def self.date_time_value(time, values, parameters) val = values.invert if time < val['min'] time.to_s else res= parameters[:utc] ? Time.utc(1989, 12, 31) + time : Time.local(1989, 12, 31) + time res.to_s end end def self.message_index_value(msg_index, values, parameters = nil) val = values.invert msg_index & val['mask'] end def self.bitfield_value(bitfield, values, parameters = nil) res = '' values.each do |key, val| if key & bitfield != 0 res << '/' unless res.empty? 
res << val end end res end end end end # basic types FitParser::File::Types.add_type :enum, nil, invalid: 0xFF FitParser::File::Types.add_type :sint8, nil, invalid: 0x7F FitParser::File::Types.add_type :uint8, nil, invalid: 0xFF FitParser::File::Types.add_type :sint16, nil, invalid: 0x7FFF FitParser::File::Types.add_type :uint16, nil, invalid: 0xFFFF FitParser::File::Types.add_type :sint32, nil, invalid: 0x7FFFFFFF FitParser::File::Types.add_type :uint32, nil, invalid: 0xFFFFFFFF FitParser::File::Types.add_type :string, nil, invalid: 0x00 FitParser::File::Types.add_type :float32, nil, invalid: 0xFFFFFFFF FitParser::File::Types.add_type :float64, nil, invalid: 0xFFFFFFFFFFFFFFFF FitParser::File::Types.add_type :uint8z, nil, invalid: 0x00 FitParser::File::Types.add_type :uint16z, nil, invalid: 0x0000 FitParser::File::Types.add_type :uint32z, nil, invalid: 0x00000000 FitParser::File::Types.add_type :byte, nil, invalid: 0xFF FitParser::File::Types.add_type :sint64, nil, invalid: 0x7FFFFFFFFFFFFFFF FitParser::File::Types.add_type :uint64, nil, invalid: 0xFFFFFFFFFFFFFFFF FitParser::File::Types.add_type :uint64z, nil, invalid: 0x0000000000000000 # derived types FitParser::File::Types.add_type :file, :enum, values: { 1 => 'device', 2 => 'settings', 3 => 'sport', 4 => 'activity', 5 => 'workout', 6 => 'course', 7 => 'schedules', 9 => 'weight', 10 => 'totals', 11 => 'goals', 14 => 'blood_pressure', 15 => 'monitoring_a', 20 => 'activity_summary', 28 => 'monitoring_daily', 32 => 'monitoring_b', 34 => 'segment', 35 => 'segment_list', 40 => 'exd_configuration', 247 => 'mfg_range_min', 254 => 'mfg_range_max' } FitParser::File::Types.add_type :mesg_num, :uint16, values: { 0 => 'file_id', 1 => 'capabilities', 2 => 'device_settings', 3 => 'user_profile', 4 => 'hrm_profile', 5 => 'sdm_profile', 6 => 'bike_profile', 7 => 'zones_target', 8 => 'hr_zone', 9 => 'power_zone', 10 => 'met_zone', 12 => 'sport', 15 => 'goal', 18 => 'session', 19 => 'lap', 20 => 'record', 21 => 'event', 23 => 'device_info', 26 => 'workout', 27 => 'workout_step', 28 => 'schedule', 30 => 'weight_scale', 31 => 'course', 32 => 'course_point', 33 => 'totals', 34 => 'activity', 35 => 'software', 37 => 'file_capabilities', 38 => 'mesg_capabilities', 39 => 'field_capabilities', 49 => 'file_creator', 51 => 'blood_pressure', 53 => 'speed_zone', 55 => 'monitoring', 72 => 'training_file', 78 => 'hrv', 101 => 'length', 103 => 'monitoring_info', 105 => 'pad', 106 => 'slave_device', 131 => 'cadence_zone', 145 => 'memo_glob', 148 => 'segment_id', 149 => 'segment_leaderboard_entry', 150 => 'segment_point', 151 => 'segment_file', 159 => 'watchface_settings', 160 => 'gps_metadata', 161 => 'camera_event', 162 => 'timestamp_correlation', 164 => 'gyroscope_data', 165 => 'accelerometer_data', 167 => 'three_d_sensor_calibration', 169 => 'video_frame', 174 => 'obdii_data', 177 => 'nmea_sentence', 178 => 'aviation_attitude', 184 => 'video', 185 => 'video_title', 186 => 'video_description', 187 => 'video_clip', 188 => 'ohr_settings', 200 => 'exd_screen_configuration', 201 => 'exd_data_field_configuration', 202 => 'exd_data_concept_configuration', 206 => 'field_description', 207 => 'developer_data_id', 208 => 'magnetometer_data', 65280 => 'mfg_range_min', 65534 => 'mfg_range_max' } FitParser::File::Types.add_type :checksum, :uint8, values: { 0 => 'clear', 1 => 'ok' } FitParser::File::Types.add_type :file_flags, :uint8z, values: { 0x02 => 'read', 0x04 => 'write', 0x08 => 'erase' }, method: :bitfield_value FitParser::File::Types.add_type :mesg_count, :enum, values: 
{ 0 => 'num_per_file', 1 => 'max_per_file', 2 => 'max_per_file_type' } FitParser::File::Types.add_type :date_time, :uint32, values: { 268435456 => 'min' }, method: :date_time_value, parameters: {utc: true} FitParser::File::Types.add_type :local_date_time, :uint32, values: { 268435456 => 'min' }, method: :date_time_value, parameters: {utc: false} FitParser::File::Types.add_type :message_index, :uint16, values: { 32768 => 'selected', 26872 => 'reserved', 4095 => 'mask' }, method: :message_index_value FitParser::File::Types.add_type :device_index, :uint8, values: { 0 => 'creator' } FitParser::File::Types.add_type :gender, :enum, values: { 0 => 'female', 1 => 'male' } FitParser::File::Types.add_type :language, :enum, values: { 0 => 'english', 1 => 'french', 2 => 'italian', 3 => 'german', 4 => 'spanish', 5 => 'croatian', 6 => 'czech', 7 => 'danish', 8 => 'dutch', 9 => 'finnish', 10 => 'greek', 11 => 'hungarian', 12 => 'norwegian', 13 => 'polish', 14 => 'portuguese', 15 => 'slovakian', 16 => 'slovenian', 17 => 'swedish', 18 => 'russian', 19 => 'turkish', 20 => 'latvian', 21 => 'ukrainian', 22 => 'arabic', 23 => 'farsi', 24 => 'bulgarian', 25 => 'romanian', 26 => 'chinese', 27 => 'japanese', 28 => 'korean', 29 => 'taiwanese', 30 => 'thai', 31 => 'hebrew', 32 => 'brazilian_portuguese', 33 => 'indonesian', 254 => 'custom' } FitParser::File::Types.add_type :language_bits_0, :uint8z, values: { 0x01 => 'english', 0x02 => 'french', 0x04 => 'italian', 0x08 => 'german', 0x10 => 'spanish', 0x20 => 'croatian', 0x40 => 'czech', 0x80 => 'danish' } FitParser::File::Types.add_type :language_bits_1, :uint8z, values: { 0x01 => 'dutch', 0x02 => 'finnish', 0x04 => 'greek', 0x08 => 'hungarian', 0x10 => 'norwegian', 0x20 => 'polish', 0x40 => 'portuguese', 0x80 => 'slovakian' } FitParser::File::Types.add_type :language_bits_2, :uint8z, values: { 0x01 => 'slovenian', 0x02 => 'swedish', 0x04 => 'russian', 0x08 => 'turkish', 0x10 => 'latvian', 0x20 => 'ukrainian', 0x40 => 'arabic', 0x80 => 'farsi' } FitParser::File::Types.add_type :language_bits_3, :uint8z, values: { 0x01 => 'bulgarian', 0x02 => 'romanian', 0x04 => 'chinese', 0x08 => 'japanese', 0x10 => 'korean', 0x20 => 'taiwanese', 0x40 => 'thai', 0x80 => 'hebrew' } FitParser::File::Types.add_type :language_bits_4, :uint8z, values: { 0x01 => 'brazilian_portuguese', 0x02 => 'indonesian' } FitParser::File::Types.add_type :time_zone, :enum, values: { 0 => 'almaty', 1 => 'bangkok', 2 => 'bombay', 3 => 'brasilia', 4 => 'cairo', 5 => 'cape_verde_is', 6 => 'darwin', 7 => 'eniwetok', 8 => 'fiji', 9 => 'hong_kong', 10 => 'islamabad', 11 => 'kabul', 12 => 'magadan', 13 => 'mid_atlantic', 14 => 'moscow', 15 => 'muscat', 16 => 'newfoundland', 17 => 'samoa', 18 => 'sydney', 19 => 'tehran', 20 => 'tokyo', 21 => 'us_alaska', 22 => 'us_atlantic', 23 => 'us_central', 24 => 'us_eastern', 25 => 'us_hawaii', 26 => 'us_mountain', 27 => 'us_pacific', 28 => 'other', 29 => 'auckland', 30 => 'kathmandu', 31 => 'europe_western_wet', 32 => 'europe_central_cet', 33 => 'europe_eastern_eet', 34 => 'jakarta', 35 => 'perth', 36 => 'adelaide', 37 => 'brisbane', 38 => 'tasmania', 39 => 'iceland', 40 => 'amsterdam', 41 => 'athens', 42 => 'barcelona', 43 => 'berlin', 44 => 'brussels', 45 => 'budapest', 46 => 'copenhagen', 47 => 'dublin', 48 => 'helsinki', 49 => 'lisbon', 50 => 'london', 51 => 'madrid', 52 => 'munich', 53 => 'oslo', 54 => 'paris', 55 => 'prague', 56 => 'reykjavik', 57 => 'rome', 58 => 'stockholm', 59 => 'vienna', 60 => 'warsaw', 61 => 'zurich', 62 => 'quebec', 63 => 'ontario', 64 => 
'manitoba', 65 => 'saskatchewan', 66 => 'alberta', 67 => 'british_columbia', 68 => 'boise', 69 => 'boston', 70 => 'chicago', 71 => 'dallas', 72 => 'denver', 73 => 'kansas_city', 74 => 'las_vegas', 75 => 'los_angeles', 76 => 'miami', 77 => 'minneapolis', 78 => 'new_york', 79 => 'new_orleans', 80 => 'phoenix', 81 => 'santa_fe', 82 => 'seattle', 83 => 'washington_dc', 84 => 'us_arizona', 85 => 'chita', 86 => 'ekaterinburg', 87 => 'irkutsk', 88 => 'kaliningrad', 89 => 'krasnoyarsk', 90 => 'novosibirsk', 91 => 'petropavlovsk_kamchatskiy', 92 => 'samara', 93 => 'vladivostok', 94 => 'mexico_central', 95 => 'mexico_mountain', 96 => 'mexico_pacific', 97 => 'cape_town', 98 => 'winkhoek', 99 => 'lagos', 100 => 'riyahd', 101 => 'venezuela', 102 => 'australia_lh', 103 => 'santiago', 253 => 'manual', 254 => 'automatic' } FitParser::File::Types.add_type :display_measure, :enum, values: { 0 => 'metric', 1 => 'statute' } FitParser::File::Types.add_type :display_heart, :enum, values: { 0 => 'bpm', 1 => 'max', 2 => 'reserve' } FitParser::File::Types.add_type :display_power, :enum, values: { 0 => 'watts', 1 => 'percent_ftp' } FitParser::File::Types.add_type :display_position, :enum, values: { 0 => 'degree', 1 => 'degree_minute', 2 => 'degree_minute_second', 3 => 'austrian_grid', 4 => 'british_grid', 5 => 'dutch_grid', 6 => 'hungarian_grid', 7 => 'finnish_grid', 8 => 'german_grid', 9 => 'icelandic_grid', 10 => 'indonesian_equatorial', 11 => 'indonesian_irian', 12 => 'indonesian_southern', 13 => 'india_zone_0', 14 => 'india_zone_IA', 15 => 'india_zone_IB', 16 => 'india_zone_IIA', 17 => 'india_zone_IIB', 18 => 'india_zone_IIIA', 19 => 'india_zone_IIIB', 20 => 'india_zone_IVA', 21 => 'india_zone_IVB', 22 => 'irish_transverse', 23 => 'irish_grid', 24 => 'loran', 25 => 'maidenhead_grid', 26 => 'mgrs_grid', 27 => 'new_zealand_grid', 28 => 'new_zealand_transverse', 29 => 'qatar_grid', 30 => 'modified_swedish_grid', 31 => 'swedish_grid', 32 => 'south_african_grid', 33 => 'swiss_grid', 34 => 'taiwan_grid', 35 => 'united_states_grid', 36 => 'utm_ups_grid', 37 => 'west_malayan', 38 => 'borneo_rso', 39 => 'estonian_grid', 40 => 'latvian_grid', 41 => 'swedish_ref_99_grid' } FitParser::File::Types.add_type :switch, :enum, values: { 0 => 'off', 1 => 'on', 2 => 'auto' } FitParser::File::Types.add_type :sport, :enum, values: { 0 => 'generic', 1 => 'running', 2 => 'cycling', 3 => 'transition', 4 => 'fitness_equipment', 5 => 'swimming', 6 => 'basketball', 7 => 'soccer', 8 => 'tennis', 9 => 'american_football', 10 => 'training', 11 => 'walking', 12 => 'cross_country_skiing', 13 => 'alpine_skiing', 14 => 'snowboarding', 15 => 'rowing', 16 => 'mountaineering', 17 => 'hiking', 18 => 'multisport', 19 => 'paddling', 20 => 'flying', 21 => 'e_biking', 22 => 'motorcycling', 23 => 'boating', 24 => 'driving', 25 => 'golf', 26 => 'hang_gliding', 27 => 'horseback_riding', 28 => 'hunting', 29 => 'fishing', 30 => 'inline_skating', 31 => 'rock_climbing', 32 => 'sailing', 33 => 'ice_skating', 34 => 'sky_diving', 35 => 'snowshoeing', 36 => 'snowmobiling', 37 => 'stand_up_paddleboarding', 38 => 'surfing', 39 => 'wakeboarding', 40 => 'water_skiing', 41 => 'kayaking', 42 => 'rafting', 43 => 'windsurfing', 44 => 'kitesurfing', 45 => 'tactical', 46 => 'jumpmaster', 47 => 'boxing', 48 => 'floor_climbing', 254 => 'all' } FitParser::File::Types.add_type :sport_bits_0, :uint8z, values: { 1 => 'generic', 2 => 'running', 4 => 'cycling', 8 => 'transition', 16 => 'fitness_equipment', 32 => 'swimming', 64 => 'basketball', 128 => 'soccer' } 
FitParser::File::Types.add_type :sport_bits_1, :uint8z, values: { 1 => 'tennis', 2 => 'american_football', 4 => 'training', 8 => 'walking', 16 => 'cross_country_skiing', 32 => 'alpine_skiing', 64 => 'snowboarding', 128 => 'rowing' } FitParser::File::Types.add_type :sport_bits_2, :uint8z, values: { 1 => 'mountaineering', 2 => 'hiking', 4 => 'multisport', 8 => 'paddling', 16 => 'flying', 32 => 'e_biking', 64 => 'motorcycling', 128 => 'boating' } FitParser::File::Types.add_type :sport_bits_3, :uint8z, values: { 1 => 'driving', 2 => 'golf', 4 => 'hang_gliding', 8 => 'horseback_riding', 16 => 'hunting', 32 => 'fishing', 64 => 'inline_skating', 128 => 'rock_climbing' } FitParser::File::Types.add_type :sport_bits_4, :uint8z, values: { 1 => 'sailing', 2 => 'ice_skating', 4 => 'sky_diving', 8 => 'snowshoeing', 16 => 'snowmobiling', 32 => 'stand_up_paddleboarding', 64 => 'surfing', 128 => 'wakeboarding' } FitParser::File::Types.add_type :sport_bits_5, :uint8z, values: { 1 => 'water_skiing', 2 => 'kayaking', 4 => 'rafting', 8 => 'windsurfing', 16 => 'kitesurfing', 32 => 'tactical', 64 => 'jumpmaster', 128 => 'boxing' } FitParser::File::Types.add_type :sport_bits_6, :uint8z, values: { 1 => 'floor_climbing' } FitParser::File::Types.add_type :sub_sport, :enum, values: { 0 => 'generic', 1 => 'treadmill', 2 => 'street', 3 => 'trail', 4 => 'track', 5 => 'spin', 6 => 'indoor_cycling', 7 => 'road', 8 => 'mountain', 9 => 'downhill', 10 => 'recumbent', 11 => 'cyclocross', 12 => 'hand_cycling', 13 => 'track_cycling', 14 => 'indoor_rowing', 15 => 'elliptical', 16 => 'stair_climbing', 17 => 'lap_swimming', 18 => 'open_water', 19 => 'flexibility_training', 20 => 'strength_training', 21 => 'warm_up', 22 => 'match', 23 => 'exercise', 24 => 'challenge', 25 => 'indoor_skiing', 26 => 'cardio_training', 27 => 'indoor_walking', 28 => 'e_bike_fitness', 29 => 'bmx', 30 => 'casual_walking', 31 => 'speed_walking', 32 => 'bike_to_run_transition', 33 => 'run_to_bike_transition', 34 => 'swim_to_bike_transition', 35 => 'atv', 36 => 'motocross', 37 => 'backcountry', 38 => 'resort', 39 => 'rc_drone', 40 => 'wingsuit', 41 => 'whitewater', 42 => 'skate_skiing', 43 => 'yoga', 44 => 'pilates', 45 => 'indoor_running', 46 => 'gravel_cycling', 47 => 'e_bike_mountain', 48 => 'commuting', 49 => 'mixed_surface', 50 => 'navigate', 51 => 'track_me', 254 => 'all' } FitParser::File::Types.add_type :sport_event, :enum, values: { 0 => 'uncategorized', 1 => 'geocaching', 2 => 'fitness', 3 => 'recreation', 4 => 'race', 5 => 'special_event', 6 => 'training', 7 => 'transportation', 8 => 'touring' } FitParser::File::Types.add_type :activity, :enum, values: { 0 => 'manual', 1 => 'auto_multi_sport' } FitParser::File::Types.add_type :intensity, :enum, values: { 0 => 'active', 1 => 'rest', 2 => 'warmup', 3 => 'cooldown' } FitParser::File::Types.add_type :session_trigger, :enum, values: { 0 => 'activity_end', 1 => 'manual', 2 => 'auto_multi_sport', 3 => 'fitness_equipment' } FitParser::File::Types.add_type :autolap_trigger, :enum, values: { 0 => 'time', 1 => 'distance', 2 => 'position_start', 3 => 'position_lap', 4 => 'position_waypoint', 5 => 'position_marked', 6 => 'off' } FitParser::File::Types.add_type :lap_trigger, :enum, values: { 0 => 'manual', 1 => 'time', 2 => 'distance', 3 => 'position_start', 4 => 'position_lap', 5 => 'position_waypoint', 6 => 'position_marked', 7 => 'session_end', 8 => 'fitness_equipment' } FitParser::File::Types.add_type :time_mode, :enum, values: { 0 => 'hour12', 1 => 'hour24', 2 => 'military', 3 => 'hour_12_with_seconds', 4 
=> 'hour_24_with_seconds', 5 => 'utc' } FitParser::File::Types.add_type :backlight_mode, :enum, values: { 0 => 'off', 1 => 'manual', 2 => 'key_and_messages', 3 => 'auto_brightness', 4 => 'smart_notifications', 5 => 'key_and_messages_night', 6 => 'key_and_messages_and_smart_notifications' } FitParser::File::Types.add_type :date_mode, :enum, values: { 0 => 'day_month', 1 => 'month_day' } FitParser::File::Types.add_type :event, :enum, values: { 0 => 'timer', 3 => 'workout', 4 => 'workout_step', 5 => 'power_down', 6 => 'power_up', 7 => 'off_course', 8 => 'session', 9 => 'lap', 10 => 'course_point', 11 => 'battery', 12 => 'virtual_partner_pace', 13 => 'hr_high_alert', 14 => 'hr_low_alert', 15 => 'speed_high_alert', 16 => 'speed_low_alert', 17 => 'cad_high_alert', 18 => 'cad_low_alert', 19 => 'power_high_alert', 20 => 'power_low_alert', 21 => 'recovery_hr', 22 => 'battery_low', 23 => 'time_duration_alert', 24 => 'distance_duration_alert', 25 => 'calorie_duration_alert', 26 => 'activity', 27 => 'fitness_equipment', 28 => 'length', 32 => 'user_marker', 33 => 'sport_point', 36 => 'calibration', 42 => 'front_gear_change', 43 => 'rear_gear_change', 45 => 'elev_high_alert', 46 => 'elev_low_alert', 47 => 'comm_timeout' } FitParser::File::Types.add_type :event_type, :enum, values: { 0 => 'start', 1 => 'stop', 2 => 'consecutive_depreciated', 3 => 'marker', 4 => 'stop_all', 5 => 'begin_depreciated', 6 => 'end_depreciated', 7 => 'end_all_depreciated', 8 => 'stop_disable', 9 => 'stop_disable_all' } FitParser::File::Types.add_type :timer_trigger, :enum, values: { 0 => 'manual', 1 => 'auto', 2 => 'fitness_equipment' } FitParser::File::Types.add_type :fitness_equipment_state, :enum, values: { 0 => 'ready', 1 => 'in_use', 2 => 'paused', 3 => 'unknown' } FitParser::File::Types.add_type :autoscroll, :enum, values: { 0 => 'none', 1 => 'slow', 2 => 'medium', 3 => 'fast' } FitParser::File::Types.add_type :activity_class, :enum, values: { 127 => 'level', 100 => 'level_max', 128 => 'athlete' } FitParser::File::Types.add_type :hr_zone_calc, :enum, values: { 0 => 'custom', 1 => 'percent_max_hr', 2 => 'percent_hrr' } FitParser::File::Types.add_type :pwr_zone_calc, :enum, values: { 0 => 'custom', 1 => 'percent_ftp' } FitParser::File::Types.add_type :wkt_step_duration, :enum, values: { 0 => 'time', 1 => 'distance', 2 => 'hr_less_than', 3 => 'hr_greater_than', 4 => 'calories', 5 => 'open', 6 => 'repeat_until_steps_cmplt', 7 => 'repeat_until_time', 8 => 'repeat_until_distance', 9 => 'repeat_until_calories', 10 => 'repeat_until_hr_less_than', 11 => 'repeat_until_hr_greater_than', 12 => 'repeat_until_power_less_than', 13 => 'repeat_until_power_greater_than', 14 => 'power_less_than', 15 => 'power_greater_than', 28 => 'repetition_time' } FitParser::File::Types.add_type :wkt_step_target, :enum, values: { 0 => 'speed', 1 => 'heart_rate', 2 => 'open', 3 => 'cadence', 4 => 'power', 5 => 'grade', 6 => 'resistance' } FitParser::File::Types.add_type :goal, :enum, values: { 0 => 'time', 1 => 'distance', 2 => 'calories', 3 => 'frequency', 4 => 'steps', 5 => 'ascent', 6 => 'active_minutes' } FitParser::File::Types.add_type :goal_recurrence, :enum, values: { 0 => 'off', 1 => 'daily', 2 => 'weekly', 3 => 'monthly', 4 => 'yearly', 5 => 'custom' } FitParser::File::Types.add_type :goal_source, :enum, values: { 0 => 'auto', 1 => 'community', 2 => 'user' } FitParser::File::Types.add_type :schedule, :enum, values: { 0 => 'workout', 1 => 'course' } FitParser::File::Types.add_type :course_point, :enum, values: { 0 => 'generic', 1 => 'summit', 2 => 
'valley', 3 => 'water', 4 => 'food', 5 => 'danger', 6 => 'left', 7 => 'right', 8 => 'straight', 9 => 'first_aid', 10 => 'fourth_category', 11 => 'third_category', 12 => 'second_category', 13 => 'first_category', 14 => 'hors_category', 15 => 'sprint', 16 => 'left_fork', 17 => 'right_fork', 18 => 'middle_fork', 19 => 'slight_left', 20 => 'sharp_left', 21 => 'slight_right', 22 => 'sharp_right', 23 => 'u_turn', 24 => 'segment_start', 25 => 'segment_end' } FitParser::File::Types.add_type :manufacturer, :uint16, values: { 1 => 'garmin', 2 => 'garmin_fr405_antfs', 3 => 'zephyr', 4 => 'dayton', 5 => 'idt', 6 => 'srm', 7 => 'quarq', 8 => 'ibike', 9 => 'saris', 10 => 'spark_hk', 11 => 'tanita', 12 => 'echowell', 13 => 'dynastream_oem', 14 => 'nautilus', 15 => 'dynastream', 16 => 'timex', 17 => 'metrigear', 18 => 'xelic', 19 => 'beurer', 20 => 'cardiosport', 21 => 'a_and_d', 22 => 'hmm', 23 => 'suunto', 24 => 'thita_elektronik', 25 => 'gpulse', 26 => 'clean_mobile', 27 => 'pedal_brain', 28 => 'peaksware', 29 => 'saxonar', 30 => 'lemond_fitness', 31 => 'dexcom', 32 => 'wahoo_fitness', 33 => 'octane_fitness', 34 => 'archinoetics', 35 => 'the_hurt_box', 36 => 'citizen_systems', 37 => 'magellan', 38 => 'osynce', 39 => 'holux', 40 => 'concept2', 42 => 'one_giant_leap', 43 => 'ace_sensor', 44 => 'brim_brothers', 45 => 'xplova', 46 => 'perception_digital', 47 => 'bf1systems', 48 => 'pioneer', 49 => 'spantec', 50 => 'metalogics', 51 => '4iiiis', 52 => 'seiko_epson', 53 => 'seiko_epson_oem', 54 => 'ifor_powell', 55 => 'maxwell_guider', 56 => 'star_trac', 57 => 'breakaway', 58 => 'alatech_technology_ltd', 59 => 'mio_technology_europe', 60 => 'rotor', 61 => 'geonaute', 62 => 'id_bike', 63 => 'specialized', 64 => 'wtek', 65 => 'physical_enterprises', 66 => 'north_pole_engineering', 67 => 'bkool', 68 => 'cateye', 69 => 'stages_cycling', 70 => 'sigmasport', 71 => 'tomtom', 72 => 'peripedal', 73 => 'wattbike', 76 => 'moxy', 77 => 'ciclosport', 78 => 'powerbahn', 79 => 'acorn_projects_aps', 80 => 'lifebeam', 81 => 'bontrager', 82 => 'wellgo', 83 => 'scosche', 84 => 'magura', 85 => 'woodway', 86 => 'elite', 87 => 'nielsen_kellerman', 88 => 'dk_city', 89 => 'tacx', 90 => 'direction_technology', 91 => 'magtonic', 92 => '1partcarbon', 93 => 'inside_ride_technologies', 94 => 'sound_of_motion', 95 => 'stryd', 96 => 'icg', 97 => 'MiPulse', 98 => 'bsx_athletics', 99 => 'look', 100 => 'campagnolo_srl', 101 => 'body_bike_smart', 102 => 'praxisworks', 103 => 'limits_technology', 104 => 'topaction_technology', 105 => 'cosinuss', 106 => 'fitcare', 255 => 'development', 257 => 'healthandlife', 258 => 'lezyne', 259 => 'scribe_labs', 260 => 'zwift', 261 => 'watteam', 262 => 'recon', 263 => 'favero_electronics', 264 => 'dynovelo', 265 => 'strava', 266 => 'precor', 267 => 'bryton', 268 => 'sram', 269 => 'navman', 270 => 'cobi', 271 => 'spivi', 272 => 'mio_magellan', 273 => 'evesports', 274 => 'sensitivus_gauge', 5759 => 'actigraphcorp' } FitParser::File::Types.add_type :garmin_product, :uint16, values: { 1 => 'hrm1', 2 => 'axh01', 3 => 'axb01', 4 => 'axb02', 5 => 'hrm2ss', 6 => 'dsi_alf02', 7 => 'hrm3ss', 8 => 'hrm_run_single_byte_product_id', 9 => 'bsm', 10 => 'bcm', 11 => 'axs01', 12 => 'hrm_tri_single_byte_product_id', 14 => 'fr225_single_byte_product_id', 473 => 'fr301_china', 474 => 'fr301_japan', 475 => 'fr301_korea', 494 => 'fr301_taiwan', 717 => 'fr405', 782 => 'fr50', 987 => 'fr405_japan', 988 => 'fr60', 1011 => 'dsi_alf01', 1018 => 'fr310xt', 1036 => 'edge500', 1124 => 'fr110', 1169 => 'edge800', 1199 => 'edge500_taiwan', 
1213 => 'edge500_japan', 1253 => 'chirp', 1274 => 'fr110_japan', 1325 => 'edge200', 1328 => 'fr910xt', 1333 => 'edge800_taiwan', 1334 => 'edge800_japan', 1341 => 'alf04', 1345 => 'fr610', 1360 => 'fr210_japan', 1380 => 'vector_ss', 1381 => 'vector_cp', 1386 => 'edge800_china', 1387 => 'edge500_china', 1410 => 'fr610_japan', 1422 => 'edge500_korea', 1436 => 'fr70', 1446 => 'fr310xt_4t', 1461 => 'amx', 1482 => 'fr10', 1497 => 'edge800_korea', 1499 => 'swim', 1537 => 'fr910xt_china', 1551 => 'fenix', 1555 => 'edge200_taiwan', 1561 => 'edge510', 1567 => 'edge810', 1570 => 'tempe', 1600 => 'fr910xt_japan', 1623 => 'fr620', 1632 => 'fr220', 1664 => 'fr910xt_korea', 1688 => 'fr10_japan', 1721 => 'edge810_japan', 1735 => 'virb_elite', 1736 => 'edge_touring', 1742 => 'edge510_japan', 1752 => 'hrm_run', 1765 => 'fr920xt', 1821 => 'edge510_asia', 1822 => 'edge810_china', 1823 => 'edge810_taiwan', 1836 => 'edge1000', 1837 => 'vivo_fit', 1853 => 'virb_remote', 1885 => 'vivo_ki', 1903 => 'fr15', 1907 => 'vivo_active', 1918 => 'edge510_korea', 1928 => 'fr620_japan', 1929 => 'fr620_china', 1930 => 'fr220_japan', 1931 => 'fr220_china', 1967 => 'fenix2', 1988 => 'epix', 2050 => 'fenix3', 2052 => 'edge1000_taiwan', 2053 => 'edge1000_japan', 2061 => 'fr15_japan', 2067 => 'edge520', 2070 => 'edge1000_china', 2072 => 'fr620_russia', 2073 => 'fr220_russia', 2079 => 'vector_s', 2100 => 'edge1000_korea', 2130 => 'fr920xt_taiwan', 2131 => 'fr920xt_china', 2132 => 'fr920xt_japan', 2134 => 'virbx', 2135 => 'vivo_smart_apac', 2140 => 'etrex_touch', 2147 => 'edge25', 2148 => 'fr25', 2150 => 'vivo_fit2', 2153 => 'fr225', 2156 => 'fr630', 2157 => 'fr230', 2160 => 'vivo_active_apac', 2161 => 'vector_2', 2162 => 'vector_2s', 2172 => 'virbxe', 2173 => 'fr620_taiwan', 2174 => 'fr220_taiwan', 2175 => 'truswing', 2188 => 'fenix3_china', 2189 => 'fenix3_twn', 2192 => 'varia_headlight', 2193 => 'varia_taillight_old', 2204 => 'edge_explore_1000', 2219 => 'fr225_asia', 2225 => 'varia_radar_taillight', 2226 => 'varia_radar_display', 2238 => 'edge20', 2262 => 'd2_bravo', 2266 => 'approach_s20', 2276 => 'varia_remote', 2327 => 'hrm4_run', 2337 => 'vivo_active_hr', 2347 => 'vivo_smart_gps_hr', 2348 => 'vivo_smart_hr', 2368 => 'vivo_move', 2398 => 'varia_vision', 2406 => 'vivo_fit3', 2413 => 'fenix3_hr', 2429 => 'index_smart_scale', 2431 => 'fr235', 2441 => 'oregon7xx', 2444 => 'rino7xx', 2496 => 'nautix', 2530 => 'edge_820', 2531 => 'edge_explore_820', 10007 => 'sdm4', 10014 => 'edge_remote', 20119 => 'training_center', 65532 => 'android_antplus_plugin', 65534 => 'connect' } FitParser::File::Types.add_type :antplus_device_type, :uint8, values: { 1 => 'antfs', 11 => 'bike_power', 12 => 'environment_sensor_legacy', 15 => 'multi_sport_speed_distance', 16 => 'control', 17 => 'fitness_equipment', 18 => 'blood_pressure', 19 => 'geocache_node', 20 => 'light_electric_vehicle', 25 => 'env_sensor', 26 => 'racquet', 27 => 'control_hub', 31 => 'muscle_oxygen', 35 => 'bike_light_main', 36 => 'bike_light_shared', 38 => 'exd', 40 => 'bike_radar', 119 => 'weight_scale', 120 => 'heart_rate', 121 => 'bike_speed_cadence', 122 => 'bike_cadence', 123 => 'bike_speed', 124 => 'stride_speed_distance' } FitParser::File::Types.add_type :ant_network, :enum, values: { 0 => 'public', 1 => 'antplus', 2 => 'antfs', 3 => 'private' } FitParser::File::Types.add_type :workout_capabilities, :uint32z, values: { 0x00000001 => 'interval', 0x00000002 => 'custom', 0x00000004 => 'fitness_equipment', 0x00000008 => 'firstbeat', 0x00000010 => 'new_leaf', 0x00000020 => 'tcx', 
0x00000080 => 'speed', 0x00000100 => 'heart_rate', 0x00000200 => 'distance', 0x00000400 => 'cadence', 0x00000800 => 'power', 0x00001000 => 'grade', 0x00002000 => 'resistance', 0x00004000 => 'protected' }, method: :bitfield_value FitParser::File::Types.add_type :battery_status, :uint8, values: { 1 => 'new', 2 => 'good', 3 => 'ok', 4 => 'low', 5 => 'critical', 6 => 'charging', 7 => 'unknown' } FitParser::File::Types.add_type :hr_type, :enum, values: { 0 => 'normal', 1 => 'irregular' } FitParser::File::Types.add_type :course_capabilities, :uint32z, values: { 0x00000001 => 'processed', 0x00000002 => 'valid', 0x00000004 => 'time', 0x00000008 => 'distance', 0x00000010 => 'position', 0x00000020 => 'heart_rate', 0x00000040 => 'power', 0x00000080 => 'cadence', 0x00000100 => 'training', 0x00000200 => 'navigation', 0x00000400 => 'bikeway' }, method: :bitfield_value FitParser::File::Types.add_type :weight, :uint16, values: { 65534 => 'calculating' } FitParser::File::Types.add_type :workout_hr, :uint32, values: { 100 => 'bpm_offset' } FitParser::File::Types.add_type :workout_power, :uint32, values: { 1000 => 'watts_offset' } FitParser::File::Types.add_type :bp_status, :enum, values: { 0 => 'no_error', 1 => 'error_incomplete_data', 2 => 'error_no_measurement', 3 => 'error_data_out_of_range', 4 => 'error_irregular_heart_rate' } FitParser::File::Types.add_type :user_local_id, :uint16, values: { 0 => 'local_min', 15 => 'local_max', 16 => 'stationary_min', 255 => 'stationary_max', 256 => 'portable_min', 65534 => 'portable_max' } FitParser::File::Types.add_type :swim_stroke, :enum, values: { 0 => 'freestyle', 1 => 'backstroke', 2 => 'breaststroke', 3 => 'butterfly', 4 => 'drill', 5 => 'mixed', 6 => 'im' } FitParser::File::Types.add_type :activity_type, :enum, values: { 0 => 'generic', 1 => 'running', 2 => 'cycling', 3 => 'transition', 4 => 'fitness_equipment', 5 => 'swimming', 6 => 'walking', 8 => 'sedentary', 254 => 'all' } FitParser::File::Types.add_type :activity_subtype, :enum, values: { 0 => 'generic', 1 => 'treadmill', 2 => 'street', 3 => 'trail', 4 => 'track', 5 => 'spin', 6 => 'indoor_cycling', 7 => 'road', 8 => 'mountain', 9 => 'downhill', 10 => 'recumbent', 11 => 'cyclocross', 12 => 'hand_cycling', 13 => 'track_cycling', 14 => 'indoor_rowing', 15 => 'elliptical', 16 => 'stair_climbing', 17 => 'lap_swimming', 18 => 'open_water', 254 => 'all' } FitParser::File::Types.add_type :activity_level, :enum, values: { 0 => 'low', 1 => 'medium', 2 => 'high' } FitParser::File::Types.add_type :side, :enum, values: { 0 => 'right', 1 => 'left' } FitParser::File::Types.add_type :left_right_balance, :uint8, values: { 127 => 'mask', 128 => 'right' } FitParser::File::Types.add_type :left_right_balance_100, :uint16, values: { 16383 => 'mask', 32768 => 'right' } FitParser::File::Types.add_type :length_type, :enum, values: { 0 => 'idle', 1 => 'active' } FitParser::File::Types.add_type :day_of_week, :enum, values: { 0 => 'sunday', 1 => 'monday', 2 => 'tuesday', 3 => 'wednesday', 4 => 'thursday', 5 => 'friday', 6 => 'saturday' } FitParser::File::Types.add_type :connectivity_capabilities, :uint32z, values: { 1 => 'bluetooth', 2 => 'bluetooth_le', 4 => 'ant', 8 => 'activity_upload', 16 => 'course_download', 32 => 'workout_download', 64 => 'live_track', 128 => 'weather_conditions', 256 => 'weather_alerts', 512 => 'gps_ephemeris_download', 1024 => 'explicit_archive', 2048 => 'setup_incomplete', 4096 => 'continue_sync_after_software_update', 8192 => 'connect_iq_app_download', 16384 => 'golf_course_download', 32768 => 
'device_initiates_sync', 65536 => 'connect_iq_watch_app_download', 131072 => 'connect_iq_widget_download', 262144 => 'connect_iq_watch_face_download', 524288 => 'connect_iq_data_field_download', 1048576 => 'connect_iq_app_managment', 2097152 => 'swing_sensor', 4194304 => 'swing_sensor_remote', 8388608 => 'incident_detection', 16777216 => 'audio_prompts', 33554432 => 'wifi_verification', 67108864 => 'true_up', 134217728 => 'find_my_watch', 268435456 => 'remote_manual_sync', 536870912 => 'live_track_auto_start', 1073741824 => 'live_track_messaging' } FitParser::File::Types.add_type :stroke_type, :enum, values: { 0 => 'no_event', 1 => 'other', 2 => 'serve', 3 => 'forehand', 4 => 'backhand', 5 => 'smash' } FitParser::File::Types.add_type :body_location, :enum, values: { 0 => 'left_leg', 1 => 'left_calf', 2 => 'left_shin', 3 => 'left_hamstring', 4 => 'left_quad', 5 => 'left_glute', 6 => 'right_leg', 7 => 'right_calf', 8 => 'right_shin', 9 => 'right_hamstring', 10 => 'right_quad', 11 => 'right_glute', 12 => 'torso_back', 13 => 'left_lower_back', 14 => 'left_upper_back', 15 => 'right_lower_back', 16 => 'right_upper_back', 17 => 'torso_front', 18 => 'left_abdomen', 19 => 'left_chest', 20 => 'right_abdomen', 21 => 'right_chest', 22 => 'left_arm', 23 => 'left_shoulder', 24 => 'left_bicep', 25 => 'left_tricep', 26 => 'left_brachioradialis', 27 => 'left_forearm_extensors', 28 => 'right_arm', 29 => 'right_shoulder', 30 => 'right_bicep', 31 => 'right_tricep', 32 => 'right_brachioradialis', 33 => 'right_forearm_extensors', 34 => 'neck', 35 => 'throat', 36 => 'waist_mid_back', 37 => 'waist_front', 38 => 'waist_left', 39 => 'waist_right' } FitParser::File::Types.add_type :source_type, :enum, values: { 0 => 'ant', 1 => 'antplus', 2 => 'bluetooth', 3 => 'bluetooth_low_energy', 4 => 'wifi', 5 => 'local' } FitParser::File::Types.add_type :attitude_stage, :enum, values: { 0 => 'failed', 1 => 'aligning', 2 => 'degraded', 3 => 'valid' } FitParser::File::Types.add_type :attitude_validity, :uint16, values: { 0x0001 => 'track_angle_heading_valid', 0x0002 => 'pitch_valid', 0x0004 => 'roll_valid', 0x0008 => 'lateral_body_accel_valid', 0x0010 => 'normal_body_accel_valid', 0x0020 => 'turn_rate_valid', 0x0040 => 'hw_fail', 0x0080 => 'mag_invalid', 0x0100 => 'no_gps', 0x0200 => 'gps_invalid', 0x0400 => 'solution_coasting', 0x0800 => 'true_track_angle', 0x1000 => 'magnetic_heading' } FitParser::File::Types.add_type :auto_sync_frequency, :enum, values: { 0 => 'never', 1 => 'occasionally', 2 => 'frequent', 3 => 'once_a_day' } FitParser::File::Types.add_type :exd_layout, :enum, values: { 0 => 'full_screen', 1 => 'half_vertical', 2 => 'half_horizontal', 3 => 'half_vertical_right_split', 4 => 'half_horizontal_bottom_split', 5 => 'full_quarter_split', 6 => 'half_vertical_left_split', 7 => 'half_horizontal_top_split' } FitParser::File::Types.add_type :exd_display_type, :enum, values: { 0 => 'numerical', 1 => 'simple', 2 => 'graph', 3 => 'bar', 4 => 'circle_graph', 5 => 'virtual_partner', 6 => 'balance', 7 => 'string_list', 8 => 'string', 9 => 'simple_dynamic_icon', 10 => 'gauge' } FitParser::File::Types.add_type :exd_data_units, :enum, values: { 0 => 'no_units', 1 => 'laps', 2 => 'miles_per_hour', 3 => 'kilometers_per_hour', 4 => 'feet_per_hour', 5 => 'meters_per_hour', 6 => 'degrees_celsius', 7 => 'degrees_farenheit', 8 => 'zone', 9 => 'gear', 10 => 'rpm', 11 => 'bpm', 12 => 'degrees', 13 => 'millimeters', 14 => 'meters', 15 => 'kilometers', 16 => 'feet', 17 => 'yards', 18 => 'kilofeet', 19 => 'miles', 20 => 'time', 21 => 
'enum_turn_type', 22 => 'percent', 23 => 'watts', 24 => 'watts_per_kilogram', 25 => 'enum_battery_status', 26 => 'enum_bike_light_beam_angle_mode', 27 => 'enum_bike_light_battery_status', 28 => 'enum_bike_light_network_config_type', 29 => 'lights', 30 => 'seconds', 31 => 'minutes', 32 => 'hours', 33 => 'calories', 34 => 'kilojoules', 35 => 'milliseconds', 36 => 'second_per_mile', 37 => 'second_per_kilometer', 38 => 'centimeter', 39 => 'enum_course_point', 40 => 'bradians', 41 => 'enum_sport' } FitParser::File::Types.add_type :exd_qualifiers, :enum, values: { 0 => 'no_qualifier', 1 => 'instantaneous', 2 => 'average', 3 => 'lap', 4 => 'maximum', 5 => 'maximum_average', 6 => 'maximum_lap', 7 => 'last_lap', 8 => 'average_lap', 9 => 'to_destination', 10 => 'to_go', 11 => 'to_next', 12 => 'next_course_point', 13 => 'total', 14 => 'three_second_average', 15 => 'ten_second_average', 16 => 'thirty_second_average', 17 => 'percent_maximum', 18 => 'percent_maximum_average', 19 => 'lap_percent_maximum', 20 => 'elapsed', 21 => 'sunrise', 22 => 'sunset', 23 => 'compared_to_virtual_partner', 24 => 'maximum_24h', 25 => 'minimum_24h', 26 => 'minimum', 27 => 'first', 28 => 'second', 29 => 'third', 30 => 'shifter', 31 => 'last_sport', 242 => 'zone_9', 243 => 'zone_8', 244 => 'zone_7', 245 => 'zone_6', 246 => 'zone_5', 247 => 'zone_4', 248 => 'zone_3', 249 => 'zone_2', 250 => 'zone_1' } FitParser::File::Types.add_type :exd_descriptors, :enum, values: { 0 => 'bike_light_battery_status', 1 => 'beam_angle_status', 2 => 'batery_level', 3 => 'light_network_mode', 4 => 'number_lights_connected', 5 => 'cadence', 6 => 'distance', 7 => 'estimated_time_of_arrival', 8 => 'heading', 9 => 'time', 10 => 'battery_level', 11 => 'trainer_resistance', 12 => 'trainer_target_power', 13 => 'time_seated', 14 => 'time_standing', 15 => 'elevation', 16 => 'grade', 17 => 'ascent', 18 => 'descent', 19 => 'vertical_speed', 20 => 'di2_battery_level', 21 => 'front_gear', 22 => 'rear_gear', 23 => 'gear_ratio', 24 => 'heart_rate', 25 => 'heart_rate_zone', 26 => 'time_in_heart_rate_zone', 27 => 'heart_rate_reserve', 28 => 'calories', 29 => 'gps_accuracy', 30 => 'gps_signal_strength', 31 => 'temperature', 32 => 'time_of_day', 33 => 'balance', 34 => 'pedal_smoothness', 35 => 'power', 36 => 'functional_threshold_power', 37 => 'intensity_factor', 38 => 'work', 39 => 'power_ratio', 40 => 'normalized_power', 41 => 'training_stress_Score', 42 => 'time_on_zone', 43 => 'speed', 44 => 'laps', 45 => 'reps', 46 => 'workout_step', 47 => 'course_distance', 48 => 'navigation_distance', 49 => 'course_estimated_time_of_arrival', 50 => 'navigation_estimated_time_of_arrival', 51 => 'course_time', 52 => 'navigation_time', 53 => 'course_heading', 54 => 'navigation_heading', 55 => 'power_zone', 56 => 'torque_effectiveness', 57 => 'timer_time', 58 => 'power_weight_ratio', 59 => 'left_platform_center_offset', 60 => 'right_platform_center_offset', 61 => 'left_power_phase_start_angle', 62 => 'right_power_phase_start_angle', 63 => 'left_power_phase_finish_angle', 64 => 'right_power_phase_finish_angle', 65 => 'gears', 66 => 'pace', 67 => 'training_effect', 68 => 'vertical_oscillation', 69 => 'vertical_ratio', 70 => 'ground_contact_time', 71 => 'left_ground_contact_time_balance', 72 => 'right_ground_contact_time_balance', 73 => 'stride_length', 74 => 'running_cadence', 75 => 'performance_condition', 76 => 'course_type', 77 => 'time_in_power_zone', 78 => 'navigation_turn', 79 => 'course_location', 80 => 'navigation_location', 81 => 'compass', 82 => 'gear_combo', 83 => 
'muscle_oxygen', 84 => 'icon' }
FitParser::File::Types.add_type :auto_activity_detect, :uint32, values: { 0x00000000 => 'none', 0x00000001 => 'running', 0x00000002 => 'cycling', 0x00000004 => 'swimming', 0x00000008 => 'walking', 0x00000020 => 'elliptical', 0x00000400 => 'sedentary' }
FitParser::File::Types.add_type :supported_exd_screen_layouts, :uint32z, values: { 0x00000001 => 'full_screen', 0x00000002 => 'half_vertical', 0x00000004 => 'half_horizontal', 0x00000008 => 'half_vertical_right_split', 0x00000010 => 'half_horizontal_bottom_split', 0x00000020 => 'full_quarter_split', 0x00000040 => 'half_vertical_left_split', 0x00000080 => 'half_horizontal_top_split' }
FitParser::File::Types.add_type :fit_base_type, :uint8, values: { 0 => 'enum', 1 => 'sint8', 2 => 'uint8', 131 => 'sint16', 132 => 'uint16', 133 => 'sint32', 134 => 'uint32', 7 => 'string', 136 => 'float32', 137 => 'float64', 10 => 'uint8z', 139 => 'uint16z', 140 => 'uint32z', 13 => 'byte', 142 => 'sint64', 143 => 'uint64', 144 => 'uint64z' }
FitParser::File::Types.add_type :turn_type, :enum, values: { 0 => 'arriving_idx', 1 => 'arriving_left_idx', 2 => 'arriving_right_idx', 3 => 'arriving_via_idx', 4 => 'arriving_via_left_idx', 5 => 'arriving_via_right_idx', 6 => 'bear_keep_left_idx', 7 => 'bear_keep_right_idx', 8 => 'continue_idx', 9 => 'exit_left_idx', 10 => 'exit_right_idx', 11 => 'ferry_idx', 12 => 'roundabout_45_idx', 13 => 'roundabout_90_idx', 14 => 'roundabout_135_idx', 15 => 'roundabout_180_idx', 16 => 'roundabout_225_idx', 17 => 'roundabout_270_idx', 18 => 'roundabout_315_idx', 19 => 'roundabout_360_idx', 20 => 'roundabout_neg_45_idx', 21 => 'roundabout_neg_90_idx', 22 => 'roundabout_neg_135_idx', 23 => 'roundabout_neg_180_idx', 24 => 'roundabout_neg_225_idx', 25 => 'roundabout_neg_270_idx', 26 => 'roundabout_neg_315_idx', 27 => 'roundabout_neg_360_idx', 28 => 'roundabout_generic_idx', 29 => 'roundabout_neg_generic_idx', 30 => 'sharp_turn_left_idx', 31 => 'sharp_turn_right_idx', 32 => 'turn_left_idx', 33 => 'turn_right_idx', 34 => 'uturn_left_idx', 35 => 'uturn_right_idx', 36 => 'icon_inv_idx', 37 => 'icon_idx_cnt' }
FitParser::File::Types.add_type :bike_light_beam_angle_mode, :uint8, values: { 0 => 'manual', 1 => 'auto' }
FitParser::File::Types.add_type :fit_base_unit, :uint16, values: { 0 => 'other' }

# The type below is assigned to some fields, but it is not defined in terms of
# values and a basic type in the FIT SDK as of 2015-01-29.
FitParser::File::Types.add_type :bool, :uint8, values: { 0 => false, 1 => true }
<file_sep>/lib/fit_parser/file/definitions.rb
module FitParser
  class File
    module Definitions
      def self.fields
        @fields ||= Hash.new { |h, k| h[k] = {} }
      end

      def self.fields=(value)
        @fields = value
      end

      def self.dyn_fields
        @dyn_fields ||= Hash.new { |h, k| h[k] = {} }
      end

      def self.dyn_fields=(value)
        @dyn_fields = value
      end

      def self.names
        @names ||= Hash.new
      end

      def self.names=(value)
        @names = value
      end

      # Registers a field for a message. A second registration for the same
      # field number is treated as a dynamic field: it is stored in dyn_fields
      # and must name the reference field and values it applies to.
      def self.add_field(global_msg_num, field_def_num, name, options = {})
        if Definitions.fields[global_msg_num].key?(field_def_num)
          raise "bad definition of dynamic field (#{name}) without :ref_field_name or :ref_field_values" unless options.key?(:ref_field_name) && options.key?(:ref_field_values)
          Definitions.dyn_fields[global_msg_num][field_def_num] ||= {}
          Definitions.dyn_fields[global_msg_num][field_def_num][name.to_sym] = options
          # store the ref_field_values as raw values instead of the real values
          type = Types.get_type_definition(options[:ref_field_name].to_sym) if options[:ref_field_name]
          # basic types are not found and return nil (this also covers some rspec dummy tests)
          if type
            type = type[:values].invert
            Definitions.dyn_fields[global_msg_num][field_def_num][name.to_sym][:ref_field_values] = options[:ref_field_values].map { |elt| type[elt.to_s] }
          end
        else
          Definitions.fields[global_msg_num][field_def_num] = options.merge(name: name)
        end
      end

      def self.get_field(global_msg_num, field_def_num)
        Definitions.fields[global_msg_num][field_def_num]
      end

      def self.get_dynamic_fields(global_msg_num, field_def_num)
        Definitions.dyn_fields[global_msg_num][field_def_num]
      end

      def self.add_name(global_msg_num, name)
        Definitions.names[global_msg_num] = name
      end

      def self.get_name(global_msg_num)
        Definitions.names[global_msg_num]
      end
    end
  end
end

# DATA
FitParser::File::Definitions.add_name(0, 'file_id')
FitParser::File::Definitions.add_field(0, 0, 'type', type: :file)
FitParser::File::Definitions.add_field(0, 1, 'manufacturer', type: :manufacturer)
FitParser::File::Definitions.add_field(0, 2, 'product', type: :uint16)
FitParser::File::Definitions.add_field(0, 2, 'garmin_product', type: :garmin_product, ref_field_name: 'manufacturer', ref_field_values: [:garmin, :dynastream, :dynastream_oem])
FitParser::File::Definitions.add_field(0, 3, 'serial_number', type: :uint32z)
FitParser::File::Definitions.add_field(0, 4, 'time_created', type: :date_time)
FitParser::File::Definitions.add_field(0, 5, 'number', type: :uint16)
FitParser::File::Definitions.add_field(0, 8, 'product_number', type: :string)

FitParser::File::Definitions.add_name(49, 'file_creator')
FitParser::File::Definitions.add_field(49, 0, 'software_version', type: :uint16)
FitParser::File::Definitions.add_field(49, 1, 'hardware_version', type: :uint8)

FitParser::File::Definitions.add_name(35, 'software')
FitParser::File::Definitions.add_field 35, 254, 'message_index', type: :message_index
FitParser::File::Definitions.add_field 35, 3, 'version', type: :uint16, scale: 100
FitParser::File::Definitions.add_field 35, 5, 'part_number', type: :string

FitParser::File::Definitions.add_name 106, 'slave_device'
FitParser::File::Definitions.add_field 106, 0, 'manufacturer', type: :manufacturer
FitParser::File::Definitions.add_field 106, 1, 'product', type: :uint16
FitParser::File::Definitions.add_field 106, 1, 'garmin_product', type: :garmin_product, ref_field_name: 'manufacturer', ref_field_values: [:garmin, :dynastream, :dynastream_oem]

FitParser::File::Definitions.add_name 1, 'capabilities'
FitParser::File::Definitions.add_field 1, 0, 'languages', type: :uint8z
FitParser::File::Definitions.add_field 1, 1, 'sports', type: :sport_bits_0
FitParser::File::Definitions.add_field 1, 21, 'workouts_supported', type: :workout_capabilities
FitParser::File::Definitions.add_field 1, 23, 'connectivity_supported', type: :connectivity_capabilities

FitParser::File::Definitions.add_name 37, 'file_capabilities'
FitParser::File::Definitions.add_field 37, 254, 'message_index', type: :message_index
FitParser::File::Definitions.add_field 37, 0, 'type', type: :file
FitParser::File::Definitions.add_field 37, 1, 'flags', type: :file_flags
FitParser::File::Definitions.add_field 37, 2, 'directory', type: :string
FitParser::File::Definitions.add_field 37, 3, 'max_count', type: :uint16
FitParser::File::Definitions.add_field 37, 4, 'max_size', type: :uint32, unit: 'bytes'

FitParser::File::Definitions.add_name 38, 'mesg_capabilities'
FitParser::File::Definitions.add_field 38, 254, 'message_index', type: :message_index
FitParser::File::Definitions.add_field 38, 0, 'file', type: :file
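# --- Usage sketch (illustrative aside, not part of the gem) -------------------
# The registrations in this file populate the lookup tables of the Definitions
# module above. The expected results below follow from the 'file_id' entries
# registered a few lines earlier; the ENV guard name is hypothetical and only
# keeps the sketch from running during normal loads.
if ENV['FIT_PARSER_DEFINITIONS_DEMO']
  p FitParser::File::Definitions.get_name(0)     # => "file_id"
  p FitParser::File::Definitions.get_field(0, 1) # => { type: :manufacturer, name: "manufacturer" }
  # Field 2 of message 0 is registered twice, so the second entry became a
  # dynamic field: it only applies when the decoded 'manufacturer' value is one
  # of the listed raw values (1, 15 and 13, i.e. garmin, dynastream and
  # dynastream_oem in the :manufacturer type above).
  p FitParser::File::Definitions.get_dynamic_fields(0, 2)
  # => { garmin_product: { type: :garmin_product, ref_field_name: "manufacturer", ref_field_values: [1, 15, 13] } }
end
# -------------------------------------------------------------------------------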
FitParser::File::Definitions.add_field 38, 1, 'mesg_num', type: :mesg_num FitParser::File::Definitions.add_field 38, 2, 'count_type', type: :mesg_count FitParser::File::Definitions.add_field 38, 3, 'count', type: :uint16 FitParser::File::Definitions.add_field 38, 3, 'num_per_file', type: :uint16, ref_field_name: 'count_type', ref_field_values: [:num_per_file] FitParser::File::Definitions.add_field 38, 3, 'max_per_file', type: :uint16, ref_field_name: 'count_type', ref_field_values: [:max_per_file] FitParser::File::Definitions.add_field 38, 3, 'max_per_file_type', type: :uint16, ref_field_name: 'count_type', ref_field_values: [:max_per_file_type] FitParser::File::Definitions.add_name 39, 'field_capabilities' FitParser::File::Definitions.add_field 39, 254, 'message_index', type: :message_index FitParser::File::Definitions.add_field 39, 0, 'file', type: :file FitParser::File::Definitions.add_field 39, 1, 'mesg_num', type: :mesg_num FitParser::File::Definitions.add_field 39, 2, 'field_num', type: :uint8 FitParser::File::Definitions.add_field 39, 3, 'count', type: :uint16 FitParser::File::Definitions.add_name 2, 'device_settings' FitParser::File::Definitions.add_field 2, 0, 'active_time_zone', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 2, 1, 'utc_offset', type: :uint32 FitParser::File::Definitions.add_field 2, 2, 'time_offset', type: :uint32 FitParser::File::Definitions.add_field 2, 4, 'time_mode', type: :type_mode FitParser::File::Definitions.add_field 2, 5, 'time_zone_offset', type: :sint8, scale: 4, unit: 'hr' FitParser::File::Definitions.add_field 2, 12, 'backlight_mode', type: :backlight_mode FitParser::File::Definitions.add_field 2, 36, 'activity_tracker_enabled', type: :bool FitParser::File::Definitions.add_field 2, 39, 'clock_time', type: :date_time FitParser::File::Definitions.add_field 2, 40, 'pages_enabled', type: :uint16 FitParser::File::Definitions.add_field 2, 46, 'move_alert_enabled', type: :bool FitParser::File::Definitions.add_field 2, 47, 'date_mode', type: :date_mode FitParser::File::Definitions.add_field 2, 55, 'display_orientation', type: :display_orientation FitParser::File::Definitions.add_field 2, 56, 'mounting_side', type: :side FitParser::File::Definitions.add_field 2, 57, 'default_page', type: :uint16 FitParser::File::Definitions.add_field 2, 58, 'autosync_min_steps', type: :uint16 FitParser::File::Definitions.add_field 2, 59, 'autosync_min_time', type: :uint16 FitParser::File::Definitions.add_field 2, 80, 'lactate_threshold_autodetect_enabled', type: :bool FitParser::File::Definitions.add_field 2, 86, 'ble_auto_upload_enabled', type: :bool FitParser::File::Definitions.add_field 2, 89, 'auto_sync_frequency', type: :auto_sync_frequency FitParser::File::Definitions.add_field 2, 90, 'auto_activity_detect', type: :auto_activity_detect FitParser::File::Definitions.add_field 2, 94, 'number_of_screens', type: :uint8 FitParser::File::Definitions.add_field 2, 95, 'smart_notification_display_orientation', type: :display_orientation FitParser::File::Definitions.add_name 3, 'user_profile' FitParser::File::Definitions.add_field 3, 254, 'message_index', type: :message_index FitParser::File::Definitions.add_field 3, 0, 'friendly_name', type: :string, scale: 1 FitParser::File::Definitions.add_field 3, 1, 'gender', type: :gender, scale: 1 FitParser::File::Definitions.add_field 3, 2, 'age', type: :uint8, scale: 1, unit: 'years' FitParser::File::Definitions.add_field 3, 3, 'height', type: :uint8, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 3, 4, 
'weight', type: :uint16, scale: 10, unit: 'kg' FitParser::File::Definitions.add_field 3, 5, 'language', type: :language, scale: 1 FitParser::File::Definitions.add_field 3, 6, 'elev_setting', type: :display_measure, scale: 1 FitParser::File::Definitions.add_field 3, 7, 'weight_setting', type: :display_measure, scale: 1 FitParser::File::Definitions.add_field 3, 8, 'resting_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 3, 9, 'default_max_running_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 3, 10, 'default_max_biking_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 3, 11, 'default_max_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 3, 12, 'hr_setting', type: :display_heart, scale: 1 FitParser::File::Definitions.add_field 3, 13, 'speed_setting', type: :display_measure, scale: 1 FitParser::File::Definitions.add_field 3, 14, 'dist_setting', type: :display_measure, scale: 1 FitParser::File::Definitions.add_field 3, 16, 'power_setting', type: :display_power, scale: 1 FitParser::File::Definitions.add_field 3, 17, 'activity_class', type: :activity_class, scale: 1 FitParser::File::Definitions.add_field 3, 18, 'position_setting', type: :display_position, scale: 1 FitParser::File::Definitions.add_field 3, 21, 'temperature_setting', type: :display_measure, scale: 1 FitParser::File::Definitions.add_field 3, 22, 'local_id', type: :user_local_id FitParser::File::Definitions.add_field 3, 23, 'global_id', type: :byte FitParser::File::Definitions.add_field 3, 30, 'height_setting', type: :display_measure FitParser::File::Definitions.add_field 3, 31, 'user_running_step_length', type: :uint16, scale: 1000, unit: 'm' FitParser::File::Definitions.add_field 3, 32, 'user_walking_step_length', type: :uint16, scale: 1000, unit: 'm' FitParser::File::Definitions.add_name 4, 'hrm_profile' FitParser::File::Definitions.add_field 4, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 4, 0, 'enabled', type: :bool FitParser::File::Definitions.add_field 4, 1, 'hrm_ant_id', type: :uint16z, scale: 1 FitParser::File::Definitions.add_field 4, 2, 'log_hrv', type: :bool FitParser::File::Definitions.add_field 4, 3, 'hrm_ant_id_trans_type', type: :uint8z FitParser::File::Definitions.add_name 5, 'sdm_profile' FitParser::File::Definitions.add_field 5, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 5, 0, 'enabled', type: :bool FitParser::File::Definitions.add_field 5, 1, 'sdm_ant_id', type: :uint16z, scale: 1 FitParser::File::Definitions.add_field 5, 2, 'sdm_cal_factor', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 5, 3, 'odometer', type: :uint32, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 5, 4, 'speed_source', type: :bool FitParser::File::Definitions.add_field 5, 5, 'sdm_ant_id_trans_type', type: :uint8z FitParser::File::Definitions.add_field 5, 7, 'odometer_rollover', type: :uint8 FitParser::File::Definitions.add_name 6, 'bike_profile' FitParser::File::Definitions.add_field 6, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 6, 0, 'name', type: :string, scale: 1 FitParser::File::Definitions.add_field 6, 1, 'sport', type: :sport, scale: 1 FitParser::File::Definitions.add_field 6, 2, 'sub_sport', type: :sub_sport, scale: 1 FitParser::File::Definitions.add_field 6, 3, 'odometer', type: :uint32, scale: 100, 
unit: 'm' FitParser::File::Definitions.add_field 6, 4, 'bike_spd_ant_id', type: :uint16z, scale: 1 FitParser::File::Definitions.add_field 6, 5, 'bike_cad_ant_id', type: :uint16z, scale: 1 FitParser::File::Definitions.add_field 6, 6, 'bike_spdcad_ant_id', type: :uint16z, scale: 1 FitParser::File::Definitions.add_field 6, 7, 'bike_power_ant_id', type: :uint16z, scale: 1 FitParser::File::Definitions.add_field 6, 8, 'custom_wheelsize', type: :uint16, scale: 1000, unit: 'm' FitParser::File::Definitions.add_field 6, 9, 'auto_wheelsize', type: :uint16, scale: 1000, unit: 'm' FitParser::File::Definitions.add_field 6, 10, 'bike_weight', type: :uint16, scale: 10, unit: 'kg' FitParser::File::Definitions.add_field 6, 11, 'power_cal_factor', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 6, 12, 'auto_wheel_cal', type: :bool, scale: 1 FitParser::File::Definitions.add_field 6, 13, 'auto_power_zero', type: :bool, scale: 1 FitParser::File::Definitions.add_field 6, 14, 'id', type: :uint8 FitParser::File::Definitions.add_field 6, 15, 'spd_enabled', type: :bool FitParser::File::Definitions.add_field 6, 16, 'cad_enabled', type: :bool FitParser::File::Definitions.add_field 6, 17, 'spdcad_enabled', type: :bool FitParser::File::Definitions.add_field 6, 18, 'power_enabled', type: :bool FitParser::File::Definitions.add_field 6, 19, 'crank_length', type: :uint8, scale: 2, offset: -110, unit: 'mm' FitParser::File::Definitions.add_field 6, 20, 'enabled', type: :bool FitParser::File::Definitions.add_field 6, 21, 'bike_spd_ant_id_trans_type', type: :uint8z FitParser::File::Definitions.add_field 6, 22, 'bike_cad_ant_id_trans_type', type: :uint8z FitParser::File::Definitions.add_field 6, 23, 'bike_spdcad_ant_id_trans_type', type: :uint8z FitParser::File::Definitions.add_field 6, 24, 'bike_power_ant_id_trans_type', type: :uint8z FitParser::File::Definitions.add_field 6, 37, 'odometer_rollover', type: :uint8 FitParser::File::Definitions.add_field 6, 38, 'front_gear_num', type: :uint8z FitParser::File::Definitions.add_field 6, 39, 'front_gear', type: :uint8z FitParser::File::Definitions.add_field 6, 40, 'rear_gear_num', type: :uint8z FitParser::File::Definitions.add_field 6, 41, 'rear_gear', type: :uint8z FitParser::File::Definitions.add_field 6, 44, 'shimano_di2_enabled', type: :bool FitParser::File::Definitions.add_name 7, 'zones_target' FitParser::File::Definitions.add_field 7, 1, 'max_heart_rate', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 7, 2, 'threshold_heart_rate', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 7, 3, 'functional_threshold_power', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 7, 5, 'hr_calc_type', type: :hr_zone_calc, scale: 1 FitParser::File::Definitions.add_field 7, 7, 'pwr_calc_type', type: :pwr_zone_calc, scale: 1 FitParser::File::Definitions.add_name 12, 'sport' FitParser::File::Definitions.add_field 12, 0, 'sport', type: :sport, scale: 1 FitParser::File::Definitions.add_field 12, 1, 'sub_sport', type: :sub_sport, scale: 1 FitParser::File::Definitions.add_field 12, 3, 'name', type: :string FitParser::File::Definitions.add_name 8, 'hr_zone' FitParser::File::Definitions.add_field 8, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 8, 1, 'high_bpm', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 8, 2, 'name', type: :string, scale: 1 FitParser::File::Definitions.add_name 53, 'speed_zone' FitParser::File::Definitions.add_field 53, 254, 'message_index', 
type: :message_index, scale: 1 FitParser::File::Definitions.add_field 53, 0, 'high_value', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 53, 1, 'name', type: :string, scale: 1 FitParser::File::Definitions.add_name 131, 'cadence_zone' FitParser::File::Definitions.add_field 131, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 131, 0, 'high_value', type: :uint8, scale: 1, unit: 'rpm' FitParser::File::Definitions.add_field 131, 1, 'name', type: :string, scale: 1 FitParser::File::Definitions.add_name 9, 'power_zone' FitParser::File::Definitions.add_field 9, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 9, 1, 'high_value', type: :uint16, scale: 1, unit: 'watts' FitParser::File::Definitions.add_field 9, 2, 'name', type: :string, scale: 1 FitParser::File::Definitions.add_name 10, 'met_zone' FitParser::File::Definitions.add_field 10, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 10, 1, 'high_bpm', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 10, 2, 'calories', type: :uint16, scale: 10, unit: 'kcal / min' FitParser::File::Definitions.add_field 10, 3, 'fat_calories', type: :uint8, scale: 10, unit: 'kcal / min' FitParser::File::Definitions.add_name 15, 'goal' FitParser::File::Definitions.add_field 15, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 15, 0, 'sport', type: :sport, scale: 1 FitParser::File::Definitions.add_field 15, 1, 'sub_sport', type: :sub_sport, scale: 1 FitParser::File::Definitions.add_field 15, 2, 'start_date', type: :date_time FitParser::File::Definitions.add_field 15, 3, 'end_date', type: :date_time FitParser::File::Definitions.add_field 15, 4, 'type', type: :goal FitParser::File::Definitions.add_field 15, 5, 'value', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 15, 6, 'repeat', type: :bool FitParser::File::Definitions.add_field 15, 7, 'target_value', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 15, 8, 'recurrence', type: :goal_recurrence, scale: 1 FitParser::File::Definitions.add_field 15, 9, 'recurrence_value', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 15, 10, 'enabled', type: :bool, scale: 1 FitParser::File::Definitions.add_field 15, 11, 'source', type: :goal_source FitParser::File::Definitions.add_name 34, 'activity' FitParser::File::Definitions.add_field 34, 253, 'timestamp', type: :date_time FitParser::File::Definitions.add_field 34, 0, 'total_timer_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 34, 1, 'num_sessions', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 34, 2, 'type', type: :activity FitParser::File::Definitions.add_field 34, 3, 'event', type: :event FitParser::File::Definitions.add_field 34, 4, 'event_type', type: :event_type FitParser::File::Definitions.add_field 34, 5, 'local_timestamp', type: :local_date_time FitParser::File::Definitions.add_field 34, 6, 'event_group', type: :uint8 FitParser::File::Definitions.add_name 18, 'session' FitParser::File::Definitions.add_field 18, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 18, 253, 'timestamp', type: :date_time, unit: 's' FitParser::File::Definitions.add_field 18, 0, 'event', type: :event FitParser::File::Definitions.add_field 18, 1, 'event_type', type: :event_type FitParser::File::Definitions.add_field 18, 2, 'start_time', type: 
:date_time, scale: 1 FitParser::File::Definitions.add_field 18, 3, 'start_position_lat', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 18, 4, 'start_position_long', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 18, 5, 'sport', type: :sport, scale: 1 FitParser::File::Definitions.add_field 18, 6, 'sub_sport', type: :sub_sport, scale: 1 FitParser::File::Definitions.add_field 18, 7, 'total_elapsed_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 8, 'total_timer_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 9, 'total_distance', type: :uint32, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 18, 10, 'total_cycles', type: :uint32, scale: 1, unit: 'cycles' FitParser::File::Definitions.add_field 18, 10, 'total_strides', type: :uint32, scale: 1, unit: 'strides', ref_field_name: 'sport', ref_field_values: [:running] FitParser::File::Definitions.add_field 18, 11, 'total_calories', type: :uint16, scale: 1, unit: 'kcal' FitParser::File::Definitions.add_field 18, 13, 'total_fat_calories', type: :uint16, scale: 1, unit: 'kcal' FitParser::File::Definitions.add_field 18, 14, 'avg_speed', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 18, 15, 'max_speed', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 18, 16, 'avg_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 18, 17, 'max_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 18, 18, 'avg_cadence', type: :uint8, scale: 1, unit: 'rpm' FitParser::File::Definitions.add_field 18, 18, 'avg_running_cadence', type: :uint8, scale: 1, unit: 'strides/min', ref_field_name: 'sport', ref_field_values: [:running] FitParser::File::Definitions.add_field 18, 19, 'max_cadence', type: :uint8, scale: 1, unit: 'rpm' FitParser::File::Definitions.add_field 18, 18, 'max_running_cadence', type: :uint8, scale: 1, unit: 'strides/min', ref_field_name: 'sport', ref_field_values: [:running] FitParser::File::Definitions.add_field 18, 20, 'avg_power', type: :uint16, scale: 1, unit: 'watts' FitParser::File::Definitions.add_field 18, 21, 'max_power', type: :uint16, scale: 1, unit: 'watts' FitParser::File::Definitions.add_field 18, 22, 'total_ascent', type: :uint16, scale: 1, unit: 'm' FitParser::File::Definitions.add_field 18, 23, 'total_descent', type: :uint16, scale: 1, unit: 'm' FitParser::File::Definitions.add_field 18, 24, 'total_training_effect', type: :uint8, scale: 10 FitParser::File::Definitions.add_field 18, 25, 'first_lap_index', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 18, 26, 'num_laps', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 18, 27, 'event_group', type: :uint8 FitParser::File::Definitions.add_field 18, 28, 'trigger', type: :session_trigger FitParser::File::Definitions.add_field 18, 29, 'nec_lat', type: :sint32, unit: 'semicircles' FitParser::File::Definitions.add_field 18, 30, 'nec_long', type: :sint32, unit: 'semicircles' FitParser::File::Definitions.add_field 18, 31, 'swc_lat', type: :sint32, unit: 'semicircles' FitParser::File::Definitions.add_field 18, 32, 'swc_long', type: :sint32, unit: 'semicircles' FitParser::File::Definitions.add_field 18, 34, 'normalized_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 18, 35, 'training_stress_score', type: :uint16, scale: 10, unit: 'tss' 
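# Aside (illustrative, not part of the gem): some session fields above, e.g.
# field 18 ('avg_cadence' vs 'avg_running_cadence'), are registered twice and
# become dynamic. A decoder would pick the dynamic name when the referenced
# 'sport' field matches; the raw value 1 for 'running' is assumed from the
# :sport type registered earlier in types.rb, and the ENV guard is hypothetical.
if ENV['FIT_PARSER_DEFINITIONS_DEMO']
  raw_sport = 1 # pretend the session's already-decoded 'sport' field says 'running'
  dynamic = FitParser::File::Definitions.get_dynamic_fields(18, 18)
  name, = dynamic.find { |_n, opts| Array(opts[:ref_field_values]).include?(raw_sport) }
  puts(name || FitParser::File::Definitions.get_field(18, 18)[:name])
  # => avg_running_cadence (other sports fall back to the plain 'avg_cadence')
end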
FitParser::File::Definitions.add_field 18, 36, 'intensity_factor', type: :uint16, scale: 1000, unit: 'if' FitParser::File::Definitions.add_field 18, 37, 'left_right_balance', type: :left_right_balance_100 FitParser::File::Definitions.add_field 18, 41, 'avg_stroke_count', type: :uint32, scale: 10, unit: 'strokes/lap' FitParser::File::Definitions.add_field 18, 42, 'avg_stroke_distance', type: :uint16, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 18, 43, 'swim_stroke', type: :swim_stroke, unit: 'swim_stroke', ref_field_name: 'sport', ref_field_values: [:swimming] FitParser::File::Definitions.add_field 18, 44, 'pool_length', type: :uint16, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 18, 46, 'pool_length_unit', type: :display_measure, scale: 1 FitParser::File::Definitions.add_field 18, 47, 'num_active_lengths', type: :uint16, unit: 'lengths' FitParser::File::Definitions.add_field 18, 48, 'total_work', type: :uint32, unit: 'J' FitParser::File::Definitions.add_field 18, 49, 'avg_altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 18, 50, 'max_altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 18, 51, 'gps_accuracy', type: :uint8, unit: 'm' FitParser::File::Definitions.add_field 18, 52, 'avg_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 18, 53, 'avg_pos_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 18, 54, 'avg_neg_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 18, 55, 'max_pos_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 18, 56, 'max_neg_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 18, 57, 'avg_temperature', type: :sint8, unit: 'C' FitParser::File::Definitions.add_field 18, 58, 'max_temperature', type: :sint8, unit: 'C' FitParser::File::Definitions.add_field 18, 59, 'total_moving_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 60, 'avg_pos_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 18, 61, 'avg_neg_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 18, 62, 'max_pos_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 18, 63, 'max_neg_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 18, 64, 'min_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 18, 65, 'time_in_hr_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 66, 'time_in_speed_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 67, 'time_in_cadence_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 68, 'time_in_power_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 69, 'avg_lap_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 18, 70, 'best_lap_index', type: :uint16 FitParser::File::Definitions.add_field 18, 71, 'min_altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 18, 82, 'player_score', type: :uint16 FitParser::File::Definitions.add_field 18, 83, 'opponent_score', type: :uint16 FitParser::File::Definitions.add_field 18, 84, 'opponent_name', 
type: :string FitParser::File::Definitions.add_field 18, 85, 'stroke_count', type: :uint16, unit: 'counts' FitParser::File::Definitions.add_field 18, 86, 'zone_count', type: :uint16, unit: 'counts' FitParser::File::Definitions.add_field 18, 87, 'max_ball_speed', type: :uint16, scale: 100, unit: 'm/s' FitParser::File::Definitions.add_field 18, 88, 'avg_ball_speed', type: :uint16, scale: 100, unit: 'm/s' FitParser::File::Definitions.add_field 18, 89, 'avg_vertical_oscillation', type: :uint16, scale: 10, unit: 'mm' FitParser::File::Definitions.add_field 18, 90, 'avg_stance_time_percent', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 18, 91, 'avg_stance_time', type: :uint16, scale: 10, unit: 'ms' FitParser::File::Definitions.add_field 18, 92, 'avg_fractional_cadence', type: :uint8, scale: 128, unit: 'rpm' FitParser::File::Definitions.add_field 18, 93, 'max_fractional_cadence', type: :uint8, scale: 128, unit: 'rpm' FitParser::File::Definitions.add_field 18, 94, 'total_fractional_cycles', type: :uint8, scale: 128, unit: 'cycles' FitParser::File::Definitions.add_field 18, 95, 'avg_total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 18, 96, 'min_total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 18, 97, 'max_total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 18, 98, 'avg_saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 18, 99, 'min_saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 18, 100, 'max_saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 18, 101, 'avg_left_torque_effectiveness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 18, 102, 'avg_right_torque_effectiveness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 18, 103, 'avg_left_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 18, 104, 'avg_right_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 18, 105, 'avg_combined_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 18, 111, 'sport_index', type: :uint8 FitParser::File::Definitions.add_field 18, 112, 'time_standing', type: :uint32, scale: 1000, units: 's' FitParser::File::Definitions.add_field 18, 113, 'stand_count', type: :uint16 FitParser::File::Definitions.add_field 18, 114, 'avg_left_pco', type: :sint8, units: 'mm' FitParser::File::Definitions.add_field 18, 115, 'avg_right_pco', type: :sint8, units: 'mm' FitParser::File::Definitions.add_field 18, 116, 'avg_left_power_phase', type: :uint8, scale: 0.7111111, units: 'degrees' FitParser::File::Definitions.add_field 18, 117, 'avg_left_power_phase_peak', type: :uint8, scale: 0.7111111, units: 'degrees' FitParser::File::Definitions.add_field 18, 118, 'avg_right_power_phase', type: :uint8, scale: 0.7111111, units: 'degrees' FitParser::File::Definitions.add_field 18, 119, 'avg_right_power_phase_peak', type: :uint8, scale: 0.7111111, units: 'degrees' FitParser::File::Definitions.add_field 18, 120, 'avg_power_position', type: :uint16, units: 'watts' FitParser::File::Definitions.add_field 18, 121, 'max_power_position', type: :uint16, units: 'watts' FitParser::File::Definitions.add_field 
18, 122, 'avg_cadence_position', type: :uint8, units: 'rpm' FitParser::File::Definitions.add_field 18, 123, 'max_cadence_position', type: :uint8, units: 'rpm' FitParser::File::Definitions.add_field 18, 124, 'enhanced_avg_speed', type: :uint32, scale: 1000, units: 'm/s' FitParser::File::Definitions.add_field 18, 125, 'enhanced_max_speed', type: :uint32, scale: 1000, units: 'm/s' FitParser::File::Definitions.add_field 18, 126, 'enhanced_avg_altitude', type: :uint32, scale: 5, offset: 500, units: 'm' FitParser::File::Definitions.add_field 18, 127, 'enhanced_min_altitude', type: :uint32, scale: 5, offset: 500, units: 'm' FitParser::File::Definitions.add_field 18, 128, 'enhanced_max_altitude', type: :uint32, scale: 5, offset: 500, units: 'm' FitParser::File::Definitions.add_field 18, 129, 'avg_lev_motor_power', type: :uint16, units: 'watts' FitParser::File::Definitions.add_field 18, 130, 'max_lev_motor_power', type: :uint16, units: 'watts' FitParser::File::Definitions.add_field 18, 131, 'lev_battery_consumption', type: :uint8, scale: 2, units: 'percent' FitParser::File::Definitions.add_field 18, 132, 'avg_vertical_ratio', type: :uint16, scale: 100, units: 'percent' FitParser::File::Definitions.add_field 18, 133, 'avg_stance_time_balance', type: :uint16, scale: 100, units: 'percent' FitParser::File::Definitions.add_field 18, 134, 'avg_step_length', type: :uint16, scale: 10, units: 'mm' FitParser::File::Definitions.add_field 18, 137, 'total_anaerobic_training_effect', type: :uint8, scale: 10 FitParser::File::Definitions.add_name 19, 'lap' FitParser::File::Definitions.add_field 19, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 19, 253, 'timestamp', type: :date_time, unit: 's' FitParser::File::Definitions.add_field 19, 0, 'event', type: :event FitParser::File::Definitions.add_field 19, 1, 'event_type', type: :event_type FitParser::File::Definitions.add_field 19, 2, 'start_time', type: :date_time, scale: 1 FitParser::File::Definitions.add_field 19, 3, 'start_position_lat', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 19, 4, 'start_position_long', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 19, 5, 'end_position_lat', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 19, 6, 'end_position_long', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 19, 7, 'total_elapsed_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 8, 'total_timer_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 9, 'total_distance', type: :uint32, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 19, 10, 'total_cycles', type: :uint32, unit: 'cycles' FitParser::File::Definitions.add_field 19, 10, 'total_strides', type: :uint32, unit: 'strides', ref_field_name: 'sport', ref_field_values: [:running] FitParser::File::Definitions.add_field 19, 11, 'total_calories', type: :uint16, unit: 'kcal' FitParser::File::Definitions.add_field 19, 12, 'total_fat_calories', type: :uint16, unit: 'kcal' FitParser::File::Definitions.add_field 19, 13, 'avg_speed', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 14, 'max_speed', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 15, 'avg_heart_rate', type: :uint8, unit: 'bpm' FitParser::File::Definitions.add_field 19, 16, 'max_heart_rate', type: :uint8, unit: 'bpm' 
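# Aside (illustrative, not part of the gem): the scale/offset options on the
# session fields above (e.g. 'avg_speed' scale 1000, 'avg_altitude' scale 5 and
# offset 500) are applied elsewhere in the gem; the arithmetic sketched here
# assumes the usual FIT convention of value = raw / scale - offset. The ENV
# guard name is hypothetical.
if ENV['FIT_PARSER_DEFINITIONS_DEMO']
  to_physical = ->(raw, scale: 1, offset: 0) { raw.to_f / scale - offset }
  puts to_physical.call(12_345, scale: 1000)          # avg_speed    => 12.345 (m/s)
  puts to_physical.call(3_100, scale: 5, offset: 500) # avg_altitude => 120.0 (m)
end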
FitParser::File::Definitions.add_field 19, 17, 'avg_cadence', type: :uint8, unit: 'rpm' FitParser::File::Definitions.add_field 19, 17, 'avg_running_cadence', type: :uint8, unit: 'strides/min', ref_field_name: 'sport', ref_field_values: [:running] FitParser::File::Definitions.add_field 19, 18, 'max_cadence', type: :uint8, unit: 'rpm' FitParser::File::Definitions.add_field 19, 18, 'max_running_cadence', type: :uint8, unit: 'strides/min', ref_field_name: 'sport', ref_field_values: [:running] FitParser::File::Definitions.add_field 19, 19, 'avg_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 20, 'max_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 21, 'total_ascent', type: :uint16, unit: 'm' FitParser::File::Definitions.add_field 19, 22, 'total_descent', type: :uint16, unit: 'm' FitParser::File::Definitions.add_field 19, 23, 'intensity', type: :intensity FitParser::File::Definitions.add_field 19, 24, 'lap_trigger', type: :lap_trigger FitParser::File::Definitions.add_field 19, 25, 'sport', type: :sport FitParser::File::Definitions.add_field 19, 26, 'event_group', type: :uint8 FitParser::File::Definitions.add_field 19, 32, 'num_lengths', type: :uint16, unit: 'lengths' FitParser::File::Definitions.add_field 19, 33, 'normalized_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 34, 'left_right_balance', type: :left_right_balance_100 FitParser::File::Definitions.add_field 19, 35, 'first_length_index', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 19, 37, 'avg_stroke_distance', type: :uint16, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 19, 38, 'swim_stroke', type: :swim_stroke FitParser::File::Definitions.add_field 19, 39, 'sub_sport', type: :sub_sport, scale: 1 FitParser::File::Definitions.add_field 19, 40, 'num_active_lengths', type: :uint16, unit: 'lengths' FitParser::File::Definitions.add_field 19, 41, 'total_work', type: :uint32, unit: 'J' FitParser::File::Definitions.add_field 19, 42, 'avg_altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 19, 43, 'max_altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 19, 44, 'gps_accuracy', type: :uint8, unit: 'm' FitParser::File::Definitions.add_field 19, 45, 'avg_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 19, 46, 'avg_pos_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 19, 47, 'avg_neg_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 19, 48, 'max_pos_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 19, 49, 'max_neg_grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 19, 50, 'avg_temperature', type: :sint8, unit: 'C' FitParser::File::Definitions.add_field 19, 51, 'max_temperature', type: :sint8, unit: 'C' FitParser::File::Definitions.add_field 19, 52, 'total_moving_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 53, 'avg_pos_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 54, 'avg_neg_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 55, 'max_pos_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 56, 'max_neg_vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' 
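# Aside (illustrative, not part of the gem): one way to read the raw
# left_right_balance_100 values carried by the session/lap fields above,
# assuming the usual FIT reading of the flags in the :left_right_balance_100
# type (16383 masks the value, 32768 marks it as the right-side share). The
# ENV guard name is hypothetical.
if ENV['FIT_PARSER_DEFINITIONS_DEMO']
  raw = 32_768 | 5_210 # flag set, 52.10 % attributed to the right side
  right_pct = (raw & 16_383) / 100.0 if (raw & 32_768).positive?
  puts format('right: %.2f %%, left: %.2f %%', right_pct, 100 - right_pct)
  # => right: 52.10 %, left: 47.90 %
end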
FitParser::File::Definitions.add_field 19, 57, 'time_in_hr_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 58, 'time_in_speed_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 59, 'time_in_cadence_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 60, 'time_in_power_zone', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 61, 'repetition_num', type: :uint16 FitParser::File::Definitions.add_field 19, 62, 'min_altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 19, 63, 'min_heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 19, 71, 'wkt_step_index', type: :message_index FitParser::File::Definitions.add_field 19, 74, 'opponent_score', type: :uint16 FitParser::File::Definitions.add_field 19, 75, 'stroke_count', type: :uint16, unit: 'counts' FitParser::File::Definitions.add_field 19, 76, 'zone_count', type: :uint16, unit: 'counts' FitParser::File::Definitions.add_field 19, 77, 'avg_vertical_oscillation', type: :uint16, scale: 10, unit: 'mm' FitParser::File::Definitions.add_field 19, 78, 'avg_stance_time_percent', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 19, 79, 'avg_stance_time', type: :uint16, scale: 10, unit: 'ms' FitParser::File::Definitions.add_field 19, 80, 'avg_fractional_cadence', type: :uint8, scale: 128, unit: 'rpm' FitParser::File::Definitions.add_field 19, 81, 'max_fractional_cadence', type: :uint8, scale: 128, unit: 'rpm' FitParser::File::Definitions.add_field 19, 82, 'total_fractional_cycles', type: :uint8, scale: 128, unit: 'cycles' FitParser::File::Definitions.add_field 19, 83, 'player_score', type: :uint16 FitParser::File::Definitions.add_field 19, 84, 'avg_total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 19, 85, 'min_total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 19, 86, 'max_total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 19, 87, 'avg_saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 19, 88, 'min_saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 19, 89, 'max_saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 19, 91, 'avg_left_torque_effectiveness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 19, 92, 'avg_right_torque_effectiveness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 19, 93, 'avg_left_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 19, 94, 'avg_right_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 19, 95, 'avg_combined_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 19, 98, 'time_standing', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 19, 99, 'stand_count', type: :uint16 FitParser::File::Definitions.add_field 19, 100, 'avg_left_pco', type: :sint8, unit: 'mm' FitParser::File::Definitions.add_field 19, 101, 'avg_right_pco', type: :sint8, unit: 's' FitParser::File::Definitions.add_field 19, 102, 'avg_left_power_phase', type: :uint8, 
scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 19, 103, 'avg_left_power_phase_peak', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 19, 104, 'avg_right_power_phase', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 19, 105, 'avg_right_power_phase_peak', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 19, 106, 'avg_power_position', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 107, 'max_power_position', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 108, 'avg_cadence_position', type: :uint8, unit: 'rpm' FitParser::File::Definitions.add_field 19, 109, 'max_cadence_position', type: :uint8, unit: 'rpm' FitParser::File::Definitions.add_field 19, 110, 'enhanced_avg_speed', type: :uint32, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 111, 'enhanced_max_speed', type: :uint32, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 19, 112, 'enhanced_avg_altitude', type: :uint32, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 19, 113, 'enhanced_min_altitude', type: :uint32, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 19, 114, 'enhanced_max_altitude', type: :uint32, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 19, 115, 'avg_lev_motor_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 116, 'max_lev_motor_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 19, 117, 'lev_battery_consumption', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 19, 118, 'avg_vertical_ratio', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 19, 119, 'avg_stance_time_balance', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 19, 120, 'avg_step_length', type: :uint16, scale: 10, unit: 'mm' FitParser::File::Definitions.add_name 101, 'length' FitParser::File::Definitions.add_field 101, 254, 'message_index', type: :message_index, scale: 1 FitParser::File::Definitions.add_field 101, 253, 'timestamp', type: :date_time, scale: 1 FitParser::File::Definitions.add_field 101, 0, 'event', type: :event FitParser::File::Definitions.add_field 101, 1, 'event_type', type: :event_type FitParser::File::Definitions.add_field 101, 2, 'start_time', type: :date_time, scale: 1 FitParser::File::Definitions.add_field 101, 3, 'total_elapsed_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 101, 4, 'total_timer_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 101, 5, 'total_strokes', type: :uint16, scale: 1, unit: 'strokes' FitParser::File::Definitions.add_field 101, 6, 'avg_speed', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 101, 7, 'swim_stroke', type: :swim_stroke, unit: 'swim_stroke' FitParser::File::Definitions.add_field 101, 9, 'avg_swimming_cadence', type: :uint8, unit: 'strokes/min' FitParser::File::Definitions.add_field 101, 10, 'event_group', type: :uint8 FitParser::File::Definitions.add_field 101, 11, 'total_calories', type: :uint16, unit: 'kcal' FitParser::File::Definitions.add_field 101, 12, 'length_type', type: :length_type FitParser::File::Definitions.add_field 101, 18, 'player_score', type: :uint16 FitParser::File::Definitions.add_field 101, 19, 'opponent_score', type: :uint16 
FitParser::File::Definitions.add_field 101, 20, 'stroke_count', type: :uint16, unit: 'counts' FitParser::File::Definitions.add_field 101, 21, 'zone_count', type: :uint16, unit: 'counts' FitParser::File::Definitions.add_name 20, 'record' FitParser::File::Definitions.add_field 20, 253, 'timestamp', type: :date_time, scale: 1, unit: 's' FitParser::File::Definitions.add_field 20, 0, 'position_lat', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 20, 1, 'position_long', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 20, 2, 'altitude', type: :uint16, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 20, 3, 'heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 20, 4, 'cadence', type: :uint8, scale: 1, unit: 'rpm' FitParser::File::Definitions.add_field 20, 5, 'distance', type: :uint32, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 20, 6, 'speed', type: :uint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 20, 7, 'power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 20, 8, 'compressed_speed_distance', type: :byte, unit: 'm/s,m' FitParser::File::Definitions.add_field 20, 9, 'grade', type: :sint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 20, 10, 'resistance', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 20, 11, 'time_from_course', type: :sint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 20, 12, 'cycle_length', type: :uint8, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 20, 13, 'temperature', type: :sint8, unit: 'C' FitParser::File::Definitions.add_field 20, 17, 'speed_1s', type: :uint8, scale: 16, unit: 'm/s' FitParser::File::Definitions.add_field 20, 18, 'cycles', type: :uint8, unit: 'cycles' FitParser::File::Definitions.add_field 20, 19, 'total_cycles', type: :uint32, unit: 'cycles' FitParser::File::Definitions.add_field 20, 28, 'compressed_accumulated_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 20, 29, 'accumulated_power', type: :uint32, unit: 'watts' FitParser::File::Definitions.add_field 20, 30, 'left_right_balance', type: :left_right_balance FitParser::File::Definitions.add_field 20, 31, 'gps_accuracy', type: :uint8, unit: 'm' FitParser::File::Definitions.add_field 20, 32, 'vertical_speed', type: :sint16, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 20, 33, 'calories', type: :uint16, scale: 1, unit: 'kcal' FitParser::File::Definitions.add_field 20, 39, 'vertical_oscillation', type: :uint16, scale: 10, unit: 'mm' FitParser::File::Definitions.add_field 20, 40, 'stance_time_percent', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 20, 41, 'stance_time', type: :uint16, scale: 10, unit: 'ms' FitParser::File::Definitions.add_field 20, 42, 'activity_type', type: :activity_type FitParser::File::Definitions.add_field 20, 43, 'left_torque_effectiveness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 20, 44, 'right_torque_effectiveness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 20, 45, 'left_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 20, 46, 'right_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 20, 47, 'combined_pedal_smoothness', type: :uint8, scale: 2, unit: 'percent' 
FitParser::File::Definitions.add_field 20, 48, 'time128', type: :uint8, scale: 128, unit: 's' FitParser::File::Definitions.add_field 20, 49, 'stroke_type', type: :stroke_type FitParser::File::Definitions.add_field 20, 50, 'zone', type: :uint8 FitParser::File::Definitions.add_field 20, 51, 'ball_speed', type: :uint16, scale: 100, unit: 'm/s' FitParser::File::Definitions.add_field 20, 52, 'cadence256', type: :uint16, scale: 256, unit: 'rpm' FitParser::File::Definitions.add_field 20, 53, 'fractional_cadence', type: :uint8, scale: 128, unit: 'rpm' FitParser::File::Definitions.add_field 20, 54, 'total_hemoglobin_conc', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 20, 55, 'total_hemoglobin_conc_min', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 20, 56, 'total_hemoglobin_conc_max', type: :uint16, scale: 100, unit: 'g/dL' FitParser::File::Definitions.add_field 20, 57, 'saturated_hemoglobin_percent', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 20, 58, 'saturated_hemoglobin_percent_min', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 20, 59, 'saturated_hemoglobin_percent_max', type: :uint16, scale: 10, unit: '%' FitParser::File::Definitions.add_field 20, 62, 'device_index', type: :device_index FitParser::File::Definitions.add_field 20, 67, 'left_pco', type: :sint8, unit: 'mm' FitParser::File::Definitions.add_field 20, 68, 'right_pco', type: :sint8, unit: 'mm' FitParser::File::Definitions.add_field 20, 69, 'left_power_phase', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 20, 70, 'left_power_phase_peak', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 20, 71, 'right_power_phase', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 20, 72, 'right_power_phase_peak', type: :uint8, scale: 0.7111111, unit: 'degrees' FitParser::File::Definitions.add_field 20, 73, 'enhanced_speed', type: :uint32, scale: 1000, unit: 'm/s' FitParser::File::Definitions.add_field 20, 78, 'enhanced_altitude', type: :uint32, scale: 5, offset: 500, unit: 'm' FitParser::File::Definitions.add_field 20, 81, 'battery_soc', type: :uint8, scale: 2, unit: 'percent' FitParser::File::Definitions.add_field 20, 82, 'motor_power', type: :uint16, unit: 'watts' FitParser::File::Definitions.add_field 20, 83, 'vertical_ratio', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 20, 84, 'stance_time_balance', type: :uint16, scale: 100, unit: 'percent' FitParser::File::Definitions.add_field 20, 85, 'step_length', type: :uint16, scale: 10, unit: 'mm' FitParser::File::Definitions.add_name 21, 'event' FitParser::File::Definitions.add_field 21, 253, 'timestamp', type: :date_time, unit: 's' FitParser::File::Definitions.add_field 21, 0, 'event', type: :event FitParser::File::Definitions.add_field 21, 1, 'event_type', type: :event_type FitParser::File::Definitions.add_field 21, 2, 'data16', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 21, 3, 'data', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 21, 3, 'timer_trigger', type: :timer_trigger, scale: 1, ref_field_name: 'event', ref_field_values: [:timer] FitParser::File::Definitions.add_field 21, 3, 'course_point_index', type: :message_index, scale: 1, ref_field_name: 'event', ref_field_values: [:course_point] FitParser::File::Definitions.add_field 21, 3, 'battery_level', type: :uint16, scale: 1000, unit: 
'V', ref_field_name: 'event', ref_field_values: [:battery] FitParser::File::Definitions.add_field 21, 3, 'virtual_partner_speed', type: :uint16, scale: 1000, unit: 'm/s', ref_field_name: 'event', ref_field_values: [:virtual_partner_pace] FitParser::File::Definitions.add_field 21, 3, 'hr_high_alert', type: :uint8, scale: 1, unit: 'bpm', ref_field_name: 'event', ref_field_values: [:hr_high_alert] FitParser::File::Definitions.add_field 21, 3, 'hr_low_alert', type: :uint8, scale: 1, unit: 'bpm', ref_field_name: 'event', ref_field_values: [:hr_low_alert] FitParser::File::Definitions.add_field 21, 3, 'speed_high_alert', type: :uint16, scale: 1000, unit: 'm/s', ref_field_name: 'event', ref_field_values: [:speed_high_alert] FitParser::File::Definitions.add_field 21, 3, 'speed_low_alert', type: :uint16, scale: 1000, unit: 'm/s', ref_field_name: 'event', ref_field_values: [:speed_low_alert] FitParser::File::Definitions.add_field 21, 3, 'cad_high_alert', type: :uint16, scale: 1, unit: 'rpm', ref_field_name: 'event', ref_field_values: [:cad_high_alert] FitParser::File::Definitions.add_field 21, 3, 'cad_low_alert', type: :uint16, scale: 1, unit: 'rpm', ref_field_name: 'event', ref_field_values: [:cad_low_alert] FitParser::File::Definitions.add_field 21, 3, 'power_high_alert', type: :uint16, scale: 1, unit: 'watts', ref_field_name: 'event', ref_field_values: [:power_high_alert] FitParser::File::Definitions.add_field 21, 3, 'power_low_alert', type: :uint16, scale: 1, unit: 'watts', ref_field_name: 'event', ref_field_values: [:power_low_alert] FitParser::File::Definitions.add_field 21, 3, 'time_duration_alert', type: :uint32, scale: 1000, unit: 's', ref_field_name: 'event', ref_field_values: [:time_duration_alert] FitParser::File::Definitions.add_field 21, 3, 'distance_duration_alert', type: :uint32, scale: 100, unit: 'm', ref_field_name: 'event', ref_field_values: [:distance_duration_alert] FitParser::File::Definitions.add_field 21, 3, 'calorie_duration_alert', type: :uint32, scale: 1, unit: 'calories', ref_field_name: 'event', ref_field_values: [:calorie_duration_alert] FitParser::File::Definitions.add_field 21, 3, 'fitness_equipment_state', type: :fitness_equipment_state, scale: 1, ref_field_name: 'event', ref_field_values: [:fitness_equipment] FitParser::File::Definitions.add_field 21, 3, 'sport_point', type: :uint32, scale: 1, ref_field_name: 'event', ref_field_values: [:sport_point] FitParser::File::Definitions.add_field 21, 3, 'gear_change_data', type: :uint32, scale: 1, ref_field_name: 'event', ref_field_values: [:front_gear_change, :rear_gear_change] FitParser::File::Definitions.add_field 21, 4, 'event_group', type: :uint8 FitParser::File::Definitions.add_field 21, 7, 'score', type: :uint16 FitParser::File::Definitions.add_field 21, 8, 'opponent_score', type: :uint16 FitParser::File::Definitions.add_field 21, 9, 'front_gear_num', type: :uint8z FitParser::File::Definitions.add_field 21, 10, 'front_gear', type: :uint8z FitParser::File::Definitions.add_field 21, 11, 'rear_gear_num', type: :uint8z FitParser::File::Definitions.add_field 21, 12, 'rear_gear', type: :uint8z FitParser::File::Definitions.add_field 21, 13, 'device_index' FitParser::File::Definitions.add_name 23, 'device_info' FitParser::File::Definitions.add_field 23, 253, 'timestamp', type: :date_time, scale: 1, unit: 's' FitParser::File::Definitions.add_field 23, 0, 'device_index', type: :device_index, scale: 1 FitParser::File::Definitions.add_field 23, 1, 'device_type', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 23, 1, 
'antplus_device_type', type: :antplus_device_type, ref_field_name: 'source_type', ref_field_values: [:antplus] FitParser::File::Definitions.add_field 23, 1, 'ant_device_type', type: :uint8, ref_field_name: 'source_type', ref_field_values: [:ant] FitParser::File::Definitions.add_field 23, 2, 'manufacturer', type: :manufacturer, scale: 1 FitParser::File::Definitions.add_field 23, 3, 'serial_number', type: :uint32z, scale: 1 FitParser::File::Definitions.add_field 23, 4, 'product', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 23, 5, 'software_version', type: :uint16, scale: 100 FitParser::File::Definitions.add_field 23, 6, 'hardware_version', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 23, 7, 'cum_operating_time', type: :uint32, scale: 1, unit: 's' FitParser::File::Definitions.add_field 23, 10, 'battery_voltage', type: :uint16, scale: 256, unit: 'V' FitParser::File::Definitions.add_field 23, 11, 'battery_status', type: :battery_status FitParser::File::Definitions.add_field 23, 18, 'sensor_position', type: :body_location FitParser::File::Definitions.add_field 23, 19, 'descriptor', type: :string FitParser::File::Definitions.add_field 23, 20, 'ant_transmission_type', type: :uint8z FitParser::File::Definitions.add_field 23, 21, 'ant_device_number', type: :uint16z FitParser::File::Definitions.add_field 23, 22, 'ant_network', type: :ant_network FitParser::File::Definitions.add_field 23, 25, 'source_type', type: :source_type FitParser::File::Definitions.add_field 23, 27, 'product_name', type: :string FitParser::File::Definitions.add_name 72, 'training_file' FitParser::File::Definitions.add_field 72, 253, 'timestamp', type: :date_time FitParser::File::Definitions.add_field 72, 0, 'type', type: :file FitParser::File::Definitions.add_field 72, 1, 'manufacturer', type: :manufacturer FitParser::File::Definitions.add_field 72, 2, 'product', type: :uint16 FitParser::File::Definitions.add_field 72, 2, 'garmin_product', type: :garmin_product, ref_field_name: 'manufacturer', ref_field_values: [:garmin, :dynastream, :dynastream_oem] FitParser::File::Definitions.add_field 72, 3, 'serial_number', type: :uint32z FitParser::File::Definitions.add_field 72, 4, 'time_created', type: :date_time FitParser::File::Definitions.add_name 78, 'hrv' FitParser::File::Definitions.add_field 78, 0, 'time', type: :uint16, scale: 1000, unit: 's' FitParser::File::Definitions.add_name 31, 'course' FitParser::File::Definitions.add_field 31, 4, 'sport', type: :sport FitParser::File::Definitions.add_field 31, 5, 'name', type: :string FitParser::File::Definitions.add_field 31, 6, 'capabilities', type: :course_capabilities FitParser::File::Definitions.add_field 31, 7, 'sub_sport', type: :sub_sport FitParser::File::Definitions.add_name 32, 'course_point' FitParser::File::Definitions.add_field 32, 254, 'message_index', type: :message_index FitParser::File::Definitions.add_field 32, 1, 'timestamp', type: :date_time, scale: 1 FitParser::File::Definitions.add_field 32, 2, 'position_lat', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 32, 3, 'position_long', type: :sint32, scale: 1, unit: 'semicircles' FitParser::File::Definitions.add_field 32, 4, 'distance', type: :uint32, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 32, 5, 'type', type: :course_point, scale: 1 FitParser::File::Definitions.add_field 32, 6, 'name', type: :string FitParser::File::Definitions.add_field 32, 8, 'favorite', type: :bool FitParser::File::Definitions.add_name 26, 'workout' 
FitParser::File::Definitions.add_field 26, 4, 'sport', type: :sport, scale: 1 FitParser::File::Definitions.add_field 26, 5, 'capabilities', type: :workout_capabilities FitParser::File::Definitions.add_field 26, 6, 'num_valid_steps', type: :uint16, scale: 1 FitParser::File::Definitions.add_field 26, 8, 'wkt_name', type: :string, scale: 1 FitParser::File::Definitions.add_name 27, 'workout_step' FitParser::File::Definitions.add_field 27, 254, 'message_index', type: :message_index FitParser::File::Definitions.add_field 27, 0, 'wkt_step_name', type: :string, scale: 1 FitParser::File::Definitions.add_field 27, 1, 'duration_type', type: :wkt_step_duration, scale: 1 FitParser::File::Definitions.add_field 27, 2, 'duration_value', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 27, 2, 'duration_time', type: :uint32, scale: 1000, unit: 's', ref_field_name: 'duration_type', ref_field_values: [:time, :repetition_time] FitParser::File::Definitions.add_field 27, 2, 'duration_distance', type: :uint32, scale: 100, unit: 'm', ref_field_name: 'duration_type', ref_field_values: [:distance] FitParser::File::Definitions.add_field 27, 2, 'duration_hr', type: :workout_hr, scale: 1, unit: '% or bpm', ref_field_name: 'duration_type', ref_field_values: [:hr_less_than, :hr_greater_than] FitParser::File::Definitions.add_field 27, 2, 'duration_calories', type: :uint32, scale: 1, unit: 'calories', ref_field_name: 'duration_type', ref_field_values: [:calories] FitParser::File::Definitions.add_field 27, 2, 'duration_step', type: :uint32, ref_field_name: 'duration_type', ref_field_values: [:repeat_until_steps_cmplt, :repeat_until_time, :repeat_until_distance, :repeat_until_calories, :repeat_until_hr_less_than, :repeat_until_hr_greater_than, :repeat_until_power_less_than,:repeat_until_power_greater_than] FitParser::File::Definitions.add_field 27, 2, 'duration_power', type: :workout_power, scale: 1, unit: '% or watts', ref_field_name: 'duration_type', ref_field_values: [:power_less_than, :power_greater_than] FitParser::File::Definitions.add_field 27, 3, 'target_type', type: :wkt_step_target, scale: 1 FitParser::File::Definitions.add_field 27, 4, 'target_value', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 27, 4, 'target_hr_zone', type: :uint32, ref_field_name: 'target_type', ref_field_values: [:heart_rate] FitParser::File::Definitions.add_field 27, 4, 'target_power_zone', type: :uint32, ref_field_name: 'target_type', ref_field_values: [:power] FitParser::File::Definitions.add_field 27, 4, 'repeat_steps', type: :uint32, ref_field_name: 'duration_type', ref_field_values: [:repeat_until_steps_cmplt] FitParser::File::Definitions.add_field 27, 4, 'repeat_time', type: :uint32, scale: 1000, unit: 's', ref_field_name: 'duration_type', ref_field_values: [:repeat_until_time] FitParser::File::Definitions.add_field 27, 4, 'repeat_distance', type: :uint32, scale: 100, unit: 'm', ref_field_name: 'duration_type', ref_field_values: [:repeat_until_distance] FitParser::File::Definitions.add_field 27, 4, 'repeat_calories', type: :uint32, scale: 1, unit: 'calories', ref_field_name: 'duration_type', ref_field_values: [:repeat_until_calories] FitParser::File::Definitions.add_field 27, 4, 'repeat_hr', type: :workout_hr, scale: 1, unit: '% or bpm', ref_field_name: 'duration_type', ref_field_values: [:repeat_until_hr_less_than, :repeat_until_hr_greater_than] FitParser::File::Definitions.add_field 27, 4, 'repeat_power', type: :workout_power, scale: 1, unit: '% or watts', ref_field_name: 'duration_type', 
ref_field_values: [:repeat_until_power_less_than, :repeat_until_power_greater_than] FitParser::File::Definitions.add_field 27, 5, 'custom_target_value_low', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 27, 5, 'custom_target_speed_low', type: :uint32, scale: 1000, unit: 'm/s', ref_field_name: 'target_type', ref_field_values: [:speed] FitParser::File::Definitions.add_field 27, 5, 'custom_target_heart_rate_low', type: :workout_hr, scale: 1, unit: '% or bpm', ref_field_name: 'target_type', ref_field_values: [:heart_rate] FitParser::File::Definitions.add_field 27, 5, 'custom_target_cadence_low', type: :uint32, scale: 1, unit: 'rpm', ref_field_name: 'target_type', ref_field_values: [:cadence] FitParser::File::Definitions.add_field 27, 5, 'custom_target_power_low', type: :workout_power, scale: 1, unit: '% or watts', ref_field_name: 'target_type', ref_field_values: [:power] FitParser::File::Definitions.add_field 27, 6, 'custom_target_value_high', type: :uint32, scale: 1 FitParser::File::Definitions.add_field 27, 6, 'custom_target_speed_high', type: :uint32, scale: 1000, unit: 'm/s', ref_field_name: 'target_type', ref_field_values: [:speed] FitParser::File::Definitions.add_field 27, 6, 'custom_target_heart_rate_high', type: :workout_hr, scale: 1, unit: '% or bpm', ref_field_name: 'target_type', ref_field_values: [:heart_rate] FitParser::File::Definitions.add_field 27, 6, 'custom_target_cadence_high', type: :uint32, scale: 1, unit: 'rpm', ref_field_name: 'target_type', ref_field_values: [:cadence] FitParser::File::Definitions.add_field 27, 6, 'custom_target_power_high', type: :workout_power, scale: 1, unit: '% or watts', ref_field_name: 'target_type', ref_field_values: [:power] FitParser::File::Definitions.add_field 27, 7, 'intensity', type: :intensity, scale: 1 FitParser::File::Definitions.add_name 28, 'schedule' FitParser::File::Definitions.add_field 28, 0, 'manufacturer', type: :manufacturer FitParser::File::Definitions.add_field 28, 1, 'product', type: :uint16 FitParser::File::Definitions.add_field 28, 1, 'garmin_product', type: :garmin_product, ref_field_name: 'manufacturer', ref_field_values: [:garmin, :dynastream, :dynastream_oem] FitParser::File::Definitions.add_field 28, 2, 'serial_number', type: :uint32z FitParser::File::Definitions.add_field 28, 3, 'time_created', type: :date_time FitParser::File::Definitions.add_field 28, 4, 'completed', type: :bool, scale: 1 FitParser::File::Definitions.add_field 28, 5, 'type', type: :schedule, scale: 1 FitParser::File::Definitions.add_field 28, 6, 'scheduled_time', type: :local_date_time FitParser::File::Definitions.add_name 33, 'totals' FitParser::File::Definitions.add_field 33, 254, 'message_index', type: :message_index FitParser::File::Definitions.add_field 33, 253, 'timestamp', type: :date_time, unit: 's' FitParser::File::Definitions.add_field 33, 0, 'timer_time', type: :uint32, unit: 's' FitParser::File::Definitions.add_field 33, 1, 'distance', type: :uint32, unit: 'm' FitParser::File::Definitions.add_field 33, 2, 'calories', type: :uint32, unit: 'kcal' FitParser::File::Definitions.add_field 33, 3, 'sport', type: :sport FitParser::File::Definitions.add_field 33, 4, 'elapsed_time', type: :uint32, unit: 's' FitParser::File::Definitions.add_field 33, 5, 'sessions', type: :uint16 FitParser::File::Definitions.add_field 33, 6, 'active_time', type: :uint32, unit: 's' FitParser::File::Definitions.add_field 33, 9, 'sport_index', type: :uint8 FitParser::File::Definitions.add_name 30, 'weight_scale' FitParser::File::Definitions.add_field 
30, 253, 'timestamp', type: :date_time, scale: 1, unit: 's' FitParser::File::Definitions.add_field 30, 0, 'weight', type: :weight, scale: 100, unit: 'kg' FitParser::File::Definitions.add_field 30, 1, 'percent_fat', type: :uint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 30, 2, 'percent_hydration', type: :uint16, scale: 100, unit: '%' FitParser::File::Definitions.add_field 30, 3, 'visceral_fat_mass', type: :uint16, scale: 100, unit: 'kg' FitParser::File::Definitions.add_field 30, 4, 'bone_mass', type: :uint16, scale: 100, unit: 'kg' FitParser::File::Definitions.add_field 30, 5, 'muscle_mass', type: :uint16, scale: 100, unit: 'kg' FitParser::File::Definitions.add_field 30, 7, 'basal_met', type: :uint16, scale: 4, unit: 'kcal/day' FitParser::File::Definitions.add_field 30, 8, 'physique_rating', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 30, 9, 'active_met', type: :uint16, scale: 4, unit: 'kcal/day' FitParser::File::Definitions.add_field 30, 10, 'metabolic_age', type: :uint8, scale: 1, unit: 'years' FitParser::File::Definitions.add_field 30, 11, 'visceral_fat_rating', type: :uint8, scale: 1 FitParser::File::Definitions.add_field 30, 12, 'user_profile_index', type: :message_index FitParser::File::Definitions.add_name 51, 'blood_pressure' FitParser::File::Definitions.add_field 51, 253, 'timestamp', type: :date_time, scale: 1, unit: 's' FitParser::File::Definitions.add_field 51, 0, 'systolic_pressure', type: :uint16, scale: 1, unit: 'mmHg' FitParser::File::Definitions.add_field 51, 1, 'diastolic_pressure', type: :uint16, scale: 1, unit: 'mmHg' FitParser::File::Definitions.add_field 51, 2, 'mean_arterial_pressure', type: :uint16, scale: 1, unit: 'mmHg' FitParser::File::Definitions.add_field 51, 3, 'map_3_sample_mean', type: :uint16, scale: 1, unit: 'mmHg' FitParser::File::Definitions.add_field 51, 4, 'map_morning_values', type: :uint16, scale: 1, unit: 'mmHg' FitParser::File::Definitions.add_field 51, 5, 'map_evening_values', type: :uint16, scale: 1, unit: 'mmHg' FitParser::File::Definitions.add_field 51, 6, 'heart_rate', type: :uint8, scale: 1, unit: 'bpm' FitParser::File::Definitions.add_field 51, 7, 'heart_rate_type', type: :hr_type FitParser::File::Definitions.add_field 51, 8, 'status', type: :bp_status FitParser::File::Definitions.add_field 51, 9, 'user_profile_index', type: :message_index FitParser::File::Definitions.add_name 103, 'monitoring_info' FitParser::File::Definitions.add_field 103, 253, 'timestamp', type: :date_time FitParser::File::Definitions.add_field 103, 0, 'local_timestamp', type: :local_date_time FitParser::File::Definitions.add_field 103, 1, 'activity_type', type: :activity_type FitParser::File::Definitions.add_field 103, 3, 'cycles_to_distance', type: :uint16, scale: 5000, unit: 'm/cycle' FitParser::File::Definitions.add_field 103, 4, 'cycles_to_calories', type: :uint16, scale: 5000, unit: 'kcal/cycle' FitParser::File::Definitions.add_field 103, 5, 'resting_metabolic_rate', type: :uint16, unit: 'kcal / day' FitParser::File::Definitions.add_name 55, 'monitoring' FitParser::File::Definitions.add_field 55, 253, 'timestamp', type: :date_time, unit: 's' FitParser::File::Definitions.add_field 55, 0, 'device_index', type: :device_index FitParser::File::Definitions.add_field 55, 1, 'calories', type: :uint16, unit: 'kcal' FitParser::File::Definitions.add_field 55, 2, 'distance', type: :uint32, scale: 100, unit: 'm' FitParser::File::Definitions.add_field 55, 3, 'cycles', type: :uint32, scale: 2, unit: 'cycles' 
FitParser::File::Definitions.add_field 55, 3, 'steps', type: :uint32, scale: 1, unit: 'steps', ref_field_name: 'activity_type', ref_field_values: [:walking, :running] FitParser::File::Definitions.add_field 55, 3, 'strokes', type: :uint32, scale: 2, unit: 'strokes', ref_field_name: 'activity_type', ref_field_values: [:cycling, :swimming] FitParser::File::Definitions.add_field 55, 4, 'active_time', type: :uint32, scale: 1000, unit: 's' FitParser::File::Definitions.add_field 55, 5, 'activity_type', type: :activity_type FitParser::File::Definitions.add_field 55, 6, 'activity_subtype', type: :activity_subtype FitParser::File::Definitions.add_field 55, 7, 'activity_level', type: :activity_level FitParser::File::Definitions.add_field 55, 8, 'distance_16', type: :uint16, unit: '100 * m' FitParser::File::Definitions.add_field 55, 9, 'cycles_16', type: :uint16, unit: '(steps)' FitParser::File::Definitions.add_field 55, 10, 'active_time_16', type: :uint16, unit: 's' FitParser::File::Definitions.add_field 55, 11, 'local_timestamp', type: :local_date_time FitParser::File::Definitions.add_field 55, 12, 'temperature', type: :sint16, scale: 100, unit: 'C' FitParser::File::Definitions.add_field 55, 14, 'temperature_min', type: :sint16, scale: 100, unit: 'C' FitParser::File::Definitions.add_field 55, 15, 'temperature_max', type: :sint16, scale: 100, unit: 'C' FitParser::File::Definitions.add_field 55, 16, 'activity_time', type: :uint16, unit: 'minutes' FitParser::File::Definitions.add_field 55, 19, 'active_calories', type: :uint16, unit: 'kcal' FitParser::File::Definitions.add_field 55, 24, 'current_activity_type_intensity', type: :byte FitParser::File::Definitions.add_field 55, 25, 'timestamp_min_8', type: :uint8, unit: 'min' FitParser::File::Definitions.add_field 55, 26, 'timestamp_16', type: :uint16, unit: 's' FitParser::File::Definitions.add_field 55, 27, 'heart_rate', type: :uint8, unit: 'bpm' FitParser::File::Definitions.add_field 55, 28, 'intensity', type: :uint8, scale: 10 FitParser::File::Definitions.add_field 55, 29, 'duration_min', type: :uint16, unit: 'min' FitParser::File::Definitions.add_field 55, 30, 'duration', type: :uint32, unit: 's' FitParser::File::Definitions.add_field 55, 31, 'ascent', type: :uint32, scale: 1000, unit: 'm' FitParser::File::Definitions.add_field 55, 32, 'descent', type: :uint32, scale: 1000, unit: 'm' FitParser::File::Definitions.add_field 55, 33, 'moderate_activity_minutes', type: :uint16, unit: 'minutes' FitParser::File::Definitions.add_field 55, 34, 'vigorous_activity_minutes', type: :uint16, unit: 'minutes' FitParser::File::Definitions.add_name(145, 'memo_glob') FitParser::File::Definitions.add_field(145, 250, 'part_index', type: :uint32) FitParser::File::Definitions.add_field(145, 0, 'memo', type: :byte) FitParser::File::Definitions.add_field(145, 1, 'message_number', type: :uint16) FitParser::File::Definitions.add_field(145, 2, 'message_index', type: :message_index) <file_sep>/spec/file/type_spec.rb require 'spec_helper' describe FitParser::File::Type do before :all do @types = FitParser::File::Types.types FitParser::File::Types.add_type(:int_type, :sint8) FitParser::File::Types.add_type( :int_type_with_val, :uint8, values: { 1 => 'one', 2 => 'two', 3 => 'three' } ) end after :all do FitParser::File::Types.types = @types end describe '.get_type' do context 'when valid name' do it 'returns a type' do expect(described_class.get_type(:int_type)) .to be_a(FitParser::File::Type) end it 'returns always the same instance' do expect(described_class.get_type(:int_type)) 
          .to eql(described_class.get_type(:int_type))
        end
      end

      context 'when invalid name' do
        it 'returns nil' do
          expect(described_class.get_type(:unknown_type)).to be_nil
        end
      end
    end

    describe '#value' do
      context 'when type has values' do
        let(:type) { described_class.get_type(:int_type_with_val) }

        context 'known value requested' do
          it 'returns the value' do
            expect(type.value(2)).to eql 'two'
          end
        end

        context 'unknown value requested' do
          it 'returns the input value' do
            expect(type.value(999)).to eql 999
          end
        end

        context 'when invalid value is requested' do
          it 'returns nil' do
            expect(type.value(255)).to be_nil
            expect(type.value(0xFF)).to be_nil
          end
        end
      end

      context 'when type has date_time value' do
        let(:type) { described_class.get_type(:date_time) }

        it 'returns the date' do
          expect(type.value(790_509_304)).to eq('2015-01-18 09:55:04 UTC')
        end
      end

      context 'when type has message_index value' do
        let(:type) { described_class.get_type(:message_index) }

        it 'returns the message_index' do
          expect(type.value(10)).to eq(10)
          expect(type.value(32_778)).to eq(10)
          expect(type.value(28_682)).to eq(10)
        end
      end

      context 'when type has file_flags value' do
        let(:type) { described_class.get_type(:file_flags) }

        it 'returns the file_flags' do
          expect(type.value(10)).to eq('read/erase')
          expect(type.value(0x0A)).to eq('read/erase')
        end
      end

      context 'when type has bool value' do
        let(:type) { described_class.get_type(:bool) }

        it 'returns the boolean value' do
          expect(type.value(0)).to eq(false)
          expect(type.value(1)).to eq(true)
          expect(type.value(255)).to be_nil
        end
      end

      context 'when type has no value' do
        it 'returns nil' do
          type = described_class.get_type(:int_type)
          expect(type.value(1)).to eql 1
        end
      end
    end
  end
<file_sep>/lib/fit_parser/file.rb
module FitParser
  class File
    MSG_NUM_FIELD_DESCRIPTION = 206

    def self.read(io)
      new.read(io)
    end

    attr_reader :header, :records, :crc

    def initialize
      @records = []
    end

    def read(io)
      @header = Header.read(io)
      definitions = {}
      dev_definitions = {}

      while io.pos < @header.end_pos
        record = Record.new(definitions, dev_definitions)
        @records << record.read(io)
        content = record.content

        if content[:global_message_number] == MSG_NUM_FIELD_DESCRIPTION
          field_definition_local_message_type = record.header.local_message_type
        end

        if !content[:global_message_number] && field_definition_local_message_type &&
           record.header.local_message_type == field_definition_local_message_type
          dev_definitions[content[:raw_field_0].to_s] ||= {}
          dev_definitions[content[:raw_field_0].to_s][content[:raw_field_1].to_s] = content
        end

        definitions = record.definitions
      end
      @crc = io.read(2)
      self
    end
  end
end
<file_sep>/lib/fit_parser.rb
require 'bindata'
require 'active_support'
require 'active_support/core_ext/class'

require 'fit_parser/file'
require 'fit_parser/file/header'
require 'fit_parser/file/record'
require 'fit_parser/file/record_header'
require 'fit_parser/file/types'
require 'fit_parser/file/type'
require 'fit_parser/file/definition'
require 'fit_parser/file/data'
require 'fit_parser/file/definitions'
require 'fit_parser/version'
require 'bindata/dsl_field_validator'

module FitParser
  def self.load_file(path)
    File.read(::File.open(path))
  end
end
<file_sep>/bin/validator.sh
#!/usr/bin/env bash

SCORE=${SCORE:-40}

if [[ -z $GITDIFF ]]; then
  ruby_files="lib/ spec/"
  app_files="lib/"
else
  ruby_files=`git diff --diff-filter ACMRTUXB --name-only $GITDIFF lib/ spec/ | xargs`
  app_files=`git diff --diff-filter ACMRTUXB --name-only $GITDIFF lib/ | xargs`
fi

echo -e "\n\n\nCode style guide compliance results\n"
bundle exec warder --style-guide --stats $ruby_files
style_guide_exit_code=$?

echo -e "\n\n\nMagic numbers results\n"
bundle exec warder --magick-numbers --stats $app_files
magick_numbers_exit_code=$?

echo -e "\n\n\nCode duplication results\n"
bundle exec warder --code-duplication --stats $app_files
code_duplication_exit_code=$?

echo -e "\n\n\nCode complexity results\n"
bundle exec warder --code-complexity --stats $app_files
code_complexity_exit_code=$?

echo -e "\n\n\nCode smells detector result\n"
bundle exec warder --code-smell --stats $app_files
code_smells_exit_code=$?

if [[ -z $GITREF ]]; then
  exit $(($style_guide_exit_code+$magick_numbers_exit_code+$code_smells_exit_code)); # +$code_complexity_exit_code+$code_duplication_exit_code
else
  exit 0; # $(($style_guide_exit_code+$magick_numbers_exit_code+$code_complexity_exit_code+$code_smells_exit_code+$code_duplication_exit_code));
fi
70bb65b49893af22b59f580b7b5b01e1fadb91d4
[ "Markdown", "Ruby", "Shell" ]
12
Ruby
zhublik/fit_parser
f360cdde4bdcb1ab87ddf23a2c4b45dd95371edf
838957eae86ae7dd3ec73de86e2bff5c28801f19
refs/heads/master
<repo_name>taigamatsuo/TechChan-<file_sep>/testApp/LoginViewController.swift
//
//  LoginViewController.swift
//  testApp
//
//  Created by 松尾大雅 on 2019/09/30.
//  Copyright © 2019 litech. All rights reserved.
//

import UIKit
import FirebaseAuth

class LoginViewController: UIViewController {

    @IBOutlet weak var mailAddressText: UITextField!
    @IBOutlet weak var passwordText: UITextField!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    @IBAction func onTouchedSignUpButton(_ sender: Any) {
        if let email = mailAddressText.text, let password = passwordText.text {
            Auth.auth().createUser(withEmail: email, password: password) { [weak self] (authResult, error) in
                self?.validateAuthenticationResult(authResult, error: error)
            }
        }
    }

    @IBAction func onTouchedLogInButton(_ sender: Any) {
        if let email = mailAddressText.text, let password = passwordText.text {
            Auth.auth().signIn(withEmail: email, password: password) { [weak self] (authResult, error) in
                _ = self?.validateAuthenticationResult(authResult, error: error)
            }
        }
    }

    private func validateAuthenticationResult(_ authResult: AuthDataResult?, error: Error?) {
        if let error = error {
            let alert = UIAlertController(title: "Error", message: error.localizedDescription, preferredStyle: .alert)
            alert.addAction(UIAlertAction(title: "OK", style: .default))
            present(alert, animated: true, completion: nil)
        } else {
            performSegue(withIdentifier: "talkSegue", sender: self)
        }
    }
}
<file_sep>/Podfile
source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '13.0'

use_frameworks!

target 'testApp' do
  pod 'Firebase/Core'
  pod 'Firebase/Database'
  pod 'Firebase/Auth'
end
de3e4e6714d4511d72c5db9406c99ac7090eb72e
[ "Swift", "Ruby" ]
2
Swift
taigamatsuo/TechChan-
0779e8cd2e6bd42c3fe01b6770d2a2c7a1c31ab4
bbd247b5e98c1a249c3dcbc310287c875d0ae8c3
refs/heads/master
<repo_name>tmaeda13/test04___vscode<file_sep>/test01vscode/js/script.js
// page-top
$(document).ready(function(){
  $("#page-top").hide();

  $(window).on("scroll", function() {
    if ($(this).scrollTop() > 100) {
      $("#page-top").fadeIn("fast");
    } else {
      $("#page-top").fadeOut("fast");
    }

    scrollHeight = $(document).height();
    scrollPosition = $(window).height() + $(window).scrollTop();
    footHeight = $("footer").innerHeight();

    if ( scrollHeight - scrollPosition <= footHeight ) {
      $("#page-top").css({
        "position":"fixed",
        "bottom": "20px",
        "right": "20px",
        "z-index":"1000"
      });
    } else {
      $("#page-top").css({
        "position":"fixed",
        "bottom": "20px",
        "right": "20px",
        "z-index":"1000"
      });
    }
  });

  $('#page-top').click(function () {
    $('body,html').animate({ scrollTop: 0 }, 400);
    return false;
  });
});
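Editor's note: the script above implements a jQuery "back to top" button. As an illustrative aside that is not part of the tmaeda13/test04___vscode repository, the same show/hide-and-scroll behaviour can be sketched without jQuery; the `#page-top` element id and the 100px threshold are taken from the script, while the fade animation and footer-offset handling are omitted for brevity.

```ts
// Minimal dependency-free back-to-top toggle (illustrative sketch only).
const SCROLL_THRESHOLD = 100; // same threshold as the jQuery version above

document.addEventListener('DOMContentLoaded', () => {
  const pageTop = document.querySelector<HTMLElement>('#page-top');
  if (!pageTop) return;

  pageTop.style.display = 'none';

  window.addEventListener('scroll', () => {
    // Show the button only once the page has been scrolled past the threshold.
    pageTop.style.display = window.scrollY > SCROLL_THRESHOLD ? 'block' : 'none';
  });

  pageTop.addEventListener('click', (event) => {
    event.preventDefault();
    // Native smooth scrolling replaces jQuery's animate({ scrollTop: 0 }).
    window.scrollTo({ top: 0, behavior: 'smooth' });
  });
});
```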
b3ee88b206f8f3105767ec122440a07e0bf1afdf
[ "JavaScript" ]
1
JavaScript
tmaeda13/test04___vscode
e65db917ab0a339cbb8cfed014eb6e84859c2e4f
4d0f8f3c936eee1f8e59c6b7870ebf8bf214eb8d
refs/heads/main
<file_sep>--- - name: "Remove any older, conflicting docker packages." apt: name: "{{ item }}" state: absent with_items: - docker - docker-engine - docker.io - containerd - runc - name: "Install Docker's dependencies." apt: name: "{{ item }}" state: latest update_cache: yes with_items: - apt-transport-https - ca-certificates - curl - gnupg-agent - software-properties-common - python-pip - python3 - python3-pip - name: "Get the docker GPG key." apt_key: url: https://download.docker.com/linux/ubuntu/gpg state: present - name: "Add our docker packages to the APT repository." apt_repository: repo: deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename|lower }} stable - name: "Install Docker." apt: name: "{{ item }}" state: present update_cache: yes with_items: - docker-ce - docker-ce-cli - containerd.io - name: "Install the Python Docker module." pip: name: docker state: present - name: "Adding the ansible user to the docker linux group..." user: name: "{{ ansible_user }}" group: docker - name: "Ensure that the docker service is enabled on boot." service: name: docker enabled: yes state: started - name: "Create the docker group." group: name: docker state: present - name: "Add the ansible host user to the docker group." user: append: yes # Ensures that we don't remove from any other group. name: "{{ ansible_user }}" group: docker - name: "Prune any old docker resources." docker_prune: containers: yes images: yes networks: yes volumes: yes builder_cache: yes ...
312e2c201658158d7f4b08e87eab42565c5a6dfe
[ "Shell" ]
1
Shell
JCashen/playbook
9b310b30b75b948d5b20bc445d9b088751e4b6ee
b35d1a2c1073d8fd4f4b525db850f2a6076b2c1d
refs/heads/master
<repo_name>iCodeIN/podman<file_sep>/pkg/machine/pull.go
package machine

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"os/exec"
	"strings"
	"time"

	"github.com/sirupsen/logrus"
	"github.com/vbauerster/mpb/v6"
	"github.com/vbauerster/mpb/v6/decor"
)

// DownloadVMImage downloads a VM image from url to given path
// with download status
func DownloadVMImage(downloadURL fmt.Stringer, localImagePath string) error {
	out, err := os.Create(localImagePath)
	if err != nil {
		return err
	}
	defer func() {
		if err := out.Close(); err != nil {
			logrus.Error(err)
		}
	}()

	resp, err := http.Get(downloadURL.String())
	if err != nil {
		return err
	}
	defer func() {
		if err := resp.Body.Close(); err != nil {
			logrus.Error(err)
		}
	}()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("error downloading VM image: %s", resp.Status)
	}

	size := resp.ContentLength
	urlSplit := strings.Split(downloadURL.String(), "/")
	prefix := "Downloading VM image: " + urlSplit[len(urlSplit)-1]
	onComplete := prefix + ": done"

	p := mpb.New(
		mpb.WithWidth(60),
		mpb.WithRefreshRate(180*time.Millisecond),
	)

	bar := p.AddBar(size,
		mpb.BarFillerClearOnComplete(),
		mpb.PrependDecorators(
			decor.OnComplete(decor.Name(prefix), onComplete),
		),
		mpb.AppendDecorators(
			decor.OnComplete(decor.CountersKibiByte("%.1f / %.1f"), ""),
		),
	)

	proxyReader := bar.ProxyReader(resp.Body)
	defer func() {
		if err := proxyReader.Close(); err != nil {
			logrus.Error(err)
		}
	}()

	if _, err := io.Copy(out, proxyReader); err != nil {
		return err
	}

	p.Wait()
	return nil
}

// Will error out if file without .xz already exists
// Maybe extracting then renaming is a good idea here..
// depends on xz: not pre-installed on mac, so it becomes a brew dependency
func decompressXZ(src string, output io.Writer) error {
	fmt.Println("Extracting compressed file")
	cmd := exec.Command("xzcat", "-k", src)
	//cmd := exec.Command("xz", "-d", "-k", "-v", src)
	stdOut, err := cmd.StdoutPipe()
	if err != nil {
		return err
	}
	//cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	go func() {
		if _, err := io.Copy(output, stdOut); err != nil {
			logrus.Error(err)
		}
	}()
	return cmd.Run()
}
<file_sep>/cmd/podman/machine/machine_unsupported.go
// +build !amd64 arm64,linux amd64,windows

package machine

func init() {}
99e4531c9d83b290dc6a74d8c16c26c44fb7a8c0
[ "Go" ]
2
Go
iCodeIN/podman
aa96cb6739303a4bf76aeb6395e8ef79ff8ef0ee
6ff31956feb4c629a295b24e207b6e72de4e97c6
refs/heads/master
<repo_name>xt0rted/ClassLibrary4<file_sep>/ClassLibrary4/Class1.cs
namespace ClassLibrary4
{
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading.Tasks;
    using Microsoft.EntityFrameworkCore;
    using NUnit.Framework;
    using Shouldly;

    [TestFixture]
    public class Class1
    {
        private DbContextOptions<MyDbContext> _dbOptions;

        [SetUp]
        public void SetUp()
        {
            _dbOptions = new DbContextOptionsBuilder<MyDbContext>()
                .UseInMemoryDatabase("things")
                .Options;

            using (var db = new MyDbContext(_dbOptions))
            {
                db.Profiles.Add(new Profile { Id = 1, Name = "Profile 1", PrimaryPhotoId = 2 });

                db.Photos.Add(new Photo { Id = 1, Name = "Photo 1" });
                db.Photos.Add(new Photo { Id = 2, Name = "Photo 2" });
                db.Photos.Add(new Photo { Id = 3, Name = "Photo 3" });

                db.SaveChanges();
            }
        }

        [Test]
        public async Task Should_update_PrimaryPhotoId_and_delete_the_old_one()
        {
            using (var db = new MyDbContext(_dbOptions))
            {
                var profile = db.Profiles.First(m => m.Id == 1);
                var photo = db.Photos.First(m => m.Id == 2);

                // update primary photo & delete the current one
                profile.PrimaryPhotoId = 3;
                db.Photos.Remove(photo);

                await db.SaveChangesAsync();
            }

            using (var db = new MyDbContext(_dbOptions))
            {
                // photo is deleted correctly
                db.Photos.Count().ShouldBe(2);

                var profile = db.Profiles.First(m => m.Id == 1);

                // primary photo id is null instead of 3
                profile.PrimaryPhotoId.ShouldBe(3);
            }
        }
    }

    public class MyDbContext : DbContext
    {
        public MyDbContext(DbContextOptions<MyDbContext> options)
            : base(options)
        {
        }

        public DbSet<Profile> Profiles { get; set; }

        public DbSet<Photo> Photos { get; set; }

        protected override void OnModelCreating(ModelBuilder builder)
        {
            builder.Entity<Profile>(entity =>
            {
                entity.HasKey(e => e.Id);

                entity.HasIndex(e => e.PrimaryPhotoId);

                entity.HasOne(e => e.PrimaryPhoto)
                    .WithMany(e => e.Profiles)
                    .HasForeignKey(e => e.PrimaryPhotoId);
            });

            builder.Entity<Photo>(entity =>
            {
                entity.HasKey(e => e.Id);
            });
        }
    }

    public class Profile
    {
        public int Id { get; set; }

        public string Name { get; set; }

        public int? PrimaryPhotoId { get; set; }

        public Photo PrimaryPhoto { get; set; }
    }

    public class Photo
    {
        public int Id { get; set; }

        public string Name { get; set; }

        public ICollection<Profile> Profiles { get; set; } = new HashSet<Profile>();
    }
}
<file_sep>/README.md
# EF Core In-Memory 🐛

This project demonstrates an issue with the In-Memory provider when you update a foreign key property and delete the old foreign key item in the same transaction.

The expected result is the `PrimaryPhotoId` should be `3` but instead is `null`.
7c94ebfb46e073b39a4cb1bc2f7d82f7a311bdc0
[ "Markdown", "C#" ]
2
C#
xt0rted/ClassLibrary4
27809e437a28c17a6070797511bd0cb661f25cf9
80ad503fc003de1ff05cfeac93c1f0fc7fbc0846
refs/heads/main
<file_sep># Opportunity Calendar

> a webapp that lists opportunities available at different organisations and
> companies. Ideal for students who never want to miss any opportunities. This
> webapp is a modified version of
> [Girl-Code-It/Opportunity-Calendar](https://github.com/Girl-Code-It/Opportunity-Calendar-Frontend)

[Demo](https://opp-cal.vercel.app)

## Features

This webapp has multiple features. Following are some of the important ones.

- [x] Account Creation
- [x] Post new Opportunities
- [x] Anyone can view opportunities
- [x] Apply through the website
- [x] A digital calendar listing all the opportunities date-wise
- [ ] Save Opportunity as 'Favorite'
- [ ] Refer Applicants (via Girl Code It)

## Flow Chart

![flow chart](./docs/opp-cal-sys-design.png)

## Technologies Used

- [TypeScript](https://www.typescriptlang.org/)
- [React](https://reactjs.org)
- [Next.js](https://nextjs.org)
- [Supabase](https://supabase.io/)
- [PostgreSQL](https://www.postgresql.org/)

## License

MIT License

Copyright (c) 2021 Abdus

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
<file_sep>import { useToasts } from 'react-toast-notifications';
import { supabase } from '../utils/supabase';

type PgError = {
  code: string;
  hint: string;
  details: string;
  message: string;
};

export function useInsertIntoDB<T>() {
  type ResponseType = {
    error: PgError | null;
    data: T[] | null;
  };

  const toast = useToasts();

  return (table_name: string, data: any): PromiseLike<ResponseType> =>
    supabase
      .from<T>(table_name)
      .insert(data)
      .then(
        (resp): ResponseType => {
          if (resp.error) {
            toast.addToast(resp.error.message, { appearance: 'error' });
          }
          if (resp.data) {
            toast.addToast('Successfully Added Opportunity', {
              appearance: 'success',
            });
          }

          return resp;
        },
      );
}
<file_sep>#!/bin/bash

# bash script to generate a component. you are free to create components
# manually. the purpose of this script is to keep things consistent.
# and yeah, I know I could write it in JavaScript. but I just felt like writing
# in Bash.

# the first parameter passed to this script will be considered as the component
# name. please provide the name in snake-case. for example: root-layout

CSS_MODULE_NAME="$(echo $1 | sed -r 's/\ /-/g')".module.css
COMPONENT_DIR_NAME=$(echo $1 | sed -r 's/(^|-|_|\ )([a-z])/\U\2/g')

# gen_boilerplate_code() -> string
# generates boilerplate code for the component.
# @param {string} 1 - name of the component
# @param {string} 2 - path of the css module file
function gen_boilerplate_code() {
  CODE="import React from 'react';
import classes from '$2';

type PropType = {};

export function $1(props: PropType) {
  return <>$1 Works!!</>;
}"

  echo -e "$CODE"
}

################
##   START    ##
################

echo -e "
 +++++++++++++++++++++++++++++++
 ++                           ++
 ++    COMPONENT GENERATOR    ++
 ++            v0.1           ++
 ++                           ++
 +++++++++++++++++++++++++++++++
"

# create components dir if it does not exists
if [ ! -d ./components ]; then
  mkdir -p ./components
fi

if [ -d ./components/$COMPONENT_DIR_NAME ]; then
  echo "==> Component '$COMPONENT_DIR_NAME' exists!"
  exit 1
else
  echo "==> Creating ./components/$COMPONENT_DIR_NAME ..."
  mkdir -p ./components/$COMPONENT_DIR_NAME/

  echo "==> Creating ./components/$COMPONENT_DIR_NAME/index.tsx ..."
  touch ./components/$COMPONENT_DIR_NAME/index.tsx

  echo "==> Creating ./components/$COMPONENT_DIR_NAME/$CSS_MODULE_NAME ..."
  touch ./components/$COMPONENT_DIR_NAME/$CSS_MODULE_NAME

  echo "==> Generating boilerplate code ..."
  gen_boilerplate_code $COMPONENT_DIR_NAME ./$CSS_MODULE_NAME \
    >./components/$COMPONENT_DIR_NAME/index.tsx

  echo "==> DONE!!"
fi
<file_sep>interface ICallBack {
  callbackFn(): void;
}

export const throttle = (() => {
  let timer: ReturnType<typeof setTimeout> | undefined;

  return (callback: ICallBack['callbackFn'], time = 1000) => {
    if (timer) return;

    timer = setTimeout(() => {
      callback.call(null);
      timer = undefined;
    }, time);
  };
})();
<file_sep>// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';

async function locationFetcher(text: string) {
  const raw = await fetch(
    `https://autocomplete.geocoder.ls.hereapi.com/6.2/suggest.json?apiKey=${process.env.NEXT_PUBLIC_HEREJS_API_KEY}&query=${text}&beginHighlight=<b>&endHighlight=</b>`,
  );
  const json = await raw.json();

  return json;
}

// eslint-disable-next-line
export default async (req: NextApiRequest, res: NextApiResponse) => {
  const searchText = req.query.search;
  const json = await locationFetcher(
    Array.isArray(searchText) ? searchText[0] : searchText,
  );

  res.status(200).json(json);
};
<file_sep>function generateRandomString(size: number): string {
  const allowed = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
  let text = '';

  for (let j = 0; j <= size; j += 1) {
    text += allowed.charAt(Math.floor(Math.random() * allowed.length));
  }

  return text;
}

export function* generateComponentKey() {
  while (true) {
    yield generateRandomString(10);
  }
}
<file_sep>import { useToasts } from 'react-toast-notifications';

export function useFormValidator() {
  const toast = useToasts();

  return <T>(
    value: any,
    validatorFn: (value: any) => boolean,
    inputName?: string,
  ): T => {
    if (validatorFn(value)) {
      return value as T;
    }

    toast.addToast(`Form Input Validation Failed for ${inputName}`, {
      appearance: 'error',
    });
    throw Error(`Form Input Validation Failed for ${inputName}`);
  };
}

// validators ..
function fnValidateString(value: any) {
  return value && typeof value === 'string';
}

export const validators = {
  fnValidateString,
};
<file_sep>import { createClient } from '@supabase/supabase-js';

const url = process.env.NEXT_PUBLIC_SUPABASE_URL;
const anonKey = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY;

if (!url || !anonKey) {
  throw Error('cannot create supabase client without url and anon key');
}

export const supabase = createClient(url, anonKey);
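Editor's note: the `useInsertIntoDB` hook in this record returns a curried insert function instead of performing the insert itself. The sketch below is not a file from this repository; it only illustrates how a form component might consume the hook. The `Opportunity` shape, the `opportunities` table name, and the import path are assumptions, and the component would need to render under a `ToastProvider` for `useToasts` to work.

```tsx
import React from 'react';
import { useInsertIntoDB } from '../hooks/useInsertIntoDB'; // hypothetical import path

// Hypothetical row shape; the real schema lives in the Supabase project.
type Opportunity = { title: string; deadline: string; url: string };

export function NewOpportunityForm() {
  const insertIntoDB = useInsertIntoDB<Opportunity>();

  const handleSubmit = async (event: React.FormEvent<HTMLFormElement>) => {
    event.preventDefault();
    const formEl = event.currentTarget;
    const form = new FormData(formEl);

    // The hook shows its own success/error toasts; here we only await the result.
    const { error } = await insertIntoDB('opportunities', {
      title: String(form.get('title') ?? ''),
      deadline: String(form.get('deadline') ?? ''),
      url: String(form.get('url') ?? ''),
    });

    if (!error) formEl.reset();
  };

  return (
    <form onSubmit={handleSubmit}>
      <input name="title" placeholder="Title" />
      <input name="deadline" type="date" />
      <input name="url" placeholder="Link" />
      <button type="submit">Post Opportunity</button>
    </form>
  );
}
```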
c77db89c5e045c5e7480394344516524edbaa718
[ "Markdown", "TypeScript", "Shell" ]
8
Markdown
app-johndpope/opp-cal
ca1ba8336dfead89f47dc1344d7456066589d83c
03d48d7fef524218662a80132b564131b72de5d5
refs/heads/master
<file_sep>var express = require("express");
var bodyParser = require('body-parser');
var mysql = require('mysql');
var exphbs = require('express-handlebars');

var app = express();

// var client = mysql.createConnection({
//   host     : 'localhost',
//   user     : 'root',
//   password : '<PASSWORD>',
//   database : 'wishlist_db'
// });

app.engine('handlebars', exphbs({defaultLayout: 'main'}));
app.set('view engine', 'handlebars');

app.get('/wishlist', function (req, res) {
  res.render('wishlist', wishlist);
});

app.listen(3000);
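Editor's note: in the route above, `res.render('wishlist', wishlist)` references a `wishlist` variable that is never defined in this snippet, and the MySQL connection that would presumably supply it is commented out. As an illustrative sketch only (not part of this repository), and assuming a Handlebars `wishlist` view that iterates over an `items` array, the handler would typically pass an explicit view-model object:

```ts
import express from 'express';
import exphbs from 'express-handlebars';

const app = express();

app.engine('handlebars', exphbs({ defaultLayout: 'main' }));
app.set('view engine', 'handlebars');

// Placeholder data standing in for the commented-out MySQL query.
const wishlist = {
  items: [
    { name: 'Mechanical keyboard', price: 120 },
    { name: 'Noise-cancelling headphones', price: 250 },
  ],
};

app.get('/wishlist', (req, res) => {
  // Handlebars receives this object as the template context.
  res.render('wishlist', wishlist);
});

app.listen(3000);
```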
26c616225054cfe458f706ae988d8acf9091af93
[ "JavaScript" ]
1
JavaScript
ntekal/2-10-16-Lecture
cde486ed77f034d381e8d8617f5a2bb4f2988e59
3b601fd7ddd724b3a086e93eb1f534908aef9fb5
refs/heads/master
<repo_name>gee842/webgl-particles<file_sep>/README.md # webgl-particles Interactive Particle System written in Web-GL, using vanilla Javascript ## Instructions Watch the pretty colors Modify Settings, and hit Apply Particles to Push the changes. <file_sep>/src/app.js var DRAW_MODE = "gl.LINES"; var SHAPE_VERTEX = 2; var EMIT_RATE = 1; var AUTO_ROTATE = [0.0,0.0,0.0]; var particleList = []; var addqueue = []; var totalFrames = 0; var startTime = 0; var GRAVITY_STRENGTH = 0.00190; const devianceg = 0.0005; const deviancev = 0.003 const seed = 25565; var removal = []; var PARTICLE_LIFE = 130; var INITIAL_VELOCITY = 0; var VELOCITY_VARIANCE = 0.015; var EMIT_COLOR = [0.4,0.0,0.7]; var Particle = function(x,y,z,c,t){ this.x = x; this.y = y; this.z = z; this.v = [0.0,0.0,0.0]; this.c = c; this.t = t; } //MOUUSE CONTROLS function randomInitParticles(number,min,max) { console.log(number,min,max) outs = []; for (var i = 0; i < number; i++) { x = Math.random() * (max - min) + min; y = Math.random() * (max - min) + min; z = Math.random() * (max - min) + min; cr = Math.random() cg = Math.random() cb = Math.random() outs.push(new Particle(x,y,z,[cr,cg,cb],PARTICLE_LIFE)); } return outs; } function InitSquare(number,min,max) { console.log(number,min,max) outs = []; for (var i = 0; i < number; i++) { x = Math.random() * (max - min) + min; y = Math.random() * (3 - 2) + 2; z = Math.random() * (max - min) + min; cr = Math.random() cg = Math.random() cb = Math.random() outs.push(new Particle(x,y,z,[cr,cg,cb],PARTICLE_LIFE)); } return outs; } function EmitSquare(number,min,max) { outs = []; for (var i = 0; i < number; i++) { x = Math.random() * (max - min) + min; y = 4; z = 0; if (EMIT_COLOR[0] == 0) { cr = Math.random() } else { cr = EMIT_COLOR[0]; } if (EMIT_COLOR[1] == 0) { cb = Math.random() } else { cb = EMIT_COLOR[1]; } if (EMIT_COLOR[2] == 0) { cg = Math.random() } else { cg = EMIT_COLOR[2]; } part = new Particle(x,y,z,[cr,cg,cb],PARTICLE_LIFE); part.v[0] = ((Math.random() - 0.5) * 2 * VELOCITY_VARIANCE) ; part.v[1] = ((Math.random() - 0.5) * 2 * VELOCITY_VARIANCE) ; part.v[2] = ((Math.random() - 0.5) * 2 * VELOCITY_VARIANCE) ; outs.push(part); } return outs; } // particleList.push( new Particle(-1.5,1.0,-1.0,[0.5,0.5,0.5])); // particleList.push( new Particle(-1.0,1.0,-1.0,[0.5,0.5,0.5])); // particleList.push( new Particle(1.0,1.0,-1.0,[0.5,0.5,0.5])); function switchColor(gl) { gl.clearColor(Math.random() * 256,Math.random() * 256,Math.random() * 256, 1.0) } function updateParticles(g) { if (addqueue.length > 0) { particleList = particleList.concat(addqueue); addqueue = []; } for (var i = 0; i < particleList.length; i++) { particleList[i].v[1] -= g + (Math.random() - 0.5) * 2 * devianceg; particleList[i].v[0] -= (Math.random() - 0.5) * 2 * deviancev; particleList[i].v[2] -= (Math.random() - 0.5) * 2 * deviancev; particleList[i].x += particleList[i].v[0]; particleList[i].y += particleList[i].v[1]; particleList[i].z += particleList[i].v[2]; particleList[i].t -= 1; if ((particleList[i].t) <= 0) { removal.push(i); } } for (var j = 0; j < removal.length; j++) { particleList.splice(removal[j],SHAPE_VERTEX); } removal = []; } function giveVertexBuffer(particles) { if (!particles) return []; var outp = []; var els = []; for (var i = 0; i < particles.length; i++) { outp.push(particles[i].x); outp.push(particles[i].y); outp.push(particles[i].z); outp.push(particles[i].c[0]); outp.push(particles[i].c[1]); outp.push(particles[i].c[2]); } return outp; } function reverseVelocities(particles) { for (var i = 0; i < 
particles.length; i++) { particles[i].v[0] = 0 - particles[i].v[0]; particles[i].v[1] = 0 - particles[i].v[1] particles[i].v[2] = 0 - particles[i].v[2] } } function giveParticleOrder(particles) { if (!particles) return []; var out = []; for (var i = 0; i < particles.length; i++) { out.push(i); } return out; } function setVelocity(particle,vel) { particle.v = vel; } function randomVelocities(particles,min,max) { for (var i = 0; i < particles.length; i++) { particles[i].v[0] = Math.random() * (max - min) + min; particles[i].v[1] = Math.random() * (max - min) + min; particles[i].v[2] = Math.random() * (max - min) + min; } } var vertexShaderText = [ 'precision mediump float;', '', 'attribute vec3 vertPosition;', 'attribute vec3 vertColor;', 'varying vec3 fragColor;', 'uniform mat4 mWorld;', 'uniform mat4 mView;', 'uniform mat4 mProj;', '', 'void main()', '{', 'fragColor=vertColor;', 'gl_Position = mProj * mView * mWorld * vec4(vertPosition, 1.0);', 'gl_PointSize = 7.0;', '}' ].join('\n'); var fragmentShaderText = [ 'precision mediump float;', '', 'varying vec3 fragColor;', '', 'void main()', '{', 'gl_FragColor=vec4(fragColor,1.0);', '}' ].join("\n"); var InitDemo = function(){ var canvas = document.querySelector('#glCanvas'); var gl = canvas.getContext('webgl'); if (!gl) { gl = canvas.getContext('experimental-webgl'); } canvas.addEventListener("click", switchColor(gl), false); gl.clearColor(1.0,0.7,1.0,1.0); gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); gl.enable(gl.DEPTH_TEST); gl.enable(gl.CULL_FACE); gl.frontFace(gl.CCW); gl.cullFace(gl.BACK); var vertexShader = gl.createShader(gl.VERTEX_SHADER); var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); gl.shaderSource(vertexShader,vertexShaderText); gl.shaderSource(fragmentShader,fragmentShaderText); gl.compileShader(vertexShader); gl.compileShader(fragmentShader); if(!gl.getShaderParameter(vertexShader,gl.COMPILE_STATUS)){ console.error('ERROR compiling vertex shader!', gl.getShaderInfoLog(vertexShader)); return; } if(!gl.getShaderParameter(fragmentShader,gl.COMPILE_STATUS)){ console.error('ERROR compiling fragment shader!', gl.getShaderInfoLog(fragmentShader)); return; } //creates a program: var program = gl.createProgram(); gl.attachShader(program, vertexShader); gl.attachShader(program, fragmentShader); gl.linkProgram(program); if(!gl.getProgramParameter(program,gl.LINK_STATUS)){ console.error('ERROR compiling program!', gl.getProgramInfoLog(program)); return; } var boxVertices = giveVertexBuffer(particleList); // var boxVertices = // [ // X, Y, Z R, G, B // // Top // -1.0, 1.0, -1.0, 0.5, 0.5, 0.5, // -1.0, 1.0, 1.0, 0.5, 0.5, 0.5, // 1.0, 1.0, 1.0, 0.5, 0.5, 0.5, // ]; var boxIndices =giveParticleOrder(particleList); var boxVertexBufferObject = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, boxVertexBufferObject); gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(boxVertices), gl.STATIC_DRAW); var boxIndexBufferObject = gl.createBuffer(); gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boxIndexBufferObject); gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(boxIndices), gl.STATIC_DRAW); var positionAttribLocation = gl.getAttribLocation(program, 'vertPosition'); var colorAttribLocation = gl.getAttribLocation(program, 'vertColor'); gl.vertexAttribPointer( positionAttribLocation, //attribute location 3,//number of elements per attribute gl.FLOAT,//type of elements gl.FALSE, 6 * Float32Array.BYTES_PER_ELEMENT, //number of bytes 4*2 //size of an indiviudal vertexShader 0//offset from beginning of a single vertex to this attribute ) 
gl.vertexAttribPointer( colorAttribLocation, //attribute location 3,//number of elements per attribute gl.FLOAT,//type of elements gl.FALSE, 6 * Float32Array.BYTES_PER_ELEMENT, //number of bytes 4*2 //size of an indiviudal vertexShader 3 * Float32Array.BYTES_PER_ELEMENT//offset from beginning of a single vertex to this attribute ) gl.enableVertexAttribArray(positionAttribLocation); gl.enableVertexAttribArray(colorAttribLocation); gl.useProgram(program); var matWorldUniformLocation = gl.getUniformLocation(program,'mWorld'); var matViewUniformLocation = gl.getUniformLocation(program,'mView'); var matProjUniformLocation = gl.getUniformLocation(program,'mProj'); var worldMatrix = new Float32Array(16); var viewMatrix = new Float32Array(16); var projMatrix = new Float32Array(16); mat4.identity(worldMatrix); mat4.lookAt(viewMatrix,[0,0,-8],[0,0,0],[0,1,0]); mat4.perspective(projMatrix,glMatrix.toRadian(45),canvas.width/canvas.height, 0.1, 1000.0); gl.uniformMatrix4fv(matWorldUniformLocation, gl.FALSE, worldMatrix); gl.uniformMatrix4fv(matViewUniformLocation, gl.FALSE, viewMatrix); gl.uniformMatrix4fv(matProjUniformLocation, gl.FALSE, projMatrix); var xRotationMatrix = new Float32Array(16); var yRotationMatrix = new Float32Array(16); //MAIN RENDER LOOP var identityMatrix = new Float32Array(16); mat4.identity(identityMatrix); console.log(particleList); //particleList = particleList.concat(randomInitParticles(40,1.5,-1.5)); gl.clearColor(Math.random()/0.5,Math.random()/0.5,Math.random()/0.5, 1.0) var angle = 0; totalFrames = 0; startTime = performance.now(); var loop = function(){ totalFrames++; angle = performance.now() / 2000 / 6 * 2 * Math.PI; elapsed = performance.now() - startTime; if (elapsed > 333) { totalFrames = 0; startTime = performance.now(); document.getElementById("vcount").value = particleList.length; } fps = totalFrames/(elapsed/1000); document.getElementById("fps").value = fps; mat4.rotate(worldMatrix, identityMatrix, angle,[AUTO_ROTATE[0],AUTO_ROTATE[1], AUTO_ROTATE[2]]); //mat4.rotate(worldMatrix, identityMatrix, angle*0.7, [-4,0,3]); gl.uniformMatrix4fv(matWorldUniformLocation, gl.FALSE, worldMatrix); gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT); particleList = particleList.concat(EmitSquare(EMIT_RATE,-1.0,1.0)); //UPDATE PARTICLE POSITION HERE updateParticles(GRAVITY_STRENGTH); boxVertices = giveVertexBuffer(particleList); boxIndices = giveParticleOrder(particleList); gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(boxVertices), gl.STATIC_DRAW); gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(boxIndices), gl.STATIC_DRAW); gl.drawElements(eval(DRAW_MODE), boxIndices.length, gl.UNSIGNED_SHORT, 0); requestAnimationFrame(loop); }; requestAnimationFrame(loop); };
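Editor's note on the cull step in updateParticles above: expired particles are collected by index into `removal` and then removed with `particleList.splice(removal[j], SHAPE_VERTEX)`, which deletes two array entries per dead particle (SHAPE_VERTEX is 2) and reuses indices that are already stale once the first splice has shifted the array. A filter-style cull removes each expired particle exactly once. The sketch below is written in Python, the single language used for the editorial examples added to this document, purely to spell out the intended update-and-cull algorithm; the Particle fields are assumptions mirroring the JavaScript above, and this is not a drop-in replacement for app.js.

from dataclasses import dataclass, field
from typing import List

@dataclass
class Particle:
    pos: List[float]                                   # x, y, z
    vel: List[float] = field(default_factory=lambda: [0.0, 0.0, 0.0])
    life: int = 130                                    # mirrors PARTICLE_LIFE

def update_and_cull(particles: List[Particle], gravity: float) -> List[Particle]:
    """Integrate one Euler step, age every particle, then drop expired ones."""
    for p in particles:
        p.vel[1] -= gravity                            # gravity acts on the y component
        for axis in range(3):
            p.pos[axis] += p.vel[axis]
        p.life -= 1
    # Rebuilding the list culls each dead particle exactly once and never
    # depends on indices that shift while elements are being deleted.
    return [p for p in particles if p.life > 0]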
967652e6e33994b77c5757645d46f520c78fac49
[ "Markdown", "JavaScript" ]
2
Markdown
gee842/webgl-particles
446c1a55633df001bd131040188d9e174654b457
002b43f06e2e9cf11c9558e31a8c2fb507548589
refs/heads/master
<repo_name>sreeju123/ProtractorTypeScriptCucumber<file_sep>/JSFiles/pageObjects/calculator.d.ts import { ElementFinder } from "protractor"; export declare class calculator { firstEditBox: ElementFinder; secondEditBox: ElementFinder; go: ElementFinder; getResult: ElementFinder; constructor(); } //# sourceMappingURL=calculator.d.ts.map<file_sep>/JSFiles/cucumberConf.d.ts import { Config } from "protractor"; export declare let config: Config; //# sourceMappingURL=cucumberConf.d.ts.map<file_sep>/stepDefenition/steps.ts import { Given, When, Then } from "@cucumber/cucumber"; import { browser } from "protractor"; import { calculator } from "../pageObjects/calculator"; import { angularHomePage } from "../pageObjects/angularHomePage"; import chai from "chai"; var expect = chai.expect; let calc = new calculator(); let angHome = new angularHomePage(); Given('I will navigate to {string} page', async function (string) { // Write code here that turns the phrase above into concrete actions switch (string) { case "Calc": await browser.get('http://juliemr.github.io/protractor-demo/') break; case "AngularJS": await browser.get('https://angularjs.org/') break; } }); When('I add two numbers {string} and {string}', async function (string, string2) { // Write code here that turns the phrase above into concrete actions await calc.firstEditBox.sendKeys(string); await calc.secondEditBox.sendKeys(string2); }); Then('The output displayed should be {string}', async function (string) { // Write code here that turns the phrase above into concrete actions await calc.go.click(); await calc.getResult.getText().then(function (text) { expect(text).to.equals(string); console.log(text) }) }); When('I clicked on Header link', async function () { // Write code here that turns the phrase above into concrete actions await angHome.angularLink.click(); }); When('You will navigate to angular page', async function () { // Write code here that turns the phrase above into concrete actions console.log("Navigated to New Page") }); Then('You will enter {string} in search box', async function (string) { // Write code here that turns the phrase above into concrete actions await angHome.search.sendKeys(string) await browser.sleep(2000) }); <file_sep>/JSFiles/testSpec.d.ts export {}; //# sourceMappingURL=testSpec.d.ts.map<file_sep>/testSpec.ts import { browser, element, by } from "protractor"; import { angularHomePage } from "./pageObjects/angularHomePage"; import { calculator } from "./pageObjects/calculator"; describe('Chain Locator Demo', function () { it('sample', async function () { //Creating Object let calc = new calculator(); await browser.get('http://juliemr.github.io/protractor-demo/') // repeater, chain locator, and css for identical tags // Without PageObject /**await element(by.model('first')).sendKeys('2'); await element(by.model('second')).sendKeys('5'); await element(by.id('gobutton')).click() element(by.repeater('result in memory')).element(by.css('td:nth-child(3)')).getText().then(function(text){ console.log(text) }) **/ // With PageObject await calc.firstEditBox.sendKeys("3"); await calc.secondEditBox.sendKeys("6"); await calc.go.click(); calc.getResult.getText().then(function (text) { console.log(text) }) // Select dropdown value // element(by.model('operator')).element(by.css('option:nth-child(4)')).click() }) it('Angular Home page Title validation', async function () { let angHome = new angularHomePage(); // Without PageObject /**await browser.get('https://angularjs.org/') await 
element(by.linkText('angular.io')).click(); await element(by.css('input[type="search"]')).sendKeys("hello")**/ // With PageObject await browser.get('https://angularjs.org/') await angHome.angularLink.click(); await angHome.search.sendKeys("hello") }) })<file_sep>/JSFiles/conf.d.ts import { Config } from "protractor"; export declare let config: Config; //# sourceMappingURL=conf.d.ts.map<file_sep>/JSFiles/pageObjects/angularHomePage.d.ts import { ElementFinder } from "protractor"; export declare class angularHomePage { angularLink: ElementFinder; search: ElementFinder; constructor(); } //# sourceMappingURL=angularHomePage.d.ts.map<file_sep>/pageObjects/angularHomePage.ts import { element, ElementFinder, by } from "protractor"; export class angularHomePage { // Another POM Format angularLink: ElementFinder; search: ElementFinder; constructor() { this.angularLink = element(by.linkText('angular.io')); this.search = element(by.css('input[type="search"]')); } // One Format // angularLink = element(by.linkText('angular.io')); // search = element(by.css('input[type="search"]')); }<file_sep>/stepDefenition/hooks.ts import {After, Before, Status} from "@cucumber/cucumber"; import { browser } from "protractor"; Before(function () { // This hook will be executed before all scenarios }); Before({tags: "@calculatortesting"}, function () { // This hook will be executed before scenarios tagged with @foo browser.manage().window().maximize(); }); // Before({tags: "@foo and @bar"}, function () { // // This hook will be executed before scenarios tagged with @foo and @bar // }); // Before({tags: "@foo or @bar"}, function () { // // This hook will be executed before scenarios tagged with @foo or @bar // }); // You can use the following shorthand when only specifying tags Before("@Angulartesting", function () { // This hook will be executed before scenarios tagged with @foo console.log('Starting Angular Testing') }); After({tags: "@calculatortesting"}, function () { // This hook will be executed before scenarios tagged with @foo or @bar console.log('Calculator Testing is completed') }); After(async function (scenario) { if(scenario.result.status==Status.FAILED){ // Take screenshot const screenshot = await browser.takeScreenshot(); this.attach(screenshot,"image/png") } });<file_sep>/JSFiles/stepDefenition/steps.d.ts export {}; //# sourceMappingURL=steps.d.ts.map
ff07c9ffed91c1563404a22a340e7ae84aceec9d
[ "TypeScript" ]
10
TypeScript
sreeju123/ProtractorTypeScriptCucumber
7a5691f7eb29cc7ee40378cecb0d62c396193258
159531c0a7576d4dfaa5a6b8348b89f2f75f5910
refs/heads/master
<repo_name>iAmNawa/alyandtj.com<file_sep>/src/views/home.js const h = require('virtual-dom/h') const ee = require('../events') module.exports = function (state) { return h('div', [ h('.brand', 'Aly & TJ'), h('.address-bar', 'Days since the wedding of <NAME> and <NAME>'), h('nav.navbar.navbar-default', {role:'navigation'}, [ h('.container', [ h('.navbar-header', [ h('button.navbar-toggle', { type:'button', 'data-toggle':'collapse', 'data-target':'#bs-example-navbar-collapse-1' }, [ h('span.sr-only', 'Toggle navigation'), h('span.icon-bar'), h('span.icon-bar'), h('span.icon-bar') ]), h('a.navbar-brand', { href:'/' }, 'alyandtj.com') ]), h('#clockdiv', [ h('#clockElement', [h('span.days.clockNumber', state.days +''), ' Days']), h('#clockElement', [h('span.hours.clockNumber', state.hours+''), ' Hours']), h('#clockElement', [h('span.minutes.clockNumber', state.mins+''), ' Minutes']), h('#clockElement', [h('span.seconds.clockNumber', state.secs+''), ' Seconds']), ]) ]) ]), h('.container', [ h('#noPadding.stopIt.row.box', h('#noPadding2.col-lg-12.text-center', [ h('#carousel-example-generic.carousel.slide', [ h('ol.carousel-indicators.hidden-xs', [ h('li.generic.active', {'data-target':'#carousel-example-generic', 'data-slide-to':'0'}), h('li.generic', {'data-target':'#carousel-example-generic', 'data-slide-to':'1'}), h('li.generic', {'data-target':'#carousel-example-generic', 'data-slide-to':'2'}), h('li.generic', {'data-target':'#carousel-example-generic', 'data-slide-to':'3'}), h('li.generic', {'data-target':'#carousel-example-generic', 'data-slide-to':'4'}), ]), h('.carousel-inner', [ h('.item.active', h('img.img-responsive.img-full', {src:'/img/alyone.jpg',alt:''})), h('.item', h('img.img-responsive.img-full', {src:'/img/origin.jpg',alt:''})), h('.item', h('img.img-responsive.img-full', {src:'/img/thisisone.jpg',alt:''})), h('.item', h('img.img-responsive.img-full', {src:'/img/workplease.jpg',alt:''})), h('.item', h('img.img-responsive.img-full', {src:'/img/no.jpg',alt:''})) ]), h('a.left.carousel-control', { //href:'#carousel-example-generic', //'data-slide':'prev', click: function(e){ console.log('hello, prev') //$('.carousel').carousel('prev') } }, h('span.icon-prev')), h('a.right.carousel-control', { //href:'#carousel-example-generic', //'data-slide':'next', click: function(e){ console.log('hello, next') //$('.carousel').carousel('next') } }, h('span.icon-next')) ]), h('h2.brand-before', h('small', 'The Current')), h('h1.brand-name', 'Mr. & Mrs.'), h('#noteBelow', 'PLEASE LEAVE A NOTE FOR ALY AND TJ BELOW.'), h('hr.tagline-divider'), h('h2', h('small', [ 'By ', h('strong', '<NAME>') ])) ])), h('.stopIt.row.box', [ h('.col-lg-12.text-center', [ h('h2.intro-text.text-center', 'Check out the wedding video here'), h('div', { innerHTML: "<iframe src='https://player.vimeo.com/video/207156155' width='640' height='360' frameborder='0' webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe> <p><a href='https://vimeo.com/207156155'>AlyTjWeddingFinal</a> from <a href='https://vimeo.com/user63777007'><NAME></a> on <a href='https://vimeo.com'>Vimeo</a>.</p>" }) ]), ]), h('.stopIt.row.box', [ h('.col-lg-12.text-center', [ h('hr.tagline-divider'), h('h2.intro-text.text-center', '<NAME> and <NAME>'), h('hr.tagline-divider'), h('img.img-responsive.img-border.img-left', {src:'/img/alytj.jpeg'}), h('hr.tagline-divider.visible-xs'), h('p', 'In January of 2006, TJ and Aly met on a chairlift ride at Heavenly Mountain Resort. 
They have been enjoying the ride ever since.'), h('p', 'The main wedding page can be found'), h('a.hear', { href:'https://www.theknot.com/us/aly-borawski-and-tj-esposito-nov-2016'}, '-here-'), h('br'), h('br'), h('hr.tagline-divider'), h('h2.intro-text.text-center', 'Please leave a note for Aly and TJ below'), h('h2.intro-text.text-center', 'For problems with the messages contact Paul @ 415-246-0586'), h('hr.tagline-divider') ]), h('.col-lg-12', [ h('.form-group', [ h('label', 'message'), h('textarea.form-control') ]), h('.form-group', [ h('label', 'name'), h('input.form-control') ]), h('button.leavemsg.btn.btn-default', { click:function (e) { console.log(e); } },'Submit') ]), ]), h('.stopIt.row.box', [ h('.col-lg-12', state.msgs.map( function(msg){ var msg = msg.split(':::') return h('div', [ h('h4', msg[1]), h('p', '- ' + msg[0]), h('hr') ]) })), h('#thelastthing.col-lg-12') ]), ]), h('footer', h('.container', h('.row', h('.col-lg-12.text-center', h('p', 'Proudly made by Paul in 2016'))))), ]) } <file_sep>/readme.md # VirtualDOM This site uses virtual-dom, a JavaScript DOM model supporting element creation, diff computation and patch operations for efficient re-rendering. # Link to the virtual-dom repo - https://github.com/Matt-Esch/virtual-dom # alyandtj.com This is a countdown timer and website for Aly and TJ. # Contact Contact <EMAIL> if you notice any issues or call the number from the website, alyandtj.com <file_sep>/vdom/server.js const readFileSync = require('fs').readFileSync const writeFile = require('fs').writeFile const stylus = require('stylus') const join = require('path').join styl() function styl() { stylus(readFileSync(join(__dirname, 'p/s.styl'), 'utf8')) .include(join(__dirname, 'p')) .render((err, css)=> writeFile(join(__dirname, 'p/s.css'), css)) } require('browserify')() .add('src/main.js') .bundle() .pipe(require('fs').createWriteStream('p/dot.js')) poopserver('p/index.html', 3000, 'text/html') poopserver('p/dot.js', 3001, 'text/javascript') poopserver('p/s.css', 3002, 'text/css') function poopserver (file, port, type) { require('http') .createServer(function(req, res) { res.writeHead(200, { 'Content-Type': type }); require('fs') .createReadStream(file) .pipe(res) }) .listen(port) } <file_sep>/src/state.js var t = Date.parse('November 11 2016 16:45:00 UTC-0400') - Date.parse(new Date()) module.exports = { page: 'home', pages: [ 'home', 'about', 'services', 'contact' ], count: 0, endtime: 'November 11 2016 16:45:00 UTC-0400', days:0, hours:0, mins:0, secs:0, total:0, msgs:[] } <file_sep>/src/views/services.js const h = require('virtual-dom/h') const ee = require('../events') module.exports = function (state) { return h('div', [ h('h1', 'the ' + state.page + ' page'), h('h3', 'some more shit in the services page..') ]) } <file_sep>/src/views/index.js module.exports = { home: require('./home'), contact: require('./contact'), services: require('./services'), about: require('./about'), }
c26adfb0149172bd6db3fc856d629a12d2f2ad72
[ "JavaScript", "Markdown" ]
6
JavaScript
iAmNawa/alyandtj.com
a342ecdca8e08b1bdd5b2f150c091a73d2c2d925
d79ef6bb4629bf0ae98ef4269ded383b34be8c8b
refs/heads/master
<repo_name>Anthony10700/P8<file_sep>/auth/services/auth_services.py """this file is for including the job code Returns: [type]: [description] """ from django.contrib.auth.hashers import make_password from django.contrib.auth import authenticate, login from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage from django.contrib.auth import get_user_model from auth.forms import CustomUserCreationForm def sign_validation(request): """This method test if a form is valide return to a dictionary Args: request (request): views request Returns: dictionary: "methode": "", "value": "" """ result_dict = {"methode": "", "value": ""} form = CustomUserCreationForm(request.POST) if form.is_valid(): form.clean_password2() form.clean_email() form.clean_pseudo() user = form.save() login(request, user) result_dict["methode"] = "redirect" result_dict["value"] = "account" return result_dict else: result_dict["methode"] = "render" result_dict["value"] = "auth/sign_in.html" result_dict["form"] = form return result_dict def connect_validation(request): """ This method test if connection is valid Args: request (request): views request Returns: dictionary: "methode": "", "value": "" ,"messages":"" """ result_dict = {"methode": "", "value": ""} if 'inputPassword_connect' in request.POST \ and 'inputEmail_connect' in request.POST: email = request.POST['inputEmail_connect'] password = request.POST['inputPassword_connect'] password = make_password(password=<PASSWORD>, salt="1", hasher='pbkdf2_sha256') user_get = get_user_model() try: user_tmp = user_get.objects.get(email=email) except user_get.DoesNotExist: user_tmp = None if user_tmp is not None: user = authenticate(request, username=user_tmp.username, password=<PASSWORD>) else: user = None if user is not None: login(request, user) request.session.set_expiry(3600) request.session.clear_expired() result_dict["methode"] = "redirect" result_dict["value"] = "account" return result_dict else: result_dict["methode"] = "redirect" result_dict["value"] = "sign_in" result_dict["messages"] = "Mot de passe ou pseudo incorrect" return result_dict else: result_dict["methode"] = "render" result_dict["value"] = "auth/sign_in.html" return result_dict def account_get_info(request): """This method create a context of user information if he is connected Args: request (request): request of views auth account Returns: dict: context of render accound info """ if request.user.is_authenticated: user = request.user context = {"title": "Bienvenue " + user.username, "account_info": {"Email": user.email, "Speudo": user.username, "Prénom": user.first_name, "Nom": user.last_name}} return context return {} def get_history_article(request, nb_of_articles_per_page): """ this method is for getting all the article saved by the user Args: request (request): request of views auth history nb_of_articles_per_page (int): number of articles per page Returns: render: render of views, contains all articles saved, in articles_list paginate in context is for: True the button show in html page, False the button no visible """ result_dict = {"methode": "", "value": ""} recherche = request.user.save_product.all() if 'page' in request.GET: page = request.GET.get('page') else: page = 1 recherche = replace_short_dash(recherche) seek, paginate = get_page(page, recherche, nb_of_articles_per_page) result_dict["methode"] = "render" result_dict["value"] = "auth/history.html" result_dict["paginate"] = paginate result_dict["seek"] = seek return result_dict def get_page(page, all_product, 
nb_of_articles_per_page): """This method make a paginator for all products Args: page (int): page of paginator all_product (Product): product nb_of_articles_per_page (int): number of articles per page Returns: tuple: nb_of_articles_per_page product and paginate. paginate in context is for: True the button show in html page, False the button no visible """ paginator = Paginator(all_product, nb_of_articles_per_page) try: recherche = paginator.page(page) except PageNotAnInteger: recherche = paginator.page(1) except EmptyPage: recherche = paginator.page(paginator.num_pages) if paginator.num_pages > 1: paginate = True else: paginate = False return recherche, paginate def replace_short_dash(all_product_result): """This method replace all short dash(-) by nothing in the string Args: all_product_result (product list): list of products Returns: [product list]: list of products """ try: iter(all_product_result) except TypeError: all_product_result.categories.name = \ all_product_result.categories.name.replace("-", " ") return all_product_result else: for arct in all_product_result: arct.categories.name = arct.categories.name.replace("-", " ") return all_product_result <file_sep>/purbeurre/tests/test_model.py """Test file for all class models """ import json from django.test import TestCase from purbeurre.models import Product, Categories class ProductModelTest(TestCase): """Class test of modef Product Args: TestCase ([type]): [description] """ @classmethod def setUpTestData(cls): """Set up non-modified objects used by all test methods """ categories = Categories.objects.create( name="boissons-a-la-canneberge", url="https://fr.openfoodfacts.org/categorie/\ boissons-a-la-canneberge.json", nb_of_products=int(54)) categories.save() product_nutriments = { "fat_value": "0.5", "nova-group_100g": 4, "carbohydrates": "10.5", "saturated-fat_value": "0.1", "fat_serving": 1, "fat_100g": "0.5", "salt": 0, "sodium_value": 0, "fat": "0.5", "carbohydrates_serving": 21, "sugars_value": "10.5", "sodium_serving": 0, "salt_unit": "g", "fruits-vegetables-nuts_serving": 25, "salt_value": 0, "sodium": 0, "energy-kcal": 44, "energy-kcal_serving": 88, "fruits-vegetables-nuts_100g": 25, "saturated-fat_unit": "g", "nova-group_serving": 4, "sodium_unit": "g", "proteins_unit": "g", "energy_unit": "kcal", "salt_serving": 0, "fruits-vegetables-nuts-estimate-from-ingredients_100g": 25, "sodium_100g": 0, "sugars": "10.5", "energy_100g": 184, "proteins_value": "0.5", "nova-group": 4, "saturated-fat_serving": "0.2", "saturated-fat_100g": "0.1", "sugars_serving": 21, "proteins_100g": "0.5", "energy": 184, "carbohydrates_value": "10.5", "nutrition-score-fr_100g": 14, "carbon-footprint-from-known-ingredients_100g": "7.5", "carbon-footprint-from-known-ingredients_serving": 15, "energy-kcal_100g": 44, "carbohydrates_100g": "10.5", "salt_100g": 0, "fruits-vegetables-nuts_value": 25, "carbohydrates_unit": "g", "saturated-fat": "0.1", "carbon-footprint-from-known-ingredients_product": 75, "proteins_serving": 1, "fruits-vegetables-nuts_label": "0", "fruits-vegetables-nuts_unit": "g", "energy-kcal_value": 44, "energy-kcal_unit": "kcal", "energy_serving": 368, "nutrition-score-fr": 14, "sugars_100g": "10.5", "fruits-vegetables-nuts": 25, "sugars_unit": "g", "proteins": "0.5", "fat_unit": "g", "energy_value": 44} categories_fk = Categories.objects.get(name="boissons-a-la-canneberge") product_bdd = Product.objects.create( name="Cranberry", countries="France", id_openfoodfacts="3596710355051", 
url="https://fr.openfoodfacts.org/produit/3596710355051/cranberry-auchan", # noqa: E501 image_url="https://static.openfoodfacts.org/images/products/359/671/035/5051/front_fr.45.400.jpg", # noqa: E501 store="Auchan", nutriscore_grade="e", categories=categories_fk, nutriments=json.dumps(product_nutriments)) product_bdd.save() def setUp(self): self.product_nutriments = { "fat_value": "0.5", "nova-group_100g": 4, "carbohydrates": "10.5", "saturated-fat_value": "0.1", "fat_serving": 1, "fat_100g": "0.5", "salt": 0, "sodium_value": 0, "fat": "0.5", "carbohydrates_serving": 21, "sugars_value": "10.5", "sodium_serving": 0, "salt_unit": "g", "fruits-vegetables-nuts_serving": 25, "salt_value": 0, "sodium": 0, "energy-kcal": 44, "energy-kcal_serving": 88, "fruits-vegetables-nuts_100g": 25, "saturated-fat_unit": "g", "nova-group_serving": 4, "sodium_unit": "g", "proteins_unit": "g", "energy_unit": "kcal", "salt_serving": 0, "fruits-vegetables-nuts-estimate-from-ingredients_100g": 25, "sodium_100g": 0, "sugars": "10.5", "energy_100g": 184, "proteins_value": "0.5", "nova-group": 4, "saturated-fat_serving": "0.2", "saturated-fat_100g": "0.1", "sugars_serving": 21, "proteins_100g": "0.5", "energy": 184, "carbohydrates_value": "10.5", "nutrition-score-fr_100g": 14, "carbon-footprint-from-known-ingredients_100g": "7.5", "carbon-footprint-from-known-ingredients_serving": 15, "energy-kcal_100g": 44, "carbohydrates_100g": "10.5", "salt_100g": 0, "fruits-vegetables-nuts_value": 25, "carbohydrates_unit": "g", "saturated-fat": "0.1", "carbon-footprint-from-known-ingredients_product": 75, "proteins_serving": 1, "fruits-vegetables-nuts_label": "0", "fruits-vegetables-nuts_unit": "g", "energy-kcal_value": 44, "energy-kcal_unit": "kcal", "energy_serving": 368, "nutrition-score-fr": 14, "sugars_100g": "10.5", "fruits-vegetables-nuts": 25, "sugars_unit": "g", "proteins": "0.5", "fat_unit": "g", "energy_value": 44} def test_name(self): """test_name in product """ product = Product.objects.get(id=1) field_label = product.name self.assertEqual(field_label, "Cranberry") def test_countries(self): """test_countries in product """ product = Product.objects.get(id=1) field_label = product.countries self.assertEqual(field_label, "France") def test_id_openfoodfacts(self): """test_id_openfoodfacts in product """ product = Product.objects.get(id=1) field_label = product.id_openfoodfacts self.assertEqual(field_label, "3596710355051") def test_url(self): """test_url in product """ product = Product.objects.get(id=1) field_label = product.url self.assertEqual( field_label, "https://fr.openfoodfacts.org/produit/3596710355051/cranberry-auchan") # noqa: E501 def test_image_url(self): """test_image_url in product """ product = Product.objects.get(id=1) field_label = product.image_url self.assertEqual( field_label, "https://static.openfoodfacts.org/images/products/359/671/035/5051/front_fr.45.400.jpg") # noqa: E501 def test_store(self): """test_store in product """ product = Product.objects.get(id=1) field_label = product.store self.assertEqual(field_label, "Auchan") def test_nutriscore_grade(self): """test_nutriscore_grade in product """ product = Product.objects.get(id=1) field_label = product.nutriscore_grade self.assertEqual(field_label, "e") def test_categories(self): """test_categories in product """ product = Product.objects.get(id=1) field_label = product.categories.name self.assertEqual(field_label, "boissons-a-la-canneberge") def test_nutriments(self): """test_nutriments in product """ product = Product.objects.get(id=1) 
field_label = product.nutriments self.assertEqual(field_label, json.dumps(self.product_nutriments)) class CategoriesModelTest(TestCase): """Class test of modef categories Args: TestCase ([type]): [description] """ @classmethod def setUpTestData(cls): """Set up non-modified objects used by all test methods """ categories = Categories.objects.create( name="boissons", url="https://fr.openfoodfacts.org/categorie/boissons.json", nb_of_products=int(5)) categories.save() def test_name(self): """test_name categories """ categories = Categories.objects.get(name="boissons") field_label = categories.name self.assertEqual(field_label, "boissons") def test_url(self): """test_url categories """ categories = Categories.objects.get(name="boissons") field_label = categories.url self.assertEqual( field_label, "https://fr.openfoodfacts.org/categorie/boissons.json") def test_nb_of_products(self): """test_nb_of_products categories """ categories = Categories.objects.get(name="boissons") field_label = categories.nb_of_products self.assertEqual(field_label, 5) <file_sep>/purbeurre/migrations/0005_auto_20201216_1632.py # Generated by Django 3.1.4 on 2020-12-16 15:32 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('purbeurre', '0004_auto_20201216_1011'), ] operations = [ migrations.AlterField( model_name='product', name='countries', field=models.CharField(max_length=400, null=True), ), migrations.AlterField( model_name='product', name='image_url', field=models.URLField(max_length=800, unique=True), ), migrations.AlterField( model_name='product', name='name', field=models.CharField(max_length=400), ), migrations.AlterField( model_name='product', name='store', field=models.CharField(max_length=400, null=True), ), migrations.AlterField( model_name='product', name='url', field=models.URLField(max_length=800, unique=True), ), ] <file_sep>/purbeurre/views.py """ Views for purbeurre app, contain the views of results, show_product, unsave, legale and 404 Returns: render: render of views redirect : redirect of views """ from django.shortcuts import render from django.db import transaction from django.shortcuts import redirect from django.contrib import messages from django.template.defaulttags import register from django.core.exceptions import ObjectDoesNotExist from purbeurre.services.purbeurre_services import save_product_result,\ get_articles, show_specify_product,\ get_page, remove_product, replace_indent, like_dislike_services import json from django.http import HttpResponse @register.filter def get_item(dictionary, key): """This method is a filter to your template Args: dictionary (dict): dictionary key (string): key of your dictionary Returns: string: value of your dictionary key """ return dictionary.get(key) def index(request): """this view concern the index of the main page Args: request ([type]): [description] Returns: [type]: [description] """ context = {'title': "Pur Beurre"} return render(request, 'purbeurre/index.html', context=context) @transaction.atomic def resultats(request): """this view concern the result of the research Args: request ([type]): [description] Returns: [type]: [description] """ if request.user.is_authenticated: if request.method == 'POST': result_dict = save_product_result(request.user, request) if result_dict["methode"] == "redirect": messages.success(request, result_dict["message"]) return redirect(result_dict["value"]) elif result_dict["methode"] == "render": messages.error(request, result_dict["message"]) context = {'title': 
"Products"} return render(request, result_dict["value"], context=context) elif request.method == 'GET' and "search" in request.GET: result_dict = get_articles(request, 6) if result_dict["methode"] == "redirect": messages.error(request, result_dict["message"]) return redirect(result_dict["value"]) elif result_dict["methode"] == "render": context = { 'title': "Resultats de votre recherche", 'articles_list': result_dict["seek"], 'aliment_search': request.GET["search"], "paginate": result_dict["paginate"], "str_dict_return_param": result_dict[ "str_dict_return_param"]} return render(request, result_dict["value"], context=context) else: if request.method == 'GET' and "search" in request.GET: result_dict = get_articles(request, 6) if result_dict["methode"] == "redirect": messages.error(request, result_dict["message"]) return redirect(result_dict["value"]) elif result_dict["methode"] == "render": context = {'title': "Resultats de votre recherche", 'articles_list': result_dict["seek"], 'aliment_search': request.GET["search"], "paginate": result_dict["paginate"], "str_dict_return_param": result_dict[ "str_dict_return_param"]} return render(request, result_dict["value"], context=context) else: context = { 'title': "Vous n'êtes pas connecté.", 'err_show': "Vous n'êtes pas connecté."} return render(request, 'auth/sign_in.html', context=context) def show_product(request): """this view concern the display of a product Args: request ([type]): [description] Returns: [type]: [description] """ try: if request.method == 'GET': result_dict = show_specify_product(request) if result_dict["methode"] == "render" and "context" in result_dict: return render(request, result_dict["value"], context=result_dict["context"]) elif result_dict["methode"] == "redirect"\ and "message" in result_dict: messages.error(request, result_dict["message"]) context = {'title': "Product"} return render(request, 'purbeurre/resultats.html', context=context) else: context = {'title': "Bienvenue"} return render(request, 'purbeurre/index.html', context=context) # context = {'title': "Vous n'êtes pas connecté.", # 'err_show': "Vous n'êtes pas connecté."} # return render(request, 'auth/sign_in.html', context=context) except ValueError: context = {'title': "Bienvenue"} return render(request, 'purbeurre/index.html', context=context) @transaction.atomic def unsave(request): """this view can cancel an article previously saved Args: request ([type]): [description] Returns: [type]: [description] """ if request.user.is_authenticated: if request.method == 'POST': try: remove_product(request) all_product_result = request.user.save_product.all() all_product_result = replace_indent(all_product_result) seek, paginate = get_page(1, all_product_result, 6) context = {'title': "Historique de vos articles", 'articles_list': seek, "paginate": paginate} return render(request, 'auth/history.html', context=context) except ObjectDoesNotExist: all_product_result = request.user.save_product.all() all_product_result = replace_indent(all_product_result) context = {'title': "Historique de vos articles", 'articles_list': all_product_result} return render(request, 'auth/history.html', context=context) else: if 'page' in request.GET: page = request.GET.get('page') else: page = 1 all_product_result = request.user.save_product.all() seek, paginate = get_page(page, all_product_result, 6) all_product_result = replace_indent(all_product_result) context = {'title': "Historique de vos articles", 'articles_list': all_product_result, "paginate": paginate} return render(request, 
'auth/history.html', context=context) else: context = {'title': "Vous n'êtes pas connecté.", 'err_show': "Vous n'êtes pas connecté."} return render(request, 'auth/sign_in.html', context=context) def legale(request): """this view concern the display of the legal mention Args: request ([type]): [description] Returns: [type]: [description] """ context = {'title': "Mentions légales"} return render(request, 'purbeurre/legal_notice.html', context=context) def page_not_found_view(request, exception=None): """Customizing error views 404 Args: request ([type]): which is the URL that resulted in the error exception ([type]): which is a useful representation of the exception that triggered the view (e.g. containing any message passed to a specific Http404 instance). Returns: [type]: [description] """ return render(request, '404.html') def page_server_error(request, exception=None): """Customizing error views page_server_error Args: request ([type]): which is the URL that resulted in the error exception ([type]): which is a useful representation of the exception that triggered the view (e.g. containing any message passed to a specific Http404 instance). Returns: render: [description] """ return render(request, '500.html') def like_dislike(request): """View for add a like or dislike product Args: request ([type]): [description] """ if request.user.is_authenticated: if request.method == "GET": context = like_dislike_services(request) if context["text"] == "like dislake save": context["text"] = "Produit ajouté au " return HttpResponse( json.dumps(context), content_type="application/json") elif context["text"] == "err": context["text"] = "Error in services" return HttpResponse( json.dumps(context), content_type="application/json") else: context = {'err': "Error no POST request"} return HttpResponse( json.dumps(context), content_type="application/json") else: context = { 'err': "Vous n'êtes pas connecté.", "like": 0, "dislike": 0} return HttpResponse( json.dumps(context), content_type="application/json") <file_sep>/purbeurre/migrations/0002_auto_20201216_1009.py # Generated by Django 3.1.4 on 2020-12-16 09:09 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('purbeurre', '0001_initial'), ] operations = [ migrations.AddField( model_name='product', name='user_id', field=models.ManyToManyField(to=settings.AUTH_USER_MODEL), ), migrations.DeleteModel( name='Products_save', ), ] <file_sep>/purbeurre/templatetags/utils.py """this file contain filters to applicate at the gabarit Returns: [type]: [description] """ from django import template register = template.Library() @register.filter('get_item') def get_item(dict_data, key): """ use example {{ your_dict|get_item:your_key }} """ if key: return dict_data.get(key) <file_sep>/purbeurre/services/purbeurre_services.py """this file contain the job code method of all views Returns: [type]: [description] """ import json import logging from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage from django.core.exceptions import ObjectDoesNotExist from purbeurre.models import Product # Get an instance of a logger logger = logging.getLogger(__name__) def save_product_result(user, request): """This method save an article to a manytomany table with user Returns: dictionary: "methode": "", "value": "" and "messages":"" """ result_dict = {"methode": "", "value": ""} # dictionnary return try: product_show = 
Product.objects.get(id=request.POST["id"]) # select a product that matches to request id product_show.save_product.add(user) # add user to selected products product_show.save() # save selected products in data base result_dict["methode"] = "redirect" # set method redirect in key methode result_dict["value"] = request.path_info + \ "?search=" + request.POST["search"] # set value of path in key value result_dict["message"] = 'Votre article à bien été enregistré' # set message for user in key message return result_dict # returns dictionnary to views except ObjectDoesNotExist: result_dict["methode"] = "render" # set method render in key # methode if try error result_dict["value"] = 'purbeurre/resultats.html' # set value # of path in key value result_dict["message"] = "Erreur dans l'enregistrement \ de votre produit" # set message for user in key message return result_dict # returns dictionnary to views def get_result_in_list_nutriscore(request): """This methodes get nutriscore form resulte et make a list Args: request ([type]): [description] """ list_of_nutri = [] if "nutriscore_a" in request.GET: if request.GET["nutriscore_a"] == "on": list_of_nutri.append("a") if "nutriscore_b" in request.GET: if request.GET["nutriscore_b"] == "on": list_of_nutri.append("b") if "nutriscore_c" in request.GET: if request.GET["nutriscore_c"] == "on": list_of_nutri.append("c") if "nutriscore_d" in request.GET: if request.GET["nutriscore_d"] == "on": list_of_nutri.append("d") if "nutriscore_e" in request.GET: if request.GET["nutriscore_e"] == "on": list_of_nutri.append("e") return list_of_nutri def get_articles(request, nb_of_articles_per_page): """ This method get articles in bdd Args: request (request): request of views results nb_of_articles_per_page (int): number of articles per page Returns: dictionary: "methode": "", "value": "" , "paginate":"", "seek":"" OR "methode": "", "value": "" ,"messages":"" """ try: result_dict = {"methode": "", "value": ""} str_dict_return_param = "" for key in dict(request.GET): if key != "page": str_dict_return_param += key + "=" + request.GET[key] + "&" str_dict_return_param = str_dict_return_param[:-1] result_final_of_produts = [] recherche = Product.objects.filter( name__icontains=request.GET["search"]) list_of_result_in_form = get_result_in_list_nutriscore(request) if len(list_of_result_in_form) >= 1: recherche = recherche.filter( nutriscore_grade__in=list_of_result_in_form) if "like_limit_1" in request.GET: if request.GET["like_limit_1"] == "on": for product_in in recherche: if product_in.like_count >= product_in.dislike_count: result_final_of_produts.append(product_in) if "like_limit_2" in request.GET: if request.GET["like_limit_2"] == "on": for product_in in recherche: if product_in.like_count < product_in.dislike_count: result_final_of_produts.append(product_in) if "like_limit_2" not in request.GET: if "like_limit_1" not in request.GET: result_final_of_produts = recherche logger.info('New search', exc_info=True, extra={'request': request, }) if 'page' in request.GET: page = int(request.GET['page']) if page <= 0: page = 1 else: page = 1 seek, paginate = get_page( page, result_final_of_produts, nb_of_articles_per_page) result_dict["methode"] = "render" result_dict["value"] = "purbeurre/resultats.html" result_dict["paginate"] = paginate result_dict["seek"] = seek result_dict["str_dict_return_param"] = str_dict_return_param return result_dict except ObjectDoesNotExist: result_dict["methode"] = "redirect" result_dict["value"] = "resultat" result_dict["message"] = "Erreur dans 
la recherche de votre produit" return result_dict def show_specify_product(request): """This method show a product , get id in request get id and send in context the product in articles_list Args: request (request): request of views results Returns: dictionary: "methode": "", "value": "" and "context":"" """ try: result_dict = {"methode": "", "value": ""} # dictionnary return product_show = Product.objects.get(id=request.GET["id"]) # select a # product that matches to request id product_save = Product.objects.get(id=request.GET["id"]) product_show = replace_indent(product_show) # replace all short dash in the product_show.name prod = json.loads(product_show.nutriments) # load the JSON string into the database if "search" in request.GET: search = request.GET["search"] else: search = "" # check search in the request get , # because the field must be returned to the user like_value = product_save.like_count dislike_value = product_save.dislike_count result_dict["methode"] = "render" # set method render in key methode result_dict["value"] = 'purbeurre/show_product.html' # set page resultats in key value result_dict["context"] = {'title': "resultats de votre recherche", 'articles_list': product_show, 'aliment_search': search, "nutriments": prod, 'like': like_value, 'dislike': dislike_value} # set context in key context for the views return result_dict # returns dictionnary to views except ObjectDoesNotExist: result_dict["methode"] = "redirect" # set method redirect in key methode result_dict["value"] = "resultat" # set value resultat in key value result_dict["message"] = "Erreur dans la recherche de votre produit" # set message for user in key message return result_dict # returns dictionnary to views def remove_product(request): """This method remove product with specify id Args: request (request): need "id" in request.POST """ if "id" in request.POST: product_show = Product.objects.get(id=request.POST["id"]) product_show.save_product.remove(request.user) def get_page(page, all_product, nb_of_articles_per_page): """This method make a paginator of all products Args: page (int): page of paginator all_product (Product): product nb_of_articles_per_page (int): number of articles per page Returns: tuple: nb_of_articles_per_page product and paginate. 
paginate in context is for: True the button show in html page, False the button no visible """ paginator = Paginator(all_product, nb_of_articles_per_page) try: recherche = paginator.page(page) except PageNotAnInteger: recherche = paginator.page(1) except EmptyPage: recherche = paginator.page(paginator.num_pages) if paginator.num_pages > 1: paginate = True else: paginate = False return recherche, paginate def replace_indent(all_product_result): """This method replace all short dash in the string Args: all_product_result (product list): list of product Returns: [product list]: list of product """ try: iter(all_product_result) except TypeError: all_product_result.categories.name = \ all_product_result.categories.name.replace("-", " ") return all_product_result else: for arct in all_product_result: arct.categories.name = arct.categories.name.replace("-", " ") return all_product_result def like_dislike_services(request): """methode for services like and dislike feature Args: request ([type]): [description] """ value_tmp = "" if "like" in request.GET: value_tmp = request.GET["like"] product_select = Product.objects.get(id=value_tmp) try: product_select.disklike_products.get(id=request.user.id) product_select.disklike_products.remove(request.user) except ObjectDoesNotExist: pass product_select.like_products.add(request.user) elif "dislike" in request.GET: value_tmp = request.GET["dislike"] product_select = Product.objects.get(id=value_tmp) try: product_select.like_products.get(id=request.user.id) product_select.like_products.remove(request.user) except ObjectDoesNotExist: pass product_select.disklike_products.add(request.user) else: value_tmp = "err" return value_tmp like_value = len(product_select.like_products.all()) dislike_value = len(product_select.disklike_products.all()) context = {"text": "like dislake save", "like": like_value, "dislike": dislike_value} product_select.like_count = like_value product_select.dislike_count = dislike_value product_select.save() return context <file_sep>/auth/views.py """this file contain all the view of the authentification application Returns: [type]: [description] """ from django.shortcuts import render from django.contrib.auth import logout from django.shortcuts import redirect from django.contrib import messages from django.core.exceptions import ValidationError from auth.services.auth_services import connect_validation, account_get_info,\ get_history_article, sign_validation # Create your views here. 
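# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the original repository):
# the auth.services.auth_services helpers imported above return dictionaries
# shaped like {"methode": "render" | "redirect", "value": <template or url>,
# ...}, and each view below branches on "methode" by hand. The helper
# sketched here only shows that contract in one place; the name
# dispatch_service_result and its signature are assumptions introduced for
# illustration. It relies on the render and redirect imports at the top of
# this file.
def dispatch_service_result(request, result, context=None):
    """Map a service-layer result dict onto an HTTP response (sketch only)."""
    if result["methode"] == "redirect":
        return redirect(result["value"])
    return render(request, result["value"], context=context or {})
# ---------------------------------------------------------------------------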
def sign_in(request): """this view concern the inscription Args: request ([type]): [description] Returns: [type]: [description] """ if not request.user.is_authenticated: if request.method == 'POST': try: result_dict = sign_validation(request) if result_dict["methode"] == "redirect": return redirect(result_dict["value"]) elif result_dict["methode"] == "render": context = {'form': result_dict["form"]} return render(request, result_dict["value"], context=context) except ValidationError as err: messages.error(request, err.message) return redirect('sign_in') else: context = {'title': "Inscription"} return render(request, 'auth/sign_in.html', context=context) else: return redirect('account') def connect(request): """this view concern the account connection Args: request ([type]): [description] Returns: [type]: [description] """ if request.method == 'POST': result_dict = connect_validation(request) if result_dict["methode"] == "redirect": if result_dict["value"] == "account": return redirect(result_dict["value"]) elif result_dict["value"] == "sign_in": messages.error(request, result_dict["messages"]) return redirect(result_dict["value"]) if result_dict["methode"] == "render": context = {'title': "Inscription"} return render(request, result_dict["value"], context=context) else: context = {'title': "Inscription"} return render(request, 'auth/sign_in.html', context=context) def account(request): """this view concern the account Args: request ([type]): [description] Returns: [type]: [description] """ if request.user.is_authenticated: context = account_get_info(request) return render(request, 'auth/account.html', context=context) else: context = {'title': "Utilisateur pas connecter"} return render(request, 'purbeurre/index.html', context=context) def logout_view(request): """this view is for the deconnexion Args: request ([type]): [description] Returns: [type]: [description] """ if request.user.is_authenticated: logout(request) context = {'title': "Déconnexion"} return render(request, 'purbeurre/index.html', context=context) else: context = {'title': "Vous n'êtes pas connecté.", 'err_show': "Vous n'êtes pas connecté."} return render(request, 'auth/sign_in.html', context=context) def history(request): """this view serve to get back the history of the saved products Args: request ([type]): [description] Returns: [type]: [description] """ if request.user.is_authenticated: result_dict = get_history_article(request, 6) if result_dict["methode"] == "render": context = {'title': "Historique de vos articles", 'articles_list': result_dict["seek"], "paginate": result_dict["paginate"], 'aliment_search': ""} return render(request, result_dict["value"], context=context) else: context = {'title': "Vous n'êtes pas connecté.", 'err_show': "Vous n'êtes pas connecté."} return render(request, 'auth/sign_in.html', context=context) <file_sep>/auth/urls.py """ url file """ from django.urls import path from . import views urlpatterns = [ path('sign_in.html', views.sign_in, name="sign_in"), path('connection.html', views.connect, name="connection"), path('account.html', views.account, name="account"), path('deconnection.html', views.logout_view, name="deconnection"), path('history.html', views.history, name="history") ] <file_sep>/auth/templates/auth/history.html {% extends "base.html" %} {% block content %} {% load static %} <div class="row sign_back pb-5"> <div class="container col-10"> <div class="row justify-content-center col-12"> <div class="col-12 text-center"> <img src="{% static '/img/profil.png' %}" alt="..." 
class="border rounded-circle " width="150px"> <hr class="divider_sign" /> <h4>Vos aliments sauvegardés sont : </h4> <div class="row justify-content-center mt-5"> {% for article in articles_list %} <div class="card col-xl-3 col-lg-5 col-8 card_result"> <a href="{% url 'show_product' %}?id={{article.id}}&search={{ aliment_search }}" style="height:50%;"> <div class="img_nutri_result"> <img src="{% static '/img/' %}{{article.nutriscore_grade}}.svg" alt=""> </div> <div class="img_div_result"> <img src="{{ article.image_url }}" class="card-img-top " alt=""> </div> </a> <div class="card-body card-body-article"> <h5 class="card-title">{{article.name}}</h5> <!-- <p class="card-text">Nutriscore : {{article.nutriscore_grade.upper}}</p> --> <p class="card-text">Categories: {{article.categories.name}}</p> <form action="{% url 'unsave' %}" method="post" class="button_card_art"> {% csrf_token %} <input type="hidden" name="search" value="{{ aliment_search }}"> <button type="submit" class="btn btn-primary" name="id" value="{{article.id}}"><i class="fa fa-floppy-o"> Retirer</i></button> </form> </div> </div> {% empty %} <h1>Sorry, no articles.</h1> {% endfor %} </div> {% if paginate %} <div class="clearfix"></div> <nav aria-label=""> <ul class="pagination justify-content-center"> {% if articles_list.has_previous %} <li style="padding-right: 20px;"> <a href="?page={{ articles_list.previous_page_number }}"> <button class="btn page-link">Précédent</button> </a> </li> {% endif %} {% if articles_list.has_next %} <li> <a href="?page={{ articles_list.next_page_number }}"> <button class="btn page-link ">Suivant</button> </a> </li> {% endif %} </ul> </nav> {% endif %} </div> </div> </div> </div> {% endblock %}<file_sep>/purbeurre/tests/test_views.py """ class of test viewe purbeurre """ import time from django.test import TestCase, Client # Create your tests here. from selenium import webdriver from selenium.webdriver.common.keys import Keys from purbeurre.models import Product, Categories import json firefox_options = webdriver.FirefoxOptions() firefox_options.headless = True class UrlPurbeurreTests(TestCase): """ Class test of url of app Args: TestCase ([type]): [description] """ @classmethod def setUpTestData(cls): """This method make a account for testing the url form sign_in """ browser = webdriver.Firefox(options=firefox_options) print("\nCreation d'un compte\n") info = {"inputUsername": "Frost101", "inputemail": "<EMAIL>", "inputPassword1": "<PASSWORD>", "inputPassword2": "<PASSWORD>", "inputNom": "Test_Nom", "inputprenom": "Test_prenom"} browser.get('http://127.0.0.1:8000/auth/sign_in.html') grid = browser.find_element_by_id('inputemail') grid.send_keys(info["inputemail"]) grid = browser.find_element_by_id('inputUsername') grid.send_keys(info["inputUsername"]) grid = browser.find_element_by_id('inputPassword1') grid.send_keys(info["inputPassword1"]) grid = browser.find_element_by_id('inputPassword2') grid.send_keys(info["inputPassword2"]) grid = browser.find_element_by_id('inputNom') grid.send_keys(info["inputNom"]) grid = browser.find_element_by_id('inputprenom') grid.send_keys(info["inputprenom"]) browser.execute_script( "document.getElementById('gridCheck').checked = true;") time.sleep(1) browser.execute_script( "document.getElementsByClassName('btn btn-primary')[1].click();") browser.quit() def setUp(self): """This method similar at __init__ for each instance """ # Every test needs a client. 
self.client = Client() self.browser = webdriver.Firefox(options=firefox_options) def tearDown(self): self.browser.quit() def test_index(self): """ This method test the index url """ response = self.client.get('/purbeurre/index.html') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['title'], "Pur Beurre") time.sleep(2) def test_like_dislike_offline(self): """ This method test the like_dislik with no connection user url """ response = self.client.get( 'http://127.0.0.1:8000/purbeurre/like_dislike/') self.assertEqual(response.status_code, 200) self.assertEqual(response.json()['err'], "Vous n'êtes pas connecté.") time.sleep(2) def test_index_selenium(self): """ test index with selenium """ self.browser.get('http://127.0.0.1:8000/purbeurre/index.html') self.assertEqual(self.browser.title, "Pur Beurre") time.sleep(2) def test_search_with_filter(self): """Methode for test filter in search product """ self.browser.get('http://127.0.0.1:8000/purbeurre/index.html') elem = self.browser.find_element_by_id('search-nav') elem.send_keys('boisson' + Keys.RETURN) time.sleep(2) self.browser.execute_script( "document.getElementById('btn_for_filter').click();") self.browser.execute_script( "document.getElementById('nutriscore_e').click();") self.browser.execute_script( "document.getElementById('btn_nav_search').click();") element_art = self.browser.find_elements_by_xpath( "//div[@id='div_card_all']/div") self.assertEqual(len(element_art), 6) def test_seek_search_selenium(self): """test search product with selenium no auth """ self.browser.get('http://127.0.0.1:8000/purbeurre/index.html') elem = self.browser.find_element_by_id('search-nav') elem.send_keys('boisson' + Keys.RETURN) time.sleep(2) self.assertEqual(self.browser.title, "Resultats de votre recherche") time.sleep(2) def connect(self): """Methode to connect user in selenium """ self.browser.get('http://127.0.0.1:8000/auth/sign_in.html') inputusername = self.browser.find_element_by_id('inputEmail_connect') inputusername.send_keys('<EMAIL>') inputpassword = self.browser.find_element_by_id( 'inputPassword_connect') inputpassword.send_keys('<PASSWORD>') # grid = self.browser.find_element_by_id('gridCheck_connect') self.browser.execute_script( "document.getElementById('gridCheck_connect').checked = true;") # button = self.browser.find_element_by_id('button_valid_form') self.browser.execute_script( "document.getElementById('button_valid_form').click();") def test_connection_selenium(self): """test connection with selenium """ self.connect() time.sleep(2) self.assertEqual(self.browser.title, "Bienvenue Frost101") time.sleep(2) def test_seek_search_connection_selenium(self): """test searh connection with selenium """ self.connect() self.make_product() time.sleep(2) self.assertEqual(self.browser.title, "Bienvenue Frost101") self.browser.get('http://127.0.0.1:8000/purbeurre/index.html') elem = self.browser.find_element_by_id('search-nav') elem.send_keys('boisson' + Keys.RETURN) time.sleep(2) self.assertEqual(self.browser.title, "Resultats de votre recherche") element_art = self.browser.find_elements_by_xpath( "//div[@id='div_card_all']/div") self.assertEqual(len(element_art), 6) time.sleep(2) def test_show_product_selenium(self): """test show_product with selenium """ self.make_product() self.connect() time.sleep(5) self.assertEqual(self.browser.title, "Bienvenue Frost101") self.browser.get('http://127.0.0.1:8000/purbeurre/index.html') elem = self.browser.find_element_by_id('search-nav') elem.send_keys('boisson' + Keys.RETURN) 
time.sleep(2) id_product = self.browser.find_element_by_id('button_id') self.browser.get( 'http://127.0.0.1:8000/purbeurre/show_product.html/?id=' + str( id_product.get_attribute("value")) + '&search=boisson') # noqa: E501 elem = self.browser.find_element_by_class_name( 'card_description').find_elements_by_tag_name("h5")[0] self.assertEqual(elem.text, "Repères nutritionnels pour 100g :") time.sleep(2) def make_product(self): """Methode for creation product """ categories = Categories.objects.create( name="boissons-a-la-canneberge", url="https://fr.openfoodfacts.org/categorie/" + "boissons-a-la-canneberge.json", nb_of_products=int(54)) categories.save() categories_fk = Categories.objects.get(name="boissons-a-la-canneberge") product_nutriments = { "fat_value": "0.5", "nova-group_100g": 4, "carbohydrates": "10.5", "saturated-fat_value": "0.1", "fat_serving": 1, "fat_100g": "0.5", "salt": 0, "sodium_value": 0, "fat": "0.5", "carbohydrates_serving": 21, "sugars_value": "10.5", "sodium_serving": 0, "salt_unit": "g", "fruits-vegetables-nuts_serving": 25, "salt_value": 0, "sodium": 0, "energy-kcal": 44, "energy-kcal_serving": 88, "fruits-vegetables-nuts_100g": 25, "saturated-fat_unit": "g", "nova-group_serving": 4, "sodium_unit": "g", "proteins_unit": "g", "energy_unit": "kcal", "salt_serving": 0, "fruits-vegetables-nuts-estimate-from-ingredients_100g": 25, "sodium_100g": 0, "sugars": "10.5", "energy_100g": 184, "proteins_value": "0.5", "nova-group": 4, "saturated-fat_serving": "0.2", "saturated-fat_100g": "0.1", "sugars_serving": 21, "proteins_100g": "0.5", "energy": 184, "carbohydrates_value": "10.5", "nutrition-score-fr_100g": 14, "carbon-footprint-from-known-ingredients_100g": "7.5", "carbon-footprint-from-known-ingredients_serving": 15, "energy-kcal_100g": 44, "carbohydrates_100g": "10.5", "salt_100g": 0, "fruits-vegetables-nuts_value": 25, "carbohydrates_unit": "g", "saturated-fat": "0.1", "carbon-footprint-from-known-ingredients_product": 75, "proteins_serving": 1, "fruits-vegetables-nuts_label": "0", "fruits-vegetables-nuts_unit": "g", "energy-kcal_value": 44, "energy-kcal_unit": "kcal", "energy_serving": 368, "nutrition-score-fr": 14, "sugars_100g": "10.5", "fruits-vegetables-nuts": 25, "sugars_unit": "g", "proteins": "0.5", "fat_unit": "g", "energy_value": 44} product_bdd = Product.objects.create( name="Cranberry", countries="France", id_openfoodfacts="3596710355051", url="https://fr.openfoodfacts.org/produit/3596710355051/cranberry-auchan", # noqa: E501 image_url="https://static.openfoodfacts.org/images/products/359/671/035/5051/front_fr.45.400.jpg", # noqa: E501 store="Auchan", nutriscore_grade="e", categories=categories_fk, nutriments=json.dumps(product_nutriments)) product_bdd.save() <file_sep>/purbeurre/urls.py """ url urlpatterns of purbeurre app """ from django.urls import path from . 
import views # def trigger_error(request): # """Methode fgfor sentry test # Args: # request ([type]): [description] # """ # division_by_zero = 1 / 0 urlpatterns = [ path('resultats.html', views.resultats, name="resultat"), path('show_product.html/', views.show_product, name="show_product"), path('unsave.html', views.unsave, name="unsave"), path('legal_notice.html', views.legale, name="legal_notice"), path('index.html', views.index, name="index"), # path('sentry-debug/', trigger_error), path('like_dislike/', views.like_dislike, name="like_dislike") ] <file_sep>/auth/templates/auth/account.html {% extends "base.html" %} {% block content %} {% load static %} <div class="row sign_back pb-5 " id="2"> <div class="container "> <div class="row justify-content-center"> <div class="row justify-content-center "> <div class="col-12 text-center "> <img src="{% static '/img/profil.png' %}" alt="..." class="border rounded-circle " width="150px"> <hr class="divider_sign" /> </div> <div class="row justify-content-center"> <div class="card border-primary justify-content-center info_acc" style="width: 28rem; font-weight: bold;"> <div class="card-header info_acc"> <h5 class="card-title">Information sur le compte :</h5> </div> <ul class="list-group list-group-flush info_acc"> {% if account_info %} {% for key, value in account_info.items %} <li class="list-group-item info_acc"><b>{{key}}</b> : {{ value }}</li> {% endfor %} {% endif %} </ul> </div> </div> </div> </div> </div> </div> {% endblock %}<file_sep>/purbeurre/models.py """ this file contains all models class """ from django.db import models from django.conf import settings # Create your models here. class Categories(models.Model): """Model categories of articles Args: models ([type]): [description] """ class Meta: ordering = ['-id'] verbose_name = "Category" verbose_name_plural = "Categories" def __str__(self): return self.name name = models.CharField(max_length=200, unique=True, null=False) url = models.URLField(max_length=200, unique=True, null=False) nb_of_products = models.IntegerField(null=True) class Product(models.Model): """Model of Product Args: models ([type]): [description] """ class Meta: ordering = ['-id'] verbose_name = "Product" verbose_name_plural = "Products" def __str__(self): return self.name name = models.CharField(max_length=800, unique=False, null=False) countries = models.CharField(max_length=800, unique=False, null=True) id_openfoodfacts = models.CharField( max_length=800, unique=True, null=False) url = models.URLField(max_length=800, unique=True, null=False) image_url = models.URLField(max_length=800, unique=True, null=False) store = models.CharField(max_length=800, unique=False, null=True) nutriscore_grade = models.CharField(max_length=1, unique=False, null=False) categories = models.ForeignKey(Categories, on_delete=models.CASCADE) nutriments = models.CharField(max_length=8000, unique=False, null=False) user_id = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name="save_product", name="save_product") disklike_products = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name="disklike_products", name="disklike_products") like_products = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name="like_products", name="like_products") like_count = models.IntegerField(null=True, default=0) dislike_count = models.IntegerField(null=True, default=0) <file_sep>/auth/tests/test_views.py """ class of test viewe purbeurre """ from django.test import TestCase, Client # Create your tests here. 
class UrlAuthTests(TestCase):
    """Class of tests for the urls of the auth app

    Args:
        TestCase ([type]): [description]
    """

    def setUp(self):
        """This method acts like __init__ for each test instance
        """
        # Every test needs a client.
        self.client = Client()

    def test_sign_in(self):
        """This method tests the sign_in url
        """
        response = self.client.get('/auth/sign_in.html')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['title'], "Inscription")
        self.make_account()
        self.client.login(username='Test_accound', password='<PASSWORD>')
        response = self.client.get('/auth/sign_in.html')
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/auth/account.html")

    def test_accound(self):
        """This method tests the account url
        """
        response = self.client.get('/auth/account.html')
        self.assertEqual(response.status_code, 200)
        self.make_account()
        self.client.login(username='Test_accound', password='<PASSWORD>')
        response = self.client.get('/auth/account.html')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['account_info']["Email"],
            "<EMAIL>")

    def test_history(self):
        """This method tests the history url
        """
        response = self.client.get('/auth/history.html')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['title'], "Vous n'êtes pas connecté.")
        self.make_account()
        self.client.login(username='Test_accound', password='<PASSWORD>')
        response = self.client.get('/auth/history.html')
        self.assertEqual(
            response.context['title'], "Historique de vos articles")

    def test_deconnection(self):
        """This method tests the deconnection url
        """
        response = self.client.get('/auth/deconnection.html')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['title'], "Vous n'êtes pas connecté.")
        self.make_account()
        self.client.login(username='Test_accound', password='<PASSWORD>')
        response = self.client.get('/auth/deconnection.html')
        self.assertEqual(response.context['title'], "Déconnexion")

    def test_connect(self):
        """This method tests the connection url
        """
        response = self.client.get('/auth/connection.html')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['title'], "Inscription")
        self.make_account()
        info = {"inputEmail_connect": "<EMAIL>",
                "inputPassword_connect": "<PASSWORD>"}
        response = self.client.post('/auth/connection.html', data=info)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/auth/account.html")
        info = {"inputEmail_connect": "<EMAIL>",
                "inputPassword_connect": "<PASSWORD>"}
        response = self.client.post('/auth/connection.html', data=info)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/auth/sign_in.html")

    def make_account(self):
        """This method makes an account for testing the sign_in form url
        """
        info = {"inputUsername": "Test_accound",
                "inputemail": "<EMAIL>",
                "inputPassword1": "<PASSWORD>",
                "inputPassword2": "<PASSWORD>",
                "inputNom": "Test_Nom",
                "inputprenom": "Test_prenom"}
        response = self.client.post('/auth/sign_in.html', data=info)
        self.assertEqual(response.status_code, 302)
        response = self.client.get('/auth/account.html')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['account_info']["Email"], info["inputemail"])
<file_sep>/README.md
## About Project 8 
- This repository contains project 8 of OpenClassrooms; you will find the installation instructions and the necessary information below
- URL HEROKU 
   https://purbeurre-at.herokuapp.com/purbeurre/index.html
- Ocean Digital
http://172.16.17.32/purbeurre/index.html ## For installation #### In your terminal : * Create a python environment : * `$ pip install virtualenv ` * `$ virtualenv -p python3 venv` * `$ activate venv` * Clone this repository on the same folder with venv * Install requirements : * `$ pip install -r requirements.txt` * install https://www.postgresql.org/ on your os * Make data base : * 'NAME': 'nutella_platform' * 'USER': 'postgres' * 'PASSWORD': '<PASSWORD>' EDIT PASSWORD in nutella_platform\settings.py 145 * 'HOST': '127.0.0.1' * 'PORT': '5432' * In your terminal go to the root of the repository, then enter : * `$ activate venv` * `$ python manage.py migrate` * `$ python manage.py import_product 5` * `$ python manage.py runserver` * Go to http://127.0.0.1:8000/ ## Functionality * You can search for products that are related to the previously imported categories (purbeurre\url_import_openfood.txt) * For exemple * https://fr.openfoodfacts.org/categorie/boissons-a-la-canneberge.json = boisson * You can save them and search for substitutes, but you will need to create an account first. ## Example ![](https://github.com/Anthony10700/P8/blob/master/img-ex/p8-1.PNG?raw=true) ![](https://github.com/Anthony10700/P8/blob/master/img-ex/p8-2.PNG?raw=true) ![](https://github.com/Anthony10700/P8/blob/master/img-ex/p8-3.PNG?raw=true) <file_sep>/purbeurre/apps.py """PurbeurreConfig """ from django.apps import AppConfig class PurbeurreConfig(AppConfig): """PurbeurreConfig for purbeurre app Args: AppConfig ([type]): [description] """ name = 'purbeurre' <file_sep>/purbeurre/migrations/0009_product_like_disklike_products.py # Generated by Django 3.1.4 on 2021-02-08 21:42 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('purbeurre', '0008_auto_20210108_1551'), ] operations = [ migrations.AddField( model_name='product', name='like_disklike_products', field=models.ManyToManyField(related_name='like_disklike_products', to=settings.AUTH_USER_MODEL), ), ] <file_sep>/purbeurre/migrations/0008_auto_20210108_1551.py # Generated by Django 3.1.4 on 2021-01-08 14:51 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('purbeurre', '0007_product_nutriments'), ] operations = [ migrations.AlterModelOptions( name='categories', options={'ordering': ['-id'], 'verbose_name': 'Category', 'verbose_name_plural': 'Categories'}, ), migrations.AlterModelOptions( name='product', options={'ordering': ['-id'], 'verbose_name': 'Product', 'verbose_name_plural': 'Products'}, ), ] <file_sep>/auth/apps.py """file go authconfig """ from django.apps import AppConfig class AuthConfig(AppConfig): """AuthConfig for app auth Args: AppConfig ([type]): [description] """ name = 'auth' <file_sep>/purbeurre/static/js/scripts.js /*! * Start Bootstrap - Creative v6.0.4 (https://startbootstrap.com/theme/creative) * Copyright 2013-2020 Start Bootstrap * Licensed under MIT (https://github.com/StartBootstrap/startbootstrap-creative/blob/master/LICENSE) */ (function($) { "use strict"; // Start of use strict // Smooth scrolling using jQuery easing $('a.js-scroll-trigger[href*="#"]:not([href="#"])').click(function() { if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname) { var target = $(this.hash); target = target.length ? 
target : $('[name=' + this.hash.slice(1) + ']'); if (target.length) { $('html, body').animate({ scrollTop: (target.offset().top - 72) }, 1000, "easeInOutExpo"); return false; } } }); // Closes responsive menu when a scroll trigger link is clicked $('.js-scroll-trigger').click(function() { $('.navbar-collapse').collapse('hide'); }); // Activate scrollspy to add active class to navbar items on scroll $('body').scrollspy({ target: '#mainNav', offset: 75 }); // Collapse Navbar var navbarCollapse = function() { if ($("#mainNav").offset().top > 100) { $("#mainNav").addClass("navbar-scrolled"); } else { $("#mainNav").removeClass("navbar-scrolled"); } }; // Collapse now if page is not at top navbarCollapse(); // Collapse the navbar when page is scrolled $(window).scroll(navbarCollapse); // Magnific popup calls $('#portfolio').magnificPopup({ delegate: 'a', type: 'image', tLoading: 'Loading image #%curr%...', mainClass: 'mfp-img-mobile', gallery: { enabled: true, navigateByImgClick: true, preload: [0, 1] }, image: { tError: '<a href="%url%">The image #%curr%</a> could not be loaded.' } }); $("#img-dislike").click(function() { var value_tmp = $("#div_global_like_dislike").attr('value'); let cookie = document.cookie let csrfToken = cookie.substring(cookie.indexOf('=') + 1) $.ajax({ /*ajax it's a function with send a http request to the server in the url @url*/ url: "../like_dislike/", dataType: "json", data: { "dislike": value_tmp }, success: function(response) { var text = "" if ('text' in response) { text = response.text + " dislike" } else { text = response.err } var div = document.getElementById('div_for_text_like_dislike'); div.innerHTML = text var dislike = document.getElementById('td_for_dislike'); dislike.innerHTML = response.dislike var like = document.getElementById('td_for_like'); like.innerHTML = response.like }, error: function(error) { var dislike = document.getElementById('div_for_text_like_dislike'); dislike.innerHTML = "Une erreur dans ajax désolé" } }); }); $("#img-like").click(function() { var value_tmp = $("#div_global_like_dislike").attr('value'); let cookie = document.cookie let csrfToken = cookie.substring(cookie.indexOf('=') + 1) $.ajax({ /*ajax it's a function with send a http request to the server in the url @url*/ url: "../like_dislike/", dataType: "json", data: { "like": value_tmp }, success: function(response) { var text = "" if ('text' in response) { text = response.text + " like" } else { text = response.err } var div = document.getElementById('div_for_text_like_dislike'); div.innerHTML = text var dislike = document.getElementById('td_for_dislike'); dislike.innerHTML = response.dislike var like = document.getElementById('td_for_like'); like.innerHTML = response.like }, error: function(error) { var div = document.getElementById('div_for_text_like_dislike'); div.innerHTML = "Une erreur dans ajax désolé" } }); }); if (getAllUrlParams().nutriscore_a == "on") { document.getElementById('nutriscore_a').click(); } if (getAllUrlParams().nutriscore_b == "on") { document.getElementById('nutriscore_b').click(); } if (getAllUrlParams().nutriscore_c == "on") { document.getElementById('nutriscore_c').click(); } if (getAllUrlParams().nutriscore_d == "on") { document.getElementById('nutriscore_d').click(); } if (getAllUrlParams().nutriscore_e == "on") { document.getElementById('nutriscore_e').click(); } if (getAllUrlParams().like_limit_1 == "on") { document.getElementById('like_limit_1').click(); } if (getAllUrlParams().like_limit_2 == "on") { 
document.getElementById('like_limit_2').click(); } if (getAllUrlParams().search != undefined) { document.getElementById('search-nav').value = getAllUrlParams().search; } function getAllUrlParams(url) { var queryString = url ? url.split('?')[1] : window.location.search.slice(1); var obj = {}; if (queryString) { queryString = queryString.split('#')[0]; var arr = queryString.split('&'); for (var i = 0; i < arr.length; i++) { var a = arr[i].split('='); var paramName = a[0]; var paramValue = typeof(a[1]) === 'undefined' ? true : a[1]; paramName = paramName.toLowerCase(); if (typeof paramValue === 'string') paramValue = paramValue.toLowerCase(); if (paramName.match(/\[(\d+)?\]$/)) { var key = paramName.replace(/\[(\d+)?\]/, ''); if (!obj[key]) obj[key] = []; if (paramName.match(/\[\d+\]$/)) { var index = /\[(\d+)\]/.exec(paramName)[1]; obj[key][index] = paramValue; } else { obj[key].push(paramValue); } } else { if (!obj[paramName]) { obj[paramName] = paramValue; } else if (obj[paramName] && typeof obj[paramName] === 'string') { obj[paramName] = [obj[paramName]]; obj[paramName].push(paramValue); } else { obj[paramName].push(paramValue); } } } } return obj; } })(jQuery); // End of use strict<file_sep>/purbeurre/migrations/0003_auto_20201216_1010.py # Generated by Django 3.1.4 on 2020-12-16 09:10 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('purbeurre', '0002_auto_20201216_1009'), ] operations = [ migrations.AlterField( model_name='product', name='user_id', field=models.ManyToManyField(related_name='save_product', to=settings.AUTH_USER_MODEL), ), ] <file_sep>/purbeurre/migrations/0011_auto_20210210_2234.py # Generated by Django 3.1.4 on 2021-02-10 21:34 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('purbeurre', '0010_auto_20210208_2300'), ] operations = [ migrations.AddField( model_name='product', name='dislike_count', field=models.IntegerField(default=0, null=True), ), migrations.AddField( model_name='product', name='like_count', field=models.IntegerField(default=0, null=True), ), ] <file_sep>/auth/tests/test_services.py """ class of test Services Auth """ import json from django.test import RequestFactory, TransactionTestCase, Client from django.contrib.auth import logout, get_user_model from auth.services.auth_services import sign_validation, account_get_info,\ connect_validation, get_history_article, replace_short_dash, get_page from purbeurre.models import Categories, Product # Create your tests here. class TestMyServicesAuth(TransactionTestCase): """class of test Services Auth Args: TransactionTestCase ([type]): TransactionTestCase and not TestCase because Every test needs "setUp method" """ reset_sequences = True def setUp(self): """[summary] """ # Every test needs a client. 
self.factory = RequestFactory() self.client = Client() self.create_product() def make_account(self): """this method can create an account for the test """ info = { "inputUsername": "Test_accound2", "inputemail": "<EMAIL>", "inputPassword1": "<PASSWORD>", "inputPassword2": "<PASSWORD>", "inputNom": "Test_Nom", "inputprenom": "Test_prenom"} response = self.client.post('/auth/sign_in.html', data=info) self.assertEqual(response.status_code, 302) response = self.client.get('/auth/account.html') self.assertEqual(response.status_code, 200) self.assertEqual( response.context['account_info']["Email"], info["inputemail"]) def test_sign_validation(self): """this method test the inscription """ info = { "inputUsername": "Test_accound", "inputemail": "<EMAIL>", "inputPassword1": "<PASSWORD>", "inputPassword2": "<PASSWORD>", "inputNom": "Test_Nom", "inputprenom": "Test_prenom"} request = self.factory.post('/auth/sign_in.html', data=info) request.session = self.client.session result = sign_validation(request) result_dict = {"methode": "redirect", "value": "account"} self.assertEqual(result, result_dict) def test_account_get_info(self): """this method test the account information """ user = get_user_model() request = self.factory.get('/auth/account.html') request.user = user.objects.create_user( username='jacob', email='<EMAIL>', password='<PASSWORD>') account_get = account_get_info(request) request.session = self.client.session request_response_dict = { "title": "Bienvenue " + request.user.username, "account_info": {"Email": request.user.email, "Speudo": request.user.username, "Prénom": request.user.first_name, "Nom": request.user.last_name}} self.assertEqual(account_get, request_response_dict) logout(request) account_get = account_get_info(request) request_response_dict = {} self.assertEqual(account_get, request_response_dict) def test_connect_validation(self): """this method test the account connection """ self.make_account() info = { "inputEmail_connect": "<EMAIL>", "inputPassword_connect": "<PASSWORD>"} request = self.factory.post('/auth/sign_in.html', data=info) request.session = self.client.session # request.user = User.objects.get(username="Test_accound2") result_dict = {"methode": "redirect", "value": "account"} resulta = connect_validation(request) self.assertEqual(result_dict, resulta) def test_get_history_article(self): """this method can get the history article """ user = get_user_model() self.make_account() info = { "inputEmail_connect": "<EMAIL>", "inputPassword_connect": "<PASSWORD>"} request = self.factory.post('/auth/sign_in.html', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") rst = get_history_article(request, 6) if "methode" in rst and "value" in rst \ and "paginate" in rst and "seek" in rst: self.assertEqual(True, True) else: self.assertEqual(False, True) def test_replace_indent(self): """this method test if all the short dash has been removed from the string """ product_show = Product.objects.get(id="1") product_show = replace_short_dash(product_show) self.assertEqual(product_show.categories.name, "boissons a la canneberge") def create_product(self): """this method create a object product """ categories = Categories.objects.create( name="boissons-a-la-canneberge", url="https://fr.openfoodfacts.org/categorie/boissons-a-la-canneberge.json", # noqa: E501 nb_of_products=int(54)) categories.save() categories_fk = Categories.objects.get(name="boissons-a-la-canneberge") product_nutriments = { "fat_value": "0.5", "nova-group_100g": 
4, "carbohydrates": "10.5", "saturated-fat_value": "0.1", "fat_serving": 1, "fat_100g": "0.5", "salt": 0, "sodium_value": 0, "fat": "0.5", "carbohydrates_serving": 21, "sugars_value": "10.5", "sodium_serving": 0, "salt_unit": "g", "fruits-vegetables-nuts_serving": 25, "salt_value": 0, "sodium": 0, "energy-kcal": 44, "energy-kcal_serving": 88, "fruits-vegetables-nuts_100g": 25, "saturated-fat_unit": "g", "nova-group_serving": 4, "sodium_unit": "g", "proteins_unit": "g", "energy_unit": "kcal", "salt_serving": 0, "fruits-vegetables-nuts-estimate-from-ingredients_100g": 25, "sodium_100g": 0, "sugars": "10.5", "energy_100g": 184, "proteins_value": "0.5", "nova-group": 4, "saturated-fat_serving": "0.2", "saturated-fat_100g": "0.1", "sugars_serving": 21, "proteins_100g": "0.5", "energy": 184, "carbohydrates_value": "10.5", "nutrition-score-fr_100g": 14, "carbon-footprint-from-known-ingredients_100g": "7.5", "carbon-footprint-from-known-ingredients_serving": 15, "energy-kcal_100g": 44, "carbohydrates_100g": "10.5", "salt_100g": 0, "fruits-vegetables-nuts_value": 25, "carbohydrates_unit": "g", "saturated-fat": "0.1", "carbon-footprint-from-known-ingredients_product": 75, "proteins_serving": 1, "fruits-vegetables-nuts_label": "0", "fruits-vegetables-nuts_unit": "g", "energy-kcal_value": 44, "energy-kcal_unit": "kcal", "energy_serving": 368, "nutrition-score-fr": 14, "sugars_100g": "10.5", "fruits-vegetables-nuts": 25, "sugars_unit": "g", "proteins": "0.5", "fat_unit": "g", "energy_value": 44} product_bdd = Product.objects.create( name="Cranberry", countries="France", id_openfoodfacts="3596710355051", url="https://fr.openfoodfacts.org/produit/3596710355051/cranberry-auchan", # noqa: E501 image_url="https://static.openfoodfacts.org/images/products/359/671/035/5051/front_fr.45.400.jpg", # noqa: E501 store="Auchan", nutriscore_grade="e", categories=categories_fk, nutriments=json.dumps(product_nutriments)) product_bdd.save() def test_get_page(self): """this method can get back a chosen page of the paginator """ product_show = Product.objects.filter( id="1") recherche, paginate = get_page(1, product_show, 6) self.assertEqual(recherche[0].name, "Cranberry") self.assertEqual(paginate, False) <file_sep>/purbeurre/tests/test_services.py """ class of test Services purbeurre """ import json from django.test import RequestFactory, TransactionTestCase, Client from django.contrib.auth import get_user_model from purbeurre.models import Categories, Product from purbeurre.services.purbeurre_services import save_product_result,\ get_articles, show_specify_product,\ remove_product, replace_indent, get_page, like_dislike_services # Create your tests here. from purbeurre.templatetags.utils import get_item class TestMyServicesPurbeurre(TransactionTestCase): """This class is for the service worker of app purbeurre Args: TransactionTestCase ([type]): TransactionTestCase because Every test needs setUp method. """ reset_sequences = True def setUp(self): """This method similar at __init__ for each instance """ # Every test needs a client. 
self.factory = RequestFactory() self.client = Client() self.make_account() self.create_product() def make_account(self): """This method make a account for testing the url form sign_in """ info = {"inputUsername": "Test_accound2", "inputemail": "<EMAIL>", "inputPassword1": "<PASSWORD>", "inputPassword2": "<PASSWORD>", "inputNom": "Test_Nom", "inputprenom": "Test_prenom"} response = self.client.post('/auth/sign_in.html', data=info) self.assertEqual(response.status_code, 302) response = self.client.get('/auth/account.html') self.assertEqual(response.status_code, 200) self.assertEqual( response.context['account_info']["Email"], info["inputemail"]) def create_product(self): """This method create a product for testing """ categories = Categories.objects.create( name="boissons-a-la-canneberge", url="https://fr.openfoodfacts.org/categorie/boissons-a-la-canneberge.json", # noqa: E501 nb_of_products=int(54)) categories.save() categories_fk = Categories.objects.get(name="boissons-a-la-canneberge") product_nutriments = { "fat_value": "0.5", "nova-group_100g": 4, "carbohydrates": "10.5", "saturated-fat_value": "0.1", "fat_serving": 1, "fat_100g": "0.5", "salt": 0, "sodium_value": 0, "fat": "0.5", "carbohydrates_serving": 21, "sugars_value": "10.5", "sodium_serving": 0, "salt_unit": "g", "fruits-vegetables-nuts_serving": 25, "salt_value": 0, "sodium": 0, "energy-kcal": 44, "energy-kcal_serving": 88, "fruits-vegetables-nuts_100g": 25, "saturated-fat_unit": "g", "nova-group_serving": 4, "sodium_unit": "g", "proteins_unit": "g", "energy_unit": "kcal", "salt_serving": 0, "fruits-vegetables-nuts-estimate-from-ingredients_100g": 25, "sodium_100g": 0, "sugars": "10.5", "energy_100g": 184, "proteins_value": "0.5", "nova-group": 4, "saturated-fat_serving": "0.2", "saturated-fat_100g": "0.1", "sugars_serving": 21, "proteins_100g": "0.5", "energy": 184, "carbohydrates_value": "10.5", "nutrition-score-fr_100g": 14, "carbon-footprint-from-known-ingredients_100g": "7.5", "carbon-footprint-from-known-ingredients_serving": 15, "energy-kcal_100g": 44, "carbohydrates_100g": "10.5", "salt_100g": 0, "fruits-vegetables-nuts_value": 25, "carbohydrates_unit": "g", "saturated-fat": "0.1", "carbon-footprint-from-known-ingredients_product": 75, "proteins_serving": 1, "fruits-vegetables-nuts_label": "0", "fruits-vegetables-nuts_unit": "g", "energy-kcal_value": 44, "energy-kcal_unit": "kcal", "energy_serving": 368, "nutrition-score-fr": 14, "sugars_100g": "10.5", "fruits-vegetables-nuts": 25, "sugars_unit": "g", "proteins": "0.5", "fat_unit": "g", "energy_value": 44} product_bdd = Product.objects.create( name="Cranberry", countries="France", id_openfoodfacts="3596710355051", url="https://fr.openfoodfacts.org/produit/3596710355051/cranberry-auchan", # noqa: E501 image_url="https://static.openfoodfacts.org/images/products/359/671/035/5051/front_fr.45.400.jpg", # noqa: E501 store="Auchan", nutriscore_grade="e", categories=categories_fk, nutriments=json.dumps(product_nutriments)) product_bdd.save() def test_save_product_result(self): """This method test save_product_result """ user = get_user_model() info = {"id": "1", "search": "boissons"} request = self.factory.post('/purbeurre/resultats.html', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") result = save_product_result(request.user, request) result_dict = {"methode": "redirect", "value": request.path_info + "?search=" + request.POST["search"], "message": 'Votre article à bien été enregistré'} self.assertEqual(result, 
result_dict) def test_get_articles(self): """This method test get_article """ user = get_user_model() info = {"search": "boissons"} request = self.factory.get('/purbeurre/resultats.html', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") rst = get_articles(request, 6) if "methode" in rst and "value" in rst \ and "paginate" in rst and "seek" in rst \ and "str_dict_return_param" in rst: self.assertEqual(True, True) else: self.assertEqual(False, True) info = {"search": "boissons", "like_limit_1": "on"} request = self.factory.get('/purbeurre/resultats.html', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") rst = get_articles(request, 6) if "methode" in rst and "value" in rst \ and "paginate" in rst and "seek" in rst \ and "str_dict_return_param" in rst: self.assertEqual(True, True) else: self.assertEqual(False, True) if rst["str_dict_return_param"] == "search=boissons&like_limit_1=on": self.assertEqual(True, True) else: self.assertEqual(False, True) def test_show_specify_product(self): """This method test show_specify_product """ user = get_user_model() info = {"id": "1"} request = self.factory.get('show_product.html/', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") product_show = Product.objects.get(id=request.GET["id"]) result = show_specify_product(request) prod = json.loads(product_show.nutriments) search = "" like_value = len(product_show.like_products.all()) dislike_value = len(product_show.disklike_products.all()) result_dict = { "methode": "render", "value": 'purbeurre/show_product.html', "context": {'title': "resultats de votre recherche", 'articles_list': product_show, 'aliment_search': search, "nutriments": prod, 'like': like_value, 'dislike': dislike_value}} self.assertEqual(result, result_dict) def test_remove_product(self): """This method test remove_product """ user = get_user_model() self.test_save_product_result() info = {"id": "1"} request = self.factory.post('unsave.html', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") product_show = request.user.save_product.all() self.assertEqual(len(product_show), 1) remove_product(request) product_show = request.user.save_product.all() self.assertEqual(len(product_show), 0) def test_get_item(self): """This method test get_item """ dict_in = {"id": "test"} self.assertEqual(get_item(dict_in, "id"), "test") def test_replace_indent(self): """This method test replace_short dash """ product_show = Product.objects.get(id="1") product_show = replace_indent(product_show) self.assertEqual(product_show.categories.name, "boissons a la canneberge") def test_get_page(self): """This method test get_page """ product_show = Product.objects.filter( id="1") recherche, paginate = get_page(1, product_show, 6) self.assertEqual(recherche[0].name, "Cranberry") self.assertEqual(paginate, False) def test_like_dislike_services(self): """methode for test services like_dislike_services """ user = get_user_model() info = {"dislike": "1"} request = self.factory.get('like_dislike/', data=info) request.session = self.client.session request.user = user.objects.get(username="Test_accound2") result = like_dislike_services(request) context = { "text": "like dislake save", "like": 0, "dislike": 1} self.assertEqual(context, result) <file_sep>/purbeurre/admin.py """Class of admin panel """ from django.contrib import admin from purbeurre.models 
import Product, Categories @admin.register(Product) class ProductAdmin(admin.ModelAdmin): """admin product panel Args: admin ([type]): [description] """ search_fields = ['name', 'id'] list_filter = ['categories', 'save_product'] @admin.register(Categories) class CategoriesAdmin(admin.ModelAdmin): """admin categories panel Args: admin ([type]): [description] """ search_fields = ['name', 'id'] <file_sep>/purbeurre/migrations/0001_initial.py # Generated by Django 3.1.4 on 2020-12-16 08:57 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Categories', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200, unique=True)), ('url', models.URLField(unique=True)), ('nb_of_products', models.IntegerField(null=True)), ], ), migrations.CreateModel( name='Product', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('countries', models.CharField(max_length=200, null=True)), ('id_openfoodfacts', models.CharField(max_length=200, unique=True)), ('url', models.URLField(unique=True)), ('image_url', models.URLField(unique=True)), ('store', models.CharField(max_length=200, null=True)), ('nutriscore_grade', models.CharField(max_length=1)), ('categories', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='purbeurre.categories')), ], ), migrations.CreateModel( name='Products_save', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('product_id', models.ManyToManyField(to='purbeurre.Product')), ('user_id', models.ManyToManyField(to=settings.AUTH_USER_MODEL)), ], ), ] <file_sep>/purbeurre/migrations/0004_auto_20201216_1011.py # Generated by Django 3.1.4 on 2020-12-16 09:11 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('purbeurre', '0003_auto_20201216_1010'), ] operations = [ migrations.RenameField( model_name='product', old_name='user_id', new_name='save_product', ), ] <file_sep>/purbeurre/management/commands/import_product.py """The script is for the import of the api from open food facts data in the data base """ # !/usr/bin/python3 # -*- coding: Utf-8 -* import json from pathlib import Path import requests from django.core.management.base import BaseCommand from django.db.utils import IntegrityError from purbeurre.models import Categories from purbeurre.models import Product class Command(BaseCommand): """this class create a new command to call from the file manage.py Args: BaseCommand ([type]): [description] """ help = 'import_porduct' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.nb_of_page = 1 def add_arguments(self, parser): parser.add_argument('nb_page', type=int, help='Number of page for download') def handle(self, *args, **kwargs): self.nb_of_page = kwargs['nb_page'] self.main() def main(self): """This method import the api of open food facts data and make new DATA BASE myslq and table """ list_of_categories = [] list_of_url_categories = [] url_file = Path(__file__).resolve().parent.parent.parent files = open(str(url_file) + "/url_import_openfood.txt", 'r') lines = files.readlines() for line in lines: print(line) 
list_of_url_categories.append(line) for url in list_of_url_categories: payload = {} headers = {} data_all = {} response = requests.request( "GET", url, headers=headers, data=payload) test = json.loads(response.text.encode('utf8')) list_of_categories.append(test) for product_ in list_of_url_categories: # import all products for all categories in the limit nb_of_page list_temp = [] for i in range(1, self.nb_of_page + 1): print(product_.replace(".json", "") + "/" + str(i) + ".json") url_in = product_.replace(".json", "") + "/" + str(i) + ".json" response = requests.request( "GET", url_in, headers=headers, data=payload) data = json.loads(response.text.encode('utf8')) data = data["products"] data_temp = [] for d_var in data: if str(d_var['categories_lc']) == "fr": data_temp.append(d_var) list_temp.extend(data_temp) data_all.update({str(product_).split( "/")[4].replace(".json", ""): list_temp}) # print(data_all) print("####################################") print("####################################") print("####################################") print("#### Suppression des categories ####") print("####################################") print("####################################") print("####################################") for categ in Categories.objects.all(): print(str(categ.name) + " : delete") categ.delete() print("#################################") print("#################################") print("#################################") print("#### Création des catégories ####") print("#################################") print("#################################") print("#################################") for var_i, item in enumerate(list_of_url_categories): # insert in database all categories completed_name, # URL and nb_of_products product_ = list_of_categories[var_i] url = item name_produc = str(item).split("/")[4].replace(".json", "") nb_prod = product_["count"] categories = Categories.objects.create(name=name_produc, url=url, nb_of_products=int(nb_prod)) print(categories.name + " : create") categories.save() print("############################") print("############################") print("############################") print("#### Ajout des produits ####") print("############################") print("############################") print("############################") list_of_key = ["product_name", "countries", "id", "url", "image_url", "stores", "categories"] for categ in data_all: # insert all products in database with the fk_key FOREIGN KEY # (categories_idcategories)" # REFERENCES `openfoodfacts`.`categories` (`idcategories`)" print(categ) categories_fk = Categories.objects.get(name=categ) for product in data_all[categ]: produit_value_ok = True if "nutriscore_grade" in product: nutri_value = product["nutriscore_grade"] else: nutri_value = "e" for value in range(0, 7): if not list_of_key[value] in product: product[list_of_key[value]] = "" for key in list_of_key: if len(product[key]) > 800: produit_value_ok = False if produit_value_ok: if "energy-kcal_100g" in product["nutriments"] and \ "energy_100g" in product["nutriments"] and \ "fat_100g" in product["nutriments"] and \ "saturated-fat_100g" in product["nutriments"] and\ "carbohydrates_100g" in product["nutriments"] and\ "sugars_100g" in product["nutriments"] and\ "proteins_100g" in product["nutriments"] and\ "salt_100g" in product["nutriments"] and\ "sodium_100g" in product["nutriments"] and\ "nutrition-score-fr_100g" in product["nutriments"]: tmp = json.dumps(product["nutriments"]) try: product_bdd = Product.objects.create( 
name=product["product_name"],
                                    countries=product["countries"],
                                    id_openfoodfacts=product["id"],
                                    url=product["url"],
                                    image_url=product["image_url"],
                                    store=product["stores"],
                                    nutriscore_grade=nutri_value,
                                    categories=categories_fk,
                                    nutriments=tmp)
                                product_bdd.save()
                            except IntegrityError:
                                self.stdout.write("######## duplicate key value violates the unique constraint ########")

        self.stdout.write("##########  END  ##########")


if __name__ == "__main__":
    c = Command()
    print("e")
d5c28c712c52d22d7dd33f0ffa3b96975e322849
[ "Markdown", "Python", "JavaScript", "HTML" ]
29
Python
Anthony10700/P8
706d01dc94377e87531cb81ea936d70d278ae5c9
701bc07ccc717c27a065e7d611630fa45fe9150d
refs/heads/master
<repo_name>zm1990s/CNA-Demos<file_sep>/windows-wordpress/multi-pod-dockerfile/nginx/Dockerfile FROM mcr.microsoft.com/windows/servercore:ltsc2019 COPY wordpress.zip nginxinit.ps1 "C:/" RUN powershell -command Set-ExecutionPolicy Bypass -Scope Process -Force; \ iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) ; \ choco install -y nginx ; \ Expand-Archive -Path c:\wordpress.zip -DestinationPath "C:\tools\nginx-1.17.10\html" COPY wp-config.php "C:/tools/nginx-1.17.10/html/wordpress/" COPY nginx.conf "C:/tools/nginx-1.17.10/conf/" CMD powershell -command c:\nginxinit.ps1<file_sep>/rsync/client/docker-entrypoint.sh #!/bin/sh echo "$crontab" > /etc/crontabs/root echo "$password" > /root/passwd chmod 600 /root/passwd crond touch /var/log/rsync.log tail -f /var/log/rsync.log <file_sep>/rsync/client/Dockerfile FROM alpine:latest RUN apk add --no-cache rsync COPY backup.sh docker-entrypoint.sh / CMD /docker-entrypoint.sh <file_sep>/rsync/server/Dockerfile FROM alpine:latest RUN apk add rsync COPY rsyncd.conf rsyncd.motd rsyncd.secrets /etc/ COPY docker-entrypoint.sh / WORKDIR / CMD /docker-entrypoint.sh <file_sep>/rsync/README.md # Client Side YAML example ```yaml apiVersion: v1 kind: PersistentVolumeClaim metadata: name: rsync-client namespace: rsync labels: app: rsync-cient spec: storageClassName: nfs accessModes: - ReadWriteOnce resources: requests: storage: 1Gi --- apiVersion: v1 kind: Secret metadata: name: rsync-password namespace: rsync type: Opaque stringData: password: "<PASSWORD>" --- apiVersion: apps/v1 kind: Deployment metadata: name: rsync-client namespace: rsync labels: app: rsync tier: client spec: selector: matchLabels: app: rsync tier: client template: metadata: name: rsync-client labels: app: rsync tier: client spec: containers: - name: rsync-client image: dyadin/rsync-client:latest imagePullPolicy: IfNotPresent env: - name: crontab value: "* * * * * /backup.sh>>/var/log/rsync.log" - name: rsync_server value: "rsync-server-svc" - name: username value: "backupuser" - name: backup_path value: "/root/backup/" - name: dstconfig value: "common" - name: password valueFrom: secretKeyRef: name: rsync-password key: <PASSWORD> volumeMounts: - mountPath: /root/backup/ name: rsync-client volumes: - name: rsync-client persistentVolumeClaim: claimName: rsync-client ``` # server side YAML example ```yaml apiVersion: v1 kind: PersistentVolumeClaim metadata: name: rsync-backup namespace: rsync labels: app: rsync-backup spec: storageClassName: nfs accessModes: - ReadWriteOnce resources: requests: storage: 1Gi --- apiVersion: v1 kind: Secret metadata: name: rsync-password namespace: rsync type: Opaque stringData: password: "<PASSWORD>" --- apiVersion: apps/v1 kind: Deployment metadata: name: rsync-server namespace: rsync labels: app: rsync tier: server spec: selector: matchLabels: app: rsync tier: server template: metadata: name: rsync-server labels: app: rsync tier: server spec: containers: - name: rsync-server image: dyadin/rsync-server imagePullPolicy: IfNotPresent ports: - name: rsync containerPort: 873 env: - name: username value: "backupuser" - name: backup_path value: "/backup/" - name: password valueFrom: secretKeyRef: name: rsync-password key: password volumeMounts: - mountPath: /backup name: rsync-backup livenessProbe: tcpSocket: port: 873 initialDelaySeconds: 5 timeoutSeconds: 5 readinessProbe: tcpSocket: port: 873 initialDelaySeconds: 5 timeoutSeconds: 5 volumes: - name: rsync-backup persistentVolumeClaim: claimName: rsync-backup --- apiVersion: 
v1
kind: Service
metadata:
  name: rsync-server-svc
  namespace: rsync
  labels:
    tier: rsync
    app: server
spec:
  ports:
  - name: rsync
    port: 873
    protocol: TCP
    targetPort: 873
  selector:
    app: rsync
    tier: server
```

# Service side configuration (on vms or bare metal)

/etc/rsyncd.conf

```
motd file = /etc/rsyncd.motd
transfer logging = yes
log file = /var/log/rsyncd.log
port = 873
address = 0.0.0.0
uid = root
gid = root
use chroot = no
read only = no
max connections = 10
[common]
comment = rsync server
path = /backup/
ignore errors
auth users = rsync
secrets file = /etc/rsyncd.secrets
hosts allow = 172.16.31.10/255.255.255.0
hosts deny = *
list = false
```

/etc/rsyncd.motd

```
welcome to rsync server!
```

/etc/rsyncd.secrets

```
rsync:123456
```

```bash
chmod 600 /etc/rsyncd.secrets
rsync --daemon
echo "rsync --daemon" >> /etc/rc.local
```
<file_sep>/quay.io/cert-manager-cainjector/Dockerfile
FROM quay.io/jetstack/cert-manager-cainjector:v0.16.0
MAINTAINER <EMAIL>
<file_sep>/windows-wordpress/multi-pod-dockerfile/php/Dockerfile
FROM mcr.microsoft.com/windows/servercore:ltsc2019
COPY wordpress.zip phpinit.ps1 "C:/"
RUN powershell -command Set-ExecutionPolicy Bypass -Scope Process -Force; \
    iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) ; \
    choco install -y php; mkdir -p "C:/tools/nginx-1.17.10/html/" ; Expand-Archive -Path c:\wordpress.zip -DestinationPath "C:\tools\nginx-1.17.10\html"
COPY php.ini "C:/tools/php74/"
COPY wp-config.php "C:/tools/nginx-1.17.10/html/wordpress/"
CMD powershell -command c:\phpinit.ps1
<file_sep>/windows-wordpress/multi-pod-dockerfile/mysql/Dockerfile
FROM mcr.microsoft.com/windows/servercore:ltsc2019
COPY mysqlinit.bat mysqlinit-2.bat mysqlinit.ps1 "C:/"
RUN powershell -command Set-ExecutionPolicy Bypass -Scope Process -Force; \
    iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) ; \
    choco install -y mysql; \
    c:\mysqlinit.bat ; ls
CMD powershell -command c:\mysqlinit.ps1 ; pause<file_sep>/quay.io/cert-manager-webhook/Dockerfile
FROM quay.io/jetstack/cert-manager-webhook:v0.16.0
MAINTAINER <EMAIL>
<file_sep>/rsync/server/docker-entrypoint.sh
#!/bin/sh
sed -i 's#/backup/#'${backup_path}'#' /etc/rsyncd.conf
sed -i 's/BACKUPUSER/'${username}'/' /etc/rsyncd.conf
echo ${username}:${password}>/etc/rsyncd.secrets
chmod 600 /etc/rsyncd.secrets
touch /var/log/rsyncd.log
rsync --daemon && tail -f /var/log/rsyncd.log
<file_sep>/frpc.ini
[common]
server_addr = 172.16.58.3
server_port = 7071

[ssh]
type = tcp
local_ip = 10.10.50.51
local_port = 22
remote_port = 30022

[rancher]
type = tcp
local_ip = 10.10.50.51
local_port = 30443
remote_port = 8443

[gitlab]
type = tcp
# local_port is the local web service port you want to expose to the internet
local_ip = 10.10.50.51
local_port = 8080
remote_port = 30080

[rdp]
type = tcp
# local_port is the local web service port you want to expose to the internet
local_ip = 10.10.50.2
local_port = 3389
remote_port = 3389

[web]
type = http
# local_port is the local web service port you want to expose to the internet
local_ip = 10.10.50.51
local_port = 8080
# subdomain must be globally unique; each proxied service needs its own subdomain, otherwise it will not work properly
# the client's subdomain must be used together with the server's subdomain_host
subdomain = gitlab
<file_sep>/quay.io/cert-manager-acmesolver/Dockerfile
FROM quay.io/jetstack/cert-manager-acmesolver:v0.16.0
MAINTAINER <EMAIL>
<file_sep>/avi-demo-v2/Dockerfile
FROM dyadin/toolbox:1.0
WORKDIR /root/
COPY src ./
CMD /root/start.sh
<file_sep>/nextcloud/Dockerfile
FROM nextcloud:19
ADD nextcloud-plugin/*.tar.gz /usr/src/nextcloud/custom_apps/
COPY sources.list /etc/apt/sources.list
RUN apt-get update && apt-get install -y apt-utils ffmpeg
p7zip p7zip-full unrar smbclient <file_sep>/avi-demo-v2/src/start.sh #!/bin/bash set -ex sed -i "s/demoapp/$HOSTNAME/" index.html export privateipaddress=`tail -1 /etc/hosts|awk '{print $1}'` sed -i "s/ipaddress/$privateipaddress/" index.html sed -i "s/hostnameinfo/$hostinfo/" index.html darkhttpd /root/ <file_sep>/quay.io/cert-manager-controller/Dockerfile FROM quay.io/jetstack/cert-manager-controller:v0.16.0 MAINTAINER <EMAIL> <file_sep>/rsync/client/backup.sh #!/bin/sh echo "-----------------------------------" echo "Backup job starting..." echo "Current date: `date`" rsync -avz --password-file=/root/passwd "$backup_path" "$username"@"$rsync_server"::"$dstconfig"
a3caab50e2917cd5c2df24ea68112c881bbd8999
[ "Markdown", "INI", "Dockerfile", "Shell" ]
17
Dockerfile
zm1990s/CNA-Demos
253a3e9757f4279d45ce18a7e686177ece92fb2b
a308e3abb5e4a883528c045b2a4a4df84d612750
refs/heads/master
<repo_name>Crigges/TableTicker<file_sep>/desktop/src/systems/crigges/gui/desktop/DesktopLauncher.java package systems.crigges.gui.desktop; import com.badlogic.gdx.Graphics.Monitor; import com.badlogic.gdx.backends.lwjgl3.Lwjgl3Application; import com.badlogic.gdx.backends.lwjgl3.Lwjgl3ApplicationConfiguration; import systems.crigges.gui.Ticker; public class DesktopLauncher { // Lwjgl3ApplicationConfiguration config = new Lwjgl3ApplicationConfiguration(); // Monitor[] monitors = config.getMonitors(); // for (int i = 0; i < monitors.length; i++) { // System.out.println(monitors[i]); // } // new Lwjgl3Application(new Ticker(), config); } <file_sep>/desktop/src/systems/crigges/gui/desktop/BowTableModel.java package systems.crigges.gui.desktop; import java.util.ArrayList; import javax.swing.event.TableModelEvent; import javax.swing.event.TableModelListener; import javax.swing.table.TableModel; public class BowTableModel implements TableModel{ private Class<?>[] colClasses; private String[] colNames; private ArrayList<String> names; private ArrayList<String> classes; private ArrayList<Integer> rings; private ArrayList<Integer> placements; private ArrayList<?>[] data; private TableModelListener listener; public BowTableModel() { colClasses = new Class<?>[4]; colClasses[0] = String.class; colClasses[1] = String.class; colClasses[2] = Integer.class; colClasses[3] = Integer.class; colNames = new String[4]; colNames[0] = "Name"; colNames[1] = "Klasse"; colNames[2] = "Ringe"; colNames[3] = "Platzierung"; names = new ArrayList<String>(); classes = new ArrayList<String>(); rings = new ArrayList<Integer>(); placements = new ArrayList<Integer>(); data = new ArrayList[4]; data[0] = names; data[1] = classes; data[2] = rings; data[3] = placements; } public void addEntry(String name, Object obj, int ringCount){ //find class String cls = (String) obj; int pos = -1; for(String c : classes){ if(c.equals(cls)){ pos = classes.indexOf(c); } } int placement = 1; if(pos == -1){ pos = classes.size(); }else{ int i = pos; int placementFix = 1; while(i < classes.size() && classes.get(i).equals(cls)){ if(rings.get(pos) >= ringCount){ placement++; pos++; }else{ placements.set(i, placementFix + 1); } placementFix++; i++; } } writeEntry(name, cls, ringCount, placement, pos); listener.tableChanged(new TableModelEvent(this)); } private void writeEntry(String name, String cs, int ringCount, int placement, int pos){ names.add(pos, name); classes.add(pos, cs); rings.add(pos, ringCount); placements.add(pos, placement); } @Override public void addTableModelListener(TableModelListener listen) { listener = listen; } @Override public Class<?> getColumnClass(int col) { return colClasses[col]; } @Override public int getColumnCount() { return 4; } @Override public String getColumnName(int col) { return colNames[col]; } @Override public int getRowCount() { return names.size(); } @Override public Object getValueAt(int row, int col) { return data[col].get(row); } @Override public boolean isCellEditable(int row, int col) { return col == 0; } @Override public void removeTableModelListener(TableModelListener arg0) { // TODO Auto-generated method stub } @Override public void setValueAt(Object arg0, int arg1, int arg2) { if(arg2 == 0){ names.set(arg1, (String) arg0); } } } <file_sep>/desktop/src/systems/crigges/gui/desktop/ClassModifierWindow.java package systems.crigges.gui.desktop; import java.awt.BorderLayout; import java.awt.FlowLayout; import java.awt.Image; import java.awt.Label; import javax.swing.JButton; import javax.swing.JDialog; 
import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.border.EmptyBorder; import javax.swing.JList; import javax.swing.GroupLayout; import javax.swing.GroupLayout.Alignment; import javax.swing.LayoutStyle.ComponentPlacement; import javax.swing.JTextField; import javax.swing.ListSelectionModel; import javax.swing.JLabel; import java.awt.event.ActionListener; import java.awt.image.BufferedImage; import java.util.ArrayList; import java.awt.event.ActionEvent; import javax.swing.JTable; import javax.swing.JScrollPane; import javax.swing.table.DefaultTableModel; import javax.swing.border.LineBorder; import java.awt.Color; public class ClassModifierWindow extends JDialog { private static final long serialVersionUID = 1L; private JTable table; private JTextField textField; private DefaultTableModel tableModel; /** * Launch the application. */ public static void main(String[] args) { try { ClassModifierWindow dialog = new ClassModifierWindow(null, null); System.out.println(dialog.openDialog()); } catch (Exception e) { e.printStackTrace(); } } public ArrayList<String> openDialog(){ setVisible(true); ArrayList<String> res = new ArrayList<String>(); for(int i = 0; i< tableModel.getRowCount(); i++){ res.add(tableModel.getValueAt(i, 0).toString()); } return res; } /** * Create the dialog. */ public ClassModifierWindow(JFrame parent, ArrayList<String> existing) { super(parent, "Klassenšnderungen", true); Image icon = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB_PRE); setIconImage(icon); setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE); setBounds(100, 100, 706, 586); JScrollPane scrollPane = new JScrollPane(); scrollPane.setBorder(new LineBorder(new Color(130, 135, 144))); JPanel panel = new JPanel(); GroupLayout groupLayout = new GroupLayout(getContentPane()); groupLayout.setHorizontalGroup( groupLayout.createParallelGroup(Alignment.LEADING) .addGroup(groupLayout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane, GroupLayout.DEFAULT_SIZE, 444, Short.MAX_VALUE) .addPreferredGap(ComponentPlacement.RELATED) .addComponent(panel, GroupLayout.PREFERRED_SIZE, 201, GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); groupLayout.setVerticalGroup( groupLayout.createParallelGroup(Alignment.LEADING) .addGroup(Alignment.TRAILING, groupLayout.createSequentialGroup() .addContainerGap() .addGroup(groupLayout.createParallelGroup(Alignment.TRAILING) .addComponent(panel, Alignment.LEADING, GroupLayout.DEFAULT_SIZE, 802, Short.MAX_VALUE) .addComponent(scrollPane, Alignment.LEADING, GroupLayout.DEFAULT_SIZE, 802, Short.MAX_VALUE)) .addContainerGap()) ); JLabel lblName = new JLabel("Name"); textField = new JTextField(); textField.setColumns(10); JButton btnNewButton = new JButton("Hinzuf\u00FCgen"); btnNewButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { tableModel.addRow(new Object[]{textField.getText()}); } }); JButton btnNewButton_1 = new JButton("L\u00F6schen"); btnNewButton_1.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if(table.getSelectedRow() != -1){ tableModel.removeRow(table.getSelectedRow()); } } }); GroupLayout gl_panel = new GroupLayout(panel); gl_panel.setHorizontalGroup( gl_panel.createParallelGroup(Alignment.LEADING) .addGroup(gl_panel.createSequentialGroup() .addContainerGap() .addGroup(gl_panel.createParallelGroup(Alignment.LEADING) .addComponent(textField, GroupLayout.DEFAULT_SIZE, 171, Short.MAX_VALUE) .addComponent(lblName) .addComponent(btnNewButton, GroupLayout.DEFAULT_SIZE, 
171, Short.MAX_VALUE) .addComponent(btnNewButton_1, GroupLayout.DEFAULT_SIZE, 171, Short.MAX_VALUE)) .addContainerGap()) ); gl_panel.setVerticalGroup( gl_panel.createParallelGroup(Alignment.LEADING) .addGroup(gl_panel.createSequentialGroup() .addContainerGap() .addComponent(lblName) .addPreferredGap(ComponentPlacement.RELATED) .addComponent(textField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addGap(18) .addComponent(btnNewButton) .addPreferredGap(ComponentPlacement.RELATED) .addComponent(btnNewButton_1) .addContainerGap(342, Short.MAX_VALUE)) ); panel.setLayout(gl_panel); table = new JTable(); table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); tableModel = new DefaultTableModel( new Object[][] { }, new String[] { "Klassendefinition" } ); if(existing != null){ for(String s : existing){ tableModel.addRow(new Object[]{s}); } } table.setModel(tableModel); table.getColumnModel().getColumn(0).setPreferredWidth(960); table.setBorder(new LineBorder(new Color(0, 0, 0))); scrollPane.setViewportView(table); getContentPane().setLayout(groupLayout); } }
641d832e44a5fff03aab0074e998e19236cd4905
[ "Java" ]
3
Java
Crigges/TableTicker
aca9037973d2bb5809efdf2c597e18e9aef42a12
c9a5e584387f6485b23ab2e40fbc024958dedd83
refs/heads/master
<file_sep># jet-game
*A simple game made with HTML5 canvas.*

*Note: this is very much a work in progress.*

## Running
Clone the files to your local machine. Then, run an HTTP server on the directory that holds the files.

For example, you could use python 3 and run <br>
`cd <YOUR DIRECTORY>` followed by <br>
`python -m http.server <PORT>`

Open a browser and navigate to `localhost:<PORT>/jet_game.html`

## Objective
Guide your jet towards the right of the screen to complete each level without hitting the obstacles.

## Controls
*Currently, jet-game is controlled entirely by the keyboard.*

**Main Menu:**
- **[P]**: Play
- **[S]**: Stats *(TODO)*
- **[O]**: Options *(TODO)*
- **[A]**: About *(TODO)*

**Level Select:**
- **[1]**: Start level 1.
- **[2]**: Start level 2. <br>*...*
- **[9]**: Start level 9.
- **[X]**: Return to title.

**Game:**
- **[SPACE]**: Launch the jet.
- **[UP ARROW]**: Tilt the jet upwards.
- **[DOWN ARROW]**: Tilt the jet downwards.
- **[X]**: Return to level select.
<file_sep>//Defines 'Scr' and 'ScrManager', which control "screens" of the program

var screens = {}

function Scr (config) {
    this.config = config
    this.isLoaded = !config.load //EXISTENCE of config.load, not return value. load not called here.
                                 //if no load function, we assume loading not necessary, hence loaded.
    this.isLoading = false
    this.loadCallbacks = [] //functions to be called when Scr loads

    screens[config.name] = this
}

Scr.getScr = function (name) {
    return screens[name]
}

Scr.load = function (scr, callback) {
    Scr.loadAll([scr], callback)
}

Scr.loadAll = function (screens, callback) {
    var count = 0
    screens.forEach(function (scr) {
        if(typeof scr === "string") scr = Scr.getScr(scr)
        scr.load.call(scr, function () {
            count++
            if(callback && screens.length === count) callback()
        })
    })
}

Scr.prototype = {
    get name () {
        return this.config.name
    },
    get render () {
        if(!this.isLoaded || !this.config.render) return function () {}
        return this.config.render
    },
    get oninput () {
        if(!this.isLoaded || !this.config.oninput) return function () {}
        return this.config.oninput
    },
    get init () {
        if(!this.isLoaded || !this.config.init) return function () {}
        return this.config.init
    },
    get load () {
        var thisScr = this
        return function (callback) {
            if(callback) thisScr.loadCallbacks.push(callback)
            if(thisScr.isLoaded) {
                if(callback) callback()
            } else if (!thisScr.isLoading) {
                thisScr.isLoading = true
                this.config.load.call(thisScr, function () {
                    thisScr.isLoaded = true
                    thisScr.isLoading = false
                    thisScr.loadCallbacks.forEach(function (cb) {
                        cb()
                    })
                })
            }
        }
    },
    get exit () {
        if(!this.isLoaded || !this.config.exit) return function () {}
        return this.config.exit
    }
}

var ScrManager = {
    ctx: document.getElementById("canv").getContext("2d"),
    currentScr: null,
    currentTime: null,
    lastTime: null,
    keyStates: new Array(1000).fill(1).map(function () {
        return {
            state: "up",
            lastDown: -Infinity,
            lastUp: -Infinity,
            lastChange: -Infinity
        }
    }),
    init: function () {
        window.requestAnimationFrame(function render (time) {
            ScrManager.currentTime = time
            ScrManager.currentScr.render.call(ScrManager.currentScr, ScrManager.ctx, time, ScrManager.lastTime)
            ScrManager.lastTime = time
            window.requestAnimationFrame(render)
        })
        window.onkeyup = function (e) {
            var ks = ScrManager.keyStates[e.keyCode]
            if(ks.state == "down") {
                ks.state = "up"
                ks.lastChange = ScrManager.currentTime
                ks.lastUp = ks.lastChange
            }
            ScrManager.currentScr.oninput.call(ScrManager.currentScr, "keyup", e)
        }
        window.onkeydown = function (e) {
            var ks = ScrManager.keyStates[e.keyCode]
            if(ks.state == "up") {
                ks.state = "down"
                ks.lastChange = ScrManager.currentTime
                ks.lastDown = ks.lastChange
            }
            ScrManager.currentScr.oninput.call(ScrManager.currentScr, "keydown", e)
        }
    },
    setScr: function (scr) {
        ScrManager.keyStates.forEach(function (state) {
            if(state.state == "down") {
                state.state = "up"
                state.lastChange = ScrManager.currentTime
                state.lastUp = state.lastChange
            }
        })

        var targetScr;
        if(typeof scr === "string") targetScr = screens[scr]
        else targetScr = scr

        console.log('setting screen to ' + targetScr.name)

        //if first screen, no error
        if(ScrManager.currentScr) ScrManager.currentScr.exit.call(ScrManager.currentScr, ScrManager.currentTime)

        ScrManager.currentScr = targetScr
        ScrManager.currentScr.load(function () {
            ScrManager.currentScr.init.call(ScrManager.currentScr, ScrManager.currentTime)
        })
    }
}
<file_sep>//the webworker used to generate the masks for displayables, based on their image and their maskRule

self.addEventListener("message", function (e) {
    var args = e.data
    var maskRule = args.maskRule
    var imgData = args.imgData
    var imgWidth = args.imgWidth
    var imgHeight = args.imgHeight

    //generate an array of arrays of the correct size
    var mask = new Array(imgWidth)
        .fill(1) //<-- make iterable
        .map(function () {
            return new Array(imgHeight)
        })

    for(var i = 0; i < imgData.length; i += 4) {
        var str = imgData[i] + "," + imgData[i+1] + "," + imgData[i+2]
        var y = Math.floor((i / 4) / imgWidth)
        var x = (i / 4) % imgWidth

        if(maskRule.indexOf(str) > -1) {
            mask[x][y] = 1
        } else mask[x][y] = 0
    }

    self.postMessage({
        mask: mask
    })
}, false)
b0a70aa2de7fbc016139ff1d63b0744b319eac47
[ "Markdown", "JavaScript" ]
3
Markdown
jonrrich/jet-game
163fb2847163762f58aeb0428f2c7cf2b5a538d7
ab7615ffcf8cda228e22c3467b7445dccbefab7f
refs/heads/main
<repo_name>Harald-LB/DebugProblem<file_sep>/settings.gradle rootProject.name = 'DebugProblem'
<file_sep>/readme.md # Coroutines debugger does not work with Kotlin 1.5.0 + coroutines 1.5.0-RC

Just discovered this problem. <NAME> has already issued an error report here:
<https://youtrack.jetbrains.com/issue/KTIJ-11620>

![screenshot](/Screenshot.png)

# Trace

```
2021-05-13 16:33:34,202 [1935636] ERROR - roxy.CoroutineDebugProbesProxy - Exception is thrown by calling dumpCoroutines.
java.lang.IllegalStateException: Unable to find loaded class _COROUTINE._CREATION
    at org.jetbrains.kotlin.idea.debugger.coroutine.proxy.LocationCache.createLocation(LocationCache.kt:17)
    at org.jetbrains.kotlin.idea.debugger.coroutine.proxy.CoroutineLibraryAgent2Proxy.findStackFrames(CoroutineLibraryAgent2Proxy.kt:86)
...
    at java.base/java.util.concurrent.Executors$PrivilegedThreadFactory$1$1.run(Executors.java:668)
    at java.base/java.util.concurrent.Executors$PrivilegedThreadFactory$1$1.run(Executors.java:665)
    at java.base/java.security.AccessController.doPrivileged(Native Method)
    at java.base/java.util.concurrent.Executors$PrivilegedThreadFactory$1.run(Executors.java:665)
    at java.base/java.lang.Thread.run(Thread.java:834)
2021-05-13 16:33:34,204 [1935638] ERROR - roxy.CoroutineDebugProbesProxy - IntelliJ IDEA 2021.1 Build #IU-211.6693.111
2021-05-13 16:33:34,206 [1935640] ERROR - roxy.CoroutineDebugProbesProxy - JDK: 11.0.10; VM: Dynamic Code Evolution 64-Bit Server VM; Vendor: JetBrains s.r.o.
2021-05-13 16:33:34,207 [1935641] ERROR - roxy.CoroutineDebugProbesProxy - OS: Linux
2021-05-13 16:33:34,207 [1935641] ERROR - roxy.CoroutineDebugProbesProxy - Plugin to blame: Kotlin version: 211-1.5.0-release-759-IJ6693.72
2021-05-13 16:33:34,207 [1935641] ERROR - roxy.CoroutineDebugProbesProxy - Last Action: Debug
```
<file_sep>/src/main/kotlin/main.kt import kotlinx.coroutines.*

@OptIn(ObsoleteCoroutinesApi::class)
fun main() = runBlocking<Unit> {
    launch {
        println("main runBlocking : I'm working in thread ${Thread.currentThread().name}")
    }
    launch(Dispatchers.Unconfined) { // << put breakpoint here
        println("Unconfined : I'm working in thread ${Thread.currentThread().name}")
    }
    launch(Dispatchers.Default) {
        println("Default : I'm working in thread ${Thread.currentThread().name}")
    }
    launch(newSingleThreadContext("MyOwnThread")) {
        println("newSingleThreadContext: I'm working in thread ${Thread.currentThread().name}")
    }
}
[ "Markdown", "Kotlin", "Gradle" ]
3
Gradle
Harald-LB/DebugProblem
c5f1bf8bc7093cee21ac8a43be10da76f4c41bfc
3232949d07b6224dfdb8048d7858350df7b361b2
refs/heads/master
<file_sep>package model; import bean.*; import java.util.ArrayList; import java.sql.Array; import java.sql.Connection; import java.sql.Date; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; public class HoatDongTNDAO { String url = "jdbc:sqlserver://localhost:1433;databaseName=HoatDongThienNguyen"; String userName = "sa"; String password = "<PASSWORD>"; Connection connection; void Connecti() throws ClassNotFoundException{ Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); try { connection = DriverManager.getConnection(url,userName,password); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } public ArrayList<HoatDongTN> getListHoatDongTN() throws ClassNotFoundException{ Connecti(); String sql = "select * from HOATDONG"; ResultSet rs = null; Statement stm; try { stm = connection.createStatement(); rs = stm.executeQuery(sql); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } ArrayList<HoatDongTN> listhd = new ArrayList<>(); try { while(rs.next()){ HoatDongTN hd = new HoatDongTN(); hd.setMaHD(rs.getString("MaHD")); hd.setTenHD(rs.getString("TenHD")); hd.setMotaHD(rs.getString("MoTaHD")); hd.setNgaygioBD(rs.getDate("NgayGioBD")); hd.setNgaygioKT(rs.getDate("NgayGioKT")); hd.setSLToiThieuYC(rs.getInt("SLToiThieuYC")); hd.setSLToiDaYC(rs.getInt("SLToiDaYC")); hd.setThoiHanDK(rs.getString("ThoiHanDK")); hd.setTrangThai(rs.getString("TrangThai")); hd.setMaTV(rs.getString("MaTV")); hd.setLyDoHuyHD(rs.getString("LiDoHuyHD")); listhd.add(hd); } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return listhd; } public void insertHD(String mahd,String tenhd,String motahd,Date ngaygiobd, Date ngaygiokt,int soluongtt,int soluongtd,String thoihandk,String trangthai,String matv) throws ClassNotFoundException{ Connecti(); String sql = String.format("insert into HOATDONG values ('%s',N'%s',N'%s','%s','%s','%s','%s',N'%s',N'%s','%s','')",mahd,tenhd,motahd,ngaygiobd,ngaygiokt,soluongtt,soluongtd,thoihandk,trangthai,matv ); try { Statement stm = connection.createStatement(); stm.executeUpdate(sql); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } <file_sep>package controller; import java.io.IOException; import java.sql.Date; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import model.HoatDongTNDAO; /** * Servlet implementation class ThemmoiHDServlet */ public class ThemmoiHDServlet extends HttpServlet { private static final long serialVersionUID = 1L; /** * @see HttpServlet#HttpServlet() */ public ThemmoiHDServlet() { super(); // TODO Auto-generated constructor stub } /** * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response) */ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub doPost(request, response); } /** * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub request.setCharacterEncoding("utf-8"); response.setCharacterEncoding("utf-8"); HoatDongTNDAO hdao = new HoatDongTNDAO(); 
if("submit".equals(request.getParameter("submit"))){ String mahd = request.getParameter("mahd"); String tenhd = request.getParameter("tenhd"); String motahd = request.getParameter("motahd"); Date ngaygiobd = Date.valueOf(request.getParameter("ngaygiobd")); Date ngaygiokt = Date.valueOf(request.getParameter("ngaygiokt")); int sltt = Integer.parseInt(request.getParameter("sltt")); int sltd = Integer.parseInt(request.getParameter("sltd")); String thoihandk = request.getParameter("thoihandk"); String matv = request.getParameter("matv"); String trangthai = request.getParameter("trangthai"); try { hdao.insertHD(mahd, tenhd, motahd, ngaygiobd, ngaygiokt, sltt, sltd, thoihandk, trangthai, matv); response.sendRedirect("DanhsachHDServlet"); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } }else{ RequestDispatcher dispatcher = request.getRequestDispatcher("ThemHD.jsp"); dispatcher.forward(request, response); } } } <file_sep>package model; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; public class DanhgiaDAO { String url = "jdbc:sqlserver://localhost:1433;databaseName=HoatDongThienNguyen"; String userName = "sa"; String password = "<PASSWORD>"; Connection connection; void Connecti() throws ClassNotFoundException{ Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); try { connection = DriverManager.getConnection(url,userName,password); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } public void insertDanhgia(String matv,String mahd,int diemtruongdoan,int diemtieuchi1,int diemtieuchi2,int diemtieuchi3) throws ClassNotFoundException{ Connecti(); String sql = String.format("insert into DANHGIA values('%s','%s','%s','%s','%s','%s')", matv,mahd,diemtruongdoan,diemtieuchi1,diemtieuchi2,diemtieuchi3); Statement stm; try { stm = connection.createStatement(); stm.executeUpdate(sql); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } <file_sep>package bean; public class Danhgia { private String matv; private String mahd; private int DiemTruongDoan; private int DiemTieuChi1; private int DiemTieuChi2; private int DiemTieuChi3; public String getMatv() { return matv; } public void setMatv(String matv) { this.matv = matv; } public String getMahd() { return mahd; } public void setMahd(String mahd) { this.mahd = mahd; } public int getDiemTruongDoan() { return DiemTruongDoan; } public void setDiemTruongDoan(int diemTruongDoan) { DiemTruongDoan = diemTruongDoan; } public int getDiemTieuChi1() { return DiemTieuChi1; } public void setDiemTieuChi1(int diemTieuChi1) { DiemTieuChi1 = diemTieuChi1; } public int getDiemTieuChi2() { return DiemTieuChi2; } public void setDiemTieuChi2(int diemTieuChi2) { DiemTieuChi2 = diemTieuChi2; } public int getDiemTieuChi3() { return DiemTieuChi3; } public void setDiemTieuChi3(int diemTieuChi3) { DiemTieuChi3 = diemTieuChi3; } } <file_sep>package controller; import java.io.IOException; import java.sql.Date; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import bean.Dangki; import model.DangkiDAO; /** * Servlet implementation class DangkiServlet */ public class DangkiServlet extends HttpServlet { private static final long serialVersionUID = 1L; /** * @see HttpServlet#HttpServlet() */ public DangkiServlet() { super(); // TODO 
Auto-generated constructor stub } /** * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response) */ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub doPost(request, response); } /** * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub request.setCharacterEncoding("UTF-8"); response.setCharacterEncoding("UTF-8"); DangkiDAO ddao = new DangkiDAO(); String mahd = request.getParameter("mhd"); if("submit".equals(request.getParameter("submit"))){ String matv = request.getParameter("matv"); String hoten = request.getParameter("hoten"); Date ngaydangki = Date.valueOf((request.getParameter("ngaydangki"))); String mhd = request.getParameter("mahd"); try { ddao.insertDangki(matv, hoten, ngaydangki, mhd); response.sendRedirect("Thank.jsp"); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } }else{ request.setAttribute("mahd", mahd); RequestDispatcher dispatcher = request.getRequestDispatcher("DangKiTGHD.jsp"); dispatcher.forward(request, response); } } }
835ef2b7f237604d497317d85f957577954f48aa
[ "Java" ]
5
Java
vovanquang12cntt/HDThienNguyen
cbde8c1f41f542a3511d72721190dae1882e71a2
9ae35b4084ffa4d16050ff93df7a04dfaf38d7b1
refs/heads/agfx-fxdb-core-api-dmz
<repo_name>ranjitmenon/agfx-fxdb-core-api-dmz<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Fix/FixQuoteRequestModel.cs using Argentex.Core.Service.Attributes; using System; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Fix { public class FixQuoteRequestModel { [Required] public string TradeCode { get; set; } [Required] [StringLength(3)] public string LHSCCY { get; set; } [Required] [StringLength(3)] public string RHSCCY { get; set; } [Required] [StringLength(3)] public string MajorCurrency { get; set; } [Required] [Range(1, 2)] public int Side { get; set; } [Required] public decimal BrokerMajorAmount { get; set; } [Required] public string ValueDate { get; set; } [Required] [Range(1000, 60000)] public int TimeOut { get; set; } [Required] [Range(1, 1440)] public int Duration { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/SwapType.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Enums { public enum SwapType { DeliveryLeg = 1, ReversalLeg = 2 } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/NotificationType.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.DataAccess.Entities { public partial class NotificationType { public int ID { get; set; } public string Name { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientSiteAction/IClientSiteActionUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using System.Linq; namespace Argentex.Core.UnitsOfWork.ClientSiteAction { public interface IClientSiteActionUow: IBaseUow { IQueryable<DataAccess.Entities.ClientSiteAction> GetClientSiteAction(long clientSiteActionID); void LogAction(DataAccess.Entities.ClientSiteAction action, string Id); ClientSiteActionStatus GetClientSiteActionStatusFromName(string actionStatusName); ClientSiteActionType GetClientSiteActionTypeFromName(string actionTypeName); IQueryable<ClientSiteAction2FxforwardTrade2Opi> GetOPIsAssignedToTrades(); IQueryable<ClientSiteAction2FixFxforwardTrade> GetTradesWithoutFIXConfirmation(); void UpdateClientSiteAction(DataAccess.Entities.ClientSiteAction action); IQueryable<ClientSiteAction2ClientCompanyOpi> GetNewOPIRequested(); IQueryable<ClientSiteAction2ClientCompanyOpi> GetClientSiteActionByOPIID(int clientCompanyOPIID); IQueryable<ClientSiteAction2Fxswap> GetSwaps(); void DeleteAction2AssignedSettlementLink(long settlementId); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ReportStatus.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ReportStatus { public ReportStatus() { ReportProcessedLog = new HashSet<ReportProcessedLog>(); ReportQueueToProcess = new HashSet<ReportQueueToProcess>(); } public int Id { get; set; } public string Status { get; set; } public ICollection<ReportProcessedLog> ReportProcessedLog { get; set; } public ICollection<ReportQueueToProcess> ReportQueueToProcess { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AppUserType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AppUserType { public AppUserType() { AppUser = new HashSet<AppUser>(); } public int Id { get; set; } public string Description { get; set; } public string HomePage { get; set; } public int? 
CommissionTypeId { get; set; } public int ClientCompanySalesRegionId { get; set; } public ClientCompanySalesRegion ClientCompanySalesRegion { get; set; } public CommissionType CommissionType { get; set; } public ICollection<AppUser> AppUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogAuthUser.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogAuthUser { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string UserName { get; set; } public string Password { get; set; } public string Email { get; set; } public bool IsApproved { get; set; } public bool IsLockedOut { get; set; } public string Comment { get; set; } public DateTime CreateDate { get; set; } public DateTime? LastPasswordChangeDate { get; set; } public DateTime? LastLoginDate { get; set; } public DateTime? LastActivityDate { get; set; } public DateTime? LastLockOutDate { get; set; } public int FailedPasswordAttemptCount { get; set; } public DateTime FailedPasswordAttemptWindowStart { get; set; } public int ApplicationId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/IntroducingBroker.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class IntroducingBroker { public int Id { get; set; } public string Name { get; set; } public string Surname { get; set; } public string FullName { get; set; } public string Email { get; set; } public string Telephone { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime UpdateDateTime { get; set; } public int UpdateAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/ApproveUsersRequest.cs using System.Collections.Generic; namespace Argentex.Core.Service.Models.Identity { public class ApproveUsersRequest { public int ApproverAuthUserId { get; set; } public ICollection<int> UserIdsToApprove { get; set; } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Helpers/CodeBuilderTests.cs using Argentex.Core.Service.Helpers; using Xunit; namespace Argentex.Core.Service.Tests.Helpers { public class CodeBuilderTests { [Fact] public void FormatTradeCode_With_Client_Company_ID_Less_Than_Ten_Thousand_And_With_Trade_Count_Less_Than_Ten_Thousand() { //Arrange //Act var tradeCode1 = CodeBuilder.FormatTradeCode(1, 1); var tradeCode01 = CodeBuilder.FormatTradeCode(10, 10); var tradeCode001 = CodeBuilder.FormatTradeCode(100, 100); var tradeCode0001 = CodeBuilder.FormatTradeCode(1000, 1000); //Assert Assert.Equal("AG0001-0001", tradeCode1); Assert.Equal("AG0010-0010", tradeCode01); Assert.Equal("AG0100-0100", tradeCode001); Assert.Equal("AG1000-1000", tradeCode0001); } [Fact] public void FormatTradeCode_With_Client_Company_ID_More_Than_Ten_Thousand_And_With_Trade_Count_Less_Than_Ten_Thousand() { //Arrange //Act var tradeCode1 = CodeBuilder.FormatTradeCode(1, 20000); var tradeCode01 = CodeBuilder.FormatTradeCode(10, 20000); var tradeCode001 = CodeBuilder.FormatTradeCode(100, 20000); var tradeCode0001 = CodeBuilder.FormatTradeCode(1000, 20000); //Assert Assert.Equal("AG20000-0001", tradeCode1); Assert.Equal("AG20000-0010", tradeCode01); Assert.Equal("AG20000-0100", tradeCode001); Assert.Equal("AG20000-1000", tradeCode0001); } [Fact] public void FormatTradeCode_With_Client_Company_ID_Less_Than_Ten_Thousand_And_With_Trade_Count_More_Than_Ten_Thousand() { //Arrange //Act var 
tradeCode1 = CodeBuilder.FormatTradeCode(20000, 1); var tradeCode01 = CodeBuilder.FormatTradeCode(20000, 10); var tradeCode001 = CodeBuilder.FormatTradeCode(20000, 100); var tradeCode0001 = CodeBuilder.FormatTradeCode(20000, 1000); //Assert Assert.Equal("AG0001-20000", tradeCode1); Assert.Equal("AG0010-20000", tradeCode01); Assert.Equal("AG0100-20000", tradeCode001); Assert.Equal("AG1000-20000", tradeCode0001); } [Fact] public void FormatTradeCode_With_Client_Company_ID_More_Than_Ten_Thousand_And_With_Trade_Count_More_Than_Ten_Thousand() { //Arrange //Act var tradeCode = CodeBuilder.FormatTradeCode(20000, 20000); //Assert Assert.Equal("AG20000-20000", tradeCode); } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Extensions/AppSettingExtensions.cs using System; using System.Collections.Generic; using System.Text; using Argentex.Core.DataAccess.Entities; namespace Argentex.Core.UnitsOfWork.Extensions { public static class AppSettingExtensions { public static T ValueAs<T>(this AppSetting setting) where T : struct, IConvertible => (T)Convert.ChangeType(setting.SettingValue, typeof(T)); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ArmreportOutgoingFileContent.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ArmreportOutgoingFileContent { public ArmreportOutgoingFileContent() { ArmreportOutgoingFile = new HashSet<ArmreportOutgoingFile>(); } public int Id { get; set; } public string FileContent { get; set; } public ICollection<ArmreportOutgoingFile> ArmreportOutgoingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Payments/PaymentsService.cs using Argentex.Core.Service.Models.Email; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.Payments; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.Service.Payments { public class PaymentsService : IPaymentsService { private readonly IPaymentUoW _paymentUoW; private readonly IEmailService _emailService; private readonly IClientCompanyUow _clientCompanyUow; private bool _disposed; public PaymentsService(IPaymentUoW paymentUoW, IEmailService emailService, IClientCompanyUow clientCompanyUow) { _paymentUoW = paymentUoW; _emailService = emailService; _clientCompanyUow = clientCompanyUow; } public async Task<bool> NotifyContacts(string paymentCode) { var payment = GetPaymentNotificationModel(paymentCode); var emailList = GetPaymentNotificationEmails(payment.ClientCompany.Id).ToList(); if(emailList.Count > 0) { switch (payment.PaymentTypeDescription) { case "OUT": await _emailService.SendOutwardPaymentEmailAsync(payment, emailList); return true; case "IN": await _emailService.SendInwardPaymentEmailAsync(payment, emailList); return true; case "Inter-Virtual-Account": // some email return true; } } return false; } private IEnumerable<string> GetPaymentNotificationEmails(int clientCompanyID) { var contacts = _clientCompanyUow.GetClientCompanyContact(clientCompanyID).Where(e=> !e.IsDeleted && e.Authorized && e.RecNotifications); return contacts.Select(e => e.Email); } private PaymentNotificationModel GetPaymentNotificationModel(string paymentCode) { return _paymentUoW.GetPaymentNotification(paymentCode) .Select(payment => new PaymentNotificationModel { PaymentCode = payment.Code, PaymentAmount = (decimal)payment.Amount, ValueDate = payment.ValueDate, Reference = payment.Reference, PaymentTypeDescription = payment.PaymentType.Description, 
ClientCompany = payment.ClientCompany, Currency = payment.Currency, ClientCompanyOpi = payment.ClientCompanyOpitransaction.FirstOrDefault().ClientCompanyOpi }).SingleOrDefault(); } #region Dispose protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { //_paymentUoW?.Dispose(); //_tradeUow?.Dispose(); //_currencyUoW?.Dispose(); //_clientCompanyUow?.Dispose(); //_clientCompanyAccountsUoW?.Dispose(); //_settlementUow?.Dispose(); //_userService?.Dispose(); //_currencyService?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationFieldFieldComponent.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftvalidationFieldFieldComponent { public int FieldId { get; set; } public int FieldComponentId { get; set; } public int LineNumber { get; set; } public int Sequence { get; set; } public SwiftvalidationField Field { get; set; } public SwiftvalidationFieldComponent FieldComponent { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/UserChangeRequestApproval.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class UserChangeRequestApproval { public int Id { get; set; } public int UserChangeRequestId { get; set; } public int ApprovedByAuthUserId { get; set; } public DateTime ApprovedDateTime { get; set; } public bool? IsActive { get; set; } public AuthUser ApprovedByAuthUser { get; set; } public UserChangeRequest UserChangeRequest { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Data/SecurityDbContext.cs using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Identity.EntityFrameworkCore; using Microsoft.EntityFrameworkCore; using Microsoft.AspNetCore.Identity; namespace Argentex.Core.Identity.DataAccess { public class SecurityDbContext : IdentityDbContext<ApplicationUser, ApplicationRole, long, ApplicationUserClaim, ApplicationUserRole, ApplicationUserLogin, ApplicationRoleClaim, ApplicationUserToken> { public SecurityDbContext(DbContextOptions<SecurityDbContext> options) : base(options) { } public virtual DbSet<Activity> Activies { get; set; } public virtual DbSet<ActivityLog> ActivieLogs { get; set; } public virtual DbSet<CountryGroup> CountryGroups { get; set; } public virtual DbSet<Country> Countries { get; set; } public virtual DbSet<Report> Reports { get; set; } public virtual DbSet<UserReport> UserReports { get; set; } public virtual DbSet<Token> Tokens { get; set; } public virtual DbSet<PreviousPassword> PreviousPasswords { get; set; } protected override void OnModelCreating(ModelBuilder builder) { base.OnModelCreating(builder); builder.Entity<ApplicationUser>() .ToTable("User"); builder.Entity<ApplicationUser>() .Property(p => p.CreateDate) .HasDefaultValueSql("getdate()"); builder.Entity<ApplicationUser>() .Property(p => p.LastPasswordChange) .HasDefaultValueSql("GETDATE()"); builder.Entity<ApplicationUser>() .Property(p => p.Id) .ValueGeneratedOnAdd(); builder.Entity<ApplicationUser>() .HasMany(p => p.PreviousPasswords); builder.Entity<PreviousPassword>() .HasOne(e => e.User) .WithMany(e => e.PreviousPasswords) .HasForeignKey(e => e.UserId) .HasConstraintName("FK_PreviousPasswords_User"); builder.Entity<ApplicationRole>().ToTable("Role"); 
builder.Entity<ApplicationUserRole>().ToTable("UserRole"); builder.Entity<ApplicationUserClaim>().ToTable("UserClaim"); builder.Entity<ApplicationUserLogin>().ToTable("UserLogin"); builder.Entity<ApplicationRoleClaim>().ToTable("RoleClaim"); builder.Entity<ApplicationUserToken>().ToTable("UserToken"); //additional tables builder.Entity<Activity>().ToTable("Activity"); builder.Entity<ActivityLog>().ToTable("ActivityLog"); builder.Entity<CountryGroup>().ToTable("CountryGroup"); builder.Entity<Country>().ToTable("Country"); builder.Entity<Report>().ToTable("Report"); builder.Entity<UserReport>().ToTable("UserReport") .HasKey(x => new { x.ReportId, x.UserId }); builder.Entity<UserReport>().ToTable("UserReport"); builder.Entity<UserReport>().ToTable("UserReport"); builder.Entity<Token>().ToTable("Token"); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Recurrent.Services/Program.cs using Argentex.ClientSite.Service.Http; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.ClientCompanies; using Argentex.Core.Service.Country; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Order; using Argentex.Core.Service.Payments; using Argentex.Core.Service.Settlements; using Argentex.Core.Service.Statements; using Argentex.Core.Service.Trade; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.AppSettings; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.Countries; using Argentex.Core.UnitsOfWork.Currencies; using Argentex.Core.UnitsOfWork.Payments; using Argentex.Core.UnitsOfWork.Statements; using Argentex.Core.UnitsOfWork.Trades; using Argentex.Core.UnitsOfWork.Users; using EQService; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using SynetecLogger; using System; using System.Net.Http; using System.Threading.Tasks; namespace Argentex.Recurrent.Services { class Program { public static IConfiguration Configuration { get; set; } static void Main(string[] args) { MainAsync().Wait(); } static async Task MainAsync() { // config section // getting service collections with dependencies var serviceCollections = StartupDependencyInjections(); // building services var serviceProvider = serviceCollections.BuildServiceProvider(); // recurrent var orderService = serviceProvider.GetService<IOrderService>(); var orders = orderService.GetExpiredValidityOrders(); Console.WriteLine($"Current orders ={orders.Count}"); foreach (var order in orders) { Console.WriteLine($"Current orders ={order.TradeRef}"); await orderService.CancelOrderAsync(order); } Console.WriteLine($"Done"); Console.ReadLine(); } /// <summary> /// Get Program Dependency Injections /// </summary> /// <returns>ServiceCollection</returns> public static ServiceCollection StartupDependencyInjections() { var serviceCollections = new ServiceCollection(); var environmentName = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); var builder = new ConfigurationBuilder() .SetBasePath(Environment.CurrentDirectory) .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true) .AddJsonFile($"appsettings.{environmentName}.json", optional: true, reloadOnChange: true) 
.AddEnvironmentVariables(); Configuration = builder.Build(); // DB Context configs serviceCollections.AddDbContext<SecurityDbContext>(options => options.UseSqlServer(Configuration.GetConnectionString("IdentityDB"))); serviceCollections.AddIdentity<ApplicationUser, ApplicationRole>() .AddEntityFrameworkStores<SecurityDbContext>(); serviceCollections.AddEntityFrameworkSqlServer() .AddDbContext<FXDB1Context>(options => options.UseSqlServer(Configuration.GetConnectionString("FXDB1"))); // adding dependencies serviceCollections.AddSingleton(provider => Configuration); serviceCollections.AddScoped<ILogWrapper, NLogWrapper>((ctx) => new NLogWrapper(Configuration.GetConnectionString("NLogWrapperDB"))); serviceCollections.AddScoped<HttpClient, HttpClient>(); serviceCollections.AddScoped<IHttpService, HttpService>(); serviceCollections.AddScoped<IBarxFxService, BarxFxService>(); serviceCollections.AddScoped<ITradeService, TradeService>(); serviceCollections.AddScoped<ITradeUow, TradeUow>(); serviceCollections.AddScoped<IClientCompanyService, ClientCompanyService>(); serviceCollections.AddScoped<IClientCompanyUow, ClientCompanyUow>(); serviceCollections.AddScoped<ICountryService, CountryService>(); serviceCollections.AddScoped<ICountryUow, CountryUow>(); serviceCollections.AddScoped<IIdentityService, IdentityService>(); serviceCollections.AddScoped<IUserService, UserService>(); serviceCollections.AddScoped<IUserUow, UserUow>(); serviceCollections.AddScoped<IConfigWrapper, ConfigWrapper>(); serviceCollections.AddScoped<IStatementUoW, StatementUoW>(); serviceCollections.AddScoped<IStatementService, StatementService>(); serviceCollections.AddScoped<ICurrencyUoW, CurrencyUoW>(); serviceCollections.AddScoped<ICurrencyService, CurrencyService>(); serviceCollections.AddScoped<IClientCompanyAccountsUoW, ClientCompanyAccountsUoW>(); serviceCollections.AddScoped<IClientCompanyAccountsService, ClientCompanyAccountsService>(); serviceCollections.AddScoped<IPaymentUoW, PaymentUoW>(); serviceCollections.AddScoped<ISettlementService, SettlementService>(); serviceCollections.AddScoped<IOrderService, OrderService>(); serviceCollections.AddScoped<IAppSettingService, AppSettingService>(); serviceCollections.AddScoped<IAppSettingUow, AppSettingUow>(); serviceCollections.AddScoped<IServiceEmail, ServiceEmailClient>(); serviceCollections.AddScoped<IEmailSender, EmailSender>(); return serviceCollections; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyStatus.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyStatus { public ClientCompanyStatus() { ClientCompany = new HashSet<ClientCompany>(); } public int Id { get; set; } public string Description { get; set; } public bool IsLeadStatus { get; set; } public ICollection<ClientCompany> ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Country/ICountryService.cs using Argentex.Core.Service.Models.Country; using System; using System.Collections.Generic; namespace Argentex.Core.Service.Country { public interface ICountryService : IDisposable { IEnumerable<CountryModel> GetCountries(); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/CurrencyNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class CurrencyNotFoundException : Exception { public CurrencyNotFoundException() : base() { } public CurrencyNotFoundException(string message) : base(message) { } public 
CurrencyNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportIncomingFile.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportIncomingFile { public int Id { get; set; } public int EmirreportId { get; set; } public string Zipfilename { get; set; } public DateTime CreatedDateTime { get; set; } public string Xmlfilename { get; set; } public int? EmirreportIncomingFileContentId { get; set; } public Emirreport Emirreport { get; set; } public EmirreportIncomingFileContent EmirreportIncomingFileContent { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ActivityTabUserData.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ActivityTabUserData { public int AppUserId { get; set; } public DateTime DataDatetime { get; set; } public int HourDayRangeId { get; set; } public string DayOfWeek { get; set; } public int? Calls { get; set; } public int? LongCalls { get; set; } public decimal? LongCallsCalls { get; set; } public int? Brochures { get; set; } public ActivityTabHourDayRange HourDayRange { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/SynetecLogger/Interface/ILogWrapper.cs using log4net.Core; using System; using System.Collections.Generic; using System.Text; namespace SynetecLogger { /// <summary> /// Generic logger. Logs depending on implementation. Please see individual classes. /// </summary> public interface ILogWrapper { void Error(Exception ex); void Info(string info); void Fatal(Exception ex); void Debug(string message); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogPayment.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogPayment { public int LogId { get; set; } public string LogAction { get; set; } public DateTime CreatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime UpdateDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public int Id { get; set; } public string Code { get; set; } public int PaymentTypeId { get; set; } public bool Authorised { get; set; } public int? AuthorisedByAuthUserId { get; set; } public DateTime? AuthorisedDateTime { get; set; } public string FxforwardTradeCode { get; set; } public int? ClientCompanyId { get; set; } public int CurrencyId { get; set; } public DateTime ValueDate { get; set; } public decimal? Amount { get; set; } public string Reason { get; set; } public string Comments { get; set; } public bool? NotifyClient { get; set; } public bool Applied { get; set; } public DateTime? AppliedDateTime { get; set; } public int? PaymentRecReasonId { get; set; } public int? TransactionCommitId { get; set; } public bool IsDeleted { get; set; } public int? DebitBankAccountId { get; set; } public int? CreditBankAccountId { get; set; } public int? DebitClientCompanyVirtualAccountId { get; set; } public int? CreditClientCompanyVirtualAccountId { get; set; } public int? CreditClientCompanyOpiid { get; set; } public string Reference { get; set; } public decimal? ApplicableRate { get; set; } public byte[] UpdateTimeStamp { get; set; } public bool? IsSwiftpayment { get; set; } public int? PaymentSwiftoutgoingStatusId { get; set; } public int? SwiftAuth1ByAuthUserId { get; set; } public int? SwiftAuth2ByAuthUserId { get; set; } public DateTime? 
SwiftAuth1DateTime { get; set; } public DateTime? SwiftAuth2DateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/ClientCompanyContactNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class ClientCompanyContactNotFoundException : Exception { public ClientCompanyContactNotFoundException() : base() { } public ClientCompanyContactNotFoundException(string message) : base(message) { } public ClientCompanyContactNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/ResponseModel.cs using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc.ModelBinding; namespace Argentex.Core.Api.Models { public class ResponseModel { private ResponseModel() { } public IDictionary<string, string[]> ResponseMessages { get; set; } public static ResponseModel ResponseWithErrors(params string[] errors) => new ResponseModel { ResponseMessages = new Dictionary<string, string[]> { { "Errors", errors} } }; public static ResponseModel ResponseWithInfo(params string[] messages) => new ResponseModel { ResponseMessages = new Dictionary<string, string[]> { { "Info", messages} } }; public static ResponseModel ResponseFromIdentityModel(IdentityResult result) { if (result.Succeeded) throw new ArgumentOutOfRangeException(nameof(result), "Unable to generate Response model for a successful Identity result"); return ResponseWithErrors(result.Errors.Select(e => e.Description).ToArray()); } public static ResponseModel ResponseFromInvalidModelState(ModelStateDictionary modelState) => ResponseWithErrors(modelState.Values.SelectMany(x => x.Errors).Select(x => x.ErrorMessage).ToArray()); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyAccountModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service { public class ClientCompanyAccountModel { public int ClientCompanyOpiId { get; set; } public int ClientCompanyId { get; set; } public int CurrencyId { get; set; } public string Currency { get; set; } public int CountryId { get; set; } public string Country { get; set; } public string Description { get; set; } public string BankName { get; set; } public string BankAddress { get; set; } public int ClearingCodePrefixId { get; set; } public string AccountName { get; set; } public string AccountNumber { get; set; } public string SortCode { get; set; } public string SwiftCode { get; set; } public string Iban { get; set; } public bool IsDefault { get; set; } public bool Approved { get; set; } public string BeneficiaryName { get; set; } public string BeneficiaryAddress { get; set; } public int UpdatedByAuthUserId { get; set; } public string Reference { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ReportProcessedLog.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ReportProcessedLog { public int Id { get; set; } public string Name { get; set; } public string FunctionName { get; set; } public string Parameters { get; set; } public string Result { get; set; } public string ResultPage { get; set; } public DateTime StartDateTime { get; set; } public DateTime EndDateTime { get; set; } public int ReportStatusId { get; set; } public int AuthUserId { get; set; } public string ExceptionInfo { 
get; set; } public AuthUser AuthUser { get; set; } public ReportStatus ReportStatus { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Currencies/CurrencyUoW.cs using System.Linq; using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; namespace Argentex.Core.UnitsOfWork.Currencies { public class CurrencyUoW : BaseUow, ICurrencyUoW { private IGenericRepo<CurrencyPairPricing> _currencyPairPricingRepository; private IGenericRepo<Currency> _currencyRepository; public CurrencyUoW(FXDB1Context context) : base(context) { } public IGenericRepo<CurrencyPairPricing> CurrencyPairPricingRepository => _currencyPairPricingRepository = _currencyPairPricingRepository ?? new GenericRepo<CurrencyPairPricing>(Context); private IGenericRepo<Currency> CurrencyRepository => _currencyRepository = _currencyRepository ?? new GenericRepo<Currency>(Context); public IQueryable<Currency> GetCurrency(int currencyId) { return CurrencyRepository.GetQueryable(x => x.Id == currencyId); } public IQueryable<Currency> GetCurrencies() { return CurrencyRepository.GetQueryable(); } public IQueryable<Currency> GetCurrency(string code) { return CurrencyRepository.GetQueryable(x => x.Code == code.ToUpper()); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FixApatradeMessage.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FixApatradeMessage { public int Id { get; set; } public string TradeCode { get; set; } public DateTime MessageDate { get; set; } public string FixMessage { get; set; } public bool IsReset { get; set; } public FxforwardTrade TradeCodeNavigation { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Payments/PaymentServiceTests.cs using System; using System.Collections.Generic; using System.Linq; using System.Transactions; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Models.Payments; using Argentex.Core.Service.Models.Settlements; using Argentex.Core.Service.Payments; using Argentex.Core.Service.Trade; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.Currencies; using Argentex.Core.UnitsOfWork.Payments; using Argentex.Core.UnitsOfWork.Settlements; using Argentex.Core.UnitsOfWork.Trades; using Moq; using Xunit; namespace Argentex.Core.Service.Tests.Payments { public class PaymentServiceTests { [Fact] public void Given_There_Is_No_Payment_Associated_With_The_Code_An_Exception_Should_Be_Thrown() { // Given var paymentCode = "PC 42"; var payments = new List<Payment>(); var paymentUoWMock = new Mock<IPaymentUoW>(); var currencyUoW = new Mock<ICurrencyUoW>(); var clientCompanyUow = new Mock<IClientCompanyUow>(); var clientCompanyAccountsUoW = new Mock<IClientCompanyAccountsUoW>(); paymentUoWMock.Setup(x => x.GetPayment(It.IsAny<string>())).Returns(payments.AsQueryable); var service = new SettlementService(paymentUoWMock.Object, currencyUoW.Object, clientCompanyUow.Object, clientCompanyAccountsUoW.Object, null, null, null, null, null, null, null, null); var expectedMessage = $"Payment with code {paymentCode} does not exist"; // When var result = Assert.Throws<PaymentNotFoundException>(() => service.GetPaymentInformation(paymentCode)); // Then Assert.NotNull(result); Assert.Equal(expectedMessage, result.Message); } [Fact] public void 
Given_A_Payment_Is_Associated_With_The_Code_And_It_Is_Not_Payment_Out_A_Payment_Information_Model_Should_Be_Returned() { // Given var now = DateTime.Now; var today = DateTime.Today; var payment = new Payment { Code = "PC 42", PaymentTypeId = 1, ValueDate = today, CreatedDate = now, Amount = 42000, Reference = "Ref 42", Currency = new Currency { Id = 42, Code = "GBP" } }; var payments = new List<Payment> { payment }; var paymentUoWMock = new Mock<IPaymentUoW>(); var currencyUoW = new Mock<ICurrencyUoW>(); var clientCompanyUow = new Mock<IClientCompanyUow>(); var clientCompanyAccountsUoW = new Mock<IClientCompanyAccountsUoW>(); paymentUoWMock.Setup(x => x.GetPayment(It.IsAny<string>())).Returns(payments.AsQueryable); var service = new SettlementService(paymentUoWMock.Object, currencyUoW.Object, clientCompanyUow.Object, clientCompanyAccountsUoW.Object, null, null, null, null, null, null, null, null); var expectedType = typeof(PaymentInformationModel); var expectedPaymentCode = payment.Code; var expectedPaymentType = "In"; var expectedValueDate = today; var expectedCreatedDateTime = now; var expectedAmount = payment.Amount.Value; var expectedReference = payment.Reference; var expectedCurrency = "GBP"; // When var result = service.GetPaymentInformation(payment.Code); // Then Assert.NotNull(result); Assert.Equal(expectedType, result.GetType()); Assert.Equal(expectedPaymentCode, result.PaymentCode); Assert.Equal(expectedPaymentType, result.PaymentType); Assert.Equal(expectedValueDate, result.ValueDate); Assert.Equal(expectedCreatedDateTime, result.CreatedDateTime); Assert.Equal(expectedAmount, result.Amount); Assert.Equal(expectedReference, result.Reference); Assert.Equal(expectedCurrency, result.Currency); } [Fact] public void Given_There_Is_No_Opi_Transaction_An_Exception_Should_Be_Thrown() { // Given var now = DateTime.Now; var today = DateTime.Today; var payment = new Payment { Id = 42, Code = "PC 42", PaymentTypeId = 1, ValueDate = today, CreatedDate = now, Amount = 42000, Reference = "Ref 42", CurrencyId = 42 }; var payments = new List<Payment> { payment }; var opiTransactions = new List<ClientCompanyOpitransaction>(); var paymentUoWMock = new Mock<IPaymentUoW>(); var currencyUoW = new Mock<ICurrencyUoW>(); var clientCompanyUow = new Mock<IClientCompanyUow>(); var clientCompanyAccountsUoW = new Mock<IClientCompanyAccountsUoW>(); paymentUoWMock.Setup(x => x.GetPayment(It.IsAny<string>())).Returns(payments.AsQueryable); paymentUoWMock.Setup(x => x.GetClientCompanyOpiTransaction(It.IsAny<string>())) .Returns(opiTransactions.AsQueryable); var service = new SettlementService(paymentUoWMock.Object, currencyUoW.Object, clientCompanyUow.Object, clientCompanyAccountsUoW.Object, null, null, null, null, null, null, null, null); ; var expectedMessage = $"Opi transaction for payment code {payment.Code} does not exist"; // When var result = Assert.Throws<ClientCompanyOpiTransactionNotFoundException>(() => service.GetPaymentInformation(payment.Code, true)); // Then Assert.NotNull(result); Assert.Equal(expectedMessage, result.Message); } [Fact] public async void AssignAsync_Should_Throw_Exception_When_maxSettlementCreateDateForTrade_Is_Different_In_Both_Checks() { // Arrange var userServiceMock = new Mock<IUserService>(); var tradeUowMock = new Mock<ITradeUow>(); var settlementUowMock = new Mock<ISettlementUow>(); var tradeServiceMock = new Mock<ITradeService>(); AssignSettlementModel settlementModelResult = new AssignSettlementModel() { ValueDate = DateTime.Today.ToString(), SettlementId = 1414, 
TradedCurrency = "GBPEUR", Account = new AccountModel(), Amount = 14500, IsPayTotal = false, Status = 0 }; AssignSettlementRequestModel settlementModel = new AssignSettlementRequestModel() { AuthUserId = 1, ClientCompanyId = 111, Trade = new Models.Trade.TradeModel() { TradeId = "10", ValueDate = DateTime.Today, Balance = 14500 }, SettlementModels = new List<AssignSettlementModel>() { settlementModelResult } }; var trades = new List<FxforwardTrade>(); userServiceMock.Setup(x => x.GetAuthUserById(It.IsAny<int>())).Returns(new AuthUser()); tradeUowMock.Setup(x => x.GetTrade(It.IsAny<string>())).Returns(trades.AsQueryable); settlementUowMock.SetupSequence(x => x.GetMaxCreateDateForTrade(It.IsAny<string>())).Returns(DateTime.Today.AddDays(-1)).Returns(DateTime.Today); settlementUowMock.Setup(x => x.GetTradeOpis(It.IsAny<string>())).Returns(new List<FxforwardTrade2Opi>()); settlementUowMock.Setup(x => x.GetTradeSwaps(It.IsAny<string>())).Returns(new Dictionary<FxforwardTrade, DataAccess.Entities.ClientSiteAction>()); tradeServiceMock.Setup(x => x.GetTradeBalance(It.IsAny<int>(), It.IsAny<string>())).Returns(14500); var expectedException = typeof(TransactionAbortedException); var expectedExceptionMessage = "The transaction has been aborted."; var service = new SettlementService(null, null, null, null, settlementUowMock.Object, tradeUowMock.Object, userServiceMock.Object, null, null, null, null, tradeServiceMock.Object); // Act var result = await Assert.ThrowsAsync<TransactionAbortedException>(() => service.AssignAsync(settlementModel)); // Assert Assert.NotNull(result); Assert.Equal(expectedExceptionMessage, result.Message); Assert.Equal(expectedException, result.GetType()); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Identity/Services/IdentityService.cs using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service.Email.EmailSender; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity.Helpers; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Models.Identity; using Argentex.Core.UnitsOfWork.Users; using AspNet.Security.OpenIdConnect.Primitives; using Microsoft.AspNetCore.Identity; using Microsoft.IdentityModel.Tokens; using System; using System.Collections.Generic; using System.IdentityModel.Tokens.Jwt; using System.Linq; using System.Security.Claims; using System.Text; using System.Threading.Tasks; using System.Web; using OpenIddict.EntityFrameworkCore.Models; namespace Argentex.Core.Service.Identity { public class IdentityService : IIdentityService { private readonly IConfigWrapper _config; private readonly IUserUow _userUow; private readonly IClientApplicationUow _clientApplicationUow; private readonly IEmailService _emailService; private const string REGISTER = "register"; private const string LOGIN = "login"; private const string ADMIN = "admin"; private bool _disposed; public IdentityService( IConfigWrapper config, IUserUow userUow, IEmailService emailService, IClientApplicationUow clientApplicationUow ) { _config = config; _userUow = userUow; _emailService = emailService; _clientApplicationUow = clientApplicationUow; } private IEnumerable<Claim> BuildStandardClaims(string subject) => new[] { new Claim(JwtRegisteredClaimNames.Jti, Guid.NewGuid().ToString()), new Claim(JwtRegisteredClaimNames.Sub, subject), new Claim(JwtRegisteredClaimNames.Iat, new DateTimeOffset(DateTime.UtcNow).ToUnixTimeSeconds().ToString()) }; private IEnumerable<Claim> BuildUserClaims(UserModel user) => 
new[] { new Claim(JwtRegisteredClaimNames.Email, user.Email), new Claim(OpenIdConnectConstants.Claims.Name, user.Name), new Claim("fullname" , user.Forename + " " + user.Surname), new Claim("clientCompanyId", user.ClientCompanyId.ToString()), new Claim("authUserId", user.AuthUserId.ToString()), new Claim("role", JsonExtensions.SerializeToJson(user.Roles)), new Claim("daysBeforePasswordExpiration", CalculateDaysBeforePasswordExpiration(user.PasswordLastChanged).ToString()), new Claim("isAdmin", user.IsAdmin.ToString()), new Claim("grantType", "password"), }; private IEnumerable<Claim> BuildUserClaims() => new[] { new Claim("grantType", "client_credentials"), }; public JwtSecurityToken BuildToken(string tokenSubject, bool tokenCanExpire, IEnumerable<Claim> additionalClaims = null) { DateTime now = DateTime.UtcNow; var claims = BuildStandardClaims(tokenSubject).ToList(); if (additionalClaims != null) claims.AddRange(additionalClaims); var key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(_config.Get("Jwt:SecurityKey"))); var creds = new SigningCredentials(key, SecurityAlgorithms.HmacSha256); double.TryParse(_config.Get("Jwt:Expires"), out double expires); var token = new JwtSecurityToken( issuer: _config.Get("Jwt:Issuer"),//TODO - get this from DB audience: _config.Get("Jwt:Issuer"), //TODO - get this from DB claims: claims, notBefore: now, expires: tokenCanExpire ? (DateTime?)now.AddSeconds(expires) : null,//TODO get this from DB signingCredentials: creds); return token; } public async Task<TokenModel> AuthenticateAsync(LoginServiceModel login) { switch (login.Grant_Type) { case OpenIdConnectConstants.GrantTypes.Password: return await GetTokenFromPassword(login); case OpenIdConnectConstants.GrantTypes.ClientCredentials: return await GetTokenFromCredentials(login); default: await LogCurrentActivity(login.Username, LOGIN, false, login.PrimaryIP, login.SecondaryIP); return null; } } public async Task<TokenModel> RefreshToken(RefreshTokenModel refreshTokenModel) { var refreshToken = _userUow.GetRefreshToken(refreshTokenModel.UserID, refreshTokenModel.RefreshToken); if (refreshToken == null) return null; ApplicationUser user = await _userUow.GetUserByIdAsync(refreshToken.UserId.ToString()); if (user == null) return null; var newRefreshToken = CreateRefreshToken(refreshToken.ClientId, refreshToken.UserId); await _userUow.ReplaceToken(newRefreshToken, refreshToken); var appUser = await CreateUserServiceModel(user); var token = CreateToken(appUser, newRefreshToken); return token; } public async Task<IdentityResult> ChangePasswordAsync(string userId, string oldPassword, string newPassword, string confirmPassword) { if (newPassword != confirmPassword) throw new PasswordsDoNotMatchException("Passwords do not match"); var user = await _userUow.GetUserByIdAsync(userId); if (user == null) throw new ApplicationUserNotFoundException($"User with id {userId} does not exist"); var hasher = new PasswordHasher<ApplicationUser>(); var newPasswordHash = hasher.HashPassword(user, newPassword); var result = PasswordValidation(hasher, user, oldPassword, newPassword); if (!result.Succeeded) return result; ValidatePasswordWithHistory(user, newPassword); result = await _userUow.ChangePasswordAsync(user, newPasswordHash); if (result.Succeeded) await _emailService.SendPasswordChangedEmailAsync(user.UserName); return result; } public async Task<IdentityResult> ResetPasswordAsync(string userName, string code, string password) { var user = await _userUow.GetUserByNameAsync(userName); if (user == null || user.IsDeleted) 
return IdentityResult.Failed(new IdentityError { Code = IdentityResultCodes.UserNotFound, Description = $"User with username {userName} does not exist" }); if (!user.IsApproved || user.LockoutEnabled) return IdentityResult.Failed(new IdentityError { Code = IdentityResultCodes.InvalidUserState, Description = $"{userName} is in an invalid state" }); code = SanitizeCode(code); ValidatePasswordWithHistory(user, password); var result = await _userUow.ResetPasswordAsync(user, code, password); if (result.Succeeded) await _emailService.SendPasswordChangedEmailAsync(user.UserName); return result; } public async Task<bool> VerifyUserToken(string userName, string tokenCode) { var tokenPurpose = "ResetPassword"; var appUser = await _userUow.GetUserByNameAsync(userName); tokenCode = SanitizeCode(tokenCode); var isValid = await _userUow.VerifyToken(appUser, TokenOptions.DefaultProvider, tokenPurpose, tokenCode); return isValid; } public async Task<string> GenerateEmailConfirmationTokenAsync(ApplicationServiceUser user) { ApplicationUser appUser = MapAppUser(new ApplicationUser(), user); string confirmationToken = await _userUow.GenerateEmailConfirmationTokenAsync(appUser); return confirmationToken; } public async Task SetUserAsAdmin(string username) { var adminId = _userUow.GetRole(ADMIN) .Select(x => x.Id) .FirstOrDefault(); var user = _userUow.ApplicationUserRepo.Get(x => x.UserName.Equals(username, StringComparison.InvariantCultureIgnoreCase)).FirstOrDefault(); await _userUow.SetRoleForUser(user.Id, adminId); } public async Task LogoutAsync(string identityUserName, string refreshToken) { // remove token from database try { var user = await _userUow.GetUserByNameAsync(identityUserName); Token token; if (user != null && !string.IsNullOrEmpty(refreshToken)) { token = _userUow.GetRefreshToken((int)user.Id, refreshToken); if (token != null) { await _userUow.RemoveToken(token); } } await LogCurrentActivity(identityUserName, "logout", true, null, null); await _userUow.CurrentUserSignOutAsync(); } catch (Exception ex) { /* The catch block is intentionally kept empty to avoid the DB from being bombarded with errors. The exception occurs when the user tries to logout from any page from the angular client site. The exception thrown is Database operation expected to affect 1 row(s) but actually affected 0 row(s). Data may have been modified or deleted since entities were loaded. */ } } public async Task<string> GetUserPhoneNumber(string username) { var applicationUser = await _userUow.GetUserByNameAsync(username); return applicationUser.PhoneNumberMobile; } #region Private private int CalculateDaysBeforePasswordExpiration(DateTime userPasswordLastChanged) { var today = DateTime.Today; var daysDifference = DateHelpers.GetDaysDifferencreBetween(userPasswordLastChanged, today); var remainingDays = 30 - daysDifference; return remainingDays <= 0 ? 
0 : remainingDays; } private async Task<TokenModel> GetTokenFromPassword(LoginServiceModel login) { var userModel = await PasswordSignInAsync(login); // adding to activity log await LogCurrentActivity(login.Username, LOGIN, userModel != null, login.PrimaryIP, login.SecondaryIP); if (userModel == null) return null; var refreshToken = CreateRefreshToken(login.ClientId, userModel.UserId); await _userUow.PersistToken(refreshToken); var token = CreateToken(userModel, refreshToken); return token; } private async Task<TokenModel> GetTokenFromCredentials(LoginServiceModel login) { var clientApplication = await _clientApplicationUow.GetClientCredentialsAsync(login.ClientId); await LogCurrentActivity(login.ClientId, LOGIN, clientApplication != null, login.PrimaryIP, login.SecondaryIP); if (clientApplication == null) return null; var token = CreateToken(clientApplication); return token; } private TokenModel CreateToken(OpenIddictApplication clientApplication) { var token = BuildToken(clientApplication.ClientId, false, BuildUserClaims()); var stringToken = new JwtSecurityTokenHandler().WriteToken(token); return new TokenModel { Token_type = "Bearer", Access_token = stringToken, Id_token = stringToken, }; } private TokenModel CreateToken(UserModel userModel, Token rt) { //create token var token = BuildToken(userModel.UserId.ToString(), true, BuildUserClaims(userModel)); int.TryParse(_config.Get("Jwt:Expires"), out var expiresIn); var stringToken = new JwtSecurityTokenHandler().WriteToken(token); return new TokenModel { Token_type = "Bearer", Access_token = stringToken, Expires_in = expiresIn, Refresh_token = rt.Value, //TODO this is hack to make client app login - either change the way Id_token = stringToken, }; } private Token CreateRefreshToken(string clientId, long userId) { return new Token() { ClientId = clientId, UserId = userId, Type = 0, Value = Guid.NewGuid().ToString("N"), CreatedDate = DateTime.UtcNow, LastModifiedDate = DateTime.UtcNow }; } /// <summary> /// Logging user login activity into ActivityLog table /// </summary> /// <param name="username">Username or Email of the user</param> /// <param name="type">Activity type</param> /// <param name="success">Is the login successfull</param> /// <param name="primaryIP">Primary ip address</param> /// <param name="secondaryIP">Secondary ip address</param> /// <returns></returns> private async Task LogCurrentActivity(string username, string type, bool success, string primaryIP, string secondaryIP) { // checking if user exists var isUser = await _userUow.IsUserByNameAsync(username); ApplicationUser applicationUser = null; // getting application user if exists string userName; if (isUser) { applicationUser = await _userUow.GetUserByNameAsync(username); if (applicationUser == null) { userName = String.Empty; } else { userName = String.IsNullOrEmpty(applicationUser.UserName) ? applicationUser.Email : string.Empty; } } else { userName = username; } // getting Activity object var activity = _userUow.ActivityRepo.GetQueryable(x => x.Type == type).FirstOrDefault(); var log = new ActivityLog { ActivityId = activity.ActivityId, IsSuccess = success, LogDate = DateTime.Now, UserName = userName, PrimaryIP = primaryIP, SecondaryIP = secondaryIP, AuthUserId = applicationUser?.AuthUserId, Id = applicationUser?.Id, ApplicationUser = applicationUser ?? 
null, Activity = activity }; await _userUow.LogActivity(log); } private string SanitizeCode(string code) { string decodedCode = code .Replace(SanitizeForwardSlash.New, SanitizeForwardSlash.Old) .Replace(SanitizeDoubleEqual.New, SanitizeDoubleEqual.Old) .Replace(SanitizePlus.New, SanitizePlus.Old); code = HttpUtility.HtmlDecode(decodedCode); return code; } private static ApplicationUser MapAppUser(ApplicationUser appUser, ApplicationServiceUser serviceUser) { appUser.Id = serviceUser.Id; appUser.Title = serviceUser.Title; appUser.Forename = serviceUser.Forename; appUser.Surname = serviceUser.Surname; appUser.UserName = serviceUser.Username; appUser.Email = serviceUser.Email; appUser.ClientCompanyId = serviceUser.ClientCompanyId; appUser.UpdatedByAuthUserId = serviceUser.UpdatedByAuthUserId; appUser.Position = serviceUser.Position; appUser.PhoneNumber = serviceUser.PhoneNumberDirect; appUser.PhoneNumberMobile = serviceUser.PhoneNumberMobile; appUser.PhoneNumberOther = serviceUser.PhoneNumberOther; appUser.Birthday = DateTime.Parse(serviceUser.Birthday); appUser.IsApproved = serviceUser.IsApproved; appUser.PrimaryContact = serviceUser.PrimaryContact; appUser.IsAdmin = serviceUser.IsAdmin; return appUser; } private static IdentityResult PasswordValidation(PasswordHasher<ApplicationUser> hasher, ApplicationUser user, string oldPassword, string newPassword) { var idResult = IdentityResult.Success; var verify = hasher.VerifyHashedPassword(user, user.PasswordHash, newPassword); if (verify == PasswordVerificationResult.Failed) { verify = hasher.VerifyHashedPassword(user, user.PasswordHash, oldPassword); if (verify == PasswordVerificationResult.Failed) { idResult = IdentityResult.Failed(new IdentityError { Description = "Cannot change password. Incorrect current password inserted." }); } } else { idResult = IdentityResult.Failed(new IdentityError { Description = "Cannot change password. Duplicate new password inserted." }); } return idResult; } private void ValidatePasswordWithHistory(ApplicationUser user, string password) { var passwords = _userUow.GetLastPasswords(user.Id) .Select(x => x.PasswordHash) .Take(3) .ToList(); var hasher = new PasswordHasher<ApplicationUser>(); foreach (var passwordHash in passwords) { var used = hasher.VerifyHashedPassword(user, passwordHash, password); if (used == PasswordVerificationResult.Success) throw new PasswordAlreadyUsedException("Password already been used within the past 3 passwords"); } } private async Task<UserModel> PasswordSignInAsync(LoginServiceModel login) { var result = await _userUow.PasswordSignInAsync(login.Username, login.Password, isPersistent: false, lockoutOnFailure: false); if (!result.Succeeded) return null; var identityUser = await _userUow.GetUserByNameAsync(login.Username); if (!identityUser.IsApproved || identityUser.LockoutEnabled || identityUser.IsDeleted) return null; return await CreateUserServiceModel(identityUser); } private async Task<UserModel> CreateUserServiceModel(ApplicationUser identityUser) { return identityUser != null ? 
new UserModel { UserId = identityUser.Id, Name = identityUser.UserName, Email = identityUser.Email, Forename = identityUser.Forename, Surname = identityUser.Surname, ClientCompanyId = identityUser.ClientCompanyId, AuthUserId = identityUser.AuthUserId, Roles = await _userUow.GetRolesAsync(identityUser), PasswordLastChanged = identityUser.LastPasswordChange, IsAdmin = identityUser.IsAdmin } : null; } #endregion #region Dispose /// <summary> /// disposing == true coming from Dispose() /// disposing == false coming from finaliser /// </summary> /// <param name="disposing"></param> protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _userUow?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyVirtualAccountCurrencyBalance.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyVirtualAccountCurrencyBalance { public int ClientCompanyVirtualAccountId { get; set; } public int CurrencyId { get; set; } public decimal? Balance { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime BalanceDate { get; set; } public int TransactionCommitId { get; set; } public ClientCompanyVirtualAccount ClientCompanyVirtualAccount { get; set; } public Currency Currency { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientSiteAction.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientSiteAction { public ClientSiteAction() { ClientSiteAction2ClientCompanyOpi = new HashSet<ClientSiteAction2ClientCompanyOpi>(); ClientSiteAction2FixFxforwardTrade = new HashSet<ClientSiteAction2FixFxforwardTrade>(); ClientSiteAction2FxforwardTrade2Opi = new HashSet<ClientSiteAction2FxforwardTrade2Opi>(); ClientSiteAction2Fxswap = new HashSet<ClientSiteAction2Fxswap>(); } public long Id { get; set; } public int ClientSiteActionTypeId { get; set; } public int ClientSiteActionStatusId { get; set; } public string Details { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public byte[] UpdatedTimestamp { get; set; } public ClientSiteActionStatus ClientSiteActionStatus { get; set; } public ClientSiteActionType ClientSiteActionType { get; set; } public AuthUser CreatedByAuthUser { get; set; } public AuthUser UpdatedByAuthUser { get; set; } public ICollection<ClientSiteAction2ClientCompanyOpi> ClientSiteAction2ClientCompanyOpi { get; set; } public ICollection<ClientSiteAction2FixFxforwardTrade> ClientSiteAction2FixFxforwardTrade { get; set; } public ICollection<ClientSiteAction2FxforwardTrade2Opi> ClientSiteAction2FxforwardTrade2Opi { get; set; } public ICollection<ClientSiteAction2Fxswap> ClientSiteAction2Fxswap { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Payments/PaymentOut.cs using Argentex.Core.DataAccess.Entities; namespace Argentex.Core.Service.Models.Payments { public class PaymentOut : Payment { public ClientCompanyOpi CreditOPI { get; set; } public BankAccount CreditAccount { get; set; } public BankAccount DebitAccount { get; set; } public ClientCompanyVirtualAccount CreditVirtualAccount { get; set; } public ClientCompanyVirtualAccount DebitVirtualAccount { get; set; } public string AuthUserName { get; set; } }
} <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogFxforwardTrade.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogFxforwardTrade { public int LogId { get; set; } public string LogAction { get; set; } public string Code { get; set; } public DateTime CreatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime UpdatedDate { get; set; } public int UpdatedByAuthUserId { get; set; } public int ClientCompanyId { get; set; } public int? AuthorisedByClientCompanyContactId { get; set; } public int? TradeInstructionMethodId { get; set; } public int? BrokerId { get; set; } public bool Verified { get; set; } public int? VerifiedByAuthUserId { get; set; } public DateTime? ContractDate { get; set; } public DateTime? ValueDate { get; set; } public bool? IsOrder { get; set; } public string CurrencyPair { get; set; } public bool IsBuy { get; set; } public int? Lhsccyid { get; set; } public int? Rhsccyid { get; set; } public decimal? ClientRate { get; set; } public decimal? ClientLhsamt { get; set; } public decimal? ClientRhsamt { get; set; } public decimal? BrokerRate { get; set; } public decimal? BrokerLhsamt { get; set; } public decimal? BrokerRhsamt { get; set; } public decimal? CollateralPerc { get; set; } public int FxforwardTradeStatusId { get; set; } public bool? IsRhsmajor { get; set; } public decimal? Profit { get; set; } public bool? ProfitConsolidated { get; set; } public bool? Deleted { get; set; } public int? TransactionCommitId { get; set; } public int? ClientCompanyOpiid { get; set; } public decimal? ProfitConsolidatedValue { get; set; } public DateTime? ProfitConsolidatedDateTime { get; set; } public DateTime? SettledDate { get; set; } public DateTime? DeliveredDate { get; set; } public string Notes { get; set; } public decimal? ProfitGbprate { get; set; } public decimal? PrevailingRate2 { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime? CommPaidOutDate { get; set; } public string EmirUti { get; set; } public bool? EmirReported { get; set; } public DateTime? EmirReportedDateTime { get; set; } public string Reference { get; set; } public decimal? RemainingClientLhsamt { get; set; } public decimal? RemainingClientRhsamt { get; set; } public decimal? MarkToMarketValue { get; set; } public decimal? BrokenDatePrice { get; set; } public DateTime? MarkToMarketValueUpdatedDateTime { get; set; } public bool? IsComplianceSupported { get; set; } public bool? IsComplianceRegulated { get; set; } public bool? ComplianceTradeReasonId { get; set; } public DateTime? FilledDateTime { get; set; } public DateTime? ContractNoteSentToClientDateTime { get; set; } public DateTime? ContractNoteSentToMyselfDateTime { get; set; } public string ComplianceIsin { get; set; } public string EmirsubmissionId { get; set; } public string EmirdelegatedSubmissionId { get; set; } public decimal? PrevDayMarktoMarket { get; set; } public DateTime? PrevDayMarktoMarketUpdatedDateTime { get; set; } public string MarketSideUti { get; set; } public decimal? BdpforwardPoints { get; set; } public int? FilledByAuthUserId { get; set; } public int? OpiupdatedByAuthUserId { get; set; } public DateTime? OpiupdatedDateTime { get; set; } public int? BrokeredByAuthUserId { get; set; } public DateTime? BrokeredDate { get; set; } public DateTime? 
OpenValueDate { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportTradeResponseError.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportTradeResponseError { public int Id { get; set; } public int EmirreportFxforwardTradeId { get; set; } public string Source { get; set; } public int? EmirreportResponseCodeId { get; set; } public int? ResponseCode { get; set; } public string ResponseMessage { get; set; } public EmirreportFxforwardTrade EmirreportFxforwardTrade { get; set; } public EmirreportResponseCode EmirreportResponseCode { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Automapper/MappingProfiles.cs using System.Linq; using Argentex.Core.Api.ClientAuthentication; using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service.Models.Identity; using AutoMapper; using OpenIddict.Abstractions; namespace Argentex.Core.Api.Automapper { public class MappingProfiles : Profile { public MappingProfiles() { CreateMap<RegisterServiceModel, ApplicationServiceUser>(); CreateMap<ApplicationServiceUser, ApplicationUser>(); CreateMap<Models.SecurityModels.LoginModel, LoginServiceModel>(); CreateMap<ClientConfig, OpenIddictApplicationDescriptor>() .ForMember(d => d.ClientSecret, o => o.MapFrom(s => s.Secret)) .ForMember(d => d.Permissions, o => o.Ignore()); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/ClientAuthentication/ClientUpdater.cs using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using AutoMapper; using Microsoft.Extensions.DependencyInjection; using OpenIddict.Abstractions; using OpenIddict.Core; using OpenIddict.EntityFrameworkCore.Models; namespace Argentex.Core.Api.ClientAuthentication { internal class ClientUpdater: IDisposable { private readonly OpenIddictApplicationManager<OpenIddictApplication> _appManager; private readonly IMapper _mapper; private readonly IServiceScope _scope; internal ClientUpdater(IServiceProvider services) { _scope = services.GetRequiredService<IServiceProvider>().CreateScope(); _appManager = _scope.ServiceProvider.GetRequiredService<OpenIddictApplicationManager<OpenIddictApplication>>(); _mapper = _scope.ServiceProvider.GetRequiredService<IMapper>(); } public void SynchroniseClients(IEnumerable<ClientConfig> configs) { var tasks = configs.Select(SetClient).ToArray(); //TODO: Update clients, and remove clients from environment that do not exist in the config list Task.WaitAll(tasks); } private async Task SetClient(ClientConfig config) { var existingClient = await GetExistingClient(config.ClientId); if (existingClient == null) { var descriptor = _mapper.Map<OpenIddictApplicationDescriptor>(config); config.Permissions.ForEach(p=> descriptor.Permissions.Add(p)); await _appManager.CreateAsync(descriptor); } } private async Task<OpenIddictApplication> GetExistingClient(string clientId) => await _appManager.FindByClientIdAsync(clientId); public void Dispose() { _scope.Dispose(); } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/BankAccountCurrencyBalanceHistory.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class BankAccountCurrencyBalanceHistory { public int BankAccountId { get; set; } public int CurrencyId { get; set; } public int TransactionCommitId { get; set; } public DateTime BalanceDate { get; set; } public decimal? 
Balance { get; set; } public BankAccount BankAccount { get; set; } public Currency Currency { get; set; } public TransactionCommit TransactionCommit { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Order/OrderService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Email.EmailSender; using Argentex.Core.Service.Enums; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Fix; using Argentex.Core.Service.Models.Order; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.Trades; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.Service.Order { public class OrderService : IOrderService { private readonly ITradeUow _tradeUow; private readonly IBarxFxService _fixQuoteService; private readonly IUserService _userService; private readonly ICurrencyService _currencyService; private readonly IAppSettingService _appSettingService; private readonly IEmailService _emailService; private readonly IConfigWrapper _config; private readonly IClientCompanyService _clientCompanyService; public OrderService(ITradeUow uow , IBarxFxService fixQuoteService , IUserService userService , ICurrencyService currencyService , IAppSettingService appSetting , IEmailService emailService , IConfigWrapper config , IClientCompanyService clientCompanyService) { _tradeUow = uow; _fixQuoteService = fixQuoteService; _userService = userService; _currencyService = currencyService; _appSettingService = appSetting; _emailService = emailService; _config = config; _clientCompanyService = clientCompanyService; } public async Task<IList<OrderResponseModel>> ExecuteOrdersAsync(OrderRequestModel orderRequest) { //IMPORTANT: The call to the FIX service to get the quotes will be temporarily disabled //(other related service calls and related methods have been commented out too) //Do not activate this without a prior discussion with the Product Owner! 
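// Order execution flow: reserve the next trade code for the client company, build the FIX quote request (the quote call itself is currently bypassed per the note above, so every order is treated as favourable), persist the order via ExecuteOrder, and on success send the order-note emails to the client and the dealer.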
List<OrderResponseModel> responseList = new List<OrderResponseModel>(); //_fixQuoteService.SetHttpTimeout(new TimeSpan(0, 0, 0, 0, _appSettingService.GetTimeOut())); foreach (var order in orderRequest.OrderModels) { var response = new OrderResponseModel { IsSuccessful = false }; var tradeCountObject = _tradeUow.GetTradeCountByPrimaryKey(orderRequest.ClientCompanyId.Value); tradeCountObject.TradeCount++; response.Code = CodeBuilder.FormatTradeCode(tradeCountObject.TradeCount, orderRequest.ClientCompanyId.Value); var quoteRequest = CreateQuoteRequestModel(order, response.Code); //var orderResponse = await _fixQuoteService.GetQuoteAsync(quoteRequest); //bool isOrderFavourable = CheckOrderValidity(order, orderResponse, orderRequest.ClientCompanyId.Value); bool isOrderFavourable = true; if (isOrderFavourable) { var orderEntity = CreateTradeEntity(order, response.Code, orderRequest.ClientCompanyId.Value, orderRequest.AuthUserId.Value); response.IsSuccessful = _tradeUow.ExecuteOrder(orderEntity, tradeCountObject); // sending email to notify client and dealer if (response.IsSuccessful) { var orderNoteModel = GetOrderNote(response.Code); await _emailService.SendOrderNoteEmailAsync(orderNoteModel); await _emailService.SendDealerOrderNoteEmailAsync(orderNoteModel); } } else { response.Code = ""; response.ErrorMessage = "You have attempted to create an order with a worse rate than the current forward rate. Please alter the rate before re-submitting your order. "; } response.OrderIndex = order.OrderIndex; responseList.Add(response); } return responseList; } public IList<Models.Trade.TradeModel> GetOpenOrders(int clientCompanyId) { var list = new List<Models.Trade.TradeModel>(); list = _tradeUow .GetOpenOrders(clientCompanyId) .Select(trade => new Models.Trade.TradeModel { TradeId = trade.Code, ClientRate = trade.ClientRate, CreatedDate = trade.CreatedDate, ValueDate = trade.ValueDate.Value, ValidityDate = trade.OpenValueDate.Value, SellCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, ClientSellAmount = trade.IsBuy ? trade.ClientRhsamt.Value : trade.ClientLhsamt.Value, BuyCcy = trade.IsBuy ? trade.Lhsccy.Code : trade.Rhsccy.Code, ClientBuyAmount = trade.IsBuy ? trade.ClientLhsamt.Value : trade.ClientRhsamt.Value }).ToList(); return list; } /// <summary> /// Getting open orders that have the validity date expired /// </summary> /// <returns>CancelOrderModel for send cancel order email</returns> public IList<CancelOrderModel> GetExpiredValidityOrders() { var list = new List<CancelOrderModel>(); list = _tradeUow .GetExpiredValidityOrders() .Select(trade => new CancelOrderModel { InstructedBy = trade.AuthorisedByClientCompanyContactId.HasValue ? trade.AuthorisedByClientCompanyContact.Fullname : string.Empty, InstructedDateTime = trade.CreatedDate, Method = Enum.GetName(typeof(TradeInstructionMethods), trade.TradeInstructionMethodId ?? 0), TradeRef = trade.Code, SellCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, SellAmount = trade.IsBuy ? trade.ClientRhsamt.Value : trade.ClientLhsamt.Value, BuyCcy = trade.IsBuy ? trade.Lhsccy.Code : trade.Rhsccy.Code, BuyAmount = trade.IsBuy ? trade.ClientLhsamt.Value : trade.ClientRhsamt.Value, ValueDate = trade.ValueDate.Value, Rate = (double?)trade.BrokerRate ?? 0, Collateral = trade.IsBuy ? CalculateCollateralValue(trade.ClientRhsamt ?? 0, trade.CollateralPerc ?? 0) : CalculateCollateralValue(trade.ClientLhsamt ?? 0, trade.CollateralPerc ?? 0), CollateralCcy = trade.IsBuy ? 
trade.Rhsccy.Code : trade.Lhsccy.Code, CurrencyPair = trade.CurrencyPair, ClientEmail = trade.AuthorisedByClientCompanyContactId.HasValue ? trade.AuthorisedByClientCompanyContact.Email : string.Empty, ClientCompany = trade.AuthorisedByClientCompanyContact.ClientCompany, SettlementAccountDetails = trade.ClientCompanyOpi }).ToList(); return list; } /// <summary> /// Canceling an order (Trade) that is in pending status /// This is done by a user action /// </summary> /// <param name="tradeCode">Unique Id for Order/Trade</param> /// <returns></returns> public async Task<bool> CancelOrderAsync(string tradeCode) { var orderIsDeleted = await _tradeUow.CancelOrder(tradeCode); if (orderIsDeleted) { var model = GetOrderInformation(tradeCode); await _emailService.SendCancelOrderEmailAsync(model); } return orderIsDeleted; } /// <summary> /// Canceling an order (Trade) that is in pending status /// This is done by the service during the scheduled hours /// </summary> /// <param name="model">CancelOrderModel that is used to send the email to the client</param> /// <returns></returns> public async Task<bool> CancelOrderAsync(CancelOrderModel model) { var orderIsDeleted = await _tradeUow.CancelOrder(model.TradeRef); if (orderIsDeleted) await _emailService.SendCancelOrderEmailAsync(model); return orderIsDeleted; } #region Private methods /// <summary> /// Getting order information for sending the email /// </summary> /// <param name="tradeCode">Unique Id for Order/Trade</param> /// <returns></returns> private OrderNoteModel GetOrderNote(string tradeCode) { var orderNote = _tradeUow .GetTrade(tradeCode) .Select(trade => new OrderNoteModel { InstructedBy = trade.AuthorisedByClientCompanyContactId.HasValue ?
trade.AuthorisedByClientCompanyContact.Fullname : string.Empty, InstructedDateTime = trade.CreatedDate, Method = Enum.GetName(typeof(TradeInstructionMethods), trade.TradeInstructionMethodId ?? 0), TradeRef = trade.Code, SellCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, SellAmount = trade.IsBuy ? trade.ClientRhsamt.Value : trade.ClientLhsamt.Value, BuyCcy = trade.IsBuy ? trade.Lhsccy.Code : trade.Rhsccy.Code, BuyAmount = trade.IsBuy ? trade.ClientLhsamt.Value : trade.ClientRhsamt.Value, ValueDate = trade.ValueDate.Value, Rate = (double?)trade.ClientRate ?? 0, Collateral = trade.IsBuy ? CalculateCollateralValue(trade.ClientRhsamt ?? 0, trade.CollateralPerc ?? 0) : CalculateCollateralValue(trade.ClientLhsamt ?? 0, trade.CollateralPerc ?? 0), CollateralCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, CurrencyPair = trade.CurrencyPair, ClientEmail = trade.AuthorisedByClientCompanyContactId.HasValue ? trade.AuthorisedByClientCompanyContact.Email : string.Empty, ClientCompany = trade.AuthorisedByClientCompanyContact.ClientCompany, SettlementAccountDetails = trade.ClientCompanyOpi, ValidityDate = trade.OpenValueDate, CreatedDate = trade.CreatedDate }).SingleOrDefault(); if (tradeInformation == null) throw new TradeNotFoundException($"Trade with code {tradeCode} does not exist"); return tradeInformation; } private FixQuoteRequestModel CreateQuoteRequestModel(OrderModel order, string tradeCode) { return new FixQuoteRequestModel { TradeCode = tradeCode, LHSCCY = order.LhsCcy, RHSCCY = order.RhsCcy, MajorCurrency = order.IsRhsMajor ? order.RhsCcy : order.LhsCcy, Side = order.IsBuy ? 1 : 2, BrokerMajorAmount = order.ClientAmount, TimeOut = _appSettingService.GetTimeOut(), Duration = _appSettingService.GetStreamingQuoteDuration(), ValueDate = order.ValueDate.ToString("yyyy-MM-dd") }; } private FxforwardTrade CreateTradeEntity(OrderModel orderModel, string code, int clientCompanyId, int authUserId) { const int Pending = 1;//TODO - possibly retrieve from FXDB const int Generating = 2;//TODO - possibly retrieve from FXDB var tradeModel = new FxforwardTrade() { Code = code, CreatedDate = DateTime.Now, CreatedByAuthUserId = authUserId, ClientCompanyId = clientCompanyId, AuthorisedByClientCompanyContactId = GetClientCompanyContactId(authUserId), Verified = false, ContractDate = DateTime.Now, ValueDate = orderModel.ValueDate, OpenValueDate = orderModel.ValidityDate, IsOrder = true, CurrencyPair = orderModel.LhsCcy + orderModel.RhsCcy, IsBuy = orderModel.IsBuy, Lhsccyid = _currencyService.GetCurrencyId(orderModel.LhsCcy), Rhsccyid = _currencyService.GetCurrencyId(orderModel.RhsCcy), ClientRate = orderModel.ClientRate, UpdatedByAuthUserId = authUserId, FxforwardTradeStatusId = Pending, IsRhsmajor = orderModel.IsRhsMajor, ProfitConsolidated = false, Deleted = false, EmirReported = false, IsComplianceSupported = true, IsComplianceRegulated = false, EmirDelegatedReported = false, IsEmirreportable = true, IsApareportable = true, IsArmreportable = true, Armreported = false, EmirstatusId = Generating, ApastatusId = Generating, ArmstatusId = Generating, TradeInstructionMethodId = (int)TradeInstructionMethods.Online }; CalculateClientAmounts(orderModel, tradeModel); return tradeModel; } private int GetClientCompanyContactId(int authUserId) { return _userService .GetApplicationUserByAuthUserId(authUserId) .ClientCompanyContactId; } private void CalculateClientAmounts(OrderModel orderModel, FxforwardTrade tradeModel) { if (orderModel.IsRhsMajor) { tradeModel.ClientLhsamt = orderModel.ClientAmount / 
orderModel.ClientRate; tradeModel.ClientRhsamt = orderModel.ClientAmount; tradeModel.BrokerRhsamt = orderModel.ClientAmount; } else { tradeModel.ClientLhsamt = orderModel.ClientAmount; tradeModel.ClientRhsamt = orderModel.ClientAmount * orderModel.ClientRate; tradeModel.BrokerLhsamt = orderModel.ClientAmount; } } private bool CheckOrderValidity(OrderModel orderModel, FixQuoteResponseModel fixQuoteResponse, int clientCompanyId) { decimal finalRate; bool isOrderValid; int spread = _clientCompanyService.GetClientCompanySpread(clientCompanyId, orderModel.LhsCcy, orderModel.RhsCcy, orderModel.IsBuy, orderModel.ValueDate, DateTime.Now); if (orderModel.IsBuy) { finalRate = (fixQuoteResponse.BrokerRate + ((decimal)spread / 10000)) * 1.1m; isOrderValid = (orderModel.ClientRate <= finalRate); } else { finalRate = (fixQuoteResponse.BrokerRate + ((decimal)spread / 10000)) * 0.9m; isOrderValid = (orderModel.ClientRate >= finalRate); } return isOrderValid; } private static decimal CalculateCollateralValue(decimal value, decimal collateralPercentage) { return value * (collateralPercentage / 100); } #endregion } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/BankAccountCurrencyDetails.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class BankAccountCurrencyDetails { public int BankAccountId { get; set; } public int CurrencyId { get; set; } public int CountryId { get; set; } public string BankName { get; set; } public string BankAddress { get; set; } public string BankAccountName { get; set; } public string BankAccountNumber { get; set; } public string BankAccountSort { get; set; } public string BankAccountSwift { get; set; } public string BankAccountIban { get; set; } public string BeneficiaryName { get; set; } public string BeneficiaryAddress { get; set; } public int? ClearingCodePrefixId { get; set; } public byte[] UpdateTimeStamp { get; set; } public int CreatedByAuthUserId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdateDateTime { get; set; } public BankAccount BankAccount { get; set; } public ClearingCodePrefix ClearingCodePrefix { get; set; } public Country Country { get; set; } public AuthUser CreatedByAuthUser { get; set; } public Currency Currency { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Http/HttpResponseExtension.cs using Newtonsoft.Json; using System.Net.Http; using System.Threading.Tasks; namespace Argentex.ClientSite.Service.Http { public static class HttpResponseExtensions { public static async Task<T> ContentAsType<T>(this HttpResponseMessage response) { var data = await response.Content.ReadAsStringAsync(); return string.IsNullOrEmpty(data) ? 
default(T) : JsonConvert.DeserializeObject<T>(data); } public static async Task<string> ContentAsJson(this HttpResponseMessage response) { var data = await response.Content.ReadAsStringAsync(); return JsonConvert.SerializeObject(data); } public static async Task<string> ContentAsString(this HttpResponseMessage response) { return await response.Content.ReadAsStringAsync(); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceQuestionnaireQuestion.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceQuestionnaireQuestion { public ComplianceQuestionnaireQuestion() { ComplianceQuestionnaire = new HashSet<ComplianceQuestionnaire>(); ComplianceQuestionnaireAnswer = new HashSet<ComplianceQuestionnaireAnswer>(); } public int Id { get; set; } public string Name { get; set; } public string Description { get; set; } public DateTime CreatedDateTime { get; set; } public ICollection<ComplianceQuestionnaire> ComplianceQuestionnaire { get; set; } public ICollection<ComplianceQuestionnaireAnswer> ComplianceQuestionnaireAnswer { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactResponseModel.cs using System.Collections.Generic; namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompanyContactResponseModel { public IDictionary<string, string[]> ResponseMessages { get; set; } public bool Succeeded { get; set; } public ClientCompanyContactModel CompanyContactModel { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogFxforwardTradeInvoice.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogFxforwardTradeInvoice { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string TradeCode { get; set; } public string FileName { get; set; } public int? FileSize { get; set; } public string Comment { get; set; } public int? DocumentId { get; set; } public DateTime? UploadedDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogBankAccountCurrencyDetails.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogBankAccountCurrencyDetails { public long LogId { get; set; } public string LogAction { get; set; } public int BankAccountId { get; set; } public int CurrencyId { get; set; } public int CountryId { get; set; } public string BankName { get; set; } public string BankAddress { get; set; } public string BankAccountName { get; set; } public string BankAccountNumber { get; set; } public string BankAccountSort { get; set; } public string BankAccountSwift { get; set; } public string BankAccountIban { get; set; } public string BeneficiaryName { get; set; } public string BeneficiaryAddress { get; set; } public int? 
ClearingCodePrefixId { get; set; } public int CreatedByAuthUserId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdateDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanySalesAppUser.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanySalesAppUser { public int ClientCompanyId { get; set; } public int SalesPersonAppUserId { get; set; } public int SalesOrder { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public ClientCompany ClientCompany { get; set; } public AppUser SalesPersonAppUser { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceQuestionnaireAnswer.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceQuestionnaireAnswer { public ComplianceQuestionnaireAnswer() { ComplianceQuestionnaire = new HashSet<ComplianceQuestionnaire>(); } public int Id { get; set; } public int ComplianceQuestionnaireQuestionId { get; set; } public string Name { get; set; } public string Description { get; set; } public int Points { get; set; } public DateTime CreatedDateTime { get; set; } public ComplianceQuestionnaireQuestion ComplianceQuestionnaireQuestion { get; set; } public ICollection<ComplianceQuestionnaire> ComplianceQuestionnaire { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/AuthoriseSignatoryRequest.cs using System.Collections.Generic; namespace Argentex.Core.Service.Models.Identity { public class AuthoriseSignatoryRequest { public int ApproverAuthUserId { get; set; } public ICollection<int> UserIdsToAuthorise { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/Statements/StatementsControllerTests.cs using System; using System.Collections.Generic; using System.Net; using Argentex.Core.Api.Controllers.Statements; using Argentex.Core.Service.Models.Statements; using Argentex.Core.Service.Statements; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using Xunit; namespace Argentex.Core.Api.Tests.Statements { public class StatementsControllerTests { [Fact(Skip = "Needs updating")] public void Given_There_Are_No_Results_When_Getting_Statements_A_No_Content_Should_Be_Returned() { // Given var statementServiceMock = new Mock<IStatementService>(); var loggerMock = new Mock<ILogWrapper>(); statementServiceMock .Setup(x => x.GetStatements(It.IsAny<int>(), It.IsAny<DateTime>(), It.IsAny<DateTime>())) .Returns(new Dictionary<string, List<StatementModel>>()); var expectedStatusCode = HttpStatusCode.NoContent; var controller = new StatementsController(statementServiceMock.Object, loggerMock.Object); // When var response = controller.GetStatements(0, DateTime.Now, DateTime.Now); var result = response as NoContentResult; // Then Assert.NotNull(result); Assert.Equal((int)expectedStatusCode, result.StatusCode); } [Fact] public void Given_There_Are_Results_When_Getting_Statements_An_Ok_Response_Should_Be_Returned() { // Given var statements = new Dictionary<string, List<StatementModel>>() { {"GBP", new List<StatementModel>() { new StatementModel() { ValueDate = DateTime.Today, Event = "Who cares", IsDebit = false, Amount = 10000m } } } }; var statementServiceMock = new Mock<IStatementService>(); var loggerMock = 
new Mock<ILogWrapper>(); statementServiceMock .Setup(x => x.GetStatements(It.IsAny<int>(), It.IsAny<DateTime>(), It.IsAny<DateTime>())) .Returns(statements); var expectedStatusCode = HttpStatusCode.OK; var controller = new StatementsController(statementServiceMock.Object, loggerMock.Object); // When var response = controller.GetStatements(0, DateTime.Now, DateTime.Now); var result = response as OkObjectResult; // Then Assert.NotNull(result); Assert.Equal((int)expectedStatusCode, result.StatusCode); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxforwardTradeSwapCount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxforwardTradeSwapCount { public string FxforwardTradeCode { get; set; } public int SwapCount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientSiteAction/ClientSiteActionModel.cs using System; namespace Argentex.Core.Service.Models.ClientSiteAction { public class ClientSiteActionModel { public long ID { get; set; } public string ActionType { get; set; } public int ActionStatusID { get; set; } public string ActionStatus { get; set; } public string Details { get; set; } public string CreatedByUser { get; set; } public DateTime CreatedDateTime { get; set; } public string UpdatedByUser { get; set; } public int UpdatedByUserID { get; set; } public DateTime UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportIncomingFileContent.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportIncomingFileContent { public EmirreportIncomingFileContent() { EmirreportIncomingFile = new HashSet<EmirreportIncomingFile>(); } public int Id { get; set; } public string FileContent { get; set; } public ICollection<EmirreportIncomingFile> EmirreportIncomingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/UserAuditLogPageViews.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class UserAuditLogPageViews { public int Id { get; set; } public DateTime DateTime { get; set; } public int AuthUserId { get; set; } public string PageViewName { get; set; } public string IpAddress { get; set; } public AuthUser AuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/User/UserServiceTests.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.Users; using Argentex.Core.UnitsOfWork.Users.Model; using Microsoft.AspNetCore.Identity; using Moq; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using System.Threading.Tasks.Sources; using Argentex.Core.Service.AppSettings; using FluentAssertions; using Xunit; using Argentex.Core.Service.Sms.SmsSender; using Argentex.Core.Service.Sms.Models; namespace Argentex.Core.Service.Tests.User { public class UserServiceTests { [Fact] public void GetApplicationUserAsync_Success_With_Correct_And_Different_Password() { //Arrange Mock<IUserUow> mockUserUow = mockUserFactory(); var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", 
Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>==", //password hash for "<PASSWORD>" ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100, PrimaryContact = false, Birthday = DateTime.Now }; mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(user)); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); //Act var result = service.GetApplicationUserAsync("1").Result; //Assert Assert.IsType<ApplicationServiceUser>(result); Assert.NotNull(result); } [Fact] public void AddUnapprovedUserAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(uow => uow.AddUserAsync(It.IsAny<ClientUserModel>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.ValidateUserDetails(It.IsAny<UserValidationModel>())).Returns(IdentityResult.Success); var mockConfig = new Mock<IConfigWrapper>(); mockConfig.Setup(x => x.Get(It.IsAny<string>())).Returns(It.IsAny<string>()); var mockIdentityService = new Mock<IIdentityService>(); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(mockConfig.Object, mockUserUow.Object, emailSenderMock.Object, mockIdentityService.Object, null, null, null); //Act ApplicationServiceUser testServiceUser = new ApplicationServiceUser { Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, Birthday = "10/10/2010" }; IdentityResult result = service.AddUnapprovedUserAsync(testServiceUser).Result; //Assert Assert.Equal(IdentityResult.Success, result); } [Fact] public void AddUnapprovedUserAsync_Successful_Set_User_As_Admin() { //Arrange var mockUserUow = new Mock<IUserUow>(); var configMock = new Mock<IConfigWrapper>(); mockUserUow.Setup(x => x.ValidateUserDetails(It.IsAny<UserValidationModel>())) .Returns(IdentityResult.Success); mockUserUow.Setup(uow => uow.AddUserAsync(It.IsAny<ClientUserModel>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.GetRole(It.IsAny<string>())) .Returns((new List<ApplicationRole> { new ApplicationRole() { Id = 42 } }).AsQueryable()); mockUserUow.Setup(x => x.SetRoleForUser(It.IsAny<long>(), It.IsAny<long>())).Returns(Task.FromResult(true)); configMock.Setup(x => x.Get("GeneratedPassword")).Returns("<PASSWORD>!"); mockUserUow.Setup(x => x.SetRoleForUser(It.IsAny<long>(), It.IsAny<long>())); var mockIdentityService = new Mock<IIdentityService>(); var service = new UserService(configMock.Object, mockUserUow.Object, null, mockIdentityService.Object, null, null, null); //Act ApplicationServiceUser testServiceUser = new ApplicationServiceUser { Id = 1, Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, IsAdmin = true, Birthday = "10/10/2010" }; var result = service.AddUnapprovedUserAsync(testServiceUser).Result; //Assert Assert.Equal(IdentityResult.Success, result); } [Fact] public void AddUnapprovedUserAsync_Failed_When_It_Has_Duplicate_Email_And_Username_Input() { //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(uow => uow.AddUserAsync(It.IsAny<ClientUserModel>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); 
IdentityResult identityResult = IdentityResult.Failed( new IdentityError() { Description = "Username must be unique" }, new IdentityError() { Description = "Email must be unique within the Client Company Account" }); mockUserUow.Setup(x => x.ValidateUserDetails(It.IsAny<UserValidationModel>())).Returns(identityResult); var mockConfig = new Mock<IConfigWrapper>(); mockConfig.Setup(x => x.Get(It.IsAny<string>())).Returns(It.IsAny<string>()); var mockIdentityService = new Mock<IIdentityService>(); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(mockConfig.Object, mockUserUow.Object, emailSenderMock.Object, mockIdentityService.Object, null, null, null); //Act ApplicationServiceUser testServiceUser = new ApplicationServiceUser { Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, Birthday = "10/10/2010" }; IdentityResult result = service.AddUnapprovedUserAsync(testServiceUser).Result; //Assert result.Should().BeEquivalentTo(identityResult); } [Fact] public void UpdateUserAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockUserUow = new Mock<IUserUow>(); var originalUser = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100, Birthday = DateTime.Now }; mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(originalUser)); mockUserUow.Setup(x => x.ValidateUserDetails(It.IsAny<UserValidationModel>())) .Returns(IdentityResult.Success); mockUserUow.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationUser>(), originalUser)) .Returns(Task.FromResult(IdentityResult.Success)); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); //Act ApplicationServiceUser userToUpdate = new ApplicationServiceUser { Id = 1, Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, Birthday = "10/10/2010" }; IdentityResult result = service.UpdateUserAsync(userToUpdate).Result; //Assert Assert.Equal(IdentityResult.Success, result); } [Fact] public void UpdateUserAsync_Failed_When_It_Has_Duplicate_Email_And_Username_Input() { //Arrange var mockUserUow = new Mock<IUserUow>(); var originalUser = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100, Birthday = DateTime.Now }; mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(originalUser)); IdentityResult identityResult = IdentityResult.Failed( new IdentityError() { Description = "Username must be unique" }, new IdentityError() { Description = "Email must be unique within the Client Company Account" }); mockUserUow.Setup(x => x.ValidateUserDetails(It.IsAny<UserValidationModel>())) .Returns(identityResult); mockUserUow.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationUser>(), It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(IdentityResult.Success)); var emailSenderMock = new Mock<IEmailSender>(); var service = new 
UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); //Act ApplicationServiceUser userToUpdate = new ApplicationServiceUser { Id = 1, Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, Birthday = "10/10/2010" }; IdentityResult result = service.UpdateUserAsync(userToUpdate).Result; //Assert result.Should().BeEquivalentTo(identityResult); } [Fact] public void UpdateMyAccountAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockUserUow = new Mock<IUserUow>(); var originalUser = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100, Birthday = DateTime.Now }; mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(originalUser)); mockUserUow.Setup(x => x.ValidateUserDetails(It.IsAny<UserValidationModel>())) .Returns(IdentityResult.Success); mockUserUow.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationUser>(), originalUser)) .Returns(Task.FromResult(IdentityResult.Success)); var service = new UserService(null, mockUserUow.Object, null, null, null, null, null); //Act ApplicationServiceUser userToUpdate = new ApplicationServiceUser { Id = 1, Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, Birthday = "10/10/2010" }; IdentityResult result = service.UpdateUserAsync(userToUpdate).Result; //Assert Assert.Equal(IdentityResult.Success, result); } [Fact] public void ApproveUsersAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100, Birthday = DateTime.Now }; List<ClientCompaniesModel> clientCompanies = new List<ClientCompaniesModel> { new ClientCompaniesModel {ClientCompanyId = 22, ClientCompanyName = "Abc"}, new ClientCompaniesModel {ClientCompanyId = 23, ClientCompanyName = "Xyz"}, }; var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(user)); mockUserUow.Setup(x => x.ApproveUserAsync(user)) .Returns(Task.FromResult(IdentityResult.Success)); var mockConfig = new Mock<IConfigWrapper>(); mockConfig.Setup(x => x.Get(It.IsAny<string>())).Returns("Url"); var mockIdentityService = new Mock<IIdentityService>(); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)). 
Returns(Task.FromResult(IdentityResult.Success)); var service = new UserService(mockConfig.Object, mockUserUow.Object, null, mockIdentityService.Object, mockEmailService.Object, null, null); //Act ApproveUsersRequest approveUsersRequest = new ApproveUsersRequest { ApproverAuthUserId = 1, UserIdsToApprove = new List<int> { 1 } }; IdentityResult result = service.ApproveUsersAsync(approveUsersRequest, clientCompanies).Result[0]; //Assert Assert.Equal(IdentityResult.Success, result); } [Fact] public void DeleteUserAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockUserUow = new Mock<IUserUow>(); var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100 }; mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(user)); mockUserUow.Setup(x => x.DeleteUserAsync(It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(IdentityResult.Success)); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); //Act IdentityResult result = service.DeleteUserAsync("<EMAIL>").Result; //Assert Assert.Equal(IdentityResult.Success, result); } [Fact] public void UpdateUserAsync_Identity_Fails_When_Inputted_With_An_Empty_Or_Invalid_Id() { //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult((ApplicationUser)null)); mockUserUow.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationUser>(), It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(IdentityResult.Success)); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); //Act ApplicationServiceUser userToUpdate = new ApplicationServiceUser { Id = 1, Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111 }; IdentityResult result = service.UpdateUserAsync(userToUpdate).Result; //Assert Assert.Equal(IdentityResult.Failed().Succeeded, result.Succeeded); } [Fact] public async Task Approve_UserChangeRequest_SuccessAsync() { var userChangeRequest = new UserChangeRequest() { Id = 60, AuthUserId = 1400, CurrentValue = "<EMAIL>", ProposedValue = "<EMAIL>", ChangeValueType = "Email", ChangeDateTime = DateTime.Now, ChangedByAuthUserId = 1352, ChangeStatus = "Pending" }; var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = true, UserChangeRequest = null }; var approveUserChangeRequest = new ApproveUserChangeRequest { UserChangeRequestID = 60, ApprovedByAuthUserId = 1352 }; //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.GetUserChangeRequest(It.IsAny<int>())) .Returns(userChangeRequest); mockUserUow.Setup(x => x.ApproveUserChangeRequest(It.IsAny<ApproveUserChangeRequest>())) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert Assert.Equal(IdentityResult.Success.Succeeded, result.Result.Succeeded); } [Fact] public 
async Task Approve_UserChangeRequest_Failed()
        {
            var userChangeRequest = new UserChangeRequest()
            {
                Id = 60,
                AuthUserId = 1400,
                CurrentValue = "<EMAIL>",
                ProposedValue = "<EMAIL>",
                ChangeValueType = "Email",
                ChangeDateTime = DateTime.Now,
                ChangedByAuthUserId = 1352,
                ChangeStatus = "Pending"
            };
            var approveUserChangeResponse = new ApproveUserChangeResponse()
            {
                Result = IdentityResult.Failed(),
                SendNotification = true,
                UserChangeRequest = null
            };
            var approveUserChangeRequest = new ApproveUserChangeRequest
            {
                UserChangeRequestID = 60,
                ApprovedByAuthUserId = 1352
            };
            //Arrange
            var mockUserUow = new Mock<IUserUow>();
            mockUserUow.Setup(x => x.GetUserChangeRequest(It.IsAny<int>()))
                .Returns(userChangeRequest);
            mockUserUow.Setup(x => x.ApproveUserChangeRequest(It.IsAny<ApproveUserChangeRequest>()))
                .ReturnsAsync(approveUserChangeResponse);
            var emailSenderMock = new Mock<IEmailSender>();
            var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null);

            //Act
            var result = await service.ApproveUserChangeRequest(approveUserChangeRequest);

            //Assert
            Assert.Equal(IdentityResult.Failed().Succeeded, result.Result.Succeeded);
        }

        [Fact]
        public async Task Approve_UserChangeRequest_SendSMSSuccess_with_no_default_mobile_number_provided()
        {
            var userChangeRequest = new UserChangeRequest()
            {
                Id = 60,
                AuthUserId = 1400,
                CurrentValue = "<EMAIL>",
                ProposedValue = "<EMAIL>",
                ChangeValueType = "Email",
                ChangeDateTime = DateTime.Now,
                ChangedByAuthUserId = 1352,
                ChangeStatus = "Pending"
            };
            var approveUserChangeResponse = new ApproveUserChangeResponse()
            {
                Result = IdentityResult.Success,
                SendNotification = true,
                UserChangeRequest = userChangeRequest
            };
            var approveUserChangeRequest = new ApproveUserChangeRequest
            {
                UserChangeRequestID = 60,
                ApprovedByAuthUserId = 1352
            };
            //Arrange
            var mockUserUow = new Mock<IUserUow>();
            var mockEmailService = new Mock<IEmailService>();
            mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)).
Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.GetSendersPhoneNumber(It.IsAny<int>())).Returns("442222222222"); mockUserUow.Setup(x => x.ApproveUserChangeRequest(approveUserChangeRequest)) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var smsSenderMock = new Mock<ISmsSender>(); var configWrapperMock = new Mock<IConfigWrapper>(); configWrapperMock.Setup(x => x.Get("Sms:DefaultPhoneNumber")).Returns(""); var service = new UserService(configWrapperMock.Object, mockUserUow.Object, emailSenderMock.Object,null, mockEmailService.Object, null, smsSenderMock.Object); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert mockUserUow.Verify(m => m.GetSendersPhoneNumber(It.IsAny<int>()), Times.Once); smsSenderMock.Verify(m => m.SendMessage(It.IsAny<SmsModel>(), Enums.SmsProviders.TextMagic),Times.Once); mockUserUow.Verify(m => m.GetSendersEmailAddress(It.IsAny<int>()), Times.Never); mockEmailService.Verify(m => m.SendMobileChangeEmailAsync(It.IsAny<string>(), It.IsAny<string>()), Times.Never); } [Fact] public async Task Approve_UserChangeRequest_SendSMSSuccess_with_default_mobile_number_provided() { var userChangeRequest = new UserChangeRequest() { Id = 60, AuthUserId = 1400, CurrentValue = "<EMAIL>", ProposedValue = "<EMAIL>", ChangeValueType = "Email", ChangeDateTime = DateTime.Now, ChangedByAuthUserId = 1352, ChangeStatus = "Pending" }; var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = true, UserChangeRequest = userChangeRequest }; var approveUserChangeRequest = new ApproveUserChangeRequest { UserChangeRequestID = 60, ApprovedByAuthUserId = 1352 }; //Arrange var mockUserUow = new Mock<IUserUow>(); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)). 
Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.ApproveUserChangeRequest(approveUserChangeRequest)) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var smsSenderMock = new Mock<ISmsSender>(); var configWrapperMock = new Mock<IConfigWrapper>(); configWrapperMock.Setup(x => x.Get("Sms:DefaultPhoneNumber")).Returns("442222222222"); var service = new UserService(configWrapperMock.Object, mockUserUow.Object, emailSenderMock.Object, null, mockEmailService.Object, null, smsSenderMock.Object); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert mockUserUow.Verify(m => m.GetSendersPhoneNumber(It.IsAny<int>()), Times.Never); smsSenderMock.Verify(m => m.SendMessage(It.IsAny<SmsModel>(), Enums.SmsProviders.TextMagic), Times.Once); mockUserUow.Verify(m => m.GetSendersEmailAddress(It.IsAny<int>()), Times.Never); mockEmailService.Verify(m => m.SendMobileChangeEmailAsync(It.IsAny<string>(), It.IsAny<string>()), Times.Never); } [Fact] public async Task Approve_UserChangeRequest_SendEmailSuccess_with_default_email_provided() { var userChangeRequest = new UserChangeRequest() { Id = 60, AuthUserId = 1400, CurrentValue = "442222222222", ProposedValue = "443333333333", ChangeValueType = "Telephone", ChangeDateTime = DateTime.Now, ChangedByAuthUserId = 1352, ChangeStatus = "Pending" }; var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = true, UserChangeRequest = userChangeRequest }; var approveUserChangeRequest = new ApproveUserChangeRequest { UserChangeRequestID = 60, ApprovedByAuthUserId = 1352 }; //Arrange var mockUserUow = new Mock<IUserUow>(); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)). 
Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.ApproveUserChangeRequest(approveUserChangeRequest)) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var smsSenderMock = new Mock<ISmsSender>(); var configWrapperMock = new Mock<IConfigWrapper>(); configWrapperMock.Setup(x => x.Get("Emails:DefaultEmail")).Returns("<EMAIL>"); var service = new UserService(configWrapperMock.Object, mockUserUow.Object, emailSenderMock.Object, null, mockEmailService.Object, null, smsSenderMock.Object); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert mockUserUow.Verify(m => m.GetSendersEmailAddress(It.IsAny<int>()), Times.Never); mockEmailService.Verify(m => m.SendMobileChangeEmailAsync(It.IsAny<string>(), It.IsAny<string>()), Times.Once); mockUserUow.Verify(m => m.GetSendersPhoneNumber(It.IsAny<int>()), Times.Never); smsSenderMock.Verify(m => m.SendMessage(It.IsAny<SmsModel>(), Enums.SmsProviders.TextMagic), Times.Never); } [Fact] public async Task Approve_UserChangeRequest_SendEmailSuccess_with_no_default_email_provided() { var userChangeRequest = new UserChangeRequest() { Id = 60, AuthUserId = 1400, CurrentValue = "442222222222", ProposedValue = "443333333333", ChangeValueType = "Telephone", ChangeDateTime = DateTime.Now, ChangedByAuthUserId = 1352, ChangeStatus = "Pending" }; var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = true, UserChangeRequest = userChangeRequest }; var approveUserChangeRequest = new ApproveUserChangeRequest { UserChangeRequestID = 60, ApprovedByAuthUserId = 1352 }; //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.GetSendersEmailAddress(It.IsAny<int>())).Returns("<EMAIL>"); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)). 
Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.ApproveUserChangeRequest(approveUserChangeRequest)) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var smsSenderMock = new Mock<ISmsSender>(); var configWrapperMock = new Mock<IConfigWrapper>(); configWrapperMock.Setup(x => x.Get("Emails:DefaultEmail")).Returns(""); var service = new UserService(configWrapperMock.Object, mockUserUow.Object, emailSenderMock.Object, null, mockEmailService.Object, null, smsSenderMock.Object); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert mockUserUow.Verify(m => m.GetSendersEmailAddress(It.IsAny<int>()), Times.Once); mockEmailService.Verify(m => m.SendMobileChangeEmailAsync(It.IsAny<string>(), It.IsAny<string>()), Times.Once); mockUserUow.Verify(m => m.GetSendersPhoneNumber(It.IsAny<int>()), Times.Never); smsSenderMock.Verify(m => m.SendMessage(It.IsAny<SmsModel>(), Enums.SmsProviders.TextMagic), Times.Never); } [Fact] public async Task Approve_UserChangeRequest_NoEmail_WithLessthanTwoApprovals() { var userChangeRequest = new UserChangeRequest() { Id = 60, AuthUserId = 1400, CurrentValue = "442222222222", ProposedValue = "443333333333", ChangeValueType = "Telephone", ChangeDateTime = DateTime.Now, ChangedByAuthUserId = 1352, ChangeStatus = "Pending" }; var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = false, UserChangeRequest = userChangeRequest }; var approveUserChangeRequest = new ApproveUserChangeRequest { UserChangeRequestID = 60, ApprovedByAuthUserId = 1352 }; //Arrange var mockUserUow = new Mock<IUserUow>(); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)). 
Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.ApproveUserChangeRequest(approveUserChangeRequest)) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var smsSenderMock = new Mock<ISmsSender>(); var configWrapperMock = new Mock<IConfigWrapper>(); configWrapperMock.Setup(x => x.Get("Emails:DefaultEmail")).Returns("<EMAIL>"); var service = new UserService(configWrapperMock.Object, mockUserUow.Object, emailSenderMock.Object, null, mockEmailService.Object, null, smsSenderMock.Object); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert mockUserUow.Verify(m => m.GetSendersEmailAddress(It.IsAny<int>()), Times.Never); mockEmailService.Verify(m => m.SendMobileChangeEmailAsync(It.IsAny<string>(), It.IsAny<string>()), Times.Never); mockUserUow.Verify(m => m.GetSendersPhoneNumber(It.IsAny<int>()), Times.Never); smsSenderMock.Verify(m => m.SendMessage(It.IsAny<SmsModel>(), Enums.SmsProviders.TextMagic), Times.Never); } [Fact] public async Task Approve_UserChangeRequest_NoSMS_WithLessthanTwoApprovals() { var userChangeRequest = new UserChangeRequest() { Id = 60, AuthUserId = 1400, CurrentValue = "<EMAIL>", ProposedValue = "<EMAIL>", ChangeValueType = "Email", ChangeDateTime = DateTime.Now, ChangedByAuthUserId = 1352, ChangeStatus = "Pending" }; var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = false, UserChangeRequest = userChangeRequest }; var approveUserChangeRequest = new ApproveUserChangeRequest { UserChangeRequestID = 60, ApprovedByAuthUserId = 1352 }; //Arrange var mockUserUow = new Mock<IUserUow>(); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendUserNewPasswordEmailAsync(It.IsAny<string>(), null)). 
Returns(Task.FromResult(IdentityResult.Success)); mockUserUow.Setup(x => x.ApproveUserChangeRequest(approveUserChangeRequest)) .Returns(Task.FromResult(approveUserChangeResponse)); var emailSenderMock = new Mock<IEmailSender>(); var smsSenderMock = new Mock<ISmsSender>(); var configWrapperMock = new Mock<IConfigWrapper>(); configWrapperMock.Setup(x => x.Get("Sms:DefaultPhoneNumber")).Returns("442222222222"); var service = new UserService(configWrapperMock.Object, mockUserUow.Object, emailSenderMock.Object, null, mockEmailService.Object, null, smsSenderMock.Object); var result = await service.ApproveUserChangeRequest(approveUserChangeRequest); //Assert mockUserUow.Verify(m => m.GetSendersPhoneNumber(It.IsAny<int>()), Times.Never); smsSenderMock.Verify(m => m.SendMessage(It.IsAny<SmsModel>(), Enums.SmsProviders.TextMagic), Times.Never); mockUserUow.Verify(m => m.GetSendersEmailAddress(It.IsAny<int>()), Times.Never); mockEmailService.Verify(m => m.SendMobileChangeEmailAsync(It.IsAny<string>(), It.IsAny<string>()), Times.Never); } [Fact] public void UpdateMyAccountAsync_Identity_Fails_When_Inputted_With_An_Empty_Or_Invalid_Id() { //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult((ApplicationUser)null)); mockUserUow.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationUser>(), It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(IdentityResult.Success)); var service = new UserService(null, mockUserUow.Object, null, null, null, null, null); //Act ApplicationServiceUser userToUpdate = new ApplicationServiceUser { Id = 1, Username = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password = "<PASSWORD>", ClientCompanyId = 439, UpdatedByAuthUserId = 111 }; IdentityResult result = service.UpdateUserAsync(userToUpdate).Result; //Assert Assert.Equal(IdentityResult.Failed().Succeeded, result.Succeeded); } [Fact] public void DeleteUserAsync_Identity_Fails_When_Inputted_With_An_Empty_Or_Invalid_Id() { //Arrange var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.GetUserByEmailAsync(It.IsAny<string>())) .Returns(Task.FromResult((ApplicationUser)null)); mockUserUow.Setup(x => x.DeleteUserAsync(It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(IdentityResult.Success)); var emailSenderMock = new Mock<IEmailSender>(); var service = new UserService(null, mockUserUow.Object, emailSenderMock.Object, null, null, null, null); //Act IdentityResult result = service.DeleteUserAsync("0").Result; //Assert Assert.Equal(IdentityResult.Failed().Succeeded, result.Succeeded); } [Fact] public async Task UpdateUser_That_Doesnt_Exist() { //Arrange const int contactId = 5; var mockUow = new Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up mockUow.Setup(u => u.GetClientUserModelByContactId(contactId)).Returns((ClientUserModel)null); var service = new UserService(null, mockUow.Object, null, null, null, null,null); var updateUser = new ApplicationServiceUser { ClientCompanyContactId = contactId }; //Act var result = await service.UpdateUserContactAsync(updateUser); //Assert result.Succeeded.Should().BeFalse("Because it shouldn't have found any record to update"); result.Errors.Should().Contain(e => e.Code == "ContactNotFound", "return code ContactNotFound if the user cannot be found"); } [Fact] public async Task UpdateUser_By_User_Who_Doesnt_Exist() { //arrange const int contactId = 5; const int updatedBy = 999; var mockUow = new 
Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up var userModel = new ClientUserModel { ApplicationId = 2, ClientCompanyContactId = contactId, ClientCompanyId = 1 }; mockUow.Setup(u => u.GetClientUserModelByContactId(contactId)).Returns(userModel); mockUow.Setup(u => u.GetAuthUserByAuthUserId(updatedBy)).Returns((AuthUser)null); var service = new UserService(null, mockUow.Object, null, null, null, null,null); var updateUser = new ApplicationServiceUser { ClientCompanyContactId = contactId, UpdatedByAuthUserId = updatedBy }; //act var result = await service.UpdateUserContactAsync(updateUser); //assert result.Succeeded.Should().BeFalse("Because the specified auth user for the update does not exist"); result.Errors.Should().Contain(e => e.Code == "InvalidAuthUser", "The update user was invalid."); } [Fact] public async Task UpdateUser_With_No_Email_Or_Phone_Changes() { //Arrange const int contactId = 5; const int updatedBy = 999; const int dayPeriod = 10; var userModel = new ClientUserModel { ApplicationId = 2, ClientCompanyContactId = contactId, ClientCompanyId = 1, PhoneNumberDirect = "441952271509", PhoneNumberMobile = "447718977121", Email = "<EMAIL>", LastEmailChangeDate = new DateTime(2019, 3, 3), LastPhoneNumberMobileChangeDate = new DateTime(2019, 3, 3) }; var authUser = new AuthUser { Id = updatedBy }; var mockUow = new Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up mockUow.Setup(u => u.GetClientUserModelByContactId(contactId)).Returns(userModel); mockUow.Setup(u => u.GetAuthUserByAuthUserId(updatedBy)).Returns(authUser); mockUow.Setup(u => u.ValidateUserMobileChangeRequest(It.IsAny<ClientUserModel>(), It.IsAny<ClientUserModel>(), dayPeriod)).Returns(new UserChangeRequestResponse { InsertOrUpdateUserChangeRequest = false }); mockUow.Setup(u => u.ValidateUserEmailChangeRequest(It.IsAny<ClientUserModel>(), It.IsAny<ClientUserModel>(), dayPeriod)).Returns(new UserChangeRequestResponse { InsertOrUpdateUserChangeRequest = false }); mockUow.Setup(u => u.UpdateUserAsync(It.IsAny<ClientUserModel>())).ReturnsAsync(IdentityResult.Success); mockUow.Setup(u => u.ValidateUserDetails(It.IsAny<UserValidationModel>())).Returns(IdentityResult.Success); var mockSettingService = new Mock<IAppSettingService>(MockBehavior.Strict); mockSettingService.Setup(s => s.GetUserChangeDaysRequiredForApproval()).Returns(dayPeriod); var service = new UserService(null, mockUow.Object, null, null, null, mockSettingService.Object,null); var updateUser = new ApplicationServiceUser { ClientCompanyContactId = contactId, UpdatedByAuthUserId = updatedBy, PhoneNumberDirect = "441952271509", PhoneNumberMobile = "447718977121", Email = "<EMAIL>" }; //Act var result = await service.UpdateUserContactAsync(updateUser); //Assert result.Succeeded.Should().BeTrue(); } [Fact] public async Task UpdateUser_With_Email_Change() { //Arrange const int contactId = 5; const int updatedBy = 999; const int dayPeriod = 10; const int contactAuthUserId = 12; const string originalEmail = "<EMAIL>"; const string newEmail = "<EMAIL>"; var updatedTime = DateTime.Now; var userModel = new ClientUserModel { ApplicationId = 2, ClientCompanyContactId = contactId, ClientCompanyId = 1, PhoneNumberDirect = "441952271509", PhoneNumberMobile = "447718977121", Email = originalEmail, LastEmailChangeDate = new DateTime(2019, 3, 3), LastPhoneNumberMobileChangeDate = new DateTime(2019, 3, 3), UpdatedDateTime = new DateTime(2019, 10, 15), AuthUserId = contactAuthUserId 
}; var authUser = new AuthUser { Id = updatedBy }; var mockUow = new Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up mockUow.Setup(u => u.GetClientUserModelByContactId(contactId)).Returns(userModel); mockUow.Setup(u => u.GetAuthUserByAuthUserId(updatedBy)).Returns(authUser); mockUow.Setup(u => u.ValidateUserMobileChangeRequest(It.IsAny<ClientUserModel>(), It.IsAny<ClientUserModel>(), dayPeriod)).Returns(new UserChangeRequestResponse { InsertOrUpdateUserChangeRequest = false }); mockUow.Setup(u => u.ValidateUserEmailChangeRequest(It.IsAny<ClientUserModel>(), It.IsAny<ClientUserModel>(), dayPeriod)).Returns(new UserChangeRequestResponse { InsertOrUpdateUserChangeRequest = true }); mockUow.Setup(u => u.UpdateUserAsync(It.IsAny<ClientUserModel>())).ReturnsAsync(IdentityResult.Success); UserChangeRequest request = null; mockUow.Setup(u => u.ProcessUserChangeRequest(It.IsAny<UserChangeRequest>())).ReturnsAsync(IdentityResult.Success) .Callback((UserChangeRequest r) => request = r); mockUow.Setup(u => u.ValidateUserDetails(It.IsAny<UserValidationModel>())).Returns(IdentityResult.Success); var mockSettingService = new Mock<IAppSettingService>(MockBehavior.Strict); mockSettingService.Setup(s => s.GetUserChangeDaysRequiredForApproval()).Returns(dayPeriod); var service = new UserService(null, mockUow.Object, null, null, null, mockSettingService.Object,null); var updateUser = new ApplicationServiceUser { ClientCompanyContactId = contactId, UpdatedByAuthUserId = updatedBy, PhoneNumberDirect = "441952271509", PhoneNumberMobile = "447718977121", Email = newEmail, AuthUserId = contactAuthUserId, UpdatedDateTime = updatedTime }; //Act var result = await service.UpdateUserContactAsync(updateUser); //Assert request.Should().NotBeNull(); request.AuthUserId.Should().Be(contactAuthUserId); request.CurrentValue.Should().Be(originalEmail); request.ProposedValue.Should().Be(newEmail); request.ChangeValueType.Should().Be("Email"); request.ChangeStatus.Should().Be("Pending"); request.ChangedByAuthUserId.Should().Be(updatedBy); result.Succeeded.Should().BeTrue(); } [Fact] public async Task UpdateUser_With_Phone_Change() { //Arrange const int contactId = 5; const int updatedBy = 999; const int dayPeriod = 10; const int contactAuthUserId = 12; const string originalEmail = "<EMAIL>"; const string originalPhone = "447718977121"; const string newPhone = "4477189771111"; var updatedTime = DateTime.Now; var userModel = new ClientUserModel { ApplicationId = 2, ClientCompanyContactId = contactId, ClientCompanyId = 1, PhoneNumberDirect = "441952271509", PhoneNumberMobile = originalPhone, Email = originalEmail, LastEmailChangeDate = new DateTime(2019, 3, 3), LastPhoneNumberMobileChangeDate = new DateTime(2019, 3, 3), UpdatedDateTime = new DateTime(2019, 10, 15), AuthUserId = contactAuthUserId }; var authUser = new AuthUser { Id = updatedBy }; var mockUow = new Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up mockUow.Setup(u => u.GetClientUserModelByContactId(contactId)).Returns(userModel); mockUow.Setup(u => u.GetAuthUserByAuthUserId(updatedBy)).Returns(authUser); mockUow.Setup(u => u.ValidateUserMobileChangeRequest(It.IsAny<ClientUserModel>(), It.IsAny<ClientUserModel>(), dayPeriod)).Returns(new UserChangeRequestResponse { InsertOrUpdateUserChangeRequest = true }); mockUow.Setup(u => u.ValidateUserEmailChangeRequest(It.IsAny<ClientUserModel>(), It.IsAny<ClientUserModel>(), dayPeriod)).Returns(new UserChangeRequestResponse { 
InsertOrUpdateUserChangeRequest = false }); mockUow.Setup(u => u.UpdateUserAsync(It.IsAny<ClientUserModel>())).ReturnsAsync(IdentityResult.Success); UserChangeRequest request = null; mockUow.Setup(u => u.ProcessUserChangeRequest(It.IsAny<UserChangeRequest>())).ReturnsAsync(IdentityResult.Success) .Callback((UserChangeRequest r) => request = r); mockUow.Setup(u => u.ValidateUserDetails(It.IsAny<UserValidationModel>())).Returns(IdentityResult.Success); var mockSettingService = new Mock<IAppSettingService>(MockBehavior.Strict); mockSettingService.Setup(s => s.GetUserChangeDaysRequiredForApproval()).Returns(dayPeriod); var service = new UserService(null, mockUow.Object, null, null, null, mockSettingService.Object,null); var updateUser = new ApplicationServiceUser { ClientCompanyContactId = contactId, UpdatedByAuthUserId = updatedBy, PhoneNumberDirect = "441952271509", PhoneNumberMobile = newPhone, Email = originalEmail, AuthUserId = contactAuthUserId, UpdatedDateTime = updatedTime }; //Act var result = await service.UpdateUserContactAsync(updateUser); //Assert request.Should().NotBeNull(); request.AuthUserId.Should().Be(contactAuthUserId); request.CurrentValue.Should().Be(originalPhone); request.ProposedValue.Should().Be(newPhone); request.ChangeValueType.Should().Be("Telephone"); request.ChangeStatus.Should().Be("Pending"); request.ChangedByAuthUserId.Should().Be(updatedBy); result.Succeeded.Should().BeTrue(); } [Fact] public async Task Delete_Contact_Who_Doesnt_Exist() { //Arrange const int contactId = 20; var mockUow = new Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up var service = new UserService(null, mockUow.Object, null, null, null, null,null); mockUow.Setup(m => m.GetUserByClientCompanyContactId(contactId)).Returns((ApplicationUser)null); //Act var result = await service.DeleteUserContactAsync(contactId); //Assert result.Should().NotBeNull(); result.Succeeded.Should().BeFalse("No user could be found, so it should fail"); result.Errors.Should().Contain(e => e.Code == "ContactNotFound"); mockUow.Verify(u => u.GetUserByClientCompanyContactId(contactId), Times.Once); } [Fact] public async Task DeleteContact() { //Arrange const int contactId = 20; var mockUow = new Mock<IUserUow>(MockBehavior.Strict); //throw errors if the UoW gets used without us setting it up var service = new UserService(null, mockUow.Object, null, null, null, null,null); var applicationUser = new ApplicationUser { ClientCompanyContactId = contactId }; mockUow.Setup(m => m.GetUserByClientCompanyContactId(contactId)).Returns(applicationUser); mockUow.Setup(m => m.DeleteUserAsync(applicationUser)).ReturnsAsync(IdentityResult.Success); //Act var result = await service.DeleteUserContactAsync(contactId); //Assert result.Should().NotBeNull(); result.Succeeded.Should().BeTrue("The user should be able to be deleted"); mockUow.Verify(u => u.GetUserByClientCompanyContactId(contactId), Times.Once); mockUow.Verify(u => u.DeleteUserAsync(applicationUser), Times.Once); } private static Mock<IUserUow> mockUserFactory() { var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(uow => uow.AuthUserRepository.GetByPrimaryKey(It.IsAny<int>())) .Returns(new AuthUser { Id = 300, UserName = "testaccount", Email = "<EMAIL>", Password = "<PASSWORD>==" //password hash for "<PASSWORD>" }); return mockUserUow; } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogSwiftintegrationService.cs using System; using System.Collections.Generic; namespace 
Argentex.Core.DataAccess.Entities
{
    public partial class LogSwiftintegrationService
    {
        public int LogId { get; set; }
        public string LogAction { get; set; }
        public int Id { get; set; }
        public bool IsActive { get; set; }
        public DateTime LastStatusChangeDateTime { get; set; }
        public int LastStatusChangeByAuthUserId { get; set; }
        public byte[] UpdateTimeStamp { get; set; }
    }
}<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Helpers/SmsHelpers.cs
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;

namespace Argentex.Core.Service.Helpers
{
    public class SmsHelpers
    {
        /// <summary>
        /// Get the 4 digit validation code for two-factor authentication
        /// </summary>
        /// <returns>4 digit code</returns>
        public static string GenerateValidationCodeFor2FA()
        {
            var generator = new Random();
            // Random.Next's upper bound is exclusive, so 10000 is needed to cover the full
            // 0000-9999 range; Next(0, 9999) could never return "9999".
            var validationCode = generator.Next(0, 10000).ToString("D4");
            return validationCode;
        }

        /// <summary>
        /// Hash of the validation code
        /// </summary>
        /// <param name="stringToEncrypt">any string</param>
        /// <returns>SHA-256 hash formatted as a lowercase hex string</returns>
        public static string GetHash(string stringToEncrypt)
        {
            // compute the SHA-256 digest and format it as a lowercase hex string
            var crypt = new SHA256Managed();
            var hash = new StringBuilder();
            byte[] crypto = crypt.ComputeHash(Encoding.UTF8.GetBytes(stringToEncrypt));
            foreach (byte theByte in crypto)
            {
                hash.Append(theByte.ToString("x2"));
            }
            return hash.ToString();
        }
    }
}<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyVirtualAccountCurrencyBalanceHistory.cs
using System;
using System.Collections.Generic;

namespace Argentex.Core.DataAccess.Entities
{
    public partial class ClientCompanyVirtualAccountCurrencyBalanceHistory
    {
        public int ClientCompanyVirtualAccountId { get; set; }
        public int CurrencyId { get; set; }
        public int TransactionCommitId { get; set; }
        public DateTime BalanceDate { get; set; }
        public decimal? Balance { get; set; }
        public int?
UpdatedByAuthUserId { get; set; } public ClientCompanyVirtualAccount ClientCompanyVirtualAccount { get; set; } public Currency Currency { get; set; } public TransactionCommit TransactionCommit { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Security/SecurityController.cs using Argentex.Core.Api.Exceptions; using Argentex.Core.Api.Models.AccountViewModels; using Argentex.Core.Api.Models.SecurityModels; using Argentex.Core.Service; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.Sms.Models; using AspNet.Security.OpenIdConnect.Primitives; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; using SynetecLogger; using System.Linq; using System.Threading.Tasks; using Argentex.Core.Api.Models; using Argentex.Core.Service.Identity; namespace Argentex.Core.Api.Controllers.Security { [Route("api/[controller]")] public class SecurityController : Controller { private readonly ILogWrapper _logger; private readonly IConfiguration _config; private readonly IIdentityService _identityService; private readonly IEmailService _emailService; private readonly ISmsService _smsService; public SecurityController( IIdentityService identity, ILogWrapper logger, IConfiguration config, IEmailService emailService, ISmsService smsService ) { _identityService = identity; _logger = logger; _config = config; _emailService = emailService; _smsService = smsService; } [AllowAnonymous] [HttpPost] [Route("token")] [Produces("application/json")] public async Task<IActionResult> CreateToken(OpenIdConnectRequest login) { if (!ModelState.IsValid) { return BadRequest(new OpenIdConnectResponse { Error = OpenIdConnectConstants.Errors.InvalidGrant, ErrorDescription = "Invalid request data" }); } var loginService = new LoginServiceModel { Username = login.Username, Password = <PASSWORD>, Grant_Type = login.GrantType, ClientId = login.ClientId, RefreshToken = login.RefreshToken, ClientSecret = login.ClientSecret }; var token = await _identityService.AuthenticateAsync(loginService); if (token == null) { return BadRequest(new OpenIdConnectResponse { Error = OpenIdConnectConstants.Errors.InvalidGrant, ErrorDescription = "Please check that your credentials are correct" }); } else // sending 2FA Message { // sending message to the user if (loginService.Grant_Type == OpenIdConnectConstants.GrantTypes.Password) { token.Validation_code = await _smsService.Send2FAMessage(login.Username); if (string.IsNullOrEmpty(token.Validation_code)) { return BadRequest(new { data = $"Error sending message, please try again." 
}); } } } return Ok(token); } [AllowAnonymous] [HttpPost("token/refresh")] [Produces("application/json")] public async Task<IActionResult> RefreshToken(RefreshTokenModel model) { if (!ModelState.IsValid) { return BadRequest(new OpenIdConnectResponse { Error = OpenIdConnectConstants.Errors.InvalidGrant, ErrorDescription = "Invalid request data" }); } var token = await _identityService.RefreshToken(model); if (token == null) { return BadRequest(new OpenIdConnectResponse { Error = OpenIdConnectConstants.Errors.InvalidGrant, ErrorDescription = "Please check that your credentials are correct" }); } return Ok(token); } [HttpGet] [Route("resend-validation-code/{username}")] public async Task<IActionResult> ResendValidationCode(string username) { if (!ModelState.IsValid || string.IsNullOrEmpty(username)) { return BadRequest(new OpenIdConnectResponse { Error = OpenIdConnectConstants.Errors.InvalidGrant, ErrorDescription = "Invalid request data" }); } // sending the message var validationCode = await _smsService.Send2FAMessage(username); if (string.IsNullOrEmpty(validationCode)) { return BadRequest(new { data = $"Error sending message, please try again." }); } return Ok(new { validationCode }); } [HttpPut] [Route("change-password")] public async Task<IActionResult> ChangePassword([FromBody] ChangePasswordModel model) { if (!ModelState.IsValid) { return BadRequest(ModelState); } try { string message; var result = await _identityService.ChangePasswordAsync(model.UserId.ToString(), model.CurrentPassword, model.NewPassword, model.ConfirmPassword); if (result.Succeeded) { message = $"Password of user {model.UserName} was updated successfully"; return Ok(new { data = message }); } message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error updating password user {model.UserName}. 
Message: {message}")); return BadRequest(new { data = result.Errors.Select(x => x.Description) }); } catch (PasswordsDoNotMatchException e) { _logger.Error(e); return BadRequest(new { data = e.Message }); } catch (ApplicationUserNotFoundException e) { _logger.Error(e); return BadRequest(new { data = e.Message }); } catch (PasswordAlreadyUsedException e) { _logger.Error(e); return BadRequest(new { data = e.Message }); } } [HttpPost] [AllowAnonymous] [Route("pass-reset-link")] public async Task<IActionResult> ForgotPassword(ForgotPasswordViewModel model) { if (!ModelState.IsValid) { return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); } var result = await _emailService.SendResetPasswordEmailAsync(model.UserName); if (!result.Succeeded) { var errors = string.Join(", ", result.Errors); _logger.Info(errors); if (result.Errors.Any(e => e.Code != IdentityResultCodes.UserNotFound && e.Code != IdentityResultCodes.InvalidUserState)) return BadRequest(ResponseModel.ResponseFromIdentityModel(result)); } return Ok(new {message = $"If user {model.UserName} exists, an e-mail with a password reset link has been sent."}); //Always return Ok, to prevent username fishing } [HttpPost] [AllowAnonymous] [Route("reset-password")] public async Task<IActionResult> ResetPasswordAsync([FromBody] ResetPasswordViewModel model) { if (!ModelState.IsValid) { return BadRequest("Invalid input data"); } try { var result = await _identityService.ResetPasswordAsync(model.UserName, model.Code, model.Password); if (!result.Succeeded) { var errors = string.Join(", ", result.Errors); _logger.Info(errors); if (result.Errors.Any(e => e.Code != IdentityResultCodes.UserNotFound && e.Code != IdentityResultCodes.InvalidUserState)) return BadRequest(errors); } return Ok(); //Always return Ok, to prevent username fishing } catch (PasswordAlreadyUsedException e) { _logger.Error(e); return BadRequest(e.Message); } } [HttpPost] [Route("set-user-admin/{username}")] public async Task<IActionResult> SetUserAsAdmin(string username) { await _identityService.SetUserAsAdmin(username); return Ok(); } [HttpPost] [AllowAnonymous] [Route("verify-token")] public async Task<IActionResult> VerifyUserToken([FromBody] ResetPasswordViewModel model) { var isTokenValid = await _identityService.VerifyUserToken(model.UserName, model.Code); return Ok(isTokenValid); } [HttpGet] [AllowAnonymous] [Route("logout-notification/{identityUserName}/{refreshToken}")] public async Task<IActionResult> LogoutNotification(string identityUserName, string refreshToken) { await _identityService.LogoutAsync(identityUserName, refreshToken); return Ok(); } protected override void Dispose(bool disposing) { if (disposing) { _identityService.Dispose(); //_logger.Dispose(); //TODO base.Dispose(disposing); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyCompliance.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyCompliance { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int ClientCompanyId { get; set; } public int? AmlriskId { get; set; } public int? RegisteredDomicileCountryId { get; set; } public DateTime? RefreshDueDateTime { get; set; } public decimal? ExpectedTotalVolume { get; set; } public int? ExpectedFrequencyId { get; set; } public decimal? 
ExpectedMaxTradeSize { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public decimal? TurnoverGbp { get; set; } public decimal? BalanceSheetGbp { get; set; } public decimal? OwnFundsGbp { get; set; } public bool? Regulated { get; set; } public int? ClassificationId { get; set; } public int? ReasonId { get; set; } public bool? Ttca { get; set; } public int? NatureId { get; set; } public bool? RequestInvoices { get; set; } public bool? ThirdPartyPayments { get; set; } public bool? DelegatedReporting { get; set; } public bool? IsMiFid { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/ClientCompany/IClientCompanyAccountsService.cs using Argentex.Core.Service.Models.ClearingCodePrefix; using Argentex.Core.Service.Models.ClientCompany; using System; using System.Collections.Generic; namespace Argentex.Core.Service.ClientCompanies { public interface IClientCompanyAccountsService : IDisposable { IEnumerable<ClientCompanyAccountModel> GetClientCompanyAccounts(int clientCompanyId); ClientCompanyAccountModel GetClientCompanyAccount(int clientCompanyOpiId); void AddSettlementAccount(SettlementAccountModel settlementAccount); IEnumerable<ClearingCodePrefixModel> GetClearingCodePrefixes(); void EditSettlementAccount(SettlementAccountModel settlementAccount); void SetAccountAsDefault(SetDefaultAccountModel model); IEnumerable<FXForwardTrade2OPIModel> GetTradeOPIs(string tradeCode); void AddTradeOPI(FXForwardTrade2OPIModel model); void DeleteSettlementAccount(int clientCompanyOpiId, int authUserName); int GetNumberOfAssociatedTrades(int clientCompanyOpiId); } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/IUserUow.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Microsoft.AspNetCore.Identity; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System.Collections.Generic; using System.Threading.Tasks; using System.Linq; using OpenIddict.EntityFrameworkCore.Models; using Argentex.Core.UnitsOfWork.Users.Model; namespace Argentex.Core.UnitsOfWork.Users { public interface IClientApplicationUow : IBaseUow { Task<OpenIddictApplication> GetClientCredentialsAsync(string clientId); } public interface IUserUow : IBaseUow { IGenericRepo<AuthUser> AuthUserRepository { get; } IGenericRepo<ClientCompanyContact> ClientCompanyContactRepository { get; } IGenericRepo<ActivityLog> ActivityLogRepo { get; } IGenericRepo<Activity> ActivityRepo { get; } IGenericRepo<ApplicationUser> ApplicationUserRepo { get; } IGenericRepo<AppUser> AppUserRepository { get; } IGenericRepo<PreviousPassword> PreviousPasswordsRepository { get; } IGenericRepo<ApplicationUserRole> ApplicationUserRoleRepository { get; } IGenericRepo<AuthApplication> AuthApplicationRepository { get; } Task<SignInResult> PasswordSignInAsync(string user, string password, bool isPersistent, bool lockoutOnFailure); Task CurrentUserSignOutAsync(); Task<IdentityResult> AddUserAsync(ClientUserModel user, string password); Task<IdentityResult> UpdateUserAsync(ApplicationUser userToUpdate, ApplicationUser originalUser); Task<IdentityResult> UpdateUserAsync(ClientUserModel userToUpdate); UserChangeRequestResponse ValidateUserMobileChangeRequest(ClientUserModel updatedClientUser, ClientUserModel originalClientUser, int daysPeriod); UserChangeRequestResponse ValidateUserEmailChangeRequest(ClientUserModel updatedClientUser, ClientUserModel originalClientUser, int 
daysPeriod); Task<IdentityResult> ApproveUserAsync(ApplicationUser user); Task<IdentityResult> AuthoriseSignatoryAsync(ApplicationUser user); Task<IdentityResult> DeleteUserAsync(ApplicationUser user); IQueryable<ApplicationUser> GetUnapprovedUsers(); IQueryable<ApplicationUser> GetUsersByCompanyId(int clientCompanyId); ApplicationUser GetUserByClientCompanyContactId(int clientCompanyContactId); Task<ApplicationUser> GetUserByIdAsync(string userId); Task<ApplicationUser> GetUserByEmailAsync(string email); Task<ApplicationUser> GetUserByNameAsync(string userId); AuthUser GetAuthUserByAuthUserId(int authUserId); Task<bool> IsUserByNameAsync(string userId); Task<IdentityResult> ChangePasswordAsync(ApplicationUser user, string newPasswordHash); Task PersistToken(Token rt); Task ReplaceToken(Token newRefreshToken, Token oldRefreshToken); Task RemoveToken(Token newRefreshToken); Token GetRefreshToken(int userID, string refreshToken); Task<IList<string>> GetRolesAsync(ApplicationUser user); Task<string> GenerateEmailConfirmationTokenAsync(ApplicationUser user); Task<string> GeneratePasswordResetTokenAsync(ApplicationUser user); Task<bool> VerifyToken(ApplicationUser user, string tokenProvider, string tokenPurpose, string tokenCode); Task<IdentityResult> ResetPasswordAsync(ApplicationUser user, string code, string password); IQueryable<PreviousPassword> GetLastPasswords(long userId); Task<string> HashPasswordAsync(string password); Task SetRoleForUser(long userId, long roleId); IQueryable<ApplicationRole> GetRole(string role); IQueryable<ApplicationUser> GetApplicationUserByAuthUserId(int authUserId); IQueryable<IGrouping<int, ActivityLog>> GetActivityLog(IList<int> clientCompanyIDs, string activityType); IQueryable<ActivityLog> GetUserActivityLog(int authUserId); Task LogActivity(ActivityLog log); AppUser GetAppUserById(int authUserId); IdentityResult ValidateUserDetails(UserValidationModel user); ClientUserModel GetClientUserModelByContactId(int clientCompanyContactId); UserChangeRequest GetUserChangeRequest(int authUserID, string changeStatus, string changeValueType); UserChangeRequest GetUserChangeRequest(int userChangeRequestId); IEnumerable<PendingApprovalUserChangeRequest> GetPendingChangeRequest(); Task<ApproveUserChangeResponse> ApproveUserChangeRequest(ApproveUserChangeRequest approveUserChangeRequest); Task<IdentityResult> ProcessUserChangeRequest(UserChangeRequest changeRequest); IEnumerable<AppUser> GetAllDirectorsAsList(); string GetSendersEmailAddress(int authUserId); string GetSendersPhoneNumber(int authUserId); string GenerateUniqueUsername(string initialValue = ""); } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportType { public EmirreportType() { EmirreportField = new HashSet<EmirreportField>(); EmirreportFxforwardTrade = new HashSet<EmirreportFxforwardTrade>(); } public int Id { get; set; } public string Description { get; set; } public ICollection<EmirreportField> EmirreportField { get; set; } public ICollection<EmirreportFxforwardTrade> EmirreportFxforwardTrade { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/RequestOrigin.cs namespace Argentex.Core.Service.Enums { public enum RequestOrigin { ArgentexTrader, ClientSite, Unknown } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationMessageField.cs using System; using System.Collections.Generic; 
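// Note: this file follows the same shape as the rest of Argentex.Core.DataAccess/Entities -
// a partial class of plain column properties plus navigation collections that the constructor
// pre-initialises. That shape reads like database-first EF scaffolding output, although the
// generating tool is not named anywhere in this repo, so treat that as an inference.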
namespace Argentex.Core.DataAccess.Entities { public partial class SwiftvalidationMessageField { public SwiftvalidationMessageField() { SwiftvalidationCurrencyMessageField = new HashSet<SwiftvalidationCurrencyMessageField>(); } public int Id { get; set; } public string Name { get; set; } public string Description { get; set; } public ICollection<SwiftvalidationCurrencyMessageField> SwiftvalidationCurrencyMessageField { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactModel.cs using Argentex.Core.DataAccess.Entities; using System; namespace Argentex.Core.Service { public class ClientCompanyContactModel { public int ID { get; set; } public string ContactTitle { get; set; } public string ContactForename { get; set; } public string ContactSurname { get; set; } public string ContactEmail { get; set; } public string ContactTelephone { get; set; } public bool Authorized { get; set; } public string TelephoneMobile { get; set; } public string TelephoneOther { get; set; } public DateTime? BirthDay { get; set; } public string NiNumber { get; set; } public string BloombergGpi { get; set; } public bool ReceiveNotifications { get; set; } public bool ReceiveAMReport { get; set; } public bool ReceiveActivityReport { get; set; } public DateTime UpdatedDateTime { get; set; } public string Notes { get; set; } public string FullName { get; set; } public int ClientSiteAuthUserID { get; set; } public string Position { get; set; } public bool PrimaryContact { get; set; } public string ASPNumber { get; set; } public DateTime ASPExpirationDate { get; set; } public DateTime? ASPCreationDate { get; set; } public DateTime? LastTelephoneChangeDate { get; set; } public DateTime? LastEmailChangeDate { get; set; } public string UserName { get; set; } public bool IsApproved { get; set; } public int UpdatedByAuthUserId { get; set; } public ClientCompanyModel ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Helpers/ConfigWrapper.cs using Microsoft.Extensions.Configuration; namespace Argentex.Core.Service.Helpers { public class ConfigWrapper : IConfigWrapper { private readonly IConfiguration _config; public ConfigWrapper(IConfiguration config) { _config = config; } public string Get(string key) { return _config[key]; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOptionCount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOptionCount { public int ClientCompanyId { get; set; } public int OptionCount { get; set; } public ClientCompany ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/BreachLevel.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class BreachLevel { public BreachLevel() { Breach = new HashSet<Breach>(); BreachType = new HashSet<BreachType>(); } public int Id { get; set; } public string Description { get; set; } public int Sequence { get; set; } public ICollection<Breach> Breach { get; set; } public ICollection<BreachType> BreachType { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ActivityTabHourDayRange.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ActivityTabHourDayRange { public ActivityTabHourDayRange() { ActivityTabUserData = new HashSet<ActivityTabUserData>(); } 
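        // LowerLimit/UpperLimit bound the bucket that the Range label describes (hour-of-day or
        // day ranges, judging by the class name); ActivityTabUserData rows attach to a bucket via
        // the collection initialised above. The column meanings are not documented elsewhere in
        // this dump, so this reading is an assumption.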
public int Id { get; set; } public string Range { get; set; } public int LowerLimit { get; set; } public int UpperLimit { get; set; } public ICollection<ActivityTabUserData> ActivityTabUserData { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/BankAccountCurrencyBalance.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class BankAccountCurrencyBalance { public int BankAccountId { get; set; } public int CurrencyId { get; set; } public decimal? Balance { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime? BalanceDate { get; set; } public int TransactionCommitId { get; set; } public BankAccount BankAccount { get; set; } public Currency Currency { get; set; } public TransactionCommit TransactionCommit { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Payment.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Payment { public Payment() { BankAccountTransaction = new HashSet<BankAccountTransaction>(); Breach = new HashSet<Breach>(); ClientCompanyOpitransaction = new HashSet<ClientCompanyOpitransaction>(); SwiftincomingFileStatement = new HashSet<SwiftincomingFileStatement>(); Swiftmessage = new HashSet<Swiftmessage>(); VirtualAccountTransaction = new HashSet<VirtualAccountTransaction>(); } public int Id { get; set; } public string Code { get; set; } public int PaymentTypeId { get; set; } public DateTime CreatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime UpdateDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public bool Authorised { get; set; } public int? AuthorisedByAuthUserId { get; set; } public DateTime? AuthorisedDateTime { get; set; } public string FxforwardTradeCode { get; set; } public int? ClientCompanyId { get; set; } public int CurrencyId { get; set; } public DateTime ValueDate { get; set; } public decimal? Amount { get; set; } public string Reason { get; set; } public string Comments { get; set; } public bool? NotifyClient { get; set; } public bool Applied { get; set; } public DateTime? AppliedDateTime { get; set; } public int? PaymentRecReasonId { get; set; } public int? TransactionCommitId { get; set; } public bool IsDeleted { get; set; } public string Reference { get; set; } public decimal? ApplicableRate { get; set; } public bool? IsSwiftpayment { get; set; } public int PaymentSwiftoutgoingStatusId { get; set; } public int? SwiftAuth1ByAuthUserId { get; set; } public int? SwiftAuth2ByAuthUserId { get; set; } public DateTime? SwiftAuth1DateTime { get; set; } public DateTime? SwiftAuth2DateTime { get; set; } public bool? 
IsDebitedForMfidaccounts { get; set; } public AuthUser AuthorisedByAuthUser { get; set; } public ClientCompany ClientCompany { get; set; } public AuthUser CreatedByAuthUser { get; set; } public Currency Currency { get; set; } public FxforwardTrade FxforwardTradeCodeNavigation { get; set; } public PaymentRecReason PaymentRecReason { get; set; } public PaymentSwiftoutgoingStatus PaymentSwiftoutgoingStatus { get; set; } public PaymentType PaymentType { get; set; } public AuthUser SwiftAuth1ByAuthUser { get; set; } public AuthUser SwiftAuth2ByAuthUser { get; set; } public TransactionCommit TransactionCommit { get; set; } public AuthUser UpdatedByAuthUser { get; set; } public ICollection<BankAccountTransaction> BankAccountTransaction { get; set; } public ICollection<Breach> Breach { get; set; } public ICollection<ClientCompanyOpitransaction> ClientCompanyOpitransaction { get; set; } public ICollection<SwiftincomingFileStatement> SwiftincomingFileStatement { get; set; } public ICollection<Swiftmessage> Swiftmessage { get; set; } public ICollection<VirtualAccountTransaction> VirtualAccountTransaction { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyComplianceNote.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyComplianceNote { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int ClientCompanyId { get; set; } public string Title { get; set; } public string NoteText { get; set; } public int? AuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Currencies/ICurrencyService.cs using System; using System.Collections.Generic; using Argentex.Core.Service.Models.Currencies; namespace Argentex.Core.Service.Currencies { public interface ICurrencyService : IDisposable { double GetCurrencyPairRate(string currencyPair); CurrencyModel GetCurrency(int currencyId); int GetCurrencyId(string code); IEnumerable<CurrencyModel> GetCurrencies(); } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Notifications/INotificationUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using System.Data; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.UnitsOfWork.Notifications { public interface INotificationUow : IBaseUow { // v1 IQueryable<AppUserNotification> GetCompanyAppUserNotification(int clientCompanyID); bool SaveAppUserNotification(AppUserNotification model); // v2 //IQueryable<NotificationType> GetNotificationTypes(); //IQueryable<AppUserNotificationType> GetCompanyNotifications(int clientCompanyID); //bool SaveUserNotification(AppUserNotificationType model); } } <file_sep>/agfx-fxdb-core-api-dmz/README.md # agfx-fxdb-core-api Argentex FXDB Core API test<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/ClientCompanyNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class ClientCompanyNotFoundException : Exception { public ClientCompanyNotFoundException() : base() { } public ClientCompanyNotFoundException(string message) : base(message) { } public ClientCompanyNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/SmsService.cs using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Sms.Models; 
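// Note on how the pieces below fit together: Send2FAMessage generates the 4-digit code with
// SmsHelpers.GenerateValidationCodeFor2FA(), delivers it through ISmsSender, and returns only
// the SHA-256 hash of the code (SmsHelpers.GetHash). The verifying side is not part of this
// file, but it would presumably compare hashes along these lines:
//     bool isValid = SmsHelpers.GetHash(enteredCode) == returnedValidationCodeHash;
// (illustrative sketch under that assumption, not the project's confirmed implementation)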
using Argentex.Core.Service.Sms.SmsSender; using System.Threading.Tasks; namespace Argentex.Core.Service { public class SmsService : ISmsService { #region Constructor & Dependencies private readonly IConfigWrapper _config; private readonly ISmsSender _smsSender; private readonly IIdentityService _identityService; public SmsService(IConfigWrapper config, ISmsSender smsSender, IIdentityService identityService) { _config = config; _smsSender = smsSender; _identityService = identityService; } #endregion #region Properties private bool _disposed; #endregion public async Task<string> Send2FAMessage(string username) { var validationCode = SmsHelpers.GenerateValidationCodeFor2FA(); var messageTemplate = _config.Get("Sms:2FAMessageTemplate"); // if a default phone number is set for dev enviroment we use that var userPhoneNumber = _config.Get("Sms:DefaultPhoneNumber"); if (string.IsNullOrEmpty(userPhoneNumber)) { userPhoneNumber = await _identityService.GetUserPhoneNumber(username); } if (!string.IsNullOrEmpty(userPhoneNumber)) { // creating the model var smsModel = new SmsModel() { PhoneNumber = userPhoneNumber, Message = string.Format(messageTemplate, validationCode) }; // send the message to client var isMessageSent = _smsSender.SendMessage(smsModel); // Encrypt validation code var encryptedValidationCode = SmsHelpers.GetHash(validationCode); return isMessageSent ? encryptedValidationCode : string.Empty; } return string.Empty; } #region Dispose protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _smsSender?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationCurrencyCountry.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftvalidationCurrencyCountry { public int CurrencyId { get; set; } public int CountryId { get; set; } public int OptionId { get; set; } public Country Country { get; set; } public Currency Currency { get; set; } public SwiftvalidationOption Option { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/PasswordsDoNotMatchException.cs using System; namespace Argentex.Core.Service.Exceptions { public class PasswordsDoNotMatchException : Exception { public PasswordsDoNotMatchException() : base() { } public PasswordsDoNotMatchException(string message) : base(message) { } public PasswordsDoNotMatchException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompany.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompany { public ClientCompany() { ClientCompanyCompliance = new HashSet<ClientCompanyCompliance>(); ClientCompanyComplianceNote = new HashSet<ClientCompanyComplianceNote>(); ClientCompanyContact = new HashSet<ClientCompanyContact>(); ClientCompanyCurrencyDefaultOpi = new HashSet<ClientCompanyCurrencyDefaultOpi>(); ClientCompanyIbrelationship = new HashSet<ClientCompanyIbrelationship>(); ClientCompanyNote = new HashSet<ClientCompanyNote>(); ClientCompanyOnlineDetails = new HashSet<ClientCompanyOnlineDetails>(); ClientCompanyOpi = new HashSet<ClientCompanyOpi>(); ClientCompanySalesAppUser = new HashSet<ClientCompanySalesAppUser>(); ClientCompanyVirtualAccount = new HashSet<ClientCompanyVirtualAccount>(); FxforwardTrade = new HashSet<FxforwardTrade>(); Fxoption = new 
HashSet<Fxoption>(); Payment = new HashSet<Payment>(); SwiftincomingMatchedAccount = new HashSet<SwiftincomingMatchedAccount>(); } public int Id { get; set; } public string Crn { get; set; } public string Name { get; set; } public string Description { get; set; } public string TradingName { get; set; } public string TelephoneNumber { get; set; } public string FaxNumber { get; set; } public string WebsiteUrl { get; set; } public string Address { get; set; } public int? ClientCompanyTypeId { get; set; } public int ClientCompanyStatusId { get; set; } public int? DealerAppUserId { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public string ImportantNote { get; set; } public int? ClientCompanyCategoryId { get; set; } public bool IsHouseAccount { get; set; } public string PostCode { get; set; } public DateTime? ApprovedDateTime { get; set; } public bool? IsKyc { get; set; } public bool? IsTandCs { get; set; } public bool? IsRiskWarning { get; set; } public int? ClientCompanyOptionStatusId { get; set; } public DateTime? ApprovedOptionDateTime { get; set; } public bool IsPitched { get; set; } public int? PitchedByAppUserId { get; set; } public DateTime? PitchedDateTime { get; set; } public DateTime? AccountFormsSentDateTime { get; set; } public bool IsInternalAccount { get; set; } public string QualifiedNewTradeCode { get; set; } public string TradingAddress { get; set; } public int? MaxOpenGbp { get; set; } public int? MaxTradeSizeGbp { get; set; } public int? MaxTenorMonths { get; set; } public decimal? MaxCreditLimit { get; set; } public string TradingPostCode { get; set; } public string EmirLei { get; set; } public bool? EmirEea { get; set; } public bool? AssignNewTrades { get; set; } public int? ClientCompanyIndustrySectorId { get; set; } public int ClientCompanySalesRegionId { get; set; } public string SpreadsNote { get; set; } public int? ClientCompanyLinkedGroupId { get; set; } public bool IsExcludedFromEmoney { get; set; } public DateTime? FirstTradeDate { get; set; } public int ClientCompanyCreditTypeId { get; set; } public DateTime? 
LastContractDate { get; set; } public ClientCompanyCategory ClientCompanyCategory { get; set; } public ClientCompanyCreditType ClientCompanyCreditType { get; set; } public ClientCompanyIndustrySector ClientCompanyIndustrySector { get; set; } public ClientCompanyLinkedGroup ClientCompanyLinkedGroup { get; set; } public ClientCompanySalesRegion ClientCompanySalesRegion { get; set; } public ClientCompanyStatus ClientCompanyStatus { get; set; } public ClientCompanyType ClientCompanyType { get; set; } public FxforwardTrade QualifiedNewTradeCodeNavigation { get; set; } public ClientCompanyOptionCount ClientCompanyOptionCount { get; set; } public ClientCompanyPipeline ClientCompanyPipeline { get; set; } public ClientCompanyTradeCount ClientCompanyTradeCount { get; set; } public ICollection<ClientCompanyCompliance> ClientCompanyCompliance { get; set; } public ICollection<ClientCompanyComplianceNote> ClientCompanyComplianceNote { get; set; } public ICollection<ClientCompanyContact> ClientCompanyContact { get; set; } public ICollection<ClientCompanyCurrencyDefaultOpi> ClientCompanyCurrencyDefaultOpi { get; set; } public ICollection<ClientCompanyIbrelationship> ClientCompanyIbrelationship { get; set; } public ICollection<ClientCompanyNote> ClientCompanyNote { get; set; } public ICollection<ClientCompanyOnlineDetails> ClientCompanyOnlineDetails { get; set; } public ICollection<ClientCompanyOpi> ClientCompanyOpi { get; set; } public ICollection<ClientCompanySalesAppUser> ClientCompanySalesAppUser { get; set; } public ICollection<ClientCompanyVirtualAccount> ClientCompanyVirtualAccount { get; set; } public ICollection<FxforwardTrade> FxforwardTrade { get; set; } public ICollection<Fxoption> Fxoption { get; set; } public ICollection<Payment> Payment { get; set; } public ICollection<SwiftincomingMatchedAccount> SwiftincomingMatchedAccount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Settlements/SettlementUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System; using System.Collections.Generic; using System.Linq; namespace Argentex.Core.UnitsOfWork.Settlements { public class SettlementUow : BaseUow, ISettlementUow { #region Repos private IGenericRepo<FxforwardTrade> _tradeRepository; private IGenericRepo<FxforwardTradeSwapCount> _tradeSwapCountRepository; private IGenericRepo<Fxswap> _fxswapRepository; private IGenericRepo<FxforwardTrade2Opi> _fxforwardTrade2OpiRepo; private IGenericRepo<ClientSiteAction2Fxswap> _clientSiteAction2FxswapRepository; private IGenericRepo<DataAccess.Entities.ClientSiteAction> _clientSiteActionRepository; private IGenericRepo<FxforwardTrade> TradeRepository => _tradeRepository = _tradeRepository ?? new GenericRepo<FxforwardTrade>(Context); private IGenericRepo<FxforwardTradeSwapCount> TradeSwapCountRepository => _tradeSwapCountRepository = _tradeSwapCountRepository ?? new GenericRepo<FxforwardTradeSwapCount>(Context); private IGenericRepo<Fxswap> FxswapRepository => _fxswapRepository = _fxswapRepository ?? new GenericRepo<Fxswap>(Context); private IGenericRepo<FxforwardTrade2Opi> FxforwardTrade2OpiRepo => _fxforwardTrade2OpiRepo = _fxforwardTrade2OpiRepo ?? new GenericRepo<FxforwardTrade2Opi>(Context); private IGenericRepo<ClientSiteAction2Fxswap> ClientSiteAction2FxswapRepository => _clientSiteAction2FxswapRepository = _clientSiteAction2FxswapRepository ?? 
new GenericRepo<ClientSiteAction2Fxswap>(Context); private IGenericRepo<DataAccess.Entities.ClientSiteAction> ClientSiteActionRepository => _clientSiteActionRepository = _clientSiteActionRepository ?? new GenericRepo<DataAccess.Entities.ClientSiteAction>(Context); #endregion public SettlementUow(FXDB1Context context) : base(context) { } public IList<FxforwardTrade2Opi> GetTradeOpis(string parentTradeCode) { var result = new List<FxforwardTrade2Opi>(); result = FxforwardTrade2OpiRepo.GetQueryable( e => e.FxforwardTradeCode == parentTradeCode, includeProperties: "ClientCompanyOpi,ClientCompanyOpi.Currency,FxforwardTradeCodeNavigation,FxforwardTradeCodeNavigation.Lhsccy,FxforwardTradeCodeNavigation.Rhsccy").ToList(); return result; } public IDictionary<FxforwardTrade, DataAccess.Entities.ClientSiteAction> GetTradeSwaps(string parentTradeCode) { var result = new Dictionary<FxforwardTrade, DataAccess.Entities.ClientSiteAction>(); var swaps = FxswapRepository.GetQueryable(e => e.ParentTradeCode == parentTradeCode); foreach(var swap in swaps) { var csa2Swap = ClientSiteAction2FxswapRepository.GetQueryable(e => e.FxswapId == swap.Id).SingleOrDefault(); if(csa2Swap != null) { var csa = ClientSiteActionRepository.GetQueryable(e => e.Id == csa2Swap.ClientSiteActionId).SingleOrDefault(); if(csa != null) { var trade = TradeRepository.GetQueryable(e => e.Code == swap.DeliveryLegTradeCode, includeProperties: "Rhsccy,Lhsccy,ClientCompanyOpi,ClientCompanyOpi.Currency").SingleOrDefault(); result.Add(trade, csa); } } } return result; } public FxforwardTradeSwapCount GetTradeSwapCount(string parentTradeCode) { return TradeSwapCountRepository.GetByPrimaryKey(parentTradeCode); } public int Assign(FxforwardTrade deliveryLegTrade, FxforwardTrade reversalLegTrade, string parentTradeCode, int authUserID) { // updating tradeswapcount +1 var tradeSwapCount = GetTradeSwapCount(parentTradeCode); tradeSwapCount.SwapCount++; TradeSwapCountRepository.Update(tradeSwapCount); // inserting the counts TradeSwapCountRepository.Insert( new FxforwardTradeSwapCount { FxforwardTradeCode = deliveryLegTrade.Code, SwapCount = 0 }); TradeSwapCountRepository.Insert( new FxforwardTradeSwapCount { FxforwardTradeCode = reversalLegTrade.Code, SwapCount = 0 }); // inserting trades TradeRepository.Insert(deliveryLegTrade); TradeRepository.Insert(reversalLegTrade); // inserting the swap var swap = new Fxswap() { ParentTradeCode = parentTradeCode, DeliveryLegTradeCode = deliveryLegTrade.Code, ReversalLegTradeCode = reversalLegTrade.Code, CreatedAuthUserId = authUserID }; FxswapRepository.Insert(swap); // ??? 
InsertTradeLog(trade, "INSERT"); SaveContext(); return swap.Id; } public void AddTrade2Opi(FxforwardTrade2Opi trade2opi) { FxforwardTrade2OpiRepo.Insert(trade2opi); SaveContext(); } public void DeleteAssignedSettlement(long settlementId) { // TODO fix long-int var fxforwardTrade2Opi = FxforwardTrade2OpiRepo.GetByPrimaryKey(settlementId); FxforwardTrade2OpiRepo.Delete(fxforwardTrade2Opi); SaveContext(); } public decimal GetSettlementAmountForTrade(string tradeCode) => FxforwardTrade2OpiRepo.GetQueryable(x => x.FxforwardTradeCode == tradeCode).Sum(x => x.Amount); public DateTime GetMaxCreateDateForTrade(string tradeCode) => FxforwardTrade2OpiRepo.GetQueryable(x => x.FxforwardTradeCode == tradeCode) .OrderByDescending(x => x.CreatedDateTime).Select(x => x.CreatedDateTime).FirstOrDefault(); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportOutgoingFileContent.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportOutgoingFileContent { public EmirreportOutgoingFileContent() { EmirreportOutgoingFile = new HashSet<EmirreportOutgoingFile>(); } public int Id { get; set; } public string FileContent { get; set; } public ICollection<EmirreportOutgoingFile> EmirreportOutgoingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Monitoring/MonitoringService.cs using Argentex.ClientSite.Service.Http; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Models.Trades; using Argentex.Core.Service.Trade; using Argentex.Core.Service.User; using Argentex.Core.SignalRService; using Newtonsoft.Json; using System; using System.Collections.Generic; using System.Threading.Tasks; namespace Argentex.Core.Service.Monitoring { public class MonitoringService : IMonitoringService { private readonly IHttpService _httpService; private readonly IAppSettingService _appSettingService; private readonly IMonitoringHub _monitoring; private readonly IClientCompanyService _clientCompanyService; private readonly IUserService _userService; private bool _disposed; public MonitoringService(IHttpService httpService, IAppSettingService appSettingService, IMonitoringHub monitoring, IClientCompanyService clientCompanyService, IUserService userService) { _httpService = httpService; _appSettingService = appSettingService; _monitoring = monitoring; _clientCompanyService = clientCompanyService; _userService = userService; } public async Task<bool> NotifyTradeStarted(int authUserId) { try { await _monitoring.TradingStarted(authUserId.ToString()); } catch (System.Exception ex) { var error = ex.Message; return false; } return true; } public async Task<bool> CheckExecuteTrade(TradeNotificationModel model) { try { //determine if the trade should be executed //in case there is an active temporary spread the trade can be executed //otherwise the CSR user must wait for the Trader site user to adjust the spread, //cancel the spread adjusting or the adjust spread counter to time out bool executeTrade = _clientCompanyService.GetTradeExecutionStatusBySpread(model.ClientCompanyID, model.BuyCcy, model.SellCcy, model.IsBuy); if (executeTrade) { return true; } else { //the trade should not be executed at the moment FillInTradeNotificationModel(model); //in case the Dealer for the company has the OnlineTradingNotifications checked we send the notification to the Trader //otherwise the trade can be executed (as there is no trader that can do the spread adjustment) if (model.SendNotification) { //send trade 
notification to the Trader site //do not continue the trade execution await _monitoring.CheckExecuteTrade(JsonConvert.SerializeObject(model)); return false; } else { //the trade should be executed because either a dealer is not associated with the company //or it does not have the OnlineTradingNotifications checked //or the Company's AllowOnlineTrading is set to false return true; } } } catch (System.Exception ex) { return false; } } public async Task RefreshClientDetails() { await _monitoring.RefreshClientDetails(); } private void FillInTradeNotificationModel(TradeNotificationModel model) { model.BuyCcy = model.BuyCcy.ToUpperInvariant(); model.SellCcy = model.SellCcy.ToUpperInvariant(); ClientCompanyModel clientCompanyModel = _clientCompanyService.GetClientCompany(model.ClientCompanyID); if(clientCompanyModel != null) { model.ClientCompanyName = clientCompanyModel.Name; model.DealerAppUserID = clientCompanyModel.DealerAppUserID; } IList<Models.Identity.UserModel> list = _userService.GetUserLoginDetails(model.AuthUserID); if(list != null && list.Count > 0) { model.LastLoginDate = list[0].LastLoginDate.Value; } Models.Identity.ApplicationServiceUser user = _userService.GetApplicationUserByAuthUserId(model.AuthUserID); if (user != null) { model.AuthUserName = user.Forename + " " + user.Surname; } model.TraderNotificationCounter = _appSettingService.GetTradeNotificationCounter(); model.SendNotification = false; if (model.DealerAppUserID.HasValue) { DataAccess.Entities.AppUser appUser = _userService.GetAppUserById(model.DealerAppUserID.Value); if (appUser != null) { model.SendNotification = appUser.OnlineTradingNotifications; } if (model.SendNotification) { DataAccess.Entities.ClientCompanyOnlineDetails clientCompanyOnlineDetails = _clientCompanyService.GetClientCompanyOnlineDetails(model.ClientCompanyID); if (clientCompanyOnlineDetails == null) { model.SendNotification = false; } else { model.SendNotification = clientCompanyOnlineDetails.AllowOnlineTrading; } } } model.Spread = _clientCompanyService.GetClientCompanySpread(model.ClientCompanyID, model.BuyCcy, model.SellCcy, model.IsBuy, model.ValueDate, model.ContractDate); } protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _appSettingService?.Dispose(); _httpService?.Dispose(); _clientCompanyService?.Dispose(); _userService?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/Model/ClientUserModel.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.UnitsOfWork.Users.Model { public class ClientUserModel { public long ID { get; set; } public string Title { get; set; } public string Forename { get; set; } public string Surname { get; set; } public string Username { get; set; } public string PasswordHash { get; set; } public string Email { get; set; } public int ClientCompanyId { get; set; } public int ClientCompanyContactId { get; set; } public int AuthUserId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public string Position { get; set; } public string PhoneNumberDirect { get; set; } public string PhoneNumberMobile { get; set; } public string PhoneNumberOther { get; set; } public DateTime? Birthday { get; set; } public bool IsApproved { get; set; } public bool PrimaryContact { get; set; } public string Notes { get; set; } public DateTime? 
LastPasswordChangeDate { get; set; } /// <summary> /// Refers to LastTelephoneChangeDate in the ClientCompanyContact Table /// </summary> public DateTime? LastPhoneNumberMobileChangeDate { get; set; } public DateTime? LastEmailChangeDate { get; set; } public string ASPNumber { get; set; } public DateTime? ASPCreationDate { get; set; } public string Fullname { get; set; } public bool Authorized { get; set; } public bool RecNotification { get; set; } public bool RecAmReport { get; set; } public bool RecActivityReport { get; set; } public bool IsDeleted { get; set; } public string BloombergGpi { get; set; } public string NiNumber { get; set; } public int[] AssignedCategoryIds { get; set; } public bool? IsLockedOut { get; set; } public string Comment { get; set; } public DateTime CreateDate { get; set; } public DateTime? LastLoginDate { get; set; } public DateTime? LastActivityDate { get; set; } public DateTime? LastLockOutDate { get; set; } public int FailedPasswordAttemptCount { get; set; } public DateTime FailedPasswordAttemptWindowStart { get; set; } public int ApplicationId { get; set; } public bool EmailConfirmed { get; set; } public bool IsAdmin { get; set; } public bool IsSignatory { get; set; } public bool IsAuthorisedSignatory { get; set; } public int? ApprovedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/UserAuditLogChanges.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class UserAuditLogChanges { public int Id { get; set; } public DateTime DateTime { get; set; } public string UserName { get; set; } public string UserRole { get; set; } public string IpAddress { get; set; } public string ActionType { get; set; } public string Data { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ArmreportField.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ArmreportField { public int Id { get; set; } public string Name { get; set; } public string Description { get; set; } public string BrokerValue { get; set; } public string ClientValue { get; set; } public string AppSettingKey { get; set; } public bool IsMandatory { get; set; } public bool IsBlank { get; set; } public bool? IsActive { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/SpreadAdjustmentModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.ClientCompany { public class SpreadAdjustmentModel { public int ClientCompanyID { get; set; } public string BuyCcy { get; set; } public string SellCcy { get; set; } public bool? IsBuy { get; set; } public int SpreadAdjustment { get; set; } public int UpdatedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactSearchContext.cs namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompanyContactSearchContext { public int? ClientCompanyContactId { get; set; } public int? 
AuthUsertId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/SmsSender/SmsSender.cs using Argentex.Core.Service.Enums; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Sms.Models; namespace Argentex.Core.Service.Sms.SmsSender { public class SmsSender : ISmsSender { #region Constructor & Dependencies private readonly ITextMagicService _textMagicService; public SmsSender(ITextMagicService textMagicService) { _textMagicService = textMagicService; } #endregion #region Properties private bool _disposed; #endregion /// <summary> /// Sending Sms Message with the selected service provider /// </summary> /// <param name="smsModel">Phone Number and Message</param> /// <param name="provider">Service provider that sends the sms /// currently only TextMagic is setup</param> /// <returns>If the message was sent successfuly</returns> public bool SendMessage(SmsModel smsModel, SmsProviders provider = 0) { switch (provider) { case SmsProviders.TextMagic: return _textMagicService.SendMessage(smsModel); default: return false; } } #region Dispose protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _textMagicService?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Notification/AppUserNotificationModel.cs using System; namespace Argentex.Core.Service.Models.Order { public class AppUserNotificationModel { public int ID { get; set; } public long AppUserID { get; set; } public int ClientCompanyID { get; set; } public bool TradeNotifications { get; set; } public bool InwardPaymentNotifications { get; set; } public bool OutwardPaymentNotifications { get; set; } public bool SettlementRequests { get; set; } public string Forename { get; set; } public string Surname { get; set; } public string Username { get; set; } public string Email { get; set; } public string ClientCompany { get; set; } public string AddedBy { get; set; } public string Position { get; set; } public string PhoneNumberDirect { get; set; } public string PhoneNumberMobile { get; set; } public bool IsApproved { get; set; } public bool IsAdmin { get; set; } public string AppClientUrl { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Http/IHttpService.cs using System; using System.Net.Http; using System.Threading.Tasks; namespace Argentex.ClientSite.Service.Http { public interface IHttpService : IDisposable { string GenerateUri<T>(string baseUri, T obj); Task<T> GetResponseObject<T>(HttpResponseMessage message); Task<string> GetResponseAsString(HttpResponseMessage message); Task<HttpResponseMessage> SendAsync(); void AddMethod(HttpMethod method); void AddRequestUri(string requestUri); void AddContent(HttpContent content); void AddBearerToken(string bearerToken); void AddAcceptHeader(string acceptHeader); void AddTimeout(TimeSpan timeout); } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Attributes/BoolRequiredAttribute.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Text; namespace Argentex.Core.Service.Attributes { public class BoolRequiredAttribute : ValidationAttribute { public override bool IsValid(object value) { return value is bool; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Swiftmessage.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Swiftmessage { public int Id { get; 
set; } public int PaymentId { get; set; } public DateTime CreatedDateTime { get; set; } public string SenderReference { get; set; } public string FileName { get; set; } public string LaufileName { get; set; } public string Xmlfile { get; set; } public string Laufile { get; set; } public string NakErrorCode { get; set; } public string ErrorFile { get; set; } public string HitErrorCode { get; set; } public string GenerationErrorMessages { get; set; } public DateTime? NakUpdatedDateTime { get; set; } public DateTime? HitUpdatedDateTime { get; set; } public Payment Payment { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyOnlineDetailsModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service { public class ClientCompanyOnlineDetailsModel { public int Id { get; set; } public int ClientCompanyId { get; set; } public bool AllowOnlineTrading { get; set; } public decimal? MaxTradeSize { get; set; } public decimal? MaxOpen { get; set; } public DateTime? MaxTenor { get; set; } public decimal? Collateral { get; set; } public int? SpotSpread { get; set; } public int? FwdSpread { get; set; } public bool? Kicked { get; set; } public string DealerFullName { get; set; } public string DealerPhoneNumber { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientCompanyContacts/Model/ClientCompanyContactSearchModel.cs namespace Argentex.Core.UnitsOfWork.ClientCompanyContacts.Model { public class ClientCompanyContactSearchModel { public int? ClientCompanyContactId { get; set; } public int? AuthUsertId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PipelineActionType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class PipelineActionType { public PipelineActionType() { PipelineAction = new HashSet<PipelineAction>(); } public int Id { get; set; } public string Description { get; set; } public ICollection<PipelineAction> PipelineAction { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PaymentSwiftoutgoingStatusTransitions.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class PaymentSwiftoutgoingStatusTransitions { public int FromStatusId { get; set; } public int ToStatusId { get; set; } public DateTime CreateDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftincomingFile.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftincomingFile { public SwiftincomingFile() { SwiftincomingFileStatement = new HashSet<SwiftincomingFileStatement>(); } public int Id { get; set; } public string Filename { get; set; } public DateTime CreatedDateTime { get; set; } public int? 
SwiftincomingFileTypeId { get; set; } public string Content { get; set; } public string ContentDecoded { get; set; } public string Laufilename { get; set; } public string LaufileContent { get; set; } public int SwiftincomingFileProcessingStatusId { get; set; } public string DisplayError { get; set; } public string ProcessingError { get; set; } public SwiftincomingFileProcessingStatus SwiftincomingFileProcessingStatus { get; set; } public SwiftincomingFileType SwiftincomingFileType { get; set; } public ICollection<SwiftincomingFileStatement> SwiftincomingFileStatement { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FixQuoteCancelled.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FixQuoteCancelled { public string QuoteId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/ClientCompanies/ClientCompanyAccountsControllerTests.cs using Argentex.Core.Api.Controllers.ClientCompanies; using Argentex.Core.Api.Models.ClientCompanies; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service; using Argentex.Core.Service.ClientCompanies; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Settlements; using Argentex.Core.UnitsOfWork.ClientCompanies; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System.Collections.Generic; using System.Linq; using System.Net; using Xunit; namespace Argentex.Core.Api.Tests.ClientCompanies { public class ClientCompanyAccountsControllerTests { [Fact] public void Given_There_Are_No_Accounts_For_A_company_A_No_Content_Result_Should_Be_Returned() { // Given var clientCompanyId = 42; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var settlementServiceMock = new Mock<ISettlementService>(); var loggerMock = new Mock<ILogWrapper>(); clientCompanyAccountsServiceMock.Setup(x => x.GetClientCompanyAccounts(It.IsAny<int>())) .Returns(new List<ClientCompanyAccountModel>()); var controller = new ClientCompanyAccountsController(clientCompanyAccountsServiceMock.Object, settlementServiceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.NoContent; // When var response = controller.GetClientCompanyAccounts(clientCompanyId); var result = response as NoContentResult; // Then Assert.NotNull(result); Assert.Equal((int) expectedStatusCode, result.StatusCode); } [Fact] public void Given_A_ClientCompanyNotFoundException_Is_Caught_A_Bad_Request_Result_Should_Be_Returned() { // Given var clientCompanyId = 42; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var settlementServiceMock = new Mock<ISettlementService>(); var loggerMock = new Mock<ILogWrapper>(); clientCompanyAccountsServiceMock.Setup(x => x.GetClientCompanyAccounts(It.IsAny<int>())) .Throws(new ClientCompanyNotFoundException($"Client company with id {clientCompanyId} does not exist")); var controller = new ClientCompanyAccountsController(clientCompanyAccountsServiceMock.Object, settlementServiceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedValue = $"Client company with id {clientCompanyId} does not exist"; // When var response = controller.GetClientCompanyAccounts(clientCompanyId); var result = response as BadRequestObjectResult; // Then Assert.NotNull(result); Assert.Equal((int)expectedStatusCode, result.StatusCode); 
Assert.Equal(expectedValue, result.Value); } [Fact] public void Given_A_Company_Has_A_List_Of_Accounts_An_Ok_Object_Result_Should_Be_Returned() { // Given var clientCompanyId = 42; var clientCompanyAccounts = new List<ClientCompanyAccountModel> { new ClientCompanyAccountModel { ClientCompanyId = 42, AccountName = "<NAME>", AccountNumber = "123456", Currency = "GBP" } }; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var settlementServiceMock = new Mock<ISettlementService>(); var loggerMock = new Mock<ILogWrapper>(); clientCompanyAccountsServiceMock.Setup(x => x.GetClientCompanyAccounts(It.IsAny<int>())) .Returns(clientCompanyAccounts); var controller = new ClientCompanyAccountsController(clientCompanyAccountsServiceMock.Object, settlementServiceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedClientCompanyId = 42; var expectedAccountName = "<NAME>"; var expectedAccountNumber = "123456"; var expectedCurrency = "GBP"; // When var response = controller.GetClientCompanyAccounts(clientCompanyId); var result = response as OkObjectResult; // Then Assert.NotNull(result); Assert.Equal((int)expectedStatusCode, result.StatusCode); var content = result.Value as IEnumerable<ClientCompanyAccountDto>; Assert.NotNull(content); Assert.True(content.Any()); var firstAccount = content.First(); Assert.Equal(expectedClientCompanyId, firstAccount.ClientCompanyId); Assert.Equal(expectedAccountName, firstAccount.AccountName); Assert.Equal(expectedAccountNumber, firstAccount.AccountNumber); Assert.Equal(expectedCurrency, firstAccount.Currency); } [Fact] public void AddClientCompanyAccount_Success_With_Valid_Model_Input() { //Arrange var mockService = new Mock<IClientCompanyAccountsService>(); var settlementServiceMock = new Mock<ISettlementService>(); mockService.Setup(x => x.AddSettlementAccount(It.IsAny<SettlementAccountModel>())); var controller = new ClientCompanyAccountsController(mockService.Object, settlementServiceMock.Object, null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; var account = new SettlementAccountModel { CurrencyId = 1, CountryId = 1, AccountName = "<NAME>", AccountNumber = 1, UpdatedByAuthUserId = 1, ClientCompanyId = 1 }; //Act var result = controller.AddClientCompanyAccount(account); //Assert Assert.IsType<OkResult>(result); } [Fact] public void DeleteSettlementAccount_Should_Fail_If_clientCompanyOpiId_Is_Zero() { //Arrange int clientCompanyOpiId = 0; int authUserId = 10; var controller = new ClientCompanyAccountsController(null, null, null); //Act var result = controller.DeleteSettlementAccount(clientCompanyOpiId, authUserId); //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void DeleteSettlementAccount_Success_With_Valid_clientCompanyOpiId_Input() { //Arrange int clientCompanyOpiId = 110; int authUserId = 10; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var clientCompanyAccountsUowMock = new Mock<IClientCompanyAccountsUoW>(); var settlementServiceMock = new Mock<ISettlementService>(); clientCompanyAccountsUowMock.Setup(x => x.GetClientCompanyAccount(It.IsAny<int>())).Returns(new ClientCompanyOpi()); clientCompanyAccountsUowMock.Setup(x => x.UpdateAccount(It.IsAny<ClientCompanyOpi>())); clientCompanyAccountsUowMock.Setup(x => x.GetSettlementIDs(It.IsAny<int>())).Returns(new List<long>()); 
settlementServiceMock.Setup(x => x.DeleteAssignedSettlements(It.IsAny<long>())); var controller = new ClientCompanyAccountsController(clientCompanyAccountsServiceMock.Object, settlementServiceMock.Object, null); //Act var result = controller.DeleteSettlementAccount(clientCompanyOpiId, authUserId); //Assert Assert.IsType<OkResult>(result); } [Fact] public void GetNumberOfAssignedSettlements_Should_Fail_If_clientCompanyOpiId_Is_Zero() { //Arrange int clientCompanyOpiId = 0; var controller = new ClientCompanyAccountsController(null, null, null); //Act var result = controller.GetNumberOfAssignedSettlements(clientCompanyOpiId); //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void GetNumberOfAssignedSettlements_Success_With_Valid_clientCompanyOpiId_Input() { //Arrange int clientCompanyOpiId = 110; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var clientCompanyAccountsUowMock = new Mock<IClientCompanyAccountsUoW>(); clientCompanyAccountsUowMock.Setup(x => x.GetAssociatedTradesCount(It.IsAny<int>(), It.IsAny<int>())); var controller = new ClientCompanyAccountsController(clientCompanyAccountsServiceMock.Object, null, null); //Act var result = controller.GetNumberOfAssignedSettlements(clientCompanyOpiId); //Assert Assert.IsType<OkObjectResult>(result); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogComplianceIsincurrencyValueDate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogComplianceIsincurrencyValueDate { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string Isin { get; set; } public string CurrencyPair { get; set; } public DateTime ValueDate { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/SmsSender/ISmsSender.cs using Argentex.Core.Service.Enums; using Argentex.Core.Service.Sms.Models; using System; namespace Argentex.Core.Service.Sms.SmsSender { public interface ISmsSender : IDisposable { bool SendMessage(SmsModel smsModel, SmsProviders provider = 0); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxoptionSettlementsTemplate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxoptionSettlementsTemplate { public int Id { get; set; } public int? FxoptionTypeId { get; set; } public string Template { get; set; } public bool? IsBuy { get; set; } public string TradeCodeSuffix { get; set; } public string Notional { get; set; } public string ClientRate { get; set; } public int? GroupNum { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CurrencyPairPricing.cs using System; namespace Argentex.Core.DataAccess.Entities { public class CurrencyPairPricing { public string CurrencyPair { get; set; } public double Rate { get; set; } public DateTime? 
RateTimeStamp { get; set; } public DateTime FeedTimeStamp { get; set; } public string RateCurrencyPair { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Country/CountryService.cs using Argentex.Core.Service.Models.Country; using Argentex.Core.UnitsOfWork.Countries; using System.Collections.Generic; using System.Linq; namespace Argentex.Core.Service.Country { public class CountryService : ICountryService { private readonly ICountryUow _countryUow; private bool _disposed; public CountryService(ICountryUow countryUow) { _countryUow = countryUow; } public IEnumerable<CountryModel> GetCountries() { return _countryUow.GetCountries() .Select(x => new CountryModel { Id = x.Id, Name = x.Name }) .ToList(); } protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _countryUow?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientSiteActionStatus.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientSiteActionStatus { public ClientSiteActionStatus() { ClientSiteAction = new HashSet<ClientSiteAction>(); } public int Id { get; set; } public string Name { get; set; } public string Details { get; set; } public ICollection<ClientSiteAction> ClientSiteAction { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthRolePermission.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthRolePermission { public int RoleId { get; set; } public int PermissionId { get; set; } public AuthPermission Permission { get; set; } public AuthRole Role { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogFxoption.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogFxoption { public int LogId { get; set; } public string LogAction { get; set; } public string Code { get; set; } public DateTime CreatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime UpdatedDate { get; set; } public int UpdatedByAuthUserId { get; set; } public int ClientCompanyId { get; set; } public int? AuthorisedByClientCompanyContactId { get; set; } public int? TradeInstructionMethodId { get; set; } public int? BrokerId { get; set; } public int? VerifiedByAuthUserId { get; set; } public DateTime? ContractDate { get; set; } public DateTime? ExpiryDate { get; set; } public string CurrencyPair { get; set; } public int? FxoptionTypeId { get; set; } public int? FxoptionSettlementId { get; set; } public int? FxoptionStatusId { get; set; } public int? Lhsccyid { get; set; } public int? Rhsccyid { get; set; } public decimal? ClientLhsamt { get; set; } public decimal? ClientRhsamt { get; set; } public decimal? ProtectedLevel { get; set; } public decimal? PercentagePart { get; set; } public decimal? Premium { get; set; } public int? SettlementTradeId { get; set; } public DateTime? SettledDate { get; set; } public decimal? ExtBarrier { get; set; } public DateTime? ExtBarrierDate { get; set; } public DateTime? ExtValueDate { get; set; } public decimal? ExtStrike { get; set; } public decimal? LevBarrier { get; set; } public DateTime? LevBarrierDate { get; set; } public DateTime? LevValueDate { get; set; } public decimal? LevStrike { get; set; } public decimal? Profit { get; set; } public int? 
TransactionCommitId { get; set; } public int? ClientCompanyOpiid { get; set; } public DateTime? DeliveredDate { get; set; } public DateTime? CommPaidOutDate { get; set; } public bool? Verified { get; set; } public bool? IsBuy { get; set; } public bool? IsExtended { get; set; } public bool? IsLeveraged { get; set; } public bool? Deleted { get; set; } public bool? IsRhsmajour { get; set; } public decimal? OptionTrigger { get; set; } public decimal? OptionTriggerProtecLvl { get; set; } public decimal? BestCaseRate { get; set; } public decimal? WorstCaseRate { get; set; } public decimal? KnockOutRate { get; set; } public decimal? KnockInRate { get; set; } public decimal? LevNotional { get; set; } public bool? IsExpired { get; set; } public string ParentCode { get; set; } public bool? IsGenerated { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime? ValueDate { get; set; } public decimal? Barrier { get; set; } public decimal? ForwardRate { get; set; } public decimal? ClientLhsamtNotional { get; set; } public decimal? ClientRhsamtNotional { get; set; } public string GraphImgTemplateFile { get; set; } public bool? IsKnockedIn { get; set; } public bool? IsKnockedOut { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/User/IUserService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Models.Identity; using Argentex.Core.UnitsOfWork.Users.Model; using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.Security.Principal; using System.Threading.Tasks; using Argentex.Core.Service.Enums; namespace Argentex.Core.Service.User { public interface IUserService : IDisposable { IEnumerable<ApplicationServiceUser> GetApplicationUsersOfCompany(int clientCompanyId); IEnumerable<ApplicationServiceUser> GetUnapprovedApplicationUsers(); Task<ApplicationServiceUser> GetApplicationUserAsync(string userId); Task<IdentityResult> AddUnapprovedUserAsync(ApplicationServiceUser serviceUser); Task<IdentityResult> SendUserNewPasswordEmailAsync(ApplicationServiceUser serviceUser, string clientCompanyName); Task<IdentityResult> UpdateUserAsync(ApplicationServiceUser user); Task<IdentityResult> UpdateUserContactAsync(ApplicationServiceUser user); Task<IdentityResult> UpdateMyAccountAsync(ApplicationServiceUser user); Task<IList<IdentityResult>> ApproveUsersAsync(ApproveUsersRequest approveUserRequests, ICollection<ClientCompaniesModel> clientCompanies); Task<IList<IdentityResult>> AuthoriseSignatoryAsync(AuthoriseSignatoryRequest authoriseSignatoryRequests, ICollection<ClientCompaniesModel> clientCompanies); Task<IdentityResult> DeleteUserAsync(string userId); Task<IdentityResult> DeleteUserContactAsync(int clientCompanyContactId); ApplicationServiceUser GetApplicationUserByAuthUserId(int authUserId); AuthUser GetAuthUserById(int authUserId); AppUser GetFXDBAppUserById(int appUserId); IList<UserModel> GetUserLoginDetails(IList<int> clientCompanyIDs); IList<UserModel> GetUserLoginDetails(int authUserId); AppUser GetAppUserById(int authUserId); IList<ClientCompanyContactModel> GetAuthorisedSignatories(int clientCompanyId); IList<PendingApprovalUserChangeRequest> GetPendingChangeRequest(); Task<ApproveUserChangeResponse> ApproveUserChangeRequest(ApproveUserChangeRequest approveUserChangeRequest); RequestOrigin GetRequestOrigin(IIdentity userIdentity); string GenerateUniqueUsername(string initialValue); } } 
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Helpers/QuoteHelpers.cs using System; using Argentex.Core.Api.Models.Quotes; using Argentex.Core.Service.Models.Fix; namespace Argentex.Core.Api.Helpers { public static class QuoteHelpers { public static FixQuoteRequestModel CreateFixQuoteRequestModel(QuoteRequestDto quoteRequest, string tradeCode) { return new FixQuoteRequestModel { TradeCode = tradeCode, LHSCCY = quoteRequest.LeftCurrency, RHSCCY = quoteRequest.RightCurrency, //MajorCurrency = quoteRequest.IsBuy ? quoteRequest.RightCurrency : quoteRequest.LeftCurrency, MajorCurrency = quoteRequest.IsRhsMajor ? quoteRequest.RightCurrency : quoteRequest.LeftCurrency, Side = quoteRequest.IsBuy ? 1 : 2, BrokerMajorAmount = quoteRequest.Amount, ValueDate = quoteRequest.ValueDate.ToString("yyyy-MM-dd"), TimeOut = 10000, Duration = 35 }; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Trades/TradeUow.cs using Argentex.Core.DataAccess.Entities; using Microsoft.EntityFrameworkCore; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System; using System.Data; using System.Data.SqlClient; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.UnitsOfWork.Trades { public class TradeUow : BaseUow, ITradeUow { private IGenericRepo<FxforwardTrade> _tradeRepository; private IGenericRepo<FxforwardTradeSwapCount> _tradeSwapCountRepository; private IGenericRepo<LogFxforwardTrade> _tradeLogRepository; private IGenericRepo<Currency> _currencyRepo; private IGenericRepo<CurrencyPairValidation> _currencyPairValidationRepo; private IGenericRepo<ClientCompanyTradeCount> _tradeCountRepo; private IGenericRepo<VirtualAccountTransaction> _virtualAccountTransactionRepo; private IGenericRepo<VirtualAccountType> _virtualAccountTypeRepo; private IGenericRepo<BankAccountTransaction> _bankAccountTransactionRepo; private IGenericRepo<ClientCompanyVirtualAccount> _clientCompanyVirtualAccountRepo; private IGenericRepo<Broker> _brokerRepo; private IGenericRepo<FxforwardTradeStatus> _tradeStatusRepository; private IGenericRepo<Emirstatus> _emirStatusRepository; private IGenericRepo<TradeInstructionMethod> _tradeInstructionMethodRepository; #region Properties private IGenericRepo<FxforwardTrade> TradeRepository => _tradeRepository = _tradeRepository ?? new GenericRepo<FxforwardTrade>(Context); private IGenericRepo<FxforwardTradeSwapCount> TradeSwapCountRepository => _tradeSwapCountRepository = _tradeSwapCountRepository ?? new GenericRepo<FxforwardTradeSwapCount>(Context); private IGenericRepo<LogFxforwardTrade> TradeLogRepository => _tradeLogRepository = _tradeLogRepository ?? new GenericRepo<LogFxforwardTrade>(Context); private IGenericRepo<Currency> CurrencyRepo => _currencyRepo = _currencyRepo ?? new GenericRepo<Currency>(Context); private IGenericRepo<CurrencyPairValidation> CurrencyPairValidationRepo => _currencyPairValidationRepo = _currencyPairValidationRepo ?? new GenericRepo<CurrencyPairValidation>(Context); private IGenericRepo<VirtualAccountTransaction> VirtualAccountTransactionRepository => _virtualAccountTransactionRepo = _virtualAccountTransactionRepo ?? new GenericRepo<VirtualAccountTransaction>(Context); private IGenericRepo<VirtualAccountType> VirtualAccountTypeRepository => _virtualAccountTypeRepo = _virtualAccountTypeRepo ?? new GenericRepo<VirtualAccountType>(Context); private IGenericRepo<ClientCompanyVirtualAccount> ClientCompanyVirtualAccountRepository => _clientCompanyVirtualAccountRepo = _clientCompanyVirtualAccountRepo ?? 
new GenericRepo<ClientCompanyVirtualAccount>(Context); private IGenericRepo<BankAccountTransaction> BankAccountTransactionRepository => _bankAccountTransactionRepo = _bankAccountTransactionRepo ?? new GenericRepo<BankAccountTransaction>(Context); private IGenericRepo<ClientCompanyTradeCount> ClientCompanyTradeCountRepository => _tradeCountRepo = _tradeCountRepo ?? new GenericRepo<ClientCompanyTradeCount>(Context); private IGenericRepo<Broker> BrokerRepository => _brokerRepo = _brokerRepo ?? new GenericRepo<Broker>(Context); private IGenericRepo<FxforwardTradeStatus> TradeStatusRepository => _tradeStatusRepository = _tradeStatusRepository ?? new GenericRepo<FxforwardTradeStatus>(Context); private IGenericRepo<Emirstatus> EmirStatusRepository => _emirStatusRepository = _emirStatusRepository ?? new GenericRepo<Emirstatus>(Context); private IGenericRepo<TradeInstructionMethod> TradeInstructionMethodRepository => _tradeInstructionMethodRepository = _tradeInstructionMethodRepository ?? new GenericRepo<TradeInstructionMethod>(Context); #endregion public TradeUow(FXDB1Context context) : base(context) { } public DataTable GetUnsettledTrades(int clientCompanyId) { DataTable dt = new DataTable(); using (SqlConnection sqlConn = (SqlConnection)Context.Database.GetDbConnection()) { string sql = "ClientCompanyGetClientOpenTradeSummary"; using (SqlCommand sqlCmd = new SqlCommand(sql, sqlConn)) { sqlCmd.CommandType = CommandType.StoredProcedure; sqlCmd.Parameters.AddWithValue("@ClientCompanyID", clientCompanyId); sqlConn.Open(); using (SqlDataAdapter sqlAdapter = new SqlDataAdapter(sqlCmd)) { sqlAdapter.Fill(dt); } } } return dt; } public IQueryable<Currency> GetCurrencies() { return CurrencyRepo.GetQueryable(); } public IQueryable<CurrencyPairValidation> GetCurrencyPairValidation() { return CurrencyPairValidationRepo.GetQueryable(); } public bool ExecuteOrder(FxforwardTrade trade, ClientCompanyTradeCount tradeCountObject) { bool isSuccessful = false; ClientCompanyTradeCountRepository.Update(tradeCountObject); TradeSwapCountRepository.Insert(new FxforwardTradeSwapCount { FxforwardTradeCode = trade.Code, SwapCount = 0}); TradeRepository.Insert(trade); SaveContext(); isSuccessful = true; return isSuccessful; } public bool CreateDeal(FxforwardTrade trade, ClientCompanyTradeCount tradeCountObject) { bool isSuccessful = false; ClientCompanyTradeCountRepository.Update(tradeCountObject); TradeSwapCountRepository.Insert(new FxforwardTradeSwapCount { FxforwardTradeCode = trade.Code, SwapCount = 0 }); TradeRepository.Insert(trade); InsertTradeLog(trade, "INSERT"); SaveContext(); CreateTradeTransactions(trade); SaveContext(); isSuccessful = true; return isSuccessful; } public bool BrokerDeal(FxforwardTrade trade, ClientCompanyTradeCount tradeCountObject) { bool isSuccessful = false; TradeRepository.Update(trade); InsertTradeLog(trade, "UPDATE"); SaveContext(); BrokerTradeTransactions(trade); SaveContext(); isSuccessful = true; return isSuccessful; } public void RejectOrder(FxforwardTrade trade) { trade.Deleted = true; TradeRepository.Update(trade); SaveContext(); } public ClientCompanyTradeCount GetTradeCountByPrimaryKey(int clientCompanyId) { var tradeCountObject = ClientCompanyTradeCountRepository.GetByPrimaryKey(clientCompanyId); if(tradeCountObject == null) throw new NullReferenceException($"Cannot find trade count for ClientCompanyId: {clientCompanyId}"); return tradeCountObject; } public IQueryable<FxforwardTrade> GetTrade(string tradeCode) { return TradeRepository.GetQueryable(x => x.Code == tradeCode); } 
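/// <summary>
/// Returns the ClientCompanyTradeCount rows for the given client company as a queryable
/// (empty when the company has no trade count record yet).
/// </summary>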
public IQueryable<ClientCompanyTradeCount> GetClientCompanyTradeCount(int clientCompanyId) { return ClientCompanyTradeCountRepository.GetQueryable(x => x.ClientCompanyId == clientCompanyId); } public DataTable GetClosedTrades(int clientCompanyId) { DataTable dt = new DataTable(); using (SqlConnection sqlConn = (SqlConnection)Context.Database.GetDbConnection()) { string sql = "ClientCompanyGetClientDeliveredTradeSummary"; using (SqlCommand sqlCmd = new SqlCommand(sql, sqlConn)) { sqlCmd.CommandType = CommandType.StoredProcedure; sqlCmd.Parameters.AddWithValue("@ClientCompanyID", clientCompanyId); sqlConn.Open(); using (SqlDataAdapter sqlAdapter = new SqlDataAdapter(sqlCmd)) { sqlAdapter.Fill(dt); } } } return dt; } public IQueryable<FxforwardTrade> GetOpenOrders(int clientCompanyId) { var pendingStatus = TradeStatusRepository.Get(x => x.Description == "Pending").SingleOrDefault(); var orders = TradeRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId && x.IsOrder == true && x.Deleted == false && x.FxforwardTradeStatusId == pendingStatus.Id ).OrderByDescending(x => x.CreatedDate); return orders; } /// <summary> /// Getting open orders that have the validity date expired /// </summary> /// <returns>FxforwardTrade</returns> public IQueryable<FxforwardTrade> GetExpiredValidityOrders() { var pendingStatus = TradeStatusRepository.Get(x => x.Description == "Pending").SingleOrDefault(); var validityDateCompare = new DateTime(DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day, 18, 0, 0); var orders = TradeRepository .GetQueryable(x => x.IsOrder == true && x.Deleted == false && x.OpenValueDate < validityDateCompare && x.FxforwardTradeStatusId == pendingStatus.Id ).OrderByDescending(x => x.CreatedDate); return orders; } public void UpdateTrade(FxforwardTrade trade) { _tradeRepository.Update(trade); SaveContext(); } public FxforwardTrade GetTrade(string tradeCode, bool getAdditionalProperties) { if(getAdditionalProperties) { return TradeRepository .GetQueryable(x => x.Code == tradeCode, orderBy: null, includeProperties: "Rhsccy,Lhsccy,ClientCompanyNavigation,ClientCompanyOpi") .Single(); } else { return GetTrade(tradeCode).Single(); } } public FxforwardTradeStatus GetFxForwardStatus(string statusDescription) { return TradeStatusRepository.Get(x => x.Description == statusDescription).SingleOrDefault(); } public Emirstatus GetEmirStatus(string emirStatusDescription) { return EmirStatusRepository.Get(x => x.Description == emirStatusDescription).SingleOrDefault(); } public TradeInstructionMethod GetTradeInstructionMethod(string tradeInstructionMethod) { return TradeInstructionMethodRepository.Get(x => x.Description == tradeInstructionMethod).SingleOrDefault(); } public Broker GetBroker(string brokerDescription) { return BrokerRepository.Get(x => x.Description == brokerDescription).SingleOrDefault(); } public async Task<bool> CancelOrder(string code) { var order = TradeRepository.GetByPrimaryKey(code); order.Deleted = true; TradeRepository.Update(order); await SaveContextAsync(); return order.Deleted; } // This method was created because there is a connection conflict if using GetUnsettledTrades() when connection is opened and closed public DataTable GetUnsettledTradesForBalanceCalculation(int clientCompanyId) { DataTable dt = new DataTable(); SqlConnection sqlConn = (SqlConnection)Context.Database.GetDbConnection(); string sql = "ClientCompanyGetClientOpenTradeSummary"; using (SqlCommand sqlCmd = new SqlCommand(sql, sqlConn)) { sqlCmd.CommandType = CommandType.StoredProcedure; 
sqlCmd.Parameters.AddWithValue("@ClientCompanyID", clientCompanyId); using (SqlDataAdapter sqlAdapter = new SqlDataAdapter(sqlCmd)) { sqlAdapter.Fill(dt); } } return dt; } #region Private methods private void CreateTradeTransactions(FxforwardTrade trade) { CheckAccountExistsAndIfNotCreate(trade.ClientCompanyId); var debitVat = CreateVirtualAccountTransaction(trade, "X", true, false); VirtualAccountTransactionRepository.Insert(debitVat); var creditVat = CreateVirtualAccountTransaction(trade, "Y", false, false); VirtualAccountTransactionRepository.Insert(creditVat); } private void BrokerTradeTransactions(FxforwardTrade trade) { var debitBat = CreateBankAccountTransaction(trade, true); BankAccountTransactionRepository.Insert(debitBat); var creditBat = CreateBankAccountTransaction(trade, false); BankAccountTransactionRepository.Insert(creditBat); bool isDebit = false; if (trade.Profit <= 0) { isDebit = true; trade.Profit = trade.Profit * -1; } var vat = CreateVirtualAccountTransaction(trade, "Y", isDebit, true); VirtualAccountTransactionRepository.Insert(vat); } private BankAccountTransaction CreateBankAccountTransaction(FxforwardTrade trade, bool isDebit) { var barclaysBroker = BrokerRepository.GetByPrimaryKey(trade.BrokerId); BankAccountTransaction bat = new BankAccountTransaction { BankAccountId = isDebit ? barclaysBroker.BankAccountBrokerPaymentsOutId.Value : barclaysBroker.BankAccountBrokerPaymentsInId.Value, CurrencyId = isDebit ? trade.IsBuy ? trade.Rhsccyid.Value : trade.Lhsccyid.Value : trade.IsBuy ? trade.Lhsccyid.Value : trade.Rhsccyid.Value, Amount = isDebit ? trade.IsBuy ? trade.BrokerRhsamt.Value : trade.BrokerLhsamt.Value : trade.IsBuy ? trade.BrokerLhsamt.Value : trade.BrokerRhsamt.Value, IsDebit = isDebit, PaymentId = null, FxforwardTradeCode = trade.Code }; return bat; } private VirtualAccountTransaction CreateVirtualAccountTransaction(FxforwardTrade trade, string accountDescription, bool isDebit, bool isProfitTransaction) { VirtualAccountTransaction vat; if (isProfitTransaction) { vat = new VirtualAccountTransaction { VirtualAccountId = GetHouseAccountId(accountDescription), CurrencyId = trade.IsRhsmajor.Value ? trade.Lhsccyid.Value : trade.Rhsccyid.Value, Amount = trade.Profit, IsDebit = isDebit, PaymentId = null, FxforwardTradeCode = trade.Code, IsProfitTransaction = true }; } else { vat = new VirtualAccountTransaction { VirtualAccountId = GetVirtualAccountId(accountDescription, trade.ClientCompanyId), CurrencyId = isDebit ? trade.IsBuy ? trade.Rhsccyid.Value : trade.Lhsccyid.Value : trade.IsBuy ? trade.Lhsccyid.Value : trade.Rhsccyid.Value, Amount = isDebit ? trade.IsBuy ? trade.BrokerRhsamt.Value : trade.BrokerLhsamt.Value : trade.IsBuy ? 
trade.BrokerLhsamt.Value : trade.BrokerRhsamt.Value, IsDebit = isDebit, PaymentId = null, FxforwardTradeCode = trade.Code, IsProfitTransaction = false }; } return vat; } private void CheckAccountExistsAndIfNotCreate(int clientCompanyId) { bool associationExists = true; if (!ClientCompanyVirtualAccountExists(clientCompanyId, "X")) { VirtualAccountCreate(clientCompanyId, "X"); associationExists = false; } if (!ClientCompanyVirtualAccountExists(clientCompanyId, "Y")) { VirtualAccountCreate(clientCompanyId, "Y"); associationExists = false; } if (!associationExists) { if (!ClientCompanyVirtualAccountExists(clientCompanyId, "A")) { VirtualAccountCreate(clientCompanyId, "A"); } if (!ClientCompanyVirtualAccountExists(clientCompanyId, "B")) { VirtualAccountCreate(clientCompanyId, "B"); } if (!ClientCompanyVirtualAccountExists(clientCompanyId, "Collateral")) { VirtualAccountCreate(clientCompanyId, "Collateral"); } } } private void VirtualAccountCreate(int clientCompanyId, string description) { ClientCompanyVirtualAccount ccva = new ClientCompanyVirtualAccount { ClientCompanyId = clientCompanyId, VirtualAccountTypeId = GetVirtualAccountTypeId(description) }; ClientCompanyVirtualAccountRepository.Insert(ccva); SaveContext(); } private bool ClientCompanyVirtualAccountExists(int clientCompanyId, string description) { var account = ClientCompanyVirtualAccountRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId && x.VirtualAccountType.Description == description); return account.Any(); } private int GetVirtualAccountTypeId(string accountDescription) { var VirtualAccountTypeId = VirtualAccountTypeRepository .GetQueryable(x => x.Description == accountDescription) .Select(vat => vat.Id).FirstOrDefault(); return VirtualAccountTypeId; } private int GetVirtualAccountId(string accountDescription, int clientCompanyId) { var clientCompanyVirtualAccountId = ClientCompanyVirtualAccountRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId && x.VirtualAccountType.Description == accountDescription) .Select(ccva => ccva.Id).FirstOrDefault(); return clientCompanyVirtualAccountId; } private int GetHouseAccountId(string accountDescription) { var clientCompanyVirtualAccountId = ClientCompanyVirtualAccountRepository .GetQueryable(x => x.ClientCompany.IsHouseAccount == true && x.VirtualAccountType.Description == accountDescription) .Select(ccva => ccva.Id).FirstOrDefault(); return clientCompanyVirtualAccountId; } private void InsertTradeLog(FxforwardTrade trade, string action) { var tradeLog = new LogFxforwardTrade { LogAction = action, UpdatedDate = DateTime.Now, Code = trade.Code, CreatedDate = trade.CreatedDate, CreatedByAuthUserId = trade.CreatedByAuthUserId, ClientCompanyId = trade.ClientCompanyId, AuthorisedByClientCompanyContactId = trade.AuthorisedByClientCompanyContactId, Verified = trade.Verified, ContractDate = trade.ContractDate, ValueDate = trade.ValueDate, IsOrder = trade.IsOrder, CurrencyPair = trade.CurrencyPair, IsBuy = trade.IsBuy, Lhsccyid = trade.Lhsccyid, Rhsccyid = trade.Rhsccyid, ClientRate = trade.ClientRate, BrokerRate = trade.BrokerRate, CollateralPerc = trade.CollateralPerc, UpdatedByAuthUserId = trade.UpdatedByAuthUserId, IsRhsmajor = trade.IsRhsmajor, ProfitConsolidated = trade.ProfitConsolidated, Deleted = trade.Deleted, EmirReported = trade.EmirReported, IsComplianceSupported = trade.IsComplianceSupported, IsComplianceRegulated = trade.IsComplianceRegulated, TradeInstructionMethodId = trade.TradeInstructionMethodId, FxforwardTradeStatusId = trade.FxforwardTradeStatusId, 
EmirUti = trade.EmirUti, BrokerId = trade.BrokerId, ClientLhsamt = trade.ClientLhsamt, BrokerLhsamt = trade.BrokerLhsamt, ClientRhsamt = trade.ClientRhsamt, BrokerRhsamt = trade.BrokerRhsamt, RemainingClientLhsamt = trade.RemainingClientLhsamt, RemainingClientRhsamt = trade.RemainingClientRhsamt, Profit = trade.Profit }; TradeLogRepository.Insert(tradeLog); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/ApplicationRoleClaim.cs using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class ApplicationRoleClaim : IdentityRoleClaim<long> { } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogCurrency.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogCurrency { public long LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string Code { get; set; } public string Description { get; set; } public string SwiftAmountFormat { get; set; } public byte[] UpdateTimeStamp { get; set; } public int CreatedByAuthUserId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdateDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Notification/NotificationTypeModel.cs using Argentex.Core.Service.Attributes; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Order { public class NotificationTypeModel { public int ID { get; set; } public string Name { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Synetec.Data.UnitOfWork/BaseUnitOfWork/BaseUoW.cs using Microsoft.EntityFrameworkCore; using System; using System.Threading; using System.Threading.Tasks; namespace Synetec.Data.UnitOfWork.BaseUnitOfWork { public class BaseUow : IBaseUow { private DbContext _context; private bool _disposed = false; public BaseUow(DbContext context) { _context = context; } public DbContext Context { get { if (_disposed) { throw new ObjectDisposedException("BaseUow: database connection was disposed"); } return _context; //= _context ?? new DbContext(); } } public int SaveContext() { return _context.SaveChanges(); } public async Task<int> SaveContextAsync() { return await _context.SaveChangesAsync(); } public async Task<int> SaveContextAsync(CancellationToken cancellationToken = default(CancellationToken)) { return await _context.SaveChangesAsync(cancellationToken); } public async Task<int> SaveContextAsync(bool acceptAllChangesOnSuccess, CancellationToken cancellationToken = default(CancellationToken)) { return await _context.SaveChangesAsync(acceptAllChangesOnSuccess, cancellationToken); } public DbContext DbContext { get { if (_disposed || _context == null) { throw new ObjectDisposedException("BaseUow: database connection was disposed"); } return _context;// = _context ?? 
new DbContext(); } } //*****************************Context disposal****************************** protected virtual void Dispose(bool disposing) { if (_disposed) return; if (disposing) { if (_context != null) { _context.Dispose(); _context = null; } } _disposed = true; } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientCompanyContacts/IClientCompanyUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Argentex.Core.UnitsOfWork.ClientCompanyContacts.Model; namespace Argentex.Core.UnitsOfWork.ClientCompanyContacts { public interface IClientCompanyUow : IBaseUow { IGenericRepo<ClientCompanyContact> ClientCompanyContactRepository { get; } IQueryable<ClientCompany> GetClientCompany(int clientCompanyId); IQueryable<ClientCompanyOpi> GetClientCompanyAccounts(int clientCompanyId); IQueryable<ClientCompanyContact> GetClientCompanyContact(int clientCompanyId); IQueryable<ClientCompany> GetClientCompanies(); void UpdateCompanyQualifiedTradeDetails(int clientCompanyId, string qualifiedTradeCode, int authUserId); void UpdateCompanyFirstTradeDate(int clientCompanyId, int authUserId); void UpdateCompanyLastContractDate(int clientCompanyId, DateTime? contractDate, int authUserId); IQueryable<ClientCompanyOnlineDetails> GetClientCompanyOnlineDetails(int clientCompanyId); IQueryable<ClientCompanyOnlineDetailsSkew> GetClientCompanyOnlineDetailsSkew(int clientCompanyId, int currency1Id, int currency2Id, bool isBuy); IQueryable<ClientCompanyOnlineSpreadAdjustment> GetClientCompanyOnlineSpreadAdjustment(int clientCompanyId, int currency1Id, int currency2Id, bool isBuy); void AddClientCompanyOnlineSpreadAdjustment(ClientCompanyOnlineSpreadAdjustment model); void SetClientCompanyOnlineKicked(int clientCompanyId); IQueryable<ClientCompanyContactCategory> GetClientCompanyContactCategories(int clientCompanyContactId); IQueryable<ContactCategory> GetContactCategories(); void AddContactCategory(ContactCategory entity); IQueryable<ContactCategory> GetContactCategory(int contactCategoryId); IQueryable<ContactCategory> GetContactCategory(string contactCategoryDescription); bool ProcessClientCompanyContactCategories(List<int> unassignClientCompanyContactCategoryIds, List<int> assignClientCompanyContactCategoryIds, int modelClientCompanyContactId, int modelCreatedByAuthUserId); IQueryable<ClientCompanyContact> GetClientCompanyContactList(int clientCompanyID); ClientCompanyContact GetCurrentClientCompanyContact(ClientCompanyContactSearchModel clientCompanyContactSearchContext); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FixApatradeCapture.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FixApatradeCapture { public int Id { get; set; } public string TradeCode { get; set; } public string TradeReportId { get; set; } public int AuthUserId { get; set; } public string BloombergTradeId { get; set; } public short? AcknowledgeStatus { get; set; } public string RejectReason { get; set; } public string ErrorMessage { get; set; } public short? BloombergPublishIndicator { get; set; } public DateTime? 
PublishDateTime { get; set; }
        public int ApastatusId { get; set; }
        public DateTime ApastatusUpdatedDateTime { get; set; }

        public Emirstatus Apastatus { get; set; }
        public AuthUser AuthUser { get; set; }
        public FxforwardTrade TradeCodeNavigation { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Statements/StatementService.cs
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Threading.Tasks;
using Argentex.Core.DataAccess.Entities;
using Argentex.Core.Service.Models.Statements;
using Argentex.Core.UnitsOfWork.Statements;
using Microsoft.EntityFrameworkCore;

namespace Argentex.Core.Service.Statements
{
    public class StatementService : IStatementService
    {
        private readonly IStatementUoW _statementUoW;
        private bool _disposed;

        public StatementService(IStatementUoW statementUoW)
        {
            _statementUoW = statementUoW;
        }

        public IDictionary<string, List<StatementModel>> GetStatements(int clientCompanyId, DateTime startDate, DateTime endDate)
        {
            var paymentsAndTrades = AddTransactionsFromPayments(clientCompanyId, startDate, endDate);
            return paymentsAndTrades;
        }

        public bool CheckCompany(int clientCompanyId)
        {
            return _statementUoW
                .ClientCompanyRepository
                .GetQueryable(x => x.Id == clientCompanyId)
                .Any();
        }

        private IDictionary<string, List<StatementModel>> AddTransactionsFromPayments(int clientCompanyId, DateTime startDate, DateTime endDate)
        {
            //get list of payment ids for the company and the given dates
            var paymentIDs = _statementUoW.PaymentRepository.GetQueryable(x => x.ClientCompanyId == clientCompanyId
                && !x.PaymentSwiftoutgoingStatus.IsSwiftRejected
                && x.ValueDate.Date >= startDate.Date
                && x.ValueDate.Date <= endDate.Date).Select(x => x.Id).ToList();

            //get the list of trades for the company and the given dates
            var tradeCodes = _statementUoW.FxForwardTradeRepository.GetQueryable(x => x.ClientCompanyId == clientCompanyId
                && x.ValueDate.HasValue
                && x.ValueDate.Value.Date >= startDate.Date
                && x.ValueDate.Value.Date <= endDate.Date
                && !x.Deleted
                && !x.TransactionCommitId.HasValue).Select(x => x.Code).ToList();

            var transactions = _statementUoW.BankAccountTransactionRepository
                .GetQueryable(x => (paymentIDs.Count > 0 && x.PaymentId.HasValue && paymentIDs.Contains(x.PaymentId.Value))
                    || (tradeCodes.Count > 0 && !string.IsNullOrWhiteSpace(x.FxforwardTradeCode) && tradeCodes.Contains(x.FxforwardTradeCode)))
                .Include(x => x.Payment)
                .Include(x => x.FxforwardTradeCodeNavigation)
                .Include(x => x.Currency)
                .Select(x => new BankAccountTransaction()
                {
                    Id = x.Id,
                    PaymentId = x.PaymentId,
                    Amount = x.Amount,
                    BankAccountId = x.BankAccountId,
                    FxforwardTradeCode = x.FxforwardTradeCode,
                    IsDebit = x.IsDebit,
                    Currency = x.Currency != null ? new Currency() { Code = x.Currency.Code, Id = x.Currency.Id } : null,
                    CurrencyId = x.CurrencyId,
                    FxforwardTradeCodeNavigation = x.FxforwardTradeCodeNavigation != null ? new FxforwardTrade() { Code = x.FxforwardTradeCodeNavigation.Code, ValueDate = x.FxforwardTradeCodeNavigation.ValueDate } : null,
                    Payment = x.Payment != null ? new Payment() { Code = x.Payment.Code, ValueDate = x.Payment.ValueDate } : null
                })
                .GroupBy(x => x.Currency.Code)
                .ToDictionary(x => x.Key, x => x.Select(y => new StatementModel()
                    {
                        PaymentCode = y.Payment?.Code,
                        TradeCode = y.FxforwardTradeCode,
                        BankAccountId = y.BankAccountId,
                        ValueDate = y.Payment?.Code != null ? y.Payment.ValueDate : y.FxforwardTradeCodeNavigation.ValueDate.Value,
                        Event = y.Payment?.Code != null ?
$"Payment {y.Payment.Code}" : $"Trade {y.FxforwardTradeCode}", IsDebit = y.IsDebit, Amount = y.Amount ?? 0m }) .OrderByDescending(y => y.ValueDate) .ToList()); return transactions; } protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _statementUoW?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SuspiciousActivityReport.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SuspiciousActivityReport { public int Id { get; set; } public string ClientName { get; set; } public string TradeCode { get; set; } public string PaymentCode { get; set; } public string Description { get; set; } public DateTime? CreateDateTime { get; set; } public byte[] UpdateTimestamp { get; set; } public bool IsSendNotification { get; set; } public int? CreatedByAuthUserId { get; set; } public DateTime? DateTimeReceivedByMlro { get; set; } public string ResearchUnderTakenDescription { get; set; } public string CustomerInformation { get; set; } public string DocumentsInvestigatedInformation { get; set; } public string Conlusions { get; set; } public bool IsReportMadeToNca { get; set; } public DateTime? NcareportDateTime { get; set; } public bool IsAcknowledgementReceived { get; set; } public DateTime? AcknowledgementReceivedDateTime { get; set; } public string ConsentNcareceivedDescription { get; set; } public string ReasonNcareportNotMade { get; set; } public bool IsIssueClosed { get; set; } public DateTime? IssueClosedDateTime { get; set; } public int? IssueClosedByAuthUserId { get; set; } public DateTime? UpdateDateTime { get; set; } public int? UpdatedByAuthUserId { get; set; } public AuthUser CreatedByAuthUser { get; set; } public AuthUser IssueClosedByAuthUser { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PaymentRecReason.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class PaymentRecReason { public PaymentRecReason() { Payment = new HashSet<Payment>(); } public int Id { get; set; } public string Description { get; set; } public bool IsDebit { get; set; } public ICollection<Payment> Payment { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientSiteAction2FixFxforwardTrade.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientSiteAction2FixFxforwardTrade { public long Id { get; set; } public long ClientSiteActionId { get; set; } public string FxforwardTradeCode { get; set; } public ClientSiteAction ClientSiteAction { get; set; } public FxforwardTrade FxforwardTradeCodeNavigation { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Statements/StatementServiceTests.cs using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Data; using System.Linq; using System.Linq.Expressions; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.Statements; using Argentex.Core.Service.Statements; using Argentex.Core.UnitsOfWork.Statements; using Moq; using Synetec.Data.UnitOfWork.GenericRepo; using Xunit; namespace Argentex.Core.Service.Tests.Statements { public class StatementServiceTests { [Fact(Skip = "Needs updating")] public void 
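// The tests in this class are all marked Skip = "Needs updating"; they appear to target an earlier
// GetStatements contract (argument validation and a ConcurrentDictionary return type) and drive the
// service purely through mocked IStatementUoW repositories, using the Given/When/Then naming style.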
Given_Start_Date_Is_Posterior_To_End_Date_When_Getting_Statements_Then_An_Exception_Should_Be_Thrown() { // Given var statementUoWMock = new Mock<IStatementUoW>(); var startDate = DateTime.Today; var endDate = DateTime.Today.AddDays(-1); var expectedMessage = "Start date cannot be posterior to end date"; var statementService = new StatementService(statementUoWMock.Object); // When var exception = Assert.Throws<ArgumentException>(() => statementService.GetStatements(0, startDate, endDate)); // Then Assert.NotNull(exception); Assert.Equal(expectedMessage, exception.Message); } [Fact(Skip = "Needs updating")] public void Given_There_Is_No_Company_With_The_Provided_Id_When_Getting_Statements_Then_An_Exception_Should_Be_Thrown() { // Given var startDate = DateTime.Today; var endDate = DateTime.Today.AddMonths(1); var clientCompanyId = 42; var statementUoWMock = new Mock<IStatementUoW>(); var clientCompanyRepositoryMock = new Mock<IGenericRepo<ClientCompany>>(); clientCompanyRepositoryMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns((ClientCompany) null); statementUoWMock.Setup(x => x.ClientCompanyRepository).Returns(clientCompanyRepositoryMock.Object); var expectedMessage = $"Client company with id {clientCompanyId} does not exist"; var statementService = new StatementService(statementUoWMock.Object); // When var exception = Assert.Throws<ArgumentException>(() => statementService.GetStatements(clientCompanyId, startDate, endDate)); // Then Assert.NotNull(exception); Assert.Equal(expectedMessage, exception.Message); } [Fact(Skip = "Needs updating")] public void Given_There_Is_No_Transaction_When_Getting_Statements_Then_An_Empty_Collection_Should_Be_Returned() { // Given var startDate = DateTime.Today; var endDate = DateTime.Today.AddMonths(1); var clientCompany = new ClientCompany() { Id = 42 }; var currencyRepositoryMock = new Mock<IGenericRepo<Currency>>(); var clientCompanyRepositoryMock = new Mock<IGenericRepo<ClientCompany>>(); var bankAccountTransactionRepositoryMock = new Mock<IGenericRepo<BankAccountTransaction>>(); var tradeRepositoryMock = new Mock<IGenericRepo<FxforwardTrade>>(); var paymentRepositoryMock = new Mock<IGenericRepo<Payment>>(); var statementUoWMock = new Mock<IStatementUoW>(); currencyRepositoryMock.Setup(x => x.GetAllAsList()).Returns(new List<Currency>()); clientCompanyRepositoryMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns(clientCompany); bankAccountTransactionRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<BankAccountTransaction, bool>>>(), It.IsAny<Func<IQueryable<BankAccountTransaction>, IOrderedQueryable<BankAccountTransaction>>>(), "")) .Returns(new List<BankAccountTransaction>()); tradeRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<FxforwardTrade, bool>>>(), It.IsAny<Func<IQueryable<FxforwardTrade>, IOrderedQueryable<FxforwardTrade>>>(), "")) .Returns(new List<FxforwardTrade>()); paymentRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<Payment, bool>>>(), It.IsAny<Func<IQueryable<Payment>, IOrderedQueryable<Payment>>>(), "")) .Returns(new List<Payment>()); statementUoWMock.Setup(x => x.ClientCompanyRepository).Returns(clientCompanyRepositoryMock.Object); statementUoWMock.Setup(x => x.CurrencyRepository).Returns(currencyRepositoryMock.Object); statementUoWMock.Setup(x => x.BankAccountTransactionRepository).Returns(bankAccountTransactionRepositoryMock.Object); statementUoWMock.Setup(x => x.FxForwardTradeRepository).Returns(tradeRepositoryMock.Object); statementUoWMock.Setup(x => 
x.PaymentRepository).Returns(paymentRepositoryMock.Object); var expectedCount = 0; var expectedType = typeof(ConcurrentDictionary<string, List<StatementModel>>); var statementService = new StatementService(statementUoWMock.Object); // When var result = statementService.GetStatements(clientCompany.Id, startDate, endDate); // Then Assert.NotNull(result); Assert.Equal(expectedType, result.GetType()); Assert.Equal(expectedCount, result.Count); } [Fact(Skip = "Needs updating")] public void Given_The_Parameters_Are_Valid_When_Getting_Statements_A_Non_Empty_Collection_Should_Be_Returned() { // Given var startDate = DateTime.Today; var endDate = DateTime.Today.AddMonths(1); var clientCompany = new ClientCompany() { Id = 42 }; var trade = new FxforwardTrade() { Code = "TradeCode42", ValueDate = DateTime.Today.AddDays(10) }; var payment = new Payment() { Id = 404, Code = "PaymentCode", ValueDate = DateTime.Now.AddDays(5) }; var transactions = new List<BankAccountTransaction>() { new BankAccountTransaction() { Currency = new Currency() { Id = 42, Code = "GBP" }, FxforwardTradeCodeNavigation = trade, Payment = payment, IsDebit = true, Amount = 25m, CurrencyId = 42, FxforwardTradeCode = "TradeCode42", PaymentId = 404 }, }; var currencyRepositoryMock = new Mock<IGenericRepo<Currency>>(); var clientCompanyRepositoryMock = new Mock<IGenericRepo<ClientCompany>>(); var bankAccountTransactionRepositoryMock = new Mock<IGenericRepo<BankAccountTransaction>>(); var tradeRepositoryMock = new Mock<IGenericRepo<FxforwardTrade>>(); var paymentRepositoryMock = new Mock<IGenericRepo<Payment>>(); var statementUoWMock = new Mock<IStatementUoW>(); currencyRepositoryMock.Setup(x => x.GetAllAsList()).Returns(new List<Currency> { new Currency() { Id = 42, Code = "GBP" } }); clientCompanyRepositoryMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns(clientCompany); bankAccountTransactionRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<BankAccountTransaction, bool>>>(), It.IsAny<Func<IQueryable<BankAccountTransaction>, IOrderedQueryable<BankAccountTransaction>>>(), "")) .Returns(transactions); tradeRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<FxforwardTrade, bool>>>(), It.IsAny<Func<IQueryable<FxforwardTrade>, IOrderedQueryable<FxforwardTrade>>>(), "")) .Returns(new List<FxforwardTrade> { trade }); paymentRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<Payment, bool>>>(), It.IsAny<Func<IQueryable<Payment>, IOrderedQueryable<Payment>>>(), "")) .Returns(new List<Payment> { payment }); statementUoWMock.Setup(x => x.ClientCompanyRepository).Returns(clientCompanyRepositoryMock.Object); statementUoWMock.Setup(x => x.BankAccountTransactionRepository).Returns(bankAccountTransactionRepositoryMock.Object); statementUoWMock.Setup(x => x.CurrencyRepository).Returns(currencyRepositoryMock.Object); statementUoWMock.Setup(x => x.FxForwardTradeRepository).Returns(tradeRepositoryMock.Object); statementUoWMock.Setup(x => x.PaymentRepository).Returns(paymentRepositoryMock.Object); var expectedCount = 1; var expectedTransactionsCount = 2; var expectedKey = "GBP"; var expectedType = typeof(ConcurrentDictionary<string, List<StatementModel>>); var statementService = new StatementService(statementUoWMock.Object); // When var result = statementService.GetStatements(clientCompany.Id, startDate, endDate); // Then Assert.NotNull(result); Assert.Equal(expectedType, result.GetType()); Assert.Equal(expectedCount, result.Count); var gbpTransactions = result.First(); Assert.Equal(expectedKey, gbpTransactions.Key); var 
transactionsResult = gbpTransactions.Value; Assert.Equal(expectedTransactionsCount, transactionsResult.Count); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Order/OrderResponseModel.cs namespace Argentex.Core.Service.Models.Order { public class OrderResponseModel { public int OrderIndex { get; set; } public string Code { get; set; } public bool IsSuccessful { get; set; } public string ErrorMessage { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceReason.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceReason { public ComplianceReason() { ClientCompanyCompliance = new HashSet<ClientCompanyCompliance>(); } public int Id { get; set; } public string Description { get; set; } public int Sequence { get; set; } public ICollection<ClientCompanyCompliance> ClientCompanyCompliance { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyActivityReport.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyActivityReport { public int Id { get; set; } public int ClientCompanyId { get; set; } public string ClientCompanyName { get; set; } public DateTime? LastActivityReportSentDateTime { get; set; } public int? LastActivityReportSentByAppUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Payments/PaymentsController.cs using Argentex.Core.Service.Payments; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; using SynetecLogger; using System.Threading.Tasks; namespace Argentex.Core.Api.Controllers.Paymets { [Route("api/[controller]")] public class PaymentsController : Controller { private readonly ILogWrapper _logger; private readonly IConfiguration _config; private readonly IPaymentsService _paymentService; public PaymentsController( ILogWrapper logger, IConfiguration config, IPaymentsService paymentService) { _logger = logger; _config = config; _paymentService = paymentService; } [HttpPost] [Route("notify-contacts/{paymentCode}")] public async Task<IActionResult> NotifyContacts(string paymentCode) { if (!ModelState.IsValid) { return BadRequest(ModelState); } var result = await _paymentService.NotifyContacts(paymentCode); return Ok(); } protected override void Dispose(bool disposing) { if (disposing) { _paymentService.Dispose(); base.Dispose(disposing); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOnlineSpreadAdjustment.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOnlineSpreadAdjustment { public long Id { get; set; } public int ClientCompanyOnlineDetailsId { get; set; } public int? Currency1Id { get; set; } public int? Currency2Id { get; set; } public bool? 
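// Currency1Id, Currency2Id and IsBuy are nullable; presumably a null value means the spread
// adjustment is not limited to a specific currency pair or trade direction.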
IsBuy { get; set; } public int Spread { get; set; } public DateTime ExpirationDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public ClientCompanyOnlineDetails ClientCompanyOnlineDetails { get; set; } public Currency Currency1 { get; set; } public Currency Currency2 { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Country.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Country { public Country() { BankAccountCurrencyDetails = new HashSet<BankAccountCurrencyDetails>(); ClientCompanyCompliance = new HashSet<ClientCompanyCompliance>(); ClientCompanyOpi = new HashSet<ClientCompanyOpi>(); ClientCompanyOpitransaction = new HashSet<ClientCompanyOpitransaction>(); CountryClearingCodePrefix = new HashSet<CountryClearingCodePrefix>(); SwiftvalidationCurrencyCountry = new HashSet<SwiftvalidationCurrencyCountry>(); } public int Id { get; set; } public string Name { get; set; } public string FormalName { get; set; } public string CodeIso2 { get; set; } public string CodeIso3 { get; set; } public string PhoneCode { get; set; } public int? CodeIso3numeric { get; set; } public int Sequence { get; set; } public int? CountryGroupId { get; set; } public int? LengthIban { get; set; } public string RegexBban { get; set; } public bool IsEea { get; set; } public CountryGroup CountryGroup { get; set; } public ICollection<BankAccountCurrencyDetails> BankAccountCurrencyDetails { get; set; } public ICollection<ClientCompanyCompliance> ClientCompanyCompliance { get; set; } public ICollection<ClientCompanyOpi> ClientCompanyOpi { get; set; } public ICollection<ClientCompanyOpitransaction> ClientCompanyOpitransaction { get; set; } public ICollection<CountryClearingCodePrefix> CountryClearingCodePrefix { get; set; } public ICollection<SwiftvalidationCurrencyCountry> SwiftvalidationCurrencyCountry { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/UserUow.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Microsoft.AspNetCore.Identity; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using SynetecLogger; using System; using System.Collections.Generic; using System.Threading.Tasks; using System.Linq; using Argentex.Core.UnitsOfWork.Users.Model; using Argentex.Core.UnitsOfWork.AppSettings; using Argentex.Core.UnitsOfWork.Extensions; using Microsoft.EntityFrameworkCore.Internal; namespace Argentex.Core.UnitsOfWork.Users { public class UserUow : BaseUow, IUserUow { private IAppSettingUow _appSettingUow; private IGenericRepo<AppSetting> _appSettingRepository; private IGenericRepo<AuthUser> _authUserRepository; private IGenericRepo<LogAuthUser> _logAuthUserRepository; private IGenericRepo<AppUser> _appUserRepository; private IGenericRepo<ClientCompanyContact> _clientCompanyContactRepository; private IGenericRepo<LogClientCompanyContact> _logClientCompanyContactRepository; private IGenericRepo<ClientCompanyContactCategory> _clientCompanyContactCategoryRepository; private IGenericRepo<LogClientCompanyContactCategory> _logClientCompanyContactCategoryRepository; private IGenericRepo<ActivityLog> _activityLogRepository; private IGenericRepo<Activity> _activityRepository; private IGenericRepo<ApplicationUser> _applicationUserRepository; private IGenericRepo<Token> _tokenRepository; private readonly ILogWrapper _logger; // 
REVISIT, do we need a wrapper? private IGenericRepo<PreviousPassword> _previousPasswordsRepository; private IGenericRepo<ApplicationUserRole> _applicationUserRoleRepository; private IGenericRepo<ApplicationRole> _applicationRoleRepository; private IGenericRepo<UserChangeRequest> _userChangeRequestRepository; private IGenericRepo<UserChangeRequestApproval> _userChangeRequestApprovalRepository; private IGenericRepo<ClientCompany> _clientCompanyRepository; private IGenericRepo<AuthApplication> _authApplicationRepository; private IGenericRepo<ClientCompanyStatus> _clientCompanyStatusRepository; private readonly SecurityDbContext _securityContext; private readonly UserManager<ApplicationUser> _userManager; private readonly SignInManager<ApplicationUser> _signInManager; public UserUow(FXDB1Context context, IAppSettingUow settingUow, UserManager<ApplicationUser> userManager, SignInManager<ApplicationUser> signInManager, SecurityDbContext securityContext, ILogWrapper logger) : base(context) { _appSettingUow = settingUow; _userManager = userManager; _signInManager = signInManager; _securityContext = securityContext; _logger = logger; } #region Properties public IGenericRepo<AppSetting> AppSettingRepository => _appSettingRepository = _appSettingRepository ?? new GenericRepo<AppSetting>(Context); public IGenericRepo<AuthUser> AuthUserRepository => _authUserRepository = _authUserRepository ?? new GenericRepo<AuthUser>(Context); public IGenericRepo<LogAuthUser> LogAuthUserRepository => _logAuthUserRepository = _logAuthUserRepository ?? new GenericRepo<LogAuthUser>(Context); public IGenericRepo<AppUser> AppUserRepository => _appUserRepository = _appUserRepository ?? new GenericRepo<AppUser>(Context); public IGenericRepo<ClientCompanyContact> ClientCompanyContactRepository => _clientCompanyContactRepository = _clientCompanyContactRepository ?? new GenericRepo<ClientCompanyContact>(Context); public IGenericRepo<ClientCompany> ClientCompanyRepository => _clientCompanyRepository = _clientCompanyRepository ?? new GenericRepo<ClientCompany>(Context); public IGenericRepo<LogClientCompanyContact> LogClientCompanyContactRepository => _logClientCompanyContactRepository = _logClientCompanyContactRepository ?? new GenericRepo<LogClientCompanyContact>(Context); public IGenericRepo<ClientCompanyContactCategory> ClientCompanyContactCategoryRepository => _clientCompanyContactCategoryRepository = _clientCompanyContactCategoryRepository ?? new GenericRepo<ClientCompanyContactCategory>(Context); public IGenericRepo<LogClientCompanyContactCategory> LogClientCompanyContactCategoryRepository => _logClientCompanyContactCategoryRepository = _logClientCompanyContactCategoryRepository ?? new GenericRepo<LogClientCompanyContactCategory>(Context); public IGenericRepo<UserChangeRequest> UserChangeRequestRepository => _userChangeRequestRepository = _userChangeRequestRepository ?? new GenericRepo<UserChangeRequest>(Context); public IGenericRepo<UserChangeRequestApproval> UserChangeRequestApprovalRepository => _userChangeRequestApprovalRepository = _userChangeRequestApprovalRepository ?? new GenericRepo<UserChangeRequestApproval>(Context); public IGenericRepo<ActivityLog> ActivityLogRepo => _activityLogRepository = _activityLogRepository ?? new GenericRepo<ActivityLog>(_securityContext); public IGenericRepo<Activity> ActivityRepo => _activityRepository = _activityRepository ?? 
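// The repository properties in this region are created lazily via the null-coalescing pattern on
// first access. Note that several of them (ActivityLogRepo, ActivityRepo, ApplicationUserRepo,
// TokenRepo and the password/role repositories) are backed by the Identity _securityContext rather
// than the FXDB1Context handed to the base unit of work.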
new GenericRepo<Activity>(_securityContext); public IGenericRepo<ApplicationUser> ApplicationUserRepo => _applicationUserRepository = _applicationUserRepository ?? new GenericRepo<ApplicationUser>(_securityContext); private IGenericRepo<Token> TokenRepo => _tokenRepository = _tokenRepository ?? new GenericRepo<Token>(_securityContext); public IGenericRepo<PreviousPassword> PreviousPasswordsRepository => _previousPasswordsRepository = _previousPasswordsRepository ?? new GenericRepo<PreviousPassword>(_securityContext); public IGenericRepo<ApplicationUserRole> ApplicationUserRoleRepository => _applicationUserRoleRepository = _applicationUserRoleRepository ?? new GenericRepo<ApplicationUserRole>(_securityContext); public IGenericRepo<ApplicationRole> ApplicationRoleRepository => _applicationRoleRepository = _applicationRoleRepository ?? new GenericRepo<ApplicationRole>(_securityContext); public IGenericRepo<AuthApplication> AuthApplicationRepository => _authApplicationRepository = _authApplicationRepository ?? new GenericRepo<AuthApplication>(Context); public IGenericRepo<ClientCompanyStatus> ClientCompanyStatusRepository => _clientCompanyStatusRepository = _clientCompanyStatusRepository ?? new GenericRepo<ClientCompanyStatus>(Context); #endregion public ClientUserModel GetClientUserModelByContactId(int clientCompanyContactId) { ClientCompanyContact clientCompanyContact = ClientCompanyContactRepository.GetByPrimaryKey(clientCompanyContactId); if (clientCompanyContact == null) return null; ApplicationUser appUser = GetUserByClientCompanyContactId(clientCompanyContactId); if (appUser == null) throw new NullReferenceException($"User is not tied to a valid ClientCompanyContactId: {clientCompanyContactId}. Database out of sync."); AuthUser authUser = GetAuthUserByAuthUserId(appUser.AuthUserId); if (authUser == null) throw new NullReferenceException($"User is not tied to a valid ClientCompanyContactId: {clientCompanyContactId}. Database out of sync."); ClientUserModel clientUser = MapClientUser(clientCompanyContact, authUser, appUser); return clientUser; } public ClientUserModel MapClientUser(ClientCompanyContact clientCompanyContact, AuthUser authUser, ApplicationUser appUser) { return new ClientUserModel() { Title = clientCompanyContact.Title, Forename = clientCompanyContact.Forename, Surname = clientCompanyContact.Surname, Username = appUser.UserName, PasswordHash = <PASSWORD>, Email = clientCompanyContact.Email, ClientCompanyId = clientCompanyContact.ClientCompanyId, ClientCompanyContactId = clientCompanyContact.Id, AuthUserId = authUser.Id, UpdatedByAuthUserId = clientCompanyContact.UpdatedByAuthUserId, Position = clientCompanyContact.Position, PhoneNumberDirect = clientCompanyContact.TelephoneDirect, PhoneNumberMobile = clientCompanyContact.TelephoneMobile, PhoneNumberOther = clientCompanyContact.TelephoneOther, Birthday = clientCompanyContact.Birthday ?? DateTime.MinValue, IsApproved = appUser.IsApproved, PrimaryContact = clientCompanyContact.PrimaryContact ?? 
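// MapClientUser flattens three related records (the FXDB ClientCompanyContact, the legacy AuthUser
// row and the Identity ApplicationUser) into a single ClientUserModel.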
false, Notes = clientCompanyContact.Notes, LastPasswordChangeDate = appUser.LastPasswordChange, LastPhoneNumberMobileChangeDate = clientCompanyContact.LastTelephoneChangeDate, LastEmailChangeDate = clientCompanyContact.LastEmailChangeDate, ASPNumber = clientCompanyContact.Aspnumber, ASPCreationDate = clientCompanyContact.AspcreationDate, Fullname = clientCompanyContact.Fullname, Authorized = clientCompanyContact.Authorized, RecNotification = clientCompanyContact.RecNotifications, RecAmReport = clientCompanyContact.RecAmreport, RecActivityReport = clientCompanyContact.RecActivityReport, IsDeleted = clientCompanyContact.IsDeleted, BloombergGpi = clientCompanyContact.BloombergGpi, NiNumber = clientCompanyContact.NiNumber, //AssignedCategoryIds = clientCompanyContact, IsLockedOut = authUser.IsLockedOut, Comment = authUser.Comment, CreateDate = authUser.CreateDate, LastLoginDate = authUser.LastLoginDate, LastActivityDate = authUser.LastActivityDate, LastLockOutDate = authUser.LastLockOutDate, FailedPasswordAttemptCount = authUser.FailedPasswordAttemptCount, FailedPasswordAttemptWindowStart = authUser.FailedPasswordAttemptWindowStart, ApplicationId = authUser.ApplicationId, EmailConfirmed = appUser.EmailConfirmed, IsAdmin = appUser.IsAdmin, IsSignatory = appUser.IsSignatory, IsAuthorisedSignatory = appUser.IsAuthorisedSignatory, ApprovedByAuthUserId = appUser.ApprovedByAuthUserId }; } public async Task<IdentityResult> AddUserAsync(ClientUserModel newClientUser, string password) { using (var userTransaction = Context.Database.BeginTransaction()) { using (var securityTransaction = _securityContext.Database.BeginTransaction()) { try { AuthUser authUser = MapAuthUser(new AuthUser(), newClientUser); authUser.Password = <PASSWORD>; //set temporary password authUser.CreateDate = newClientUser.UpdatedDateTime; authUser.FailedPasswordAttemptCount = 0; authUser.ApplicationId = 2; authUser.LastPasswordChangeDate = newClientUser.UpdatedDateTime; authUser.FailedPasswordAttemptWindowStart = newClientUser.UpdatedDateTime; AuthUserRepository.Insert(authUser); InsertAuthUserLog(authUser, "CREATE"); newClientUser.LastEmailChangeDate = newClientUser.UpdatedDateTime; newClientUser.LastPhoneNumberMobileChangeDate = newClientUser.UpdatedDateTime; //create new Client Company Contact ClientCompanyContact clientCompanyContact = MapClientCompanyContact(new ClientCompanyContact(), newClientUser); clientCompanyContact.AuthUser = authUser; ClientCompanyContactRepository.Insert(clientCompanyContact); InsertClientCompanyContactLog(clientCompanyContact, "CREATE"); newClientUser.ClientCompanyContactId = clientCompanyContact.Id; await SaveContextAsync(); ApplicationUser user = MapApplicationUser(new ApplicationUser(), newClientUser); user.AuthUserId = authUser.Id; user.ClientCompanyContactId = clientCompanyContact.Id; IdentityResult result = await _userManager.CreateAsync(user, password); if (!result.Succeeded) throw new Exception($"Unable to create an Application User for {newClientUser.Forename} {newClientUser.Surname}."); authUser.Password = <PASSWORD>.PasswordHash; // when the password is hashed, store hashed password instead AuthUserRepository.Update(authUser); InsertAuthUserLog(authUser, "UPDATE"); await SaveContextAsync(); await InsertInPreviousPasswords(user.PasswordHash, user); userTransaction.Commit(); securityTransaction.Commit(); return result; } catch (Exception ex) { //TODO remove previous password for the user to be deleted userTransaction.Rollback(); securityTransaction.Rollback(); _logger.Error(ex); return 
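// AddUserAsync opens transactions on both the FXDB and Identity contexts and only commits them
// together: the AuthUser row is first inserted with the temporary password supplied by the caller
// and, once Identity has hashed it in CreateAsync, is updated to hold the hashed value instead.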
IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } } } public async Task<IdentityResult> UpdateUserAsync(ApplicationUser userToUpdate, ApplicationUser originalUser) { using (var userTransaction = Context.Database.BeginTransaction()) { using (var securityTransaction = _securityContext.Database.BeginTransaction()) { try { IdentityResult result = await _userManager.UpdateAsync(userToUpdate); if (!result.Succeeded) return result; AuthUser authUser = AuthUserRepository.GetByPrimaryKey(userToUpdate.AuthUserId); if (authUser == null) throw new NullReferenceException($"User is not tied to a valid AuthUserId: {userToUpdate.AuthUserId}. Database out of sync."); authUser = MapAuthUser(authUser, userToUpdate); AuthUserRepository.Update(authUser); InsertAuthUserLog(authUser, "UPDATE"); await SaveContextAsync(); ClientCompanyContact clientCompanyContact = ClientCompanyContactRepository.GetByPrimaryKey(userToUpdate.ClientCompanyContactId); if (clientCompanyContact == null) throw new NullReferenceException($"User is not tied to a valid ClientCompanyContactId: {userToUpdate.ClientCompanyContactId}. Database out of sync."); clientCompanyContact = MapClientCompanyContact(clientCompanyContact, userToUpdate); ClientCompanyContactRepository.Update(clientCompanyContact); InsertClientCompanyContactLog(clientCompanyContact, "UPDATE"); await SaveContextAsync(); userTransaction.Commit(); securityTransaction.Commit(); return result; } catch (Exception ex) { userTransaction.Rollback(); securityTransaction.Rollback(); _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } } } public async Task<IdentityResult> UpdateUserAsync(ClientUserModel userToUpdate) { bool existingSecurityContext = true; bool existingFXDBContext = true; if (Context.Database.CurrentTransaction == null) { existingFXDBContext = false; Context.Database.BeginTransaction(); } if (_securityContext.Database.CurrentTransaction == null) { existingSecurityContext = false; _securityContext.Database.BeginTransaction(); } try { //Get Entities ApplicationUser originalUser = GetUserByClientCompanyContactId(userToUpdate.ClientCompanyContactId); ClientCompanyContact clientCompanyContact = ClientCompanyContactRepository.GetByPrimaryKey(userToUpdate.ClientCompanyContactId); if (clientCompanyContact == null) throw new NullReferenceException($"User is not tied to a valid ClientCompanyContactId: {userToUpdate.ClientCompanyContactId}. Database out of sync."); AuthUser authUser = AuthUserRepository.GetByPrimaryKey(userToUpdate.AuthUserId); if (authUser == null) throw new NullReferenceException($"User is not tied to a valid AuthUserId: {userToUpdate.AuthUserId}. 
Database out of sync."); //prevent certain fields from being modified from update call userToUpdate.PasswordHash = <PASSWORD>; //prevent modification to certain fields until functionality is added on trader userToUpdate.IsSignatory = originalUser.IsSignatory; userToUpdate.IsAuthorisedSignatory = originalUser.IsAuthorisedSignatory; userToUpdate.IsAdmin = originalUser.IsAdmin; ApplicationUser updatedUser = MapApplicationUser(originalUser, userToUpdate); IdentityResult result = await _userManager.UpdateAsync(updatedUser); if (!result.Succeeded) return result; authUser = MapAuthUser(authUser, userToUpdate); AuthUserRepository.Update(authUser); InsertAuthUserLog(authUser, "UPDATE"); await SaveContextAsync(); clientCompanyContact = MapClientCompanyContact(clientCompanyContact, userToUpdate); ClientCompanyContactRepository.Update(clientCompanyContact); InsertClientCompanyContactLog(clientCompanyContact, "UPDATE"); await SaveContextAsync(); if (!existingFXDBContext) Context.Database.CurrentTransaction.Commit(); if (!existingSecurityContext) _securityContext.Database.CurrentTransaction.Commit(); return result; } catch (Exception ex) { if (!existingFXDBContext) Context.Database.CurrentTransaction.Rollback(); if (!existingSecurityContext) _securityContext.Database.CurrentTransaction.Rollback(); _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } public async Task<IdentityResult> ApproveUserAsync(ApplicationUser user) { using (var securityTransaction = _securityContext.Database.BeginTransaction()) { try { IdentityResult result = await _userManager.UpdateAsync(user); if (!result.Succeeded) return result; AuthUser authUser = AuthUserRepository.GetByPrimaryKey(user.AuthUserId); if (authUser == null) throw new NullReferenceException("User is not tied to a valid AuthUserId. Database out of sync."); authUser.IsApproved = user.IsApproved; AuthUserRepository.Update(authUser); await SaveContextAsync(); securityTransaction.Commit(); return result; } catch (Exception ex) { securityTransaction.Rollback(); _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } } public async Task<IdentityResult> AuthoriseSignatoryAsync(ApplicationUser user) { try { IdentityResult result = await _userManager.UpdateAsync(user); if (!result.Succeeded) return result; await SaveContextAsync(); return result; } catch (Exception ex) { _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } public async Task<IdentityResult> DeleteUserAsync(ApplicationUser user) { using (var userTransaction = Context.Database.BeginTransaction()) { using (var securityTransaction = _securityContext.Database.BeginTransaction()) { try { /* Identity Framework has a unique username constraint built into the IdentityContext * User Entity which prevents soft-deleted usernames to be reused natively*/ user.UserName = GenerateUniqueUsername(user.UserName); user.LockoutEnabled = true; user.IsDeleted = true; IdentityResult result = await _userManager.UpdateAsync(user); if (!result.Succeeded) return result; AuthUser authUser = AuthUserRepository.GetByPrimaryKey(user.AuthUserId); if (authUser == null) throw new NullReferenceException("User is not tied to a valid AuthUserId. 
Database out of sync."); authUser.UserName = user.UserName; authUser.IsLockedOut = true; AuthUserRepository.Update(authUser); InsertAuthUserLog(authUser, "DELETE"); await SaveContextAsync(); ClientCompanyContact clientCompanyContact = ClientCompanyContactRepository.GetByPrimaryKey(user.ClientCompanyContactId); if (clientCompanyContact == null) throw new NullReferenceException("User is not tied to a valid ClientCompanyContactId. Database out of sync."); clientCompanyContact.IsDeleted = true; ClientCompanyContactRepository.Update(clientCompanyContact); InsertClientCompanyContactLog(clientCompanyContact, "DELETE"); await SaveContextAsync(); userTransaction.Commit(); securityTransaction.Commit(); return result; } catch (Exception ex) { userTransaction.Rollback(); securityTransaction.Rollback(); _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } } } /// <summary> /// Get an unique username using the provided initial value (e.g. email) and a GUID /// Max length restriction due to AuthUser.Username length /// </summary> /// <param name="initialValue"></param> /// <returns></returns> public string GenerateUniqueUsername(string initialValue = "") { var username = $"{initialValue}-{Guid.NewGuid()}"; int size = DatabaseConstant.Setting_UserManagement_UsernameCharacterLimit; if (username.Length > size) username = username.Substring(0, size); return username; } public async Task<IdentityResult> ChangePasswordAsync(ApplicationUser user, string newPasswordHash) { using (var userTransaction = Context.Database.BeginTransaction()) { using (var securityTransaction = _securityContext.Database.BeginTransaction()) { try { user.PasswordHash = newPasswordHash; user.LastPasswordChange = DateTime.Today; IdentityResult result = await _userManager.UpdateAsync(user); if (!result.Succeeded) return result; AuthUser authUser = AuthUserRepository.GetByPrimaryKey(user.AuthUserId); if (authUser == null) throw new NullReferenceException("User is not tied to a valid AuthUserId. 
Database out of sync."); authUser.Password = <PASSWORD>; AuthUserRepository.Update(authUser); await InsertInPreviousPasswords(newPasswordHash, user); await SaveContextAsync(); userTransaction.Commit(); securityTransaction.Commit(); return result; } catch (Exception ex) { userTransaction.Rollback(); securityTransaction.Rollback(); _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } } } public IQueryable<ApplicationUser> GetUnapprovedUsers() { var applicationUserList = ApplicationUserRepo.GetQueryable(x => (x.IsApproved == false || (x.IsSignatory == true && x.IsAuthorisedSignatory == false)) && x.IsDeleted == false); return applicationUserList; } public IQueryable<ApplicationUser> GetUsersByCompanyId(int clientCompanyId) { var applicationUserList = ApplicationUserRepo.GetQueryable(x => x.ClientCompanyId == clientCompanyId && x.IsDeleted == false); return applicationUserList; } public ApplicationUser GetUserByClientCompanyContactId(int clientCompanyContactId) { ApplicationUser applicationUser = ApplicationUserRepo.GetQueryable(x => x.ClientCompanyContactId == clientCompanyContactId && x.IsDeleted == false).FirstOrDefault(); return applicationUser; } public async Task<ApplicationUser> GetUserByIdAsync(string userId) { ApplicationUser originalUser = await _userManager.FindByIdAsync(userId); return originalUser; } public async Task<ApplicationUser> GetUserByEmailAsync(string email) { return await _userManager.FindByEmailAsync(email); } private bool CheckUniqueEmail(int clientCompanyId, string email, int clientCompanyContactId = 0) { bool anyMatching = ApplicationUserRepo.Get() .Any(x => x.NormalizedEmail == email.ToUpperInvariant() && x.ClientCompanyId == clientCompanyId && !x.IsDeleted && x.ClientCompanyContactId != clientCompanyContactId); return !anyMatching; } private bool CheckUniqueUsername(string username, int clientCompanyContactId = 0) { bool anyMatching = ApplicationUserRepo.Get() .Any(x => x.NormalizedUserName == username.ToUpperInvariant() && !x.IsDeleted && x.ClientCompanyContactId != clientCompanyContactId); return !anyMatching; } public IdentityResult ValidateUserDetails(UserValidationModel user) { //validate only if the value exists and it is set to true if (!user.ValidateUserDetails.HasValue || !user.ValidateUserDetails.Value) { return IdentityResult.Success; } List<IdentityError> validationErrors = new List<IdentityError>(); if (!CheckUniqueUsername(user.Username, user.ClientCompanyContactId)) { _logger.Info($"Username: {user.Username} must be unique"); validationErrors.Add(new IdentityError { Description = "Username must be unique" }); } if (!CheckUniqueEmail(user.ClientCompanyId, user.Email, user.ClientCompanyContactId)) { _logger.Info($"Email: {user.Email} must be unique within the Client Company ID: {user.ClientCompanyId}"); validationErrors.Add(new IdentityError { Description = "Email must be unique within the Client Company Account" }); } return validationErrors.Count > 0 ? 
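// ValidateUserDetails only runs these checks when the optional ValidateUserDetails flag is set:
// the username must be unique across all non-deleted users, while the email only needs to be
// unique within the user's own client company.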
IdentityResult.Failed(validationErrors.ToArray()) : IdentityResult.Success; } public async Task<ApplicationUser> GetUserByNameAsync(string username) { var originalUser = await _userManager.FindByNameAsync(username); if (originalUser == null) return null; return originalUser; } /// <summary> /// Checking if username exists /// </summary> /// <param name="username">UserName or Email</param> /// <returns>bool</returns> public async Task<bool> IsUserByNameAsync(string username) { var userName = GetUserName(username); var originalUser = await _userManager.FindByNameAsync(userName); return originalUser != null; } public async Task<IList<string>> GetRolesAsync(ApplicationUser user) { var roles = await _userManager.GetRolesAsync(user); if (roles == null) throw new NullReferenceException("Model is missing an ID"); return roles; } public async Task<string> GenerateEmailConfirmationTokenAsync(ApplicationUser user) { return await _userManager.GenerateEmailConfirmationTokenAsync(user); } public async Task<string> GeneratePasswordResetTokenAsync(ApplicationUser user) { return await _userManager.GeneratePasswordResetTokenAsync(user); } public async Task<IdentityResult> ResetPasswordAsync(ApplicationUser user, string code, string password) { var result = await _userManager.ResetPasswordAsync(user, code, password); if (result.Succeeded) { await InsertInPreviousPasswords(user.PasswordHash, user); } return result; } public async Task<SignInResult> PasswordSignInAsync(string user, string password, bool isPersistent, bool lockoutOnFailure) { return await _signInManager.PasswordSignInAsync(user, password, isPersistent, lockoutOnFailure); } public async Task<bool> VerifyToken(ApplicationUser user, string tokenProvider, string tokenPurpose, string tokenCode) { var isTokenValid = await _userManager.VerifyUserTokenAsync(user, tokenProvider, tokenPurpose, tokenCode); return isTokenValid; } public async Task PersistToken(Token refreshToken) { _securityContext.Add(refreshToken); await _securityContext.SaveChangesAsync(); //TokenRepo.Insert(refreshToken); //await SaveContextAsync(); } public async Task ReplaceToken(Token newRefreshToken, Token oldRefreshToken) { //invalidate old token _securityContext.Remove(oldRefreshToken); //insert new token _securityContext.Add(newRefreshToken); //persist await _securityContext.SaveChangesAsync(); } public async Task RemoveToken(Token token) { //invalidate old token _securityContext.Remove(token); //persist await _securityContext.SaveChangesAsync(); } public Token GetRefreshToken(int userID, string refreshToken) { var token = TokenRepo.GetQueryable(x => x.UserId == userID && x.Value == refreshToken) .FirstOrDefault(); return token; } public async Task CurrentUserSignOutAsync() { await _signInManager.SignOutAsync(); } private string GetUserName(string userNameOrEmail) { if (!userNameOrEmail.Contains("@")) return userNameOrEmail; var response = _userManager.FindByEmailAsync(userNameOrEmail); return response.Result != null ? 
response.Result.UserName : string.Empty; } public IQueryable<PreviousPassword> GetLastPasswords(long userId) { return PreviousPasswordsRepository .GetQueryable(x => x.UserId == userId) .OrderByDescending(x => x.CreatedDate); } public async Task<string> HashPasswordAsync(string password) { var user = new ApplicationUser { PasswordHash = password }; var success = await _userManager.HasPasswordAsync(user); if (success) { return user.PasswordHash; } return null; } public async Task SetRoleForUser(long userId, long roleId) { var userRole = new ApplicationUserRole { UserId = userId, RoleId = roleId, }; ApplicationUserRoleRepository.Insert(userRole); await _securityContext.SaveChangesAsync(); } public IQueryable<ApplicationRole> GetRole(string role) { return ApplicationRoleRepository .GetQueryable(x => x.NormalizedName == role.Trim().ToUpper()); } public IQueryable<ApplicationUser> GetApplicationUserByAuthUserId(int authUserId) { return ApplicationUserRepo .GetQueryable(x => x.AuthUserId == authUserId); } public AuthUser GetAuthUserByAuthUserId(int authUserId) { return AuthUserRepository.GetByPrimaryKey(authUserId); } public IQueryable<IGrouping<int, ActivityLog>> GetActivityLog(IList<int> clientCompanyIDs, string activityType) { return ActivityLogRepo .GetQueryable(x => x.Activity.Type == activityType && x.AuthUserId.HasValue && clientCompanyIDs.Contains(x.ApplicationUser.ClientCompanyId) && x.IsSuccess, orderBy: null, includeProperties: "Activity,ApplicationUser") .OrderByDescending(x => x.LogDate) .GroupBy(x => x.ApplicationUser.ClientCompanyId); } public IQueryable<ActivityLog> GetUserActivityLog(int authUserId) { return ActivityLogRepo .GetQueryable(x => x.Activity.Type == "login" && x.AuthUserId.HasValue && x.AuthUserId == authUserId, orderBy: null, includeProperties: "Activity") .OrderByDescending(x => x.LogDate); } public async Task LogActivity(ActivityLog log) { _securityContext.Add(log); await _securityContext.SaveChangesAsync(); } public AppUser GetAppUserById(int appUserId) { var appUser = AppUserRepository .GetQueryable(x => x.Id == appUserId) .FirstOrDefault(); return appUser; } public IEnumerable<AppUser> GetAllDirectorsAsList() { return AppUserRepository.GetQueryable(x => x.IsDirector == true); } public UserChangeRequestResponse ValidateUserMobileChangeRequest(ClientUserModel updatedClientUser, ClientUserModel originalClientUser, int daysPeriod) { UserChangeRequestResponse response = new UserChangeRequestResponse() { InsertOrUpdateUserChangeRequest = false, SendUserChangeAlerts = false, WarningMessage = string.Empty, }; if (updatedClientUser.PhoneNumberMobile == originalClientUser.PhoneNumberMobile || string.IsNullOrEmpty(originalClientUser.PhoneNumberMobile)) return response; if (GetUserChangeRequest(originalClientUser.AuthUserId, "Pending", "Telephone") != null) { response.InsertOrUpdateUserChangeRequest = true; response.WarningMessage = "There is already a change request pending"; } else { //Insert a change request for the mobile if the user's email has been recently changed if (originalClientUser.LastEmailChangeDate.HasValue && originalClientUser.LastEmailChangeDate.Value.AddDays(daysPeriod) > DateTime.Now) { response.InsertOrUpdateUserChangeRequest = true; response.SendUserChangeAlerts = true; response.WarningMessage = $"Email was changed within the past {daysPeriod} days"; } } return response; } public UserChangeRequestResponse ValidateUserEmailChangeRequest(ClientUserModel updatedClientUser, ClientUserModel originalClientUser, int daysPeriod) { UserChangeRequestResponse 
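// As with ValidateUserMobileChangeRequest above, an email change is not applied straight away when
// the mobile number was also changed recently (within daysPeriod); the returned flags tell the
// caller to raise a pending UserChangeRequest and to send alert notifications instead.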
response = new UserChangeRequestResponse() { InsertOrUpdateUserChangeRequest = false, SendUserChangeAlerts = false, WarningMessage = string.Empty, }; if (updatedClientUser.Email == originalClientUser.Email || string.IsNullOrEmpty(originalClientUser.Email)) return response; if (GetUserChangeRequest(originalClientUser.AuthUserId, "Pending", "Email") != null) { response.InsertOrUpdateUserChangeRequest = true; //to update the existing change request response.WarningMessage = "There is already a change request pending"; } else { bool phoneNumberModified = (updatedClientUser.PhoneNumberMobile != originalClientUser.PhoneNumberMobile); //Insert a change request for the email if the user's mobile is also recently changed if (phoneNumberModified || originalClientUser.LastPhoneNumberMobileChangeDate.HasValue && (originalClientUser.LastPhoneNumberMobileChangeDate.Value.AddDays(daysPeriod) > DateTime.Now)) { response.InsertOrUpdateUserChangeRequest = true; response.SendUserChangeAlerts = true; response.WarningMessage = $"Mobile was changed within the past {daysPeriod} days"; } } return response; } public UserChangeRequest GetUserChangeRequest(int userChangeRequestId) { return UserChangeRequestRepository.GetQueryable(x => x.Id == userChangeRequestId) .FirstOrDefault(); } public IEnumerable<PendingApprovalUserChangeRequest> GetPendingChangeRequest() { var externalUserApprovals = _appSettingUow.GetAppSetting(AppSettingEnum.ExternalUserChangeRequestApprovalsRequired).ValueAs<int>(); var internalUserapprovals = _appSettingUow.GetAppSetting(AppSettingEnum.InternalUserChangeRequestApprovalsRequired).ValueAs<int>(); var requests = UserChangeRequestRepository.GetQueryable().Where(r => r.ChangeStatus == "Pending") .Select(r => new PendingApprovalUserChangeRequest { UserChangeRequestID = r.Id, AuthUserID = r.AuthUserId, AuthUserName = r.AuthUser.UserName, CurrentValue = r.CurrentValue.Replace('|', char.MinValue), ProposedValue = r.ProposedValue.Replace('|', char.MinValue), ChangeValueType = r.ChangeValueType, ChangeDateTime = r.ChangeDateTime, ChangedByAuthUserID = r.ChangedByAuthUserId, ChangedByAuthUserName = r.ChangedByAuthUser.UserName, ChangeStatus = r.ChangeStatus, AuthApplicationDescription = r.AuthUser.Application.Description, ApprovedBy = string.Join(", ", r.UserChangeRequestApproval.Select(a => a.ApprovedByAuthUser.UserName)), Company = r.AuthUser.ClientCompanyContactAuthUser.SingleOrDefault().ClientCompany.Name ?? "Argentex", Forename = r.AuthUser.ClientCompanyContactAuthUser.SingleOrDefault().Forename ?? r.AuthUser.AppUser.SingleOrDefault().Forename, Surname = r.AuthUser.ClientCompanyContactAuthUser.SingleOrDefault().Surname ?? r.AuthUser.AppUser.SingleOrDefault().Surname }).ToList(); requests.ForEach(r => r.ApprovalsRequired = r.Company == "Argentex" ? 
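// The number of approvals a pending request needs depends on the user's company: requests for
// "Argentex" (internal) users take the internal app setting, all other companies take the
// ExternalUserChangeRequestApprovalsRequired value.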
internalUserapprovals : externalUserApprovals); return requests; } public async Task<ApproveUserChangeResponse> ApproveUserChangeRequest(ApproveUserChangeRequest approveUserChangeRequest) { using (var userTransaction = Context.Database.BeginTransaction()) { using (var identityTransaction = _securityContext.Database.BeginTransaction()) { ApproveUserChangeResponse approveUserChangeResponse = new ApproveUserChangeResponse(); try { approveUserChangeResponse.Result = IdentityResult.Success; var userChangeRequest = GetUserChangeRequest(approveUserChangeRequest.UserChangeRequestID); // Check if UserChangeRequest is still Pending bool isUserChangeRequestPending = UserChangeRequestRepository.GetQueryable(x => x.Id == userChangeRequest.Id && x.ChangeStatus == "Pending").Any(); if (isUserChangeRequestPending && approveUserChangeRequest.ApprovedByAuthUserId > 0) { // Inserting record in the UserChangeRequestApproval table InsertUserChangeRequestApproval(userChangeRequest); await SaveContextAsync(); //Updating the user details in IdentityDB, ClientCompanyContact and AuthUser tables approveUserChangeResponse = await UpdateUserDetails(userChangeRequest); } else { approveUserChangeResponse.Result = IdentityResult.Failed(new IdentityError { Description = "No userchange request to approve" }); } if (approveUserChangeResponse.Result.Succeeded) { identityTransaction.Commit(); userTransaction.Commit(); approveUserChangeResponse.Result = IdentityResult.Success; approveUserChangeResponse.UserChangeRequest = GetUserChangeRequest(approveUserChangeRequest.UserChangeRequestID); } else { identityTransaction.Rollback(); userTransaction.Rollback(); approveUserChangeResponse.Result = IdentityResult.Failed(); } // return idResult; return approveUserChangeResponse; } catch (Exception ex) { _logger.Error(ex); identityTransaction.Rollback(); userTransaction.Rollback(); approveUserChangeResponse.Result = IdentityResult.Failed(new IdentityError { Description = ex.Message }); return approveUserChangeResponse; } } } } public async Task<ApproveUserChangeResponse> UpdateUserDetails(UserChangeRequest userChangeRequest) { var approveUserChangeResponse = new ApproveUserChangeResponse(); approveUserChangeResponse.Result = IdentityResult.Success; try { //Get the Number of approval required for External Users int? 
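// The proposed value is only written through to the contact, and the request marked "Approved",
// once the approval count reaches the configured threshold; until then the request stays pending.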
externalApprovalsRequired = _appSettingUow .GetAppSetting(AppSettingEnum.ExternalUserChangeRequestApprovalsRequired) .ValueAs<int>(); //Get the Number of approvals for the request var numberOfAprovals = UserChangeRequestApprovalRepository.GetQueryable(x => x.UserChangeRequestId == userChangeRequest.Id).Count(); //If the number of approvals is equal or greater than the Approvals required then Update AppUser or Contact details if (numberOfAprovals >= externalApprovalsRequired) { int clientContactId = ClientCompanyContactRepository.GetQueryable(x => x.AuthUserId == userChangeRequest.AuthUserId) .Select(x => x.Id).FirstOrDefault(); var clientUser = GetClientUserModelByContactId(clientContactId); if (clientUser != null) { if (userChangeRequest.ChangeValueType == "Email") { clientUser.Email = userChangeRequest.ProposedValue; clientUser.LastEmailChangeDate = DateTime.Now; } else if (userChangeRequest.ChangeValueType == "Telephone") { clientUser.PhoneNumberMobile = userChangeRequest.ProposedValue; clientUser.LastPhoneNumberMobileChangeDate = DateTime.Now; } clientUser.UpdatedDateTime = DateTime.Now; //Updating the UserChangeRequest table userChangeRequest.ChangeStatus = "Approved"; UserChangeRequestRepository.Update(userChangeRequest); await SaveContextAsync(); approveUserChangeResponse.Result = await UpdateUserAsync(clientUser); approveUserChangeResponse.SendNotification = approveUserChangeResponse.Result.Succeeded; } } return approveUserChangeResponse; } catch (Exception ex) { _logger.Error(ex); approveUserChangeResponse.Result = IdentityResult.Failed(new IdentityError { Description = ex.Message }); return approveUserChangeResponse; } } public UserChangeRequest GetUserChangeRequest(int authUserID, string changeStatus, string changeValueType) { return UserChangeRequestRepository.GetQueryable(x => x.AuthUserId == authUserID && x.ChangeStatus == changeStatus && x.ChangeValueType == changeValueType) .FirstOrDefault(); } public async Task<IdentityResult> ProcessUserChangeRequest(UserChangeRequest changeRequest) { using (var userTransaction = Context.Database.BeginTransaction()) { try { //cancel any existing request var existingPendingRequest = GetUserChangeRequest(changeRequest.AuthUserId, "Pending", changeRequest.ChangeValueType); if (existingPendingRequest != null) { existingPendingRequest.ChangeStatus = "Cancelled"; UserChangeRequestRepository.Update(existingPendingRequest); await SaveContextAsync(); } //Insert new change request UserChangeRequestRepository.Insert(changeRequest); await SaveContextAsync(); userTransaction.Commit(); return IdentityResult.Success; } catch (Exception ex) { userTransaction.Rollback(); _logger.Error(ex); return IdentityResult.Failed(new IdentityError { Description = ex.Message }); } } } public string GetSendersEmailAddress(int authUserId) { string emailTo = string.Empty; // Only Send "Mobile Modified" email alert to contacts from Companies that are Clients if (GetClientCompanyStatus(authUserId) == "Client") { emailTo = ApplicationUserRepo.GetQueryable(x => x.AuthUserId == authUserId).Select(x => x.Email).FirstOrDefault(); } return emailTo; } public string GetSendersPhoneNumber(int authUserId) { string phoneNumberTo = string.Empty; // Only Send "Email Modified" text message alert to contacts from Companies that are Clients if (GetClientCompanyStatus(authUserId) == "Client") { phoneNumberTo = ApplicationUserRepo.GetQueryable(x => x.AuthUserId == authUserId).Select(x => x.PhoneNumberMobile).FirstOrDefault(); } return phoneNumberTo; } private string 
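// GetSendersEmailAddress and GetSendersPhoneNumber above only return a destination when the
// contact's company status is "Client", so change alerts are only sent to contacts of client companies.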
GetClientCompanyStatus(int authUserId) { return ClientCompanyContactRepository.GetQueryable(a => a.AuthUserId == authUserId) .Select(x => x.ClientCompany.ClientCompanyStatus.Description).FirstOrDefault(); } #region Private Methods private AuthUser MapAuthUser(AuthUser authUser, ApplicationUser user) { authUser.Id = user.AuthUserId; authUser.UserName = user.UserName; authUser.Password = <PASSWORD>; authUser.Email = user.Email; authUser.IsApproved = user.IsApproved; authUser.CreateDate = DateTime.Now; authUser.FailedPasswordAttemptCount = 0; authUser.ApplicationId = 2; authUser.FailedPasswordAttemptWindowStart = DateTime.Now; authUser.IsLockedOut = user.LockoutEnabled; return authUser; } private ClientCompanyContact MapClientCompanyContact(ClientCompanyContact clientCompanyContact, ApplicationUser user) { //clientCompanyContact.AuthUserId = user.AuthUserId; clientCompanyContact.ClientCompanyId = user.ClientCompanyId; clientCompanyContact.Title = user.Title; clientCompanyContact.Forename = user.Forename; clientCompanyContact.Surname = user.Surname; clientCompanyContact.Fullname = user.Title + " " + user.Forename + " " + user.Surname; clientCompanyContact.Email = user.Email; clientCompanyContact.Position = user.Position; clientCompanyContact.PrimaryContact = user.PrimaryContact; clientCompanyContact.TelephoneDirect = user.PhoneNumber; clientCompanyContact.TelephoneMobile = user.PhoneNumberMobile; clientCompanyContact.TelephoneOther = user.PhoneNumberOther; clientCompanyContact.Aspnumber = user.ASPNumber; clientCompanyContact.AspcreationDate = user.ASPCreationDate; clientCompanyContact.Authorized = user.IsApproved; clientCompanyContact.UpdatedByAuthUserId = user.UpdatedByAuthUserId; clientCompanyContact.UpdatedDateTime = DateTime.Now; clientCompanyContact.Notes = user.Notes; clientCompanyContact.RecNotifications = false; clientCompanyContact.RecAmreport = false; clientCompanyContact.RecActivityReport = false; clientCompanyContact.IsDeleted = user.IsDeleted; return clientCompanyContact; } private ApplicationUser MapApplicationUser(ApplicationUser appUser, ClientUserModel user) { appUser.Title = user.Title; appUser.Forename = user.Forename; appUser.Surname = user.Surname; appUser.UserName = user.Username; appUser.Email = user.Email; appUser.ClientCompanyId = user.ClientCompanyId; appUser.ClientCompanyContactId = user.ClientCompanyContactId; appUser.UpdatedByAuthUserId = user.UpdatedByAuthUserId; appUser.Position = user.Position; appUser.PhoneNumber = user.PhoneNumberDirect; appUser.PhoneNumberMobile = user.PhoneNumberMobile; appUser.PhoneNumberOther = user.PhoneNumberOther; appUser.Birthday = user.Birthday; appUser.Notes = user.Notes; appUser.PrimaryContact = user.PrimaryContact; appUser.LockoutEnabled = user.IsLockedOut ?? false; appUser.IsApproved = user.IsApproved; appUser.IsSignatory = user.IsSignatory; appUser.IsAuthorisedSignatory = user.IsAuthorisedSignatory; appUser.IsAdmin = user.IsAdmin; appUser.ApprovedByAuthUserId = user.ApprovedByAuthUserId; return appUser; } private AuthUser MapAuthUser(AuthUser authUser, ClientUserModel user) { authUser.UserName = user.Username; authUser.Password = <PASSWORD>; authUser.Email = user.Email ?? "NoEmail"; authUser.IsApproved = user.IsApproved; authUser.IsLockedOut = user.IsLockedOut ?? 
false; authUser.Comment = user.Comment; return authUser; } private ClientCompanyContact MapClientCompanyContact(ClientCompanyContact clientCompanyContact, ClientUserModel user) { clientCompanyContact.ClientCompanyId = user.ClientCompanyId; clientCompanyContact.Title = user.Title; clientCompanyContact.Forename = user.Forename; clientCompanyContact.Surname = user.Surname; clientCompanyContact.Fullname = $"{user.Title} {user.Forename} {user.Surname}"; clientCompanyContact.Email = user.Email; clientCompanyContact.LastEmailChangeDate = user.LastEmailChangeDate; clientCompanyContact.Position = user.Position; clientCompanyContact.PrimaryContact = user.PrimaryContact; clientCompanyContact.TelephoneDirect = user.PhoneNumberDirect; clientCompanyContact.TelephoneMobile = user.PhoneNumberMobile; clientCompanyContact.TelephoneOther = user.PhoneNumberOther; clientCompanyContact.LastTelephoneChangeDate = user.LastPhoneNumberMobileChangeDate; clientCompanyContact.Birthday = user.Birthday; clientCompanyContact.Aspnumber = user.ASPNumber; clientCompanyContact.AspcreationDate = user.ASPCreationDate; clientCompanyContact.UpdatedByAuthUserId = user.UpdatedByAuthUserId; clientCompanyContact.UpdatedDateTime = user.UpdatedDateTime; clientCompanyContact.Notes = user.Notes; clientCompanyContact.Authorized = user.Authorized; clientCompanyContact.RecNotifications = user.RecNotification; clientCompanyContact.RecAmreport = user.RecAmReport; clientCompanyContact.RecActivityReport = user.RecActivityReport; clientCompanyContact.BloombergGpi = user.BloombergGpi; clientCompanyContact.NiNumber = user.NiNumber; clientCompanyContact.IsDeleted = user.IsDeleted; return clientCompanyContact; } private void InsertAuthUserLog(AuthUser authUser, string action) { var log = new LogAuthUser() { LogAction = action, Id = authUser.Id, UserName = authUser.UserName, Password = <PASSWORD>, Email = authUser.Email, IsApproved = authUser.IsApproved, IsLockedOut = authUser.IsLockedOut, Comment = authUser.Comment, CreateDate = authUser.CreateDate, LastPasswordChangeDate = authUser.LastPasswordChangeDate, LastLoginDate = authUser.LastLoginDate, LastActivityDate = authUser.LastActivityDate, LastLockOutDate = authUser.LastLockOutDate, FailedPasswordAttemptCount = authUser.FailedPasswordAttemptCount, FailedPasswordAttemptWindowStart = authUser.FailedPasswordAttemptWindowStart, ApplicationId = authUser.ApplicationId }; LogAuthUserRepository.Insert(log); } private void InsertClientCompanyContactLog(ClientCompanyContact clientCompanyContact, string action) { var log = new LogClientCompanyContact() { LogAction = action, Id = clientCompanyContact.Id, ClientCompanyId = clientCompanyContact.ClientCompanyId, Title = clientCompanyContact.Title, Forename = clientCompanyContact.Forename, Surname = clientCompanyContact.Surname, Email = clientCompanyContact.Email, TelephoneDirect = clientCompanyContact.TelephoneDirect, TelephoneMobile = clientCompanyContact.TelephoneMobile, TelephoneOther = clientCompanyContact.TelephoneOther, Birthday = clientCompanyContact.Birthday, Authorized = clientCompanyContact.Authorized, UpdateTimeStamp = clientCompanyContact.UpdateTimeStamp, UpdatedByAuthUserId = clientCompanyContact.UpdatedByAuthUserId, UpdatedDateTime = clientCompanyContact.UpdatedDateTime, Notes = clientCompanyContact.Notes, Fullname = clientCompanyContact.Fullname, RecNotifications = clientCompanyContact.RecNotifications, RecAmreport = clientCompanyContact.RecAmreport, AuthUserId = clientCompanyContact.AuthUserId, Position = clientCompanyContact.Position, 
PrimaryContact = clientCompanyContact.PrimaryContact, RecActivityReport = clientCompanyContact.RecActivityReport, IsDeleted = clientCompanyContact.IsDeleted, Aspnumber = clientCompanyContact.Aspnumber, AspcreationDate = clientCompanyContact.AspcreationDate, LastTelephoneChangeDate = clientCompanyContact.LastTelephoneChangeDate, LastEmailChangeDate = clientCompanyContact.LastEmailChangeDate, BloombergGpi = clientCompanyContact.BloombergGpi, NiNumber = clientCompanyContact.NiNumber }; LogClientCompanyContactRepository.Insert(log); } private void InsertUserChangeRequestApproval(UserChangeRequest userChangeRequest) { UserChangeRequestApproval userChangeRequestApproval = new UserChangeRequestApproval() { UserChangeRequestId = userChangeRequest.Id, ApprovedByAuthUserId = userChangeRequest.ChangedByAuthUserId, ApprovedDateTime = DateTime.Now, IsActive = true }; UserChangeRequestApprovalRepository.Insert(userChangeRequestApproval); } private async Task InsertInPreviousPasswords(string passwordHash, ApplicationUser user) { var previousPassword = new PreviousPassword() { UserId = user.Id, CreatedDate = DateTime.Now, PasswordHash = <PASSWORD>Hash }; PreviousPasswordsRepository.Insert(previousPassword); await _securityContext.SaveChangesAsync(); } #endregion #region Disposing //TODO - this has to be refactored that each unit of work handles only one context //or not inherit from BaseUow private bool _disposed = false; protected override void Dispose(bool disposing) { if (_disposed) return; if (disposing) { _securityContext.Dispose(); } _disposed = true; } #endregion } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyOpi.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyOpi { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int ClientCompanyId { get; set; } public string Description { get; set; } public string AccountName { get; set; } public string BankName { get; set; } public string AccountNumber { get; set; } public string SortCode { get; set; } public string Reference { get; set; } public string SwiftCode { get; set; } public string Iban { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDate { get; set; } public bool Authorised { get; set; } public int? AuthorisedByAuthUserId { get; set; } public DateTime? AuthorisedDateTime { get; set; } public int CurrencyId { get; set; } public bool? IsCompanyAccount { get; set; } public string BeneficiaryAddress { get; set; } public int? CountryId { get; set; } public string BeneficiaryName { get; set; } public string BankAddress { get; set; } public int? ClearingCodePrefixId { get; set; } public bool Rejected { get; set; } public int? RejectedByAuthUserId { get; set; } public DateTime? RejectedDateTime { get; set; } public bool? 
IsOwnAccount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Fix/BarxFxService.cs using Argentex.ClientSite.Service.Http; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Models.Fix; using SynetecLogger; using System; using System.Net; using System.Net.Http; using System.Threading.Tasks; namespace Argentex.Core.Service.Fix { public class BarxFxService : IBarxFxService { private readonly IHttpService _httpService; private readonly IAppSettingService _appSettingService; private readonly ILogWrapper _logger; private bool _disposed; public BarxFxService(IHttpService httpService, IAppSettingService appSetting, ILogWrapper logger) { _httpService = httpService; _appSettingService = appSetting; _logger = logger; } public async Task<FixQuoteResponseModel> GetQuoteAsync(FixQuoteRequestModel quoteRequest) { var baseUri = _appSettingService.GetBarxFXFixQuoteUrl(); var uri = _httpService.GenerateUri(baseUri, quoteRequest); _httpService.AddRequestUri(uri); _httpService.AddMethod(HttpMethod.Get); HttpResponseMessage result = null; try { result = await _httpService.SendAsync(); } catch (HttpRequestException ex) { throw new HttpRequestException("Synetec FIX API is unreachable", ex.InnerException); } FixQuoteResponseModel quoteResponse = null; //Not valid request model if (result.StatusCode == HttpStatusCode.NotFound) { var responseObj = _httpService.GetResponseAsString(result); throw new HttpRequestException($"FIX API is unreachable. Reason: {responseObj.Result}"); } //Not valid request model if (result.StatusCode == HttpStatusCode.BadRequest) { var responseObj = _httpService.GetResponseAsString(result); throw new HttpRequestException($"Invalid http request to Synetec FIX API. Reason: {responseObj.Result}"); } //BarxFX is not available if (result.StatusCode == HttpStatusCode.ServiceUnavailable) { quoteResponse = await _httpService.GetResponseObject<FixQuoteResponseModel>(result); throw new HttpRequestException($"BarxFX is not available. Reason: {quoteResponse?.ErrorMessage}"); } //successfull http call if (result.StatusCode == HttpStatusCode.OK) { quoteResponse = await _httpService.GetResponseObject<FixQuoteResponseModel>(result); //unsuccessfull call from Synetec FIX API to BarxFX if(quoteResponse != null && quoteResponse.ErrorMessage != null) { throw new HttpRequestException($"Error getting quote from BarxFX. Reason: {quoteResponse?.ErrorMessage}"); } } return quoteResponse; } public async Task<FixNewOrderResponseModel> NewOrderSingleAsync(FixNewOrderRequestModel dealRequest) { var baseUri = _appSettingService.GetBarxFXFixNewOrderUrl(); var uri = _httpService.GenerateUri(baseUri, dealRequest); _httpService.AddRequestUri(uri); _httpService.AddMethod(HttpMethod.Get); HttpResponseMessage result = null; try { result = await _httpService.SendAsync(); } catch (HttpRequestException ex) { _logger.Error(ex); throw new HttpRequestException("Deal not done due to an unexpected error, please try again", ex.InnerException); } FixNewOrderResponseModel dealResponse = null; //Not valid request model if (result.StatusCode == HttpStatusCode.BadRequest) { var responseObj = _httpService.GetResponseAsString(result); //dealResponse.ErrorMessage = $"Invalid http request to Synetec FIX API. 
Reason: {responseObj.Result}";
            }

            //BarxFX is not available
            if (result.StatusCode == HttpStatusCode.ServiceUnavailable || result.StatusCode == HttpStatusCode.NotFound)
            {
                dealResponse = await _httpService.GetResponseObject<FixNewOrderResponseModel>(result);
                //dealResponse.ErrorMessage = $"BarxFX is not available. Reason: {dealResponse?.ErrorMessage}";
                _logger.Error(new Exception($"BarxFX is not available. Reason: {dealResponse?.ErrorMessage}"));
                dealResponse.ErrorMessage = "Deal not done due to a communication failure, please try again";
            }

            //successful http call
            if (result.StatusCode == HttpStatusCode.OK)
            {
                dealResponse = await _httpService.GetResponseObject<FixNewOrderResponseModel>(result);

                //unsuccessful call from Synetec FIX API to BarxFX
                if (dealResponse != null && dealResponse.ErrorMessage != null)
                {
                    dealResponse.ErrorMessage = "Deal not done due to market movements causing variance in rates, please re-quote and try again";
                    _logger.Error(new Exception($"Error creating order from BarxFX. Reason: {dealResponse?.ErrorMessage}"));
                    //dealResponse.ErrorMessage = $"Error creating order from BarxFX. Reason: {dealResponse?.ErrorMessage}";
                }
            }

            return dealResponse;
        }

        public void SetHttpTimeout(TimeSpan timeout)
        {
            _httpService.AddTimeout(timeout);
        }

        protected virtual void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                if (disposing)
                {
                }
            }
            _disposed = true;
        }

        public void Dispose()
        {
            Dispose(true);
        }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/NavMenuItem.cs
using System;
using System.Collections.Generic;

namespace Argentex.Core.DataAccess.Entities
{
    public partial class NavMenuItem
    {
        public int Id { get; set; }
        public int NavMenuSectionId { get; set; }
        public int? AuthPermissionId { get; set; }
        public string DisplayText { get; set; }
        public string NavigateUrl { get; set; }
        public int?
DisplayOrder { get; set; }
        public AuthPermission AuthPermission { get; set; }
        public NavMenuSection NavMenuSection { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationOptionField.cs
using System;
using System.Collections.Generic;

namespace Argentex.Core.DataAccess.Entities
{
    public partial class SwiftvalidationOptionField
    {
        public int OptionId { get; set; }
        public int FieldId { get; set; }
        public int Sequence { get; set; }
        public SwiftvalidationField Field { get; set; }
        public SwiftvalidationOption Option { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Identity/IdentityResultCodes.cs
using System;
using System.Collections.Generic;
using System.Text;

namespace Argentex.Core.Service.Identity
{
    public static class IdentityResultCodes
    {
        public const string UserNotFound = @"UserNotFound";
        public const string InvalidUserState = @"InvalidUserState";
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Fix/FixNewOrderResponseModel.cs
using System;

namespace Argentex.Core.Service.Models.Fix
{
    public class FixNewOrderResponseModel
    {
        public string TradeId { get; set; }
        public string BarclaysTradeId { get; set; }
        public string BarclaysAssignedId { get; set; }
        public string ErrorMessage { get; set; }
        public bool IsFilled { get; set; }
        public string RejectReason { get; set; }
        public decimal Price { get; set; }
        public decimal OrderQty { get; set; }
        public decimal SecondaryQty { get; set; }
        public decimal Side { get; set; }
        public string BrokerMajorPart { get; set; }
        public string CurrencyPair { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/ApplicationUser.cs
using Microsoft.AspNetCore.Identity;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;

namespace Argentex.Core.Identity.DataAccess
{
    public class ApplicationUser : IdentityUser<long>
    {
        [Key]
        [Required]
        public int AuthUserId { get; set; }
        [Required]
        [MaxLength(16)]
        public string Title { get; set; }
        [Required]
        [MaxLength(256)]
        public string Forename { get; set; }
        [Required]
        [MaxLength(100)]
        public string Surname { get; set; }
        [Required]
        public int ClientCompanyId { get; set; }
        [Required]
        public int ClientCompanyContactId { get; set; }
        [Required]
        public int UpdatedByAuthUserId { get; set; }
        [MaxLength(128)]
        public string PhoneNumberMobile { get; set; }
        [MaxLength(128)]
        public string PhoneNumberOther { get; set; }
        public DateTime? LastUpdate { get; set; }
        public string ASPNumber { get; set; }
        public DateTime? ASPCreationDate { get; set; }
        public DateTime? LastTelephoneChange { get; set; }
        public DateTime? LastEmailChange { get; set; }
        [Required]
        public DateTime LastPasswordChange { get; set; }
        public DateTime CreateDate { get; set; }
        public bool IsApproved { get; set; }
        public int? ApprovedByAuthUserId { get; set; }
        public DateTime? Birthday { get; set; }
        public string Notes { get; set; }
        [MaxLength(50)]
        public string Position { get; set; }
        public bool?
PrimaryContact { get; set; } public bool IsDeleted { get; set; } public bool IsAdmin { get; set; } public bool IsSignatory { get; set; } public bool IsAuthorisedSignatory { get; set; } public virtual IList<UserReport> UserReports { get; set; } public virtual ICollection<PreviousPassword> PreviousPasswords { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/User/UserService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.Sms.Models; using Argentex.Core.Service.Sms.SmsSender; using Argentex.Core.UnitsOfWork.Users; using Argentex.Core.UnitsOfWork.Users.Model; using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.Linq; using System.Security.Claims; using System.Security.Principal; using System.Threading.Tasks; using Argentex.Core.Service.Enums; using Argentex.Core.Service.Extensions; using ApproveUserChangeRequest = Argentex.Core.UnitsOfWork.Users.Model.ApproveUserChangeRequest; namespace Argentex.Core.Service.User { public class UserService : IUserService { private readonly IConfigWrapper _config; private readonly IUserUow _userUow; private readonly IEmailSender _emailSender; private readonly IIdentityService _identityService; private readonly IEmailService _emailService; private readonly IAppSettingService _appSettingService; private readonly ISmsSender _smsSender; private bool _disposed; public UserService(IConfigWrapper config, IUserUow userUow, IEmailSender emailSender, IIdentityService identityService, IEmailService emailService, IAppSettingService appSettingService, ISmsSender smsSender) { _config = config; _userUow = userUow; _emailSender = emailSender; _identityService = identityService; _emailService = emailService; _appSettingService = appSettingService; _smsSender = smsSender; } public IEnumerable<ApplicationServiceUser> GetUnapprovedApplicationUsers() { var applicationServiceUserList = _userUow.GetUnapprovedUsers() .Select(x => new ApplicationServiceUser { Id = x.Id, Title = x.Title, Forename = x.Forename, Surname = x.Surname, Username = x.UserName, Email = x.Email, ClientCompanyId = x.ClientCompanyId, UpdatedByAuthUserId = x.UpdatedByAuthUserId, Position = x.Position, PhoneNumberDirect = x.PhoneNumber, PhoneNumberMobile = x.PhoneNumberMobile, Birthday = x.Birthday.ToString(), IsApproved = x.IsApproved, ApprovedByAuthUserId = x.ApprovedByAuthUserId, IsAdmin = x.IsAdmin, IsSignatory = x.IsSignatory, IsAuthorisedSignatory = x.IsAuthorisedSignatory }); return applicationServiceUserList; } public IEnumerable<ApplicationServiceUser> GetApplicationUsersOfCompany(int clientCompanyId) { var applicationServiceUserList = _userUow.GetUsersByCompanyId(clientCompanyId) .Select(x => new ApplicationServiceUser { Id = x.Id, Title = x.Title, Forename = x.Forename, Surname = x.Surname, Username = x.UserName, Email = x.Email, ClientCompanyId = x.ClientCompanyId, UpdatedByAuthUserId = x.UpdatedByAuthUserId, Position = x.Position, PhoneNumberDirect = x.PhoneNumber, PhoneNumberMobile = x.PhoneNumberMobile, Birthday = x.Birthday.ToString(), IsApproved = x.IsApproved, IsAdmin = x.IsAdmin, IsSignatory = x.IsSignatory, IsAuthorisedSignatory = x.IsAuthorisedSignatory }); return applicationServiceUserList; } public async Task<ApplicationServiceUser> 
GetApplicationUserAsync(string userId) { var applicationUser = await _userUow.GetUserByIdAsync(userId); if (applicationUser == null) return null; var applicationServiceUser = MapAppServiceUser(new ApplicationServiceUser(), applicationUser); return applicationServiceUser; } public async Task<IdentityResult> AddUnapprovedUserAsync(ApplicationServiceUser serviceUser) { var clientUser = MapClientUser(serviceUser); var validationResult = _userUow.ValidateUserDetails(new UserValidationModel { Email = clientUser.Email, Username = clientUser.Username, ClientCompanyId = clientUser.ClientCompanyId, ClientCompanyContactId = clientUser.ClientCompanyContactId, ValidateUserDetails = serviceUser.ValidateUserDetails }); if (!validationResult.Succeeded) return validationResult; clientUser.EmailConfirmed = false; clientUser.LastPasswordChangeDate = clientUser.UpdatedDateTime; var result = await _userUow.AddUserAsync(clientUser, _config.Get("GeneratedPassword")); return result; } public async Task<IdentityResult> SendUserNewPasswordEmailAsync(ApplicationServiceUser serviceUser, string clientCompanyName) { var user = MapAppUser(new ApplicationUser(), serviceUser); user.EmailConfirmed = false; user.LastPasswordChange = DateTime.Now; var result = await _emailService.SendUserNewPasswordEmailAsync(user.UserName, clientCompanyName); return result; } public async Task<IdentityResult> UpdateUserAsync(ApplicationServiceUser serviceUser) { ApplicationUser originalUser; if (serviceUser.FindUserByUsername.HasValue && serviceUser.FindUserByUsername.Value) { originalUser = await _userUow.GetUserByNameAsync(serviceUser.Username); } else { if (serviceUser.FindUserByEmail.HasValue && serviceUser.FindUserByEmail.Value) { originalUser = await _userUow.GetUserByEmailAsync(serviceUser.Email); } else { originalUser = await _userUow.GetUserByIdAsync(serviceUser.Id.ToString()); } } if (originalUser == null) return IdentityResult.Failed(new IdentityError { Description = "Model is missing a valid Id" }); ApplicationUser userToUpdate = MapAppUserForUpdate(originalUser, serviceUser); var validationResult = _userUow.ValidateUserDetails(new UserValidationModel { Email = userToUpdate.Email, Username = userToUpdate.UserName, ClientCompanyId = userToUpdate.ClientCompanyId, ClientCompanyContactId = userToUpdate.ClientCompanyContactId, ValidateUserDetails = serviceUser.ValidateUserDetails }); if (!validationResult.Succeeded) return validationResult; var result = await _userUow.UpdateUserAsync(userToUpdate, originalUser); return result; } public async Task<IdentityResult> UpdateUserContactAsync(ApplicationServiceUser serviceUser) { var updatedClientUser = MapClientUser(serviceUser); var originalClientUser = _userUow.GetClientUserModelByContactId(updatedClientUser.ClientCompanyContactId); if (originalClientUser == null) return IdentityResult.Failed(new IdentityError { Description = $"User is not tied to a valid ClientCompanyContactId {updatedClientUser.ClientCompanyContactId}. Database out of sync.", Code = "ContactNotFound" }); var updatedByAuthUser = _userUow.GetAuthUserByAuthUserId(serviceUser.UpdatedByAuthUserId); if (updatedByAuthUser == null) return IdentityResult.Failed(new IdentityError { Description = $"AuthUserId attempting update is not tied to a valid AuthUser {serviceUser.UpdatedByAuthUserId}. 
Aborting update.", Code = "InvalidAuthUser" }); var validationResult = _userUow.ValidateUserDetails(new UserValidationModel { Email = updatedClientUser.Email, Username = updatedClientUser.Username, ClientCompanyContactId = updatedClientUser.ClientCompanyContactId, ClientCompanyId = updatedClientUser.ClientCompanyId, ValidateUserDetails = serviceUser.ValidateUserDetails }); if (!validationResult.Succeeded) return validationResult; string description = string.Empty; var daysPeriod = _appSettingService.GetUserChangeDaysRequiredForApproval(); //Validate if mobile phone and email is being updated var mobileChangeResponse = _userUow.ValidateUserMobileChangeRequest(updatedClientUser, originalClientUser, daysPeriod); var emailChangeResponse = _userUow.ValidateUserEmailChangeRequest(updatedClientUser, originalClientUser, daysPeriod); if (mobileChangeResponse.InsertOrUpdateUserChangeRequest) { UserChangeRequest newMobileChangeRequest = new UserChangeRequest() { AuthUserId = updatedClientUser.AuthUserId, CurrentValue = originalClientUser.PhoneNumberMobile, ProposedValue = updatedClientUser.PhoneNumberMobile, ChangeValueType = "Telephone", ChangeDateTime = updatedClientUser.UpdatedDateTime, ChangedByAuthUserId = updatedClientUser.UpdatedByAuthUserId, ChangeStatus = "Pending" }; await _userUow.ProcessUserChangeRequest(newMobileChangeRequest); //retain previous values until approval updatedClientUser.PhoneNumberMobile = originalClientUser.PhoneNumberMobile; updatedClientUser.LastPhoneNumberMobileChangeDate = originalClientUser.LastPhoneNumberMobileChangeDate; } else { if (updatedClientUser.PhoneNumberMobile != originalClientUser.PhoneNumberMobile) updatedClientUser.LastPhoneNumberMobileChangeDate = updatedClientUser.UpdatedDateTime; } if (!string.IsNullOrEmpty(mobileChangeResponse.WarningMessage)) description += mobileChangeResponse.WarningMessage; if (emailChangeResponse.InsertOrUpdateUserChangeRequest) { UserChangeRequest newEmailChangeRequest = new UserChangeRequest() { AuthUserId = updatedClientUser.AuthUserId, CurrentValue = originalClientUser.Email, ProposedValue = updatedClientUser.Email, ChangeValueType = "Email", ChangeDateTime = updatedClientUser.UpdatedDateTime, ChangedByAuthUserId = updatedClientUser.UpdatedByAuthUserId, ChangeStatus = "Pending" }; await _userUow.ProcessUserChangeRequest(newEmailChangeRequest); //retain previous values until approval updatedClientUser.Email = originalClientUser.Email; updatedClientUser.LastEmailChangeDate = originalClientUser.LastEmailChangeDate; } else { if (updatedClientUser.Email != originalClientUser.Email) updatedClientUser.LastEmailChangeDate = updatedClientUser.UpdatedDateTime; } if (!string.IsNullOrEmpty(emailChangeResponse.WarningMessage)) { if (!string.IsNullOrEmpty(description)) { description += ","; } description += emailChangeResponse.WarningMessage; } IdentityResult result = await _userUow.UpdateUserAsync(updatedClientUser); if (mobileChangeResponse.SendUserChangeAlerts || emailChangeResponse.SendUserChangeAlerts) { await _emailService.SendEmailToDirectorsForApproval(); } return result; } public async Task<IdentityResult> UpdateMyAccountAsync(ApplicationServiceUser serviceUser) { ApplicationUser originalUser = await _userUow.GetUserByIdAsync(serviceUser.Id.ToString()); if (originalUser == null) return IdentityResult.Failed(new IdentityError { Description = $"Model is missing a valid Id: {serviceUser.Id}" }); ApplicationUser userToUpdate = MapAppUserFromMyAccount(originalUser, serviceUser); var result = await 
_userUow.UpdateUserAsync(userToUpdate, originalUser); return result; } public async Task<IList<IdentityResult>> ApproveUsersAsync(ApproveUsersRequest approveUserRequests, ICollection<ClientCompaniesModel> clientCompanies) { List<IdentityResult> resultList = new List<IdentityResult>(); foreach (var userIdToApprove in approveUserRequests.UserIdsToApprove) { IdentityResult result; ApplicationUser userToApprove = await _userUow.GetUserByIdAsync(userIdToApprove.ToString()); if (userToApprove != null) { userToApprove.IsApproved = true; userToApprove.ApprovedByAuthUserId = approveUserRequests.ApproverAuthUserId; result = await _userUow.ApproveUserAsync(userToApprove); if (result.Succeeded) { await _emailService.SendUserNewPasswordEmailAsync(userToApprove.UserName, GetClientCompanyName(userToApprove, clientCompanies)); } resultList.Add(result); } else { resultList.Add(IdentityResult.Failed(new IdentityError { Description = $"Invalid Id :{userIdToApprove}" })); } } return resultList; } public async Task<IList<IdentityResult>> AuthoriseSignatoryAsync(AuthoriseSignatoryRequest authoriseSignatoryRequests, ICollection<ClientCompaniesModel> clientCompanies) { List<IdentityResult> resultList = new List<IdentityResult>(); foreach (var userIdToAuthorise in authoriseSignatoryRequests.UserIdsToAuthorise) { IdentityResult result; ApplicationUser userToAuthorise = await _userUow.GetUserByIdAsync(userIdToAuthorise.ToString()); if (userToAuthorise != null) { userToAuthorise.IsAuthorisedSignatory = true; userToAuthorise.ApprovedByAuthUserId = authoriseSignatoryRequests.ApproverAuthUserId; result = await _userUow.AuthoriseSignatoryAsync(userToAuthorise); if (result.Succeeded) { await _emailService.SendUserNewPasswordEmailAsync(userToAuthorise.UserName, GetClientCompanyName(userToAuthorise, clientCompanies)); } resultList.Add(result); } else { resultList.Add(IdentityResult.Failed(new IdentityError { Description = $"Invalid Id :{userIdToAuthorise}" })); } } return resultList; } public async Task<IdentityResult> DeleteUserAsync(string userId) { ApplicationUser userToDelete = await _userUow.GetUserByIdAsync(userId); if (userToDelete == null) return IdentityResult.Failed(new IdentityError { Description = $"Invalid userId: {userId}" }); var result = await _userUow.DeleteUserAsync(userToDelete); return result; } public async Task<IdentityResult> DeleteUserContactAsync(int clientCompanyContactId) { var userToDelete = _userUow.GetUserByClientCompanyContactId(clientCompanyContactId); if (userToDelete == null) return IdentityResult.Failed(new IdentityError { Description = $"User is not tied to a valid ClientCompanyContactId: {clientCompanyContactId}. Database out of sync.", Code = "ContactNotFound" }); var result = await _userUow.DeleteUserAsync(userToDelete); return result; } public ApplicationServiceUser GetApplicationUserByAuthUserId(int authUserId) { return _userUow.GetApplicationUserByAuthUserId(authUserId) .Select(x => new ApplicationServiceUser { AuthUserId = x.AuthUserId, Email = x.Email, Forename = x.Forename, Surname = x.Surname, ClientCompanyContactId = x.ClientCompanyContactId }).SingleOrDefault(); } public AuthUser GetAuthUserById(int authUserId) { return _userUow.GetAuthUserByAuthUserId(authUserId); } public IList<UserModel> GetUserLoginDetails(IList<int> clientCompanyIDs) { IList<ActivityLogModel> list = GetActivityLogModel(clientCompanyIDs, "login"); IList<UserModel> userModelList = list.Select(x => new UserModel { AuthUserId = x.AuthUserId ?? 0, UserId = x.Id ?? 
0, ClientCompanyId = x.ApplicationUser.ClientCompanyId, LastLoginDate = x.LogDate, IsSuccesfullLogin = x.IsSuccess, IsOnline = x.IsOnline }).ToList(); return userModelList; } public IList<UserModel> GetUserLoginDetails(int authUserId) { var query = _userUow.GetUserActivityLog(authUserId); IList<UserModel> userModelList = query.Select(x => new UserModel { AuthUserId = x.AuthUserId ?? 0, UserId = x.Id ?? 0, ClientCompanyId = x.ApplicationUser.ClientCompanyId, LastLoginDate = x.LogDate, IsSuccesfullLogin = x.IsSuccess }).ToList(); return userModelList; } public AppUser GetAppUserById(int appUserId) { return _userUow.GetAppUserById(appUserId); } public AppUser GetFXDBAppUserById(int appUserId) { return _userUow.AppUserRepository.GetByPrimaryKey(appUserId); } public IList<ClientCompanyContactModel> GetAuthorisedSignatories(int clientCompanyId) { return _userUow .ApplicationUserRepo .GetQueryable(x => x.ClientCompanyId == clientCompanyId && !x.IsDeleted && x.IsAuthorisedSignatory) .Select(x => new ClientCompanyContactModel { ContactTitle = x.Title, ContactForename = x.Forename, ContactSurname = x.Surname, ContactEmail = x.Email, ContactTelephone = x.PhoneNumber }) .ToList(); } public async Task<ApproveUserChangeResponse> ApproveUserChangeRequest(ApproveUserChangeRequest approveUserChangeRequest) { var approveUserChangeResponse = await _userUow.ApproveUserChangeRequest(approveUserChangeRequest); if (approveUserChangeResponse.Result.Succeeded) { if (approveUserChangeResponse.UserChangeRequest != null && approveUserChangeResponse.SendNotification) { if (approveUserChangeResponse.UserChangeRequest.ChangeValueType == "Email") { string userPhoneNumber = _config.Get("Sms:DefaultPhoneNumber"); if (string.IsNullOrEmpty(userPhoneNumber)) { userPhoneNumber = _userUow.GetSendersPhoneNumber(approveUserChangeResponse.UserChangeRequest.AuthUserId); } if (!string.IsNullOrEmpty(userPhoneNumber)) { // creating the model var smsModel = new SmsModel() { PhoneNumber = userPhoneNumber, Message = string.Format("Security message: This confirms your email address has been updated to {0}. 
Please contact Argentex if you didn't request this change.", approveUserChangeResponse.UserChangeRequest.ProposedValue) }; // send the message to client _smsSender.SendMessage(smsModel); } } else if (approveUserChangeResponse.UserChangeRequest.ChangeValueType == "Telephone") { string userEmail = _config.Get("Emails:DefaultEmail"); if (string.IsNullOrEmpty(userEmail)) { userEmail = _userUow.GetSendersEmailAddress(approveUserChangeResponse.UserChangeRequest.AuthUserId); } await _emailService.SendMobileChangeEmailAsync(approveUserChangeResponse.UserChangeRequest.ProposedValue, userEmail); } } } return approveUserChangeResponse; } public UserChangeRequest GetUserChangeRequest(int userChangeRequestId) { return _userUow.GetUserChangeRequest(userChangeRequestId); } public IList<PendingApprovalUserChangeRequest> GetPendingChangeRequest() { return _userUow.GetPendingChangeRequest().ToList(); } public RequestOrigin GetRequestOrigin(IIdentity identity) { ClaimsIdentity claimsIdentity = identity as ClaimsIdentity; return claimsIdentity.GetRequestOrigin(); } public string GenerateUniqueUsername(string initialValue) => _userUow.GenerateUniqueUsername(initialValue); private static ApplicationUser MapAppUser(ApplicationUser appUser, ApplicationServiceUser serviceUser) { appUser.Id = serviceUser.Id; appUser.Title = serviceUser.Title; appUser.Forename = serviceUser.Forename; appUser.Surname = serviceUser.Surname; appUser.UserName = serviceUser.Username; appUser.Email = serviceUser.Email; appUser.ClientCompanyId = serviceUser.ClientCompanyId; appUser.UpdatedByAuthUserId = serviceUser.UpdatedByAuthUserId; appUser.Position = serviceUser.Position; appUser.PhoneNumber = serviceUser.PhoneNumberDirect; appUser.PhoneNumberMobile = serviceUser.PhoneNumberMobile; appUser.PhoneNumberOther = serviceUser.PhoneNumberOther; appUser.Birthday = DateTime.Parse(serviceUser.Birthday); appUser.Notes = serviceUser.Notes; appUser.IsApproved = serviceUser.IsApproved; appUser.IsSignatory = serviceUser.IsSignatory; appUser.IsAuthorisedSignatory = serviceUser.IsAuthorisedSignatory; appUser.PrimaryContact = serviceUser.PrimaryContact; appUser.IsAdmin = serviceUser.IsAdmin; appUser.ClientCompanyContactId = serviceUser.ClientCompanyContactId; appUser.LockoutEnabled = serviceUser.IsLockedOut ?? false; return appUser; } private static ApplicationUser MapAppUserForUpdate(ApplicationUser appUser, ApplicationServiceUser serviceUser) { //the id should not be set as it is the key and the update will fail with an error //The property 'Id' on entity type 'ApplicationUser' is part of a key and so cannot be modified or marked as modified. 
appUser.Title = serviceUser.Title; appUser.Forename = serviceUser.Forename; appUser.Surname = serviceUser.Surname; appUser.UserName = serviceUser.Username; appUser.Email = serviceUser.Email; appUser.ClientCompanyId = serviceUser.ClientCompanyId; appUser.UpdatedByAuthUserId = serviceUser.UpdatedByAuthUserId; appUser.Position = serviceUser.Position; appUser.PhoneNumber = serviceUser.PhoneNumberDirect; appUser.PhoneNumberMobile = serviceUser.PhoneNumberMobile; appUser.PhoneNumberOther = serviceUser.PhoneNumberOther; appUser.Birthday = DateTime.Parse(serviceUser.Birthday); appUser.IsApproved = serviceUser.IsApproved; appUser.IsSignatory = serviceUser.IsSignatory; appUser.IsAuthorisedSignatory = serviceUser.IsAuthorisedSignatory; appUser.PrimaryContact = serviceUser.PrimaryContact; appUser.IsAdmin = serviceUser.IsAdmin; appUser.ClientCompanyContactId = serviceUser.ClientCompanyContactId; appUser.LockoutEnabled = serviceUser.IsLockedOut ?? false; return appUser; } public ClientUserModel MapClientUser(ApplicationServiceUser serviceUser) { return new ClientUserModel() { Title = serviceUser.Title, Forename = serviceUser.Forename, Surname = serviceUser.Surname, Username = serviceUser.Username, Email = serviceUser.Email, ClientCompanyId = serviceUser.ClientCompanyId, ClientCompanyContactId = serviceUser.ClientCompanyContactId, AuthUserId = serviceUser.AuthUserId, UpdatedByAuthUserId = serviceUser.UpdatedByAuthUserId, Position = serviceUser.Position, PhoneNumberDirect = serviceUser.PhoneNumberDirect, PhoneNumberMobile = serviceUser.PhoneNumberMobile, PhoneNumberOther = serviceUser.PhoneNumberOther, Birthday = DateTime.TryParse(serviceUser.Birthday, out var date) ? (DateTime?)date : null, IsApproved = serviceUser.IsApproved, Notes = serviceUser.Notes, PrimaryContact = serviceUser.PrimaryContact, ASPNumber = serviceUser.ASPNumber, ASPCreationDate = serviceUser.ASPCreationDate, Authorized = serviceUser.Authorized, RecNotification = serviceUser.RecNotification, RecAmReport = serviceUser.RecAmReport, RecActivityReport = serviceUser.RecActivityReport, NiNumber = serviceUser.NiNumber, BloombergGpi = serviceUser.BloombergGpi, IsDeleted = serviceUser.IsDeleted, AssignedCategoryIds = serviceUser.AssignedCategoryIds, IsLockedOut = serviceUser.IsLockedOut, Comment = serviceUser.Comment, IsAdmin = serviceUser.IsAdmin, IsSignatory = serviceUser.IsSignatory, IsAuthorisedSignatory = serviceUser.IsAuthorisedSignatory, ApprovedByAuthUserId = serviceUser.ApprovedByAuthUserId, UpdatedDateTime = serviceUser.UpdatedDateTime }; } private static ApplicationServiceUser MapAppServiceUser(ApplicationServiceUser serviceUser, ApplicationUser appUser) { serviceUser.Id = appUser.Id; serviceUser.Title = appUser.Title; serviceUser.Forename = appUser.Forename; serviceUser.Surname = appUser.Surname; serviceUser.Username = appUser.UserName; serviceUser.Email = appUser.Email; serviceUser.ClientCompanyId = appUser.ClientCompanyId; serviceUser.ClientCompanyContactId = appUser.ClientCompanyContactId; serviceUser.UpdatedByAuthUserId = appUser.UpdatedByAuthUserId; serviceUser.Position = appUser.Position; serviceUser.PhoneNumberDirect = appUser.PhoneNumber; serviceUser.PhoneNumberMobile = appUser.PhoneNumberMobile; serviceUser.PhoneNumberOther = appUser.PhoneNumberOther; serviceUser.Birthday = appUser.Birthday.HasValue ? appUser.Birthday.Value.ToString("dd/MM/yyyy") : ""; serviceUser.IsApproved = appUser.IsApproved; serviceUser.PrimaryContact = appUser.PrimaryContact ?? 
false; serviceUser.IsAdmin = appUser.IsAdmin; serviceUser.IsSignatory = appUser.IsSignatory; serviceUser.IsAuthorisedSignatory = appUser.IsAuthorisedSignatory; return serviceUser; } private static ApplicationUser MapAppUserFromMyAccount(ApplicationUser appUser, ApplicationServiceUser serviceUser) { appUser.Id = serviceUser.Id; appUser.Forename = serviceUser.Forename; appUser.Surname = serviceUser.Surname; appUser.UserName = serviceUser.Username; appUser.Email = serviceUser.Email; appUser.ClientCompanyId = serviceUser.ClientCompanyId; appUser.UpdatedByAuthUserId = serviceUser.UpdatedByAuthUserId; return appUser; } private IList<ActivityLogModel> GetActivityLogModel(IList<int> clientCompanyIDs, string activityType) { List<ActivityLogModel> activityLogModelList = new List<ActivityLogModel>(); //get the login and logout groups for the companies List<IGrouping<int, ActivityLog>> loginGroupsCompanyList = _userUow.GetActivityLog(clientCompanyIDs, "login").ToList(); List<IGrouping<int, ActivityLog>> logoutGroupsCompanyList = _userUow.GetActivityLog(clientCompanyIDs, "logout").ToList(); foreach (var loginGroupCompany in loginGroupsCompanyList) { //get the corresponding logout group per company var logoutGroupCompany = logoutGroupsCompanyList.FirstOrDefault(x => x.Key == loginGroupCompany.Key); if (logoutGroupCompany == null) { //no corresponding logout group, then this login group is the one we need activityLogModelList.Add(CreateActivityLogModel(loginGroupCompany.ToList()[0], true)); //move to the next company login group continue; } List<ActivityLog> loginActivityLog = loginGroupCompany.ToList(); List<ActivityLog> logoutActivityLog = logoutGroupCompany.ToList(); List<ActivityLogModel> tempList = null; foreach (var login in loginActivityLog) { tempList = new List<ActivityLogModel>(); var logout = logoutActivityLog.FirstOrDefault(x => x.AuthUserId == login.AuthUserId && x.Id == login.Id && login.LogDate < x.LogDate); if (logout == null) { //found the login entity without a corresponding logout tempList.Add(CreateActivityLogModel(login, true)); break; } else { //remove the logout entity from the list logoutActivityLog.Remove(logout); } } if (tempList == null || tempList.Count == 0) { //add the login entity even if it's not active //the user has logged in, then logged out if (loginActivityLog.Count > 0) { //add the activity log, set to offline activityLogModelList.Add(CreateActivityLogModel(loginActivityLog[0], false)); } } else { activityLogModelList.AddRange(tempList); } } return activityLogModelList; } private string GetClientCompanyName(ApplicationUser user, ICollection<ClientCompaniesModel> clientCompanies) { return clientCompanies.Where(x => x.ClientCompanyId == user.ClientCompanyId).Select(x => x.ClientCompanyName).FirstOrDefault(); } private ActivityLogModel CreateActivityLogModel(ActivityLog activityLog, bool isOnline) { if (activityLog == null) return null; return new ActivityLogModel() { Id = activityLog.Id, ActivityLogId = activityLog.ActivityLogId, UserName = activityLog.UserName, LogDate = activityLog.LogDate, IsSuccess = activityLog.IsSuccess, PrimaryIP = activityLog.PrimaryIP, SecondaryIP = activityLog.SecondaryIP, ActivityId = activityLog.ActivityId, Activity = activityLog.Activity, AuthUserId = activityLog.AuthUserId, ApplicationUser = activityLog.ApplicationUser, IsOnline = isOnline }; } /// <summary> /// disposing == true coming from Dispose() /// disposig == false coming from finaliser /// </summary> /// <param name="disposing"></param> protected virtual void Dispose(bool 
disposing) { if (!_disposed) { if (disposing) { _userUow?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/ApplicationUserClaim.cs using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class ApplicationUserClaim : IdentityUserClaim<long> { } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess.Tests/TestUserUow.cs using System; using System.Linq; using System.Threading.Tasks; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Argentex.Core.UnitsOfWork.Users; using Argentex.Core.UnitsOfWork.Users.Model; using FluentAssertions; using FluentAssertions.Equivalency; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Diagnostics; using Microsoft.Extensions.DependencyInjection; using Moq; using SynetecLogger; using Xunit; namespace Argentex.Core.DataAccess.Tests { public class TestUserUow : IDisposable { private readonly UserUow _userUow; private readonly InMemoryDbContext<FXDB1Context> _fxdbContext = new InMemoryDbContext<FXDB1Context>("FXDB1_InMemory"); private readonly IServiceProvider _serviceProvider; private readonly Mock<ILogWrapper> _mockedLogger = new Mock<ILogWrapper>(); public TestUserUow() { var testContext = new UserIdentityTestContext(); _serviceProvider = testContext.ServiceProvider; _userUow = new UserUow(_fxdbContext.GetDbContext(),null, testContext.ServiceProvider.GetService<UserManager<ApplicationUser>>(), testContext.ServiceProvider.GetService<SignInManager<ApplicationUser>>(), testContext.ServiceProvider.GetService<SecurityDbContext>(), _mockedLogger.Object); } [Fact] public async Task Verify_User_Gets_Added_To_DbTables() { //Arrange const string userName = "TestUser"; var clientModel = new ClientUserModel { ApplicationId = 2, Username = userName, Email = <EMAIL>", Forename = userName, Surname = "<NAME>", Title = "Mr", ClientCompanyId = 5, UpdatedByAuthUserId = 2, UpdatedDateTime = DateTime.Now, Position = "Boss", PhoneNumberDirect = "441111111111", PhoneNumberMobile = "441111111111", PhoneNumberOther = string.Empty, PrimaryContact = false, Notes = string.Empty, Authorized = true, RecNotification = false, RecAmReport = true, RecActivityReport = false, NiNumber = string.Empty, BloombergGpi = string.Empty, AssignedCategoryIds = new int[0], IsLockedOut = false, IsAdmin = false, IsSignatory = false, IsAuthorisedSignatory = false, IsApproved = true, ApprovedByAuthUserId = 2 }; var password = "<PASSWORD>"; //Act var result = await _userUow.AddUserAsync(clientModel, password); //Assert result.Should().NotBeNull(); result.Succeeded.Should().BeTrue("We want the user to be successfully added"); result.Errors.Should().BeNullOrEmpty("We should have no errors"); using (var assertContext = _fxdbContext.GetDbContext()) using (var securityAssertContext = _serviceProvider.GetService<SecurityDbContext>()) { var authUser = assertContext.AuthUser.Single(u => u.UserName == userName); authUser.Should().BeEquivalentTo(clientModel, GetEquivalencyOptionsForInsertion, "An Authuser should be created as part of creating a Client User"); var authUserCreateLog = assertContext.LogAuthUser.SingleOrDefault(x => x.Id == authUser.Id && x.LogAction == "CREATE"); authUserCreateLog.Should().NotBeNull("A Create log should be created for the AuthUser"); authUserCreateLog.Should().BeEquivalentTo(clientModel, 
GetEquivalencyOptionsForInsertion, "AuthUserLog should match the details provided"); var authUserUpdateLog = assertContext.LogAuthUser.SingleOrDefault(x => x.Id == authUser.Id && x.LogAction == "UPDATE"); authUserUpdateLog.Should().NotBeNull("An Update log should be created for the AuthUser after the password gets updated"); authUserUpdateLog.Should().BeEquivalentTo(clientModel, GetEquivalencyOptionsForInsertion, "AuthUserLog should match the details provided"); var contactUser = assertContext.ClientCompanyContact.Single(c => c.AuthUserId == authUser.Id); contactUser.Should().BeEquivalentTo(clientModel, GetEquivalencyOptionsForInsertion, "An ClientCompanyContact should be created as part of creating a Client User"); var contactUserLog = assertContext.LogClientCompanyContact.SingleOrDefault(x => x.Id == contactUser.Id); contactUserLog.Should().NotBeNull("A create log should be created for the Client Company Contact"); contactUserLog.Should().BeEquivalentTo(clientModel, GetEquivalencyOptionsForInsertion, "Client Company contact log should match the details provided"); var appUser = securityAssertContext.Users.Single(u => u.UserName == userName); appUser.Should().BeEquivalentTo(clientModel, GetEquivalencyOptionsForInsertion, "An IdentityDB User should be created as part of creating a Client User"); } } [Fact] public async Task Verify_User_Gets_Deleted_From_DbTables() { //Arrange const string userName = "TestUser"; var clientModel = new ClientUserModel { ID = 1, Username = userName, Email = <EMAIL>", Forename = userName, Surname = "<NAME>", Title = "Mr", ClientCompanyId = 5, ClientCompanyContactId = 1, AuthUserId = 1, UpdatedByAuthUserId = 1, UpdatedDateTime = DateTime.Now, Position = "Boss", PhoneNumberDirect = "441111111111", PhoneNumberMobile = "441111111111", PhoneNumberOther = string.Empty, PrimaryContact = false, Notes = string.Empty, Authorized = true, RecNotification = false, RecAmReport = true, RecActivityReport = false, NiNumber = string.Empty, BloombergGpi = string.Empty, AssignedCategoryIds = new int[0], IsAdmin = false, IsSignatory = false, IsAuthorisedSignatory = false, IsApproved = true, ApprovedByAuthUserId = 2, ApplicationId = 2, IsLockedOut = false, IsDeleted = false }; var appUserModel = GetAppUserFromClientUserModel(clientModel); var authUserModel = GetAuthUserFromClientUserModel(clientModel); var clientCompanyContactModel = GetContactFromClientUserModel(clientModel); var mockContext = _fxdbContext.GetDbContext(); var mockSecurityContext = _serviceProvider.GetService<SecurityDbContext>(); _fxdbContext.AddEntities(authUserModel); _fxdbContext.AddEntities(clientCompanyContactModel); mockSecurityContext.Users.Add(appUserModel); mockSecurityContext.SaveChanges(); //Act var userResult = await _userUow.DeleteUserAsync(appUserModel); //Assert using (var assertContext = _fxdbContext.GetDbContext()) using (var securityAssertContext = _serviceProvider.GetService<SecurityDbContext>()) { var appUser = securityAssertContext.Users.Single(u => u.Id == appUserModel.Id); Assert.Equal(($"{userName}-{Guid.NewGuid().ToString()}").Length, appUser.UserName.Length); Assert.True(appUser.IsDeleted); Assert.True(appUser.LockoutEnabled); var authUser = assertContext.AuthUser.Single(u => u.Id == appUser.AuthUserId); Assert.Equal(appUser.UserName, authUser.UserName); Assert.True(authUser.IsLockedOut); var authUserCreateLog = assertContext.LogAuthUser.SingleOrDefault(x => x.Id == authUser.Id && x.LogAction == "DELETE"); authUserCreateLog.Should().NotBeNull("A delete log should be created for the 
AuthUser"); authUserCreateLog.Should().BeEquivalentTo(clientModel, x => { x = GetEquivalencyOptionsForInsertion(x); x.Excluding(f => f.Username); x.Excluding(f => f.IsLockedOut); return x; }, "AuthUserLog should match the details provided"); var contactUser = assertContext.ClientCompanyContact.Single(c => c.Id == appUser.ClientCompanyContactId); Assert.True(contactUser.IsDeleted); var contactUserLog = assertContext.LogClientCompanyContact.SingleOrDefault(x => x.Id == contactUser.Id); contactUserLog.Should().NotBeNull("A delete log should be created for the Client Company Contact"); contactUserLog.Should().BeEquivalentTo(clientModel, x => { x = GetEquivalencyOptionsForInsertion(x); x.Excluding(f => f.IsDeleted); return x; }, "Client Company contact log should match the details provided"); } } private static EquivalencyAssertionOptions<ClientUserModel> GetEquivalencyOptionsForInsertion(EquivalencyAssertionOptions<ClientUserModel> equivalencyOptions) { equivalencyOptions.Excluding(x => x.LastPasswordChangeDate); //Autogenerated equivalencyOptions.Excluding(x => x.CreateDate); //AutoGenerated equivalencyOptions.Excluding(x => x.FailedPasswordAttemptWindowStart); //autogenerated equivalencyOptions.Excluding(x => x.AuthUserId); //autogenerated equivalencyOptions.Excluding(x => x.Fullname); //auto generated equivalencyOptions.Excluding(x => x.PasswordHash); //generated by the application equivalencyOptions.ExcludingMissingMembers(); //miss out the fields not being used return equivalencyOptions; } private AuthUser GetAuthUserFromClientUserModel(ClientUserModel clientModel) { return new AuthUser { Id = clientModel.AuthUserId, UserName = clientModel.Username, Password = <PASSWORD>, Email = clientModel.Email ?? "NoEmail", IsApproved = clientModel.IsApproved, IsLockedOut = clientModel.IsLockedOut ?? 
false, Comment = clientModel.Comment, ApplicationId = clientModel.ApplicationId }; } private ClientCompanyContact GetContactFromClientUserModel(ClientUserModel clientModel) { return new ClientCompanyContact { Id = clientModel.ClientCompanyContactId, AuthUserId = clientModel.AuthUserId, ClientCompanyId = clientModel.ClientCompanyId, Title = clientModel.Title, Forename = clientModel.Forename, Surname = clientModel.Surname, Fullname = $"{clientModel.Title} {clientModel.Forename} {clientModel.Surname}", Email = clientModel.Email, LastEmailChangeDate = clientModel.LastEmailChangeDate, Position = clientModel.Position, PrimaryContact = clientModel.PrimaryContact, TelephoneDirect = clientModel.PhoneNumberDirect, TelephoneMobile = clientModel.PhoneNumberMobile, TelephoneOther = clientModel.PhoneNumberOther, LastTelephoneChangeDate = clientModel.LastPhoneNumberMobileChangeDate, Birthday = clientModel.Birthday, Aspnumber = clientModel.ASPNumber, AspcreationDate = clientModel.ASPCreationDate, UpdatedByAuthUserId = clientModel.UpdatedByAuthUserId, UpdatedDateTime = clientModel.UpdatedDateTime, Notes = clientModel.Notes, Authorized = clientModel.Authorized, RecNotifications = clientModel.RecNotification, RecAmreport = clientModel.RecAmReport, RecActivityReport = clientModel.RecActivityReport, BloombergGpi = clientModel.BloombergGpi, NiNumber = clientModel.NiNumber, IsDeleted = clientModel.IsDeleted }; } private ApplicationUser GetAppUserFromClientUserModel(ClientUserModel clientModel) { return new ApplicationUser { Id = clientModel.ID, AuthUserId = clientModel.AuthUserId, ClientCompanyContactId = clientModel.ClientCompanyContactId, UserName = clientModel.Username, Email = clientModel.Email, Forename = clientModel.Forename, Surname = clientModel.Surname, Title = clientModel.Title, ClientCompanyId = clientModel.ClientCompanyId, UpdatedByAuthUserId = clientModel.UpdatedByAuthUserId, LastUpdate = clientModel.UpdatedDateTime, Position = clientModel.Position, PhoneNumber = clientModel.PhoneNumberDirect, PhoneNumberMobile = clientModel.PhoneNumberMobile, PhoneNumberOther = clientModel.PhoneNumberOther, PrimaryContact = clientModel.PrimaryContact, Notes = clientModel.Notes, LockoutEnabled = clientModel.IsLockedOut ?? 
false, IsAdmin = clientModel.IsAdmin, IsSignatory = clientModel.IsSignatory, IsAuthorisedSignatory = clientModel.IsAuthorisedSignatory, IsApproved = clientModel.IsApproved, ApprovedByAuthUserId = clientModel.ApprovedByAuthUserId, SecurityStamp = Guid.NewGuid().ToString(), ConcurrencyStamp = Guid.NewGuid().ToString() }; } public void Dispose() { _fxdbContext.GetDbContext().Database.EnsureDeleted(); } } public class UserIdentityTestContext { public IServiceProvider ServiceProvider { get; } public UserIdentityTestContext() { var serviceCollection = new ServiceCollection(); serviceCollection.AddDbContext<SecurityDbContext>(options => { options.UseInMemoryDatabase("IdentityDB_InMemory"); options.UseOpenIddict(); options.ConfigureWarnings(w => w.Ignore(InMemoryEventId.TransactionIgnoredWarning)); }); serviceCollection.AddIdentity<ApplicationUser, ApplicationRole>() .AddEntityFrameworkStores<SecurityDbContext>() .AddDefaultTokenProviders(); ServiceProvider = serviceCollection.BuildServiceProvider(); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess.Tests/InMemoryDbContext.cs using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Text; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Diagnostics; namespace Argentex.Core.DataAccess.Tests { public class InMemoryDbContext<T> where T: DbContext { public InMemoryDbContext(string dbName) { DbContextOptionsBuilder = new DbContextOptionsBuilder<T>() .UseInMemoryDatabase(dbName) .ConfigureWarnings(w => w.Ignore(InMemoryEventId.TransactionIgnoredWarning)); InMemoryDatabaseName = dbName; } public DbContextOptionsBuilder<T> DbContextOptionsBuilder { get; } public string InMemoryDatabaseName { get; } public T GetDbContext() => (T)Activator.CreateInstance(typeof(T), DbContextOptionsBuilder.Options); public void AddEntities<TEntity>(IEnumerable<TEntity> entities) where TEntity: class { using (var context = GetDbContext()) { context.Set<TEntity>().AddRange(entities); context.SaveChanges(); } } public void AddEntities<TEntity>(TEntity entity) where TEntity : class { using (var context = GetDbContext()) { context.Set<TEntity>().Add(entity); context.SaveChanges(); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Fix/IBarxFxService.cs using Argentex.Core.Service.Models.Fix; using System; using System.Threading.Tasks; namespace Argentex.Core.Service.Fix { public interface IBarxFxService : IDisposable { Task<FixQuoteResponseModel> GetQuoteAsync(FixQuoteRequestModel quoteRequest); Task<FixNewOrderResponseModel> NewOrderSingleAsync(FixNewOrderRequestModel dealRequest); void SetHttpTimeout(TimeSpan timeout); } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Countries/CountryUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System.Linq; namespace Argentex.Core.UnitsOfWork.Countries { public class CountryUow : BaseUow, ICountryUow { private IGenericRepo<Country> _countryRepository; private IGenericRepo<Country> CountryRepository => _countryRepository = _countryRepository ?? 
new GenericRepo<Country>(Context); public CountryUow(FXDB1Context context) : base(context) { } public IQueryable<Country> GetCountries() { return CountryRepository .GetQueryable(); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientSiteAction2ClientCompanyOpi.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientSiteAction2ClientCompanyOpi { public long Id { get; set; } public long ClientSiteActionId { get; set; } public int ClientCompanyOpiid { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } public ClientSiteAction ClientSiteAction { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Extensions/ClaimsIdentityExtensions.cs using System.Security.Claims; using Argentex.Core.Service.Enums; namespace Argentex.Core.Api.Extensions { public static class ClaimsIdentityExtensions { public static RequestOrigin GetRequestOrigin(this ClaimsIdentity claimsIdentity) { const string grantType = "grantType"; if (claimsIdentity.HasClaim(claim => claim.Type == grantType && claim.Value == "client_credentials")) { return RequestOrigin.ArgentexTrader; } if (claimsIdentity.HasClaim(claim => claim.Type == grantType && claim.Value == "password")) { return RequestOrigin.ClientSite; } return RequestOrigin.Unknown; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthRole.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthRole { public AuthRole() { AuthRolePermission = new HashSet<AuthRolePermission>(); AuthUserRole = new HashSet<AuthUserRole>(); } public int Id { get; set; } public string Description { get; set; } public ICollection<AuthRolePermission> AuthRolePermission { get; set; } public ICollection<AuthUserRole> AuthUserRole { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/TradeNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class TradeNotFoundException : Exception { public TradeNotFoundException() : base() { } public TradeNotFoundException(string message) : base(message) { } public TradeNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/Model/UserValidationModel.cs namespace Argentex.Core.UnitsOfWork.Users.Model { public class UserValidationModel { public string Username { get; set; } public string Email { get; set; } public int ClientCompanyId { get; set; } public int ClientCompanyContactId { get; set; } public bool? 
ValidateUserDetails { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Http/HttpService.cs using Newtonsoft.Json; using System; using System.Collections.Generic; using System.Net.Http; using System.Net.Http.Headers; using System.Threading.Tasks; namespace Argentex.ClientSite.Service.Http { public class HttpService : IHttpService { private HttpMethod _method = null; private string _baseUri = ""; private string _requestUri = ""; private HttpContent _content = null; private string _bearerToken = ""; private string _acceptHeader = ""; protected readonly HttpClient _httpClient; protected bool _disposed; public HttpService(HttpClient httpClient) { _httpClient = httpClient; } public string GenerateUri<T>(string baseUri, T obj) { var result = new List<string>(); foreach (var property in typeof(T).GetProperties()) { result.Add(char.ToLower(property.Name[0]) + property.Name.Substring(1) + "=" + property.GetValue(obj)); } return string.Join("?", baseUri, string.Join("&", result)); } public async Task<HttpResponseMessage> SendAsync() { var request = new HttpRequestMessage { Method = _method, RequestUri = new Uri(_requestUri), }; if (_content != null) request.Content = _content; if (!string.IsNullOrEmpty(_bearerToken)) request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", _bearerToken); if (!string.IsNullOrEmpty(_acceptHeader)) request.Headers.Accept.Add( new MediaTypeWithQualityHeaderValue(_acceptHeader)); return await _httpClient.SendAsync(request); } public async Task<HttpResponseMessage> HttpGetAsync<T>(string url, T requestModel) { if (url == null || requestModel == null) throw new ArgumentException("url or requestModel parameters cannot be null "); return await _httpClient.GetAsync(new Uri(GenerateUri(url, requestModel))); } public async Task<HttpResponseMessage> HttpGetAsync(string url) { if (url == null) throw new ArgumentException("url or requestModel parameters cannot be null "); return await _httpClient.GetAsync(new Uri(url)); } public async Task<HttpResponseMessage> HttpPostAsync<T>(string url, T requestModel) { if (url == null || requestModel == null) throw new ArgumentException("url or requestModel parameters cannot be null "); return await _httpClient.PostAsync(url, new JsonContent(requestModel)); } public async Task<T> GetResponseObject<T>(HttpResponseMessage message) { return JsonConvert.DeserializeObject<T>( await message.Content.ReadAsStringAsync()); } public async Task<string> GetResponseAsString(HttpResponseMessage message) { return await message.Content.ReadAsStringAsync(); } public void AddMethod(HttpMethod method) { _method = method; } public void AddRequestUri(string requestUri) { _requestUri = requestUri; } public void AddContent(HttpContent content) { _content = content; } public void AddBearerToken(string bearerToken) { _bearerToken = bearerToken; } public void AddAcceptHeader(string acceptHeader) { _acceptHeader = acceptHeader; } public void AddTimeout(TimeSpan timeout) { _httpClient.Timeout = timeout; } /// <summary> /// disposing = true coming from Dispose() /// disposig == false coming from finaliser /// </summary> /// <param name="disposing"></param> protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _httpClient?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/AppSettings/AppSettingService.cs using Argentex.Core.Service.Helpers; using Argentex.Core.UnitsOfWork.AppSettings; using 
System; using System.Collections.Generic; namespace Argentex.Core.Service.AppSettings { public class AppSettingService : IAppSettingService { private readonly IAppSettingUow _appSettingUow; private bool _disposed; public AppSettingService(IAppSettingUow uow) { _appSettingUow = uow; } public int GetStreamingQuoteDuration() { var entity = _appSettingUow.GetAppSetting("StreamingQuoteDuration"); if (entity == null) { throw new KeyNotFoundException("StreamingQuoteDuration is not set in AppSetting"); } int duration; if (!int.TryParse(entity.SettingValue, out duration)) { throw new FormatException("StreamingQuoteDuration string cannot be converted to int"); } return duration; } public int GetTimeOut() { var entity = _appSettingUow.GetAppSetting("FixFXTimeOut"); if (entity == null) { throw new KeyNotFoundException("FixFXTimeOut is not set in AppSetting"); } if (!int.TryParse(entity.SettingValue, out int timeOut)) { throw new FormatException("FixFXTimeOut string cannot be converted to int"); } return timeOut; } public string GetBarxFXFixQuoteUrl() { var entity = _appSettingUow.GetAppSetting("SynetecFixGetQuoteUrl"); if (entity == null) { throw new KeyNotFoundException("SynetecFixGetQuoteUrl is not set in AppSetting"); } if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("SynetecFixGetQuoteUrl string is not valid URL"); } return entity.SettingValue; } public string GetBarxFXFixNewOrderUrl() { var entity = _appSettingUow.GetAppSetting("SynetecFixNewOrderUrl"); if (entity == null) { throw new KeyNotFoundException("SynetecFixNewOrderUrl is not set in AppSetting"); } if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("SynetecFixNewOrderUrl string is not valid URL"); } return entity.SettingValue; } public string GetEmirUtiCode() { var entity = _appSettingUow.GetAppSetting("EMIR_FXForwardTrade_UTI_Prefix"); if (entity == null) { throw new KeyNotFoundException("EMIR_FXForwardTrade_UTI_Prefix is not set in AppSetting"); } if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("EMIR_FXForwardTrade_UTI_Prefix value is empty"); } return entity.SettingValue; } public int GetFixTimeout() { var entity = _appSettingUow.GetAppSetting("FixFXTimeOut"); if (entity == null) { throw new KeyNotFoundException("FixFXTimeOut is not set in AppSetting"); } if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("FixFXTimeOut value is empty"); } int timeout = int.Parse(entity.SettingValue); return timeout; } public int GetStreamingDuration() { var entity = _appSettingUow.GetAppSetting("StreamingQuoteDuration"); if (entity == null) { throw new KeyNotFoundException("StreamingQuoteDuration is not set in AppSetting"); } if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("StreamingQuoteDuration value is empty"); } int streamingDuration = int.Parse(entity.SettingValue); return streamingDuration; } public int GetTradeNotificationCounter() { var entity = _appSettingUow.GetAppSetting("TradeNotificationCounter"); if (entity == null) { throw new KeyNotFoundException("TradeNotificationCounter is not set in AppSetting"); } if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("TradeNotificationCounter value is null"); } return int.Parse(entity.SettingValue); } public int GetSpreadAdjustmentValidity() { var entity = _appSettingUow.GetAppSetting("SpreadAdjustmentValidityMinutes"); if (entity == null) { throw new KeyNotFoundException("SpreadAdjustmentValidityMinutes is not set in AppSetting"); 
} if (string.IsNullOrWhiteSpace(entity.SettingValue)) { throw new FormatException("SpreadAdjustmentValidityMinutes value is null"); } return int.Parse(entity.SettingValue); } public int GetUserChangeDaysRequiredForApproval() { var entity = _appSettingUow.GetAppSetting(SystemConstant.Setting_UserChangeDaysRequiresForApproval); if (entity == null || string.IsNullOrWhiteSpace(entity.SettingValue)) return SystemConstant.Setting_UserChangeDaysRequiresForApproval_Default; else return int.Parse(entity.SettingValue); } protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _appSettingUow?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/ClientCompany/ClientCompanyAccountsService.cs using System; using System.Collections.Generic; using System.Linq; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.ClientSiteAction; using Argentex.Core.Service.Enums; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Models.ClearingCodePrefix; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Settlements; using Argentex.Core.UnitsOfWork.ClientCompanies; namespace Argentex.Core.Service.ClientCompanies { public class ClientCompanyAccountsService : IClientCompanyAccountsService { private readonly IClientCompanyAccountsUoW _clientCompanyAccountsUow; private readonly IClientSiteActionService _clientSiteActionService; private readonly ISettlementService _settlementService; private bool _disposed; public ClientCompanyAccountsService(IClientCompanyAccountsUoW clientCompanyAccountsUoW, IClientSiteActionService clientSiteActionService, ISettlementService settlementService) { _clientCompanyAccountsUow = clientCompanyAccountsUoW; _clientSiteActionService = clientSiteActionService; _settlementService = settlementService; } public IEnumerable<ClientCompanyAccountModel> GetClientCompanyAccounts(int clientCompanyId) { var clientCompany = _clientCompanyAccountsUow.ClientCompanyRepository.GetByPrimaryKey(clientCompanyId); if (clientCompany == null) throw new ClientCompanyNotFoundException($"Client company with id {clientCompanyId} does not exist"); var clientCompanyAccounts = _clientCompanyAccountsUow.ClientCompanyOpiRepository .Get(x => x.ClientCompanyId == clientCompanyId && x.Authorised && !x.Rejected && !x.IsDeleted); return clientCompanyAccounts.Any() ? clientCompanyAccounts.Select(x => new ClientCompanyAccountModel { ClientCompanyId = x.ClientCompanyId, ClientCompanyOpiId = x.Id, AccountName = x.AccountName, AccountNumber = !string.IsNullOrWhiteSpace(x.AccountNumber) ? x.AccountNumber : x.Iban, Currency = _clientCompanyAccountsUow.CurrencyRepository.GetByPrimaryKey(x.CurrencyId)?.Code }).OrderBy(x => x.AccountName).ToList() : new List<ClientCompanyAccountModel>(); } public ClientCompanyAccountModel GetClientCompanyAccount(int clientCompanyOpiId) { return _clientCompanyAccountsUow .GetClientCompanyAccountQueryable(clientCompanyOpiId) .Select(x => new ClientCompanyAccountModel { ClientCompanyOpiId = x.Id, ClientCompanyId = x.ClientCompanyId, CurrencyId = x.CurrencyId, Currency = x.Currency.Code, CountryId = x.CountryId ?? 0, Country = x.Country.Name, Description = x.Description, BankName = x.BankName, BankAddress = x.BankAddress, ClearingCodePrefixId = x.ClearingCodePrefixId ??
0, AccountNumber = x.AccountNumber, AccountName = x.AccountName, SortCode = x.SortCode, SwiftCode = x.SwiftCode, Iban = x.Iban, IsDefault = x.ClientCompanyCurrencyDefaultOpi .Select(y => y.ClientCompanyOpiid == clientCompanyOpiId) .SingleOrDefault(), Approved = x.Authorised, BeneficiaryName = x.BeneficiaryName, BeneficiaryAddress = x.BeneficiaryAddress, Reference = x.Reference, UpdatedByAuthUserId = x.UpdatedByAuthUserId }).FirstOrDefault(); } public void AddSettlementAccount(SettlementAccountModel settlementAccount) { var clientCompanyOpi = MapClientCompanyOpi( new ClientCompanyOpi { CreatedDate = DateTime.UtcNow, IsOwnAccount = false, Rejected = false, Authorised = false, IsCompanyAccount = false }, settlementAccount); _clientCompanyAccountsUow.AddClientCompanyOpi(clientCompanyOpi); _clientSiteActionService.LogActionNewOpi(settlementAccount.UpdatedByAuthUserId, clientCompanyOpi.Id); } public IEnumerable<ClearingCodePrefixModel> GetClearingCodePrefixes() { return _clientCompanyAccountsUow.GetClearingPrefixCodes() .Select(x => new ClearingCodePrefixModel { Id = x.Id, Code = x.Code }) .ToList(); } public void EditSettlementAccount(SettlementAccountModel settlementAccount) { var opi = _clientCompanyAccountsUow.GetClientCompanyAccount(settlementAccount.ClientCompanyOpiId); var mappedChanges = MapClientCompanyOpi(opi, settlementAccount); _clientCompanyAccountsUow.UpdateAccount(mappedChanges); } /// <summary> /// Mark Client Company Account as deleted by setting IsDeleted property to True /// and add "DELETED" to the name /// </summary> /// <param name="clientCompanyOpiId"></param> /// <param name="authUserId"></param> public void DeleteSettlementAccount(int clientCompanyOpiId, int authUserId) { var opi = _clientCompanyAccountsUow.GetClientCompanyAccount(clientCompanyOpiId); opi.IsDeleted = true; opi.AccountName = $"{opi.AccountName} DELETED"; opi.UpdatedByAuthUserId = authUserId; // Mark Settlement Account as deleted _clientCompanyAccountsUow.UpdateAccount(opi); _clientCompanyAccountsUow.GetSettlementIDs(clientCompanyOpiId).ToList() .ForEach(x => _settlementService.DeleteAssignedSettlements(x)); } public int GetNumberOfAssociatedTrades(int clientCompanyOpiId) { return _clientCompanyAccountsUow.GetAssociatedTradesCount(clientCompanyOpiId, (int)TradeStatus.Delivered); } public void SetAccountAsDefault(SetDefaultAccountModel model) { var opi = _clientCompanyAccountsUow.GetClientCompanyAccount(model.ClientCompanyOpiId); var defaultAccountsForCurrency = _clientCompanyAccountsUow.GetClientCompanyDefaultAccount(opi.ClientCompanyId, opi.CurrencyId).ToList(); foreach (var account in defaultAccountsForCurrency) { _clientCompanyAccountsUow.RemoveDefaultAccount(account); } var defaultAccount = CreateDefaultAccountEntity(opi, model.AuthUserId); _clientCompanyAccountsUow.AddDefaultAccount(defaultAccount); } public IEnumerable<FXForwardTrade2OPIModel> GetTradeOPIs(string tradeCode) { const string ClientApplicationName = "ArgentexClient"; var query = _clientCompanyAccountsUow.GetTradeOPIs(tradeCode); /* CreatedByAuthUserName: If the user is a client we get the name from the ClientCompanyContact If the user is an Argentex user we get the name from the AppUser */ return query.Select(x => new FXForwardTrade2OPIModel { ID = x.Id, FXForwardTradeCode = x.FxforwardTradeCodeNavigation.Code, AccountID = x.ClientCompanyOpi.Id, AccountName = x.ClientCompanyOpi.AccountName, Amount = x.Amount, CurrencyCode = x.FxforwardTradeCodeNavigation.IsBuy ? 
x.FxforwardTradeCodeNavigation.Lhsccy.Code : x.FxforwardTradeCodeNavigation.Rhsccy.Code, Details = x.Details, CreatedByAuthUserID = x.CreatedByAuthUser.Id, CreatedByAuthUserName = x.CreatedByAuthUser.Application.Description == ClientApplicationName ? GetClientUserName(x.CreatedByAuthUser.ClientCompanyContactAuthUser) : GetArgentexUserName(x.CreatedByAuthUser.AppUser), CreatedDateTime = x.CreatedDateTime, IsClient = x.CreatedByAuthUser.Application.Description == ClientApplicationName }).ToList(); } public void AddTradeOPI(FXForwardTrade2OPIModel model) { FxforwardTrade2Opi fxforwardTrade2Opi = MapFxforwardTrade2Opi(new FxforwardTrade2Opi(), model); _clientCompanyAccountsUow.AddTradeOPI(fxforwardTrade2Opi); } private FxforwardTrade2Opi MapFxforwardTrade2Opi(FxforwardTrade2Opi fxforwardTrade2Opi, FXForwardTrade2OPIModel model) { fxforwardTrade2Opi.ClientCompanyOpiid = model.AccountID; fxforwardTrade2Opi.CreatedByAuthUserId = model.CreatedByAuthUserID; fxforwardTrade2Opi.CreatedDateTime = model.CreatedDateTime; fxforwardTrade2Opi.Amount = model.Amount; fxforwardTrade2Opi.TradeValueDate = model.ValueDate; fxforwardTrade2Opi.Details = model.Details; fxforwardTrade2Opi.FxforwardTradeCode = model.FXForwardTradeCode; return fxforwardTrade2Opi; } private string GetClientUserName(ICollection<ClientCompanyContact> clientCompanyContactAuthUser) { if (clientCompanyContactAuthUser != null && clientCompanyContactAuthUser.Count > 0) { ClientCompanyContact clientCompanyContact = clientCompanyContactAuthUser.FirstOrDefault(); return $"{clientCompanyContact.Forename} {clientCompanyContact.Surname}"; } return string.Empty; } private string GetArgentexUserName(ICollection<AppUser> appUser) { if (appUser != null && appUser.Count > 0) { return appUser.FirstOrDefault().FullName; } return string.Empty; } private static ClientCompanyCurrencyDefaultOpi CreateDefaultAccountEntity(ClientCompanyOpi opi, int authUserId) { return new ClientCompanyCurrencyDefaultOpi { ClientCompanyId = opi.ClientCompanyId, CurrencyId = opi.CurrencyId, ClientCompanyOpiid = opi.Id, UpdateAuthUserId = authUserId }; } private static ClientCompanyOpi MapClientCompanyOpi(ClientCompanyOpi clientCompanyOpi, SettlementAccountModel settlementAccount) { clientCompanyOpi.Description = settlementAccount.Description ?? ""; clientCompanyOpi.CurrencyId = settlementAccount.CurrencyId; clientCompanyOpi.CountryId = settlementAccount.CountryId; clientCompanyOpi.BankName = settlementAccount.BankName ?? ""; clientCompanyOpi.BankAddress = settlementAccount.BankAddress; clientCompanyOpi.AccountName = settlementAccount.AccountName; clientCompanyOpi.AccountNumber = settlementAccount.AccountNumber.ToString(); clientCompanyOpi.ClearingCodePrefixId = settlementAccount.ClearingCodePrefixId == 0 ? null : settlementAccount.ClearingCodePrefixId; clientCompanyOpi.SortCode = settlementAccount.SortCode; clientCompanyOpi.Reference = settlementAccount.Reference ?? 
""; clientCompanyOpi.SwiftCode = settlementAccount.SwiftCode; clientCompanyOpi.Iban = settlementAccount.Iban; clientCompanyOpi.BeneficiaryName = settlementAccount.BeneficiaryName; clientCompanyOpi.BeneficiaryAddress = settlementAccount.BeneficiaryAddress; clientCompanyOpi.CreatedDate = clientCompanyOpi.CreatedDate; clientCompanyOpi.CreatedByAuthUserId = settlementAccount.UpdatedByAuthUserId; clientCompanyOpi.UpdatedDate = DateTime.UtcNow; clientCompanyOpi.UpdatedByAuthUserId = settlementAccount.UpdatedByAuthUserId; clientCompanyOpi.ClientCompanyId = settlementAccount.ClientCompanyId; clientCompanyOpi.IsOwnAccount = clientCompanyOpi.IsOwnAccount; clientCompanyOpi.Rejected = clientCompanyOpi.Rejected; clientCompanyOpi.Authorised = clientCompanyOpi.Authorised; clientCompanyOpi.IsCompanyAccount = clientCompanyOpi.IsCompanyAccount; return clientCompanyOpi; } protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _clientCompanyAccountsUow?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Migrations/20180607134327_Initial.cs using System; using Microsoft.EntityFrameworkCore.Metadata; using Microsoft.EntityFrameworkCore.Migrations; namespace Argentex.Core.Identity.DataAccess.Migrations { public partial class Initial : Migration { protected override void Up(MigrationBuilder migrationBuilder) { migrationBuilder.CreateTable( name: "Activity", columns: table => new { ActivityId = table.Column<int>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), Type = table.Column<string>(maxLength: 256, nullable: false) }, constraints: table => { table.PrimaryKey("PK_Activity", x => x.ActivityId); }); migrationBuilder.CreateTable( name: "CountryGroup", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), Description = table.Column<string>(maxLength: 128, nullable: false), Sequence = table.Column<int>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_CountryGroup", x => x.Id); }); migrationBuilder.CreateTable( name: "OpenIddictApplications", columns: table => new { ClientId = table.Column<string>(nullable: false), ClientSecret = table.Column<string>(nullable: true), ConcurrencyToken = table.Column<string>(nullable: true), ConsentType = table.Column<string>(nullable: true), DisplayName = table.Column<string>(nullable: true), Id = table.Column<string>(nullable: false), Permissions = table.Column<string>(nullable: true), PostLogoutRedirectUris = table.Column<string>(nullable: true), Properties = table.Column<string>(nullable: true), RedirectUris = table.Column<string>(nullable: true), Type = table.Column<string>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_OpenIddictApplications", x => x.Id); }); migrationBuilder.CreateTable( name: "OpenIddictScopes", columns: table => new { ConcurrencyToken = table.Column<string>(nullable: true), Description = table.Column<string>(nullable: true), DisplayName = table.Column<string>(nullable: true), Id = table.Column<string>(nullable: false), Name = table.Column<string>(nullable: false), Properties = table.Column<string>(nullable: true), Resources = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_OpenIddictScopes", x => x.Id); }); migrationBuilder.CreateTable( name: "Report", columns: table => new { ReportId = 
table.Column<long>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), Description = table.Column<string>(maxLength: 128, nullable: true) }, constraints: table => { table.PrimaryKey("PK_Report", x => x.ReportId); }); migrationBuilder.CreateTable( name: "Role", columns: table => new { Id = table.Column<long>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), Name = table.Column<string>(maxLength: 256, nullable: true), NormalizedName = table.Column<string>(maxLength: 256, nullable: true), ConcurrencyStamp = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_Role", x => x.Id); }); migrationBuilder.CreateTable( name: "User", columns: table => new { Id = table.Column<long>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), UserName = table.Column<string>(maxLength: 256, nullable: true), NormalizedUserName = table.Column<string>(maxLength: 256, nullable: true), Email = table.Column<string>(maxLength: 256, nullable: true), NormalizedEmail = table.Column<string>(maxLength: 256, nullable: true), EmailConfirmed = table.Column<bool>(nullable: false), PasswordHash = table.Column<string>(nullable: true), SecurityStamp = table.Column<string>(nullable: true), ConcurrencyStamp = table.Column<string>(nullable: true), PhoneNumber = table.Column<string>(nullable: true), PhoneNumberConfirmed = table.Column<bool>(nullable: false), TwoFactorEnabled = table.Column<bool>(nullable: false), LockoutEnd = table.Column<DateTimeOffset>(nullable: true), LockoutEnabled = table.Column<bool>(nullable: false), AccessFailedCount = table.Column<int>(nullable: false), AuthUserId = table.Column<int>(nullable: false), Title = table.Column<string>(maxLength: 16, nullable: false), Forename = table.Column<string>(maxLength: 256, nullable: false), Surname = table.Column<string>(maxLength: 100, nullable: false), ClientCompanyId = table.Column<int>(nullable: false), ClientCompanyContactId = table.Column<int>(nullable: false), UpdatedByAuthUserId = table.Column<int>(nullable: false), PhoneNumberMobile = table.Column<string>(maxLength: 128, nullable: true), PhoneNumberOther = table.Column<string>(maxLength: 128, nullable: true), LastUpdate = table.Column<DateTime>(nullable: true), ASPNumber = table.Column<string>(nullable: true), ASPCreationDate = table.Column<DateTime>(nullable: true), LastTelephoneChange = table.Column<DateTime>(nullable: true), LastEmailChange = table.Column<DateTime>(nullable: true), LastPasswordChange = table.Column<DateTime>(nullable: false, defaultValueSql: "GETDATE()"), CreateDate = table.Column<DateTime>(nullable: false, defaultValueSql: "getdate()"), IsApproved = table.Column<bool>(nullable: false), Birthday = table.Column<DateTime>(nullable: true), Notes = table.Column<string>(nullable: true), Position = table.Column<string>(maxLength: 50, nullable: true), PrimaryContact = table.Column<bool>(nullable: true), IsDeleted = table.Column<bool>(nullable: false), IsAdmin = table.Column<bool>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_User", x => x.Id); }); migrationBuilder.CreateTable( name: "ActivityLog", columns: table => new { ActivityLogId = table.Column<long>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), UserName = table.Column<string>(maxLength: 256, nullable: false), LogDate = 
table.Column<DateTime>(nullable: false), IsSuccess = table.Column<bool>(nullable: false), PrimaryIP = table.Column<string>(maxLength: 128, nullable: true), SecondaryIP = table.Column<string>(maxLength: 128, nullable: true), ActivityId = table.Column<int>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_ActivityLog", x => x.ActivityLogId); table.ForeignKey( name: "FK_ActivityLog_Activity_ActivityId", column: x => x.ActivityId, principalTable: "Activity", principalColumn: "ActivityId", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "Country", columns: table => new { Id = table.Column<long>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), Name = table.Column<string>(maxLength: 128, nullable: false), FormalName = table.Column<string>(maxLength: 256, nullable: false), CodeISO2 = table.Column<string>(maxLength: 2, nullable: false), CodeISO3 = table.Column<string>(maxLength: 3, nullable: false), PhoneCode = table.Column<string>(maxLength: 25, nullable: true), CodeISO3Numeric = table.Column<int>(nullable: false), Sequence = table.Column<int>(nullable: false), CountryGroupId = table.Column<int>(nullable: false), LengthIBAN = table.Column<int>(nullable: true), RegexBBAN = table.Column<string>(maxLength: 256, nullable: true) }, constraints: table => { table.PrimaryKey("PK_Country", x => x.Id); table.ForeignKey( name: "FK_Country_CountryGroup_CountryGroupId", column: x => x.CountryGroupId, principalTable: "CountryGroup", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "OpenIddictAuthorizations", columns: table => new { ApplicationId = table.Column<string>(nullable: true), ConcurrencyToken = table.Column<string>(nullable: true), Id = table.Column<string>(nullable: false), Properties = table.Column<string>(nullable: true), Scopes = table.Column<string>(nullable: true), Status = table.Column<string>(nullable: false), Subject = table.Column<string>(nullable: false), Type = table.Column<string>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_OpenIddictAuthorizations", x => x.Id); table.ForeignKey( name: "FK_OpenIddictAuthorizations_OpenIddictApplications_ApplicationId", column: x => x.ApplicationId, principalTable: "OpenIddictApplications", principalColumn: "Id", onDelete: ReferentialAction.Restrict); }); migrationBuilder.CreateTable( name: "RoleClaim", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), RoleId = table.Column<long>(nullable: false), ClaimType = table.Column<string>(nullable: true), ClaimValue = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_RoleClaim", x => x.Id); table.ForeignKey( name: "FK_RoleClaim_Role_RoleId", column: x => x.RoleId, principalTable: "Role", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "PreviousPasswords", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), UserId = table.Column<long>(nullable: false), CreatedDate = table.Column<DateTime>(nullable: false), PasswordHash = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_PreviousPasswords", x => x.Id); table.ForeignKey( name: "FK_PreviousPasswords_User", column: x => x.UserId, principalTable: 
"User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "Token", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), ClientId = table.Column<string>(nullable: false), Type = table.Column<int>(nullable: false), Value = table.Column<string>(nullable: false), UserId = table.Column<long>(nullable: false), CreatedDate = table.Column<DateTime>(nullable: false), LastModifiedDate = table.Column<DateTime>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_Token", x => x.Id); table.ForeignKey( name: "FK_Token_User_UserId", column: x => x.UserId, principalTable: "User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "UserClaim", columns: table => new { Id = table.Column<int>(nullable: false) .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn), UserId = table.Column<long>(nullable: false), ClaimType = table.Column<string>(nullable: true), ClaimValue = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_UserClaim", x => x.Id); table.ForeignKey( name: "FK_UserClaim_User_UserId", column: x => x.UserId, principalTable: "User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "UserLogin", columns: table => new { LoginProvider = table.Column<string>(nullable: false), ProviderKey = table.Column<string>(nullable: false), ProviderDisplayName = table.Column<string>(nullable: true), UserId = table.Column<long>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_UserLogin", x => new { x.LoginProvider, x.ProviderKey }); table.ForeignKey( name: "FK_UserLogin_User_UserId", column: x => x.UserId, principalTable: "User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "UserReport", columns: table => new { UserId = table.Column<long>(nullable: false), ReportId = table.Column<long>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_UserReport", x => new { x.ReportId, x.UserId }); table.ForeignKey( name: "FK_UserReport_Report_ReportId", column: x => x.ReportId, principalTable: "Report", principalColumn: "ReportId", onDelete: ReferentialAction.Cascade); table.ForeignKey( name: "FK_UserReport_User_UserId", column: x => x.UserId, principalTable: "User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "UserRole", columns: table => new { UserId = table.Column<long>(nullable: false), RoleId = table.Column<long>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_UserRole", x => new { x.UserId, x.RoleId }); table.ForeignKey( name: "FK_UserRole_Role_RoleId", column: x => x.RoleId, principalTable: "Role", principalColumn: "Id", onDelete: ReferentialAction.Cascade); table.ForeignKey( name: "FK_UserRole_User_UserId", column: x => x.UserId, principalTable: "User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "UserToken", columns: table => new { UserId = table.Column<long>(nullable: false), LoginProvider = table.Column<string>(nullable: false), Name = table.Column<string>(nullable: false), Value = table.Column<string>(nullable: true) }, constraints: table => { table.PrimaryKey("PK_UserToken", x => new { x.UserId, x.LoginProvider, x.Name }); table.ForeignKey( name: 
"FK_UserToken_User_UserId", column: x => x.UserId, principalTable: "User", principalColumn: "Id", onDelete: ReferentialAction.Cascade); }); migrationBuilder.CreateTable( name: "OpenIddictTokens", columns: table => new { ApplicationId = table.Column<string>(nullable: true), AuthorizationId = table.Column<string>(nullable: true), CreationDate = table.Column<DateTimeOffset>(nullable: true), ExpirationDate = table.Column<DateTimeOffset>(nullable: true), ConcurrencyToken = table.Column<string>(nullable: true), Id = table.Column<string>(nullable: false), Payload = table.Column<string>(nullable: true), Properties = table.Column<string>(nullable: true), ReferenceId = table.Column<string>(nullable: true), Status = table.Column<string>(nullable: true), Subject = table.Column<string>(nullable: false), Type = table.Column<string>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_OpenIddictTokens", x => x.Id); table.ForeignKey( name: "FK_OpenIddictTokens_OpenIddictApplications_ApplicationId", column: x => x.ApplicationId, principalTable: "OpenIddictApplications", principalColumn: "Id", onDelete: ReferentialAction.Restrict); table.ForeignKey( name: "FK_OpenIddictTokens_OpenIddictAuthorizations_AuthorizationId", column: x => x.AuthorizationId, principalTable: "OpenIddictAuthorizations", principalColumn: "Id", onDelete: ReferentialAction.Restrict); }); migrationBuilder.CreateIndex( name: "IX_ActivityLog_ActivityId", table: "ActivityLog", column: "ActivityId"); migrationBuilder.CreateIndex( name: "IX_Country_CountryGroupId", table: "Country", column: "CountryGroupId"); migrationBuilder.CreateIndex( name: "IX_OpenIddictApplications_ClientId", table: "OpenIddictApplications", column: "ClientId", unique: true); migrationBuilder.CreateIndex( name: "IX_OpenIddictAuthorizations_ApplicationId", table: "OpenIddictAuthorizations", column: "ApplicationId"); migrationBuilder.CreateIndex( name: "IX_OpenIddictScopes_Name", table: "OpenIddictScopes", column: "Name", unique: true); migrationBuilder.CreateIndex( name: "IX_OpenIddictTokens_ApplicationId", table: "OpenIddictTokens", column: "ApplicationId"); migrationBuilder.CreateIndex( name: "IX_OpenIddictTokens_AuthorizationId", table: "OpenIddictTokens", column: "AuthorizationId"); migrationBuilder.CreateIndex( name: "IX_OpenIddictTokens_ReferenceId", table: "OpenIddictTokens", column: "ReferenceId", unique: true, filter: "[ReferenceId] IS NOT NULL"); migrationBuilder.CreateIndex( name: "IX_PreviousPasswords_UserId", table: "PreviousPasswords", column: "UserId"); migrationBuilder.CreateIndex( name: "RoleNameIndex", table: "Role", column: "NormalizedName", unique: true, filter: "[NormalizedName] IS NOT NULL"); migrationBuilder.CreateIndex( name: "IX_RoleClaim_RoleId", table: "RoleClaim", column: "RoleId"); migrationBuilder.CreateIndex( name: "IX_Token_UserId", table: "Token", column: "UserId"); migrationBuilder.CreateIndex( name: "EmailIndex", table: "User", column: "NormalizedEmail"); migrationBuilder.CreateIndex( name: "UserNameIndex", table: "User", column: "NormalizedUserName", unique: true, filter: "[NormalizedUserName] IS NOT NULL"); migrationBuilder.CreateIndex( name: "IX_UserClaim_UserId", table: "UserClaim", column: "UserId"); migrationBuilder.CreateIndex( name: "IX_UserLogin_UserId", table: "UserLogin", column: "UserId"); migrationBuilder.CreateIndex( name: "IX_UserReport_UserId", table: "UserReport", column: "UserId"); migrationBuilder.CreateIndex( name: "IX_UserRole_RoleId", table: "UserRole", column: "RoleId"); } protected override void 
Down(MigrationBuilder migrationBuilder) { migrationBuilder.DropTable( name: "ActivityLog"); migrationBuilder.DropTable( name: "Country"); migrationBuilder.DropTable( name: "OpenIddictScopes"); migrationBuilder.DropTable( name: "OpenIddictTokens"); migrationBuilder.DropTable( name: "PreviousPasswords"); migrationBuilder.DropTable( name: "RoleClaim"); migrationBuilder.DropTable( name: "Token"); migrationBuilder.DropTable( name: "UserClaim"); migrationBuilder.DropTable( name: "UserLogin"); migrationBuilder.DropTable( name: "UserReport"); migrationBuilder.DropTable( name: "UserRole"); migrationBuilder.DropTable( name: "UserToken"); migrationBuilder.DropTable( name: "Activity"); migrationBuilder.DropTable( name: "CountryGroup"); migrationBuilder.DropTable( name: "OpenIddictAuthorizations"); migrationBuilder.DropTable( name: "Report"); migrationBuilder.DropTable( name: "Role"); migrationBuilder.DropTable( name: "User"); migrationBuilder.DropTable( name: "OpenIddictApplications"); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Breach.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Breach { public Breach() { BreachInvoice = new HashSet<BreachInvoice>(); } public int Id { get; set; } public int BreachTypeId { get; set; } public int BreachLevelId { get; set; } public string TradeCode { get; set; } public int? ClientCompanyOpiid { get; set; } public string OriginalLimit { get; set; } public string OverrideValue { get; set; } public string Notes { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public int? 
PaymentId { get; set; } public BreachLevel BreachLevel { get; set; } public BreachType BreachType { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } public AuthUser CreatedByAuthUser { get; set; } public Payment Payment { get; set; } public FxforwardTrade TradeCodeNavigation { get; set; } public AuthUser UpdatedByAuthUser { get; set; } public ICollection<BreachInvoice> BreachInvoice { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/TransactionCommit.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class TransactionCommit { public TransactionCommit() { BankAccountCurrencyBalance = new HashSet<BankAccountCurrencyBalance>(); BankAccountCurrencyBalanceHistory = new HashSet<BankAccountCurrencyBalanceHistory>(); ClientCompanyVirtualAccountCurrencyBalanceHistory = new HashSet<ClientCompanyVirtualAccountCurrencyBalanceHistory>(); FxforwardTrade = new HashSet<FxforwardTrade>(); Fxoption = new HashSet<Fxoption>(); Payment = new HashSet<Payment>(); } public int Id { get; set; } public DateTime CommitDateTime { get; set; } public int AuthUserId { get; set; } public AuthUser AuthUser { get; set; } public ICollection<BankAccountCurrencyBalance> BankAccountCurrencyBalance { get; set; } public ICollection<BankAccountCurrencyBalanceHistory> BankAccountCurrencyBalanceHistory { get; set; } public ICollection<ClientCompanyVirtualAccountCurrencyBalanceHistory> ClientCompanyVirtualAccountCurrencyBalanceHistory { get; set; } public ICollection<FxforwardTrade> FxforwardTrade { get; set; } public ICollection<Fxoption> Fxoption { get; set; } public ICollection<Payment> Payment { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceCorporateSectorNonFinancial.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceCorporateSectorNonFinancial { public ComplianceCorporateSectorNonFinancial() { ClientCompanyComplianceCorporateSector = new HashSet<ClientCompanyComplianceCorporateSector>(); } public int Id { get; set; } public string Value { get; set; } public string Description { get; set; } public int Sequence { get; set; } public ICollection<ClientCompanyComplianceCorporateSector> ClientCompanyComplianceCorporateSector { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Email/OrderNoteModel.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.Identity; using System; namespace Argentex.Core.Service.Models.Email { public class OrderNoteModel { public string InstructedBy { get; set; } public DateTime InstructedDateTime { get; set; } public string Method { get; set; } public string TradeRef { get; set; } public string SellCcy { get; set; } public decimal SellAmount { get; set; } public string BuyCcy { get; set; } public decimal BuyAmount { get; set; } public double Rate { get; set; } public DateTime ValueDate { get; set; } public string CurrencyPair { get; set; } public string ClientEmail { get; set; } public DateTime CreatedDate { get; set; } public DateTime? 
ValidityDate { get; set; } public ApplicationServiceUser DealerAuthUser { get; set; } public DataAccess.Entities.ClientCompany ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/UserChangeRequest.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class UserChangeRequest { public UserChangeRequest() { UserChangeRequestApproval = new HashSet<UserChangeRequestApproval>(); } public int Id { get; set; } public int AuthUserId { get; set; } public string CurrentValue { get; set; } public string ProposedValue { get; set; } public string ChangeValueType { get; set; } public DateTime ChangeDateTime { get; set; } public int ChangedByAuthUserId { get; set; } public string ChangeStatus { get; set; } public AuthUser AuthUser { get; set; } public AuthUser ChangedByAuthUser { get; set; } public ICollection<UserChangeRequestApproval> UserChangeRequestApproval { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CurrencyFxrate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class CurrencyFxrate { public int LhsCcyid { get; set; } public int RhsCcyid { get; set; } public decimal? Rate { get; set; } public DateTime UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthUser.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthUser { public AuthUser() { AppUser = new HashSet<AppUser>(); AuthUserPasswordToken = new HashSet<AuthUserPasswordToken>(); AuthUserPreviousPasswords = new HashSet<AuthUserPreviousPasswords>(); BankAccountCurrencyDetailsCreatedByAuthUser = new HashSet<BankAccountCurrencyDetails>(); BankAccountCurrencyDetailsUpdatedByAuthUser = new HashSet<BankAccountCurrencyDetails>(); BreachCreatedByAuthUser = new HashSet<Breach>(); BreachInvoiceUpdatedByAuthUser = new HashSet<BreachInvoice>(); BreachInvoiceUploadedByAuthUser = new HashSet<BreachInvoice>(); BreachUpdatedByAuthUser = new HashSet<Breach>(); CassRecsCheck1ByAuthUser = new HashSet<CassRecs>(); CassRecsCheck2ByAuthUser = new HashSet<CassRecs>(); CassRecsCompletedByAuthUser = new HashSet<CassRecs>(); CassRecsPaymentFileUpdatedByAuthUser = new HashSet<CassRecsPaymentFile>(); CassRecsPaymentFileUploadedByAuthUser = new HashSet<CassRecsPaymentFile>(); CassRecsUpdatedByAuthUser = new HashSet<CassRecs>(); ClientCompanyComplianceCorporateSector = new HashSet<ClientCompanyComplianceCorporateSector>(); ClientCompanyComplianceCurrency = new HashSet<ClientCompanyComplianceCurrency>(); ClientCompanyComplianceNote = new HashSet<ClientCompanyComplianceNote>(); ClientCompanyContactAuthUser = new HashSet<ClientCompanyContact>(); ClientCompanyContactCategory = new HashSet<ClientCompanyContactCategory>(); ClientCompanyContactUpdatedByAuthUser = new HashSet<ClientCompanyContact>(); ClientCompanyCurrencyDefaultOpi = new HashSet<ClientCompanyCurrencyDefaultOpi>(); ClientCompanyIbrelationship = new HashSet<ClientCompanyIbrelationship>(); ClientCompanyLinkedGroup = new HashSet<ClientCompanyLinkedGroup>(); ClientCompanyNote = new HashSet<ClientCompanyNote>(); ClientCompanyOpi = new HashSet<ClientCompanyOpi>(); ClientCompanyOpiduplicateCreatedByAuthUser = new HashSet<ClientCompanyOpiduplicate>(); ClientCompanyOpiduplicateIsOkupdatedByAuthUser = new HashSet<ClientCompanyOpiduplicate>(); ClientCompanyPipeline = new HashSet<ClientCompanyPipeline>(); 
ClientCompanySalesAppUser = new HashSet<ClientCompanySalesAppUser>(); ClientCompanyVirtualAccountCurrencyBalanceHistory = new HashSet<ClientCompanyVirtualAccountCurrencyBalanceHistory>(); ClientSiteActionCreatedByAuthUser = new HashSet<ClientSiteAction>(); ClientSiteActionUpdatedByAuthUser = new HashSet<ClientSiteAction>(); ComplianceClassificationFileUpdatedByAuthUser = new HashSet<ComplianceClassificationFile>(); ComplianceClassificationFileUploadedByAuthUser = new HashSet<ComplianceClassificationFile>(); ComplianceIsincurrencyValueDate = new HashSet<ComplianceIsincurrencyValueDate>(); ComplianceQuestionnaire = new HashSet<ComplianceQuestionnaire>(); CurrencyCreatedByAuthUser = new HashSet<Currency>(); CurrencyUpdatedByAuthUser = new HashSet<Currency>(); FixApatradeCapture = new HashSet<FixApatradeCapture>(); FixFxforwardTradeOrder = new HashSet<FixFxforwardTradeOrder>(); FxforwardTrade2Opi = new HashSet<FxforwardTrade2Opi>(); FxforwardTradeBrokeredByAuthUser = new HashSet<FxforwardTrade>(); FxforwardTradeCreatedByAuthUser = new HashSet<FxforwardTrade>(); FxforwardTradeFilledByAuthUser = new HashSet<FxforwardTrade>(); FxforwardTradeInvoice = new HashSet<FxforwardTradeInvoice>(); FxforwardTradeOpiupdatedByAuthUser = new HashSet<FxforwardTrade>(); FxforwardTradeUpdatedByAuthUser = new HashSet<FxforwardTrade>(); FxforwardTradeVerifiedByAuthUser = new HashSet<FxforwardTrade>(); FxoptionCreatedByAuthUser = new HashSet<Fxoption>(); FxoptionUpdatedByAuthUser = new HashSet<Fxoption>(); FxoptionVerifiedByAuthUser = new HashSet<Fxoption>(); Fxswap = new HashSet<Fxswap>(); PaymentAuthorisedByAuthUser = new HashSet<Payment>(); PaymentCreatedByAuthUser = new HashSet<Payment>(); PaymentSwiftAuth1ByAuthUser = new HashSet<Payment>(); PaymentSwiftAuth2ByAuthUser = new HashSet<Payment>(); PaymentUpdatedByAuthUser = new HashSet<Payment>(); ReportProcessedLog = new HashSet<ReportProcessedLog>(); ReportQueueToProcess = new HashSet<ReportQueueToProcess>(); SuspiciousActivityReportCreatedByAuthUser = new HashSet<SuspiciousActivityReport>(); SuspiciousActivityReportIssueClosedByAuthUser = new HashSet<SuspiciousActivityReport>(); SuspiciousActivityReportUpdatedByAuthUser = new HashSet<SuspiciousActivityReport>(); SwiftincomingMatchedAccountCreatedByAuthUser = new HashSet<SwiftincomingMatchedAccount>(); SwiftincomingMatchedAccountUpdatedByAuthUser = new HashSet<SwiftincomingMatchedAccount>(); SwiftintegrationService = new HashSet<SwiftintegrationService>(); TransactionCommit = new HashSet<TransactionCommit>(); UserAuditLogPageViews = new HashSet<UserAuditLogPageViews>(); UserChangeRequestApproval = new HashSet<UserChangeRequestApproval>(); UserChangeRequestAuthUser = new HashSet<UserChangeRequest>(); UserChangeRequestChangedByAuthUser = new HashSet<UserChangeRequest>(); } public int Id { get; set; } public string UserName { get; set; } public string Password { get; set; } public string Email { get; set; } public bool IsApproved { get; set; } public bool IsLockedOut { get; set; } public string Comment { get; set; } public DateTime CreateDate { get; set; } public DateTime? LastPasswordChangeDate { get; set; } public DateTime? LastLoginDate { get; set; } public DateTime? LastActivityDate { get; set; } public DateTime? 
LastLockOutDate { get; set; } public int FailedPasswordAttemptCount { get; set; } public DateTime FailedPasswordAttemptWindowStart { get; set; } public int ApplicationId { get; set; } public AuthApplication Application { get; set; } public ICollection<AppUser> AppUser { get; set; } public ICollection<AuthUserPasswordToken> AuthUserPasswordToken { get; set; } public ICollection<AuthUserPreviousPasswords> AuthUserPreviousPasswords { get; set; } public ICollection<BankAccountCurrencyDetails> BankAccountCurrencyDetailsCreatedByAuthUser { get; set; } public ICollection<BankAccountCurrencyDetails> BankAccountCurrencyDetailsUpdatedByAuthUser { get; set; } public ICollection<Breach> BreachCreatedByAuthUser { get; set; } public ICollection<BreachInvoice> BreachInvoiceUpdatedByAuthUser { get; set; } public ICollection<BreachInvoice> BreachInvoiceUploadedByAuthUser { get; set; } public ICollection<Breach> BreachUpdatedByAuthUser { get; set; } public ICollection<CassRecs> CassRecsCheck1ByAuthUser { get; set; } public ICollection<CassRecs> CassRecsCheck2ByAuthUser { get; set; } public ICollection<CassRecs> CassRecsCompletedByAuthUser { get; set; } public ICollection<CassRecsPaymentFile> CassRecsPaymentFileUpdatedByAuthUser { get; set; } public ICollection<CassRecsPaymentFile> CassRecsPaymentFileUploadedByAuthUser { get; set; } public ICollection<CassRecs> CassRecsUpdatedByAuthUser { get; set; } public ICollection<ClientCompanyComplianceCorporateSector> ClientCompanyComplianceCorporateSector { get; set; } public ICollection<ClientCompanyComplianceCurrency> ClientCompanyComplianceCurrency { get; set; } public ICollection<ClientCompanyComplianceNote> ClientCompanyComplianceNote { get; set; } public ICollection<ClientCompanyContact> ClientCompanyContactAuthUser { get; set; } public ICollection<ClientCompanyContactCategory> ClientCompanyContactCategory { get; set; } public ICollection<ClientCompanyContact> ClientCompanyContactUpdatedByAuthUser { get; set; } public ICollection<ClientCompanyCurrencyDefaultOpi> ClientCompanyCurrencyDefaultOpi { get; set; } public ICollection<ClientCompanyIbrelationship> ClientCompanyIbrelationship { get; set; } public ICollection<ClientCompanyLinkedGroup> ClientCompanyLinkedGroup { get; set; } public ICollection<ClientCompanyNote> ClientCompanyNote { get; set; } public ICollection<ClientCompanyOpi> ClientCompanyOpi { get; set; } public ICollection<ClientCompanyOpiduplicate> ClientCompanyOpiduplicateCreatedByAuthUser { get; set; } public ICollection<ClientCompanyOpiduplicate> ClientCompanyOpiduplicateIsOkupdatedByAuthUser { get; set; } public ICollection<ClientCompanyPipeline> ClientCompanyPipeline { get; set; } public ICollection<ClientCompanySalesAppUser> ClientCompanySalesAppUser { get; set; } public ICollection<ClientCompanyVirtualAccountCurrencyBalanceHistory> ClientCompanyVirtualAccountCurrencyBalanceHistory { get; set; } public ICollection<ClientSiteAction> ClientSiteActionCreatedByAuthUser { get; set; } public ICollection<ClientSiteAction> ClientSiteActionUpdatedByAuthUser { get; set; } public ICollection<ComplianceClassificationFile> ComplianceClassificationFileUpdatedByAuthUser { get; set; } public ICollection<ComplianceClassificationFile> ComplianceClassificationFileUploadedByAuthUser { get; set; } public ICollection<ComplianceIsincurrencyValueDate> ComplianceIsincurrencyValueDate { get; set; } public ICollection<ComplianceQuestionnaire> ComplianceQuestionnaire { get; set; } public ICollection<Currency> CurrencyCreatedByAuthUser { get; set; } public 
ICollection<Currency> CurrencyUpdatedByAuthUser { get; set; } public ICollection<FixApatradeCapture> FixApatradeCapture { get; set; } public ICollection<FixFxforwardTradeOrder> FixFxforwardTradeOrder { get; set; } public ICollection<FxforwardTrade2Opi> FxforwardTrade2Opi { get; set; } public ICollection<FxforwardTrade> FxforwardTradeBrokeredByAuthUser { get; set; } public ICollection<FxforwardTrade> FxforwardTradeCreatedByAuthUser { get; set; } public ICollection<FxforwardTrade> FxforwardTradeFilledByAuthUser { get; set; } public ICollection<FxforwardTradeInvoice> FxforwardTradeInvoice { get; set; } public ICollection<FxforwardTrade> FxforwardTradeOpiupdatedByAuthUser { get; set; } public ICollection<FxforwardTrade> FxforwardTradeUpdatedByAuthUser { get; set; } public ICollection<FxforwardTrade> FxforwardTradeVerifiedByAuthUser { get; set; } public ICollection<Fxoption> FxoptionCreatedByAuthUser { get; set; } public ICollection<Fxoption> FxoptionUpdatedByAuthUser { get; set; } public ICollection<Fxoption> FxoptionVerifiedByAuthUser { get; set; } public ICollection<Fxswap> Fxswap { get; set; } public ICollection<Payment> PaymentAuthorisedByAuthUser { get; set; } public ICollection<Payment> PaymentCreatedByAuthUser { get; set; } public ICollection<Payment> PaymentSwiftAuth1ByAuthUser { get; set; } public ICollection<Payment> PaymentSwiftAuth2ByAuthUser { get; set; } public ICollection<Payment> PaymentUpdatedByAuthUser { get; set; } public ICollection<ReportProcessedLog> ReportProcessedLog { get; set; } public ICollection<ReportQueueToProcess> ReportQueueToProcess { get; set; } public ICollection<SuspiciousActivityReport> SuspiciousActivityReportCreatedByAuthUser { get; set; } public ICollection<SuspiciousActivityReport> SuspiciousActivityReportIssueClosedByAuthUser { get; set; } public ICollection<SuspiciousActivityReport> SuspiciousActivityReportUpdatedByAuthUser { get; set; } public ICollection<SwiftincomingMatchedAccount> SwiftincomingMatchedAccountCreatedByAuthUser { get; set; } public ICollection<SwiftincomingMatchedAccount> SwiftincomingMatchedAccountUpdatedByAuthUser { get; set; } public ICollection<SwiftintegrationService> SwiftintegrationService { get; set; } public ICollection<TransactionCommit> TransactionCommit { get; set; } public ICollection<UserAuditLogPageViews> UserAuditLogPageViews { get; set; } public ICollection<UserChangeRequestApproval> UserChangeRequestApproval { get; set; } public ICollection<UserChangeRequest> UserChangeRequestAuthUser { get; set; } public ICollection<UserChangeRequest> UserChangeRequestChangedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/Order/OrderControllerTests.cs using Argentex.Core.Api.Controllers.Order; using Argentex.Core.Service.Models.Order; using Argentex.Core.Service.Order; using Microsoft.AspNetCore.Mvc; using Moq; using System; using System.Collections.Generic; using System.Threading.Tasks; using Xunit; namespace Argentex.Core.Api.Tests.Order { public class OrderControllerTests { [Fact] public void ExecuteOrders_Success_With_Valid_Model_Input() { //Arrange var mockResponseList = new Mock<IList<OrderResponseModel>>(); var mockService = new Mock<IOrderService>(); mockService.Setup(x => x.ExecuteOrdersAsync(It.IsAny<OrderRequestModel>())) .Returns(Task.FromResult(mockResponseList.Object)); var controller = new OrderController(mockService.Object); var orderModel = new OrderModel { ClientAmount = 1000, RhsCcy = "GBP", LhsCcy = "EUR", ValueDate = DateTime.Now }; OrderRequestModel orderRequest = new 
OrderRequestModel { AuthUserId = 1, ClientCompanyId = 1, OrderModels = new List<OrderModel> { orderModel } }; //Act var result = controller.ExecuteOrdersAsync(orderRequest).Result; //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void ExecuteOrders_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var mockService = new Mock<IOrderService>(); var controller = new OrderController(mockService.Object); controller.ModelState.AddModelError("", "Error"); //Act var result = controller.ExecuteOrdersAsync(null).Result; //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void CanceOrder_Failed_When_Not_Passing_Code() { //Arrange var mockService = new Mock<IOrderService>(); mockService.Setup(x => x.CancelOrderAsync("Code")) .Returns(Task.FromResult(true)); var controller = new OrderController(mockService.Object); //Act var result = controller.CancelOrderAsync(null).Result; //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void CanceOrder_Success() { //Arrange var mockService = new Mock<IOrderService>(); mockService.Setup(x => x.CancelOrderAsync("Code")) .Returns(Task.FromResult(true)); var controller = new OrderController(mockService.Object); //Act var result = controller.CancelOrderAsync("Code").Result; //Assert Assert.IsType<OkObjectResult>(result); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyIbrelationship.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyIbrelationship { public int Id { get; set; } public int ClientCompanyId { get; set; } public int IntroducingBrokerId { get; set; } public decimal? Percentage { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdateDateTime { get; set; } public ClientCompany ClientCompany { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/ClientCompany/ClientCompanyService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.EntityFrameworkCore; using System.Net; using Argentex.Core.UnitsOfWork.ClientCompanyContacts.Model; namespace Argentex.Core.Service { public class ClientCompanyService : IClientCompanyService { private readonly IClientCompanyUow _clientCompanyUow; private readonly IUserService _userService; private readonly ICurrencyService _currencyService; private readonly IAppSettingService _appSettingService; private readonly IConfigWrapper _config; private bool _disposed; public ClientCompanyService(IClientCompanyUow clientCompanyUow, IUserService userService, ICurrencyService currencyService, IAppSettingService appSettingService, IConfigWrapper config) { _clientCompanyUow = clientCompanyUow; _userService = userService; _currencyService = currencyService; _appSettingService = appSettingService; _config = config; } public string GetClientCompanyName(int clientCompanyId) { return _clientCompanyUow .ClientCompanyContactRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId) .Select(x => x.ClientCompany.Name) .FirstOrDefault(); } public ClientCompanyModel 
GetClientCompany(int clientCompanyId) { return _clientCompanyUow .GetClientCompany(clientCompanyId) .Select(x => new ClientCompanyModel { Name = x.Name, Crn = x.Crn, DealerAppUserID = x.DealerAppUserId }) .SingleOrDefault(); } public ICollection<ClientCompaniesModel> GetClientCompanies() { return _clientCompanyUow .GetClientCompanies() .Select(x => new ClientCompaniesModel { ClientCompanyId = x.Id, ClientCompanyName = x.Name }).ToList(); } public ICollection<ClientCompanyAccountModel> GetClientCompanyAccounts(int clientCompanyId) { return _clientCompanyUow .GetClientCompanyAccounts(clientCompanyId) .Select(x => new ClientCompanyAccountModel { ClientCompanyOpiId = x.Id, ClientCompanyId = x.ClientCompanyId, CurrencyId = x.CurrencyId, Currency = x.Currency.Code, CountryId = x.CountryId ?? 0, Country = x.Country.Name, Description = x.Description, BankName = x.BankName, BankAddress = x.BankAddress, ClearingCodePrefixId = x.ClearingCodePrefixId ?? 0, AccountNumber = x.AccountNumber, AccountName = x.AccountName, SortCode = x.SortCode, SwiftCode = x.SwiftCode, Iban = x.Iban, IsDefault = x.ClientCompanyCurrencyDefaultOpi .Select(y => y.ClientCompanyOpiid) .Contains(x.Id), Approved = x.Authorised, BeneficiaryName = x.BeneficiaryName, BeneficiaryAddress = x.BeneficiaryAddress, Reference = x.Reference, UpdatedByAuthUserId = x.UpdatedByAuthUserId }).ToList(); } public ClientCompanyContactResponseModel GetClientCompanyContact(ClientCompanyContactSearchContext clientCompanyContactSearchContext) { var clientCompanyContactSearchModel = new ClientCompanyContactSearchModel() { ClientCompanyContactId = clientCompanyContactSearchContext.ClientCompanyContactId, AuthUsertId = clientCompanyContactSearchContext.AuthUsertId }; var contact = _clientCompanyUow.GetCurrentClientCompanyContact(clientCompanyContactSearchModel); if (contact == null) return new ClientCompanyContactResponseModel(); var companyContact = new ClientCompanyContactResponseModel { CompanyContactModel = new ClientCompanyContactModel { ID = contact.Id, ContactTitle = contact.Title, ContactForename = contact.Forename, ContactSurname = contact.Surname, ContactEmail = contact.Email, ContactTelephone = contact.TelephoneDirect, Authorized = contact.Authorized, UserName = contact.AuthUser?.UserName, UpdatedByAuthUserId = contact.UpdatedByAuthUserId, Position = contact.Position, TelephoneMobile = contact.TelephoneMobile, TelephoneOther = contact.TelephoneOther, BirthDay = contact.Birthday, IsApproved = contact.AuthUser?.IsApproved ?? false, PrimaryContact = contact.PrimaryContact ?? false, ClientSiteAuthUserID = contact.AuthUserId.HasValue ? (int)contact.AuthUserId : 0, LastTelephoneChangeDate = contact.LastTelephoneChangeDate.GetValueOrDefault(), LastEmailChangeDate = contact.LastEmailChangeDate.GetValueOrDefault(), BloombergGpi = contact.BloombergGpi, NiNumber = contact.NiNumber, ReceiveNotifications = contact.RecNotifications, ReceiveAMReport = contact.RecAmreport, ReceiveActivityReport = contact.RecActivityReport, ASPNumber = contact.Aspnumber ?? 
string.Empty,
                    ASPCreationDate = contact.AspcreationDate,
                    FullName = contact.Fullname,
                    Notes = contact.Notes,
                    ClientCompany = MapClientCompany(contact.ClientCompany)
                },
                Succeeded = true
            };

            return companyContact;
        }

        public ClientCompanyContactResponseModel GetErrorMessages(HttpStatusCode statusCode, Exception exception, ClientCompanyContactSearchContext clientCompanyContactSearchContext)
        {
            ClientCompanyContactResponseModel responseModel = new ClientCompanyContactResponseModel();

            var IdOrUsername = clientCompanyContactSearchContext.ClientCompanyContactId != null
                ? "Client Company Contact ID"
                : "AuthUser ID";

            var userId = clientCompanyContactSearchContext.ClientCompanyContactId != null
                ? clientCompanyContactSearchContext.ClientCompanyContactId
                : clientCompanyContactSearchContext.AuthUsertId;

            switch (statusCode)
            {
                case HttpStatusCode.BadRequest:
                    responseModel.ResponseMessages = new Dictionary<string, string[]>
                    {
                        { "Errors", new string[] {$"Client Company Contact with {IdOrUsername} {userId} could not be retrieved. {exception?.Message}"} }
                    };
                    break;
                case HttpStatusCode.NotFound:
                    responseModel.ResponseMessages = new Dictionary<string, string[]>
                    {
                        { "Errors", new string[] { $"Client Company Contact with {IdOrUsername} {userId} does not exist in the database. {exception?.Message}" } }
                    };
                    break;
            }

            return responseModel;
        }

        /// <summary>
        /// Adds a temporary spread adjustment for the given client company and currency pair.
        /// The adjustment expires once the configured spread adjustment validity period has elapsed.
        /// </summary>
        /// <param name="spreadAdjustment"></param>
        public void AddSpredAdjustment(SpreadAdjustmentModel spreadAdjustment)
        {
            var spreadAdjustmentValidity = _appSettingService.GetSpreadAdjustmentValidity();
            var clientCompanyOnlineDetails = _clientCompanyUow.GetClientCompanyOnlineDetails(spreadAdjustment.ClientCompanyID).FirstOrDefault();
            var currency1 = _currencyService.GetCurrencyId(spreadAdjustment.BuyCcy);
            var currency2 = _currencyService.GetCurrencyId(spreadAdjustment.SellCcy);

            var model = new ClientCompanyOnlineSpreadAdjustment()
            {
                ClientCompanyOnlineDetailsId = clientCompanyOnlineDetails.Id,
                Currency1Id = currency1,
                Currency2Id = currency2,
                IsBuy = spreadAdjustment.IsBuy,
                Spread = spreadAdjustment.SpreadAdjustment,
                ExpirationDateTime = DateTime.UtcNow.AddMinutes(spreadAdjustmentValidity),
                UpdatedByAuthUserId = spreadAdjustment.UpdatedByAuthUserId,
                UpdatedDateTime = DateTime.UtcNow,
            };

            _clientCompanyUow.AddClientCompanyOnlineSpreadAdjustment(model);
        }

        /// <summary>
        /// Set ClientCompanyOnlineDetails Kicked flag to true
        /// </summary>
        /// <param name="clientCompanyID"></param>
        public void SetKicked(int clientCompanyID)
        {
            _clientCompanyUow.SetClientCompanyOnlineKicked(clientCompanyID);
        }

        /// <summary>
        /// Determines if the trade should be executed based on the spread set by the company
        /// In case there is an active temporary spread the trade can be executed
        /// Otherwise the CSR user must wait for the Trader site user to adjust the spread,
        /// cancel the spread adjusting or the adjust spread counter to time out
        /// </summary>
        /// <param name="clientCompanyID"></param>
        /// <param name="currency1"></param>
        /// <param name="currency2"></param>
        /// <param name="isBuyDirection"></param>
        /// <returns>True if there is a temporary spread for the given filters</returns>
        public bool GetTradeExecutionStatusBySpread(int clientCompanyID, string currency1, string currency2, bool isBuyDirection)
        {
            int currency1Id = _currencyService.GetCurrencyId(currency1);
            int currency2Id = _currencyService.GetCurrencyId(currency2);

            List<DataAccess.Entities.ClientCompanyOnlineSpreadAdjustment> list =
_clientCompanyUow.GetClientCompanyOnlineSpreadAdjustment(clientCompanyID, currency1Id, currency2Id, isBuyDirection) .Where(x => x.ExpirationDateTime.Subtract(DateTime.UtcNow).TotalMinutes >= 0).ToList(); return list != null && list.Count > 0; } /// <summary> /// Get the spread for the given company /// Order in which the spread is searched for: Temporary active spread, skew spread, default spread /// </summary> public int GetClientCompanySpread(int clientCompanyID, string currency1, string currency2, bool isBuy, DateTime valueDate, DateTime contractDate) { int spread = 0; //Temporary active spread int currency1Id = _currencyService.GetCurrencyId(currency1); int currency2Id = _currencyService.GetCurrencyId(currency2); List<ClientCompanyOnlineSpreadAdjustment> list = _clientCompanyUow.GetClientCompanyOnlineSpreadAdjustment(clientCompanyID, currency1Id, currency2Id, isBuy) .Where(x => x.ExpirationDateTime.Subtract(DateTime.UtcNow).TotalMinutes >= 0).ToList(); if (list != null && list.Count > 0) { //get the newest active temporary spread spread = list[0].Spread; return spread; } //Skew spread DataAccess.Entities.ClientCompanyOnlineDetailsSkew clientCompanyOnlineDetailsSkew = _clientCompanyUow.GetClientCompanyOnlineDetailsSkew(clientCompanyID, currency1Id, currency2Id, isBuy).FirstOrDefault(); if (clientCompanyOnlineDetailsSkew != null) { spread = clientCompanyOnlineDetailsSkew.Spread; return spread; } //Default spread DataAccess.Entities.ClientCompanyOnlineDetails clientCompanyOnlineDetails = GetClientCompanyOnlineDetails(clientCompanyID); if (clientCompanyOnlineDetails != null && clientCompanyOnlineDetails.AllowOnlineTrading) { int spotSpread = clientCompanyOnlineDetails.SpotSpread ?? 0; int fwSpread = clientCompanyOnlineDetails.FwdSpread ?? 0; //determine which spread to use /* SPOT if the Value Date is 2, 1 or 0 business days away from the Contract Date FORWARD if the Value Date further than 2, 1 or 0 business days away from the Contract Date */ if (IsForward(contractDate, valueDate)) { spread = fwSpread; } else { spread = spotSpread; } return spread; } //in case there are no spread details return the default spread value 0 return spread; } public DataAccess.Entities.ClientCompanyOnlineDetails GetClientCompanyOnlineDetails(int clientCompanyId) { return _clientCompanyUow.GetClientCompanyOnlineDetails(clientCompanyId).FirstOrDefault(); } public ClientCompanyOnlineDetailsModel GetClientCompanyOnlineDetailsModel(int clientCompanyId) { var result = _clientCompanyUow.GetClientCompanyOnlineDetails(clientCompanyId) .Select(e => new ClientCompanyOnlineDetailsModel { Id = e.Id, ClientCompanyId = e.ClientCompanyId, AllowOnlineTrading = e.AllowOnlineTrading, MaxTradeSize = e.MaxTradeSize, MaxOpen = e.MaxOpen, MaxTenor = e.MaxTenor, Collateral = e.Collateral, SpotSpread = e.SpotSpread, FwdSpread = e.FwdSpread, Kicked = e.Kicked, DealerFullName = string.Empty, DealerPhoneNumber = string.Empty }).FirstOrDefault(); if(result == null) { result = new ClientCompanyOnlineDetailsModel(); } var company = _clientCompanyUow.GetClientCompany(clientCompanyId).FirstOrDefault(); if (company != null && company.DealerAppUserId != null) { var dealerAppUser = _userService.GetFXDBAppUserById((int)company.DealerAppUserId); if (dealerAppUser != null) { result.DealerFullName = dealerAppUser.FullName; // TODO add country code in CSR Core first //result.DealerPhoneNumber = $"+{dealerAppUser.TelephoneCountryCode} {dealerAppUser.TelephoneNumber}"; result.DealerPhoneNumber = $"{dealerAppUser.TelephoneNumber}"; } } if 
(string.IsNullOrEmpty(result.DealerPhoneNumber)) { // TODO change to appsettings result.DealerPhoneNumber = _config.Get("Phones:ArgentexSupport"); } return result; } public ClientCompanyAccountModel GetClientCompanyDefaultAccount(int clientCompanyId, int currencyId) { return _clientCompanyUow .GetClientCompanyAccounts(clientCompanyId) .Where(x => x.Authorised && x.CurrencyId == currencyId && x.ClientCompanyCurrencyDefaultOpi .Select(y => y.ClientCompanyOpiid) .Contains(x.Id)) .Select(x => new ClientCompanyAccountModel { ClientCompanyOpiId = x.Id, ClientCompanyId = x.ClientCompanyId, CurrencyId = x.CurrencyId, Currency = x.Currency.Code, CountryId = x.CountryId ?? 0, Country = x.Country.Name, Description = x.Description, BankName = x.BankName, BankAddress = x.BankAddress, ClearingCodePrefixId = x.ClearingCodePrefixId ?? 0, AccountNumber = x.AccountNumber, AccountName = x.AccountName, SortCode = x.SortCode, SwiftCode = x.SwiftCode, Iban = x.Iban, IsDefault = x.ClientCompanyCurrencyDefaultOpi .Select(y => y.ClientCompanyOpiid) .Contains(x.Id), Approved = x.Authorised, BeneficiaryName = x.BeneficiaryName, BeneficiaryAddress = x.BeneficiaryAddress, Reference = x.Reference, UpdatedByAuthUserId = x.UpdatedByAuthUserId }) .FirstOrDefault(); } public async Task<IEnumerable<ContactCategoryModel>> GetContactCategories() { return await _clientCompanyUow.GetContactCategories() .Select(x => new ContactCategoryModel() { Id = x.Id, Description = x.Description, Sequence = x.Sequence }).ToListAsync(); } public bool AddContactCategory(ContactCategoryModel model) { var contactCategory = _clientCompanyUow.GetContactCategory(model.Description).FirstOrDefault(); //the contact category already exists if (contactCategory != null) return false; _clientCompanyUow.AddContactCategory(new ContactCategory() { Description = model.Description }); return true; } public ContactCategory GetContactCategory(int contactCategoryId) { return _clientCompanyUow.GetContactCategory(contactCategoryId).FirstOrDefault(); } public ContactCategory GetContactCategory(string contactCategoryDescription) { return _clientCompanyUow.GetContactCategory(contactCategoryDescription).FirstOrDefault(); } public bool ProcessClientCompanyContactCategories(ClientCompanyContactBulkCategoryModel model) { List<int> existingClientCompanyContactCategoryIds = _clientCompanyUow .GetClientCompanyContactCategories(model.ClientCompanyContactId) .Select(x => x.ContactCategoryId).ToList(); List<int> newClientCompanyContactCategoryIds = model.ContactCategoryIds.ToList(); List<int> unassignClientCompanyContactCategoryIds = existingClientCompanyContactCategoryIds .Except(newClientCompanyContactCategoryIds).ToList(); List<int> assignClientCompanyContactCategoryIds = newClientCompanyContactCategoryIds .Except(existingClientCompanyContactCategoryIds).ToList(); return _clientCompanyUow.ProcessClientCompanyContactCategories(unassignClientCompanyContactCategoryIds, assignClientCompanyContactCategoryIds, model.ClientCompanyContactId, model.CreatedByAuthUserId); } public async Task<IEnumerable<ClientCompanyContactCategoryModel>> GetClientCompanyContactCategories(int clientCompanyContactId) { return await _clientCompanyUow.GetClientCompanyContactCategories(clientCompanyContactId) .Select(x => new ClientCompanyContactCategoryModel() { ClientCompanyContactId = x.ClientCompanyContactId, ContactCategoryId = x.ContactCategoryId, ContactCategoryDescription = x.ContactCategory.Description }).ToListAsync(); } public ClientCompanyContactListResponseModel GetCompanyContactList(int 
clientCompanyId) { var applicationServiceUserList = _clientCompanyUow.GetClientCompanyContactList(clientCompanyId) .Select(x => new ClientCompanyContactList { ID = x.Id, ContactTitle = x.Title, ContactForename = x.Forename, ContactSurname = x.Surname, ContactEmail = x.Email, ClientCompanyId = x.ClientCompanyId, Position = x.Position, FullName = x.Fullname, Authorized = x.Authorized, PrimaryContact = x.PrimaryContact.GetValueOrDefault(), }).OrderByDescending(x => x.PrimaryContact).ThenBy(x => x.ContactSurname); ClientCompanyContactListResponseModel responseModel = new ClientCompanyContactListResponseModel() { CompanyContactListModel = applicationServiceUserList.ToList(), Succeeded = true }; return responseModel; } public ClientCompanyContactListResponseModel GetErrorMessagesForContactList(HttpStatusCode statusCode, Exception exception, int clientCompanyId) { ClientCompanyContactListResponseModel responseModel = new ClientCompanyContactListResponseModel(); switch (statusCode) { case HttpStatusCode.BadRequest: responseModel.ResponseMessages = new Dictionary<string, string[]> { { "Errors", new string[] {$"List of contacts could not be retrieved for Client Company with ID {clientCompanyId}. {exception?.Message}" } } }; break; } return responseModel; } private ClientCompanyModel MapClientCompany(ClientCompany clientCompany) { return new ClientCompanyModel() { ID = clientCompany.Id, Name = clientCompany.Name, Crn = clientCompany.Crn, DealerAppUserID = clientCompany.DealerAppUserId, Description = clientCompany.Description, TradingName = clientCompany.TradingName, TelephoneNumber = clientCompany.TelephoneNumber, FaxNumber = clientCompany.FaxNumber, WebsiteURL = clientCompany.WebsiteUrl, Address = clientCompany.Address, ClientCompanyTypeID = clientCompany.ClientCompanyTypeId, ClientCompanyStatusID = clientCompany.ClientCompanyStatusId, UpdatedByAuthUserID = clientCompany.UpdatedByAuthUserId, UpdatedDateTime = clientCompany.UpdatedDateTime, ImportantNote = clientCompany.ImportantNote, ClientCompanyCategoryID = clientCompany.ClientCompanyCategoryId, IsHouseAccount = clientCompany.IsHouseAccount, PostCode = clientCompany.PostCode, ApprovedDateTime = clientCompany.ApprovedDateTime, IsKYC = clientCompany.IsKyc, IsTandCs = clientCompany.IsTandCs, IsRiskWarning = clientCompany.IsRiskWarning, ClientCompanyOptionStatusID = clientCompany.ClientCompanyStatusId, ApprovedOptionDateTime = clientCompany.ApprovedOptionDateTime, IsPitched = clientCompany.IsPitched, PitchedByAppUserID = clientCompany.PitchedByAppUserId, PitchedDateTime = clientCompany.PitchedDateTime, AccountFormsSentDateTime = clientCompany.AccountFormsSentDateTime, IsInternalAccount = clientCompany.IsInternalAccount, QualifiedNewTradeCode = clientCompany.QualifiedNewTradeCode, TradingAddress = clientCompany.TradingAddress, MaxOpenGBP = clientCompany.MaxOpenGbp, MaxTradeSizeGBP = clientCompany.MaxTradeSizeGbp, MaxTenorMonths = clientCompany.MaxTenorMonths, MaxCreditLimit = clientCompany.MaxCreditLimit, TradingPostCode = clientCompany.TradingPostCode, EMIR_LEI = clientCompany.EmirLei, EMIR_EEA = clientCompany.EmirEea, AssignNewTrades = clientCompany.AssignNewTrades, ClientCompanyIndustrySectorID = clientCompany.ClientCompanyIndustrySectorId, ClientCompanySalesRegionID = clientCompany.ClientCompanySalesRegionId, SpreadsNote = clientCompany.SpreadsNote, ClientCompanyLinkedGroupID = clientCompany.ClientCompanyLinkedGroupId, IsExcludedFromEMoney = clientCompany.IsExcludedFromEmoney, FirstTradeDate = clientCompany.FirstTradeDate, ClientCompanyCreditTypeID 
= clientCompany.ClientCompanyCreditTypeId }; } private static bool IsForward(DateTime contractDate, DateTime valueDate) { var dayDifference = (int)valueDate.Date.Subtract(contractDate.Date).TotalDays; if (dayDifference < 0) return true; int workingDays = Enumerable .Range(1, dayDifference) .Select(x => contractDate.AddDays(x)) .Count(x => x.DayOfWeek != DayOfWeek.Saturday && x.DayOfWeek != DayOfWeek.Sunday); return workingDays > 2; } /// <summary> /// disposing == true coming from Dispose() /// disposing == false coming from finaliser /// </summary> /// <param name="disposing"></param> protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _clientCompanyUow?.Dispose(); _currencyService?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/FXForwardTrade2OPIModel.cs using System; namespace Argentex.Core.Service.Models.ClientCompany { public class FXForwardTrade2OPIModel { public long ID { get; set; } public string FXForwardTradeCode { get; set; } public int AccountID { get; set; } public string AccountName { get; set; } public decimal Amount { get; set; } public string CurrencyCode { get; set; } public string Details { get; set; } public DateTime CreatedDateTime { get; set; } public int CreatedByAuthUserID { get; set; } public string CreatedByAuthUserName { get; set; } public bool IsClient { get; set; } public DateTime ValueDate { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Notification/NotificationController.cs using Argentex.Core.Service.Models.Order; using Argentex.Core.Service.Order; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using System.Threading.Tasks; namespace Argentex.Core.Api.Controllers.Order { [Produces("application/json")] [Route("api/notification")] public class NotificationController : Controller { private readonly INotificationService _notificationService; public NotificationController(INotificationService notificationService) { _notificationService = notificationService; } [HttpGet] [Route("get-company-users/{clientCompanyId:int}")] public IActionResult GetCompanyUsers(int clientCompanyId) { return Ok(_notificationService.GetCompanyUsers(clientCompanyId)); } [HttpPost] [Route("save-app-user-notification")] public IActionResult SaveUserNotification([FromBody] AppUserNotificationModel model) { return Ok(_notificationService.SaveAppUserNotification(model)); } #region V2 //[HttpGet] //[AllowAnonymous] //Remove after Trader Token implementation //[Route("get-types-v2")] //public IActionResult GetNotificationTypesV2() //{ // return Ok(_notificationService.GetNotificationTypes()); //} //[HttpGet] //[AllowAnonymous] //Remove after Trader Token implementation //[Route("get-company-notifications-v2/{clientCompanyId:int}")] //public IActionResult GetCompanyNotificationsV2(int clientCompanyId) //{ // return Ok(_notificationService.GetCompanyNotifications(clientCompanyId)); //} //[HttpPost] //[AllowAnonymous] //Remove after Trader Token implementation //[Route("save-user-notification-v2")] //public IActionResult SaveUserNotificationV2([FromBody] AppUserNotificationModel model) //{ // return Ok(_notificationService.SaveAppUserNotification(model)); //} #endregion } }<file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientCompanies/ClientCompanyAccountsUoW.cs using Argentex.Core.DataAccess.Entities; using Microsoft.EntityFrameworkCore; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using 
Synetec.Data.UnitOfWork.GenericRepo; using System.Collections.Generic; using System.Linq; namespace Argentex.Core.UnitsOfWork.ClientCompanies { public class ClientCompanyAccountsUoW : BaseUow, IClientCompanyAccountsUoW { private IGenericRepo<ClientCompanyOpi> _clientCompanyOpiRepository; private IGenericRepo<ClientCompany> _clientCompanyRepository; private IGenericRepo<Currency> _currencyRepository; private IGenericRepo<ClearingCodePrefix> _clearingCodePrefixRepository; private IGenericRepo<ClientCompanyCurrencyDefaultOpi> _clientCompanyCurrencyDefaultOpi; private IGenericRepo<ClientCompanyVirtualAccount> _clientCompanyVirtualAccountRepository; private IGenericRepo<VirtualAccountType> _virtualAccountTypeRepository; private IGenericRepo<VirtualAccountTypeBankAccount> _virtualAccountTypeBankAccountRepository; private IGenericRepo<FxforwardTrade2Opi> _fxforwardTrade2OpiRepository; private IGenericRepo<FxforwardTrade> _tradeRepository; public ClientCompanyAccountsUoW(FXDB1Context context) : base(context) { } public IGenericRepo<ClientCompany> ClientCompanyRepository => _clientCompanyRepository = _clientCompanyRepository ?? new GenericRepo<ClientCompany>(Context); public IGenericRepo<ClientCompanyOpi> ClientCompanyOpiRepository => _clientCompanyOpiRepository = _clientCompanyOpiRepository ?? new GenericRepo<ClientCompanyOpi>(Context); public IGenericRepo<Currency> CurrencyRepository => _currencyRepository = _currencyRepository ?? new GenericRepo<Currency>(Context); private IGenericRepo<ClearingCodePrefix> ClearingCodePrefixRepository => _clearingCodePrefixRepository = _clearingCodePrefixRepository ?? new GenericRepo<ClearingCodePrefix>(Context); private IGenericRepo<ClientCompanyCurrencyDefaultOpi> ClientCompanyCurrencyDefaultOpi => _clientCompanyCurrencyDefaultOpi = _clientCompanyCurrencyDefaultOpi ?? new GenericRepo<ClientCompanyCurrencyDefaultOpi>(Context); private IGenericRepo<ClientCompanyVirtualAccount> ClientCompanyVirtualAccountRepository => _clientCompanyVirtualAccountRepository = _clientCompanyVirtualAccountRepository ?? new GenericRepo<ClientCompanyVirtualAccount>(Context); private IGenericRepo<VirtualAccountType> VirtualAccountTypeRepository => _virtualAccountTypeRepository = _virtualAccountTypeRepository ?? new GenericRepo<VirtualAccountType>(Context); private IGenericRepo<VirtualAccountTypeBankAccount> VirtualAccountTypeBankAccountRepository => _virtualAccountTypeBankAccountRepository = _virtualAccountTypeBankAccountRepository ?? new GenericRepo<VirtualAccountTypeBankAccount>(Context); private IGenericRepo<FxforwardTrade2Opi> FxforwardTrade2OpiRepository => _fxforwardTrade2OpiRepository = _fxforwardTrade2OpiRepository ?? new GenericRepo<FxforwardTrade2Opi>(Context); private IGenericRepo<FxforwardTrade> TradeRepository => _tradeRepository = _tradeRepository ?? 
new GenericRepo<FxforwardTrade>(Context); public IEnumerable<ClientCompanyOpi> GetClientCompanyAccounts(int clientCompanyId) { return ClientCompanyOpiRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId); } public IQueryable<ClientCompanyOpi> GetClientCompanyAccountQueryable(int clientCompanyOpiId) { return ClientCompanyOpiRepository .GetQueryable(x => x.Id == clientCompanyOpiId); } public IQueryable<ClearingCodePrefix> GetClearingPrefixCodes() { return ClearingCodePrefixRepository .GetQueryable(); } public ClientCompanyOpi GetClientCompanyAccount(int opiId) { return ClientCompanyOpiRepository.GetByPrimaryKey(opiId); } public void UpdateAccount(ClientCompanyOpi clientCompanyOpi) { ClientCompanyOpiRepository.Update(clientCompanyOpi); SaveContext(); } public IQueryable<ClientCompanyCurrencyDefaultOpi> GetClientCompanyDefaultAccount(int clientCompanyId, int currencyId) { return ClientCompanyCurrencyDefaultOpi.GetQueryable(x => x.ClientCompanyId == clientCompanyId && x.CurrencyId == currencyId, orderBy: null, includeProperties: "ClientCompany,ClientCompanyOpi"); } public void RemoveDefaultAccount(ClientCompanyCurrencyDefaultOpi account) { ClientCompanyCurrencyDefaultOpi.Delete(account); SaveContext(); } public void AddDefaultAccount(ClientCompanyCurrencyDefaultOpi defaultAccount) { ClientCompanyCurrencyDefaultOpi.Insert(defaultAccount); SaveContext(); } public void AddClientCompanyOpi(ClientCompanyOpi clientCompanyOpi) { ClientCompanyOpiRepository.Insert(clientCompanyOpi); SaveContext(); } public IEnumerable<VirtualAccountType> GetVirtualAccountType(string description) { return VirtualAccountTypeRepository .GetQueryable(x => x.Description == description); } public IEnumerable<ClientCompanyVirtualAccount> GetClientCompanyVirtualAccount(ClientCompany company, VirtualAccountType vat) { return ClientCompanyVirtualAccountRepository .GetQueryable(x => x.ClientCompanyId == company.Id && x.VirtualAccountTypeId == vat.Id); } public IEnumerable<VirtualAccountTypeBankAccount> GetVirtualAccountTypeBankAccount(VirtualAccountType vat) { return VirtualAccountTypeBankAccountRepository .GetQueryable(x => x.VirtualAccountTypeId == vat.Id, orderBy: null, includeProperties: "BankAccount"); } public IQueryable<FxforwardTrade2Opi> GetTradeOPIs(string tradeCode) { var query = FxforwardTrade2OpiRepository.GetQueryable(x => x.FxforwardTradeCode == tradeCode); query = query.Include(x => x.ClientCompanyOpi) .Include(x => x.CreatedByAuthUser) .Include(x => x.CreatedByAuthUser.Application) .Include(x => x.CreatedByAuthUser.AppUser) //for the Argentex user .Include(x => x.CreatedByAuthUser.ClientCompanyContactAuthUser) //for the Client user .Include(x => x.FxforwardTradeCodeNavigation) .Include(x => x.FxforwardTradeCodeNavigation.Lhsccy) .Include(x => x.FxforwardTradeCodeNavigation.Rhsccy); return query; } public void AddTradeOPI(FxforwardTrade2Opi fxforwardTrade2Opi) { FxforwardTrade2OpiRepository.Insert(fxforwardTrade2Opi); SaveContext(); } public int GetAssociatedTradesCount(int clientCompanyOpiId, int statusDeliveredID) { var associatedOpiSettlements = FxforwardTrade2OpiRepository .GetQueryable(e => e.ClientCompanyOpiid == clientCompanyOpiId && e.FxforwardTradeCodeNavigation.FxforwardTradeStatusId != statusDeliveredID) .Select(x => x.FxforwardTradeCode).Distinct().ToList(); var associatedTrades = TradeRepository .GetQueryable(t => t.ClientCompanyOpiid == clientCompanyOpiId && t.FxforwardTradeStatusId != statusDeliveredID && !t.Code.Contains("/RL")) .Select(x => x.Code).Distinct().ToList(); return 
associatedOpiSettlements.Union(associatedTrades).Count(); } public IList<long> GetSettlementIDs(int clientCompanyOpiId) { return FxforwardTrade2OpiRepository.GetQueryable(x => x.ClientCompanyOpiid == clientCompanyOpiId).Select(x => x.Id).ToList(); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/ClientSiteAction/ClientSiteActionService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.ClientSiteAction; using Argentex.Core.UnitsOfWork.ClientSiteAction; using System; using System.Collections.Generic; using System.Linq; namespace Argentex.Core.Service.ClientSiteAction { public class ClientSiteActionService : IClientSiteActionService { private readonly IClientSiteActionUow _clientSiteActionUow; private readonly IAppSettingService _appSettingService; private bool _disposed; public ClientSiteActionService(IClientSiteActionUow clientSiteActionUow, IAppSettingService appSettingService) { _clientSiteActionUow = clientSiteActionUow; _appSettingService = appSettingService; } public ClientSiteActionModel GetClientSiteAction(long clientSiteActionID) { return _clientSiteActionUow.GetClientSiteAction(clientSiteActionID).Select(x => new ClientSiteActionModel() { ID = x.Id, ActionType = x.ClientSiteActionType.Name, ActionStatus = x.ClientSiteActionStatus.Name, Details = x.Details, CreatedByUser = x.CreatedByAuthUser.UserName, CreatedDateTime = x.CreatedDateTime, UpdatedByUser = x.UpdatedByAuthUser.UserName, UpdatedDateTime = x.UpdatedDateTime }).SingleOrDefault(); } public ClientSiteActionModel GetClientSiteActionByOPIID(int clientCompanyOPIID) { return _clientSiteActionUow.GetClientSiteActionByOPIID(clientCompanyOPIID).Select(x => new ClientSiteActionModel() { ID = x.ClientSiteAction.Id, ActionType = x.ClientSiteAction.ClientSiteActionType.Name, ActionStatus = x.ClientSiteAction.ClientSiteActionStatus.Name, Details = x.ClientSiteAction.Details, CreatedByUser = x.ClientSiteAction.CreatedByAuthUser.UserName, CreatedDateTime = x.ClientSiteAction.CreatedDateTime, UpdatedByUser = x.ClientSiteAction.UpdatedByAuthUser.UserName, UpdatedDateTime = x.ClientSiteAction.UpdatedDateTime }).SingleOrDefault(); } public void LogActionOpiPayment(int authUserId, long fxforwardTrade2OpiId, string accountName, decimal amount) { var actionType = _clientSiteActionUow.GetClientSiteActionTypeFromName(SystemConstant.ClientSiteAction_Type_OPIPayment); var actionStatus = _clientSiteActionUow.GetClientSiteActionStatusFromName(SystemConstant.ClientSiteAction_Status_Pending); var details = $"{fxforwardTrade2OpiId}: {accountName}, Amount: {amount}"; var action = CreateAction(authUserId, details, actionType.Id, actionStatus.Id); _clientSiteActionUow.LogAction(action, fxforwardTrade2OpiId.ToString()); } public void LogActionSwapCreation(int authUserId, int FxswapId) { var actionType = _clientSiteActionUow.GetClientSiteActionTypeFromName(SystemConstant.ClientSiteAction_Type_SwapCreation); var actionStatus = _clientSiteActionUow.GetClientSiteActionStatusFromName(SystemConstant.ClientSiteAction_Status_Pending); var action = CreateAction(authUserId, FxswapId.ToString(), actionType.Id, actionStatus.Id); _clientSiteActionUow.LogAction(action, FxswapId.ToString()); } public void LogActionUnconfirmedTrade(int authUserId, string tradeCode) { var actionType = _clientSiteActionUow.GetClientSiteActionTypeFromName(SystemConstant.ClientSiteAction_Type_NoFIXConfirmation); var actionStatus = 
_clientSiteActionUow.GetClientSiteActionStatusFromName(SystemConstant.ClientSiteAction_Status_Pending); var action = CreateAction(authUserId, tradeCode, actionType.Id, actionStatus.Id); _clientSiteActionUow.LogAction(action, tradeCode); } public void LogActionNewOpi(int authUserId, int newOpiId) { var actionType = _clientSiteActionUow.GetClientSiteActionTypeFromName(SystemConstant.ClientSiteAction_Type_NewOPI); var actionStatus = _clientSiteActionUow.GetClientSiteActionStatusFromName(SystemConstant.ClientSiteAction_Status_Requested); var action = CreateAction(authUserId, newOpiId.ToString(), actionType.Id, actionStatus.Id); _clientSiteActionUow.LogAction(action, newOpiId.ToString()); } private static DataAccess.Entities.ClientSiteAction CreateAction(int authUserId, string details, int actionTypeId, int actionStatusId) { var action = new DataAccess.Entities.ClientSiteAction { ClientSiteActionTypeId = actionTypeId, ClientSiteActionStatusId = actionStatusId, Details = details, CreatedByAuthUserId = authUserId, CreatedDateTime = DateTime.UtcNow, UpdatedByAuthUserId = authUserId, UpdatedDateTime = DateTime.UtcNow }; return action; } public IEnumerable<CSATradesWithoutFIXConfirmationModel> GetTradesWithoutFIXConfirmation() { return _clientSiteActionUow.GetTradesWithoutFIXConfirmation() .Select(x => new CSATradesWithoutFIXConfirmationModel() { ActionID = x.ClientSiteAction.Id, FXForwardTradeCode = x.FxforwardTradeCodeNavigation.Code, ValueDate = x.FxforwardTradeCodeNavigation.ValueDate, SellAmount = x.FxforwardTradeCodeNavigation.IsBuy ? x.FxforwardTradeCodeNavigation.ClientRhsamt : x.FxforwardTradeCodeNavigation.ClientLhsamt, BuyAmount = x.FxforwardTradeCodeNavigation.IsBuy ? x.FxforwardTradeCodeNavigation.ClientLhsamt : x.FxforwardTradeCodeNavigation.ClientRhsamt, Rate = x.FxforwardTradeCodeNavigation.ClientRate ?? 
0.0m, CurrencyPair = x.FxforwardTradeCodeNavigation.CurrencyPair, ActionCreatedDateTime = x.ClientSiteAction.CreatedDateTime, ActionStatus = x.ClientSiteAction.ClientSiteActionStatus.Name, ActionStatusID = x.ClientSiteAction.ClientSiteActionStatus.Id }) .OrderByDescending(x => x.ActionCreatedDateTime) .ToList(); } public void UpdateClientSiteAction(ClientSiteActionModel model) { _clientSiteActionUow.UpdateClientSiteAction(new DataAccess.Entities.ClientSiteAction() { Id = model.ID, UpdatedByAuthUserId = model.UpdatedByUserID, UpdatedDateTime = model.UpdatedDateTime, ClientSiteActionStatusId = model.ActionStatusID }); } public ClientSiteActionStatus GetClientSiteActionStatus(string name) { return _clientSiteActionUow.GetClientSiteActionStatusFromName(name); } public IEnumerable<CSAOPIsAssignedToTradesDisplayModel> GetOPIsAssignedToTrades() { var clientSiteActions = _clientSiteActionUow.GetOPIsAssignedToTrades(); return clientSiteActions .Select(x => new CSAOPIsAssignedToTradesDisplayModel { CompanyName = x.FxforwardTrade2Opi.ClientCompanyOpi.ClientCompany.Name, CompanyID = x.FxforwardTrade2Opi.ClientCompanyOpi.ClientCompany.Id, TradeCode = x.FxforwardTrade2Opi.FxforwardTradeCode, OPIName = x.FxforwardTrade2Opi.ClientCompanyOpi.AccountName, Amount = x.FxforwardTrade2Opi.Amount, CreatedByClientName = GetClientUserName(x.ClientSiteAction.CreatedByAuthUser.ClientCompanyContactAuthUser), CreatedDateTime = x.ClientSiteAction.CreatedDateTime }) .OrderByDescending(y => y.CreatedDateTime) .ToList(); } public IEnumerable<CSASwapsModel> GetSwaps() { IQueryable<CSASwapsModel> parentTradeList = _clientSiteActionUow.GetSwaps() .Select(x => new CSASwapsModel() { ActionID = x.ClientSiteAction.Id, ClientCompanyID = x.Fxswap.ParentTradeCodeNavigation.ClientCompanyNavigation.Id, ClientCompanyName = x.Fxswap.ParentTradeCodeNavigation.ClientCompanyNavigation.Name, FXForwardTradeCode = x.Fxswap.ParentTradeCode, CreatedByClientName = GetClientUserName(x.ClientSiteAction.CreatedByAuthUser.ClientCompanyContactAuthUser), ValueDate = x.Fxswap.ParentTradeCodeNavigation.ValueDate, SellAmount = x.Fxswap.ParentTradeCodeNavigation.IsBuy ? x.Fxswap.ParentTradeCodeNavigation.ClientRhsamt : x.Fxswap.ParentTradeCodeNavigation.ClientLhsamt, BuyAmount = x.Fxswap.ParentTradeCodeNavigation.IsBuy ? x.Fxswap.ParentTradeCodeNavigation.ClientLhsamt : x.Fxswap.ParentTradeCodeNavigation.ClientRhsamt, Rate = x.Fxswap.ParentTradeCodeNavigation.ClientRate ?? 0.0m, CurrencyPair = x.Fxswap.ParentTradeCodeNavigation.CurrencyPair, ActionCreatedDateTime = x.ClientSiteAction.CreatedDateTime, ActionStatus = x.ClientSiteAction.ClientSiteActionStatus.Name, ActionStatusID = x.ClientSiteAction.ClientSiteActionStatus.Id, IsParentTrade = true }); IQueryable<CSASwapsModel> deliveryLegList = _clientSiteActionUow.GetSwaps() .Select(x => new CSASwapsModel() { ActionID = x.ClientSiteAction.Id, ClientCompanyID = x.Fxswap.DeliveryLegTradeCodeNavigation.ClientCompanyNavigation.Id, ClientCompanyName = x.Fxswap.DeliveryLegTradeCodeNavigation.ClientCompanyNavigation.Name, FXForwardTradeCode = x.Fxswap.DeliveryLegTradeCode, CreatedByClientName = GetClientUserName(x.ClientSiteAction.CreatedByAuthUser.ClientCompanyContactAuthUser), ValueDate = x.Fxswap.DeliveryLegTradeCodeNavigation.ValueDate, SellAmount = x.Fxswap.DeliveryLegTradeCodeNavigation.IsBuy ? x.Fxswap.DeliveryLegTradeCodeNavigation.ClientRhsamt : x.Fxswap.DeliveryLegTradeCodeNavigation.ClientLhsamt, BuyAmount = x.Fxswap.DeliveryLegTradeCodeNavigation.IsBuy ? 
x.Fxswap.DeliveryLegTradeCodeNavigation.ClientLhsamt : x.Fxswap.DeliveryLegTradeCodeNavigation.ClientRhsamt, Rate = x.Fxswap.DeliveryLegTradeCodeNavigation.ClientRate ?? 0.0m, CurrencyPair = x.Fxswap.DeliveryLegTradeCodeNavigation.CurrencyPair, ActionCreatedDateTime = x.ClientSiteAction.CreatedDateTime, ActionStatus = x.ClientSiteAction.ClientSiteActionStatus.Name, ActionStatusID = x.ClientSiteAction.ClientSiteActionStatus.Id, IsParentTrade = false }); IQueryable<CSASwapsModel> reversalLegList = _clientSiteActionUow.GetSwaps() .Select(x => new CSASwapsModel() { ActionID = x.ClientSiteAction.Id, ClientCompanyID = x.Fxswap.ReversalLegTradeCodeNavigation.ClientCompanyNavigation.Id, ClientCompanyName = x.Fxswap.ReversalLegTradeCodeNavigation.ClientCompanyNavigation.Name, FXForwardTradeCode = x.Fxswap.ReversalLegTradeCode, CreatedByClientName = GetClientUserName(x.ClientSiteAction.CreatedByAuthUser.ClientCompanyContactAuthUser), ValueDate = x.Fxswap.ReversalLegTradeCodeNavigation.ValueDate, SellAmount = x.Fxswap.ReversalLegTradeCodeNavigation.IsBuy ? x.Fxswap.ReversalLegTradeCodeNavigation.ClientRhsamt : x.Fxswap.ReversalLegTradeCodeNavigation.ClientLhsamt, BuyAmount = x.Fxswap.ReversalLegTradeCodeNavigation.IsBuy ? x.Fxswap.ReversalLegTradeCodeNavigation.ClientLhsamt : x.Fxswap.ReversalLegTradeCodeNavigation.ClientRhsamt, Rate = x.Fxswap.ReversalLegTradeCodeNavigation.ClientRate ?? 0.0m, CurrencyPair = x.Fxswap.ReversalLegTradeCodeNavigation.CurrencyPair, ActionCreatedDateTime = x.ClientSiteAction.CreatedDateTime, ActionStatus = x.ClientSiteAction.ClientSiteActionStatus.Name, ActionStatusID = x.ClientSiteAction.ClientSiteActionStatus.Id, IsParentTrade = false }); return parentTradeList.Concat(deliveryLegList).Concat(reversalLegList) .OrderByDescending(x => x.ActionCreatedDateTime) .ToList(); } private string GetClientUserName(ICollection<ClientCompanyContact> clientCompanyContactAuthUser) { if (clientCompanyContactAuthUser != null && clientCompanyContactAuthUser.Count > 0) { ClientCompanyContact clientCompanyContact = clientCompanyContactAuthUser.FirstOrDefault(); return $"{clientCompanyContact.Forename} {clientCompanyContact.Surname}"; } return string.Empty; } public IEnumerable<CSANewOPIRequestDisplayModel> GetNewOPIRequested() { var clientSiteActions = _clientSiteActionUow.GetNewOPIRequested(); return clientSiteActions .Select(x => new CSANewOPIRequestDisplayModel { CompanyName = x.ClientCompanyOpi.ClientCompany.Name, CompanyID = x.ClientCompanyOpi.ClientCompany.Id, CurrencyCode = x.ClientCompanyOpi.Currency.Code, OPIName = x.ClientCompanyOpi.AccountName, Status= x.ClientSiteAction.ClientSiteActionStatus.Name, CreatedByClientName = GetClientUserName(x.ClientSiteAction.CreatedByAuthUser.ClientCompanyContactAuthUser), CreatedDateTime = x.ClientSiteAction.CreatedDateTime }) .OrderByDescending(y => y.CreatedDateTime) .ToList(); } public void DeleteAction2AssignedSettlementLink(long settlementId) { _clientSiteActionUow.DeleteAction2AssignedSettlementLink(settlementId); } #region disposing protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _clientSiteActionUow?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/SetDefaultAccountModel.cs namespace Argentex.Core.Service.Models.ClientCompany { public class SetDefaultAccountModel { public int ClientCompanyOpiId { get; set; } public int AuthUserId { get; set; } } } 
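// Example payload (illustrative only; the consuming endpoint is defined elsewhere,
// default camelCase JSON binding is assumed, and the values below are placeholders):
// { "clientCompanyOpiId": 123, "authUserId": 456 }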
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/Order/ExecuteOrderDto.cs using System; namespace Argentex.Core.Api.Models { public class ExecuteOrderDto { public string SellCcy { get; set; } public string BuyCcy { get; set; } public decimal? Amount { get; set; } public DateTime? ValueDate { get; set; } public decimal ReciprocalValue { get; set; } public string AuthUserName { get; set; } public int ClientCompanyId { get; set; } public decimal? ClientSellAmount { get; set; } public decimal? ClientRate { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyTradeCount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyTradeCount { public int ClientCompanyId { get; set; } public int TradeCount { get; set; } public ClientCompany ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/TokenModel.cs namespace Argentex.Core.Service.Models.Identity { public class TokenModel { public string Token_type { get; set; } public string Access_token { get; set; } public int Expires_in { get; set; } public string Refresh_token { get; set; } public string Id_token { get; set; } public string Validation_code { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceNature.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceNature { public ComplianceNature() { ClientCompanyCompliance = new HashSet<ClientCompanyCompliance>(); } public int Id { get; set; } public string Description { get; set; } public string EmirValue { get; set; } public int Sequence { get; set; } public ICollection<ClientCompanyCompliance> ClientCompanyCompliance { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/ClientCompany/IClientCompanyService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.ClientCompany; using System; using System.Collections.Generic; using System.Net; using System.Threading.Tasks; namespace Argentex.Core.Service { public interface IClientCompanyService : IDisposable { string GetClientCompanyName(int clientCompanyId); ClientCompanyModel GetClientCompany(int clientCompanyId); ICollection<ClientCompaniesModel> GetClientCompanies(); ICollection<ClientCompanyAccountModel> GetClientCompanyAccounts(int clientCompanyId); ClientCompanyContactResponseModel GetClientCompanyContact(ClientCompanyContactSearchContext clientCompanyContactSearchContext); bool GetTradeExecutionStatusBySpread(int clientCompanyID, string currency1, string currency2, bool isBuyDirection); int GetClientCompanySpread(int clientCompanyID, string currency1, string currency2, bool isBuyDirection, DateTime valueDate, DateTime contractDate); ClientCompanyOnlineDetailsModel GetClientCompanyOnlineDetailsModel(int clientCompanyId); ClientCompanyOnlineDetails GetClientCompanyOnlineDetails(int clientCompanyId); void AddSpredAdjustment(SpreadAdjustmentModel model); void SetKicked(int clientCompanyID); ClientCompanyAccountModel GetClientCompanyDefaultAccount(int clientCompanyId, int currencyId); Task<IEnumerable<ClientCompanyContactCategoryModel>> GetClientCompanyContactCategories(int clientCompanyContactId); Task<IEnumerable<ContactCategoryModel>> GetContactCategories(); bool AddContactCategory(ContactCategoryModel model); ContactCategory GetContactCategory(int contactCategoryId); ContactCategory GetContactCategory(string 
contactCategoryDescription); bool ProcessClientCompanyContactCategories(ClientCompanyContactBulkCategoryModel model); ClientCompanyContactListResponseModel GetCompanyContactList(int clientCompanyId); ClientCompanyContactResponseModel GetErrorMessages(HttpStatusCode statusCode, Exception exception, ClientCompanyContactSearchContext clientCompanyContactSearchContext); ClientCompanyContactListResponseModel GetErrorMessagesForContactList(HttpStatusCode statusCode, Exception exception, int clientCompanyId); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Exceptions/IdentityException.cs using System; using System.Collections.Generic; using System.Linq; using System.Runtime.Serialization; using System.Threading.Tasks; namespace Argentex.Core.Api.Exceptions { public class IdentityException : Exception { public IdentityException() { } public IdentityException(string message) : base(message) { } public IdentityException(string message, Exception innerException) : base(message, innerException) { } protected IdentityException(SerializationInfo info, StreamingContext context) : base(info, context) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/ClientAuthentication/ClientConfig.cs using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using Argentex.Core.Identity.DataAccess; using Microsoft.Extensions.DependencyInjection; namespace Argentex.Core.Api.ClientAuthentication { [SuppressMessage("ReSharper", "UnusedMember.Global")] [DebuggerDisplay("{ClientId} - {DisplayName}")] public class ClientConfig { public string ClientId { get; set; } public string Secret { get; set; } public string DisplayName { get; set; } public List<string> Permissions { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyPipeline.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyPipeline { public int ClientCompanyId { get; set; } public int TotalCalls { get; set; } public DateTime? LastCall { get; set; } public DateTime? LastLongCall { get; set; } public DateTime? LastEmail { get; set; } public string LastEmailFrom { get; set; } public string LastEmailTo { get; set; } public int? NextPipelineActionId { get; set; } public DateTime? NextActionDueDate { get; set; } public DateTime? NextActionUpdated { get; set; } public int? Rating { get; set; } public int? Confidence { get; set; } public int? Progress { get; set; } public DateTime? NextTradeDate { get; set; } public byte[] UpdateTimeStamp { get; set; } public int? UpdateAuthUserId { get; set; } public int? CallsToBrochure { get; set; } public int? CallsToAccFormSent { get; set; } public int? 
CallsToAccOpened { get; set; } public ClientCompany ClientCompany { get; set; } public PipelineAction NextPipelineAction { get; set; } public AuthUser UpdateAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/User/UserController.cs using Argentex.Core.Api.Exceptions; using Argentex.Core.Api.Models; using Argentex.Core.Api.Models.SecurityModels; using Argentex.Core.Service; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.Users.Model; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; using SynetecLogger; using System; using System.Collections.Generic; using System.Linq; using System.Security.Authentication; using System.Security.Claims; using System.Threading.Tasks; using Argentex.Core.Service.Enums; using Argentex.Core.Service.Extensions; namespace Argentex.Core.Api.Controllers.User { [Route("api/user")] public class UserController : Controller { private readonly IUserService _userService; private readonly ILogWrapper _logger; private readonly IConfiguration _config; private readonly IClientCompanyService _clientCompanyService; public UserController(IUserService userService, ILogWrapper logger, IConfiguration config, IClientCompanyService clientCompanyService) { _userService = userService; _logger = logger; _config = config; _clientCompanyService = clientCompanyService; } [HttpGet] [Route("get-all-unapproved-users")] public IActionResult GetUnapprovedApplicationUsers() { var appUserList = _userService.GetUnapprovedApplicationUsers(); if (!appUserList.Any()) return NoContent(); return Ok(appUserList); } [HttpGet] [Route("get-users-of-company/{clientCompanyId:int}")] public IActionResult GetApplicationUsersOfCompany(int clientCompanyId) { var appUserList = _userService.GetApplicationUsersOfCompany(clientCompanyId); if (!appUserList.Any()) return NoContent(); return Ok(appUserList); } [HttpGet] [Route("{userId:int}")] public async Task<IActionResult> GetApplicationUser(int userId) { var appUser = await _userService.GetApplicationUserAsync(userId.ToString()); if (appUser == null) return BadRequest(); return Ok(appUser); } [HttpPost] [Route("add")] public async Task<IActionResult> AddUser([FromBody] AddUserModel model) { if (model == null) return BadRequest(ResponseModel.ResponseWithErrors("Model must be supplied in the body of the request")); if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); ApplicationServiceUser user = MapApplicationServiceUserFrom(model); var result = await _userService.AddUnapprovedUserAsync(user); if (result.Succeeded) return Ok(ResponseModel.ResponseWithInfo($"User {model.Username} created successfully")); string message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error creating new user {model.Username}. 
Message: {message}")); return BadRequest(ResponseModel.ResponseFromIdentityModel(result)); } [HttpPost] [Route("sendActivationEmail")] public async Task<IActionResult> SendActivationEmail([FromBody] AddUserModel model) { if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); ApplicationServiceUser user = MapApplicationServiceUserFrom(model); var result = await _userService.SendUserNewPasswordEmailAsync(user, _clientCompanyService.GetClientCompanyName(user.ClientCompanyId)); if (result.Succeeded) return Ok(ResponseModel.ResponseWithInfo( $"New Password Email sent to User: {model.Username} with Email: {model.Email}")); string message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error creating new user {model.Username}. Message: {message}")); return BadRequest(ResponseModel.ResponseFromIdentityModel(result)); } [HttpPut] [Route("update")] public async Task<IActionResult> Update([FromBody] AddUserModel model) { if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); ApplicationServiceUser user = MapApplicationServiceUserFrom(model); var result = await _userService.UpdateUserAsync(user); if (result.Succeeded) return Ok(ResponseModel.ResponseWithInfo($"User {model.Username} has been updated successfully")); var message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error updating user {model.Username}. Message: {message}")); return BadRequest(ResponseModel.ResponseWithErrors(result.Errors.Select(x => x.Description).ToArray())); } [HttpPut] [Route("updateContact")] public async Task<IActionResult> UpdateContact([FromBody] AddUserModel model) { if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); ApplicationServiceUser user = MapApplicationServiceUserFrom(model); var result = await _userService.UpdateUserContactAsync(user); if (result.Succeeded) { return Ok(ResponseModel.ResponseWithInfo($"User {model.Username} has been updated successfully")); } if (result.Errors.Any(e => e.Code == "404")) return NotFound(ResponseModel.ResponseWithErrors($"User {user.ClientCompanyContactId} not found.")); var message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error updating user {model.Username}. Message: {message}")); return BadRequest(ResponseModel.ResponseWithErrors(result.Errors.Select(x => x.Description).ToArray())); } [HttpPut] [Route("update-my-account")] public async Task<IActionResult> UpdateMyAccount([FromBody] UpdateUserModel model) { if (!ModelState.IsValid) { return BadRequest(ModelState); } ApplicationServiceUser user = MapApplicationServiceUserFrom(model); var message = string.Empty; var result = await _userService.UpdateMyAccountAsync(user); if (result.Succeeded) { message = $"User {model.Username} updated successfully"; return Ok(new { data = message }); } message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error updating user {model.Username}. 
Message: {message}")); return BadRequest(new { data = result.Errors?.FirstOrDefault()?.Description }); } [HttpPost] [Route("approve-users")] public async Task<IActionResult> ApproveUsers([FromBody] ApproveUsersRequest approvalsRequest) { if (!ModelState.IsValid) { return BadRequest(ModelState); } var results = await _userService.ApproveUsersAsync(approvalsRequest, GetCompaniesList()); var message = string.Empty; bool aFailure = false; foreach (var result in results) { if (!result.Succeeded) { aFailure = true; message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error approving user. Message: {message}")); } } if (aFailure) return BadRequest(); return Ok(); } [HttpPost] [Route("authorise-signatories")] public async Task<IActionResult> AuthoriseSignatories([FromBody] AuthoriseSignatoryRequest authorisationsRequest) { if (!ModelState.IsValid) { return BadRequest(ModelState); } var results = await _userService.AuthoriseSignatoryAsync(authorisationsRequest, GetCompaniesList()); var message = string.Empty; bool aFailure = false; foreach (var result in results) { if (!result.Succeeded) { aFailure = true; message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error authorising signatory. Message: {message}")); } } if (aFailure) return BadRequest(); return Ok(); } [HttpDelete] [Route("delete/{userId:int}")] public async Task<IActionResult> Delete(int userId) { if (!ModelState.IsValid) { return BadRequest(ModelState); } var message = string.Empty; var result = await _userService.DeleteUserAsync(userId.ToString()); if (result.Succeeded) { message = $"User with ID {userId.ToString()} , deleted successfully"; return Ok(new { data = message }); } message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error deleting user {userId.ToString()}. Message: {message}")); return BadRequest(new { data = result.Errors?.FirstOrDefault()?.Description }); } [HttpDelete] [Route("deleteContact/{clientCompanyContactId:int}")] public async Task<IActionResult> DeleteContact(int clientCompanyContactId) { if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); var result = await _userService.DeleteUserContactAsync(clientCompanyContactId); if (result.Succeeded) return Ok(ResponseModel.ResponseWithInfo($"User with ID {clientCompanyContactId} , deleted successfully")); string message = string.Join(";", result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}")); _logger.Error(new IdentityException($"Error deleting user {clientCompanyContactId.ToString()}. 
Message: {message}"));
            return BadRequest(ResponseModel.ResponseFromIdentityModel(result));
        }

        [HttpPost]
        [Route("login-details")]
        public IActionResult GetUserLoginDetails([FromBody] IList<int> clientCompanyIDs)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }

            var message = string.Empty;
            var results = _userService.GetUserLoginDetails(clientCompanyIDs);
            return Ok(results);
        }

        [HttpGet]
        [Route("authorised-signatories/{clientCompanyId:int}")]
        public IActionResult GetAuthorisedSignatories(int clientCompanyId)
        {
            return Ok(_userService.GetAuthorisedSignatories(clientCompanyId));
        }

        [HttpPost]
        [Route("approve-change-request")]
        public async Task<IActionResult> ApproveUserChangeRequest([FromBody] ApproveUserChangeRequest approveUserChangeRequest)
        {
            if (!ModelState.IsValid)
                return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState));

            var approveUserChangeResponse = await _userService.ApproveUserChangeRequest(approveUserChangeRequest);

            if (approveUserChangeResponse.Result.Succeeded)
            {
                return Ok(ResponseModel.ResponseWithInfo("True"));
            }

            var message = string.Join(";", approveUserChangeResponse.Result.Errors.Select(x => $"Code: {x.Code}. Description: {x.Description}"));
            _logger.Error(new IdentityException($"Error approving user change requests. Message: {message}"));

            return BadRequest(ResponseModel.ResponseFromIdentityModel(approveUserChangeResponse.Result));
        }

        [HttpGet]
        [Route("pending-change-request")]
        public IActionResult GetPendingChangeRequests()
        {
            try
            {
                var pendingChangeRequest = _userService.GetPendingChangeRequest();

                if (!pendingChangeRequest.Any())
                    return NoContent();

                return Ok(pendingChangeRequest);
            }
            catch (Exception e)
            {
                return BadRequest(e.Message);
            }
        }

        private ICollection<ClientCompaniesModel> GetCompaniesList()
        {
            return _clientCompanyService.GetClientCompanies();
        }

        private static ApplicationServiceUser MapApplicationServiceUserFrom(RegisterModel model)
        {
            return new ApplicationServiceUser
            {
                Title = model.Title,
                Forename = model.Forename,
                Surname = model.Surname,
                Username = !string.IsNullOrWhiteSpace(model.Username) ? model.Username : model.Email,
                Email = model.Email,
                Password = model.Password,
                ClientCompanyId = model.ClientCompanyId,
                UpdatedByAuthUserId = model.UpdatedByAuthUserId
            };
        }

        private ApplicationServiceUser MapApplicationServiceUserFrom(UpdateUserModel model)
        {
            return new ApplicationServiceUser
            {
                Id = model.Id,
                Forename = model.Forename,
                Surname = model.Surname,
                Username = model.Username,
                Email = model.Email,
                ClientCompanyId = model.ClientCompanyId,
                UpdatedByAuthUserId = model.UpdatedByAuthUserId,
            };
        }

        private ApplicationServiceUser MapApplicationServiceUserFrom(AddUserModel model)
        {
            return new ApplicationServiceUser
            {
                Id = model.Id,
                Title = model.Title,
                Forename = model.Forename,
                Surname = model.Surname,
                Username = !string.IsNullOrWhiteSpace(model.Username) ? model.Username : GenerateUniqueUsername(model.Email),
                Email = model.Email,
                ClientCompanyId = model.ClientCompanyId,
                ClientCompanyContactId = model.ClientCompanyContactId.GetValueOrDefault(),
                AuthUserId = model.AuthUserId.GetValueOrDefault(),
                UpdatedByAuthUserId = model.UpdatedByAuthUserId,
                UpdatedDateTime = model.UpdatedDateTime ??
DateTime.Now, Birthday = model.Birthday, Position = model.Position, PhoneNumberDirect = model.PhoneNumberDirect, PhoneNumberMobile = model.PhoneNumberMobile, PhoneNumberOther = model.PhoneNumberOther, ASPNumber = model.ASPNumber, ASPCreationDate = model.ASPCreationDate, PrimaryContact = model.PrimaryContact, Notes = model.Notes, Authorized = model.Authorized, RecNotification = model.RecNotification, RecAmReport = model.RecAmReport, RecActivityReport = model.RecActivityReport, NiNumber = model.NiNumber, BloombergGpi = model.BloombergGpi, AssignedCategoryIds = model.AssignedCategoryIds, IsLockedOut = model.IsLockedOut, Comment = model.Comment, IsApproved = model.IsApproved, ApprovedByAuthUserId = model.ApprovedByAuthUserId, IsAdmin = model.IsAdmin, IsSignatory = model.IsSignatory, IsAuthorisedSignatory = model.IsAuthorisedSignatory, AppClientUrl = model.AppClientUrl, FindUserByUsername = model.FindUserByUsername, FindUserByEmail = model.FindUserByEmail, ValidateUserDetails = ValidateUserDetails(model.IsApproved) }; } /// <summary> /// Determine if the user details should be validated, based on the origin of the request /// </summary> /// <returns></returns> private bool? ValidateUserDetails(bool isApproved) { RequestOrigin requestOrigin = _userService.GetRequestOrigin(this.User.Identity); switch (requestOrigin) { case RequestOrigin.ArgentexTrader: return isApproved; case RequestOrigin.ClientSite: return true; case RequestOrigin.Unknown: default: throw new AuthenticationException("Request origin is unknown"); } } /// <summary> /// Get an unique username using an initialValue (e.g. email) and a GUID /// Max length restriction due to AuthUser.Username length /// </summary> /// <param name="initialValue"></param> /// <returns></returns> private string GenerateUniqueUsername(string initialValue) => _userService.GenerateUniqueUsername(initialValue); protected override void Dispose(bool disposing) { if (disposing) { _userService.Dispose(); //_logger.Dispose(); //TODO base.Dispose(disposing); } } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CurrencyPairValidation.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class CurrencyPairValidation { public int Id { get; set; } public string CurrencyPair { get; set; } public int? UpdatedByAuthUserId { get; set; } public DateTime? UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportField.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportField { public int Id { get; set; } public int EmirreportTypeId { get; set; } public string Description { get; set; } public string FieldCode { get; set; } public string FieldName { get; set; } public string FieldValue { get; set; } public string AppSettingKey { get; set; } public bool IsBlank { get; set; } public bool? 
IsActive { get; set; } public int Sequence { get; set; } public EmirreportType EmirreportType { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportFxforwardTrade.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportFxforwardTrade { public EmirreportFxforwardTrade() { EmirreportTradeResponseError = new HashSet<EmirreportTradeResponseError>(); } public int Id { get; set; } public int EmirreportId { get; set; } public string FxforwardTradeCode { get; set; } public int EmirreportTypeId { get; set; } public int EmirstatusId { get; set; } public DateTime EmirstatusUpdatedDateTime { get; set; } public Emirreport Emirreport { get; set; } public EmirreportType EmirreportType { get; set; } public Emirstatus Emirstatus { get; set; } public FxforwardTrade FxforwardTradeCodeNavigation { get; set; } public ICollection<EmirreportTradeResponseError> EmirreportTradeResponseError { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOnlineDetailsSkew.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOnlineDetailsSkew { public int Id { get; set; } public int ClientCompanyOnlineDetailsId { get; set; } public int Currency1Id { get; set; } public int Currency2Id { get; set; } public bool IsBuy { get; set; } public int Spread { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public ClientCompanyOnlineDetails ClientCompanyOnlineDetails { get; set; } public Currency Currency1 { get; set; } public Currency Currency2 { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/VirtualAccountTypeBankAccount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class VirtualAccountTypeBankAccount { public int VirtualAccountTypeId { get; set; } public int BankAccountId { get; set; } public BankAccount BankAccount { get; set; } public VirtualAccountType VirtualAccountType { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/TradeStatus.cs namespace Argentex.Core.Service.Enums { public enum TradeStatus { Pending = 1, Filled = 2, Brokered = 3, Due = 4, Settled = 5, Delivered = 6 } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftincomingFileStatement.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftincomingFileStatement { public int Id { get; set; } public DateTime CreatedDateTime { get; set; } public int SwiftincomingFileId { get; set; } public int FilePartNumber { get; set; } public int? 
PaymentId { get; set; } public string RawContentLine61 { get; set; } public string RawContentLine86 { get; set; } public string MatchingContent { get; set; } public string DisplayError { get; set; } public string ProcessingError { get; set; } public bool MatchedProvisionally { get; set; } public Payment Payment { get; set; } public SwiftincomingFile SwiftincomingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Payments/IPaymentUoW.cs using System.Collections.Generic; using System.Data; using System.Linq; using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; namespace Argentex.Core.UnitsOfWork.Payments { public interface IPaymentUoW : IBaseUow { IQueryable<Payment> GetPayment(string paymentCode); IQueryable<ClientCompanyOpitransaction> GetClientCompanyOpiTransaction(string paymentCode); IEnumerable<PaymentType> GetPaymentType(string paymentType); DataTable MakePayment(Payment payment, ClientCompanyOpi clientCompanyOpi, BankAccount debitBankAccount, ClientCompanyVirtualAccount debitVirtualAccount, int paymentSwiftOutgoingStatusId, bool paymentAuthorised, string authUserName); IQueryable<Payment> GetPaymentNotification(string paymentCode); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/SecurityModels/LoginModel.cs using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Api.Models.SecurityModels { public class LoginModel { [Required] public string Username { get; set; } [Required] [DataType(DataType.Password)] public string Password { get; set; } [Required] public string Grant_type { get; set; } //[Required] public string Client_id { get; set; } public string Client_secret { get; set; } public string Refresh_token { get; set; } public string Primary_ip { get; set; } public string Secondary_ip { get; set; } public string Scope { get; set; } public string Resource { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Country/CountryController.cs using Argentex.Core.Service.Country; using Microsoft.AspNetCore.Mvc; namespace Argentex.Core.Api.Controllers.Country { [Produces("application/json")] [Route("api/country")] public class CountryController : Controller { private readonly ICountryService _countryService; public CountryController(ICountryService countryService) { _countryService = countryService; } [HttpGet] [Route("countries")] public IActionResult GetCountries() { return Ok(_countryService.GetCountries()); } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CassRecsStatementFile.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class CassRecsStatementFile { public CassRecsStatementFile() { CassRecs = new HashSet<CassRecs>(); } public int Id { get; set; } public DateTime CassRecsDate { get; set; } public string FileName { get; set; } public int FileSize { get; set; } public string DocumentId { get; set; } public DateTime UploadedDateTime { get; set; } public int UploadedByAuthUserId { get; set; } public ICollection<CassRecs> CassRecs { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/FixQuoteException.cs using System; namespace Argentex.Core.Service.Exceptions { public class FixQuoteException : Exception { public FixQuoteException() : base() { } public FixQuoteException(string message) : base(message) { } public FixQuoteException(string message, Exception inner) : base(message, inner) { } } } 
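// Illustrative usage sketch only: a minimal example of how the FixQuoteException defined above
// might be thrown and wrapped by a caller. The RequestQuote/GetRawQuote methods below are
// hypothetical and are not part of the service API; they exist purely to show the three
// constructor overloads in use.
namespace Argentex.Core.Service.Exceptions
{
    internal static class FixQuoteExceptionUsageSketch
    {
        internal static string RequestQuote(string quoteReqId)
        {
            if (string.IsNullOrWhiteSpace(quoteReqId))
                throw new FixQuoteException("QuoteReqId must be provided");

            try
            {
                return GetRawQuote(quoteReqId);
            }
            catch (TimeoutException inner)
            {
                // Preserve the original failure as the inner exception so the FIX context is not lost.
                throw new FixQuoteException($"No FIX quote received for request {quoteReqId}", inner);
            }
        }

        // Hypothetical stand-in for the real FIX gateway call.
        private static string GetRawQuote(string quoteReqId)
        {
            throw new TimeoutException("FIX gateway did not respond");
        }
    }
}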
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/ApplicationServiceUser.cs using System; namespace Argentex.Core.Service.Models.Identity { // Add profile data for application users by adding properties to the ApplicationUser class public class ApplicationServiceUser { public long Id { get; set; } public string Title { get; set; } public string Forename { get; set; } public string Surname { get; set; } public string Username { get; set; } public string Email { get; set; } public string Password { get; set; } public int ClientCompanyId { get; set; } public int AuthUserId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public string Position { get; set; } public string PhoneNumberDirect { get; set; } public string PhoneNumberMobile { get; set; } public string PhoneNumberOther { get; set; } public string Birthday { get; set; } public bool IsApproved { get; set; } public int? ApprovedByAuthUserId { get; set; } public bool PrimaryContact { get; set; } public string Notes { get; set; } public string ASPNumber { get; set; } public DateTime? ASPCreationDate { get; set; } public DateTime ASPExpirationDate { get; set; } public bool IsAdmin { get; set; } public bool IsSignatory { get; set; } public bool IsAuthorisedSignatory { get; set; } public string AppClientUrl { get; set; } public int ClientCompanyContactId { get; set; } public DateTime? LastPasswordChangeDate { get; set; } public DateTime LastTelephoneChangeDate { get; set; } public DateTime LastEmailChangeDate { get; set; } public string Fullname { get; set; } public bool Authorized { get; set; } public bool RecNotification { get; set; } public bool RecAmReport { get; set; } public bool RecActivityReport { get; set; } public bool IsDeleted { get; set; } public string BloombergGpi { get; set; } public string NiNumber { get; set; } public int[] AssignedCategoryIds { get; set; } public bool? IsLockedOut { get; set; } public string Comment { get; set; } public DateTime CreateDate { get; set; } public DateTime? LastLoginDate { get; set; } public DateTime? LastActivityDate { get; set; } public DateTime? LastLockOutDate { get; set; } public int FailedPasswordAttemptCount { get; set; } public DateTime FailedPasswordAttemptWindowStart { get; set; } public int ApplicationId { get; set; } public bool? FindUserByUsername { get; set; } public bool? FindUserByEmail { get; set; } public bool? ValidateUserDetails { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOnlineDetails.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOnlineDetails { public ClientCompanyOnlineDetails() { ClientCompanyOnlineDetailsSkew = new HashSet<ClientCompanyOnlineDetailsSkew>(); ClientCompanyOnlineSpreadAdjustment = new HashSet<ClientCompanyOnlineSpreadAdjustment>(); } public int Id { get; set; } public int ClientCompanyId { get; set; } public bool AllowOnlineTrading { get; set; } public decimal? MaxTradeSize { get; set; } public decimal? MaxOpen { get; set; } public DateTime? MaxTenor { get; set; } public decimal? Collateral { get; set; } public int? SpotSpread { get; set; } public int? FwdSpread { get; set; } public bool? 
Kicked { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public ClientCompany ClientCompany { get; set; } public ICollection<ClientCompanyOnlineDetailsSkew> ClientCompanyOnlineDetailsSkew { get; set; } public ICollection<ClientCompanyOnlineSpreadAdjustment> ClientCompanyOnlineSpreadAdjustment { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Client/ClientCompanyContactServiceTests.cs using Argentex.Core.Service.Identity.Services; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Moq; using System.Collections.Generic; using Xunit; namespace Argentex.Core.Service.Tests.ClientCompanyContact { public class ClientCompanyContactServiceTests { //[Fact] //public void GetAuthorizedSignatories_Successful_When_It_Has_The_Correct_Input() //{ // //Arrange // var mockTradeUow = new Mock<IClientCompanyContactUow>(); // var mockObject = new ClientCompanyContactModel(); // var mockList = new List<ClientCompanyContactModel>(); // mockList.Add(mockObject); // mockTradeUow.Setup(x => x.GetAuthorizedSignatories(1)) // .Returns(mockList); // var service = new ClientCompanyContactService(mockTradeUow.Object); // //Act // var result = service.GetAuthorizedSignatories(1); // //Assert // Assert.IsType<List<ClientCompanyContactModel>>(result); //} } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyComplianceCurrency.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyComplianceCurrency { public int Id { get; set; } public int ClientCompanyComplianceId { get; set; } public int CurrencyId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public ClientCompanyCompliance ClientCompanyCompliance { get; set; } public Currency Currency { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportOutgoingFile.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportOutgoingFile { public EmirreportOutgoingFile() { Emirreport = new HashSet<Emirreport>(); } public int Id { get; set; } public string Xmlfilename { get; set; } public string UploadedFilename { get; set; } public DateTime? UploadedDateTime { get; set; } public int? EmirreportOutgoingFileContentId { get; set; } public EmirreportOutgoingFileContent EmirreportOutgoingFileContent { get; set; } public ICollection<Emirreport> Emirreport { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ContactCategoryModel.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Text; using RestSharp.Validation; namespace Argentex.Core.Service.Models.ClientCompany { public class ContactCategoryModel { public int? Id { get; set; } public string Description { get; set; } public int? 
Sequence { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/ClientSiteAction/IClientSiteActionService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.ClientSiteAction; using System; using System.Collections.Generic; namespace Argentex.Core.Service.ClientSiteAction { public interface IClientSiteActionService : IDisposable { ClientSiteActionModel GetClientSiteAction(long clientSiteActionID); void LogActionOpiPayment(int authUserId, long fxforwardTrade2OpiId, string accountName, decimal amount); void LogActionSwapCreation(int authUserId, int FxswapId); void LogActionUnconfirmedTrade(int authUserId, string tradeCode); void LogActionNewOpi(int authUserId, int newOpiId); IEnumerable<CSATradesWithoutFIXConfirmationModel> GetTradesWithoutFIXConfirmation(); IEnumerable<CSAOPIsAssignedToTradesDisplayModel> GetOPIsAssignedToTrades(); void UpdateClientSiteAction(ClientSiteActionModel model); ClientSiteActionStatus GetClientSiteActionStatus(string name); IEnumerable<CSANewOPIRequestDisplayModel> GetNewOPIRequested(); ClientSiteActionModel GetClientSiteActionByOPIID(int clientCompanyOPIID); IEnumerable<CSASwapsModel> GetSwaps(); void DeleteAction2AssignedSettlementLink(long settlementId); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PipelineAction.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class PipelineAction { public PipelineAction() { ClientCompanyPipeline = new HashSet<ClientCompanyPipeline>(); } public int Id { get; set; } public string Description { get; set; } public int PipelineActionTypeId { get; set; } public int DisplayOrder { get; set; } public PipelineActionType PipelineActionType { get; set; } public ICollection<ClientCompanyPipeline> ClientCompanyPipeline { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Settlements/ISettlementService.cs using Argentex.Core.Service.Models.Payments; using Argentex.Core.Service.Models.Settlements; using System; using System.Collections.Generic; using System.Threading.Tasks; namespace Argentex.Core.Service.Settlements { public interface ISettlementService : IDisposable { PaymentInformationModel GetPaymentInformation(string paymentCode, bool isPaymentOut); Task<IList<AssignSettlementModel>> AssignAsync(AssignSettlementRequestModel assignSettlementRequest); IList<AssignSettlementModel> GetAssignedSettlements(string tradeCode); void DeleteAssignedSettlements(long settlementId); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Statements/IStatementService.cs using System; using System.Collections.Generic; using System.Threading.Tasks; using Argentex.Core.Service.Models.Statements; namespace Argentex.Core.Service.Statements { public interface IStatementService : IDisposable { IDictionary<string, List<StatementModel>> GetStatements(int clientCompanyId, DateTime startDate, DateTime endDate); bool CheckCompany(int clientCompanyId); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxoptionType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxoptionType { public int Id { get; set; } public string Description { get; set; } public string TermSheetImg { get; set; } public string VisibleInputs { get; set; } public int? DisplayOrder { get; set; } public bool? IsPrimary { get; set; } public int? ExtOptionTypeId { get; set; } public int? 
LevOptionTypeId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyOpiduplicate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyOpiduplicate { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int DuplicateClientCompanyOpiid { get; set; } public int OriginalClientCompanyOpiid { get; set; } public bool? IsOk { get; set; } public string Note { get; set; } public int? IsOkupdatedByAuthUserId { get; set; } public DateTime? IsOkupdatedDateTime { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Fix/FixNewOrderRequestModel.cs using System; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Fix { public class FixNewOrderRequestModel { [Required] public string TradeCode { get; set; } [Required] public string QuoteId { get; set; } [Required] public string QuoteReqId { get; set; } [Required] [StringLength(3)] public string LHSCCY { get; set; } [Required] [StringLength(3)] public string RHSCCY { get; set; } [Required] [StringLength(3)] public string MajorCurrency { get; set; } [Required] [Range(1, 2, ErrorMessage = "Side must be 1 for Buy and 2 for Sale")] public int Side { get; set; } [Required] public decimal BrokerMajorAmount { get; set; } [Required] [DataType(DataType.Date)] public string ValueDate { get; set; } [Required] public decimal Price { get; set; } [Required] public decimal ClientPrice { get; set; } [Required] [Range(1000, 60000)] public int TimeOut { get; set; } [Required] [Range(1, 1440)] public int Duration { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FixFxforwardTradeOrder.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FixFxforwardTradeOrder { public int Id { get; set; } public string FxforwardCode { get; set; } public string BarclaysTradeId { get; set; } public string BarclaysAssignedId { get; set; } public string ErrorMessage { get; set; } public bool IsFilled { get; set; } public string RejectReason { get; set; } public int UserId { get; set; } public DateTime OrderDate { get; set; } public FxforwardTrade FxforwardCodeNavigation { get; set; } public AuthUser User { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationCurrencyMessageField.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftvalidationCurrencyMessageField { public int CurrencyId { get; set; } public int MessageId { get; set; } public int MessageFieldId { get; set; } public Currency Currency { get; set; } public SwiftvalidationMessage Message { get; set; } public SwiftvalidationMessageField MessageField { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Attributes/DateRequiredAttribute.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Text; namespace Argentex.Core.Service.Attributes { public class DateRequiredAttribute : ValidationAttribute { public override bool IsValid(object value) { return value is DateTime && ((DateTime)value).Date >= DateTime.UtcNow.Date; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Payments/PaymentUoW.cs using System.Collections.Generic; using 
System.Data; using System.Data.SqlClient; using System.Linq; using Argentex.Core.DataAccess.Entities; using Microsoft.EntityFrameworkCore; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; namespace Argentex.Core.UnitsOfWork.Payments { public class PaymentUoW : BaseUow, IPaymentUoW { #region Repositories private IGenericRepo<Payment> _paymentRepository; private IGenericRepo<ClientCompanyOpitransaction> _clientCompanyOpiTransactionRepository; private IGenericRepo<PaymentType> _paymentTypeRepository; private IGenericRepo<Payment> PaymentRepository => _paymentRepository = _paymentRepository ?? new GenericRepo<Payment>(Context); private IGenericRepo<ClientCompanyOpitransaction> ClientCompanyOpiTransactionRepository => _clientCompanyOpiTransactionRepository = _clientCompanyOpiTransactionRepository ?? new GenericRepo<ClientCompanyOpitransaction>(Context); private IGenericRepo<PaymentType> PaymentTypeRepository => _paymentTypeRepository = _paymentTypeRepository ?? new GenericRepo<PaymentType>(Context); #endregion public PaymentUoW(FXDB1Context context) : base(context) { } public IQueryable<Payment> GetPayment(string paymentCode) { return PaymentRepository.GetQueryable(x => x.Code == paymentCode); } public IQueryable<ClientCompanyOpitransaction> GetClientCompanyOpiTransaction(string paymentCode) { return ClientCompanyOpiTransactionRepository.GetQueryable(x => x.Payment.Code == paymentCode) .Include(x => x.ClientCompanyOpi); } public IEnumerable<PaymentType> GetPaymentType(string paymentType) { return PaymentTypeRepository.GetQueryable(x => x.Description == paymentType); } public DataTable MakePayment(Payment payment, ClientCompanyOpi clientCompanyOpi, BankAccount debitBankAccount, ClientCompanyVirtualAccount debitVirtualAccount, int paymentSwiftOutgoingStatusId, bool paymentAuthorised, string authUserName) { DataTable dataTable = new DataTable(); using (SqlConnection sqlConn = (SqlConnection)Context.Database.GetDbConnection()) { string sql = "PaymentCreate"; using (SqlCommand sqlCmd = new SqlCommand(sql, sqlConn)) { sqlCmd.CommandType = CommandType.StoredProcedure; #region Sql parameters sqlCmd.Parameters.Add(new SqlParameter("@PaymentTypeID", SqlDbType.Int) { Value = payment.PaymentType.Id }); sqlCmd.Parameters.Add(new SqlParameter("@IsSWIFTPayment", SqlDbType.Bit) { Value = payment.PaymentType.DefaultSendToSwift }); sqlCmd.Parameters.Add(new SqlParameter("@AuthUserName", SqlDbType.NVarChar, 50) { Value = authUserName }); sqlCmd.Parameters.Add(new SqlParameter("@Amount", SqlDbType.Decimal) { Value = payment.Amount, Precision = 25, Scale = 8 }); sqlCmd.Parameters.Add(new SqlParameter("@CurrencyID", SqlDbType.Int) { Value = payment.Currency.Id }); sqlCmd.Parameters.Add(new SqlParameter("@ValueDate", SqlDbType.DateTime) { Value = payment.ValueDate }); sqlCmd.Parameters.Add(new SqlParameter("@NotifyClient", SqlDbType.Bit) { Value = payment.NotifyClient }); sqlCmd.Parameters.Add(new SqlParameter("@Comments", SqlDbType.NVarChar, -1) { Value = payment.Comments }); sqlCmd.Parameters.Add(new SqlParameter("@PaymentSwiftOutgoingStatusId", SqlDbType.Int) { Value = paymentSwiftOutgoingStatusId }); sqlCmd.Parameters.Add(new SqlParameter("@Authorised", SqlDbType.Bit) { Value = paymentAuthorised }); if (!string.IsNullOrWhiteSpace(payment.Reference)) { sqlCmd.Parameters.Add(new SqlParameter("@Reference", SqlDbType.NVarChar, 255) { Value = payment.Reference }); } if (payment.ClientCompany != null) { sqlCmd.Parameters.Add(new SqlParameter("@ClientCompanyID", SqlDbType.Int) { 
Value = payment.ClientCompany.Id }); } if (debitBankAccount != null) { sqlCmd.Parameters.Add(new SqlParameter("@DebitBankAccountID", SqlDbType.Int) { Value = debitBankAccount.Id }); } if (debitVirtualAccount != null) { sqlCmd.Parameters.Add(new SqlParameter("@DebitClientCompanyVirtualAccountID", SqlDbType.Int) { Value = debitVirtualAccount.Id }); } if (clientCompanyOpi != null) { sqlCmd.Parameters.Add(new SqlParameter("@CreditClientCompanyOPIID", SqlDbType.Int) { Value = clientCompanyOpi.Id }); } #endregion Sql parameters sqlConn.Open(); using (SqlDataAdapter sqlAdapter = new SqlDataAdapter(sqlCmd)) { sqlAdapter.Fill(dataTable); } } } return dataTable; } public IQueryable<Payment> GetPaymentNotification(string paymentCode) { return PaymentRepository .GetQueryable(x => x.Code == paymentCode, null, "ClientCompany, Currency, ClientCompanyOpitransaction.ClientCompanyOpi, PaymentType"); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Config/IdentityConfig.cs using AspNet.Security.OpenIdConnect.Primitives; using Microsoft.AspNetCore.Identity; using Microsoft.Extensions.DependencyInjection; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.Api.Config { public static class IdentityConfig { public static void ConfigureIdentityOptions(IServiceCollection services) { services.Configure<IdentityOptions>(options => { // Password settings options.Password.RequireDigit = true; options.Password.RequiredLength = 8; options.Password.RequireNonAlphanumeric = false; options.Password.RequireUppercase = true; options.Password.RequireLowercase = false; options.Password.RequiredUniqueChars = 6; // Lockout settings options.Lockout.DefaultLockoutTimeSpan = TimeSpan.FromMinutes(30); options.Lockout.MaxFailedAccessAttempts = 10; options.Lockout.AllowedForNewUsers = false; // New users are not locked out when created. 
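// The remaining options below relax the unique-email requirement and map ASP.NET Core Identity's claim types onto the standard OpenID Connect claim names (name / sub / role), so the generated ClaimsIdentity uses those claim keys.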
// User settings options.User.RequireUniqueEmail = false; options.ClaimsIdentity.UserNameClaimType = OpenIdConnectConstants.Claims.Name; options.ClaimsIdentity.UserIdClaimType = OpenIdConnectConstants.Claims.Subject; options.ClaimsIdentity.RoleClaimType = OpenIdConnectConstants.Claims.Role; }); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Trade/TradeController.cs using Argentex.Core.Service.Trade; using Argentex.Core.Service.Models.Trades; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using SynetecLogger; using System.Threading.Tasks; using System; namespace Argentex.Core.Api.Controllers.Trade { [Produces("application/json")] [Route("api/trade")] public class TradeController : Controller { private readonly ITradeService _tradeService; private readonly ILogWrapper _logger; public TradeController(ITradeService tradeService, ILogWrapper logger) { _tradeService = tradeService; _logger = logger; } [HttpGet] [Route("unsettled-trades/{clientCompanyId:int}")] public IActionResult GetUnsettledTrades(int clientCompanyId) { return Ok(_tradeService.GetUnsettledTrades(clientCompanyId)); } [HttpGet] [Route("currency-codes")] public IActionResult GetCurrencyCodes() { return Ok(_tradeService.GetCurrencyCodes()); } [HttpGet] [Route("allowed-currency-pairs")] public IActionResult GetAllowedCurrencyPairs() { return Ok(_tradeService.GetAllowedCurrencyPairs()); } [HttpGet("trade-note")] public async Task<IActionResult> GetTradeNote(string tradeCode) { return Ok(await _tradeService.GetTradeNote(tradeCode)); } [HttpGet("trade-information")] public IActionResult GetTradeInformation(string tradeCode) { return Ok(_tradeService.GetTradeInformation(tradeCode)); } [HttpPost("trade-default-opi-set")] public IActionResult SetTradeDefaultOPI([FromQuery]string tradeCode, int clientCompanyId, bool setAsDefault) { return Ok(_tradeService.SetTradeDefaultOPI(tradeCode, clientCompanyId, setAsDefault)); } [HttpPost] [Route("quote")] public async Task<IActionResult> Quote([FromBody] QuoteRequestModel quoteRequest) { if (!ModelState.IsValid) return BadRequest(); var quoteResponse = await _tradeService.GetQuotesAsync(quoteRequest); return Ok(quoteResponse); } [HttpPost] [Route("deal")] public async Task<IActionResult> Deal([FromBody] DealRequestModel dealRequest) { if (!ModelState.IsValid) return BadRequest(); var dealResponse = await _tradeService.Deal(dealRequest); return Ok(dealResponse); } [HttpGet] [Route("closed-trades/{clientCompanyId:int}")] public IActionResult GetClosedTrades(int clientCompanyId) { return Ok(_tradeService.GetClosedTrades(clientCompanyId)); } protected override void Dispose(bool disposing) { if (disposing) { _tradeService.Dispose(); } } } }<file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Statements/IStatementUoW.cs using System; using System.Data; using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; namespace Argentex.Core.UnitsOfWork.Statements { public interface IStatementUoW : IBaseUow { IGenericRepo<FxforwardTrade> FxForwardTradeRepository { get; } IGenericRepo<Payment> PaymentRepository { get; } IGenericRepo<ClientCompany> ClientCompanyRepository { get; } IGenericRepo<BankAccountTransaction> BankAccountTransactionRepository { get; } IGenericRepo<Currency> CurrencyRepository { get; } DataTable GetClientCompanyVirtualAccountBalances(int clientCompanyId, DateTime valueDate); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/RefreshTokenModel.cs 
namespace Argentex.Core.Service.Models.Identity { public class RefreshTokenModel { public int UserID { get; set; } public string UserName { get; set; } public string AccessToken { get; set; } public string RefreshToken { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompaniesModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompaniesModel { public int ClientCompanyId {get; set;} public string ClientCompanyName { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/Security/SecurityControllerTests.cs using Argentex.Core.Api.Controllers.Security; using Argentex.Core.Api.Models.AccountViewModels; using Argentex.Core.Api.Models.SecurityModels; using Argentex.Core.Service; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Models.Identity; using AspNet.Security.OpenIdConnect.Primitives; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System; using System.Net; using System.Threading.Tasks; using Xunit; namespace Argentex.Core.Api.Tests.Security { public class SecurityControllerTests { [Fact] public void ForgotPassword_ModelNotValid() { var service = new Mock<IIdentityService>(); var controller = new SecurityController(service.Object, null, null, null, null); controller.ModelState.AddModelError("", "Error"); var result = controller.ForgotPassword(new ForgotPasswordViewModel()).Result; Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void ResetPasswordAsync_Model_not_valid() { var service = new Mock<IIdentityService>(); var controller = new SecurityController(service.Object, null, null, null, null); controller.ModelState.AddModelError("", "Error"); var result = controller.ResetPasswordAsync(new ResetPasswordViewModel()).Result; Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void CreateTokenTest_InvalidModel() { var service = new Mock<IIdentityService>(); service.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) .Returns(Task.FromResult<TokenModel>(null)); var controller = new SecurityController(service.Object, null, null, null, null); controller.ModelState.AddModelError("", "Error"); var result = controller.CreateToken(new OpenIdConnectRequest()).Result; Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void CreateTokenTest_BadRequest() { var service = new Mock<IIdentityService>(); service.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) .Returns(Task.FromResult<TokenModel>(null)); var controller = new SecurityController(service.Object, null, null, null, null); var result = controller.CreateToken(new OpenIdConnectRequest { Username = "rado", Password = "<PASSWORD>" }).Result; Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void CreateTokenTest_Success() { var service = new Mock<IIdentityService>(); var smsService = new Mock<ISmsService>(); var request = new OpenIdConnectRequest { Username = "rado", Password = "<PASSWORD>" }; string tokenValidationCode = "<PASSWORD>"; service.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) .Returns(Task.FromResult(new TokenModel { Access_token = "token", Expires_in = 3600 })); smsService.Setup(x => x.Send2FAMessage(It.IsAny<string>())) .Returns(Task.FromResult(tokenValidationCode)); var controller = new SecurityController(service.Object, 
null, null, null, smsService.Object); var result = controller.CreateToken(request).Result; Assert.IsType<TokenModel>(((OkObjectResult)result).Value); } [Fact] public void ChangePassword_Success_With_Valid_Password_Input() { //Arrange var mockIdentityService = new Mock<IIdentityService>(); mockIdentityService.Setup(x => x.ChangePasswordAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); mockIdentityService.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) .Returns(Task.FromResult(new TokenModel { Access_token = "token", Expires_in = 3600 })); var mockUrlHelper = new Mock<IUrlHelper>(); mockUrlHelper.Setup(x => x.Action(It.IsAny<Microsoft.AspNetCore.Mvc.Routing.UrlActionContext>())) .Returns("colbackUrl") .Verifiable(); var mockLogger = new Mock<ILogWrapper>(); mockLogger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null, null); controller.Url = mockUrlHelper.Object; controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var model = new ChangePasswordModel { UserId = 1, UserName = "<NAME>", CurrentPassword = "<PASSWORD>", NewPassword = "<PASSWORD>", }; var result = controller.ChangePassword(model).Result; //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void ChangePassword_Failed_With_Service_Returns_Failed() { //Arrange var changePasswordModel = new ChangePasswordModel() { UserId = 42, UserName = "chucknorris", CurrentPassword = "<PASSWORD>", NewPassword = "<PASSWORD>", ConfirmPassword = "<PASSWORD>" }; var mockIdentityService = new Mock<IIdentityService>(); var mockLogger = new Mock<ILogWrapper>(); mockIdentityService.Setup(x => x.ChangePasswordAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Failed())); mockIdentityService.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) .Returns(Task.FromResult(new TokenModel { Access_token = "token", Expires_in = 3600 })); var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null, null); var expectedStatusCode = HttpStatusCode.BadRequest; //Act var result = controller.ChangePassword(changePasswordModel).Result; var objectResult = result as BadRequestObjectResult; //Assert Assert.NotNull(objectResult); Assert.Equal((int) expectedStatusCode, objectResult.StatusCode); } [Fact] public void ChangePassword_Should_Return_An_OK_Result() { //Arrange var model = new ChangePasswordModel { UserId = 1, UserName = "testaccount", CurrentPassword = "<PASSWORD>", NewPassword = "<PASSWORD>", ConfirmPassword = "<PASSWORD>" }; var mockLogger = new Mock<ILogWrapper>(); var mockIdentityService = new Mock<IIdentityService>(); mockIdentityService.Setup(x => x.ChangePasswordAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null, null); //Act var result = controller.ChangePassword(model).Result; //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void ChangePassword_Should_Return_A_Bad_Request_When_PasswordDoNotMatchException_Is_Caught() { // Given var model = new ChangePasswordModel { UserId = 1, UserName = 
"testaccount", CurrentPassword = "<PASSWORD>", NewPassword = "<PASSWORD>", ConfirmPassword = "<PASSWORD>" }; var mockLogger = new Mock<ILogWrapper>(); var mockIdentityService = new Mock<IIdentityService>(); mockIdentityService.Setup(x => x.ChangePasswordAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())) .ThrowsAsync(new PasswordsDoNotMatchException()); var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null, null); // When var result = controller.ChangePassword(model).Result; // Then Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void ChangePassword_Should_Return_A_Bad_Request_When_ApplicationUserNotFoundException_Is_Caught() { // Given var model = new ChangePasswordModel { UserId = 1, UserName = "testaccount", CurrentPassword = "<PASSWORD>", NewPassword = "<PASSWORD>", ConfirmPassword = "<PASSWORD>" }; var mockLogger = new Mock<ILogWrapper>(); var mockIdentityService = new Mock<IIdentityService>(); mockIdentityService.Setup(x => x.ChangePasswordAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())) .ThrowsAsync(new ApplicationUserNotFoundException()); var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null, null); // When var result = controller.ChangePassword(model).Result; // Then Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void ChangePassword_Should_Return_A_Bad_Request_When_PasswordAlreadyUsedException_Is_Caught() { // Given var model = new ChangePasswordModel { UserId = 1, UserName = "testaccount", CurrentPassword = "<PASSWORD>", NewPassword = "<PASSWORD>", ConfirmPassword = "<PASSWORD>" }; var mockLogger = new Mock<ILogWrapper>(); var mockIdentityService = new Mock<IIdentityService>(); mockIdentityService.Setup(x => x.ChangePasswordAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())) .ThrowsAsync(new PasswordAlreadyUsedException()); var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null, null); // When var result = controller.ChangePassword(model).Result; // Then Assert.IsType<BadRequestObjectResult>(result); } //[Fact] //public void ForgotPassword_Success_With_Valid_UserId_Input() //{ // //Arrange // var model = new ForgotPasswordViewModel // { // Email = "<EMAIL>", // SiteUrl = "http://www.aUrl.co.uk/" // }; // var mockIdentityService = new Mock<IIdentityService>(); // mockIdentityService.Setup(x => x.SendResetPasswordEmailAsync(model.Email, model.SiteUrl)) // .Returns(Task.FromResult(IdentityResult.Success)); // var mockUrlHelper = new Mock<IUrlHelper>(); // mockUrlHelper.Setup(x => x.Action(It.IsAny<Microsoft.AspNetCore.Mvc.Routing.UrlActionContext>())) // .Returns("colbackUrl") // .Verifiable(); // var mockLogger = new Mock<ILogWrapper>(); // mockLogger.Setup(x => x.Error(It.IsAny<Exception>())); // var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null); // controller.Url = mockUrlHelper.Object; // controller.ControllerContext = new ControllerContext(); // controller.ControllerContext.HttpContext = new DefaultHttpContext(); // controller.ControllerContext.HttpContext.Request.Scheme = "test"; // //Act // var result = controller.ForgotPassword(model).Result; // //Assert // Assert.IsType<OkObjectResult>(result); //} //[Fact] //public void ForgotPassword_Failed_With_Service_Returns_Failed() //{ // //Arrange // var model = new ForgotPasswordViewModel // { // Email = "<EMAIL>", // 
SiteUrl = "http://www.aUrl.co.uk/" // }; // var mockIdentityService = new Mock<IIdentityService>(); // mockIdentityService.Setup(x => x.SendResetPasswordEmailAsync(model.Email, model.SiteUrl)) // .Returns(Task.FromResult(IdentityResult.Failed())); // mockIdentityService.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) // .Returns(Task.FromResult(new TokenModel { Access_token = "token", Expires_in = 3600 })); // var mockUrlHelper = new Mock<IUrlHelper>(); // mockUrlHelper.Setup(x => x.Action(It.IsAny<Microsoft.AspNetCore.Mvc.Routing.UrlActionContext>())) // .Returns("colbackUrl") // .Verifiable(); // var mockLogger = new Mock<ILogWrapper>(); // mockLogger.Setup(x => x.Error(It.IsAny<Exception>())); // var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null); // controller.Url = mockUrlHelper.Object; // controller.ControllerContext = new ControllerContext(); // controller.ControllerContext.HttpContext = new DefaultHttpContext(); // controller.ControllerContext.HttpContext.Request.Scheme = "test"; // //Act // var result = controller.ForgotPassword(model).Result; // //Assert // Assert.IsNotType<OkObjectResult>(result); //} //[Fact] //public void ForgotPassword_Failed_When_Identity_Returns_BadRequest() //{ // //Arrange // var model = new ForgotPasswordViewModel // { // Email = "<EMAIL>", // SiteUrl = "http://www.aUrl.co.uk/" // }; // var mockIdentityService = new Mock<IIdentityService>(); // mockIdentityService.Setup(x => x.SendResetPasswordEmailAsync(model.Email, model.SiteUrl)) // .Returns(Task.FromResult(IdentityResult.Failed())); // mockIdentityService.Setup(x => x.AuthenticateAsync(It.IsAny<LoginServiceModel>())) // .Returns(Task.FromResult(new TokenModel { Access_token = "token", Expires_in = 3600 })); // var mockUrlHelper = new Mock<IUrlHelper>(); // mockUrlHelper.Setup(x => x.Action(It.IsAny<Microsoft.AspNetCore.Mvc.Routing.UrlActionContext>())) // .Returns("colbackUrl") // .Verifiable(); // var mockLogger = new Mock<ILogWrapper>(); // mockLogger.Setup(x => x.Error(It.IsAny<Exception>())); // var controller = new SecurityController(mockIdentityService.Object, mockLogger.Object, null, null); // controller.Url = mockUrlHelper.Object; // controller.ControllerContext = new ControllerContext(); // controller.ControllerContext.HttpContext = new DefaultHttpContext(); // controller.ControllerContext.HttpContext.Request.Scheme = "test"; // //Act // var result = controller.ForgotPassword(model).Result; // //Assert // Assert.IsType<BadRequestObjectResult>(result); //} } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Payments/PaymentOutResponseModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.Payments { public class PaymentOutResponseModel { public string Code { get; set; } public bool IsSuccessful { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Payments/IPaymentsService.cs using System; using System.Threading.Tasks; namespace Argentex.Core.Service.Payments { public interface IPaymentsService : IDisposable { Task<bool> NotifyContacts(string paymentCode); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Email/BrokerTradeNoteModel.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.Identity; using System; namespace Argentex.Core.Service.Models.Email { public class BrokerTradeNoteModel { public string TradeCode { get; set; } // RegulatoryID public string SellCcy { get; set; } public decimal 
SellAmount { get; set; } public string BuyCcy { get; set; } public decimal BuyAmount { get; set; } public double Rate { get; set; } public DateTime ValueDate { get; set; } public DataAccess.Entities.ClientCompany ClientCompany { get; set; } public ClientCompanyOpi SettlementAccountDetails { get; set; } public Broker Broker { get; set; } public AuthUser DealerAuthUser { get; set; } public string InstructedBy { get; set; } public DateTime InstructedDateTime { get; set; } public string Method { get; set; } public string CurrencyPair { get; set; } public decimal Collateral { get; set; } public string CollateralCcy { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Client/ClientCompanyContactController.cs using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Threading.Tasks; using Argentex.Core.Api.Models; using Argentex.Core.Service; using Argentex.Core.Service.Models.ClientCompany; using Microsoft.AspNetCore.Mvc; using SynetecLogger; namespace Argentex.Core.Api.Controllers.Client { [Produces("application/json")] [Route("api/client-company-contact")] public class ClientCompanyContactController : Controller { private readonly IClientCompanyService _clientCompanyContactService; private readonly ILogWrapper _logger; public ClientCompanyContactController(IClientCompanyService clientCompanyContactService, ILogWrapper logger) { _clientCompanyContactService = clientCompanyContactService; _logger = logger; } [HttpGet] [Route("company-name/{clientCompanyId:int}")] public IActionResult GetCompanyName(int clientCompanyId) { return Ok(_clientCompanyContactService.GetClientCompanyName(clientCompanyId)); } [HttpPost] [Route("categories")] public IActionResult AddContactCategory([FromBody] ContactCategoryModel model) { if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); var succeeded = _clientCompanyContactService.AddContactCategory(model); if (succeeded) return Ok(ResponseModel.ResponseWithInfo($"Contact category {model.Description} created successfully")); return BadRequest( ResponseModel.ResponseWithErrors($"Contact category {model.Description} could not be added")); } [HttpGet] [Route("categories")] public async Task<IActionResult> GetContactCategories() { try { return Ok(await _clientCompanyContactService.GetContactCategories()); } catch (Exception exception) { _logger.Error(exception); return BadRequest(ResponseModel.ResponseWithErrors("Contact Categories could not be retrieved")); } } [HttpGet] [Route("{clientCompanyContactId:int}/categories/")] public async Task<IActionResult> GetClientCompanyContactCategories(int clientCompanyContactId) { try { return Ok(await _clientCompanyContactService.GetClientCompanyContactCategories(clientCompanyContactId)); } catch (Exception exception) { _logger.Error(exception); return BadRequest(ResponseModel.ResponseWithErrors( $"ClientCompanyContactCategories could not be retrieved for ClientCompanyContactId {clientCompanyContactId}")); } } [HttpGet] [Route("categories/{contactCategoryId:int}")] public IActionResult GetContactCategory(int contactCategoryId) { try { return Ok(_clientCompanyContactService.GetContactCategory(contactCategoryId)); } catch (Exception exception) { _logger.Error(exception); return BadRequest( ResponseModel.ResponseWithErrors($"Contact Category {contactCategoryId} could not be retrieved")); } } [HttpGet] [Route("categories/{*contactCategoryDescription}")] public IActionResult GetContactCategory(string 
contactCategoryDescription) { try { return Ok(_clientCompanyContactService.GetContactCategory(contactCategoryDescription)); } catch (Exception exception) { _logger.Error(exception); return BadRequest( ResponseModel.ResponseWithErrors( $"Contact Category {contactCategoryDescription} could not be retrieved")); } } [HttpPut] [Route("categories")] public IActionResult ProcessClientCompanyContactCategories([FromBody] ClientCompanyContactBulkCategoryModel model) { if (!ModelState.IsValid) return BadRequest(ResponseModel.ResponseFromInvalidModelState(ModelState)); bool succeeded = _clientCompanyContactService.ProcessClientCompanyContactCategories(model); if (succeeded) return Ok(ResponseModel.ResponseWithInfo( $"Client Company Contact Categories have been processed successfully for Company Contact Id {model.ClientCompanyContactId}")); return BadRequest(ResponseModel.ResponseWithErrors( $"Client Company Contact Categories could not be processed successfully for Company Contact Id {model.ClientCompanyContactId}")); } [HttpGet] [Route("contact")] public IActionResult GetClientCompanyContact([FromQuery] ClientCompanyContactSearchContext clientCompanyContactSearchContext) { try { var companyContact = _clientCompanyContactService.GetClientCompanyContact(clientCompanyContactSearchContext); if (companyContact.CompanyContactModel == null) return NotFound(_clientCompanyContactService.GetErrorMessages(HttpStatusCode.NotFound, null, clientCompanyContactSearchContext)); return Ok(companyContact); } catch (Exception exception) { _logger.Error(exception); return BadRequest(_clientCompanyContactService.GetErrorMessages(HttpStatusCode.BadRequest, exception, clientCompanyContactSearchContext)); } } protected override void Dispose(bool disposing) { if (disposing) { _clientCompanyContactService.Dispose(); base.Dispose(disposing); } } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PaymentSwiftoutgoingStatus.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class PaymentSwiftoutgoingStatus { public PaymentSwiftoutgoingStatus() { Payment = new HashSet<Payment>(); } public int Id { get; set; } public string Status { get; set; } public bool IsSwiftRejected { get; set; } public ICollection<Payment> Payment { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/BankAccount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class BankAccount { public BankAccount() { BankAccountCurrencyBalance = new HashSet<BankAccountCurrencyBalance>(); BankAccountCurrencyBalanceHistory = new HashSet<BankAccountCurrencyBalanceHistory>(); BankAccountCurrencyDetails = new HashSet<BankAccountCurrencyDetails>(); BankAccountTransaction = new HashSet<BankAccountTransaction>(); BrokerBankAccountBrokerPaymentsIn = new HashSet<Broker>(); BrokerBankAccountBrokerPaymentsOut = new HashSet<Broker>(); BrokerBankAccountClientPaymentsIn = new HashSet<Broker>(); BrokerBankAccountClientPaymentsOut = new HashSet<Broker>(); VirtualAccountTypeBankAccount = new HashSet<VirtualAccountTypeBankAccount>(); } public int Id { get; set; } public string Description { get; set; } public bool IsDefault { get; set; } public ICollection<BankAccountCurrencyBalance> BankAccountCurrencyBalance { get; set; } public ICollection<BankAccountCurrencyBalanceHistory> BankAccountCurrencyBalanceHistory { get; set; } public ICollection<BankAccountCurrencyDetails> BankAccountCurrencyDetails { get; set; } public 
ICollection<BankAccountTransaction> BankAccountTransaction { get; set; } public ICollection<Broker> BrokerBankAccountBrokerPaymentsIn { get; set; } public ICollection<Broker> BrokerBankAccountBrokerPaymentsOut { get; set; } public ICollection<Broker> BrokerBankAccountClientPaymentsIn { get; set; } public ICollection<Broker> BrokerBankAccountClientPaymentsOut { get; set; } public ICollection<VirtualAccountTypeBankAccount> VirtualAccountTypeBankAccount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/User/UserControllerTests.cs using Argentex.Core.Api.Controllers.User; using Argentex.Core.Api.Models.SecurityModels; using Argentex.Core.Service; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.Users.Model; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System; using System.Collections; using System.Collections.Generic; using System.Net; using System.Security.Claims; using System.Security.Principal; using System.Threading.Tasks; using Argentex.Core.Api.Models; using Argentex.Core.Service.Enums; using FluentAssertions; using Xunit; using Argentex.Core.DataAccess.Entities; namespace Argentex.Core.Api.Tests.User { public class UserControllerTests { [Fact] public void GetApplicationUsersOfCompany_Success_With_Valid_ClientCompanyId_Input() { //Arrange var applicationServiceUser = new ApplicationServiceUser(); var applicationServiceUserList = new List<ApplicationServiceUser>(); applicationServiceUserList.Add(applicationServiceUser); var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUsersOfCompany(1)) .Returns(applicationServiceUserList); var controller = new UserController(mockUserService.Object, null, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = controller.GetApplicationUsersOfCompany(1); //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void GetApplicationUsersOfCompany_Returns_No_Content_When_Service_Returns_Empty_List() { //Arrange var applicationServiceUserList = new List<ApplicationServiceUser>(); var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUsersOfCompany(1)) .Returns(applicationServiceUserList); var controller = new UserController(mockUserService.Object, null, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = controller.GetApplicationUsersOfCompany(1); //Assert Assert.IsType<NoContentResult>(result); } [Fact] public void GetApplicationUser_Success_With_Valid_ClientCompanyId_Input() { //Arrange var applicationServiceUser = new ApplicationServiceUser(); var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUserAsync("1")) .Returns((Task.FromResult(applicationServiceUser))); var controller = new UserController(mockUserService.Object, null, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = controller.GetApplicationUser(1).Result; //Assert 
Assert.IsType<OkObjectResult>(result); } [Fact] public void GetApplicationUser_Failed_With_Bad_Request_When_Service_Returns_Null_User() { //Arrange var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUserAsync("<EMAIL>")) .Returns(It.IsAny<Task<ApplicationServiceUser>>()); var controller = new UserController(mockUserService.Object, null, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = controller.GetApplicationUser(1).Result; //Assert Assert.IsType<BadRequestResult>(result); } [Fact] public void AddUser_Success_With_Valid_Model_Input() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.AddUnapprovedUserAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Success)); service.Setup(x => x.GetRequestOrigin(It.IsAny<IIdentity>())).Returns(RequestOrigin.ClientSite); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.AddUser(new AddUserModel { Username = "user", Email = "<EMAIL>" }); var result = response.Result as OkObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Info"); } [Fact] public async Task ApproveUserChangeRequest_Success_With_Valid_Model_Input() { //Arrange var service = new Mock<IUserService>(); var approveUserChangeResponse = new ApproveUserChangeResponse() { Result = IdentityResult.Success, SendNotification = true, UserChangeRequest = null }; service.Setup(x => x.ApproveUserChangeRequest(It.IsAny<ApproveUserChangeRequest>())) .ReturnsAsync(approveUserChangeResponse); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null, null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = await controller.ApproveUserChangeRequest(new ApproveUserChangeRequest() { UserChangeRequestID = 48, ApprovedByAuthUserId = 123 }); //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public async void ApproveUserChangeRequest_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); var controller = new UserController(service.Object, null, null, null); controller.ModelState.AddModelError("", "Error"); //Act var result = await controller.ApproveUserChangeRequest(null); //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void AddUser_Failed_When_Identity_Returns_Failed_Result_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => 
x.AddUnapprovedUserAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Failed())); service.Setup(x => x.GetRequestOrigin(It.IsAny<IIdentity>())).Returns(RequestOrigin.ClientSite); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.User = new ClaimsPrincipal(new ClaimsIdentity(new List<Claim>())); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.AddUser(new AddUserModel { Username = "user", Email = "<EMAIL>" }); var result = response.Result as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); } [Fact] public void AddUser_Failed_When_NoModel() { //Arrange var service = new Mock<IUserService>(); var controller = new UserController(service.Object, null, null,null); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.AddUser(null); var result = response.Result as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); var responseModel = (ResponseModel)result.Value; responseModel.ResponseMessages.Should().NotBeEmpty(); responseModel.ResponseMessages.Should().ContainKey("Errors"); responseModel.ResponseMessages.Should().HaveCount(1); } [Fact] public void Edit_Success_With_Valid_Model_Input() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Success)); service.Setup(x => x.GetRequestOrigin(It.IsAny<IIdentity>())).Returns(RequestOrigin.ClientSite); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; controller.ControllerContext.HttpContext.User = new ClaimsPrincipal(new ClaimsIdentity()); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.Update(new AddUserModel { Username = "rado", Email = "rado" }); var result = response.Result as OkObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Info"); } [Fact] public void 
Edit_Failed_When_Identity_Returns_Failed_Result_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.UpdateUserAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Failed())); service.Setup(x => x.GetRequestOrigin(It.IsAny<IIdentity>())).Returns(RequestOrigin.ClientSite); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.User = new ClaimsPrincipal(new ClaimsIdentity()); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.Update(new AddUserModel { Username = "rado", Email = "rado" }); var result = response.Result as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); } [Fact] public void Edit_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); var controller = new UserController(service.Object, null, null,null); controller.ModelState.AddModelError("", "Error"); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.Update(null); var result = response.Result as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); var responseModel = (ResponseModel)result.Value; responseModel.ResponseMessages.Should().NotBeEmpty(); responseModel.ResponseMessages.Should().ContainKey("Errors"); responseModel.ResponseMessages.Should().HaveCount(1); } [Fact] public void UpdateMyAccount_Success_With_Valid_Model_Input() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.UpdateMyAccountAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Success)); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null,null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = controller.UpdateMyAccount(new UpdateUserModel { Username = "rado", Email = "rado" }).Result; //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void UpdateMyAccount_Failed_When_Identity_Returns_Failed_Result_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.UpdateMyAccountAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Failed())); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null,null); //Act var result = controller.UpdateMyAccount(new UpdateUserModel { 
Username = "rado", Email = "rado" }).Result; //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void UpdateMyAccount_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); var controller = new UserController(service.Object, null, null,null); controller.ModelState.AddModelError("", "Error"); //Act var result = controller.UpdateMyAccount(null).Result; //Assert Assert.IsType<BadRequestObjectResult>(result); } [Fact] public void GetUnsettledClientCompanyContacts_Success_With_Valid_ClientCompanyId_Input() { //Arrange var mockObject = new ClientCompanyContactModel(); var mockList = new List<ClientCompanyContactModel>(); mockList.Add(mockObject); var mockService = new Mock<IUserService>(); mockService.Setup(x => x.GetAuthorisedSignatories(1)) .Returns(mockList); var mockUrlHelper = new Mock<IUrlHelper>(); mockUrlHelper.Setup(x => x.Action(It.IsAny<Microsoft.AspNetCore.Mvc.Routing.UrlActionContext>())) .Returns("colbackUrl") .Verifiable(); var mockLogger = new Mock<ILogWrapper>(); mockLogger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(mockService.Object, mockLogger.Object, null,null); controller.Url = mockUrlHelper.Object; controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var result = controller.GetAuthorisedSignatories(1); //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void UpdateContact_Success_With_Valid_Model_Input() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.UpdateUserContactAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Success)); service.Setup(x => x.GetRequestOrigin(It.IsAny<IIdentity>())).Returns(RequestOrigin.ClientSite); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null, null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; controller.ControllerContext.HttpContext.User = new ClaimsPrincipal(new ClaimsIdentity()); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.UpdateContact(new AddUserModel { Username = "rado", Email = "rado" }); var result = response.Result as OkObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Info"); } [Fact] public void UpdateContact_Failed_When_Identity_Returns_Failed_Result_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); service.Setup(x => x.UpdateUserContactAsync(It.IsAny<ApplicationServiceUser>())) .Returns(Task.FromResult(IdentityResult.Failed())); service.Setup(x => x.GetRequestOrigin(It.IsAny<IIdentity>())).Returns(RequestOrigin.ClientSite); var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var controller = new UserController(service.Object, logger.Object, null, null); controller.ControllerContext = new ControllerContext(); 
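// Give the controller a default HttpContext so request details can be read safely during the test.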
controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.User = new ClaimsPrincipal(new ClaimsIdentity()); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.UpdateContact(new AddUserModel { Username = "rado", Email = "rado" }); var result = response.Result as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); } [Fact] public void UpdateContact_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var service = new Mock<IUserService>(); var controller = new UserController(service.Object, null, null, null); controller.ModelState.AddModelError("", "Error"); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); //Act var response = controller.UpdateContact(null); var result = response.Result as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); var responseModel = (ResponseModel)result.Value; responseModel.ResponseMessages.Should().NotBeEmpty(); responseModel.ResponseMessages.Should().ContainKey("Errors"); responseModel.ResponseMessages.Should().HaveCount(1); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Trade/TradeServiceTests.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Fix; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.Models.Trade; using Argentex.Core.Service.Models.Trades; using Argentex.Core.Service.Trade; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.Trades; using Moq; using System; using System.Collections.Generic; using System.Data; using System.Linq; using System.Threading.Tasks; using Xunit; namespace Argentex.Core.Service.Tests.Trade { public class TradeServiceTests { [Fact] public void GetUnsettledTrades_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockTradeUow = new Mock<ITradeUow>(); var mockClientCompanyAccountsUoW = new Mock<IClientCompanyAccountsUoW>(); var mockFixService = new Mock<IBarxFxService>(); var mockEmailService = new Mock<IEmailService>(); var mockConfig = new Mock<IConfigWrapper>(); var mockDataTable = new DataTable(); mockTradeUow.Setup(x => x.GetUnsettledTrades(1)) .Returns(mockDataTable); var service = new TradeService(mockTradeUow.Object, mockFixService.Object, mockEmailService.Object, mockConfig.Object, mockClientCompanyAccountsUoW.Object, null, null, null, null, null, null); //Act var result = service.GetUnsettledTrades(1); //Assert Assert.IsType<List<TradeModel>>(result); } [Fact] public void Given_There_Is_No_Trade_Associated_With_The_Code_An_Exception_Should_Be_Thrown() { 
// Given var tradeCode = "TestTradeCode"; var tradeUowMock = new Mock<ITradeUow>(); var clientCompanyAccountsUoWMock = new Mock<IClientCompanyAccountsUoW>(); var fixServiceMock = new Mock<IBarxFxService>(); var mockEmailService = new Mock<IEmailService>(); var mockConfig = new Mock<IConfigWrapper>(); var trades = new List<FxforwardTrade>(); tradeUowMock.Setup(x => x.GetTrade(It.IsAny<string>())).Returns(trades.AsQueryable); var service = new TradeService(tradeUowMock.Object, fixServiceMock.Object, mockEmailService.Object, mockConfig.Object, clientCompanyAccountsUoWMock.Object, null, null, null, null, null, null); var expectedMessage = $"Trade with code {tradeCode} does not exist"; // When var result = Assert.Throws<TradeNotFoundException>(() => service.GetTradeInformation(tradeCode)); // Then Assert.NotNull(result); Assert.Equal(expectedMessage, result.Message); } [Fact] public void Given_There_Is_A_Trade_Associated_With_The_Code_A_Model_Should_Be_Returned_With_Trade_Information() { // Given var now = DateTime.Now; var today = DateTime.Today; var trade = new FxforwardTrade { Code = "Trade 42", Lhsccy = new Currency { Id = 1, Code = "GBP" }, Rhsccy = new Currency { Id = 3, Code = "EUR" }, TradeInstructionMethodId = 2, IsBuy = true, IsRhsmajor = true, CreatedDate = now, ValueDate = today, ClientLhsamt = 25000, ClientRhsamt = 15000, CollateralPerc = 25, CurrencyPair = "GBPEUR", BrokerRate = 1.2m, ClientRate = 1.3m, AuthorisedByClientCompanyContact = new DataAccess.Entities.ClientCompanyContact { Id = 1, ClientCompany = new ClientCompany { Id = 1496, Name = "<NAME>" }, Email = "<EMAIL>" }, ClientCompanyOpi = new ClientCompanyOpi() { Id = 10} }; var tradeUowMock = new Mock<ITradeUow>(); var clientCompanyAccountsUoWMock = new Mock<IClientCompanyAccountsUoW>(); var fixServiceMock = new Mock<IBarxFxService>(); var mockEmailService = new Mock<IEmailService>(); var mockConfig = new Mock<IConfigWrapper>(); var trades = new List<FxforwardTrade> { trade }; tradeUowMock.Setup(x => x.GetTrade(It.IsAny<string>())).Returns(trades.AsQueryable); var mockClientCompanyService = new Mock<IClientCompanyService>(); mockClientCompanyService.Setup(s => s.GetClientCompanySpread(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<DateTime>(), It.IsAny<DateTime>())).Returns(0); var service = new TradeService(tradeUowMock.Object, fixServiceMock.Object, mockEmailService.Object, mockConfig.Object, clientCompanyAccountsUoWMock.Object, null, null, null, null, mockClientCompanyService.Object, null); // When var result = service.GetTradeInformation(trade.Code); // Then Assert.NotNull(result); Assert.Equal(typeof(FxForwardTradeInformationModel), result.GetType()); Assert.Equal(string.Empty, result.InstructedBy); Assert.Equal(now, result.InstructedDateTime); Assert.Equal("Email", result.Method); Assert.Equal(trade.Code, result.TradeRef); Assert.Equal("EUR", result.SellCcy); Assert.Equal(trade.ClientLhsamt.Value, result.BuyAmount); Assert.Equal("GBP", result.BuyCcy); Assert.Equal(trade.ClientRhsamt.Value, result.SellAmount); Assert.Equal(1.3, result.Rate); Assert.Equal(today, result.ValueDate); Assert.Equal(3750, result.Collateral); Assert.Equal("EUR", result.CollateralCcy); Assert.Equal("GBPEUR", result.CurrencyPair); } [Fact] public void GetClosedTrades_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockTradeUow = new Mock<ITradeUow>(); var mockClientCompanyAccountsUoW = new Mock<IClientCompanyAccountsUoW>(); var mockFixService = new Mock<IBarxFxService>(); var mockEmailService = 
new Mock<IEmailService>(); var mockConfig = new Mock<IConfigWrapper>(); var mockDataTable = new DataTable(); mockDataTable.Columns.Add(new DataColumn("Code", typeof(string))); mockDataTable.Columns.Add(new DataColumn("ContractDate", typeof(DateTime))); mockDataTable.Columns.Add(new DataColumn("ValueDate", typeof(DateTime))); mockDataTable.Columns.Add(new DataColumn("ClientRate", typeof(decimal))); mockDataTable.Columns.Add(new DataColumn("SellCurrencyCode", typeof(string))); mockDataTable.Columns.Add(new DataColumn("BuyCurrencyCode", typeof(string))); mockDataTable.Columns.Add(new DataColumn("BuyClientAmount", typeof(decimal))); mockDataTable.Columns.Add(new DataColumn("SellClientAmount", typeof(decimal))); mockDataTable.Columns.Add(new DataColumn("Reference", typeof(string))); mockDataTable.Columns.Add(new DataColumn("RemainingVolume", typeof(decimal))); mockDataTable.Columns.Add(new DataColumn("FXForwardTradeStatusDescription", typeof(string))); mockDataTable.Columns.Add(new DataColumn("IsDefaultOPI", typeof(bool))); const string expectedTradeCode1 = "AG1000-0001"; DateTime expectedContractDate1 = new DateTime(2018, 6, 1); DateTime expectedValueDate1 = new DateTime(2018, 7, 1); const decimal expectedClientRate1 = 1.3733m; const string expectedBuyCurrencyCode1 = "EUR"; const string expectedSellCurrencyCode1 = "GBP"; const decimal expectedBuyClientAmount1 = 294000.00m; decimal expectedSellClientAmount1 = expectedBuyClientAmount1 / expectedClientRate1; const string expectedReference1 = "REF-" + expectedTradeCode1; const decimal expectedRemainingVolume1 = 4000.00m; const string expectedStatus1 = "Brokered"; bool expectedIsDefaultOPI1 = true; const string expectedTradeCode2 = "AG1000-0002"; DateTime expectedContractDate2 = new DateTime(2018, 6, 4); DateTime expectedValueDate2 = new DateTime(2018, 7, 2); const decimal expectedClientRate2 = 1.2701m; const string expectedSellCurrencyCode2 = "EUR"; const string expectedBuyCurrencyCode2 = "GBP"; const decimal expectedSellClientAmount2 = 220000.00m; decimal expectedBuyClientAmount2 = expectedSellClientAmount2 / expectedClientRate2; const string expectedReference2 = "REF-" + expectedTradeCode2; const decimal expectedRemainingVolume2 = 1000.00m; const string expectedStatus2 = "Brokered"; bool expectedIsDefaultOPI2 = false; DataRow mockDataRow = mockDataTable.NewRow(); mockDataRow["Code"] = expectedTradeCode1; mockDataRow["ContractDate"] = expectedContractDate1; mockDataRow["ValueDate"] = expectedValueDate1; mockDataRow["ClientRate"] = expectedClientRate1; mockDataRow["SellCurrencyCode"] = expectedSellCurrencyCode1; mockDataRow["BuyCurrencyCode"] = expectedBuyCurrencyCode1; mockDataRow["BuyClientAmount"] = expectedBuyClientAmount1; mockDataRow["SellClientAmount"] = expectedSellClientAmount1; mockDataRow["Reference"] = expectedReference1; mockDataRow["RemainingVolume"] = expectedRemainingVolume1; mockDataRow["FXForwardTradeStatusDescription"] = expectedStatus1; mockDataRow["IsDefaultOPI"] = expectedIsDefaultOPI1; mockDataTable.Rows.Add(mockDataRow); mockDataRow = mockDataTable.NewRow(); mockDataRow["Code"] = expectedTradeCode2; mockDataRow["ContractDate"] = expectedContractDate2; mockDataRow["ValueDate"] = expectedValueDate2; mockDataRow["ClientRate"] = expectedClientRate2; mockDataRow["SellCurrencyCode"] = expectedSellCurrencyCode2; mockDataRow["BuyCurrencyCode"] = expectedBuyCurrencyCode2; mockDataRow["BuyClientAmount"] = expectedBuyClientAmount2; mockDataRow["SellClientAmount"] = expectedSellClientAmount2; mockDataRow["Reference"] = 
expectedReference2; mockDataRow["RemainingVolume"] = expectedRemainingVolume2; mockDataRow["FXForwardTradeStatusDescription"] = expectedStatus2; mockDataRow["IsDefaultOPI"] = expectedIsDefaultOPI2; mockDataTable.Rows.Add(mockDataRow); mockTradeUow.Setup(x => x.GetClosedTrades(1)).Returns(mockDataTable); var service = new TradeService(mockTradeUow.Object, mockFixService.Object, mockEmailService.Object, mockConfig.Object, mockClientCompanyAccountsUoW.Object, null, null, null, null, null, null); //Act var result = service.GetClosedTrades(1); //Assert Assert.IsType<List<TradeModel>>(result); List<TradeModel> list = (List<TradeModel>)result; Assert.Equal(2, list.Count); TradeModel tradeModel = list[0]; Assert.Equal(expectedTradeCode1, tradeModel.TradeId); Assert.Equal(expectedContractDate1, tradeModel.ContractDate); Assert.Equal(expectedValueDate1, tradeModel.ValueDate); Assert.Equal(expectedClientRate1, tradeModel.ClientRate); Assert.Equal(expectedSellCurrencyCode1, tradeModel.SellCcy); Assert.Equal(expectedBuyCurrencyCode1, tradeModel.BuyCcy); Assert.Equal(expectedSellClientAmount1, tradeModel.ClientSellAmount); Assert.Equal(expectedBuyClientAmount1, tradeModel.ClientBuyAmount); Assert.Equal(expectedReference1, tradeModel.Reference); Assert.Equal(expectedRemainingVolume1, tradeModel.Balance); Assert.Equal(expectedStatus1, tradeModel.Status); Assert.Equal(expectedIsDefaultOPI1, tradeModel.PayToDefaultOPI); tradeModel = list[1]; Assert.Equal(expectedTradeCode2, tradeModel.TradeId); Assert.Equal(expectedContractDate2, tradeModel.ContractDate); Assert.Equal(expectedValueDate2, tradeModel.ValueDate); Assert.Equal(expectedClientRate2, tradeModel.ClientRate); Assert.Equal(expectedSellCurrencyCode2, tradeModel.SellCcy); Assert.Equal(expectedBuyCurrencyCode2, tradeModel.BuyCcy); Assert.Equal(expectedSellClientAmount2, tradeModel.ClientSellAmount); Assert.Equal(expectedBuyClientAmount2, tradeModel.ClientBuyAmount); Assert.Equal(expectedReference2, tradeModel.Reference); Assert.Equal(expectedRemainingVolume2, tradeModel.Balance); Assert.Equal(expectedStatus2, tradeModel.Status); Assert.Equal(expectedIsDefaultOPI2, tradeModel.PayToDefaultOPI); } [Fact] public void GetQuotesAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var quoteResponse = new FixQuoteResponseModel { BrokerRate = 1.3m }; var tradeCountObject = new ClientCompanyTradeCount { TradeCount = 1 }; var mockTradeUow = new Mock<ITradeUow>(); mockTradeUow.Setup(uow => uow.ExecuteOrder(It.IsAny<FxforwardTrade>(), tradeCountObject)) .Returns(true); mockTradeUow.Setup(uow => uow.RejectOrder(It.IsAny<FxforwardTrade>())); mockTradeUow.Setup(uow => uow.GetTradeCountByPrimaryKey(1)) .Returns(tradeCountObject); var mockFixService = new Mock<IBarxFxService>(); mockFixService.Setup(s => s.GetQuoteAsync(It.IsAny<FixQuoteRequestModel>())) .Returns(Task.FromResult(quoteResponse)); var mockAppSettingService = new Mock<IAppSettingService>(); mockAppSettingService.Setup(x => x.GetTimeOut()).Returns(15000); mockAppSettingService.Setup(x => x.GetStreamingQuoteDuration()).Returns(35); var mockClientCompanyService = new Mock<IClientCompanyService>(); mockClientCompanyService.Setup(s => s.GetClientCompanySpread(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<DateTime>(), It.IsAny<DateTime>())).Returns(0); var service = new TradeService(mockTradeUow.Object, mockFixService.Object, null, null, null, null, null, mockAppSettingService.Object, null, mockClientCompanyService.Object, null); var quoteModel = new QuoteModel { 
Amount = 1000, RhsCcy = "GBP", LhsCcy = "EUR", ValueDate = DateTime.Now }; QuoteRequestModel quoteRequest = new QuoteRequestModel { AuthUserId = 1, ClientCompanyId = 1, QuoteModels = new List<QuoteModel> { quoteModel } }; //Act var taskOutput = service.GetQuotesAsync(quoteRequest); var isCompleted = taskOutput.IsCompletedSuccessfully; //Assert Assert.True(isCompleted); Assert.NotNull(taskOutput); Assert.True(taskOutput.Result.Any()); var firstEntryCode = taskOutput.Result[0].ErrorMessage; Assert.True(String.IsNullOrEmpty(taskOutput.Result[0].ErrorMessage)); } [Fact] public void ExecuteDeals_Successful_When_It_Has_The_Correct_Input() { //Arrange var trade = new FxforwardTrade { Code = "Trade 42", Lhsccy = new Currency { Id = 42, Code = "GBP" }, Rhsccy = new Currency { Id = 24, Code = "EUR" }, TradeInstructionMethodId = 2, IsBuy = true, CreatedDate = DateTime.Now, ValueDate = DateTime.Now, ClientLhsamt = 25000, ClientRhsamt = 15000, CollateralPerc = 25, CurrencyPair = "GBPEUR", BrokerRate = 1.2m, ClientRate = 1.3m, AuthorisedByClientCompanyContact = new DataAccess.Entities.ClientCompanyContact { Id = 1 } }; var quoteResponse = new FixNewOrderResponseModel { BarclaysAssignedId = "101", BarclaysTradeId = "BAR101", ErrorMessage = "" }; var dealResponse = new DealResponseModel { Code = "AG0001-0002", IsSuccessful = true }; var tradeCountObject = new ClientCompanyTradeCount { TradeCount = 1 }; var mockFixService = new Mock<IBarxFxService>(); mockFixService.Setup(s => s.NewOrderSingleAsync(It.IsAny<FixNewOrderRequestModel>())) .Returns(Task.FromResult(quoteResponse)); var mockTradeUow = new Mock<ITradeUow>(); mockTradeUow.Setup(uow => uow.CreateDeal(It.IsAny<FxforwardTrade>(), tradeCountObject)) .Returns(true); mockTradeUow.Setup(uow => uow.BrokerDeal(It.IsAny<FxforwardTrade>(), tradeCountObject)) .Returns(true); mockTradeUow.Setup(uow => uow.GetTradeCountByPrimaryKey(1)) .Returns(tradeCountObject); mockTradeUow.Setup(uow => uow.GetEmirStatus("Pending")) .Returns(new Emirstatus { Id = 2 }); mockTradeUow.Setup(uow => uow.GetFxForwardStatus("Filled")) .Returns(new FxforwardTradeStatus { Id = 2 }); mockTradeUow.Setup(uow => uow.GetTradeInstructionMethod("Online")) .Returns(new TradeInstructionMethod { Id = 4 }); mockTradeUow.Setup(uow => uow.GetFxForwardStatus("Brokered")) .Returns(new FxforwardTradeStatus { Id = 3 }); mockTradeUow.Setup(uow => uow.GetBroker("Barclays")) .Returns(new Broker { Id = 1 }); var trades = new List<FxforwardTrade> { trade }; mockTradeUow.Setup(uow => uow.GetTrade("AG0001-0002")) .Returns(trades.AsQueryable); var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUserByAuthUserId(It.IsAny<int>())) .Returns(new ApplicationServiceUser { ClientCompanyContactId = 1 }); var mockCurrencyService = new Mock<ICurrencyService>(); mockCurrencyService.Setup(x => x.GetCurrencyId(It.IsAny<string>())) .Returns(1); var appSettingConfig = new Mock<IAppSettingService>(); appSettingConfig.Setup(x => x.GetEmirUtiCode()) .Returns("0875415"); var mockClientCompanyUow = new Mock<IClientCompanyUow>(); mockClientCompanyUow.Setup(uow => uow.GetClientCompany(1)) .Returns((new List<ClientCompany> { new ClientCompany { AssignNewTrades = false } }).AsQueryable); mockClientCompanyUow.Setup(uow => uow.UpdateCompanyFirstTradeDate(1, 1)); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendFailedFIXTradeEmailAsync(It.IsAny<FailedFIXTradeModel>())).Returns(Task.CompletedTask); var mockClientCompanyService = new Mock<IClientCompanyService>(); 
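// The company has no default settlement account in this scenario; the deal is still expected to broker successfully.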
ClientCompanyAccountModel clientCompanyAccountModel = null; mockClientCompanyService.Setup(s => s.GetClientCompanyDefaultAccount(It.IsAny<int>(), It.IsAny<int>())).Returns(clientCompanyAccountModel); var service = new TradeService(mockTradeUow.Object, mockFixService.Object, mockEmailService.Object, null, null, mockUserService.Object, mockCurrencyService.Object, appSettingConfig.Object, mockClientCompanyUow.Object, mockClientCompanyService.Object, null); var dealModel = new DealModel { Amount = 1000, RhsCcy = "GBP", LhsCcy = "EUR", ValueDate = DateTime.Now, ExpirationDateTime = DateTime.Now.AddMinutes(2), IsBuy = true, IsRhsMajor = true, Rate = 1.3m, BrokerRate = 1.2m }; DealRequestModel dealRequest = new DealRequestModel { AuthUserId = 1, ClientCompanyId = 1, DealModels = new List<DealModel> { dealModel } }; //Act var results = service.Deal(dealRequest).Result; //Assert Assert.NotNull(results); Assert.True(results.Any()); var firstEntryCode = results[0].Code; Assert.Equal("AG0001-0002", firstEntryCode); Assert.True(results[0].IsSuccessful); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthApplication.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthApplication { public AuthApplication() { AuthUser = new HashSet<AuthUser>(); } public int Id { get; set; } public string Description { get; set; } public AuthApplication IdNavigation { get; set; } public AuthApplication InverseIdNavigation { get; set; } public ICollection<AuthUser> AuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Trades/DealResponseModel.cs namespace Argentex.Core.Service.Models.Trades { public class DealResponseModel { public int TradeIndex { get; set; } public string Code { get; set; } public string BarclaysAssignedId { get; set; } public string BarclaysTradeId { get; set; } public bool IsSuccessful { get; set; } public string ErrorMessage { get; set; } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/ActivityLog.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class ActivityLog { [Key] [DatabaseGenerated(DatabaseGeneratedOption.Identity)] public long ActivityLogId { get; set; } [Required] [MaxLength(256)] public string UserName { get; set; } [Required] public DateTime LogDate { get; set; } [Required] public bool IsSuccess { get; set; } [MaxLength(128)] public string PrimaryIP { get; set; } [MaxLength(128)] public string SecondaryIP { get; set; } [Required] public int ActivityId { get; set; } [ForeignKey(nameof(ActivityId))] public Activity Activity { get; set; } public int? AuthUserId { get; set; } //Name should be UserId, but it must be set as Id to match the Primary Key column in the User table (ApplicationUser class) public long? Id { get; set; } public ApplicationUser ApplicationUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/VirtualAccountType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class VirtualAccountType { public VirtualAccountType() { ClientCompanyVirtualAccount = new HashSet<ClientCompanyVirtualAccount>(); VirtualAccountTypeBankAccount = new HashSet<VirtualAccountTypeBankAccount>(); } public int Id { get; set; } public string Description { get; set; } public bool? 
IsPaymentAllowed { get; set; } public ICollection<ClientCompanyVirtualAccount> ClientCompanyVirtualAccount { get; set; } public ICollection<VirtualAccountTypeBankAccount> VirtualAccountTypeBankAccount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Attributes/DecimalRequiredAttribute.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Text; namespace Argentex.Core.Service.Attributes { public class DecimalRequiredAttribute : ValidationAttribute { public override bool IsValid(object value) { return value is decimal && (decimal)value >= 0m && (decimal) value <= decimal.MaxValue; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyContact.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyContact { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int ClientCompanyId { get; set; } public string Title { get; set; } public string Forename { get; set; } public string Surname { get; set; } public string Email { get; set; } public string TelephoneDirect { get; set; } public string TelephoneMobile { get; set; } public string TelephoneOther { get; set; } public DateTime? Birthday { get; set; } public bool Authorized { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public string Notes { get; set; } public string Fullname { get; set; } public bool RecNotifications { get; set; } public bool RecAmreport { get; set; } public int? AuthUserId { get; set; } public string Position { get; set; } public bool? PrimaryContact { get; set; } public bool RecActivityReport { get; set; } public bool IsDeleted { get; set; } public string Aspnumber { get; set; } public DateTime? AspcreationDate { get; set; } public DateTime? LastTelephoneChangeDate { get; set; } public DateTime? LastEmailChangeDate { get; set; } public string BloombergGpi { get; set; } public string NiNumber { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxoptionStatus.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxoptionStatus { public FxoptionStatus() { Fxoption = new HashSet<Fxoption>(); } public int Id { get; set; } public string Description { get; set; } public ICollection<Fxoption> Fxoption { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Email/EmailSender/EmailType.cs namespace Argentex.Core.Service.Email.EmailSender { public enum EmailType { NewUser, ResetPassword, PasswordChanged, TradeNote, BrokerTradeNote, FailedFIXTrades, OrderNote, DealerOrderNote, CancelOrder, SettlementAssigned, InwardPayment, OutwardPayment, UserChangeRequestAlert, MobileChangeEmailAlert } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxoptionOutputs.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxoptionOutputs { public int Id { get; set; } public string FxoptionCode { get; set; } public string Outputs { get; set; } public string ExternalTradeCode { get; set; } public bool? IsDeleted { get; set; } public int? AuthUserId { get; set; } public int? 
FxoptionOutputsTemplateId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Settlements/SettlementController.cs using Argentex.Core.Service.Models.Settlements; using Argentex.Core.Service.Settlements; using Microsoft.AspNetCore.Mvc; using SynetecLogger; using System.Threading.Tasks; namespace Argentex.Core.Api.Controllers.Settlements { [Produces("application/json")] [Route("api/settlement")] public class SettlementController : Controller { private readonly ISettlementService _settlementService; private readonly ILogWrapper _logger; public SettlementController(ISettlementService settlementService, ILogWrapper logger) { _settlementService = settlementService; _logger = logger; } [HttpGet] [Route("payment-information")] public IActionResult GetPaymentOutInformation(string paymentCode, bool isPaymentOut = false) { var paymentInformation = _settlementService.GetPaymentInformation(paymentCode, isPaymentOut); return Ok(paymentInformation); } [HttpPost] [Route("assign")] public async Task<IActionResult> Assign([FromBody] AssignSettlementRequestModel model) { if (!ModelState.IsValid) return BadRequest(); var response = await _settlementService.AssignAsync(model); return Ok(response); } [HttpGet] [Route("assigned-settlements/{*tradeCode}")] public IActionResult GetAssignedSettlements(string tradeCode) { var assignedSettlements = _settlementService.GetAssignedSettlements(tradeCode); return Ok(assignedSettlements); } [HttpDelete] [Route("delete/{settlementId:int}")] public IActionResult DeleteAssignedSettlements(int settlementId) { if (!ModelState.IsValid) return BadRequest(); _settlementService.DeleteAssignedSettlements(settlementId); return Ok(); } protected override void Dispose(bool disposing) { if (disposing) { _settlementService.Dispose(); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Statements/StatementsController.cs using System; using System.Collections.Generic; using System.Linq; using System.Net; using Argentex.Core.Api.Models.Statements; using Argentex.Core.Service.Models.Statements; using Argentex.Core.Service.Statements; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using SynetecLogger; namespace Argentex.Core.Api.Controllers.Statements { [Produces("application/json")] [Route("api/statements")] [Authorize] public class StatementsController : Controller { private readonly IStatementService _statementService; private readonly ILogWrapper _logger; public StatementsController(IStatementService statementService, ILogWrapper logger) { _statementService = statementService; _logger = logger; } [HttpGet("company-statements/{clientCompanyId:int}")] public IActionResult GetStatements(int clientCompanyId, DateTime startDate, DateTime endDate) { if (startDate > endDate) return BadRequest("Start date cannot be posterior to end date"); return Ok(_statementService.GetStatements(clientCompanyId, startDate, endDate)); } protected override void Dispose(bool disposing) { if (disposing) { _statementService.Dispose(); } } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactList.cs namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompanyContactList { public int ID { get; set; } public string ContactTitle { get; set; } public string ContactForename { get; set; } public string ContactSurname { get; set; } public string ContactEmail { get; set; } public string FullName { get; set; } public bool Authorized { get; set; } public string Position { get; set; } 
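        // True when this contact is flagged as the company's primary contact.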
public bool PrimaryContact { get; set; } public int ClientCompanyId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyNote.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyNote { public int Id { get; set; } public int ClientCompanyId { get; set; } public string Title { get; set; } public string NoteText { get; set; } public int? AuthUserId { get; set; } public DateTime CreateDateTime { get; set; } public AuthUser AuthUser { get; set; } public ClientCompany ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Statements/StatementUoW.cs using System; using System.Data; using System.Data.SqlClient; using Argentex.Core.DataAccess.Entities; using Microsoft.EntityFrameworkCore; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; namespace Argentex.Core.UnitsOfWork.Statements { public class StatementUoW : BaseUow, IStatementUoW { private IGenericRepo<FxforwardTrade> _fxForwardTradeRepository; private IGenericRepo<Payment> _paymentRepository; private IGenericRepo<ClientCompany> _clientCompanyRepository; private IGenericRepo<BankAccountTransaction> _bankAccountTransactionRepository; private IGenericRepo<Currency> _currencyRepository; public StatementUoW(FXDB1Context context) : base(context) { } public IGenericRepo<FxforwardTrade> FxForwardTradeRepository => _fxForwardTradeRepository = _fxForwardTradeRepository ?? new GenericRepo<FxforwardTrade>(Context); public IGenericRepo<Payment> PaymentRepository => _paymentRepository = _paymentRepository ?? new GenericRepo<Payment>(Context); public IGenericRepo<ClientCompany> ClientCompanyRepository => _clientCompanyRepository = _clientCompanyRepository ?? new GenericRepo<ClientCompany>(Context); public IGenericRepo<BankAccountTransaction> BankAccountTransactionRepository => _bankAccountTransactionRepository = _bankAccountTransactionRepository ?? new GenericRepo<BankAccountTransaction>(Context); public IGenericRepo<Currency> CurrencyRepository => _currencyRepository = _currencyRepository ?? 
new GenericRepo<Currency>(Context); public DataTable GetClientCompanyVirtualAccountBalances(int clientCompanyId, DateTime valueDate) { var formatedDate = valueDate.ToString("yyyy-MM-dd"); var dt = new DataTable(); using (var sqlConn = (SqlConnection)Context.Database.GetDbConnection()) { var sql = "ClientCompanyVirtualAccountBalanceDisplay"; using (var sqlCmd = new SqlCommand(sql, sqlConn)) { sqlCmd.CommandType = CommandType.StoredProcedure; sqlCmd.CommandTimeout = 300; sqlCmd.Parameters.AddWithValue("@ClientCompanyID", clientCompanyId); sqlCmd.Parameters.AddWithValue("@ValueDate", formatedDate); sqlConn.Open(); using (var sqlAdapter = new SqlDataAdapter(sqlCmd)) { sqlAdapter.Fill(dt); } } } return dt; } } } <file_sep>/agfx-fxdb-core-api-dmz/Synetec.Data.UnitOfWork/BaseUnitOfWork/IBaseUow.cs using Microsoft.EntityFrameworkCore; using System; using System.Threading; using System.Threading.Tasks; namespace Synetec.Data.UnitOfWork.BaseUnitOfWork { public interface IBaseUow : IDisposable { int SaveContext(); Task<int> SaveContextAsync(); Task<int> SaveContextAsync(CancellationToken cancellationToken = default(CancellationToken)); Task<int> SaveContextAsync(bool acceptAllChangesOnSuccess, CancellationToken cancellationToken = default(CancellationToken)); DbContext DbContext { get; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Helpers/CodeBuilder.cs using Argentex.Core.Service.Enums; using System.Text; namespace Argentex.Core.Service.Helpers { public static class CodeBuilder { public static string FormatTradeCode(int tradeCount, int clientCompanyId) { StringBuilder tradeCodeBuilder = new StringBuilder(); if (clientCompanyId < 10000) { tradeCodeBuilder.Append("0000" + clientCompanyId); tradeCodeBuilder.Remove(0, tradeCodeBuilder.Length - 4); tradeCodeBuilder.Insert(0, "AG"); } else { tradeCodeBuilder.Append("AG" + clientCompanyId); } if (tradeCount < 10000) { var tradeCountStr = "0000" + tradeCount; var subString = tradeCountStr.Remove(0, tradeCountStr.Length - 4); tradeCodeBuilder.Append("-" + subString); } else { tradeCodeBuilder.Append("-" + tradeCount); } return tradeCodeBuilder.ToString(); } public static string FormatSwapTradeCode(string tradeCode, int swapCount, SwapType swapType) { var sufix = swapType == SwapType.DeliveryLeg ? "DL" : "RL"; var swapTradeCode = $"{tradeCode}/{sufix}{swapCount}"; return swapTradeCode; } public static string FormatSwapTradeCode(int tradeCount, int swapCount, int clientCompanyId, SwapType swapType) { var tradeCode = FormatTradeCode(tradeCount, clientCompanyId); var swapTradeCode = FormatSwapTradeCode(tradeCode, swapCount, swapType); return swapTradeCode; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogSwiftincomingFile.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogSwiftincomingFile { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string Filename { get; set; } public DateTime CreatedDateTime { get; set; } public int? 
SwiftincomingFileTypeId { get; set; }
        public string Content { get; set; }
        public string ContentDecoded { get; set; }
        public string Laufilename { get; set; }
        public string LaufileContent { get; set; }
        public int SwiftincomingFileProcessingStatusId { get; set; }
        public string DisplayError { get; set; }
        public string ProcessingError { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/SettlementAccountModel.cs
using System.ComponentModel.DataAnnotations;

namespace Argentex.Core.Service.Models.ClientCompany
{
    public class SettlementAccountModel
    {
        public int ClientCompanyOpiId { get; set; }
        public string Description { get; set; }
        [Required]
        [Range(1, int.MaxValue)]
        public int CurrencyId { get; set; }
        [Required]
        [Range(1, int.MaxValue)]
        public int CountryId { get; set; }
        public string BankName { get; set; }
        public string BankAddress { get; set; }
        [Required]
        public string AccountName { get; set; }
        public int? AccountNumber { get; set; }
        public int? ClearingCodePrefixId { get; set; }
        public string SortCode { get; set; }
        public string Reference { get; set; }
        public string SwiftCode { get; set; }
        public string Iban { get; set; }
        public string BeneficiaryName { get; set; }
        public string BeneficiaryAddress { get; set; }
        [Required]
        [Range(1, int.MaxValue)]
        public int UpdatedByAuthUserId { get; set; }
        [Required]
        [Range(1, int.MaxValue)]
        public int ClientCompanyId { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Email/EmailSenderTests.cs
using Argentex.Core.Service.Email.EmailSender;
using EQService;
using Moq;
using System;
using System.Collections.Generic;
using System.Text;
using Xunit;

namespace Argentex.Core.Service.Tests.Email
{
    public class EmailSenderTests
    {
        [Fact]
        public void CreateBody_NewUser()
        {
            var eqsService = new Mock<IServiceEmail>();
            var service = new EmailSender(eqsService.Object, null);

            var result = service.CreateBody(EmailType.NewUser);

            Assert.Contains("Create Password", result);
        }

        [Fact]
        public void CreateBody_ResetPassword()
        {
            var eqsService = new Mock<IServiceEmail>();
            var service = new EmailSender(eqsService.Object, null);

            var result = service.CreateBody(EmailType.ResetPassword);

            Assert.Contains("Forgot Password", result);
        }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceClassification.cs
using System;
using System.Collections.Generic;

namespace Argentex.Core.DataAccess.Entities
{
    public partial class ComplianceClassification
    {
        public ComplianceClassification()
        {
            ClientCompanyCompliance = new HashSet<ClientCompanyCompliance>();
            ComplianceClassificationFile = new HashSet<ComplianceClassificationFile>();
        }

        public int Id { get; set; }
        public string Description { get; set; }
        public int Sequence { get; set; }

        public ICollection<ClientCompanyCompliance> ClientCompanyCompliance { get; set; }
        public ICollection<ComplianceClassificationFile> ComplianceClassificationFile { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Currencies/CurrencyService.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Argentex.Core.Service.Exceptions;
using Argentex.Core.Service.Models.Currencies;
using Argentex.Core.UnitsOfWork.Currencies;

namespace Argentex.Core.Service.Currencies
{
    public class CurrencyService : ICurrencyService
    {
        private readonly ICurrencyUoW _currencyUoW;
        private bool _disposed;

        public CurrencyService(ICurrencyUoW currencyUoW)
        {
            _currencyUoW = currencyUoW;
        }

        public double GetCurrencyPairRate(string currencyPair)
        {
            var
currencyPairPricing = _currencyUoW.CurrencyPairPricingRepository .Get(x => x.CurrencyPair.Equals(currencyPair, StringComparison.InvariantCultureIgnoreCase)) .FirstOrDefault(); if(currencyPairPricing == null) throw new CurrencyPairPricingNotFoundException($"{currencyPair} does not exist"); return currencyPairPricing.Rate; } public CurrencyModel GetCurrency(int currencyId) { var currency = _currencyUoW .GetCurrency(currencyId) .Select(x => new CurrencyModel() { Code = x.Code }).SingleOrDefault(); if(currency == null) throw new CurrencyNotFoundException($"Currency with id {currencyId} does not exist"); return currency; } public IEnumerable<CurrencyModel> GetCurrencies() { return _currencyUoW.GetCurrencies().OrderBy(x => x.Code) .Select(x => new CurrencyModel { Id = x.Id, Code = x.Code }) .ToList(); } public int GetCurrencyId(string code) { return _currencyUoW.GetCurrency(code) .Select(x => x.Id) .SingleOrDefault(); } protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _currencyUoW?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogSwiftincomingFileStatement.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogSwiftincomingFileStatement { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public DateTime CreatedDateTime { get; set; } public int SwiftincomingFileId { get; set; } public int FilePartNumber { get; set; } public int? PaymentId { get; set; } public string RawContentLine61 { get; set; } public string RawContentLine86 { get; set; } public string MatchingContent { get; set; } public string DisplayError { get; set; } public string ProcessingError { get; set; } public bool MatchedProvisionally { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/CountryGroup.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class CountryGroup { [Key] public int Id { get; set; } [Required] [MaxLength(128)] public string Description { get;set;} [Required] public int Sequence { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyCompliance.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyCompliance { public ClientCompanyCompliance() { ClientCompanyComplianceCorporateSector = new HashSet<ClientCompanyComplianceCorporateSector>(); ClientCompanyComplianceCurrency = new HashSet<ClientCompanyComplianceCurrency>(); ComplianceClassificationFile = new HashSet<ComplianceClassificationFile>(); ComplianceQuestionnaire = new HashSet<ComplianceQuestionnaire>(); } public int Id { get; set; } public int ClientCompanyId { get; set; } public int? AmlriskId { get; set; } public int? RegisteredDomicileCountryId { get; set; } public DateTime? RefreshDueDateTime { get; set; } public decimal? ExpectedTotalVolume { get; set; } public int? ExpectedFrequencyId { get; set; } public decimal? ExpectedMaxTradeSize { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public decimal? TurnoverGbp { get; set; } public decimal? BalanceSheetGbp { get; set; } public decimal? 
OwnFundsGbp { get; set; } public bool Regulated { get; set; } public int? ClassificationId { get; set; } public int? ReasonId { get; set; } public bool Ttca { get; set; } public int? NatureId { get; set; } public bool RequestInvoices { get; set; } public bool ThirdPartyPayments { get; set; } public bool DelegatedReporting { get; set; } public bool IsMiFid { get; set; } public Amlrisk Amlrisk { get; set; } public ComplianceClassification Classification { get; set; } public ClientCompany ClientCompany { get; set; } public ExpectedFrequency ExpectedFrequency { get; set; } public ComplianceNature Nature { get; set; } public ComplianceReason Reason { get; set; } public Country RegisteredDomicileCountry { get; set; } public ICollection<ClientCompanyComplianceCorporateSector> ClientCompanyComplianceCorporateSector { get; set; } public ICollection<ClientCompanyComplianceCurrency> ClientCompanyComplianceCurrency { get; set; } public ICollection<ComplianceClassificationFile> ComplianceClassificationFile { get; set; } public ICollection<ComplianceQuestionnaire> ComplianceQuestionnaire { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Fxswap.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Fxswap { public Fxswap() { ClientSiteAction2Fxswap = new HashSet<ClientSiteAction2Fxswap>(); } public int Id { get; set; } public int CreatedAuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public string ParentTradeCode { get; set; } public string DeliveryLegTradeCode { get; set; } public string ReversalLegTradeCode { get; set; } public AuthUser CreatedAuthUser { get; set; } public FxforwardTrade DeliveryLegTradeCodeNavigation { get; set; } public FxforwardTrade ParentTradeCodeNavigation { get; set; } public FxforwardTrade ReversalLegTradeCodeNavigation { get; set; } public ICollection<ClientSiteAction2Fxswap> ClientSiteAction2Fxswap { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CommissionType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class CommissionType { public CommissionType() { AppUserType = new HashSet<AppUserType>(); Commission = new HashSet<Commission>(); } public int Id { get; set; } public string Description { get; set; } public double DefaultCommissionRate { get; set; } public ICollection<AppUserType> AppUserType { get; set; } public ICollection<Commission> Commission { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/ClientCompanies/ClientCompanyServiceTests.cs using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.ClientCompanies; using Argentex.Core.Service.ClientSiteAction; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.ClientCompanyContacts.Model; using FluentAssertions; using MockQueryable.Moq; using Moq; using Synetec.Data.UnitOfWork.GenericRepo; using Xunit; namespace Argentex.Core.Service.Tests.ClientCompanies { public class ClientCompanyServiceTests { [Fact] public void Given_No_Company_Is_Found_For_An_Id_An_Exception_Should_Be_Thrown_When_Getting_The_Accounts() { // Given var clientCompanyId = 42; var 
clientCompanyReposiotyMock = new Mock<IGenericRepo<ClientCompany>>(); var clientCompanyUowMock = new Mock<IClientCompanyAccountsUoW>(); var clientSiteActionServiceMock = new Mock<IClientSiteActionService>(); clientCompanyReposiotyMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns((ClientCompany) null); clientCompanyUowMock.Setup(x => x.ClientCompanyRepository).Returns(clientCompanyReposiotyMock.Object); var service = new ClientCompanyAccountsService(clientCompanyUowMock.Object, clientSiteActionServiceMock.Object, null); var expectedMessage = $"Client company with id {clientCompanyId} does not exist"; // When var exception = Assert.Throws<ClientCompanyNotFoundException>(() => service.GetClientCompanyAccounts(clientCompanyId)); // Then Assert.NotNull(exception); Assert.Equal(expectedMessage, exception.Message); } [Fact] public void Given_There_Are_Not_Accounts_Found_An_Empty_List_Should_Be_Returned() { // Given var clientCompany = new ClientCompany { Id = 42 }; var clientCompanyReposiotyMock = new Mock<IGenericRepo<ClientCompany>>(); var clientCompanyOpiRepositoryMock = new Mock<IGenericRepo<ClientCompanyOpi>>(); var clientCompanyUowMock = new Mock<IClientCompanyAccountsUoW>(); var clientSiteActionServiceMock = new Mock<IClientSiteActionService>(); clientCompanyReposiotyMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns(clientCompany); clientCompanyOpiRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<ClientCompanyOpi, bool>>>(), It.IsAny<Func<IQueryable<ClientCompanyOpi>, IOrderedQueryable<ClientCompanyOpi>>>(), "")) .Returns(new List<ClientCompanyOpi>()); clientCompanyUowMock.Setup(x => x.ClientCompanyRepository).Returns(clientCompanyReposiotyMock.Object); clientCompanyUowMock.Setup(x => x.ClientCompanyOpiRepository).Returns(clientCompanyOpiRepositoryMock.Object); var service = new ClientCompanyAccountsService(clientCompanyUowMock.Object, clientSiteActionServiceMock.Object, null); var expectedType = typeof(List<ClientCompanyAccountModel>); // When var result = service.GetClientCompanyAccounts(clientCompany.Id); // Then Assert.NotNull(result); Assert.Equal(expectedType, result.GetType()); Assert.False(result.Any()); } [Fact] public void Given_There_Are_Accounts_Found_A_List_With_Of_Mapped_Accounts_Should_Be_Returned() { // Given var clientCompany = new ClientCompany { Id = 42 }; var currency = new Currency { Id = 24, Code = "GBP" }; var clientCompanyAccounts = new List<ClientCompanyOpi> { new ClientCompanyOpi { ClientCompanyId = clientCompany.Id, AccountName = "<NAME>", AccountNumber = "123456", Currency = currency, CurrencyId = currency.Id } }; var clientCompanyReposiotyMock = new Mock<IGenericRepo<ClientCompany>>(); var clientCompanyOpiRepositoryMock = new Mock<IGenericRepo<ClientCompanyOpi>>(); var currencyRepositoryMock = new Mock<IGenericRepo<Currency>>(); var clientCompanyUowMock = new Mock<IClientCompanyAccountsUoW>(); var clientSiteActionServiceMock = new Mock<IClientSiteActionService>(); clientCompanyReposiotyMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns(clientCompany); clientCompanyOpiRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<ClientCompanyOpi, bool>>>(), It.IsAny<Func<IQueryable<ClientCompanyOpi>, IOrderedQueryable<ClientCompanyOpi>>>(), "")) .Returns(clientCompanyAccounts); currencyRepositoryMock.Setup(x => x.GetByPrimaryKey(It.IsAny<int>())).Returns(currency); clientCompanyUowMock.Setup(x => x.ClientCompanyRepository).Returns(clientCompanyReposiotyMock.Object); clientCompanyUowMock.Setup(x => 
x.ClientCompanyOpiRepository).Returns(clientCompanyOpiRepositoryMock.Object); clientCompanyUowMock.Setup(x => x.CurrencyRepository).Returns(currencyRepositoryMock.Object); var service = new ClientCompanyAccountsService(clientCompanyUowMock.Object, clientSiteActionServiceMock.Object, null); var expectedId = 42; var expectedAccountName = "<NAME>"; var expectedAccountNumber = "123456"; var expectedCurrencyCode = "GBP"; // When var result = service.GetClientCompanyAccounts(clientCompany.Id); // Then Assert.NotNull(result); Assert.True(result.Any()); var firstAccount = result.First(); Assert.Equal(expectedId, firstAccount.ClientCompanyId); Assert.Equal(expectedAccountName, firstAccount.AccountName); Assert.Equal(expectedAccountNumber, firstAccount.AccountNumber); Assert.Equal(expectedCurrencyCode, firstAccount.Currency); } [Fact] public void GetNumberOfAssociatedTrades_Should_Return_3_If_AssociatedTradesCount_Equals_3() { //Arrange int clientCompanyOpiId = 44; int associatedTradesCount = 3; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var clientCompanyAccountsUowMock = new Mock<IClientCompanyAccountsUoW>(); clientCompanyAccountsUowMock.Setup(x => x.GetAssociatedTradesCount(It.IsAny<int>(), It.IsAny<int>())).Returns(associatedTradesCount); var service = new ClientCompanyAccountsService(clientCompanyAccountsUowMock.Object, null, null); //Act var expectedResult = 3; var result = service.GetNumberOfAssociatedTrades(clientCompanyOpiId); //Assert result.Should().NotBe(0).And.Be(expectedResult); } [Fact] public void GetNumberOfAssociatedTrades_Should_Return_0_If_AssociatedTradesCount_Equals_0() { //Arrange int clientCompanyOpiId = 44; int associatedTradesCount = 0; var clientCompanyAccountsServiceMock = new Mock<IClientCompanyAccountsService>(); var clientCompanyAccountsUowMock = new Mock<IClientCompanyAccountsUoW>(); clientCompanyAccountsUowMock.Setup(x => x.GetAssociatedTradesCount(It.IsAny<int>(), It.IsAny<int>())).Returns(associatedTradesCount); var service = new ClientCompanyAccountsService(clientCompanyAccountsUowMock.Object, null, null); //Act var expectedResult = 0; var result = service.GetNumberOfAssociatedTrades(clientCompanyOpiId); //Assert result.Should().Be(expectedResult); } [Fact] public void Get_ClientCompanyContact_Categories_For_ClientContact() { //Arrange int clientCompanyContactId = 44; DataAccess.Entities.ClientCompanyContact clientCompanyContact = new DataAccess.Entities.ClientCompanyContact() {Id = clientCompanyContactId}; const string contactCategoryDescription1 = "EUR/USD"; const string contactCategoryDescription2 = "EUR/GBP"; const string contactCategoryDescription3 = "GBP/USD"; const string contactCategoryDescription4 = "GBP/EUR"; const int contactCategoryId1 = 1; const int contactCategoryId2 = 2; const int contactCategoryId3 = 3; const int contactCategoryId4 = 4; List<ClientCompanyContactCategory> contactCategoryList = new List<ClientCompanyContactCategory>() { new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = contactCategoryId1, ClientCompanyContact = clientCompanyContact, ContactCategory = new ContactCategory() {Id = contactCategoryId1, Description = contactCategoryDescription1} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = contactCategoryId2, ClientCompanyContact = clientCompanyContact, ContactCategory = new ContactCategory() {Id = contactCategoryId2, Description = contactCategoryDescription2} }, new 
ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = contactCategoryId3, ClientCompanyContact = clientCompanyContact, ContactCategory = new ContactCategory() {Id = contactCategoryId3, Description = contactCategoryDescription3} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = contactCategoryId4, ClientCompanyContact = clientCompanyContact, ContactCategory = new ContactCategory() {Id = contactCategoryId4, Description = contactCategoryDescription4} } }; IList<ClientCompanyContactCategoryModel> expectedResult = new List<ClientCompanyContactCategoryModel>() { new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription1, ContactCategoryId = contactCategoryId1 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription2, ContactCategoryId = contactCategoryId2 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription3, ContactCategoryId = contactCategoryId3 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription4, ContactCategoryId = contactCategoryId4 }, }; var clientCompanyUow = new Mock<IClientCompanyUow>(); clientCompanyUow.Setup(x => x.GetClientCompanyContactCategories(It.IsAny<int>())).Returns(contactCategoryList.AsQueryable().BuildMock().Object); var service = new ClientCompanyService(clientCompanyUow.Object,null,null,null,null); //Act IEnumerable<ClientCompanyContactCategoryModel> result = service.GetClientCompanyContactCategories(clientCompanyContactId).Result; //Assert result.Should().BeEquivalentTo(expectedResult); } [Fact] public void Get_All_ClientCompanyContact_Categories() { //Arrange int clientCompanyContactId1 = 44; int clientCompanyContactId2 = 50; int clientCompanyContactId3 = 100; DataAccess.Entities.ClientCompanyContact clientCompanyContact1 = new DataAccess.Entities.ClientCompanyContact() { Id = clientCompanyContactId1 }; DataAccess.Entities.ClientCompanyContact clientCompanyContact2 = new DataAccess.Entities.ClientCompanyContact() { Id = clientCompanyContactId2 }; DataAccess.Entities.ClientCompanyContact clientCompanyContact3 = new DataAccess.Entities.ClientCompanyContact() { Id = clientCompanyContactId3 }; const string contactCategoryDescription1 = "EUR/USD"; const string contactCategoryDescription2 = "EUR/GBP"; const string contactCategoryDescription3 = "GBP/USD"; const string contactCategoryDescription4 = "GBP/EUR LHS"; const string contactCategoryDescription5 = "GBP/EUR RHS"; const string contactCategoryDescription6 = "GBP/AUD LHS"; const string contactCategoryDescription7 = "GBP/EUR RHS"; const int contactCategoryId1 = 1; const int contactCategoryId2 = 2; const int contactCategoryId3 = 3; const int contactCategoryId4 = 4; const int contactCategoryId5 = 5; const int contactCategoryId6 = 6; const int contactCategoryId7 = 7; List<ClientCompanyContactCategory> contactCategoryList = new List<ClientCompanyContactCategory>() { new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryId = contactCategoryId1, ClientCompanyContact = clientCompanyContact1, ContactCategory = new ContactCategory() {Id = contactCategoryId1, Description = contactCategoryDescription1} }, new 
ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryId = contactCategoryId2, ClientCompanyContact = clientCompanyContact1, ContactCategory = new ContactCategory() {Id = contactCategoryId2, Description = contactCategoryDescription2} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryId = contactCategoryId3, ClientCompanyContact = clientCompanyContact1, ContactCategory = new ContactCategory() {Id = contactCategoryId3, Description = contactCategoryDescription3} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryId = contactCategoryId4, ClientCompanyContact = clientCompanyContact1, ContactCategory = new ContactCategory() {Id = contactCategoryId4, Description = contactCategoryDescription4} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId2, ContactCategoryId = contactCategoryId5, ClientCompanyContact = clientCompanyContact2, ContactCategory = new ContactCategory() {Id = contactCategoryId5, Description = contactCategoryDescription5} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId2, ContactCategoryId = contactCategoryId6, ClientCompanyContact = clientCompanyContact2, ContactCategory = new ContactCategory() {Id = contactCategoryId6, Description = contactCategoryDescription6} }, new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId3, ContactCategoryId = contactCategoryId7, ClientCompanyContact = clientCompanyContact3, ContactCategory = new ContactCategory() {Id = contactCategoryId7, Description = contactCategoryDescription7} } }; IList<ClientCompanyContactCategoryModel> expectedResult = new List<ClientCompanyContactCategoryModel>() { new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryDescription = contactCategoryDescription1, ContactCategoryId = contactCategoryId1 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryDescription = contactCategoryDescription2, ContactCategoryId = contactCategoryId2 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryDescription = contactCategoryDescription3, ContactCategoryId = contactCategoryId3 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId1, ContactCategoryDescription = contactCategoryDescription4, ContactCategoryId = contactCategoryId4 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId2, ContactCategoryDescription = contactCategoryDescription5, ContactCategoryId = contactCategoryId5 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId2, ContactCategoryDescription = contactCategoryDescription6, ContactCategoryId = contactCategoryId6 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId3, ContactCategoryDescription = contactCategoryDescription7, ContactCategoryId = contactCategoryId7 }, }; var clientCompanyUow = new Mock<IClientCompanyUow>(); clientCompanyUow.Setup(x => x.GetClientCompanyContactCategories(It.IsAny<int>())).Returns(contactCategoryList.AsQueryable().BuildMock().Object); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act IEnumerable<ClientCompanyContactCategoryModel> result = 
service.GetClientCompanyContactCategories(clientCompanyContactId1).Result; //Assert result.Should().BeEquivalentTo(expectedResult); } [Fact] public void Add_ContactCategory_Successful() { var model = new ContactCategoryModel(){Description = "New Contact Category"}; var clientCompanyUow = new Mock<IClientCompanyUow>(); var contactCategories = new List<ContactCategory>(); var contactCategory = contactCategories.AsQueryable(); clientCompanyUow.Setup(x => x.GetContactCategory(It.IsAny<string>())).Returns(contactCategory); clientCompanyUow.Setup(x => x.AddContactCategory(It.IsAny<ContactCategory>())); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.AddContactCategory(model); //Assert result.Should().Be(true); } [Fact] public void Add_ContactCategory_Not_Successful() { string description = "New Contact Category"; var model = new ContactCategoryModel() { Description = description }; var clientCompanyUow = new Mock<IClientCompanyUow>(); var contactCategories = new List<ContactCategory>() { new ContactCategory(){Description = description}}; var contactCategory = contactCategories.AsQueryable(); clientCompanyUow.Setup(x => x.GetContactCategory(It.IsAny<string>())).Returns(contactCategory); clientCompanyUow.Setup(x => x.AddContactCategory(It.IsAny<ContactCategory>())); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.AddContactCategory(model); //Assert result.Should().Be(false); } [Fact] public void Get_Contact_Categories() { //Arrange const string contactCategoryDescription1 = "EUR/USD"; const string contactCategoryDescription2 = "EUR/GBP"; const string contactCategoryDescription3 = "GBP/USD"; const string contactCategoryDescription4 = "GBP/EUR"; const int contactCategoryId1 = 1; const int contactCategoryId2 = 2; const int contactCategoryId3 = 3; const int contactCategoryId4 = 4; const int contactCategorySequence1 = 1; const int contactCategorySequence2 = 2; const int contactCategorySequence3 = 3; const int contactCategorySequence4 = 4; List<ContactCategory> contactCategoryList = new List<ContactCategory>() { new ContactCategory() { Id = contactCategoryId1, Description = contactCategoryDescription1, Sequence = contactCategorySequence1 }, new ContactCategory() { Id = contactCategoryId2, Description = contactCategoryDescription2, Sequence = contactCategorySequence2 }, new ContactCategory() { Id = contactCategoryId3, Description = contactCategoryDescription3, Sequence = contactCategorySequence3 }, new ContactCategory() { Id = contactCategoryId4, Description = contactCategoryDescription4, Sequence = contactCategorySequence4 }, }; IList<ContactCategoryModel> expectedResult = new List<ContactCategoryModel>() { new ContactCategoryModel() { Id = contactCategoryId1, Description = contactCategoryDescription1, Sequence = contactCategorySequence1 }, new ContactCategoryModel() { Id = contactCategoryId2, Description = contactCategoryDescription2, Sequence = contactCategorySequence2 }, new ContactCategoryModel() { Id = contactCategoryId3, Description = contactCategoryDescription3, Sequence = contactCategorySequence3 }, new ContactCategoryModel() { Id = contactCategoryId4, Description = contactCategoryDescription4, Sequence = contactCategorySequence4 }, }; var clientCompanyUow = new Mock<IClientCompanyUow>(); clientCompanyUow.Setup(x => x.GetContactCategories()).Returns(contactCategoryList.AsQueryable().BuildMock().Object); var service = new 
ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act IEnumerable<ContactCategoryModel> result = service.GetContactCategories().Result; //Assert result.Should().BeEquivalentTo(expectedResult); } [Fact] public void GetClientCompanyContact_Success_With_Valid_clientCompanyContactID() { //Arrange var clientCompanyUow = new Mock<IClientCompanyUow>(); var clientCompanyContact = new DataAccess.Entities.ClientCompanyContact { Id = 1, Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", ClientCompanyId = 439, UpdatedByAuthUserId = 111, PrimaryContact = false, Birthday = DateTime.Now, AuthUser = new AuthUser(), ClientCompany = new ClientCompany() }; var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 1, AuthUsertId = 0 }; clientCompanyUow.Setup(x => x.GetCurrentClientCompanyContact(It.IsAny<ClientCompanyContactSearchModel>())) .Returns(clientCompanyContact); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.GetClientCompanyContact(clientCompanyContactSearchContext); //Assert result.Should().BeOfType<ClientCompanyContactResponseModel>().And.NotBeNull(); result.CompanyContactModel.ID.Should().Be(1); result.CompanyContactModel.ContactEmail.Should().Be("<EMAIL>"); } [Fact] public void GetClientCompanyContact_Success_With_Valid_AuthUsertId() { //Arrange var clientCompanyUow = new Mock<IClientCompanyUow>(); var clientCompanyContact = new DataAccess.Entities.ClientCompanyContact { Id = 1, Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", ClientCompanyId = 439, UpdatedByAuthUserId = 111, PrimaryContact = false, Birthday = DateTime.Now, AuthUser = new AuthUser(), ClientCompany = new ClientCompany() }; var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 0, AuthUsertId = 1 }; clientCompanyUow.Setup(x => x.GetCurrentClientCompanyContact(It.IsAny<ClientCompanyContactSearchModel>())) .Returns(clientCompanyContact); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.GetClientCompanyContact(clientCompanyContactSearchContext); //Assert result.Should().BeOfType<ClientCompanyContactResponseModel>().And.NotBeNull(); result.CompanyContactModel.ID.Should().Be(1); result.CompanyContactModel.ContactEmail.Should().Be("<EMAIL>"); } [Fact] public void GetClientCompanyContact_Returns_Empty_Model_When_clientCompanyContactID_And_AuthUsertId_Equal_Zero() { //Arrange var clientCompanyUow = new Mock<IClientCompanyUow>(); var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 0, AuthUsertId = 0 }; var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.GetClientCompanyContact(clientCompanyContactSearchContext); //Assert result.Should().NotBeNull().And.BeOfType(typeof(ClientCompanyContactResponseModel)); result.CompanyContactModel.Should().BeNull(); } [Fact] public void GetCompanyContactList_Success_With_Valid_clientCompanyContactID() { //Arrange var clientCompanyUow = new Mock<IClientCompanyUow>(); int clientCompanytID = 1; var user = new DataAccess.Entities.ClientCompanyContact { Id = 1, Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, PrimaryContact = false, Birthday = DateTime.Now }; var userList = new 
List<DataAccess.Entities.ClientCompanyContact> { user }.AsQueryable(); clientCompanyUow.Setup(x => x.GetClientCompanyContactList(It.IsAny<int>())) .Returns(userList); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.GetCompanyContactList(clientCompanytID); //Assert result.Should().NotBeNull(); result.CompanyContactListModel.Count.Should().Be(1); result.CompanyContactListModel.FirstOrDefault().ID.Should().Be(1); result.CompanyContactListModel.FirstOrDefault().ContactEmail.Should().Be("<EMAIL>"); } [Fact] public void GetCompanyContactList_Returns_Empty_Object_When_clientCompanyContactID_Equals_Zero() { //Arrange var clientCompanyUow = new Mock<IClientCompanyUow>(); int clientCompanyID = 0; clientCompanyUow.Setup(x => x.GetClientCompanyContactList(It.IsAny<int>())); var service = new ClientCompanyService(clientCompanyUow.Object, null, null, null, null); //Act var result = service.GetCompanyContactList(clientCompanyID); //Assert result.Should().BeAssignableTo<ClientCompanyContactListResponseModel>(); result.CompanyContactListModel.Should().BeEmpty(); } /* Task<IEnumerable<ClientCompanyContactCategoryModel>> GetClientCompanyContactCategories(int clientCompanyContactId); Task<IEnumerable<ContactCategoryModel>> GetContactCategories(); bool AddContactCategory(ContactCategoryModel model); ContactCategory GetContactCategory(int contactCategoryId); ContactCategory GetContactCategory(string contactCategoryDescription); bool ProcessClientCompanyContactCategories(ClientCompanyContactBulkCategoryModel model); */ } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/SmsProviders.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Enums { public enum SmsProviders { TextMagic } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/ISmsService.cs using Argentex.Core.Service.Sms.Models; using System; using System.Threading.Tasks; namespace Argentex.Core.Service { public interface ISmsService : IDisposable { Task<string> Send2FAMessage(string username); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactCategoryModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompanyContactCategoryModel { public int ClientCompanyContactId { get; set; } public int ContactCategoryId { get; set; } public string ContactCategoryDescription { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Country/CountryModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.Country { public class CountryModel { public int Id { get; internal set; } public string Name { get; internal set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Orders/OrderServiceTest.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Fix; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.Models.Order; using Argentex.Core.Service.Order; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.Trades; using Moq; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Xunit; namespace 
Argentex.Core.Service.Tests.Orders { public class OrderServiceTest { [Fact(Skip = "To be reviewed after FIX service update")] public void ExecuteOrdersAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var quoteResponse = new FixQuoteResponseModel { BrokerRate = 1.3m }; var tradeCountObject = new ClientCompanyTradeCount { TradeCount = 1 }; List<FxforwardTrade> list = new List<FxforwardTrade>() { new FxforwardTrade() { Code = "AG0001-0002", Lhsccy = new Currency { Id = 1, Code = "GBP" }, Rhsccy = new Currency { Id = 3, Code = "EUR" }, TradeInstructionMethodId = 5, IsBuy = true, CreatedDate = DateTime.Now, ValueDate = DateTime.Now, ClientLhsamt = 1000, ClientRhsamt = 1100, CollateralPerc = 0, CurrencyPair = "GBPEUR", BrokerRate = 1.2m, ClientRate = 1.3m, AuthorisedByClientCompanyContact = new DataAccess.Entities.ClientCompanyContact { Id = 1 } } }; var mockTradeUow = new Mock<ITradeUow>(); mockTradeUow.Setup(uow => uow.ExecuteOrder(It.IsAny<FxforwardTrade>(), tradeCountObject)) .Returns(true); mockTradeUow.Setup(uow => uow.RejectOrder(It.IsAny<FxforwardTrade>())); mockTradeUow.Setup(uow => uow.GetTradeCountByPrimaryKey(1)) .Returns(tradeCountObject); mockTradeUow.Setup(uow => uow.GetTrade(It.IsAny<string>())) .Returns(list.AsQueryable()); var mockFixService = new Mock<IBarxFxService>(); mockFixService.Setup(s => s.GetQuoteAsync(It.IsAny<FixQuoteRequestModel>())) .Returns(Task.FromResult(quoteResponse)); var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUserByAuthUserId(It.IsAny<int>())) .Returns(new ApplicationServiceUser { ClientCompanyContactId = 1 }); var mockCurrencyService = new Mock<ICurrencyService>(); mockCurrencyService.Setup(x => x.GetCurrencyId(It.IsAny<string>())) .Returns(1); var mockAppSettingService = new Mock<IAppSettingService>(); mockAppSettingService.Setup(x => x.GetTimeOut()).Returns(15000); mockAppSettingService.Setup(x => x.GetStreamingQuoteDuration()).Returns(35); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendOrderNoteEmailAsync(It.IsAny<OrderNoteModel>())).Returns(Task.CompletedTask); mockEmailService.Setup(s => s.SendDealerOrderNoteEmailAsync(It.IsAny<OrderNoteModel>())).Returns(Task.CompletedTask); var mockConfig = new Mock<IConfigWrapper>(); var mockClientCompanyService = new Mock<IClientCompanyService>(); mockClientCompanyService.Setup(s => s.GetClientCompanySpread(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<DateTime>(), It.IsAny<DateTime>())).Returns(0); var service = new OrderService(mockTradeUow.Object, mockFixService.Object, mockUserService.Object, mockCurrencyService.Object, mockAppSettingService.Object, mockEmailService.Object, mockConfig.Object, mockClientCompanyService.Object); var orderModel = new OrderModel { ClientAmount = 1000, RhsCcy = "GBP", LhsCcy = "EUR", ValueDate = DateTime.Now, ClientRate = 1.3m }; OrderRequestModel orderRequest = new OrderRequestModel { AuthUserId = 1, ClientCompanyId = 1, OrderModels = new List<OrderModel> { orderModel } }; //Act var taskOutput = service.ExecuteOrdersAsync(orderRequest); var isCompleted = taskOutput.IsCompletedSuccessfully; //Assert Assert.True(isCompleted); Assert.NotNull(taskOutput); Assert.True(taskOutput.Result.Any()); var firstEntryCode = taskOutput.Result[0].Code; Assert.Equal("AG0001-0002", firstEntryCode); Assert.True(taskOutput.Result[0].IsSuccessful); } [Fact(Skip ="To be reviewed after FIX service update")] public void ExecuteOrdersAsync_Fails_When_Rate_Is_Not_Favourable() 
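// Scenario note: the mocked FIX quote below returns a broker rate (0.8) that is below the client's requested rate (1.5),
// i.e. an unfavourable rate for the requested order; the mocks in the Arrange block set up that situation.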
{ //Arrange var quoteResponse = new FixQuoteResponseModel { BrokerRate = 0.8m }; var tradeCountObject = new ClientCompanyTradeCount { TradeCount = 1 }; List<FxforwardTrade> list = new List<FxforwardTrade>() { new FxforwardTrade() { Code = "AG0017-32893", Lhsccy = new Currency { Id = 1, Code = "GBP" }, Rhsccy = new Currency { Id = 3, Code = "EUR" }, TradeInstructionMethodId = 5, IsBuy = true, CreatedDate = DateTime.Now, ValueDate = DateTime.Now, ClientLhsamt = 1000, ClientRhsamt = 1100, CollateralPerc = 0, CurrencyPair = "GBPEUR", BrokerRate = 1.5m, AuthorisedByClientCompanyContact = new DataAccess.Entities.ClientCompanyContact { Id = 1 } } }; var mockTradeUow = new Mock<ITradeUow>(); mockTradeUow.Setup(uow => uow.ExecuteOrder(It.IsAny<FxforwardTrade>(), tradeCountObject)) .Returns(true); mockTradeUow.Setup(uow => uow.RejectOrder(It.IsAny<FxforwardTrade>())); mockTradeUow.Setup(uow => uow.GetTradeCountByPrimaryKey(1)) .Returns(tradeCountObject); mockTradeUow.Setup(uow => uow.GetTrade(It.IsAny<string>())) .Returns(list.AsQueryable()); var mockFixService = new Mock<IBarxFxService>(); mockFixService.Setup(s => s.GetQuoteAsync(It.IsAny<FixQuoteRequestModel>())) .Returns(Task.FromResult(quoteResponse)); var mockUserService = new Mock<IUserService>(); mockUserService.Setup(x => x.GetApplicationUserByAuthUserId(It.IsAny<int>())) .Returns(new ApplicationServiceUser { ClientCompanyContactId = 1 }); var mockCurrencyService = new Mock<ICurrencyService>(); mockCurrencyService.Setup(x => x.GetCurrencyId(It.IsAny<string>())) .Returns(1); var mockAppSettingService = new Mock<IAppSettingService>(); mockAppSettingService.Setup(x => x.GetTimeOut()).Returns(15000); mockAppSettingService.Setup(x => x.GetStreamingQuoteDuration()).Returns(35); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendOrderNoteEmailAsync(It.IsAny<OrderNoteModel>())).Returns(Task.CompletedTask); mockEmailService.Setup(s => s.SendDealerOrderNoteEmailAsync(It.IsAny<OrderNoteModel>())).Returns(Task.CompletedTask); var mockConfig = new Mock<IConfigWrapper>(); var mockClientCompanyService = new Mock<IClientCompanyService>(); mockClientCompanyService.Setup(s => s.GetClientCompanySpread(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<DateTime>(), It.IsAny<DateTime>())).Returns(0); var service = new OrderService(mockTradeUow.Object, mockFixService.Object, mockUserService.Object, mockCurrencyService.Object, mockAppSettingService.Object, mockEmailService.Object, mockConfig.Object, mockClientCompanyService.Object); var orderModel = new OrderModel { ClientAmount = 1000, ClientRate = 1.5m, RhsCcy = "GBP", LhsCcy = "EUR", ValueDate = DateTime.Now }; OrderRequestModel orderRequest = new OrderRequestModel { AuthUserId = 1, ClientCompanyId = 1, OrderModels = new List<OrderModel> { orderModel } }; //Act var taskOutput = service.ExecuteOrdersAsync(orderRequest); var isCompleted = taskOutput.IsCompletedSuccessfully; //Assert Assert.True(isCompleted); Assert.NotNull(taskOutput); Assert.True(taskOutput.Result[0].IsSuccessful); } [Fact(Skip = "To be reviewed after FIX service update")] public void GetOpenOrders() { DateTime createdDate = new DateTime(2018, 8, 10); DateTime valueDate = new DateTime(2018, 8, 20); DateTime validityDate = new DateTime(2018, 9, 1); var trades = new List<FxforwardTrade>(); Currency currencyEUR = new Currency { Code = "EUR" }; Currency currencGBP = new Currency { Code = "GBP" }; const string TradeCode1 = "ARG0067-0001"; const string TradeCode2 = "ARG0067-0002"; 
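// Two sample open forward trades on the same GBP/EUR pair and dates: the first is a buy, the second a sell,
// so the buy/sell amount and currency mapping can be asserted from both sides below.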
trades.Add(new FxforwardTrade() { Code = TradeCode1, CreatedDate = createdDate, ValueDate = valueDate, OpenValueDate = validityDate, Rhsccy = currencyEUR, Lhsccy = currencGBP, IsBuy = true, ClientRhsamt = 1000, ClientLhsamt = 1200, ClientRate = 1.2m }); trades.Add(new FxforwardTrade() { Code = TradeCode2, CreatedDate = createdDate, ValueDate = valueDate, OpenValueDate = validityDate, Rhsccy = currencyEUR, Lhsccy = currencGBP, IsBuy = false, ClientRhsamt = 2000, ClientLhsamt = 2200, ClientRate = 1.5m }); var mockTradeUow = new Mock<ITradeUow>(); mockTradeUow.Setup(uow => uow.GetOpenOrders(It.IsAny<int>())).Returns(trades.AsQueryable); var mockFixService = new Mock<IBarxFxService>(); var mockUserService = new Mock<IUserService>(); var mockCurrencyService = new Mock<ICurrencyService>(); var mockAppSettingService = new Mock<IAppSettingService>(); var mockEmailService = new Mock<IEmailService>(); var mockConfig = new Mock<IConfigWrapper>(); var service = new OrderService(mockTradeUow.Object, mockFixService.Object, mockUserService.Object, mockCurrencyService.Object, mockAppSettingService.Object, mockEmailService.Object, mockConfig.Object, null); var list = service.GetOpenOrders(1); Assert.NotNull(list); Assert.IsType<List<Models.Trade.TradeModel>>(list); Assert.Equal(2, list.Count); Models.Trade.TradeModel tradeModel = list[0]; Assert.Equal(TradeCode1, tradeModel.TradeId); Assert.Equal(1.2m, tradeModel.ClientRate); Assert.Equal(createdDate, tradeModel.CreatedDate); Assert.Equal(valueDate, tradeModel.ValueDate); Assert.Equal(validityDate, tradeModel.ValidityDate); Assert.Equal(1000, tradeModel.ClientSellAmount); Assert.Equal("EUR", tradeModel.SellCcy); Assert.Equal(1200, tradeModel.ClientBuyAmount); Assert.Equal("GBP", tradeModel.BuyCcy); tradeModel = list[1]; Assert.Equal(TradeCode2, tradeModel.TradeId); Assert.Equal(1.5m, tradeModel.ClientRate); Assert.Equal(createdDate, tradeModel.CreatedDate); Assert.Equal(valueDate, tradeModel.ValueDate); Assert.Equal(validityDate, tradeModel.ValidityDate); Assert.Equal(2200, tradeModel.ClientSellAmount); Assert.Equal("GBP", tradeModel.SellCcy); Assert.Equal(2000, tradeModel.ClientBuyAmount); Assert.Equal("EUR", tradeModel.BuyCcy); } [Fact] public void GetOpenOrders_No_Trades() { var trades = new List<FxforwardTrade>(); var mockTradeUow = new Mock<ITradeUow>(); mockTradeUow.Setup(uow => uow.GetOpenOrders(It.IsAny<int>())).Returns(trades.AsQueryable); var mockFixService = new Mock<IBarxFxService>(); var mockUserService = new Mock<IUserService>(); var mockCurrencyService = new Mock<ICurrencyService>(); var mockAppSettingService = new Mock<IAppSettingService>(); var mockEmailService = new Mock<IEmailService>(); var mockConfig = new Mock<IConfigWrapper>(); var service = new OrderService(mockTradeUow.Object, mockFixService.Object, mockUserService.Object, mockCurrencyService.Object, mockAppSettingService.Object, mockEmailService.Object, mockConfig.Object, null); var list = service.GetOpenOrders(1); Assert.NotNull(list); Assert.Equal(0, list.Count); } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FixQuote.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FixQuote { public string QuoteId { get; set; } public string TradeId { get; set; } public bool Cancelled { get; set; } public FxforwardTrade Trade { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Trades/TradeModel.cs using System; namespace Argentex.Core.Service.Models.Trade { public class 
TradeModel { public string TradeId { get; set; } public DateTime? ContractDate { get; set; } public DateTime? ValueDate { get; set; } public decimal? ClientRate { get; set; } public decimal Balance { get; set; } public string SellCcy { get; set; } public string BuyCcy { get; set; } public decimal? ClientBuyAmount { get; set; } public decimal? ClientSellAmount { get; set; } public bool IsFullPayment { get; set; } public string Reference { get; set; } public string Status { get; set; } public bool PayToDefaultOPI { get; set; } public DateTime? CreatedDate { get; set; } public DateTime? ValidityDate { get; set; } public string MajorCcy { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Order/OrderController.cs using Argentex.Core.Service.Models.Order; using Argentex.Core.Service.Order; using Microsoft.AspNetCore.Mvc; using System.Threading.Tasks; namespace Argentex.Core.Api.Controllers.Order { [Produces("application/json")] [Route("api/order")] public class OrderController : Controller { private readonly IOrderService _orderService; public OrderController(IOrderService orderService) { _orderService = orderService; } [HttpPost] [Route("execute")] public async Task<IActionResult> ExecuteOrdersAsync([FromBody] OrderRequestModel model) { if (!ModelState.IsValid) { return BadRequest(ModelState); } var orderResults = await _orderService.ExecuteOrdersAsync(model); return Ok(orderResults); } [HttpGet] [Route("open-orders/{clientCompanyId:int}")] public IActionResult GetOpenOrders(int clientCompanyId) { return Ok(_orderService.GetOpenOrders(clientCompanyId)); } [HttpPost] [Route("cancel")] public async Task<IActionResult> CancelOrderAsync([FromBody] string trackCode) { if (!ModelState.IsValid || string.IsNullOrEmpty(trackCode)) { return BadRequest(ModelState); } return Ok(await _orderService.CancelOrderAsync(trackCode)); } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Identity/Services/IIdentityService.cs using Argentex.Core.Service.Models.Identity; using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.IdentityModel.Tokens.Jwt; using System.Security.Claims; using System.Threading.Tasks; namespace Argentex.Core.Service.Identity.Services { public interface IIdentityService : IDisposable { Task<TokenModel> AuthenticateAsync(LoginServiceModel login); Task<TokenModel> RefreshToken(RefreshTokenModel login); JwtSecurityToken BuildToken(string tokenSubject, bool tokenCanExpire, IEnumerable<Claim> additionalClaims = null); Task<IdentityResult> ChangePasswordAsync(string userId, string oldPassword, string newPassword, string confirmPassword); Task<IdentityResult> ResetPasswordAsync(string email, string code, string password); Task<bool> VerifyUserToken(string userName, string tokenCode); Task<string> GenerateEmailConfirmationTokenAsync(ApplicationServiceUser user); Task SetUserAsAdmin(string username); Task LogoutAsync(string identityUserName, string refreshToken); Task<string> GetUserPhoneNumber(string username); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationField.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftvalidationField { public SwiftvalidationField() { SwiftvalidationFieldFieldComponent = new HashSet<SwiftvalidationFieldFieldComponent>(); SwiftvalidationOptionField = new HashSet<SwiftvalidationOptionField>(); } public int Id { get; set; } public string Tag { get; set; } public string Description { 
get; set; } public int? PaymentTypeId { get; set; } public PaymentType PaymentType { get; set; } public ICollection<SwiftvalidationFieldFieldComponent> SwiftvalidationFieldFieldComponent { get; set; } public ICollection<SwiftvalidationOptionField> SwiftvalidationOptionField { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Emirreport.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Emirreport { public Emirreport() { EmirreportFxforwardTrade = new HashSet<EmirreportFxforwardTrade>(); EmirreportIncomingFile = new HashSet<EmirreportIncomingFile>(); } public int Id { get; set; } public DateTime CreatedDateTime { get; set; } public int? EmirreportOutgoingFileId { get; set; } public EmirreportOutgoingFile EmirreportOutgoingFile { get; set; } public ICollection<EmirreportFxforwardTrade> EmirreportFxforwardTrade { get; set; } public ICollection<EmirreportIncomingFile> EmirreportIncomingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/SmsSender/Providers/TextMagic/ITextMagicService.cs using Argentex.Core.Service.Sms.Models; using System; namespace Argentex.Core.Service.Sms.SmsSender { public interface ITextMagicService : IDisposable { bool SendMessage(SmsModel smsModel); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyContactCategory.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyContactCategory { public long LogId { get; set; } public string LogAction { get; set; } public int ClientCompanyContactId { get; set; } public int ContactCategoryId { get; set; } public DateTime DateCreated { get; set; } public int CreatedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxforwardTrade.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxforwardTrade { public FxforwardTrade() { ArmfxForwardTradeStatusesHistory = new HashSet<ArmfxForwardTradeStatusesHistory>(); ArmreportFxforwardTrade = new HashSet<ArmreportFxforwardTrade>(); BankAccountTransaction = new HashSet<BankAccountTransaction>(); Breach = new HashSet<Breach>(); ClientCompany = new HashSet<ClientCompany>(); ClientSiteAction2FixFxforwardTrade = new HashSet<ClientSiteAction2FixFxforwardTrade>(); EmirreportFxforwardTrade = new HashSet<EmirreportFxforwardTrade>(); FixApatradeCapture = new HashSet<FixApatradeCapture>(); FixApatradeMessage = new HashSet<FixApatradeMessage>(); FixFxforwardTradeOrder = new HashSet<FixFxforwardTradeOrder>(); FxforwardTrade2Opi = new HashSet<FxforwardTrade2Opi>(); FxforwardTradeInvoice = new HashSet<FxforwardTradeInvoice>(); FxswapDeliveryLegTradeCodeNavigation = new HashSet<Fxswap>(); FxswapParentTradeCodeNavigation = new HashSet<Fxswap>(); FxswapReversalLegTradeCodeNavigation = new HashSet<Fxswap>(); Payment = new HashSet<Payment>(); VirtualAccountTransaction = new HashSet<VirtualAccountTransaction>(); } public string Code { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime CreatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime UpdatedDate { get; set; } public int UpdatedByAuthUserId { get; set; } public int ClientCompanyId { get; set; } public int? AuthorisedByClientCompanyContactId { get; set; } public int? TradeInstructionMethodId { get; set; } public int? 
BrokerId { get; set; } public bool Verified { get; set; } public int? VerifiedByAuthUserId { get; set; } public DateTime? ContractDate { get; set; } public DateTime? ValueDate { get; set; } public bool? IsOrder { get; set; } public string CurrencyPair { get; set; } public bool IsBuy { get; set; } public int? Lhsccyid { get; set; } public int? Rhsccyid { get; set; } public decimal? ClientRate { get; set; } public decimal? ClientLhsamt { get; set; } public decimal? ClientRhsamt { get; set; } public decimal? BrokerRate { get; set; } public decimal? BrokerLhsamt { get; set; } public decimal? BrokerRhsamt { get; set; } public decimal? CollateralPerc { get; set; } public int FxforwardTradeStatusId { get; set; } public bool? IsRhsmajor { get; set; } public decimal? Profit { get; set; } public bool ProfitConsolidated { get; set; } public bool Deleted { get; set; } public int? TransactionCommitId { get; set; } public int? ClientCompanyOpiid { get; set; } public decimal? ProfitConsolidatedValue { get; set; } public DateTime? ProfitConsolidatedDateTime { get; set; } public DateTime? SettledDate { get; set; } public DateTime? DeliveredDate { get; set; } public DateTime? CommPaidOutDate { get; set; } public string Notes { get; set; } public decimal? ProfitGbprate { get; set; } public decimal? PrevailingRate2 { get; set; } public string EmirUti { get; set; } public bool EmirReported { get; set; } public DateTime? EmirReportedDateTime { get; set; } public string Reference { get; set; } public decimal? RemainingClientLhsamt { get; set; } public decimal? RemainingClientRhsamt { get; set; } public decimal? MarkToMarketValue { get; set; } public decimal? BrokenDatePrice { get; set; } public DateTime? MarkToMarketValueUpdatedDateTime { get; set; } public bool? IsComplianceSupported { get; set; } public bool? IsComplianceRegulated { get; set; } public int? ComplianceTradeReasonId { get; set; } public bool? IsEmirreportable { get; set; } public int? EmirstatusId { get; set; } public DateTime? EmirstatusUpdatedDateTime { get; set; } public bool EmirDelegatedReported { get; set; } public DateTime? FilledDateTime { get; set; } public DateTime? ContractNoteSentToClientDateTime { get; set; } public DateTime? ContractNoteSentToMyselfDateTime { get; set; } public string ComplianceIsin { get; set; } public string EmirsubmissionId { get; set; } public string EmirdelegatedSubmissionId { get; set; } public decimal? PrevDayMarktoMarket { get; set; } public DateTime? PrevDayMarktoMarketUpdatedDateTime { get; set; } public bool? IsApareportable { get; set; } public int? ApastatusId { get; set; } public DateTime? ApastatusUpdatedDateTime { get; set; } public bool? IsArmreportable { get; set; } public bool Armreported { get; set; } public DateTime? ArmstatusUpdatedDateTime { get; set; } public DateTime? BrokeredDate { get; set; } public int? ArmstatusId { get; set; } public string MarketSideUti { get; set; } public decimal? BdpforwardPoints { get; set; } public int? FilledByAuthUserId { get; set; } public bool FixNewOrder { get; set; } public int? OpiupdatedByAuthUserId { get; set; } public DateTime? OpiupdatedDateTime { get; set; } public int? BrokeredByAuthUserId { get; set; } public DateTime? 
OpenValueDate { get; set; } public Emirstatus Apastatus { get; set; } public Emirstatus Armstatus { get; set; } public ClientCompanyContact AuthorisedByClientCompanyContact { get; set; } public Broker Broker { get; set; } public AuthUser BrokeredByAuthUser { get; set; } public ClientCompany ClientCompanyNavigation { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } public ComplianceTradeReason ComplianceTradeReason { get; set; } public AuthUser CreatedByAuthUser { get; set; } public Emirstatus Emirstatus { get; set; } public AuthUser FilledByAuthUser { get; set; } public FxforwardTradeStatus FxforwardTradeStatus { get; set; } public Currency Lhsccy { get; set; } public AuthUser OpiupdatedByAuthUser { get; set; } public Currency Rhsccy { get; set; } public TradeInstructionMethod TradeInstructionMethod { get; set; } public TransactionCommit TransactionCommit { get; set; } public AuthUser UpdatedByAuthUser { get; set; } public AuthUser VerifiedByAuthUser { get; set; } public ICollection<ArmfxForwardTradeStatusesHistory> ArmfxForwardTradeStatusesHistory { get; set; } public ICollection<ArmreportFxforwardTrade> ArmreportFxforwardTrade { get; set; } public ICollection<BankAccountTransaction> BankAccountTransaction { get; set; } public ICollection<Breach> Breach { get; set; } public ICollection<ClientCompany> ClientCompany { get; set; } public ICollection<ClientSiteAction2FixFxforwardTrade> ClientSiteAction2FixFxforwardTrade { get; set; } public ICollection<EmirreportFxforwardTrade> EmirreportFxforwardTrade { get; set; } public ICollection<FixApatradeCapture> FixApatradeCapture { get; set; } public ICollection<FixApatradeMessage> FixApatradeMessage { get; set; } public ICollection<FixFxforwardTradeOrder> FixFxforwardTradeOrder { get; set; } public ICollection<FxforwardTrade2Opi> FxforwardTrade2Opi { get; set; } public ICollection<FxforwardTradeInvoice> FxforwardTradeInvoice { get; set; } public ICollection<Fxswap> FxswapDeliveryLegTradeCodeNavigation { get; set; } public ICollection<Fxswap> FxswapParentTradeCodeNavigation { get; set; } public ICollection<Fxswap> FxswapReversalLegTradeCodeNavigation { get; set; } public ICollection<Payment> Payment { get; set; } public ICollection<VirtualAccountTransaction> VirtualAccountTransaction { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Notification/INotificationService.cs using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Order; using System.Collections.Generic; using System.Threading.Tasks; namespace Argentex.Core.Service.Order { public interface INotificationService { // v1 IList<AppUserNotificationModel> GetCompanyUsers(int clientCompanyID); bool SaveAppUserNotification(AppUserNotificationModel model); //// v2 //IList<NotificationTypeModel> GetNotificationTypes(); //IList<AppUserNotificationTypeModel> GetCompanyNotifications(int clientCompanyID); //bool SaveUserNotification(AppUserNotificationTypeModel model); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Settlements/AssignSettlementModel.cs using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Settlements { public class AssignSettlementModel { public long SettlementId { get; set; } public string TradedCurrency { get; set; } [Required] public AccountModel Account { get; set; } [Required] public decimal Amount { get; set; } [Required] public string ValueDate { get; set; } //[Required] //public string TradeCode { get; set; } public string Reference { get; set; } public bool 
IsPayTotal { get; set; } public int Status { get; set; } public bool IsWarning { get; set; } public string WarningMessage { get; set; } } public class AccountModel { public int clientCompanyOpiId { get; set; } public int clientCompanyId { get; set; } public string currency { get; set; } public string accountName { get; set; } public string accountNumber { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthUserPasswordToken.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthUserPasswordToken { public int AuthUserId { get; set; } public string Token { get; set; } public DateTime CreatedDateTime { get; set; } public DateTime? ExpiryDateTime { get; set; } public int IsExpired { get; set; } public AuthUser AuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Notification/NotificationService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.Identity; using Argentex.Core.Service.Models.Order; using Argentex.Core.UnitsOfWork.Notifications; using Argentex.Core.UnitsOfWork.Users; using System.Collections.Generic; using System.Linq; namespace Argentex.Core.Service.Order { public class NotificationService : INotificationService { #region Constructor and Properties private readonly INotificationUow _notificationUow; private readonly IUserUow _userUow; public NotificationService(INotificationUow notificationUow, IUserUow userUow) { _notificationUow = notificationUow; _userUow = userUow; } #endregion public bool SaveAppUserNotification(AppUserNotificationModel model) { var saveItem = new AppUserNotification() { Id = model.ID, AppUserId = (int)model.AppUserID, ClientCompanyId = model.ClientCompanyID, InwardPaymentNotifications = model.InwardPaymentNotifications, OutwardPaymentNotifications = model.OutwardPaymentNotifications, SettlementRequests = model.SettlementRequests, TradeNotifications = model.TradeNotifications, }; return _notificationUow.SaveAppUserNotification(saveItem); } public IList<AppUserNotificationModel> GetCompanyUsers(int clientCompanyID) { var applicationServiceUserList = _userUow.GetUsersByCompanyId(clientCompanyID) .Select(x => new ApplicationServiceUser { Id = x.AuthUserId, Title = x.Title, Forename = x.Forename, Surname = x.Surname, Username = x.UserName, Email = x.Email, ClientCompanyId = x.ClientCompanyId, UpdatedByAuthUserId = x.UpdatedByAuthUserId, Position = x.Position, PhoneNumberDirect = x.PhoneNumber, PhoneNumberMobile = x.PhoneNumberMobile, Birthday = x.Birthday.ToString(), IsApproved = x.IsApproved, IsAdmin = x.IsAdmin, IsSignatory = x.IsSignatory, IsAuthorisedSignatory = x.IsAuthorisedSignatory }); var appUserNotification = _notificationUow.GetCompanyAppUserNotification(clientCompanyID) .Select(elem => new AppUserNotificationModel { ID = elem.Id, ClientCompanyID = elem.ClientCompanyId, AppUserID = elem.AppUserId, TradeNotifications = elem.TradeNotifications, InwardPaymentNotifications = elem.InwardPaymentNotifications, OutwardPaymentNotifications = elem.OutwardPaymentNotifications, SettlementRequests = elem.SettlementRequests }).ToList<AppUserNotificationModel>(); var result = applicationServiceUserList.GroupJoin(appUserNotification, user => user.Id, notification => notification.AppUserID, (user, notification) => new AppUserNotificationModel { ID = notification.SingleOrDefault() == null ? 
0 : notification.SingleOrDefault().ID, ClientCompanyID = user.ClientCompanyId, AppUserID = user.Id, TradeNotifications = notification.SingleOrDefault() == null ? false : notification.SingleOrDefault().TradeNotifications, InwardPaymentNotifications = notification.SingleOrDefault() == null ? false : notification.SingleOrDefault().InwardPaymentNotifications, OutwardPaymentNotifications = notification.SingleOrDefault() == null ? false : notification.SingleOrDefault().OutwardPaymentNotifications, SettlementRequests = notification.SingleOrDefault() == null ? false : notification.SingleOrDefault().SettlementRequests, Forename = user.Forename, Surname = user.Surname, Username = user.Username, Email = user.Email, Position = user.Position, PhoneNumberDirect = user.PhoneNumberDirect, PhoneNumberMobile = user.PhoneNumberMobile, IsApproved = user.IsApproved, IsAdmin = user.IsAdmin }).ToList(); return result; } #region V2 //public IList<NotificationTypeModel> GetNotificationTypes() //{ // var result = new List<NotificationTypeModel>(); // result = _notificationUow.GetNotificationTypes() // .Select(elem => new NotificationTypeModel // { // ID = elem.ID, // Name = elem.Name // }).ToList<NotificationTypeModel>(); // return result; //} //public IList<AppUserNotificationTypeModel> GetCompanyNotifications(int clientCompanyID) //{ // var result = new List<AppUserNotificationTypeModel>(); // result = _notificationUow.GetCompanyNotifications(clientCompanyID) // .Select(elem => new AppUserNotificationTypeModel // { // ID = elem.ID, // ClientCompanyID = elem.ClientCompanyID, // AppUserID = elem.AppUserID, // NotificationTypeID = elem.NotificationTypeID // }).ToList<AppUserNotificationTypeModel>(); // return result; //} //public bool SaveUserNotification(AppUserNotificationTypeModel model) //{ // var saveItem = new AppUserNotificationType() // { // ID = model.ID, // AppUserID = model.AppUserID, // ClientCompanyID = model.ClientCompanyID, // NotificationTypeID = model.NotificationTypeID // }; // return _notificationUow.SaveUserNotification(saveItem); //} #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyVirtualAccount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyVirtualAccount { public ClientCompanyVirtualAccount() { ClientCompanyVirtualAccountCurrencyBalance = new HashSet<ClientCompanyVirtualAccountCurrencyBalance>(); ClientCompanyVirtualAccountCurrencyBalanceHistory = new HashSet<ClientCompanyVirtualAccountCurrencyBalanceHistory>(); VirtualAccountTransaction = new HashSet<VirtualAccountTransaction>(); } public int Id { get; set; } public int VirtualAccountTypeId { get; set; } public int ClientCompanyId { get; set; } public byte[] UpdateTimeStamp { get; set; } public ClientCompany ClientCompany { get; set; } public VirtualAccountType VirtualAccountType { get; set; } public ICollection<ClientCompanyVirtualAccountCurrencyBalance> ClientCompanyVirtualAccountCurrencyBalance { get; set; } public ICollection<ClientCompanyVirtualAccountCurrencyBalanceHistory> ClientCompanyVirtualAccountCurrencyBalanceHistory { get; set; } public ICollection<VirtualAccountTransaction> VirtualAccountTransaction { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClearingCodePrefix/ClearingCodePrefixModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.ClearingCodePrefix { public class ClearingCodePrefixModel { public int 
Id { get; internal set; } public string Code { get; internal set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Trades/ITradeUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using System.Data; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.UnitsOfWork.Trades { public interface ITradeUow : IBaseUow { DataTable GetUnsettledTrades(int clientCompanyId); IQueryable<Currency> GetCurrencies(); IQueryable<CurrencyPairValidation> GetCurrencyPairValidation(); bool ExecuteOrder(FxforwardTrade trade, ClientCompanyTradeCount tradeCountObject); bool CreateDeal(FxforwardTrade trade, ClientCompanyTradeCount tradeCountObject); bool BrokerDeal(FxforwardTrade trade, ClientCompanyTradeCount tradeCountObject); void RejectOrder(FxforwardTrade trade); ClientCompanyTradeCount GetTradeCountByPrimaryKey(int clientCompanyId); IQueryable<FxforwardTrade> GetTrade(string tradeCode); IQueryable<ClientCompanyTradeCount> GetClientCompanyTradeCount(int clientCompanyId); DataTable GetClosedTrades(int clientCompanyId); IQueryable<FxforwardTrade> GetOpenOrders(int clientCompanyId); IQueryable<FxforwardTrade> GetExpiredValidityOrders(); void UpdateTrade(FxforwardTrade trade); FxforwardTrade GetTrade(string tradeCode, bool getAdditionalProperties); FxforwardTradeStatus GetFxForwardStatus(string statusDescription); Emirstatus GetEmirStatus (string emirStatusDescription); TradeInstructionMethod GetTradeInstructionMethod(string tradeInstructionMethod); Broker GetBroker(string brokerDescription); Task<bool> CancelOrder(string code); DataTable GetUnsettledTradesForBalanceCalculation(int clientCompanyId); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceIsincurrencyValueDate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceIsincurrencyValueDate { public int Id { get; set; } public string Isin { get; set; } public string CurrencyPair { get; set; } public DateTime ValueDate { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Monitoring/IMonitoringService.cs using System; using System.Threading.Tasks; using Argentex.Core.Service.Models.Trades; namespace Argentex.Core.Service.Monitoring { public interface IMonitoringService : IDisposable { Task<bool> NotifyTradeStarted(int authUserId); Task<bool> CheckExecuteTrade(TradeNotificationModel model); Task RefreshClientDetails(); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PendingApprovalUserChangeRequest.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.DataAccess.Entities { public class PendingApprovalUserChangeRequest { public int UserChangeRequestID { get; set; } public int AuthUserID { get; set; } public string AuthUserName { get; set; } public string Forename { get; set; } public string Surname { get; set; } public string Company { get; set; } public string AuthApplicationDescription { get; set; } public string CurrentValue { get; set; } public string ProposedValue { get; set; } public string ChangeValueType { get; set; } public DateTime ChangeDateTime { get; set; } public int ChangedByAuthUserID { get; set; } public string ChangedByAuthUserName { get; set; } public string ChangeStatus { get; set; } public int? 
ApprovalsRequired { get; set; } public string ApprovedBy { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/TradeInstructionMethods.cs namespace Argentex.Core.Service.Enums { public enum TradeInstructionMethods { Telephone = 1, Email = 2, Bloomberg = 3, Fax = 4, Online = 5 } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogBreach.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogBreach { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int BreachTypeId { get; set; } public int BreachLevelId { get; set; } public string TradeCode { get; set; } public int? ClientCompanyOpiid { get; set; } public string OriginalLimit { get; set; } public string OverrideValue { get; set; } public string Notes { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public int? PaymentId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/Activity.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class Activity { [Key] [DatabaseGenerated(DatabaseGeneratedOption.Identity)] public int ActivityId { get; set; } [Required] [MaxLength(256)] public string Type { get; set; } public ICollection<ActivityLog> ActivityLogs { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/ApplicationUserRole.cs using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class ApplicationUserRole : IdentityUserRole<long> { } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOpitransaction.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOpitransaction { public int Id { get; set; } public int ClientCompanyOpiid { get; set; } public int CurrencyId { get; set; } public decimal? Amount { get; set; } public int PaymentId { get; set; } public byte[] UpdateTimeStamp { get; set; } public string Opireference { get; set; } public string Opidescription { get; set; } public string OpiaccountName { get; set; } public string OpiaccountNumber { get; set; } public string OpibankName { get; set; } public string OpibankAddress { get; set; } public string OpisortCode { get; set; } public string OpiswiftCode { get; set; } public string Opiiban { get; set; } public string OpibeneficiaryName { get; set; } public string OpibeneficiaryAddress { get; set; } public bool OpidetailsUpdated { get; set; } public int? 
OpicountryId { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } public Currency Currency { get; set; } public Country Opicountry { get; set; } public Payment Payment { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxforwardTrade2Opi.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxforwardTrade2Opi { public FxforwardTrade2Opi() { ClientSiteAction2FxforwardTrade2Opi = new HashSet<ClientSiteAction2FxforwardTrade2Opi>(); } public long Id { get; set; } public string FxforwardTradeCode { get; set; } public int ClientCompanyOpiid { get; set; } public decimal Amount { get; set; } public DateTime TradeValueDate { get; set; } public string Details { get; set; } public DateTime CreatedDateTime { get; set; } public int CreatedByAuthUserId { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } public AuthUser CreatedByAuthUser { get; set; } public FxforwardTrade FxforwardTradeCodeNavigation { get; set; } public ICollection<ClientSiteAction2FxforwardTrade2Opi> ClientSiteAction2FxforwardTrade2Opi { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Email/FailedFIXTradeModel.cs using System; namespace Argentex.Core.Service.Models.Email { public class FailedFIXTradeModel { public DataAccess.Entities.ClientCompany ClientCompany { get; set; } public string TradeCode { get; set; } public string CurrencyPair { get; set; } public DateTime ValueDate { get; set; } public string SellCcy { get; set; } public string BuyCcy { get; set; } public double Rate { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/SynetecLogger/NLog/NLogWrapper.cs using NLog; using NLog.Web; using System; using System.Collections.Generic; using System.IO; using System.Text; namespace SynetecLogger { /// <summary> /// Wrapper for nlog logger. Please implement methods from nlog should you need them. /// </summary> public class NLogWrapper : ILogWrapper { private readonly ILogger _logger; public NLogWrapper(string connectionString) { Directory.SetCurrentDirectory(System.AppDomain.CurrentDomain.BaseDirectory); _logger = NLogBuilder.ConfigureNLog(Path.Combine(Directory.GetCurrentDirectory(), "nlog.config")).GetCurrentClassLogger(); //LogManager.Configuration.Variables["connectionString"] = connectionString; GlobalDiagnosticsContext.Set("connectionString", connectionString); } /// <summary> /// Logs to database depending on connection passed in. /// Please see nlog.config database configuration for table structure. (FXDBLogDB1 is an example.) 
/// </summary> /// <param name="ex"></param> public void Error(Exception ex) { _logger.Error(ex, ex.Message); } public void Info(string info) { _logger.Info(info); } public void Fatal(Exception ex) { _logger.Fatal(ex, ex.Message); } public void Debug(string message) { _logger.Debug(message); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactListResponseModel.cs using System.Collections.Generic; namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompanyContactListResponseModel { public IDictionary<string, string[]> ResponseMessages { get; set; } public bool Succeeded { get; set; } public IList<ClientCompanyContactList> CompanyContactListModel { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/Trade/TradeControllerTests.cs using Argentex.Core.Api.Controllers.Trade; using Argentex.Core.Service.Models.Trade; using Argentex.Core.Service.Models.Trades; using Argentex.Core.Service.Trade; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System.Collections.Generic; using System.Net; using Xunit; namespace Argentex.Core.Api.Tests.Trade { public class TradeControllerTests { [Fact] public void GetUnsettledTrades_Success_With_Valid_ClientCompanyId_Input() { //Arrange var mockObject = new TradeModel(); var mockList = new List<TradeModel>(); mockList.Add(mockObject); var mockService = new Mock<ITradeService>(); mockService.Setup(x => x.GetUnsettledTrades(1)) .Returns(mockList); var controller = new TradeController(mockService.Object, null); //Act var result = controller.GetUnsettledTrades(1); //Assert Assert.IsType<OkObjectResult>(result); } [Fact] public void Given_A_Trade_Model_Is_Returned_An_Ok_Object_Result_Should_Be_Returned() { // Given var tradeCode = "Trade 42"; var tradeServiceMock = new Mock<ITradeService>(); var loggerMock = new Mock<ILogWrapper>(); tradeServiceMock.Setup(x => x.GetTradeInformation(It.IsAny<string>())) .Returns(new FxForwardTradeInformationModel()); var controller = new TradeController(tradeServiceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; // When var response = controller.GetTradeInformation(tradeCode); var result = response as OkObjectResult; // Then Assert.NotNull(result); Assert.Equal((int)expectedStatusCode, result.StatusCode); var content = result.Value as FxForwardTradeInformationModel; Assert.NotNull(content); } [Fact] public void GetClosedTrades_Success_With_Valid_ClientCompanyId_Input() { //Arrange var mockObject = new TradeModel(); var mockList = new List<TradeModel>(); mockList.Add(mockObject); var mockService = new Mock<ITradeService>(); mockService.Setup(x => x.GetClosedTrades(1)).Returns(mockList); var controller = new TradeController(mockService.Object, null); //Act var result = controller.GetClosedTrades(1); //Assert Assert.IsType<OkObjectResult>(result); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Filters/GlobalExceptionFilter.cs using Argentex.Core.Api.Filters.Models; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc.Filters; using SynetecLogger; using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Threading.Tasks; namespace Argentex.Core.Api.Filters { public class GlobalExceptionFilter : IExceptionFilter { private readonly ILogWrapper _logger; public GlobalExceptionFilter(ILogWrapper logger) { if (logger == null) { throw new ArgumentNullException(nameof(logger)); } _logger = logger; } public void OnException(ExceptionContext 
context) { var response = new ErrorResponse() { Data = context.Exception.Message //temp }; context.Result = new ObjectResult(response) { StatusCode = (int)HttpStatusCode.InternalServerError, DeclaredType = typeof(ErrorResponse) }; _logger.Error(context.Exception); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyOnlineDetails.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyOnlineDetails { public long LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int ClientCompanyId { get; set; } public bool AllowOnlineTrading { get; set; } public decimal? MaxTradeSize { get; set; } public decimal? MaxOpen { get; set; } public DateTime? MaxTenor { get; set; } public decimal? Collateral { get; set; } public int? SpotSpread { get; set; } public int? FwdSpread { get; set; } public bool? Kicked { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FixApareportField.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FixApareportField { public int Id { get; set; } public string Name { get; set; } public string Description { get; set; } public int Tag { get; set; } public int? GroupTag { get; set; } public int? GroupNo { get; set; } public string Value { get; set; } public string AppSettingKey { get; set; } public bool IsBlank { get; set; } public bool? IsActive { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.SignalRService/MonitoringHub.cs using Microsoft.AspNetCore.SignalR; using System; using System.Threading.Tasks; namespace Argentex.Core.SignalRService { public class MonitoringHub : Hub, IMonitoringHub { private readonly IHubContext<MonitoringHub> _context; private readonly ITraderActionsHub _actionHub; public MonitoringHub(IHubContext<MonitoringHub> context, ITraderActionsHub actionHub) { _context = context; _actionHub = actionHub; } public override async Task OnConnectedAsync() { await Groups.AddToGroupAsync(Context.ConnectionId, "SignalR Users"); await base.OnConnectedAsync(); } public override async Task OnDisconnectedAsync(Exception exception) { await Groups.RemoveFromGroupAsync(Context.ConnectionId, "SignalR Users"); await base.OnDisconnectedAsync(exception); } public async Task TradingStarted(string user) { await _context.Clients.All.SendAsync("TradingStarted", user); } public async Task CheckExecuteTrade(string model) { await _context.Clients.All.SendAsync("CheckExecuteTrade", model); } public async Task RefreshClientDetails() { await _context.Clients.All.SendAsync("RefreshClientDetails"); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/PasswordAlreadyUsedException.cs using System; namespace Argentex.Core.Service.Exceptions { public class PasswordAlreadyUsedException : Exception { public PasswordAlreadyUsedException() : base() { } public PasswordAlreadyUsedException(string message) : base(message) { } public PasswordAlreadyUsedException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientSiteAction/CSAOPIAssignedToTradesDisplayModel.cs using System; namespace Argentex.Core.Service.Models.ClientSiteAction { public class CSAOPIsAssignedToTradesDisplayModel { public string 
CompanyName { get; set; }
        public int CompanyID { get; set; }
        public string TradeCode { get; set; }
        public string OPIName { get; set; }
        public decimal Amount { get; set; }
        public string CreatedByClientName { get; set; }
        public DateTime CreatedDateTime { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Validation/Attributes/RequiredIfAttribute.cs
using System;
using System.ComponentModel.DataAnnotations;

namespace Argentex.Core.Api.Validation.Attributes
{
    public class RequiredIfAttribute: RequiredAttribute
    {
        private string PropertyName { get; } //This is the property we're going to be checking the desired value against
        private object DesiredValue { get; } //If the property we're checking matches this desired value, we then do the required validation on the property

        public RequiredIfAttribute(string propertyName, object desiredvalue)
        {
            PropertyName = propertyName;
            DesiredValue = desiredvalue;
        }

        protected override ValidationResult IsValid(object value, ValidationContext context)
        {
            var property = context.ObjectInstance.GetType().GetProperty(PropertyName); //Get the specified property belonging to the object we're validating

            if (property == null) //simple null check for easy debugging
                throw new ArgumentException($"Unable to get Property {PropertyName} to check if required", nameof(PropertyName));

            var propertyValue = property.GetValue(context.ObjectInstance, null); //getting the value from the property on the object

            if (propertyValue.ToString() == DesiredValue.ToString())
                return base.IsValid(value, context); //if the object matches the property we expect, use the default RequiredAttribute behaviour

            return ValidationResult.Success; //if the value doesn't match what we're expecting, we don't validate - so it's a success
        }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Email/EmailService.cs
using Argentex.Core.DataAccess.Entities;
using Argentex.Core.Identity.DataAccess;
using Argentex.Core.Service.Email.EmailSender;
using Argentex.Core.Service.Helpers;
using Argentex.Core.Service.Identity;
using Argentex.Core.Service.Identity.Services;
using Argentex.Core.Service.Models.Email;
using Argentex.Core.Service.Models.Trades;
using Argentex.Core.UnitsOfWork.Trades;
using Argentex.Core.UnitsOfWork.Users;
using Microsoft.AspNetCore.Identity;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Argentex.Core.Service
{
    public class EmailService : IEmailService
    {
        private readonly IEmailSender _emailSender;
        private readonly IConfigWrapper _config;
        private readonly IUserUow _userUow;
        private readonly ITradeUow _tradeUow;

        public EmailService(IEmailSender emailSender,
            IConfigWrapper config,
            IUserUow userUow,
            ITradeUow tradeUow )
        {
            _emailSender = emailSender;
            _config = config;
            _userUow = userUow;
            _tradeUow = tradeUow;
        }

        #region User Emails

        /// <summary>
        /// Sends the initial set-password email to a newly created client site user.
        /// </summary>
        /// <param name="userName">Username of the new user</param>
        /// <param name="clientCompanyName">Client company name shown in the email body</param>
        /// <returns></returns>
        public async Task<IdentityResult> SendUserNewPasswordEmailAsync(string userName, string clientCompanyName)
        {
            ApplicationUser user = await _userUow.GetUserByNameAsync(userName);
            if (user == null)
                return IdentityResult.Failed(new IdentityError { Description = $"Invalid User {userName}" });

            try
            {
                var sanitizedToken = await GetSanitizedToken(user);

                var clientSiteUrl = _config.Get("Urls:ClientSiteUrl");
                var newPasswordURL = clientSiteUrl
                                         + string.Format( _config.Get("Urls:NewPasswordUrl"), sanitizedToken, user.UserName);
                var emailImages = _config.Get("Urls:ImagesUrl");

                var emailSubject = "Argentex Client Site
New Password"; var emailBody = _emailSender.CreateBody(EmailType.NewUser); var emailMessage = emailBody.Replace("<!--*[UserName]*-->", user.UserName) .Replace("<!--*[ClientCompanyName]*-->", clientCompanyName) .Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[NewPasswordURL]*-->", newPasswordURL); const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), user.Email, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); return IdentityResult.Success; } catch (Exception ex) { return IdentityResult.Failed(new IdentityError { Description = $"Error sending email : {ex}" }); } } /// <summary> /// /// </summary> /// <param name="email">User email address</param> /// <returns></returns> public async Task<IdentityResult> SendResetPasswordEmailAsync(string userName) { ApplicationUser user = await _userUow.GetUserByNameAsync(userName); if (user == null || user.IsDeleted) return IdentityResult.Failed(new IdentityError { Code = IdentityResultCodes.UserNotFound, Description = $"User with username {userName} does not exist" }); if (!user.IsApproved || user.LockoutEnabled) return IdentityResult.Failed(new IdentityError { Code = IdentityResultCodes.InvalidUserState, Description = $"{userName} is in an invalid state" }); try { var sanitizedToken = await GetSanitizedToken(user); var clientSiteUrl = _config.Get("Urls:ClientSiteUrl"); var resetPasswordURL = clientSiteUrl + string.Format( _config.Get("Urls:ResetPasswordUrl"), sanitizedToken, user.UserName); var emailImages = _config.Get("Urls:ImagesUrl"); var emailSubject = "Argentex Client Site Forgot Password"; var emailBody = _emailSender.CreateBody(EmailType.ResetPassword); var emailMessage = emailBody.Replace("<!--*[UserName]*-->", user.UserName) .Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[ResetPasswordURL]*-->", resetPasswordURL); const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), user.Email, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); return IdentityResult.Success; } catch (Exception ex) { return IdentityResult.Failed(new IdentityError { Description = $"Error sending email : {ex}" }); } } /// <summary> /// /// </summary> /// <param name="email">User email address</param> /// <returns></returns> public async Task<IdentityResult> SendPasswordChangedEmailAsync(string userName) { try { ApplicationUser user = await _userUow.GetUserByNameAsync(userName); if (user == null) return IdentityResult.Failed(new IdentityError { Description = $"Invalid User {userName}" }); var emailImages = _config.Get("Urls:ImagesUrl"); var emailSubject = "Password successfully changed"; var emailBody = _emailSender.CreateBody(EmailType.PasswordChanged); var clientSiteUrl = _config.Get("Urls:ClientSiteUrl"); var emailMessage = emailBody.Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[ClientSiteUrl]*-->", clientSiteUrl) .Replace("<!--*[UserName]*-->", user.UserName); const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), user.Email, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); return 
IdentityResult.Success; } catch (Exception ex) { return IdentityResult.Failed(new IdentityError { Description = $"Error sending email : {ex}" }); } } private async Task<string> GetSanitizedToken(ApplicationUser user) { var confirmationToken = await _userUow.GeneratePasswordResetTokenAsync(user); var sanitizedToken = confirmationToken .Replace(SanitizeForwardSlash.Old, SanitizeForwardSlash.New) .Replace(SanitizeDoubleEqual.Old, SanitizeDoubleEqual.New) .Replace(SanitizePlus.Old, SanitizePlus.New); return System.Web.HttpUtility.UrlEncode(sanitizedToken); } public async Task<IdentityResult> SendEmailToDirectorsForApproval() { try { IEnumerable<AppUser> directors = _userUow.GetAllDirectorsAsList(); foreach (AppUser user in directors) { SendNewUserChangeRequestEmailAlert(user.AuthUser.Email); } return IdentityResult.Success; } catch (Exception ex) { return IdentityResult.Failed(new IdentityError { Description = $"Error sending email : {ex}" }); } } public async void SendNewUserChangeRequestEmailAlert(string email) { var emailImages = _config.Get("Urls:ImagesUrl"); var emailSubject = "New User Change Request"; var emailBody = _emailSender.CreateBody(EmailType.UserChangeRequestAlert); var emailMessage = emailBody.Replace("<!--*[MailImagesBaseURL]*-->", emailImages); const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), email, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } public async Task SendMobileChangeEmailAsync( string proposedValue, string email) { var emailImages = _config.Get("Urls:ImagesUrl"); var emailSubject = "Mobile Phone Number Updated"; var emailBody = _emailSender.CreateBody(EmailType.MobileChangeEmailAlert); var clientSiteUrl = _config.Get("Urls:ClientSiteUrl"); var emailMessage = emailBody.Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[PhoneNumber]*-->", proposedValue) .Replace("<!--*[ClientSiteUrl]*-->", clientSiteUrl); const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), email, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } #endregion #region Order Emails /// <summary> /// Sending email to client to notify that an order was made /// </summary> /// <param name="model"></param> /// <returns></returns> public async Task SendOrderNoteEmailAsync(OrderNoteModel model) { var body = _emailSender.CreateBody(EmailType.OrderNote); var subject = $"Order confirmation ({model.TradeRef})"; var emailBody = body; var emailSubject = subject; var emailImages = _config.Get("Urls:ImagesUrl"); var clientSiteUrl = _config.Get("Urls:ClientSiteUrl"); var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", model.ClientCompany.Name) .Replace("<!--*[CRN]*-->", model.ClientCompany.Crn) .Replace("<!--*[InstructedBy]*-->", model.InstructedBy) .Replace("<!--*[InstructedTime]*-->", model.InstructedDateTime.ToString("dd/MM/yyyy HH:mm")) .Replace("<!--*[InstructionMethod]*-->", model.Method) .Replace("<!--*[TradeCode]*-->", model.TradeRef) .Replace("<!--*[SellCCYAndAmount]*-->", string.Format("{0:0.00####}", model.SellAmount) + " " + model.SellCcy) .Replace("<!--*[BuyCCYAndAmount]*-->", string.Format("{0:0.00####}", model.BuyAmount) + " " + model.BuyCcy) .Replace("<!--*[CurrencyPair]*-->", model.CurrencyPair) 
.Replace("<!--*[ClientRate]*-->", string.Format("{0:0.0000####}", model.Rate)) .Replace("<!--*[ValueDate]*-->", model.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[CreatedDate]*-->", model.CreatedDate.ToString("dd/MM/yyyy")) .Replace("<!--*[Validity]*-->", model.ValidityDate != null ? ((DateTime)model.ValidityDate).ToString("dd/MM/yyyy") : "-") //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[ClientSiteUrl]*-->", clientSiteUrl); var sendTo = model.ClientEmail; const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), sendTo, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } /// <summary> /// Sending email to broker to notify that an order was made /// </summary> /// <param name="model"></param> /// <returns></returns> public async Task SendDealerOrderNoteEmailAsync(OrderNoteModel model) { var body = _emailSender.CreateBody(EmailType.DealerOrderNote); var subject = $"Order confirmation ({model.TradeRef})"; var emailBody = body; var emailSubject = subject; var emailImages = _config.Get("Urls:ImagesUrl"); var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", model.ClientCompany.Name) .Replace("<!--*[CRN]*-->", model.ClientCompany.Crn) .Replace("<!--*[InstructedBy]*-->", model.InstructedBy) .Replace("<!--*[InstructedTime]*-->", model.InstructedDateTime.ToString("dd/MM/yyyy HH:mm")) .Replace("<!--*[InstructionMethod]*-->", model.Method) .Replace("<!--*[TradeCode]*-->", model.TradeRef) .Replace("<!--*[SellCCYAndAmount]*-->", string.Format("{0:0.00####}", model.SellAmount) + " " + model.SellCcy) .Replace("<!--*[BuyCCYAndAmount]*-->", string.Format("{0:0.00####}", model.BuyAmount) + " " + model.BuyCcy) .Replace("<!--*[CurrencyPair]*-->", model.CurrencyPair) .Replace("<!--*[ClientRate]*-->", string.Format("{0:0.0000####}", model.Rate)) .Replace("<!--*[ValueDate]*-->", model.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[CreatedDate]*-->", model.CreatedDate.ToString("dd/MM/yyyy")) .Replace("<!--*[Validity]*-->", model.ValidityDate != null ? ((DateTime)model.ValidityDate).ToString("dd/MM/yyyy") : "-") //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages); var sendTo = model.DealerAuthUser != null ? 
model.DealerAuthUser.Email : null;

            const bool isHtml = true;

            await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"),
                _config.Get("EQS:EQSAppAccountPassword"),
                _config.Get("EQS:EQSEmailUsername"),
                sendTo,
                emailSubject,
                emailMessage,
                isHtml,
                _config.Get("EQS:EQSSenderEmail"),
                _config.Get("EQS:EQSSenderName"));
        }

        /// <summary>
        /// Sending email to client to notify that an order was cancelled by the client or when the validity date is overdue
        /// </summary>
        /// <param name="model"></param>
        /// <returns></returns>
        public async Task SendCancelOrderEmailAsync(CancelOrderModel model)
        {
            var body = _emailSender.CreateBody(EmailType.CancelOrder);
            var subject = "Order cancellation";

            var emailBody = body;
            var emailSubject = subject;
            var emailImages = _config.Get("Urls:ImagesUrl");
            var clientSiteUrl = _config.Get("Urls:ClientSiteUrl");

            var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", model.ClientCompany.Name)
                .Replace("<!--*[CRN]*-->", model.ClientCompany.Crn)
                .Replace("<!--*[InstructedBy]*-->", model.InstructedBy)
                .Replace("<!--*[InstructedTime]*-->", model.InstructedDateTime.ToString("dd/MM/yyyy HH:mm"))
                .Replace("<!--*[InstructionMethod]*-->", model.Method)
                .Replace("<!--*[TradeCode]*-->", model.TradeRef)
                .Replace("<!--*[SellCCYAndAmount]*-->", string.Format("{0:0.00####}", model.SellAmount) + " " + model.SellCcy)
                .Replace("<!--*[BuyCCYAndAmount]*-->", string.Format("{0:0.00####}", model.BuyAmount) + " " + model.BuyCcy)
                .Replace("<!--*[CurrencyPair]*-->", model.CurrencyPair)
                .Replace("<!--*[ClientRate]*-->", string.Format("{0:0.0000####}", model.Rate))
                .Replace("<!--*[ValueDate]*-->", model.ValueDate.ToString("dd/MM/yyyy"))
                .Replace("<!--*[CollateralCCYAndAmount]*-->", string.Format("{0:0.00####}", model.Collateral) + " " + model.CollateralCcy)
                .Replace("<!--*[CreatedDate]*-->", model.CreatedDate.ToString("dd/MM/yyyy"))
                .Replace("<!--*[Validity]*-->", model.ValidityDate != null ?
((DateTime)model.ValidityDate).ToString("dd/MM/yyyy") : "-") //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[ClientSiteUrl]*-->", clientSiteUrl); var sendTo = model.ClientEmail; const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), sendTo, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } #endregion #region Trade Emails /// <summary> /// Sending email to client to notify about the newly created trade /// </summary> /// <param name="trade"></param> /// <returns></returns> public async Task SendTradeReceiptEmailAsync(FxForwardTradeInformationModel trade) { //implement universal email handling var emailImages = _config.Get("Urls:ImagesUrl"); var emailSubject = "Trade Contract Note"; var emailBody = _emailSender.CreateBody(EmailType.TradeNote); var clientSiteUrl = _config.Get("Urls:ClientSiteUrl"); var settlementAccountsUrl = clientSiteUrl + _config.Get("Urls:SettlementAccountsUrl"); //Trade information var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", trade.ClientCompany.Name) .Replace("<!--*[CRN]*-->", trade.ClientCompany.Crn) .Replace("<!--*[InstructedBy]*-->", trade.InstructedBy) .Replace("<!--*[InstructedTime]*-->", trade.InstructedDateTime.ToString("dd/MM/yyyy HH:mm")) .Replace("<!--*[InstructionMethod]*-->", trade.Method) .Replace("<!--*[TradeCode]*-->", trade.TradeRef) .Replace("<!--*[SellCCYAndAmount]*-->", trade.SellAmount.ToString("N2") + " " + trade.SellCcy) .Replace("<!--*[BuyCCYAndAmount]*-->", trade.BuyAmount.ToString("N2") + " " + trade.BuyCcy) .Replace("<!--*[CurrencyPair]*-->", trade.CurrencyPair) .Replace("<!--*[ClientRate]*-->", trade.Rate.ToString("N4")) .Replace("<!--*[ValueDate]*-->", trade.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[CollateralCCYAndAmount]*-->", trade.Collateral.ToString("N2") + " " + trade.CollateralCcy) //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[ClientSiteUrl]*-->", clientSiteUrl) .Replace("<!--*[SettlementAccountsUrl]*-->", settlementAccountsUrl); if (trade.SettlementAccountDetails != null) { //Account information emailMessage = emailMessage.Replace("<!--*[SettlementBankName]*-->", trade.SettlementAccountDetails.BankName) .Replace("<!--*[SettlementAccountName]*-->", trade.SettlementAccountDetails.AccountName) .Replace("<!--*[SettlementAccountSort]*-->", trade.SettlementAccountDetails.SortCode) .Replace("<!--*[SettlementAccountNumber]*-->", trade.SettlementAccountDetails.AccountNumber) .Replace("<!--*[SettlementAccountSwiftCode]*-->", trade.SettlementAccountDetails.SwiftCode) .Replace("<!--*[SettlementAccountIBAN]*-->", trade.SettlementAccountDetails.Iban); } var isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), trade.ClientEmail, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } /// <summary> /// Sending email to broker and dealer assigned /// </summary> /// <param name="trade"></param> /// <returns></returns> public async Task SendBrokerTradeNoteEmailAsync(BrokerTradeNoteModel trade) { //implement universal email handling var emailImages = _config.Get("Urls:ImagesUrl"); var emailSubject = $"{trade.TradeCode} Trade Contract Note"; var emailBody = _emailSender.CreateBody(EmailType.BrokerTradeNote); //Trade 
information var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", trade.ClientCompany.Name) .Replace("<!--*[CRN]*-->", trade.ClientCompany.Crn) .Replace("<!--*[InstructedBy]*-->", trade.InstructedBy) .Replace("<!--*[InstructedTime]*-->", trade.InstructedDateTime.ToString("dd/MM/yyyy HH:mm")) .Replace("<!--*[InstructionMethod]*-->", trade.Method) .Replace("<!--*[TradeCode]*-->", trade.TradeCode) .Replace("<!--*[SellCCYAndAmount]*-->", trade.SellAmount.ToString("N2") + " " + trade.SellCcy) .Replace("<!--*[BuyCCYAndAmount]*-->", trade.BuyAmount.ToString("N2") + " " + trade.BuyCcy) .Replace("<!--*[CurrencyPair]*-->", trade.CurrencyPair) .Replace("<!--*[ClientRate]*-->", trade.Rate.ToString("N4")) .Replace("<!--*[ValueDate]*-->", trade.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[CollateralCCYAndAmount]*-->", trade.Collateral.ToString("N2") + " " + trade.CollateralCcy) //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages); if (trade.SettlementAccountDetails != null) { //Account information emailMessage = emailMessage.Replace("<!--*[SettlementBankName]*-->", trade.SettlementAccountDetails.BankName) .Replace("<!--*[SettlementAccountName]*-->", trade.SettlementAccountDetails.AccountName) .Replace("<!--*[SettlementAccountSort]*-->", trade.SettlementAccountDetails.SortCode) .Replace("<!--*[SettlementAccountNumber]*-->", trade.SettlementAccountDetails.AccountNumber) .Replace("<!--*[SettlementAccountSwiftCode]*-->", trade.SettlementAccountDetails.SwiftCode) .Replace("<!--*[SettlementAccountIBAN]*-->", trade.SettlementAccountDetails.Iban); } var isHtml = true; var emailTo = trade.Broker.BrokerNoteEmailAddress; var emailToCC = trade.DealerAuthUser != null ? trade.DealerAuthUser.Email : null; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), emailTo , emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName"), 0, null, emailToCC); } /// <summary> /// Sending email to Argentex Settlements department when a deal does not receive confirmation from FIX /// </summary> /// <param name="model"></param> /// <returns></returns> public async Task SendFailedFIXTradeEmailAsync(FailedFIXTradeModel model) { var body = _emailSender.CreateBody(EmailType.FailedFIXTrades); var subject = $"{model.TradeCode} confirmation not received from FIX"; var emailBody = body; var emailSubject = subject; var emailImages = _config.Get("Urls:ImagesUrl"); // Replacing $0 with trade code in the url from appsettings var tradeDetailsPath = _config.Get("Urls:FXDBTraderUrl") + _config.Get("Urls:FXDBTradeDetailsPath"); var tradeDetailsUrl = String.Format(tradeDetailsPath, model.TradeCode); var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", model.ClientCompany.Name) .Replace("<!--*[TradeCode]*-->", model.TradeCode) .Replace("<!--*[CurrencyPair]*-->", model.CurrencyPair) .Replace("<!--*[ValueDate]*-->", model.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[SellCCY]*-->", model.SellCcy) .Replace("<!--*[BuyCCY]*-->", model.BuyCcy) .Replace("<!--*[ClientRate]*-->", model.Rate.ToString("N4")) // FXDB Trade Details Link .Replace("<!--*[TradeDetailsUrl]*-->", tradeDetailsUrl) // Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages); var sendTo = _config.Get("Emails:SettlementsDepartment"); const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), 
_config.Get("EQS:EQSEmailUsername"), sendTo, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } #endregion #region Settlement Emails /// <summary> /// Sending email to client to notify that a settlement was made /// </summary> /// <param name="model"></param> /// <returns></returns> public async Task SendSettlementEmailsAsync(IList<SettlementNoteModel> modelList, List<string> emailList) { var body = _emailSender.CreateBody(EmailType.SettlementAssigned); var subject = $"Settlements Request for Trade: ({modelList[0].ParentTradeCode})"; var emailBody = body; var emailSubject = subject; var emailImages = _config.Get("Urls:ImagesUrl"); var clientSiteUrl = _config.Get("Urls:ClientSiteUrl"); var settlementInformationMessages = "You have requested: <br /><br />"; var dueForSettlementMessages = ""; int count = 0; foreach (var model in modelList) { count++; settlementInformationMessages += $"{string.Format("{0:n}", model.SettlementAmount)} {model.AccountCurrency} from trade {model.ParentTradeCode} be paid to {model.AccountName} On {model.ValueDate.ToString("dd/MM/yyyy")} <br /><br />"; if (count != modelList.Count) { settlementInformationMessages += "And <br /><br />"; } dueForSettlementMessages += $"On {model.ValueDate.ToString("dd/MM/yyyy")} Settlement due from you is therefore {string.Format("{0:n}", model.Amount)} {model.TradedCurrency}<br />"; } emailBody = emailBody.Replace("<!--*[SettlementInformationMessages]*-->", settlementInformationMessages) .Replace("<!--*[DueForSettlementMessages]*-->", dueForSettlementMessages) .Replace("<!--*[InstructedTime]*-->", modelList[0].InstructedDateTime.ToString("dd/MM/yyyy HH:mm")) .Replace("<!--*[InstructedBy]*-->", modelList[0].InstructedBy) //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages) .Replace("<!--*[ClientSiteUrl]*-->", clientSiteUrl); foreach (var email in emailList) { var sendTo = email; const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), sendTo, emailSubject, emailBody, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } } #endregion #region Payments Emails public async Task SendInwardPaymentEmailAsync(PaymentNotificationModel model, IEnumerable<string> emailList) { var body = _emailSender.CreateBody(EmailType.InwardPayment); var subject = $"Payment Notification ({model.PaymentCode})"; var emailBody = body; var emailSubject = subject; var emailImages = _config.Get("Urls:ImagesUrl"); var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", model.ClientCompany.Name) .Replace("<!--*[CRN]*-->", model.ClientCompany.Crn) .Replace("<!--*[PaymentCode]*-->", model.PaymentCode) .Replace("<!--*[PaymentAmount]*-->", string.Format("{0:n}", model.PaymentAmount)) .Replace("<!--*[PaymentCCYCode]*-->", model.Currency.Code) .Replace("<!--*[ValueDate]*-->", model.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[Reference]*-->", model.Reference) //Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages); foreach (var email in emailList) { var sendTo = email; const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), sendTo, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } } public async Task SendOutwardPaymentEmailAsync(PaymentNotificationModel model, 
IEnumerable<string> emailList) { var body = _emailSender.CreateBody(EmailType.OutwardPayment); var subject = $"Payment Notification ({model.PaymentCode})"; var emailBody = body; var emailSubject = subject; var emailImages = _config.Get("Urls:ImagesUrl"); var emailMessage = emailBody.Replace("<!--*[ClientCompanyName]*-->", model.ClientCompany.Name) .Replace("<!--*[CRN]*-->", model.ClientCompany.Crn) .Replace("<!--*[PaymentCode]*-->", model.PaymentCode) .Replace("<!--*[PaymentAmount]*-->", string.Format("{0:n}", model.PaymentAmount)) .Replace("<!--*[PaymentCCYCode]*-->", model.Currency.Code) .Replace("<!--*[ValueDate]*-->", model.ValueDate.ToString("dd/MM/yyyy")) .Replace("<!--*[Reference]*-->", model.Reference) // OPI .Replace("<!--*[AccountName]*-->", model.ClientCompanyOpi.AccountName) .Replace("<!--*[AccountNumber]*-->", model.ClientCompanyOpi.AccountNumber) .Replace("<!--*[SortCode]*-->", model.ClientCompanyOpi.SortCode) .Replace("<!--*[BankName]*-->", model.ClientCompanyOpi.BankName) .Replace("<!--*[SwiftCode]*-->", model.ClientCompanyOpi.SwiftCode) .Replace("<!--*[IBAN]*-->", model.ClientCompanyOpi.Iban) // Images paths .Replace("<!--*[MailImagesBaseURL]*-->", emailImages); foreach(var email in emailList) { var sendTo = email; const bool isHtml = true; await _emailSender.SendAsync(_config.Get("EQS:EQSAppAccountLogin"), _config.Get("EQS:EQSAppAccountPassword"), _config.Get("EQS:EQSEmailUsername"), sendTo, emailSubject, emailMessage, isHtml, _config.Get("EQS:EQSSenderEmail"), _config.Get("EQS:EQSSenderName")); } } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/AppSettings/IAppSettingService.cs using System; namespace Argentex.Core.Service.AppSettings { public interface IAppSettingService : IDisposable { int GetStreamingQuoteDuration(); int GetTimeOut(); string GetBarxFXFixQuoteUrl(); string GetBarxFXFixNewOrderUrl(); string GetEmirUtiCode(); int GetFixTimeout(); int GetStreamingDuration(); int GetTradeNotificationCounter(); int GetSpreadAdjustmentValidity(); int GetUserChangeDaysRequiredForApproval(); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Notification/AppUserNotificationTypeModel.cs using Argentex.Core.Service.Attributes; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Order { public class AppUserNotificationTypeModel { public int ID { get; set; } public int AppUserID { get; set; } public int ClientCompanyID { get; set; } public int NotificationTypeID { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Config/JwtConfig.cs using Microsoft.AspNetCore.Authentication.Cookies; using Microsoft.AspNetCore.Authentication.JwtBearer; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.IdentityModel.Tokens; using System; using System.Collections.Generic; using System.IdentityModel.Tokens.Jwt; using System.Linq; using System.Text; using System.Threading.Tasks; using AspNet.Security.OpenIdConnect.Primitives; namespace Argentex.Core.Api.Config { public static class JwtConfig { public static void ConfigureJwt(IServiceCollection services, IConfiguration config) { JwtSecurityTokenHandler.DefaultInboundClaimTypeMap.Clear(); JwtSecurityTokenHandler.DefaultOutboundClaimTypeMap.Clear(); services.AddAuthentication(options => { options.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme; options.DefaultChallengeScheme = JwtBearerDefaults.AuthenticationScheme; options.DefaultSignInScheme 
= JwtBearerDefaults.AuthenticationScheme; }) .AddJwtBearer(options => { //options.RequireHttpsMetadata = false; options.SaveToken = true; options.Audience = "resource_server"; options.TokenValidationParameters = new TokenValidationParameters { RequireExpirationTime = false, ValidateIssuer = true, ValidateAudience = true, ValidateLifetime = true, ValidateIssuerSigningKey = true, ValidIssuer = config["Jwt:Issuer"], ValidAudience = config["Jwt:Issuer"], IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["Jwt:SecurityKey"])), ClockSkew = TimeSpan.FromSeconds(Convert.ToInt32(config["Jwt:Expires"])), NameClaimType = OpenIdConnectConstants.Claims.Subject, RoleClaimType = OpenIdConnectConstants.Claims.Role }; }); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientSiteAction2FxforwardTrade2Opi.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientSiteAction2FxforwardTrade2Opi { public long Id { get; set; } public long ClientSiteActionId { get; set; } public long FxforwardTrade2Opiid { get; set; } public ClientSiteAction ClientSiteAction { get; set; } public FxforwardTrade2Opi FxforwardTrade2Opi { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Countries/ICountryUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using System.Linq; namespace Argentex.Core.UnitsOfWork.Countries { public interface ICountryUow : IBaseUow { IQueryable<Country> GetCountries(); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthUserPreviousPasswords.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthUserPreviousPasswords { public int Id { get; set; } public int AuthUserId { get; set; } public string Password { get; set; } public DateTime DateTime { get; set; } public AuthUser AuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/SecurityModels/AddUserModel.cs using System; using System.ComponentModel.DataAnnotations; using Argentex.Core.Api.Validation.Attributes; namespace Argentex.Core.Api.Models.SecurityModels { public class AddUserModel { [Display(Name = "Id")] public long Id { get; set; } [MaxLength(50, ErrorMessage = "Username has to be 50 characters or less")] [RequiredIf(nameof(IsApproved), true, AllowEmptyStrings = false, ErrorMessage = "A User name is required")] [Display(Name = "Username")] public string Username { get; set; } [RequiredIf(nameof(IsApproved), true, AllowEmptyStrings = false, ErrorMessage = "An Email address is required")] [EmailAddress] [Display(Name = "Email")] public string Email { get; set; } [Required(AllowEmptyStrings = false, ErrorMessage = "A Forename is required")] [MaxLength(256, ErrorMessage = "Forename has to be 256 characters or less")] public string Forename { get; set; } [Required(AllowEmptyStrings = false, ErrorMessage = "A Surname is required")] [MaxLength(256, ErrorMessage = "Surname has to be 256 characters or less")] public string Surname { get; set; } [Required(AllowEmptyStrings = false, ErrorMessage = "A Title is required")] [MaxLength(10, ErrorMessage = "Title has to be 10 characters or less")] public string Title { get; set; } [Required(ErrorMessage = "No client company specified")] public int ClientCompanyId { get; set; } [Required(ErrorMessage = "No Update user Id is specified")] public int UpdatedByAuthUserId { get; set; } [RequiredIf(nameof(IsApproved), true, 
AllowEmptyStrings = false, ErrorMessage = "A birthday is required")] public string Birthday { get; set; } [RequiredIf(nameof(IsApproved), true, AllowEmptyStrings = false, ErrorMessage = "A position is required")] public string Position { get; set; } [RequiredIf(nameof(IsApproved), true, AllowEmptyStrings = false, ErrorMessage = "A direct phone number is required")] public string PhoneNumberDirect { get; set; } [RequiredIf(nameof(IsApproved), true, AllowEmptyStrings = false, ErrorMessage = "A mobile phone number is required")] public string PhoneNumberMobile { get; set; } public string AppClientUrl { get; set; } public string PhoneNumberOther { get; set; } public string ASPNumber { get; set; } public DateTime? ASPCreationDate { get; set; } public bool PrimaryContact { get; set; } public int? ClientCompanyContactId { get; set; } public int? AuthUserId { get; set; } public string Notes { get; set; } public string Fullname { get; set; } public bool Authorized { get; set; } public bool RecNotification { get; set; } public bool RecAmReport { get; set; } public bool RecActivityReport { get; set; } public bool IsDeleted { get; set; } public DateTime? UpdatedDateTime { get; set; } public DateTime? LastTelephoneChangeDate { get; set; } public DateTime? LastEmailChangeDate { get; set; } public string BloombergGpi { get; set; } public string NiNumber { get; set; } public int[] AssignedCategoryIds { get; set; } public bool? IsLockedOut { get; set; } public string Comment { get; set; } public DateTime CreateDate { get; set; } public DateTime? LastPasswordChangeDate { get; set; } public DateTime? LastLoginDate { get; set; } public DateTime? LastActivityDate { get; set; } public DateTime? LastLockOutDate { get; set; } public int FailedPasswordAttemptCount { get; set; } public DateTime FailedPasswordAttemptWindowStart { get; set; } public int ApplicationId { get; set; } public bool IsAdmin { get; set; } public bool IsSignatory { get; set; } public bool IsAuthorisedSignatory { get; set; } public bool IsApproved { get; set; } public int? ApprovedByAuthUserId { get; set; } public bool? FindUserByUsername { get; set; } public bool? FindUserByEmail { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyContactBulkCategoryModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.ClientCompany { public class ClientCompanyContactBulkCategoryModel { public int ClientCompanyContactId { get; set; } public int[] ContactCategoryIds { get; set; } public int CreatedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Payments/PaymentInformationModel.cs using System; namespace Argentex.Core.Service.Models.Payments { public class PaymentInformationModel { public string PaymentCode { get; set; } public string PaymentType { get; set; } public DateTime ValueDate { get; set; } public DateTime CreatedDateTime { get; set; } public decimal Amount { get; set; } public string Reference { get; set; } public string Currency { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Broker.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Broker { public Broker() { FxforwardTrade = new HashSet<FxforwardTrade>(); Fxoption = new HashSet<Fxoption>(); } public int Id { get; set; } public string Description { get; set; } public int? 
MarginBankAccountId { get; set; } public string BrokerNoteEmailAddress { get; set; } public string EmirLei { get; set; } public int? BankAccountBrokerPaymentsInId { get; set; } public int? BankAccountBrokerPaymentsOutId { get; set; } public int? BankAccountClientPaymentsInId { get; set; } public int? BankAccountClientPaymentsOutId { get; set; } public int? BankAccountSettlePaymentsInId { get; set; } public int? BankAccountSettlePaymentsOutId { get; set; } public BankAccount BankAccountBrokerPaymentsIn { get; set; } public BankAccount BankAccountBrokerPaymentsOut { get; set; } public BankAccount BankAccountClientPaymentsIn { get; set; } public BankAccount BankAccountClientPaymentsOut { get; set; } public ICollection<FxforwardTrade> FxforwardTrade { get; set; } public ICollection<Fxoption> Fxoption { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/UserReport.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class UserReport { //[Key, Column(Order = 1)] public long UserId { get; set; } //[Key, Column(Order = 2)] public long ReportId { get; set; } //[ForeignKey(nameof(ReportId))] public virtual Report Report { get; set; } //[ForeignKey(nameof(UserId))] public virtual ApplicationUser User { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/DatabaseConstant.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.UnitsOfWork { public class DatabaseConstant { //Max length restriction due to AuthUser.Username length public const int Setting_UserManagement_UsernameCharacterLimit = 50; } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Online/OnlineController.cs using Argentex.Core.Service.ClientSiteAction; using Microsoft.AspNetCore.Mvc; using SynetecLogger; using Microsoft.AspNetCore.Authorization; using Argentex.Core.Service.Models.ClientSiteAction; namespace Argentex.Core.Api.Controllers.ClientSiteAction { [Produces("application/json")] [Route("api/online")] public class OnlineController : Controller { private readonly ILogWrapper _logger; public OnlineController() { } [HttpGet] [AllowAnonymous] public IActionResult GetSignal() { return Ok("Core API connection: Successful"); } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Currencies/ICurrencyUoW.cs using System.Collections.Generic; using System.Linq; using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; namespace Argentex.Core.UnitsOfWork.Currencies { public interface ICurrencyUoW : IBaseUow { IGenericRepo<CurrencyPairPricing> CurrencyPairPricingRepository { get; } IQueryable<Currency> GetCurrency(int currencyId); IQueryable<Currency> GetCurrency(string code); IQueryable<Currency> GetCurrencies(); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Identity/IdentityServiceTests.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service.Email.EmailSender; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity; using Argentex.Core.Service.Models.Identity; using Argentex.Core.UnitsOfWork.Users; using Microsoft.AspNetCore.Identity; using Moq; using Synetec.Data.UnitOfWork.GenericRepo; using SynetecLogger; using System; using System.Collections.Generic; using System.Linq; 
using System.Linq.Expressions; using System.Threading.Tasks; using FluentAssertions; using Xunit; namespace Argentex.Core.Service.Tests.Identity { public class IdentityServiceTests { [Fact] public async Task ResetPassword_Success() { var uow = new Mock<IUserUow>(); uow.Setup(x => x.GetUserByNameAsync(It.IsAny<string>())) .ReturnsAsync(new ApplicationUser {IsDeleted = false, IsApproved = true, LockoutEnabled = false}); uow.Setup(x => x.ResetPasswordAsync(It.IsAny<ApplicationUser>(), It.IsAny<string>(), It.IsAny<string>())) .ReturnsAsync(IdentityResult.Success); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendPasswordChangedEmailAsync(It.IsAny<string>())) .ReturnsAsync(IdentityResult.Success); var service = new IdentityService(null, uow.Object, mockEmailService.Object, null); var result = await service.ResetPasswordAsync("", "", ""); result.Succeeded.Should().BeTrue(); result.Errors.Should().BeNullOrEmpty(); } [Fact] public async Task ResetPassword_Should_Throw_An_Exception_When_User_Does_Not_Exist() { // Given var username = "rtest"; var uow = new Mock<IUserUow>(); uow.Setup(x => x.GetUserByNameAsync(It.IsAny<string>())) .Returns(Task.FromResult<ApplicationUser>(null)); var service = new IdentityService(null, uow.Object, null, null); var expectedMessage = $"User with username {username} does not exist"; // When var result = await service.ResetPasswordAsync(username, "", ""); // Then result.Should().NotBeNull(); result.Succeeded.Should().BeFalse(); result.Errors.Should().HaveCount(1); result.Errors.Should().Contain(e => e.Code == IdentityResultCodes.UserNotFound); } [Fact] public void ResetPassword_Fail() { var uow = new Mock<IUserUow>(); uow.Setup(x => x.GetUserByNameAsync(It.IsAny<string>())) .Returns(Task.FromResult(new ApplicationUser())); uow.Setup(x => x.ResetPasswordAsync(It.IsAny<ApplicationUser>(), It.IsAny<string>(), It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Failed())); var service = new IdentityService(null, uow.Object, null, null); var result = service.ResetPasswordAsync("", "", "").Result; Assert.False(result.Succeeded); } [Fact(Skip = "Useless test needs to be fixed")] public void AuthenticateAsyncTest_NewPassword_Success() { IdentityService service = SetUpMocks(); var serviceModel = new LoginServiceModel { Username = "rado", Password = "<PASSWORD>#", Grant_Type = "password", ClientId = "clientId" }; var result = service.AuthenticateAsync(serviceModel).Result; } [Fact(Skip = "Needs to be fixed")] public void AuthenticateAsyncTest_RefreshToken_Success() { var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var repo = new Mock<IGenericRepo<Token>>(); repo.Setup(x => x.Insert(It.IsAny<Token>())); repo.Setup(x => x.GetQueryable(It.IsAny<Expression<Func<Token, bool>>>(), It.IsAny<Func<IQueryable<Token>, IOrderedQueryable<Token>>>(), It.IsAny<string>())) .Returns((new List<Token> { new Token { UserId = 1, ClientId = "clientId", Value = "refreshToken" } }).AsQueryable()); var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(m => m.PasswordSignInAsync(It.IsAny<string>(), It.IsAny<string>(), true, false)) .Returns(Task.FromResult(SignInResult.Success)); mockUserUow.Setup(x => x.SaveContextAsync()) .Returns(Task.FromResult(1)); var activityRepo = new Mock<IGenericRepo<Activity>>(); activityRepo.Setup(x => x.GetQueryable(It.IsAny<Expression<Func<Activity, bool>>>(), It.IsAny<Func<IQueryable<Activity>, IOrderedQueryable<Activity>>>(), It.IsAny<string>())) .Returns((new List<Activity> { new 
Activity{ActivityId = 1} }).AsQueryable()); mockUserUow.SetupGet(x => x.ActivityRepo).Returns(activityRepo.Object); mockUserUow.Setup(x => x.SaveContextAsync()) .Returns(Task.FromResult(1)); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(new ApplicationUser { Id = 1, UserName = "rado", Email = "<EMAIL>" })); IList<string> roles = new string[] { "manager" }; mockUserUow.Setup(x => x.GetRolesAsync(It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(roles)); var mockConfig = new Mock<IConfigWrapper>(); mockConfig.Setup(x => x.Get(It.IsAny<string>())).Returns(It.IsAny<string>()); var service = new IdentityService(mockConfig.Object, mockUserUow.Object, null, null); //it will fire an exception. var result = service.AuthenticateAsync(new LoginServiceModel { Username = "rado", Password = "<PASSWORD>#", Grant_Type = "refresh_token", ClientId = "clientId", RefreshToken = "refreshToken" }).Result; Assert.Null(result); } [Fact] public void AuthenticateAsyncTest_RefreshToken_Fail_TokenNotFound() { IdentityService service = SetUpMocks(); var result = service.AuthenticateAsync(new LoginServiceModel { Username = "rado", Password = "<PASSWORD>#", Grant_Type = "refresh_token", ClientId = "clientId" }).Result; Assert.Null(result); } [Fact] public void AuthenticateAsyncTest_RefreshToken_Fail_UserNotFound() { var logger = new Mock<ILogWrapper>(); logger.Setup(x => x.Error(It.IsAny<Exception>())); var repo = new Mock<IGenericRepo<Token>>(); repo.Setup(x => x.Insert(It.IsAny<Token>())); repo.Setup(x => x.GetQueryable(It.IsAny<Expression<Func<Token, bool>>>(), It.IsAny<Func<IQueryable<Token>, IOrderedQueryable<Token>>>(), It.IsAny<string>())) .Returns((new List<Token> { new Token { UserId = 1, ClientId = "clientId", Value = "refreshToken" } }).AsQueryable()); var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(m => m.PasswordSignInAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<Boolean>(), It.IsAny<Boolean>())) .Returns(Task.FromResult(SignInResult.Success)); mockUserUow.Setup(x => x.SaveContextAsync()) .Returns(Task.FromResult(1)); var activityRepo = new Mock<IGenericRepo<Activity>>(); activityRepo.Setup(x => x.GetQueryable(It.IsAny<Expression<Func<Activity, bool>>>(), It.IsAny<Func<IQueryable<Activity>, IOrderedQueryable<Activity>>>(), It.IsAny<string>())) .Returns((new List<Activity> { new Activity{ActivityId = 1} }).AsQueryable()); mockUserUow.SetupGet(x => x.ActivityRepo).Returns(activityRepo.Object); mockUserUow.Setup(x => x.SaveContextAsync()) .Returns(Task.FromResult(1)); mockUserUow.Setup(x => x.IsUserByNameAsync(It.IsAny<string>())) .Returns(Task.FromResult(true)); mockUserUow.Setup(x => x.GetUserByNameAsync(It.IsAny<string>())) .Returns(Task.FromResult(new ApplicationUser { Id = 1, UserName = "rado", Email = "<EMAIL>", AuthUserId = 1000, IsDeleted = true })); var service = new IdentityService(null, mockUserUow.Object, null, null); var result = service.AuthenticateAsync(new LoginServiceModel { Username = "rado", Password = "<PASSWORD>#", Grant_Type = "password", ClientId = "clientId", RefreshToken = "refreshToken" }).Result; Assert.Null(result); } [Fact(Skip="Useless test needs to be fixed")] public void BuildTokenTest_Success() { var userServiceModel = new UserModel { Email = "<EMAIL>", Name = "rado", Roles = new List<string> { "manager", "admin"} }; var mockConfig = new Mock<IConfigWrapper>(); mockConfig.Setup(x => x.Get(It.IsAny<string>())).Returns(It.IsAny<string>()); //var service = new IdentityService(mockConfig.Object, null, null); //var 
result = service.BuildToken(userServiceModel); } //no assert [Fact(Skip = "Useless test needs to be fixed")] public void BuildTokenTest_Fail() { var userServiceModel = new UserModel { Email = "<EMAIL>", Name = "rado", Roles = new List<string> { "manager", "admin" } }; var mockConfig = new Mock<IConfigWrapper>(); mockConfig.Setup(x => x.Get(It.IsAny<string>())).Returns(It.IsAny<string>()); //var service = new IdentityService(mockConfig.Object, null, null); //var result = service.BuildToken(userServiceModel); } //no assert [Fact] public void ChangePasswordAsync_Success_With_Correct_And_Different_Password() { //Arrange var mockUserUow = new Mock<IUserUow>(); var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>==", //password hash for "<PASSWORD>" ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100 }; mockUserUow.Setup(uow => uow.AuthUserRepository.GetByPrimaryKey(It.IsAny<int>())) .Returns(new AuthUser { Id = 300, UserName = "testaccount", Email = "<EMAIL>", Password = "<PASSWORD>==" //password hash for "<PASSWORD>" }); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(user)); mockUserUow.Setup(uow => uow.ChangePasswordAsync(user, It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); var mockEmailService = new Mock<IEmailService>(); mockEmailService.Setup(s => s.SendPasswordChangedEmailAsync(It.IsAny<string>())).Returns(Task.FromResult(IdentityResult.Success)); var service = new IdentityService(null, mockUserUow.Object, mockEmailService.Object, null); //Act var updateResult = service.ChangePasswordAsync("1", "Abcd1234", "Changed1234", "Changed1234").Result; //Assert Assert.Equal(IdentityResult.Success, updateResult); } [Fact] public void ChangePasswordAsync_Should_Throw_An_Exception_When_Passwords_Dont_Match() { //Arrange var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", Password<PASSWORD> = "<PASSWORD>==", //password hash for "<PASSWORD>" ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100 }; var mockUserUow = new Mock<IUserUow>(); var service = new IdentityService(null, mockUserUow.Object, null, null); var expectedMessage = "Passwords do not match"; //Act var exception = Assert.ThrowsAsync<PasswordsDoNotMatchException>(() => service.ChangePasswordAsync("1", "WrongPassword1234", "Changed1234", "Changed4321")); //Assert Assert.NotNull(exception.Result); Assert.Equal(expectedMessage, exception.Result.Message); } [Fact] public void ChangePasswordAsync_Failed_With_Identical_Old_And_New_Password() { //Arrange var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>Wm5HlA==", //password hash for "<PASSWORD>" ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100 }; var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(uow => uow.AuthUserRepository.GetByPrimaryKey(It.IsAny<int>())) .Returns(new AuthUser { Id = 300, UserName = "testaccount", Email = "<EMAIL>", Password = "<PASSWORD>==" //password <PASSWORD> for "<PASSWORD>" }); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(user)); var service = new IdentityService(null, mockUserUow.Object, null, null); //Act var 
updateResult = service.ChangePasswordAsync("1", "<PASSWORD>", "<PASSWORD>", "<PASSWORD>").Result; //Assert Assert.Equal(IdentityResult.Failed().Succeeded, updateResult.Succeeded); } [Fact] public void Change_Password_Should_Throw_An_Exception_When_User_Tries_To_Use_Previous_3_Password() { // Given var mockUserUow = new Mock<IUserUow>(); var user = new ApplicationUser { Id = 1, UserName = "testaccount", Email = "<EMAIL>", Title = "Mr", Forename = "Test", Surname = "Account", PasswordHash = "<PASSWORD>==", //password hash for "<PASSWORD>" ClientCompanyId = 439, UpdatedByAuthUserId = 111, AuthUserId = 300, ClientCompanyContactId = 100 }; mockUserUow.Setup(uow => uow.AuthUserRepository.GetByPrimaryKey(It.IsAny<int>())) .Returns(new AuthUser { Id = 300, UserName = "testaccount", Email = "<EMAIL>", Password = "<PASSWORD>==" //password hash for "<PASSWORD>" }); mockUserUow.Setup(x => x.GetUserByIdAsync(It.IsAny<string>())) .Returns(Task.FromResult(user)); mockUserUow.Setup(x => x.GetLastPasswords(It.IsAny<long>())).Returns(new List<PreviousPassword> { new PreviousPassword() { PasswordHash = "<PASSWORD>==" //<PASSWORD> } }.AsQueryable); mockUserUow.Setup(uow => uow.ChangePasswordAsync(user, It.IsAny<string>())) .Returns(Task.FromResult(IdentityResult.Success)); var service = new IdentityService(null, mockUserUow.Object, null, null); var expectedMessage = "Password already been used within the past 3 passwords"; // When var exception = Assert.ThrowsAsync<PasswordAlreadyUsedException>(() => service.ChangePasswordAsync("1", "<PASSWORD>", "<PASSWORD>", "<PASSWORD>")); // Then Assert.NotNull(exception.Result); Assert.Equal(expectedMessage, exception.Result.Message); } private static IdentityService SetUpMocks() { var mockTokenRepository = new Mock<IGenericRepo<Token>>(); var mockUserUow = new Mock<IUserUow>(); mockUserUow.Setup(x => x.SaveContextAsync()) .Returns(Task.FromResult(1)); mockUserUow.Setup(x => x.IsUserByNameAsync(It.IsAny<string>())) .Returns(Task.FromResult(true)); mockUserUow.Setup(x => x.GetUserByNameAsync(It.IsAny<string>())) .Returns(Task.FromResult(new ApplicationUser { Id = 1, UserName = "rado", Email = "<EMAIL>", AuthUserId = 1000 })); IList<string> roles = new string[] { "manager" }; mockUserUow.Setup(x => x.GetRolesAsync(It.IsAny<ApplicationUser>())) .Returns(Task.FromResult(roles)); var activityRepo = new Mock<IGenericRepo<Activity>>(); activityRepo.Setup(x => x.GetQueryable(It.IsAny<Expression<Func<Activity, bool>>>(), It.IsAny<Func<IQueryable<Activity>, IOrderedQueryable<Activity>>>(), It.IsAny<string>())) .Returns((new List<Activity> { new Activity{ActivityId = 1} }).AsQueryable()); mockUserUow.SetupGet(x => x.ActivityRepo).Returns(activityRepo.Object); mockUserUow.Setup(x => x.SaveContextAsync()) .Returns(Task.FromResult(1)); mockUserUow.Setup(x => x.LogActivity(It.IsAny<ActivityLog>())).Returns(Task.FromResult(1)); var service = new IdentityService(null, mockUserUow.Object, null, null); return service; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOpiduplicate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOpiduplicate { public int Id { get; set; } public int DuplicateClientCompanyOpiid { get; set; } public int OriginalClientCompanyOpiid { get; set; } public bool IsOk { get; set; } public string Note { get; set; } public int? IsOkupdatedByAuthUserId { get; set; } public DateTime? 
IsOkupdatedDateTime { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public AuthUser CreatedByAuthUser { get; set; } public ClientCompanyOpi DuplicateClientCompanyOpi { get; set; } public AuthUser IsOkupdatedByAuthUser { get; set; } public ClientCompanyOpi OriginalClientCompanyOpi { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Fix/BarxFxQuoteServiceTests.cs using Argentex.ClientSite.Service.Http; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.Fix; using Moq; using System; using System.Net.Http; using System.Threading.Tasks; using Xunit; namespace Argentex.Core.Service.Tests.Fix { public class BarxFxQuoteServiceTests { [Fact] public void GetFixOrderRequestAsync_Successful_When_It_Has_The_Correct_Input() { //Arrange var mockConfig = new Mock<IAppSettingService>(); mockConfig.Setup(x => x.GetBarxFXFixQuoteUrl()) .Returns(It.IsAny<string>()); var responseMessage = new HttpResponseMessage(); var mockHttpService = new Mock<IHttpService>(); mockHttpService.Setup(x => x.SendAsync()) .Returns(Task.FromResult(responseMessage)); mockHttpService.Setup(x => x.GetResponseObject<FixQuoteResponseModel>(responseMessage)) .Returns(Task.FromResult(new FixQuoteResponseModel())); var service = new BarxFxService(mockHttpService.Object, mockConfig.Object, null); var orderRequest = new FixQuoteRequestModel { TradeCode = "Code", LHSCCY = "GBP", RHSCCY = "EUR", BrokerMajorAmount = 1000, MajorCurrency = "GBP", Side = 1, ValueDate = "2018/01/01", TimeOut = 15000, Duration = 35 }; //Act var result = service.GetQuoteAsync(orderRequest).Result; //Assert Assert.IsType<FixQuoteResponseModel>(result); Assert.NotNull(result); } [Fact] public async Task GetFixOrderRequestAsync_Unreachable_Fix_API() { //Arrange var mockConfig = new Mock<IAppSettingService>(); mockConfig.Setup(x => x.GetBarxFXFixQuoteUrl()) .Returns(It.IsAny<string>()); var responseMessage = new HttpResponseMessage(); var mockHttpService = new Mock<IHttpService>(); mockHttpService.Setup(x => x.SendAsync()) .Throws(new HttpRequestException()); var service = new BarxFxService(mockHttpService.Object, mockConfig.Object, null); var orderRequest = new FixQuoteRequestModel { TradeCode = "Code", LHSCCY = "GBP", RHSCCY = "EUR", BrokerMajorAmount = 1000, MajorCurrency = "GBP", Side = 1, ValueDate = "2018/01/01", TimeOut = 15000, Duration = 35 }; //Act Exception ex = await Assert.ThrowsAsync<HttpRequestException>(() => service.GetQuoteAsync(orderRequest)); Assert.Contains("Synetec FIX API is unreachable", ex.Message); } [Fact] public async Task GetFixOrderRequestAsync_Unsuccessful_When_It_Has_Invalid_Input_BadRequest() { //Arrange var mockConfig = new Mock<IAppSettingService>(); mockConfig.Setup(x => x.GetBarxFXFixQuoteUrl()) .Returns(It.IsAny<string>()); var responseMessage = new HttpResponseMessage { StatusCode = System.Net.HttpStatusCode.BadRequest }; var mockHttpService = new Mock<IHttpService>(); mockHttpService.Setup(x => x.SendAsync()) .Returns(Task.FromResult(responseMessage)); mockHttpService.Setup(x => x.GetResponseObject<FixQuoteResponseModel>(responseMessage)) .Returns(Task.FromResult(new FixQuoteResponseModel())); var service = new BarxFxService(mockHttpService.Object, mockConfig.Object, null); var orderRequest = new FixQuoteRequestModel { TradeCode = "Code", LHSCCY = "GBP", RHSCCY = "EUR", 
BrokerMajorAmount = 1000, MajorCurrency = "GBP", Side = 1, ValueDate = "2018/01/01", TimeOut = 15000, Duration = 35 }; //Act Exception ex = await Assert.ThrowsAsync<HttpRequestException>(() => service.GetQuoteAsync(orderRequest)); Assert.Contains("Invalid http request to Synetec FIX API.", ex.Message); } [Fact] public async Task GetFixOrderRequestAsync_Unsuccessful_When_BarxFX_Out_Of_Operating_Hours() { //Arrange var mockConfig = new Mock<IAppSettingService>(); mockConfig.Setup(x => x.GetBarxFXFixQuoteUrl()) .Returns(It.IsAny<string>()); var responseMessage = new HttpResponseMessage { StatusCode = System.Net.HttpStatusCode.ServiceUnavailable }; var mockHttpService = new Mock<IHttpService>(); mockHttpService.Setup(x => x.SendAsync()) .Returns(Task.FromResult(responseMessage)); mockHttpService.Setup(x => x.GetResponseObject<FixQuoteResponseModel>(responseMessage)) .Returns(Task.FromResult(new FixQuoteResponseModel())); var service = new BarxFxService(mockHttpService.Object, mockConfig.Object, null); var orderRequest = new FixQuoteRequestModel { TradeCode = "Code", LHSCCY = "GBP", RHSCCY = "EUR", BrokerMajorAmount = 1000, MajorCurrency = "GBP", Side = 1, ValueDate = "2018/01/01", TimeOut = 15000, Duration = 35 }; //Act Exception ex = await Assert.ThrowsAsync<HttpRequestException>(() => service.GetQuoteAsync(orderRequest)); Assert.Contains("BarxFX is not available. Reason: ", ex.Message); } [Fact] public async Task GetFixOrderRequestAsync_Unsuccessful_When_SynetecApi_Is_Available_And_Error_Returned_By_BarxFX() { //Arrange var mockConfig = new Mock<IAppSettingService>(); mockConfig.Setup(x => x.GetBarxFXFixQuoteUrl()) .Returns(It.IsAny<string>()); var responseMessage = new HttpResponseMessage { StatusCode = System.Net.HttpStatusCode.OK }; var mockHttpService = new Mock<IHttpService>(); mockHttpService.Setup(x => x.SendAsync()) .Returns(Task.FromResult(responseMessage)); mockHttpService.Setup(x => x.GetResponseObject<FixQuoteResponseModel>(responseMessage)) .Returns(Task.FromResult(new FixQuoteResponseModel { ErrorMessage = "NO_RESPONSE" })); var service = new BarxFxService(mockHttpService.Object, mockConfig.Object, null); var orderRequest = new FixQuoteRequestModel { TradeCode = "Code", LHSCCY = "GBP", RHSCCY = "EUR", BrokerMajorAmount = 1000, MajorCurrency = "GBP", Side = 1, ValueDate = "2018/01/01", TimeOut = 15000, Duration = 35 }; //Act Exception ex = await Assert.ThrowsAsync<HttpRequestException>(() => service.GetQuoteAsync(orderRequest)); Assert.Contains("Error getting quote from BarxFX. 
Reason:", ex.Message); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/SecurityModels/ChangePasswordModel.cs using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Api.Models.SecurityModels { public class ChangePasswordModel { [Required] public long UserId { get; set; } public string UserName { get; set; } [Required] [DataType(DataType.Password)] public string CurrentPassword { get; set; } [Required] [DataType(DataType.Password)] public string NewPassword { get; set; } [Required] [DataType(DataType.Password)] public string ConfirmPassword { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/ClientCompanyOpiNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class ClientCompanyOpiNotFoundException : Exception { public ClientCompanyOpiNotFoundException() : base() { } public ClientCompanyOpiNotFoundException(string message) : base(message) { } public ClientCompanyOpiNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Settlements/SettlementService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.ClientSiteAction; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Enums; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Payments; using Argentex.Core.Service.Models.Settlements; using Argentex.Core.Service.Settlements; using Argentex.Core.Service.Trade; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.Currencies; using Argentex.Core.UnitsOfWork.Payments; using Argentex.Core.UnitsOfWork.Settlements; using Argentex.Core.UnitsOfWork.Trades; using System; using System.Collections.Generic; using System.Data; using System.Globalization; using System.Linq; using System.Threading.Tasks; using System.Transactions; namespace Argentex.Core.Service.Payments { public class SettlementService : ISettlementService { private const int DecimalPlaces = 2; private bool _isTotalSettlementAmountGreaterThanTradeBalance = false; private const string _warningMessage = "Total settlement amount cannot exceed the total outstanding balance."; private readonly IPaymentUoW _paymentUoW; private readonly ITradeUow _tradeUow; private readonly ICurrencyUoW _currencyUoW; private readonly IClientCompanyUow _clientCompanyUow; private readonly IClientCompanyAccountsUoW _clientCompanyAccountsUoW; private readonly ISettlementUow _settlementUow; private readonly IUserService _userService; private readonly ICurrencyService _currencyService; private readonly IClientSiteActionService _clientSiteActionService; private readonly IAppSettingService _appSettingService; private readonly IEmailService _emailService; private readonly ITradeService _tradeService; private bool _disposed; public SettlementService(IPaymentUoW paymentUoW, ICurrencyUoW currencyUoW, IClientCompanyUow clientCompanyUow, IClientCompanyAccountsUoW clientCompanyAccountsUoW, ISettlementUow settlementUow, ITradeUow tradeUow, IUserService userService, ICurrencyService currencyService, IClientSiteActionService clientSiteActionService, IAppSettingService appSettingService, IEmailService emailService, ITradeService tradeService) { _paymentUoW = paymentUoW; _currencyUoW = currencyUoW; 
_clientCompanyUow = clientCompanyUow; _clientCompanyAccountsUoW = clientCompanyAccountsUoW; _settlementUow = settlementUow; _tradeUow = tradeUow; _userService = userService; _currencyService = currencyService; _clientSiteActionService = clientSiteActionService; _appSettingService = appSettingService; _emailService = emailService; _tradeService = tradeService; } public PaymentInformationModel GetPaymentInformation(string paymentCode, bool isPaymentOut = false) { if (!isPaymentOut) return GetPaymentmentInInformation(paymentCode); var opiTransaction = GetOpiTransaction(paymentCode); return GetPaymentmentOutInformation(paymentCode, opiTransaction); } private bool CheckIfTradeBalanceIsExceededBySettlementAmounts(AssignSettlementRequestModel requestModel) { decimal totalAcceptedSettlementsAmount = _settlementUow.GetSettlementAmountForTrade(requestModel.Trade.TradeId); decimal totalSwapSettlementsAmount = GetSwaps(requestModel.Trade.TradeId) .Where(x => x.Status != (int)ClientSiteActionStatuses.Rejected && x.Status != (int)ClientSiteActionStatuses.Declined).Sum(x => x.Amount); decimal totalNewSettlementsAmount = requestModel.SettlementModels .Where(x => x.Status == (int)SettlementStatus.New).Sum(x => x.Amount); decimal tradeBalance = _tradeService.GetTradeBalance(requestModel.ClientCompanyId, requestModel.Trade.TradeId); return (totalNewSettlementsAmount + totalAcceptedSettlementsAmount + totalSwapSettlementsAmount) > tradeBalance; } public async Task<IList<AssignSettlementModel>> AssignAsync(AssignSettlementRequestModel requestModel) { using (TransactionScope scope = new TransactionScope(TransactionScopeAsyncFlowOption.Enabled)) { _isTotalSettlementAmountGreaterThanTradeBalance = CheckIfTradeBalanceIsExceededBySettlementAmounts(requestModel); if (!_isTotalSettlementAmountGreaterThanTradeBalance) { var settlementNoteModelList = new List<SettlementNoteModel>(); var authUser = _userService.GetAuthUserById(requestModel.AuthUserId); //temporary var parentTrade = _tradeUow.GetTrade(requestModel.Trade.TradeId).FirstOrDefault(); foreach (var assignSettlement in requestModel.SettlementModels) { var maxSettlementCreateDateForTrade = _settlementUow.GetMaxCreateDateForTrade(requestModel.Trade.TradeId); if (assignSettlement.Status == (int)SettlementStatus.New) { if (maxSettlementCreateDateForTrade != _settlementUow.GetMaxCreateDateForTrade(requestModel.Trade.TradeId)) throw new TransactionAbortedException("The transaction has been aborted."); if (assignSettlement.ValueDate == ((DateTime)requestModel.Trade.ValueDate).ToString("dd'/'MM'/'yyyy")) { CreatePaymentRequest(assignSettlement, requestModel.AuthUserId, requestModel.Trade.TradeId, (DateTime)requestModel.Trade.ValueDate); } else { CreateSwap(assignSettlement, requestModel.AuthUserId, requestModel.Trade.TradeId); } var settlementNoteModel = CreateSettlementNoteModel( assignSettlement, authUser.UserName, requestModel.Trade.TradeId, parentTrade.IsBuy ? 1 / requestModel.Trade.ClientRate : requestModel.Trade.ClientRate); settlementNoteModelList.Add(settlementNoteModel); } } //var emailList = _someService.GetSettlementNotificationEmailList(requestModel.ClientCompanyId); var emailList = new List<string> { authUser.Email }; //temporary await _emailService.SendSettlementEmailsAsync(settlementNoteModelList, emailList); } scope.Complete(); } var result = GetAssignedSettlements(requestModel.Trade.TradeId); return result; } private SettlementNoteModel CreateSettlementNoteModel(AssignSettlementModel assignSettlement, string UserName, string tradeCode, decimal? 
clientRate) { var settlementNoteModel = new SettlementNoteModel { Amount = assignSettlement.Amount, ValueDate = DateTime.Parse(assignSettlement.ValueDate), InstructedDateTime = DateTime.Now, InstructedBy = UserName, TradedCurrency = assignSettlement.TradedCurrency, ParentTradeCode = tradeCode, AccountName = assignSettlement.Account.accountName, SettlementAmount = assignSettlement.Amount * clientRate, AccountCurrency = assignSettlement.Account.currency }; return settlementNoteModel; } public IList<AssignSettlementModel> GetAssignedSettlements(string tradeCode) { var result = new List<AssignSettlementModel>(); result.AddRange(GetPaymetRequests(tradeCode)); result.AddRange(GetSwaps(tradeCode)); return result; } public void DeleteAssignedSettlements(long settlementId) { //In order to delete an Assigned settlement, its link to the action must first be deleted and then the actual Trade2Opi _clientSiteActionService.DeleteAction2AssignedSettlementLink(settlementId); _settlementUow.DeleteAssignedSettlement(settlementId); } #region Private methods #region Swap methods private void CreateSwap(AssignSettlementModel model, int authUserID, string parentTradeCode) { var swapCount = _settlementUow.GetTradeSwapCount(parentTradeCode).SwapCount; var parentTrade = _tradeUow.GetTrade(parentTradeCode).SingleOrDefault(); swapCount++; var deliveryLegTrade = GetFxForwardTrade(model, authUserID, parentTrade, swapCount, SwapType.DeliveryLeg); var reversalLegTrade = GetFxForwardTrade(model, authUserID, parentTrade, swapCount, SwapType.ReversalLeg); deliveryLegTrade = PopulateDeliveryLeg(deliveryLegTrade, model, parentTrade); reversalLegTrade = PopulateReversalLeg(reversalLegTrade, model, parentTrade); var swapID = _settlementUow.Assign(deliveryLegTrade, reversalLegTrade, parentTradeCode, authUserID); _clientSiteActionService.LogActionSwapCreation(authUserID, swapID); } private FxforwardTrade PopulateDeliveryLeg(FxforwardTrade deliveryLegTrade, AssignSettlementModel model, FxforwardTrade parentTrade) { deliveryLegTrade.ValueDate = Convert.ToDateTime(model.ValueDate); if ((bool)deliveryLegTrade.IsRhsmajor) { deliveryLegTrade.ClientRhsamt = model.Amount; deliveryLegTrade.BrokerRhsamt = model.Amount; } else { deliveryLegTrade.ClientLhsamt = model.Amount; deliveryLegTrade.BrokerLhsamt = model.Amount; } deliveryLegTrade.EmirUti = _appSettingService.GetEmirUtiCode() + deliveryLegTrade.Code.Replace("-", string.Empty).Replace("/", ""); return deliveryLegTrade; } private FxforwardTrade PopulateReversalLeg(FxforwardTrade reversalLegTrade, AssignSettlementModel model, FxforwardTrade parentTrade) { reversalLegTrade.IsBuy = !parentTrade.IsBuy; reversalLegTrade.ValueDate = parentTrade.ValueDate; if ((bool)reversalLegTrade.IsRhsmajor) { reversalLegTrade.ClientRhsamt = model.Amount; reversalLegTrade.BrokerRhsamt = model.Amount; } else { reversalLegTrade.ClientLhsamt = model.Amount; reversalLegTrade.BrokerLhsamt = model.Amount; } reversalLegTrade.EmirUti = _appSettingService.GetEmirUtiCode() + reversalLegTrade.Code.Replace("-", string.Empty).Replace("/", ""); return reversalLegTrade; } private FxforwardTrade GetFxForwardTrade(AssignSettlementModel model, int authUserID, FxforwardTrade parentTrade, int swapCount, SwapType swapType) { var pendingStatus = _tradeUow.GetEmirStatus("Pending"); var pendingFxStatus = _tradeUow.GetFxForwardStatus("Pending"); var tradeInstructionMethod = _tradeUow.GetTradeInstructionMethod("Online"); var tradeCode = CodeBuilder.FormatSwapTradeCode(parentTrade.Code, swapCount, swapType); var valueDate = 
DateTime.ParseExact(model.ValueDate, "dd/MM/yyyy", CultureInfo.InvariantCulture); var fxTrade = new FxforwardTrade() { Code = tradeCode, CreatedDate = DateTime.Now, CreatedByAuthUserId = authUserID, ClientCompanyId = model.Account.clientCompanyId, AuthorisedByClientCompanyContactId = GetClientCompanyContactId(authUserID), ContractDate = DateTime.Now, ContractNoteSentToClientDateTime = DateTime.Now, ValueDate = valueDate, IsOrder = false, CurrencyPair = parentTrade.CurrencyPair, IsBuy = parentTrade.IsBuy, Lhsccyid = parentTrade.Lhsccyid, Rhsccyid = parentTrade.Rhsccyid, ClientRate = parentTrade.ClientRate, BrokerRate = parentTrade.BrokerRate, CollateralPerc = 0, UpdatedByAuthUserId = authUserID, IsRhsmajor = parentTrade.IsRhsmajor, ProfitConsolidated = false, Deleted = false, EmirReported = false, IsComplianceSupported = true, IsComplianceRegulated = false, EmirDelegatedReported = false, IsEmirreportable = true, IsApareportable = true, IsArmreportable = true, Armreported = false, EmirstatusId = pendingStatus.Id, ApastatusId = pendingStatus.Id, ArmstatusId = pendingStatus.Id, Reference = model.Reference, TradeInstructionMethodId = tradeInstructionMethod.Id, FxforwardTradeStatusId = pendingFxStatus.Id, FilledByAuthUserId = authUserID, FilledDateTime = DateTime.Now, ClientCompanyOpiid = model.Account.clientCompanyOpiId, BrokerId = parentTrade.BrokerId, }; CalculateAmountsAndProfit(model, fxTrade); return fxTrade; } private int GetClientCompanyContactId(int authUserId) { using (TransactionScope scope = new TransactionScope(TransactionScopeOption.RequiresNew)) { var clientCompanyContactId = _userService .GetApplicationUserByAuthUserId(authUserId) .ClientCompanyContactId; scope.Complete(); return clientCompanyContactId; } } private List<AssignSettlementModel> GetSwaps(string parentTradeCode) { var result = new List<AssignSettlementModel>(); result = _settlementUow.GetTradeSwaps(parentTradeCode) .Select(swap => new AssignSettlementModel { Reference = swap.Key.Reference, Status = swap.Value.ClientSiteActionStatusId, TradedCurrency = swap.Key.IsRhsmajor.HasValue ? swap.Key.IsRhsmajor.Value ? swap.Key.Rhsccy.Code : swap.Key.Lhsccy.Code : null, ValueDate = ((DateTime)swap.Key.ValueDate).ToString("dd/MM/yyyy"), Amount = (decimal)((bool)swap.Key.IsRhsmajor ? swap.Key.ClientRhsamt : swap.Key.ClientLhsamt), Account = MapAccountModel(swap.Key.ClientCompanyOpi), IsWarning = _isTotalSettlementAmountGreaterThanTradeBalance, WarningMessage = _isTotalSettlementAmountGreaterThanTradeBalance ? _warningMessage : string.Empty }).ToList<AssignSettlementModel>(); return result; } private void CalculateAmountsAndProfit(AssignSettlementModel model, FxforwardTrade trade) { if ((bool)trade.IsRhsmajor) { trade.ClientLhsamt = decimal.Round(model.Amount / trade.ClientRate.Value, DecimalPlaces); trade.BrokerLhsamt = decimal.Round(model.Amount / trade.BrokerRate.Value, DecimalPlaces); trade.ClientRhsamt = model.Amount; trade.BrokerRhsamt = model.Amount; trade.Profit = trade.IsBuy ? trade.BrokerLhsamt - trade.ClientLhsamt : trade.ClientLhsamt - trade.BrokerLhsamt; } else { trade.ClientRhsamt = decimal.Round(model.Amount * trade.ClientRate.Value, DecimalPlaces); trade.BrokerRhsamt = decimal.Round(model.Amount * trade.BrokerRate.Value, DecimalPlaces); trade.BrokerLhsamt = model.Amount; trade.ClientLhsamt = model.Amount; trade.Profit = trade.IsBuy ? 
trade.ClientRhsamt - trade.BrokerRhsamt : trade.BrokerRhsamt - trade.ClientRhsamt; } //trade.RemainingClientLhsamt = trade.ClientLhsamt; //trade.RemainingClientRhsamt = trade.ClientRhsamt; } private AccountModel MapAccountModel(ClientCompanyOpi input) { return new AccountModel() { accountName = input.AccountName, accountNumber = input.AccountNumber, clientCompanyId = input.ClientCompanyId, clientCompanyOpiId = input.Id, currency = input.Currency.Code }; } #endregion #region TradeOpi's private void CreatePaymentRequest(AssignSettlementModel model, int authUserID, string tradeCode, DateTime valueDate) { var trade2opi = new FxforwardTrade2Opi { Amount = model.Amount, Details = model.Reference, ClientCompanyOpiid = model.Account.clientCompanyOpiId, CreatedByAuthUserId = authUserID, CreatedDateTime = DateTime.UtcNow, FxforwardTradeCode = tradeCode, TradeValueDate = valueDate }; _settlementUow.AddTrade2Opi(trade2opi); _clientSiteActionService.LogActionOpiPayment(authUserID, trade2opi.Id, model.Account.accountName, trade2opi.Amount); } private List<AssignSettlementModel> GetPaymetRequests(string parentTradeCode) { var result = _settlementUow.GetTradeOpis(parentTradeCode) .Select(tradeOpis => new AssignSettlementModel { SettlementId = tradeOpis.Id, Reference = tradeOpis.Details, Status = (int)SettlementStatus.Accepted, // accepted status TradedCurrency = tradeOpis.FxforwardTradeCodeNavigation.IsRhsmajor.HasValue ? tradeOpis.FxforwardTradeCodeNavigation.IsRhsmajor.Value ? tradeOpis.FxforwardTradeCodeNavigation.Rhsccy.Code : tradeOpis.FxforwardTradeCodeNavigation.Lhsccy.Code : null, ValueDate = (tradeOpis.TradeValueDate).ToString("dd/MM/yyyy"), Amount = tradeOpis.Amount, Account = MapAccountModel(tradeOpis.ClientCompanyOpi), IsWarning = _isTotalSettlementAmountGreaterThanTradeBalance, WarningMessage = _isTotalSettlementAmountGreaterThanTradeBalance ? 
_warningMessage : string.Empty }).ToList(); return result; } #endregion private PaymentOutResponseModel ProcessResult(DataTable result) { var paymentOutResponseModel = new PaymentOutResponseModel(); if (result != null && result.Rows != null && result.Rows.Count > 0) { paymentOutResponseModel.Code = result.Rows[0]["Code"].ToString(); paymentOutResponseModel.IsSuccessful = true; } else { paymentOutResponseModel.Code = null; paymentOutResponseModel.IsSuccessful = false; } return paymentOutResponseModel; } //private Payment CreatePayment(PaymentOutModel model, ClientCompany clientCompany) //{ // PaymentType paymentType = _paymentUoW.GetPaymentType("OUT").SingleOrDefault(); // paymentType.DefaultSendToSwift = false; // Currency currency = _currencyUoW.GetCurrency(model.Currency).SingleOrDefault(); // Payment payment = new Payment // { // PaymentType = paymentType, // Amount = model.Amount, // Currency = currency, // ValueDate = DateTime.ParseExact(model.ValueDate, "dd/MM/yyyy", System.Globalization.CultureInfo.InvariantCulture), // NotifyClient = false, // Comments = "FX SETTLEMENT for Trade code: " + model.TradeCode, // Reference = model.Reference, // ClientCompany = clientCompany // }; // return payment; //} private PaymentOutInformationModel GetPaymentmentOutInformation(string paymentCode, ClientCompanyOpitransaction opiTransaction) { var paymentInformation = _paymentUoW.GetPayment(paymentCode) .Select(payment => new PaymentOutInformationModel { PaymentCode = payment.Code, PaymentType = Enum.GetName(typeof(PaymentTypes), payment.PaymentTypeId), ValueDate = payment.ValueDate, CreatedDateTime = payment.CreatedDate, Amount = payment.Amount.Value, Reference = !string.IsNullOrWhiteSpace(payment.Reference) ? payment.Reference : string.Empty, Currency = payment.Currency.Code, OpiDescription = opiTransaction.ClientCompanyOpi.Description, OpiAccountName = opiTransaction.ClientCompanyOpi.AccountName, OpiSortCode = opiTransaction.ClientCompanyOpi.SortCode, OpiAccountNumber = opiTransaction.ClientCompanyOpi.AccountNumber, OpiBankName = opiTransaction.ClientCompanyOpi.BankName, OpiSwiftCode = opiTransaction.ClientCompanyOpi.SwiftCode, OpiReference = opiTransaction.ClientCompanyOpi.Reference, OpiIban = opiTransaction.ClientCompanyOpi.Iban }).SingleOrDefault(); if (paymentInformation == null) throw new PaymentNotFoundException($"Payment with code {paymentCode} does not exist"); return paymentInformation; } private PaymentInformationModel GetPaymentmentInInformation(string paymentCode) { var paymentInformation = _paymentUoW.GetPayment(paymentCode) .Select(payment => new PaymentInformationModel { PaymentCode = payment.Code, PaymentType = Enum.GetName(typeof(PaymentTypes), payment.PaymentTypeId), ValueDate = payment.ValueDate, CreatedDateTime = payment.CreatedDate, Amount = payment.Amount.Value, Reference = !string.IsNullOrWhiteSpace(payment.Reference) ? 
payment.Reference : string.Empty, Currency = payment.Currency.Code }).SingleOrDefault(); if (paymentInformation == null) throw new PaymentNotFoundException($"Payment with code {paymentCode} does not exist"); return paymentInformation; } private ClientCompanyOpitransaction GetOpiTransaction(string paymentCode) { var opiTransaction = _paymentUoW.GetClientCompanyOpiTransaction(paymentCode).FirstOrDefault(); if (opiTransaction == null) throw new ClientCompanyOpiTransactionNotFoundException($"Opi transaction for payment code {paymentCode} does not exist"); return opiTransaction; } #endregion #region Dispose protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { //_paymentUoW?.Dispose(); //_tradeUow?.Dispose(); //_currencyUoW?.Dispose(); //_clientCompanyUow?.Dispose(); //_clientCompanyAccountsUoW?.Dispose(); //_settlementUow?.Dispose(); //_userService?.Dispose(); //_currencyService?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Fix/FixQuoteResponseModel.cs using System; namespace Argentex.Core.Service.Models.Fix { public class FixQuoteResponseModel { public int QuoteIndex { get; set; } public string QuoteId { get; set; } public string QuoteReqId { get; set; } public decimal BrokerRate { get; set; } public DateTime? ExpirationDateTime { get; set; } public string ErrorMessage { get; set; } //Not received from FIX, calculated internally public decimal ClientRate { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Trade/ITradeService.cs using Argentex.Core.Service.Models.Fix; using Argentex.Core.Service.Models.Trade; using Argentex.Core.Service.Models.Trades; using System; using System.Collections.Generic; using System.Threading.Tasks; namespace Argentex.Core.Service.Trade { public interface ITradeService : IDisposable { IList<TradeModel> GetUnsettledTrades(int clientCompanyId); IEnumerable<string> GetCurrencyCodes(); IEnumerable<string> GetAllowedCurrencyPairs(); Task<FxForwardTradeInformationModel> GetTradeNote(string tradeCode); FxForwardTradeInformationModel GetTradeInformation(string tradeCode); Task<IList<FixQuoteResponseModel>> GetQuotesAsync(QuoteRequestModel quoteRequest); Task<IList<DealResponseModel>> Deal(DealRequestModel dealRequest); bool SetTradeDefaultOPI(string tradeCode, int clientCompanyId, bool setAsDefault); IList<TradeModel> GetClosedTrades(int clientCompanyId); decimal GetTradeBalance(int clientCompanyId, string tradeCode); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/_2fapages.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class _2fapages { public int Id { get; set; } public string PagePath { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Currency.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Currency { public Currency() { BankAccountCurrencyBalance = new HashSet<BankAccountCurrencyBalance>(); BankAccountCurrencyBalanceHistory = new HashSet<BankAccountCurrencyBalanceHistory>(); BankAccountCurrencyDetails = new HashSet<BankAccountCurrencyDetails>(); BankAccountTransaction = new HashSet<BankAccountTransaction>(); ClientCompanyComplianceCurrency = new HashSet<ClientCompanyComplianceCurrency>(); ClientCompanyCurrencyDefaultOpi = new HashSet<ClientCompanyCurrencyDefaultOpi>(); ClientCompanyOnlineDetailsSkewCurrency1 = new 
HashSet<ClientCompanyOnlineDetailsSkew>(); ClientCompanyOnlineDetailsSkewCurrency2 = new HashSet<ClientCompanyOnlineDetailsSkew>(); ClientCompanyOnlineSpreadAdjustmentCurrency1 = new HashSet<ClientCompanyOnlineSpreadAdjustment>(); ClientCompanyOnlineSpreadAdjustmentCurrency2 = new HashSet<ClientCompanyOnlineSpreadAdjustment>(); ClientCompanyOpi = new HashSet<ClientCompanyOpi>(); ClientCompanyOpitransaction = new HashSet<ClientCompanyOpitransaction>(); ClientCompanyVirtualAccountCurrencyBalance = new HashSet<ClientCompanyVirtualAccountCurrencyBalance>(); ClientCompanyVirtualAccountCurrencyBalanceHistory = new HashSet<ClientCompanyVirtualAccountCurrencyBalanceHistory>(); FxforwardTradeLhsccy = new HashSet<FxforwardTrade>(); FxforwardTradeRhsccy = new HashSet<FxforwardTrade>(); FxoptionLhsccy = new HashSet<Fxoption>(); FxoptionRhsccy = new HashSet<Fxoption>(); Payment = new HashSet<Payment>(); SwiftvalidationCurrencyCountry = new HashSet<SwiftvalidationCurrencyCountry>(); SwiftvalidationCurrencyMessageField = new HashSet<SwiftvalidationCurrencyMessageField>(); VirtualAccountTransaction = new HashSet<VirtualAccountTransaction>(); } public int Id { get; set; } public string Code { get; set; } public string Description { get; set; } public string SwiftAmountFormat { get; set; } public byte[] UpdateTimeStamp { get; set; } public int CreatedByAuthUserId { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdateDateTime { get; set; } public AuthUser CreatedByAuthUser { get; set; } public AuthUser UpdatedByAuthUser { get; set; } public ICollection<BankAccountCurrencyBalance> BankAccountCurrencyBalance { get; set; } public ICollection<BankAccountCurrencyBalanceHistory> BankAccountCurrencyBalanceHistory { get; set; } public ICollection<BankAccountCurrencyDetails> BankAccountCurrencyDetails { get; set; } public ICollection<BankAccountTransaction> BankAccountTransaction { get; set; } public ICollection<ClientCompanyComplianceCurrency> ClientCompanyComplianceCurrency { get; set; } public ICollection<ClientCompanyCurrencyDefaultOpi> ClientCompanyCurrencyDefaultOpi { get; set; } public ICollection<ClientCompanyOnlineDetailsSkew> ClientCompanyOnlineDetailsSkewCurrency1 { get; set; } public ICollection<ClientCompanyOnlineDetailsSkew> ClientCompanyOnlineDetailsSkewCurrency2 { get; set; } public ICollection<ClientCompanyOnlineSpreadAdjustment> ClientCompanyOnlineSpreadAdjustmentCurrency1 { get; set; } public ICollection<ClientCompanyOnlineSpreadAdjustment> ClientCompanyOnlineSpreadAdjustmentCurrency2 { get; set; } public ICollection<ClientCompanyOpi> ClientCompanyOpi { get; set; } public ICollection<ClientCompanyOpitransaction> ClientCompanyOpitransaction { get; set; } public ICollection<ClientCompanyVirtualAccountCurrencyBalance> ClientCompanyVirtualAccountCurrencyBalance { get; set; } public ICollection<ClientCompanyVirtualAccountCurrencyBalanceHistory> ClientCompanyVirtualAccountCurrencyBalanceHistory { get; set; } public ICollection<FxforwardTrade> FxforwardTradeLhsccy { get; set; } public ICollection<FxforwardTrade> FxforwardTradeRhsccy { get; set; } public ICollection<Fxoption> FxoptionLhsccy { get; set; } public ICollection<Fxoption> FxoptionRhsccy { get; set; } public ICollection<Payment> Payment { get; set; } public ICollection<SwiftvalidationCurrencyCountry> SwiftvalidationCurrencyCountry { get; set; } public ICollection<SwiftvalidationCurrencyMessageField> SwiftvalidationCurrencyMessageField { get; set; } public ICollection<VirtualAccountTransaction> VirtualAccountTransaction 
{ get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Trades/AssignSettlementModel.cs //using System; //namespace Argentex.Core.Service.Models.Trade //{ // public class AssignSettlementModel // { // public string TradeCode { get; set; } // public DateTime TradeValueDate { get; set; } // public string SellCcy { get; set; } // public string BuyCcy { get; set; } // public string MajorCcy { get; set; } // public DateTime ValueDate { get; set; } // public decimal? ClientRate { get; set; } // public decimal? ClientBuyAmount { get; set; } // public decimal? ClientSellAmount { get; set; } // public string Reference { get; set; } // public string Status { get; set; } // public int OpiID { get; set; } // } //} <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/ClientSiteActionStatuses.cs namespace Argentex.Core.Service.Enums { public enum ClientSiteActionStatuses { New = 1, Requested = 2, Accepted = 3, Declined = 4, Processed = 5, Pending = 6, Rejected = 7 } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/Models/SmsModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Sms.Models { public class SmsModel { public string PhoneNumber { get; set; } public string Message { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanyOnlineDetailsSkew.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanyOnlineDetailsSkew { public long LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int ClientCompanyOnlineDetailsId { get; set; } public int Currency1Id { get; set; } public int Currency2Id { get; set; } public bool IsBuy { get; set; } public int Spread { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/Currencies/CurrencyControllerTests.cs using System.Net; using Argentex.Core.Api.Controllers; using Argentex.Core.Api.Controllers.Currencies; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Exceptions; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using Xunit; namespace Argentex.Core.Api.Tests.Currencies { public class CurrencyControllerTests { [Fact] public void Given_And_Exception_Is_Caught_A_Bad_Request_Should_Be_Returned() { // Given var currencyPair = "GBPEUR"; var currencyServiceMock = new Mock<ICurrencyService>(); var loggerMock = new Mock<ILogWrapper>(); currencyServiceMock.Setup(x => x.GetCurrencyPairRate(It.IsAny<string>())) .Throws(new CurrencyPairPricingNotFoundException()); var expectedStatusCode = HttpStatusCode.BadRequest; var currencyController = new CurrencyController(currencyServiceMock.Object, loggerMock.Object); // When var result = currencyController.GetCurrencyPairRate(currencyPair); var badRequest = result as BadRequestObjectResult; // Then Assert.NotNull(badRequest); Assert.Equal((int) expectedStatusCode, badRequest.StatusCode); } [Fact] public void Given_The_Rate_Is_Returned_From_The_Service_An_Ok_Result_Should_Be_Returned() { // Given var currencyPair = "GBPEUR"; var rate = 1.5; var currencyServiceMock = new Mock<ICurrencyService>(); var loggerMock = new Mock<ILogWrapper>(); currencyServiceMock.Setup(x => x.GetCurrencyPairRate(It.IsAny<string>())) .Returns(rate); var expectedStatusCode = HttpStatusCode.OK; var 
currencyController = new CurrencyController(currencyServiceMock.Object, loggerMock.Object); // When var result = currencyController.GetCurrencyPairRate(currencyPair); var badRequest = result as OkObjectResult; // Then Assert.NotNull(badRequest); Assert.Equal((int)expectedStatusCode, badRequest.StatusCode); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/UserModel.cs using System; using System.Collections.Generic; namespace Argentex.Core.Service.Models.Identity { public class UserModel { public long UserId { get; set; } public string Name { get; set; } public string Email { get; set; } public string FullName { get; set; } public ICollection<string> Roles { get; set; } public string Forename { get; set; } public string Surname { get; set; } public int ClientCompanyId { get; internal set; } public int AuthUserId { get; internal set; } public DateTime PasswordLastChanged { get; set; } public bool IsAdmin { get; set; } public DateTime? LastLoginDate { get; set; } public bool IsSuccesfullLogin { get; set; } public bool IsOnline { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Helpers/IConfigWrapper.cs using AutoMapper.Configuration; namespace Argentex.Core.Service.Helpers { public interface IConfigWrapper { string Get(string key); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/SmsSender/Providers/TextMagic/TextMagicService.cs using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Sms.Models; using System; using TextmagicRest; namespace Argentex.Core.Service.Sms.SmsSender { public class TextMagicService : ITextMagicService { #region Constructor & Dependencies private readonly IConfigWrapper _config; public TextMagicService(IConfigWrapper config) { _config = config; } #endregion #region Properties private bool _disposed; private TextMagicConfigModel _textMagicConfig { get { return new TextMagicConfigModel() { UserName = _config.Get("Sms:TextMagic:UserName"), Token = _config.Get("Sms:TextMagic:Token") }; } } #endregion /// <summary> /// Sending Sms Message with TextMagic service provider /// </summary> /// <param name="smsModel">Phone Number and Message</param> /// <returns>If the message was sent successfuly</returns> public bool SendMessage(SmsModel smsModel) { var textMagicConfig = _textMagicConfig; var client = new Client(textMagicConfig.UserName, textMagicConfig.Token); var link = client.SendMessage(smsModel.Message, smsModel.PhoneNumber); return link.Success; } #region Dispose protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { } } _disposed = true; } public void Dispose() { Dispose(true); } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FXDB1Context.cs using System; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Metadata; namespace Argentex.Core.DataAccess.Entities { public partial class FXDB1Context : DbContext { public virtual DbSet<_2fapages> _2fapages { get; set; } public virtual DbSet<ActivityTabHourDayRange> ActivityTabHourDayRange { get; set; } public virtual DbSet<ActivityTabUserData> ActivityTabUserData { get; set; } public virtual DbSet<Amlrisk> Amlrisk { get; set; } public virtual DbSet<AppSetting> AppSetting { get; set; } public virtual DbSet<AppUser> AppUser { get; set; } public virtual DbSet<AppUserEmailAlternative> AppUserEmailAlternative { get; set; } public virtual DbSet<AppUserType> AppUserType { get; set; } public virtual DbSet<ArgentexAccount> ArgentexAccount { get; set; } public virtual 
DbSet<ArmfxForwardTradeStatusesHistory> ArmfxForwardTradeStatusesHistory { get; set; } public virtual DbSet<Armreport> Armreport { get; set; } public virtual DbSet<ArmreportField> ArmreportField { get; set; } public virtual DbSet<ArmreportFxforwardTrade> ArmreportFxforwardTrade { get; set; } public virtual DbSet<ArmreportOutgoingFile> ArmreportOutgoingFile { get; set; } public virtual DbSet<ArmreportOutgoingFileContent> ArmreportOutgoingFileContent { get; set; } public virtual DbSet<AuthApplication> AuthApplication { get; set; } public virtual DbSet<AuthPermission> AuthPermission { get; set; } public virtual DbSet<AuthRole> AuthRole { get; set; } public virtual DbSet<AuthRolePermission> AuthRolePermission { get; set; } public virtual DbSet<AuthUser> AuthUser { get; set; } public virtual DbSet<AuthUserPasswordToken> AuthUserPasswordToken { get; set; } public virtual DbSet<AuthUserPreviousPasswords> AuthUserPreviousPasswords { get; set; } public virtual DbSet<AuthUserRole> AuthUserRole { get; set; } public virtual DbSet<BankAccount> BankAccount { get; set; } public virtual DbSet<BankAccountCurrencyBalance> BankAccountCurrencyBalance { get; set; } public virtual DbSet<BankAccountCurrencyBalanceHistory> BankAccountCurrencyBalanceHistory { get; set; } public virtual DbSet<BankAccountCurrencyDetails> BankAccountCurrencyDetails { get; set; } public virtual DbSet<BankAccountTransaction> BankAccountTransaction { get; set; } public virtual DbSet<Breach> Breach { get; set; } public virtual DbSet<BreachInvoice> BreachInvoice { get; set; } public virtual DbSet<BreachLevel> BreachLevel { get; set; } public virtual DbSet<BreachType> BreachType { get; set; } public virtual DbSet<Broker> Broker { get; set; } public virtual DbSet<CassRecs> CassRecs { get; set; } public virtual DbSet<CassRecsPaymentFile> CassRecsPaymentFile { get; set; } public virtual DbSet<CassRecsStatementFile> CassRecsStatementFile { get; set; } public virtual DbSet<ClearingCodePrefix> ClearingCodePrefix { get; set; } public virtual DbSet<ClientCompany> ClientCompany { get; set; } public virtual DbSet<ClientCompanyActivityReport> ClientCompanyActivityReport { get; set; } public virtual DbSet<ClientCompanyCategory> ClientCompanyCategory { get; set; } public virtual DbSet<ClientCompanyCompliance> ClientCompanyCompliance { get; set; } public virtual DbSet<ClientCompanyComplianceCorporateSector> ClientCompanyComplianceCorporateSector { get; set; } public virtual DbSet<ClientCompanyComplianceCurrency> ClientCompanyComplianceCurrency { get; set; } public virtual DbSet<ClientCompanyComplianceNote> ClientCompanyComplianceNote { get; set; } public virtual DbSet<ClientCompanyContact> ClientCompanyContact { get; set; } public virtual DbSet<ClientCompanyContactCategory> ClientCompanyContactCategory { get; set; } public virtual DbSet<ClientCompanyCreditType> ClientCompanyCreditType { get; set; } public virtual DbSet<ClientCompanyCurrencyDefaultOpi> ClientCompanyCurrencyDefaultOpi { get; set; } public virtual DbSet<ClientCompanyIbrelationship> ClientCompanyIbrelationship { get; set; } public virtual DbSet<ClientCompanyIndustrySector> ClientCompanyIndustrySector { get; set; } public virtual DbSet<ClientCompanyLinkedGroup> ClientCompanyLinkedGroup { get; set; } public virtual DbSet<ClientCompanyNote> ClientCompanyNote { get; set; } public virtual DbSet<ClientCompanyOnlineDetails> ClientCompanyOnlineDetails { get; set; } public virtual DbSet<ClientCompanyOnlineDetailsSkew> ClientCompanyOnlineDetailsSkew { get; set; } public virtual 
DbSet<ClientCompanyOnlineSpreadAdjustment> ClientCompanyOnlineSpreadAdjustment { get; set; } public virtual DbSet<ClientCompanyOpi> ClientCompanyOpi { get; set; } public virtual DbSet<ClientCompanyOpiduplicate> ClientCompanyOpiduplicate { get; set; } public virtual DbSet<ClientCompanyOpitransaction> ClientCompanyOpitransaction { get; set; } public virtual DbSet<ClientCompanyOptionCount> ClientCompanyOptionCount { get; set; } public virtual DbSet<ClientCompanyPipeline> ClientCompanyPipeline { get; set; } public virtual DbSet<ClientCompanySalesAppUser> ClientCompanySalesAppUser { get; set; } public virtual DbSet<ClientCompanySalesRegion> ClientCompanySalesRegion { get; set; } public virtual DbSet<ClientCompanyStatus> ClientCompanyStatus { get; set; } public virtual DbSet<ClientCompanyTradeCount> ClientCompanyTradeCount { get; set; } public virtual DbSet<ClientCompanyType> ClientCompanyType { get; set; } public virtual DbSet<ClientCompanyVirtualAccount> ClientCompanyVirtualAccount { get; set; } public virtual DbSet<ClientCompanyVirtualAccountCurrencyBalance> ClientCompanyVirtualAccountCurrencyBalance { get; set; } public virtual DbSet<ClientCompanyVirtualAccountCurrencyBalanceHistory> ClientCompanyVirtualAccountCurrencyBalanceHistory { get; set; } public virtual DbSet<ClientSiteAction> ClientSiteAction { get; set; } public virtual DbSet<ClientSiteAction2ClientCompanyOpi> ClientSiteAction2ClientCompanyOpi { get; set; } public virtual DbSet<ClientSiteAction2FixFxforwardTrade> ClientSiteAction2FixFxforwardTrade { get; set; } public virtual DbSet<ClientSiteAction2FxforwardTrade2Opi> ClientSiteAction2FxforwardTrade2Opi { get; set; } public virtual DbSet<ClientSiteAction2Fxswap> ClientSiteAction2Fxswap { get; set; } public virtual DbSet<ClientSiteActionStatus> ClientSiteActionStatus { get; set; } public virtual DbSet<ClientSiteActionType> ClientSiteActionType { get; set; } public virtual DbSet<Commission> Commission { get; set; } public virtual DbSet<CommissionType> CommissionType { get; set; } public virtual DbSet<ComplianceClassification> ComplianceClassification { get; set; } public virtual DbSet<ComplianceClassificationFile> ComplianceClassificationFile { get; set; } public virtual DbSet<ComplianceCorporateSectorFinancial> ComplianceCorporateSectorFinancial { get; set; } public virtual DbSet<ComplianceCorporateSectorNonFinancial> ComplianceCorporateSectorNonFinancial { get; set; } public virtual DbSet<ComplianceIsincurrencyValueDate> ComplianceIsincurrencyValueDate { get; set; } public virtual DbSet<ComplianceNature> ComplianceNature { get; set; } public virtual DbSet<ComplianceQuestionnaire> ComplianceQuestionnaire { get; set; } public virtual DbSet<ComplianceQuestionnaireAnswer> ComplianceQuestionnaireAnswer { get; set; } public virtual DbSet<ComplianceQuestionnaireQuestion> ComplianceQuestionnaireQuestion { get; set; } public virtual DbSet<ComplianceReason> ComplianceReason { get; set; } public virtual DbSet<ComplianceTradeReason> ComplianceTradeReason { get; set; } public virtual DbSet<ContactCategory> ContactCategory { get; set; } public virtual DbSet<Country> Country { get; set; } public virtual DbSet<CountryClearingCodePrefix> CountryClearingCodePrefix { get; set; } public virtual DbSet<CountryGroup> CountryGroup { get; set; } public virtual DbSet<Currency> Currency { get; set; } public virtual DbSet<CurrencyFxrate> CurrencyFxrate { get; set; } public virtual DbSet<CurrencyPairPricing> CurrencyPairPricing { get; set; } public virtual DbSet<CurrencyPairPriceHistory> 
CurrencyPairPriceHistory { get; set; } public virtual DbSet<CurrencyPairValidation> CurrencyPairValidation { get; set; } public virtual DbSet<Emirreport> Emirreport { get; set; } public virtual DbSet<EmirreportField> EmirreportField { get; set; } public virtual DbSet<EmirreportFxforwardTrade> EmirreportFxforwardTrade { get; set; } public virtual DbSet<EmirreportIncomingFile> EmirreportIncomingFile { get; set; } public virtual DbSet<EmirreportIncomingFileContent> EmirreportIncomingFileContent { get; set; } public virtual DbSet<EmirreportOutgoingFile> EmirreportOutgoingFile { get; set; } public virtual DbSet<EmirreportOutgoingFileContent> EmirreportOutgoingFileContent { get; set; } public virtual DbSet<EmirreportResponseCode> EmirreportResponseCode { get; set; } public virtual DbSet<EmirreportTradeResponseError> EmirreportTradeResponseError { get; set; } public virtual DbSet<EmirreportType> EmirreportType { get; set; } public virtual DbSet<Emirstatus> Emirstatus { get; set; } public virtual DbSet<ExpectedFrequency> ExpectedFrequency { get; set; } public virtual DbSet<FixApareportField> FixApareportField { get; set; } public virtual DbSet<FixApatradeCapture> FixApatradeCapture { get; set; } public virtual DbSet<FixApatradeMessage> FixApatradeMessage { get; set; } public virtual DbSet<FixFxforwardTradeOrder> FixFxforwardTradeOrder { get; set; } public virtual DbSet<FixQuote> FixQuote { get; set; } public virtual DbSet<FixQuoteCancelled> FixQuoteCancelled { get; set; } public virtual DbSet<FixTradeMessage> FixTradeMessage { get; set; } public virtual DbSet<FxforwardTrade> FxforwardTrade { get; set; } public virtual DbSet<FxforwardTrade2Opi> FxforwardTrade2Opi { get; set; } public virtual DbSet<FxforwardTradeInvoice> FxforwardTradeInvoice { get; set; } public virtual DbSet<FxforwardTradeStatus> FxforwardTradeStatus { get; set; } public virtual DbSet<FxforwardTradeSwapCount> FxforwardTradeSwapCount { get; set; } public virtual DbSet<Fxoption> Fxoption { get; set; } public virtual DbSet<FxoptionOutputs> FxoptionOutputs { get; set; } public virtual DbSet<FxoptionOutputsTemplate> FxoptionOutputsTemplate { get; set; } public virtual DbSet<FxoptionSettlements> FxoptionSettlements { get; set; } public virtual DbSet<FxoptionSettlementsTemplate> FxoptionSettlementsTemplate { get; set; } public virtual DbSet<FxoptionStatus> FxoptionStatus { get; set; } public virtual DbSet<FxoptionType> FxoptionType { get; set; } public virtual DbSet<Fxswap> Fxswap { get; set; } public virtual DbSet<GlobalSearchScope> GlobalSearchScope { get; set; } public virtual DbSet<IntroducingBroker> IntroducingBroker { get; set; } public virtual DbSet<LastWorkingDay> LastWorkingDay { get; set; } public virtual DbSet<LogAuthUser> LogAuthUser { get; set; } public virtual DbSet<LogBankAccountCurrencyDetails> LogBankAccountCurrencyDetails { get; set; } public virtual DbSet<LogBreach> LogBreach { get; set; } public virtual DbSet<LogBreachInvoice> LogBreachInvoice { get; set; } public virtual DbSet<LogCassRecs> LogCassRecs { get; set; } public virtual DbSet<LogCassRecsPaymentFile> LogCassRecsPaymentFile { get; set; } public virtual DbSet<LogCassRecsStatementFile> LogCassRecsStatementFile { get; set; } public virtual DbSet<LogClientCompanyCompliance> LogClientCompanyCompliance { get; set; } public virtual DbSet<LogClientCompanyComplianceCorporateSector> LogClientCompanyComplianceCorporateSector { get; set; } public virtual DbSet<LogClientCompanyComplianceNote> LogClientCompanyComplianceNote { get; set; } public virtual 
DbSet<LogClientCompanyContact> LogClientCompanyContact { get; set; } public virtual DbSet<LogClientCompanyContactCategory> LogClientCompanyContactCategory { get; set; } public virtual DbSet<LogClientCompanyLinkedGroup> LogClientCompanyLinkedGroup { get; set; } public virtual DbSet<LogClientCompanyOnlineDetails> LogClientCompanyOnlineDetails { get; set; } public virtual DbSet<LogClientCompanyOnlineDetailsSkew> LogClientCompanyOnlineDetailsSkew { get; set; } public virtual DbSet<LogClientCompanyOpi> LogClientCompanyOpi { get; set; } public virtual DbSet<LogClientCompanyOpiduplicate> LogClientCompanyOpiduplicate { get; set; } public virtual DbSet<LogClientCompanySalesAppUser> LogClientCompanySalesAppUser { get; set; } public virtual DbSet<LogComplianceClassificationFile> LogComplianceClassificationFile { get; set; } public virtual DbSet<LogComplianceIsincurrencyValueDate> LogComplianceIsincurrencyValueDate { get; set; } public virtual DbSet<LogComplianceQuestionnaire> LogComplianceQuestionnaire { get; set; } public virtual DbSet<LogCurrency> LogCurrency { get; set; } public virtual DbSet<LogCurrencyPairPriceHistory> LogCurrencyPairPriceHistory { get; set; } public virtual DbSet<LogFxforwardTrade> LogFxforwardTrade { get; set; } public virtual DbSet<LogFxforwardTradeCcmlimitOverride> LogFxforwardTradeCcmlimitOverride { get; set; } public virtual DbSet<LogFxforwardTradeInvoice> LogFxforwardTradeInvoice { get; set; } public virtual DbSet<LogFxoption> LogFxoption { get; set; } public virtual DbSet<LogPayment> LogPayment { get; set; } public virtual DbSet<LogSwiftincomingFile> LogSwiftincomingFile { get; set; } public virtual DbSet<LogSwiftincomingFileStatement> LogSwiftincomingFileStatement { get; set; } public virtual DbSet<LogSwiftincomingMatchedAccount> LogSwiftincomingMatchedAccount { get; set; } public virtual DbSet<LogSwiftintegrationService> LogSwiftintegrationService { get; set; } public virtual DbSet<NavMenuItem> NavMenuItem { get; set; } public virtual DbSet<NavMenuSection> NavMenuSection { get; set; } public virtual DbSet<Payment> Payment { get; set; } public virtual DbSet<PaymentRecReason> PaymentRecReason { get; set; } public virtual DbSet<PaymentSwiftoutgoingStatus> PaymentSwiftoutgoingStatus { get; set; } public virtual DbSet<PaymentSwiftoutgoingStatusTransitions> PaymentSwiftoutgoingStatusTransitions { get; set; } public virtual DbSet<PaymentType> PaymentType { get; set; } public virtual DbSet<PipelineAction> PipelineAction { get; set; } public virtual DbSet<PipelineActionType> PipelineActionType { get; set; } public virtual DbSet<ReportProcessedLog> ReportProcessedLog { get; set; } public virtual DbSet<ReportQueueToProcess> ReportQueueToProcess { get; set; } public virtual DbSet<ReportStatus> ReportStatus { get; set; } public virtual DbSet<ScheduledReportDummyPluginTable> ScheduledReportDummyPluginTable { get; set; } public virtual DbSet<SchemaVersions> SchemaVersions { get; set; } public virtual DbSet<SuspiciousActivityReport> SuspiciousActivityReport { get; set; } public virtual DbSet<SwiftincomingFile> SwiftincomingFile { get; set; } public virtual DbSet<SwiftincomingFileProcessingStatus> SwiftincomingFileProcessingStatus { get; set; } public virtual DbSet<SwiftincomingFileStatement> SwiftincomingFileStatement { get; set; } public virtual DbSet<SwiftincomingFileType> SwiftincomingFileType { get; set; } public virtual DbSet<SwiftincomingMatchedAccount> SwiftincomingMatchedAccount { get; set; } public virtual DbSet<SwiftintegrationService> SwiftintegrationService { get; set; } 
public virtual DbSet<Swiftmessage> Swiftmessage { get; set; } public virtual DbSet<SwiftvalidationCurrencyCountry> SwiftvalidationCurrencyCountry { get; set; } public virtual DbSet<SwiftvalidationCurrencyMessageField> SwiftvalidationCurrencyMessageField { get; set; } public virtual DbSet<SwiftvalidationField> SwiftvalidationField { get; set; } public virtual DbSet<SwiftvalidationFieldComponent> SwiftvalidationFieldComponent { get; set; } public virtual DbSet<SwiftvalidationFieldFieldComponent> SwiftvalidationFieldFieldComponent { get; set; } public virtual DbSet<SwiftvalidationMessage> SwiftvalidationMessage { get; set; } public virtual DbSet<SwiftvalidationMessageField> SwiftvalidationMessageField { get; set; } public virtual DbSet<SwiftvalidationOption> SwiftvalidationOption { get; set; } public virtual DbSet<SwiftvalidationOptionField> SwiftvalidationOptionField { get; set; } public virtual DbSet<SystemEmailSenderAddress> SystemEmailSenderAddress { get; set; } public virtual DbSet<TelephoneCountryCode> TelephoneCountryCode { get; set; } public virtual DbSet<TradeInstructionMethod> TradeInstructionMethod { get; set; } public virtual DbSet<TransactionCommit> TransactionCommit { get; set; } public virtual DbSet<UserAuditLogChanges> UserAuditLogChanges { get; set; } public virtual DbSet<UserAuditLogPageViews> UserAuditLogPageViews { get; set; } public virtual DbSet<UserChangeRequest> UserChangeRequest { get; set; } public virtual DbSet<UserChangeRequestApproval> UserChangeRequestApproval { get; set; } public virtual DbSet<VirtualAccountTransaction> VirtualAccountTransaction { get; set; } public virtual DbSet<VirtualAccountType> VirtualAccountType { get; set; } public virtual DbSet<VirtualAccountTypeBankAccount> VirtualAccountTypeBankAccount { get; set; } public virtual DbSet<AppUserNotification> AppUserNotification { get; set; } // Unable to generate entity type for table 'dbo.LogClientCompanyIndustrySector'. Please see the warning messages. // Unable to generate entity type for table 'dbo.LogClientCompanyContactCategory'. Please see the warning messages. // Unable to generate entity type for table 'dbo.LogClientCompany'. Please see the warning messages. // Unable to generate entity type for table 'dbo.LogClientCompanyComplianceCurrency'. Please see the warning messages. // Unable to generate entity type for table 'dbo.LogClientCompanyNote'. Please see the warning messages. // Unable to generate entity type for table 'dbo.AuthLoginEvent'. Please see the warning messages. // Unable to generate entity type for table 'dbo.LogClientCompanyPipeline'. Please see the warning messages. // Unable to generate entity type for table 'dbo.AppUserCommission'. Please see the warning messages. // Unable to generate entity type for table 'dbo.PaymentOutOverride'. Please see the warning messages. // Unable to generate entity type for table 'dbo.ClientCompanyCommission'. Please see the warning messages. // Unable to generate entity type for table 'dbo.LogAppUser'. Please see the warning messages. 
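// NOTE (added comment): FXDB1Context appears to be a database-first, scaffolded EF Core context —
// the "Unable to generate entity type for table ..." comments above are the standard scaffolding
// warnings for tables the tool skipped. The DbSet properties above expose the mapped tables, and
// all column/key/relationship mappings are configured fluently in OnModelCreating below rather
// than via data annotations on the entity classes.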
        public FXDB1Context(DbContextOptions<FXDB1Context> options)
            : base(options)
        {
        }

        protected override void OnModelCreating(ModelBuilder modelBuilder)
        {
            modelBuilder.Entity<_2fapages>(entity =>
            {
                entity.ToTable("2FAPages");

                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.PagePath)
                    .IsRequired()
                    .HasMaxLength(200);
            });

            modelBuilder.Entity<ActivityTabHourDayRange>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.Range)
                    .IsRequired()
                    .HasMaxLength(10);
            });

            modelBuilder.Entity<ActivityTabUserData>(entity =>
            {
                entity.HasKey(e => new { e.AppUserId, e.DataDatetime, e.HourDayRangeId });

                entity.Property(e => e.AppUserId).HasColumnName("AppUserID");

                entity.Property(e => e.DataDatetime).HasColumnType("date");

                entity.Property(e => e.HourDayRangeId).HasColumnName("HourDayRangeID");

                entity.Property(e => e.DayOfWeek)
                    .IsRequired()
                    .HasMaxLength(10);

                entity.HasOne(d => d.HourDayRange)
                    .WithMany(p => p.ActivityTabUserData)
                    .HasForeignKey(d => d.HourDayRangeId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_ActivityTabUserData_ActivityTabHourDayRange");
            });

            modelBuilder.Entity<Amlrisk>(entity =>
            {
                entity.ToTable("AMLRisk");

                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.Description)
                    .IsRequired()
                    .HasMaxLength(100);
            });

            modelBuilder.Entity<AppSetting>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.SettingKey)
                    .IsRequired()
                    .HasMaxLength(100);

                entity.Property(e => e.SettingValue)
                    .HasMaxLength(250)
                    .IsUnicode(false);
            });

            modelBuilder.Entity<AppUser>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.AppUserTypeId).HasColumnName("AppUserTypeID");

                entity.Property(e => e.AspcreationDate)
                    .HasColumnName("ASPCreationDate")
                    .HasColumnType("datetime");

                entity.Property(e => e.Aspnumber)
                    .HasColumnName("ASPNumber")
                    .HasMaxLength(9);

                entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID");

                entity.Property(e => e.BloombergGpi).HasMaxLength(255);

                entity.Property(e => e.Extension).HasMaxLength(50);

                entity.Property(e => e.Forename)
                    .IsRequired()
                    .HasMaxLength(100);

                entity.Property(e => e.FullName)
                    .IsRequired()
                    .HasMaxLength(201)
                    .HasComputedColumnSql("(([Forename]+' ')+[Surname])");

                entity.Property(e => e.Ipaddress)
                    .HasColumnName("IPAddress")
                    .HasMaxLength(500);

                entity.Property(e => e.Is2Famember).HasColumnName("Is2FAMember");

                entity.Property(e => e.LastEmailChangeDate).HasColumnType("datetime");

                entity.Property(e => e.LastTelephoneChangeDate).HasColumnType("datetime");

                entity.Property(e => e.Surname)
                    .IsRequired()
                    .HasMaxLength(100);

                entity.Property(e => e.TelephoneCountryCodeId).HasColumnName("TelephoneCountryCodeID");

                entity.Property(e => e.TelephoneNumber).HasMaxLength(20);

                entity.Property(e => e.UpdateTimeStamp)
                    .IsRequired()
                    .IsRowVersion();

                entity.Property(e => e.UserStartDate).HasColumnType("datetime");

                entity.HasOne(d => d.AppUserType)
                    .WithMany(p => p.AppUser)
                    .HasForeignKey(d => d.AppUserTypeId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_AppUser_AppUserType");

                entity.HasOne(d => d.AuthUser)
                    .WithMany(p => p.AppUser)
                    .HasForeignKey(d => d.AuthUserId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_AppUser_AuthUser");

                entity.HasOne(d => d.TelephoneCountryCode)
                    .WithMany(p => p.AppUser)
                    .HasForeignKey(d => d.TelephoneCountryCodeId)
                    .HasConstraintName("FK_AppUser_TelephoneCountryCode");
            });

            modelBuilder.Entity<AppUserEmailAlternative>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e =>
                        e.AlternativeEmailAddress)
                    .IsRequired()
                    .HasMaxLength(100);

                entity.Property(e => e.AppUserId).HasColumnName("AppUserID");

                entity.HasOne(d => d.AppUser)
                    .WithMany(p => p.AppUserEmailAlternative)
                    .HasForeignKey(d => d.AppUserId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_AppUserEmailAlternative_AppUser");
            });

            modelBuilder.Entity<AppUserType>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.ClientCompanySalesRegionId).HasColumnName("ClientCompanySalesRegionID");

                entity.Property(e => e.CommissionTypeId).HasColumnName("CommissionTypeID");

                entity.Property(e => e.Description)
                    .IsRequired()
                    .HasMaxLength(50);

                entity.Property(e => e.HomePage)
                    .IsRequired()
                    .HasMaxLength(50);

                entity.HasOne(d => d.ClientCompanySalesRegion)
                    .WithMany(p => p.AppUserType)
                    .HasForeignKey(d => d.ClientCompanySalesRegionId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_AppUserType_ClientCompanySalesRegion");

                entity.HasOne(d => d.CommissionType)
                    .WithMany(p => p.AppUserType)
                    .HasForeignKey(d => d.CommissionTypeId)
                    .HasConstraintName("FK__AppUserTy__Commi__1DD065E0");
            });

            modelBuilder.Entity<ArgentexAccount>(entity =>
            {
                entity.HasIndex(e => e.ChecksumMatchingContent)
                    .HasName("IX_ArgentexAccount_MatchingContent");

                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.ChecksumMatchingContent).HasComputedColumnSql("(checksum([MatchingContent]))");

                entity.Property(e => e.MatchingContent)
                    .IsRequired()
                    .HasMaxLength(500);
            });

            modelBuilder.Entity<ArmfxForwardTradeStatusesHistory>(entity =>
            {
                entity.ToTable("ARMFxForwardTradeStatusesHistory");

                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.ArmreportId).HasColumnName("ARMReportID");

                entity.Property(e => e.ArmstatusId).HasColumnName("ARMStatusID");

                entity.Property(e => e.ArmstatusUpdatedDateTime)
                    .HasColumnName("ARMStatusUpdatedDateTime")
                    .HasColumnType("datetime");

                entity.Property(e => e.ErrorDescription).HasMaxLength(255);

                entity.Property(e => e.FxForwardTradeCode)
                    .IsRequired()
                    .HasMaxLength(100)
                    .IsUnicode(false);

                entity.HasOne(d => d.Armreport)
                    .WithMany(p => p.ArmfxForwardTradeStatusesHistory)
                    .HasForeignKey(d => d.ArmreportId)
                    .HasConstraintName("FK_ARMFxForwardTradeStatusesHistory_ARMReport");

                entity.HasOne(d => d.Armstatus)
                    .WithMany(p => p.ArmfxForwardTradeStatusesHistory)
                    .HasForeignKey(d => d.ArmstatusId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_ARMFxForwardTradeStatusesHistory_ARMStatus");

                entity.HasOne(d => d.FxForwardTradeCodeNavigation)
                    .WithMany(p => p.ArmfxForwardTradeStatusesHistory)
                    .HasForeignKey(d => d.FxForwardTradeCode)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_ARMFxForwardTradeStatusesHistory_FXForwardTrade");
            });

            modelBuilder.Entity<Armreport>(entity =>
            {
                entity.ToTable("ARMReport");

                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.ArmreportOutgoingFileId).HasColumnName("ARMReportOutgoingFileID");

                entity.Property(e => e.CreatedDateTime).HasColumnType("datetime");

                entity.HasOne(d => d.ArmreportOutgoingFile)
                    .WithMany(p => p.Armreport)
                    .HasForeignKey(d => d.ArmreportOutgoingFileId)
                    .HasConstraintName("FK_ARMReport_ARMReportOutgoingFile");
            });

            modelBuilder.Entity<ArmreportField>(entity =>
            {
                entity.ToTable("ARMReportField");

                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.AppSettingKey).HasMaxLength(250);

                entity.Property(e => e.BrokerValue).HasMaxLength(100);

                entity.Property(e => e.ClientValue).HasMaxLength(100);

                entity.Property(e => e.Description)
                    .IsRequired()
.HasMaxLength(250); entity.Property(e => e.IsActive) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<ArmreportFxforwardTrade>(entity => { entity.ToTable("ARMReportFXForwardTrade"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ArmreportId).HasColumnName("ARMReportID"); entity.Property(e => e.ArmstatusId).HasColumnName("ARMStatusID"); entity.Property(e => e.FxforwardTradeCode) .IsRequired() .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.ReportedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.Armreport) .WithMany(p => p.ArmreportFxforwardTrade) .HasForeignKey(d => d.ArmreportId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ARMReportFxForwardTrade_ARMReport"); entity.HasOne(d => d.Armstatus) .WithMany(p => p.ArmreportFxforwardTrade) .HasForeignKey(d => d.ArmstatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ARMReportFxForwardTrade_ARMStatus"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.ArmreportFxforwardTrade) .HasForeignKey(d => d.FxforwardTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ARMReportFxForwardTrade_FXForwardTrade"); }); modelBuilder.Entity<ArmreportOutgoingFile>(entity => { entity.ToTable("ARMReportOutgoingFile"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ArmreportOutgoingFileContentId).HasColumnName("ARMReportOutgoingFileContentID"); entity.Property(e => e.Csvfilename) .IsRequired() .HasColumnName("CSVFilename") .HasMaxLength(255); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedFilename).HasMaxLength(255); entity.HasOne(d => d.ArmreportOutgoingFileContent) .WithMany(p => p.ArmreportOutgoingFile) .HasForeignKey(d => d.ArmreportOutgoingFileContentId) .HasConstraintName("FK_ARMReportOutgoingFile_ARMReportOutgoingFileContent"); }); modelBuilder.Entity<ArmreportOutgoingFileContent>(entity => { entity.ToTable("ARMReportOutgoingFileContent"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.FileContent).IsRequired(); }); modelBuilder.Entity<AuthApplication>(entity => { entity.Property(e => e.Id) .HasColumnName("ID") .ValueGeneratedOnAdd(); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.HasOne(d => d.IdNavigation) .WithOne(p => p.InverseIdNavigation) .HasForeignKey<AuthApplication>(d => d.Id) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthApplication_AuthApplication"); }); modelBuilder.Entity<AuthPermission>(entity => { entity.HasIndex(e => e.Description) .HasName("IX_AuthPermission") .IsUnique(); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<AuthRole>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<AuthRolePermission>(entity => { entity.HasKey(e => new { e.RoleId, e.PermissionId }); entity.Property(e => e.RoleId).HasColumnName("RoleID"); entity.Property(e => e.PermissionId).HasColumnName("PermissionID"); entity.HasOne(d => d.Permission) .WithMany(p => p.AuthRolePermission) .HasForeignKey(d => d.PermissionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthRolePermission_AuthPermission"); entity.HasOne(d => d.Role) .WithMany(p => 
p.AuthRolePermission) .HasForeignKey(d => d.RoleId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthRolePermission_AuthRole"); }); modelBuilder.Entity<AuthUser>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ApplicationId).HasColumnName("ApplicationID"); entity.Property(e => e.Comment).HasMaxLength(200); entity.Property(e => e.CreateDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Email) .IsRequired() .HasMaxLength(100); entity.Property(e => e.FailedPasswordAttemptCount).HasDefaultValueSql("((0))"); entity.Property(e => e.FailedPasswordAttemptWindowStart) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.LastActivityDate).HasColumnType("datetime"); entity.Property(e => e.LastLockOutDate).HasColumnType("datetime"); entity.Property(e => e.LastLoginDate).HasColumnType("datetime"); entity.Property(e => e.LastPasswordChangeDate).HasColumnType("datetime"); entity.Property(e => e.Password) .IsRequired() .HasMaxLength(255) .IsUnicode(false); entity.Property(e => e.UserName) .IsRequired() .HasMaxLength(50); entity.HasOne(d => d.Application) .WithMany(p => p.AuthUser) .HasForeignKey(d => d.ApplicationId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthUser_AuthApplication"); }); modelBuilder.Entity<AuthUserPasswordToken>(entity => { entity.HasKey(e => new { e.AuthUserId, e.Token }); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.Token) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.ExpiryDateTime).HasColumnType("datetime"); entity.Property(e => e.IsExpired).HasComputedColumnSql("(case when [ExpiryDateTime] IS NULL then (0) when datediff(second,[ExpiryDateTime],getdate())>(0) then (1) else (0) end)"); entity.HasOne(d => d.AuthUser) .WithMany(p => p.AuthUserPasswordToken) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthUserPasswordToken_AuthUser"); }); modelBuilder.Entity<AuthUserPreviousPasswords>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.DateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Password) .IsRequired() .HasMaxLength(255) .IsUnicode(false); entity.HasOne(d => d.AuthUser) .WithMany(p => p.AuthUserPreviousPasswords) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthUserPreviousPasswords_AuthUser"); }); modelBuilder.Entity<AuthUserRole>(entity => { entity.HasKey(e => new { e.UserId, e.RoleId }); entity.Property(e => e.UserId).HasColumnName("UserID"); entity.Property(e => e.RoleId).HasColumnName("RoleID"); entity.HasOne(d => d.Role) .WithMany(p => p.AuthUserRole) .HasForeignKey(d => d.RoleId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_AuthUserRole_AuthRole"); }); modelBuilder.Entity<BankAccount>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<BankAccountCurrencyBalance>(entity => { entity.HasKey(e => new { e.BankAccountId, e.CurrencyId }); entity.Property(e => e.BankAccountId).HasColumnName("BankAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.Balance) 
.HasColumnType("decimal(25, 8)") .HasDefaultValueSql("((0))"); entity.Property(e => e.BalanceDate).HasColumnType("datetime"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.BankAccount) .WithMany(p => p.BankAccountCurrencyBalance) .HasForeignKey(d => d.BankAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyBalance_BankAccount"); entity.HasOne(d => d.Currency) .WithMany(p => p.BankAccountCurrencyBalance) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyBalance_Currency"); entity.HasOne(d => d.TransactionCommit) .WithMany(p => p.BankAccountCurrencyBalance) .HasForeignKey(d => d.TransactionCommitId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyBalance_TransactionCommit"); }); modelBuilder.Entity<BankAccountCurrencyBalanceHistory>(entity => { entity.HasKey(e => new { e.BankAccountId, e.CurrencyId, e.TransactionCommitId }); entity.Property(e => e.BankAccountId).HasColumnName("BankAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.Balance) .HasColumnType("decimal(25, 8)") .HasDefaultValueSql("((0))"); entity.Property(e => e.BalanceDate).HasColumnType("datetime"); entity.HasOne(d => d.BankAccount) .WithMany(p => p.BankAccountCurrencyBalanceHistory) .HasForeignKey(d => d.BankAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyBalanceHistory_BankAccount"); entity.HasOne(d => d.Currency) .WithMany(p => p.BankAccountCurrencyBalanceHistory) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyBalanceHistory_Currency"); entity.HasOne(d => d.TransactionCommit) .WithMany(p => p.BankAccountCurrencyBalanceHistory) .HasForeignKey(d => d.TransactionCommitId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyBalanceHistory_TransactionCommit"); }); modelBuilder.Entity<BankAccountCurrencyDetails>(entity => { entity.HasKey(e => new { e.BankAccountId, e.CurrencyId }); entity.Property(e => e.BankAccountId).HasColumnName("BankAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.BankAccountIban) .HasColumnName("BankAccountIBAN") .HasMaxLength(50); entity.Property(e => e.BankAccountName).HasMaxLength(100); entity.Property(e => e.BankAccountNumber).HasMaxLength(50); entity.Property(e => e.BankAccountSort).HasMaxLength(8); entity.Property(e => e.BankAccountSwift).HasMaxLength(11); entity.Property(e => e.BankAddress).HasMaxLength(400); entity.Property(e => e.BankName).HasMaxLength(100); entity.Property(e => e.BeneficiaryAddress).HasMaxLength(400); entity.Property(e => e.BeneficiaryName).HasMaxLength(100); entity.Property(e => e.ClearingCodePrefixId).HasColumnName("ClearingCodePrefixID"); entity.Property(e => e.CountryId).HasColumnName("CountryID"); entity.Property(e => e.UpdateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.BankAccount) .WithMany(p => p.BankAccountCurrencyDetails) .HasForeignKey(d => d.BankAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyDetails_BankAccount"); 
entity.HasOne(d => d.ClearingCodePrefix) .WithMany(p => p.BankAccountCurrencyDetails) .HasForeignKey(d => d.ClearingCodePrefixId) .HasConstraintName("FK_BankAccountCurrencyDetails_ClearingPrefix"); entity.HasOne(d => d.Country) .WithMany(p => p.BankAccountCurrencyDetails) .HasForeignKey(d => d.CountryId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyDetails_Country"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.BankAccountCurrencyDetailsCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyDetails_CreatedByAuthUserId"); entity.HasOne(d => d.Currency) .WithMany(p => p.BankAccountCurrencyDetails) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyDetails_Currency"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.BankAccountCurrencyDetailsUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountCurrencyDetails_UpdatedByAuthUserId"); }); modelBuilder.Entity<BankAccountTransaction>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Amount) .HasColumnType("decimal(25, 8)") .HasDefaultValueSql("((0))"); entity.Property(e => e.BankAccountId).HasColumnName("BankAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.FxforwardTradeCode) .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.BankAccount) .WithMany(p => p.BankAccountTransaction) .HasForeignKey(d => d.BankAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountTransaction_BankAccount"); entity.HasOne(d => d.Currency) .WithMany(p => p.BankAccountTransaction) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BankAccountTransaction_Currency"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.BankAccountTransaction) .HasForeignKey(d => d.FxforwardTradeCode) .HasConstraintName("FK_BankAccountTransaction_FXForwardTrade"); entity.HasOne(d => d.Payment) .WithMany(p => p.BankAccountTransaction) .HasForeignKey(d => d.PaymentId) .HasConstraintName("FK_BankAccountTransaction_Payment"); }); modelBuilder.Entity<Breach>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.BreachLevelId).HasColumnName("BreachLevelID"); entity.Property(e => e.BreachTypeId).HasColumnName("BreachTypeID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Notes).HasMaxLength(500); entity.Property(e => e.OriginalLimit).HasMaxLength(250); entity.Property(e => e.OverrideValue).HasMaxLength(250); entity.Property(e => e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.TradeCode) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.BreachLevel) .WithMany(p => p.Breach) .HasForeignKey(d => 
d.BreachLevelId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Breach_BreachLevel"); entity.HasOne(d => d.BreachType) .WithMany(p => p.Breach) .HasForeignKey(d => d.BreachTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Breach_BreachType"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.Breach) .HasForeignKey(d => d.ClientCompanyOpiid) .HasConstraintName("FK_Breach_ClientCompanyOPI"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.BreachCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Breach_AuthUser"); entity.HasOne(d => d.Payment) .WithMany(p => p.Breach) .HasForeignKey(d => d.PaymentId) .HasConstraintName("FK_Breach_Payment"); entity.HasOne(d => d.TradeCodeNavigation) .WithMany(p => p.Breach) .HasForeignKey(d => d.TradeCode) .HasConstraintName("FK_Breach_FXForwardTrade"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.BreachUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Breach_AuthUser1"); }); modelBuilder.Entity<BreachInvoice>(entity => { entity.Property(e => e.Id) .HasColumnName("ID") .ValueGeneratedOnAdd(); entity.Property(e => e.Comment).HasMaxLength(500); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName).HasMaxLength(250); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.Breach) .WithMany(p => p.BreachInvoice) .HasForeignKey(d => d.BreachId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BreachInvoice_Breach"); entity.HasOne(d => d.IdNavigation) .WithOne(p => p.InverseIdNavigation) .HasForeignKey<BreachInvoice>(d => d.Id) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BreachInvoice_BreachInvoice"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.BreachInvoiceUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BreachInvoice_AuthUser"); entity.HasOne(d => d.UploadedByAuthUser) .WithMany(p => p.BreachInvoiceUploadedByAuthUser) .HasForeignKey(d => d.UploadedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BreachInvoiceUploadedBy_AuthUser"); }); modelBuilder.Entity<BreachLevel>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<BreachType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.DefaultBreachLevelId).HasColumnName("DefaultBreachLevelID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50); entity.HasOne(d => d.DefaultBreachLevel) .WithMany(p => p.BreachType) .HasForeignKey(d => d.DefaultBreachLevelId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_BreachType_BreachLevel"); }); modelBuilder.Entity<Broker>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => 
e.BankAccountBrokerPaymentsInId).HasColumnName("BankAccountBrokerPaymentsInID"); entity.Property(e => e.BankAccountBrokerPaymentsOutId).HasColumnName("BankAccountBrokerPaymentsOutID"); entity.Property(e => e.BankAccountClientPaymentsInId).HasColumnName("BankAccountClientPaymentsInID"); entity.Property(e => e.BankAccountClientPaymentsOutId).HasColumnName("BankAccountClientPaymentsOutID"); entity.Property(e => e.BankAccountSettlePaymentsInId).HasColumnName("BankAccountSettlePaymentsInID"); entity.Property(e => e.BankAccountSettlePaymentsOutId).HasColumnName("BankAccountSettlePaymentsOutID"); entity.Property(e => e.BrokerNoteEmailAddress) .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.EmirLei) .HasColumnName("EMIR_LEI") .HasMaxLength(50); entity.Property(e => e.MarginBankAccountId).HasColumnName("MarginBankAccountID"); entity.HasOne(d => d.BankAccountBrokerPaymentsIn) .WithMany(p => p.BrokerBankAccountBrokerPaymentsIn) .HasForeignKey(d => d.BankAccountBrokerPaymentsInId) .HasConstraintName("FK_Broker_BankAccountBrokerPaymentsIn"); entity.HasOne(d => d.BankAccountBrokerPaymentsOut) .WithMany(p => p.BrokerBankAccountBrokerPaymentsOut) .HasForeignKey(d => d.BankAccountBrokerPaymentsOutId) .HasConstraintName("FK_Broker_BankAccountBrokerPaymentsOut"); entity.HasOne(d => d.BankAccountClientPaymentsIn) .WithMany(p => p.BrokerBankAccountClientPaymentsIn) .HasForeignKey(d => d.BankAccountClientPaymentsInId) .HasConstraintName("FK_Broker_BankAccountClientPaymentsIn"); entity.HasOne(d => d.BankAccountClientPaymentsOut) .WithMany(p => p.BrokerBankAccountClientPaymentsOut) .HasForeignKey(d => d.BankAccountClientPaymentsOutId) .HasConstraintName("FK_Broker_BankAccountClientPaymentsOut"); }); modelBuilder.Entity<CassRecs>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CassRecsDate).HasColumnType("date"); entity.Property(e => e.CassRecsStatementFileId).HasColumnName("CassRecsStatementFileID"); entity.Property(e => e.Check1ByAuthUserId).HasColumnName("Check1ByAuthUserID"); entity.Property(e => e.Check1UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Check2ByAuthUserId).HasColumnName("Check2ByAuthUserID"); entity.Property(e => e.Check2UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.CompletedByAuthUserId).HasColumnName("CompletedByAuthUserID"); entity.Property(e => e.CompletedDateTime).HasColumnType("datetime"); entity.Property(e => e.CurrencyCode) .IsRequired() .HasMaxLength(3) .IsUnicode(false); entity.Property(e => e.LastNightsClosingLedger).HasColumnType("decimal(25, 8)"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.CassRecsStatementFile) .WithMany(p => p.CassRecs) .HasForeignKey(d => d.CassRecsStatementFileId) .HasConstraintName("FK_CassRecs_CassRecsStatementFile"); entity.HasOne(d => d.Check1ByAuthUser) .WithMany(p => p.CassRecsCheck1ByAuthUser) .HasForeignKey(d => d.Check1ByAuthUserId) .HasConstraintName("FK_CassRecs_AuthUser"); entity.HasOne(d => d.Check2ByAuthUser) .WithMany(p => p.CassRecsCheck2ByAuthUser) .HasForeignKey(d => d.Check2ByAuthUserId) .HasConstraintName("FK_CassRecs_AuthUser1"); entity.HasOne(d => d.CompletedByAuthUser) .WithMany(p => p.CassRecsCompletedByAuthUser) .HasForeignKey(d => d.CompletedByAuthUserId) 
.HasConstraintName("FK_CassRecs_CompletedByAuthUser"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.CassRecsUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_CassRecs_AuthUser2"); }); modelBuilder.Entity<CassRecsPaymentFile>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CassRecsDate).HasColumnType("date"); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName) .IsRequired() .HasMaxLength(250); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.CassRecsPaymentFileUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_CassRecsPaymentFile_AuthUser"); entity.HasOne(d => d.UploadedByAuthUser) .WithMany(p => p.CassRecsPaymentFileUploadedByAuthUser) .HasForeignKey(d => d.UploadedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_CassRecsPaymentFile_AuthUser1"); }); modelBuilder.Entity<CassRecsStatementFile>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CassRecsDate).HasColumnType("date"); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName) .IsRequired() .HasMaxLength(250); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<ClearingCodePrefix>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Code) .IsRequired() .HasMaxLength(2); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(250); }); modelBuilder.Entity<ClientCompany>(entity => { entity.HasIndex(e => e.Crn) .HasName("IX_ClientCompany__CRN") .IsUnique(); entity.HasIndex(e => e.Name) .IsUnique(); entity.HasIndex(e => new { e.ClientCompanyStatusId, e.ClientCompanyTypeId, e.ClientCompanyOptionStatusId, e.ClientCompanyCategoryId, e.DealerAppUserId, e.Id }) .HasName("_dta_index_ClientCompany_52_1205579333__K11_K12_K10_K25_K18_K13_K1"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AccountFormsSentDateTime).HasColumnType("datetime"); entity.Property(e => e.Address) .IsRequired() .HasMaxLength(2000); entity.Property(e => e.ApprovedDateTime).HasColumnType("datetime"); entity.Property(e => e.ApprovedOptionDateTime).HasColumnType("datetime"); entity.Property(e => e.AssignNewTrades).HasDefaultValueSql("((1))"); entity.Property(e => e.ClientCompanyCategoryId).HasColumnName("ClientCompanyCategoryID"); entity.Property(e => e.ClientCompanyCreditTypeId) .HasColumnName("ClientCompanyCreditTypeID") .HasDefaultValueSql("((2))"); entity.Property(e => e.ClientCompanyIndustrySectorId).HasColumnName("ClientCompanyIndustrySectorID"); entity.Property(e => e.ClientCompanyLinkedGroupId).HasColumnName("ClientCompanyLinkedGroupID"); entity.Property(e => e.ClientCompanyOptionStatusId).HasColumnName("ClientCompanyOptionStatusID"); entity.Property(e => e.ClientCompanySalesRegionId).HasColumnName("ClientCompanySalesRegionID"); entity.Property(e => 
e.ClientCompanyStatusId).HasColumnName("ClientCompanyStatusID"); entity.Property(e => e.ClientCompanyTypeId).HasColumnName("ClientCompanyTypeID"); entity.Property(e => e.Crn) .IsRequired() .HasColumnName("CRN") .HasMaxLength(50); entity.Property(e => e.DealerAppUserId).HasColumnName("DealerAppUserID"); entity.Property(e => e.Description).HasMaxLength(200); entity.Property(e => e.EmirEea) .HasColumnName("EMIR_EEA") .HasDefaultValueSql("((0))"); entity.Property(e => e.EmirLei) .HasColumnName("EMIR_LEI") .HasMaxLength(50); entity.Property(e => e.FaxNumber) .IsRequired() .HasMaxLength(50); entity.Property(e => e.FirstTradeDate).HasColumnType("date"); entity.Property(e => e.ImportantNote).HasMaxLength(2000); entity.Property(e => e.IsExcludedFromEmoney).HasColumnName("IsExcludedFromEMoney"); entity.Property(e => e.IsKyc) .HasColumnName("IsKYC") .HasDefaultValueSql("((0))"); entity.Property(e => e.IsRiskWarning).HasDefaultValueSql("((0))"); entity.Property(e => e.IsTandCs).HasDefaultValueSql("((0))"); entity.Property(e => e.LastContractDate).HasColumnType("datetime"); entity.Property(e => e.MaxCreditLimit).HasColumnType("decimal(12, 2)"); entity.Property(e => e.MaxOpenGbp).HasColumnName("MaxOpenGBP"); entity.Property(e => e.MaxTradeSizeGbp).HasColumnName("MaxTradeSizeGBP"); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(200); entity.Property(e => e.PitchedByAppUserId).HasColumnName("PitchedByAppUserID"); entity.Property(e => e.PitchedDateTime).HasColumnType("datetime"); entity.Property(e => e.PostCode).HasMaxLength(50); entity.Property(e => e.QualifiedNewTradeCode) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.SpreadsNote).HasMaxLength(2000); entity.Property(e => e.TelephoneNumber) .IsRequired() .HasMaxLength(50); entity.Property(e => e.TradingAddress).HasMaxLength(2000); entity.Property(e => e.TradingName).HasMaxLength(200); entity.Property(e => e.TradingPostCode).HasMaxLength(2000); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.WebsiteUrl) .IsRequired() .HasColumnName("WebsiteURL") .HasMaxLength(200); entity.HasOne(d => d.ClientCompanyCategory) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanyCategoryId) .HasConstraintName("FK_ClientCompany_ClientCompanyCategory"); entity.HasOne(d => d.ClientCompanyCreditType) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanyCreditTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompany_ClientCompanyCreditType"); entity.HasOne(d => d.ClientCompanyIndustrySector) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanyIndustrySectorId) .HasConstraintName("FK_ClientCompany_ClientCompanyIndustrySector"); entity.HasOne(d => d.ClientCompanyLinkedGroup) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanyLinkedGroupId) .HasConstraintName("FK_ClientCompany_ClientCompanyLinkedGroup"); entity.HasOne(d => d.ClientCompanySalesRegion) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanySalesRegionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompany_ClientCompanySalesRegion"); entity.HasOne(d => d.ClientCompanyStatus) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanyStatusId) .OnDelete(DeleteBehavior.ClientSetNull) 
.HasConstraintName("FK_ClientCompany_ClientCompanyStatus"); entity.HasOne(d => d.ClientCompanyType) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.ClientCompanyTypeId) .HasConstraintName("FK_ClientCompany_ClientCompanyType"); entity.HasOne(d => d.QualifiedNewTradeCodeNavigation) .WithMany(p => p.ClientCompany) .HasForeignKey(d => d.QualifiedNewTradeCode) .HasConstraintName("FK_ClientCompany_FXForwardTrade"); }); modelBuilder.Entity<ClientCompanyActivityReport>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ClientCompanyName) .IsRequired() .HasMaxLength(200); entity.Property(e => e.LastActivityReportSentByAppUserId).HasColumnName("LastActivityReportSentByAppUserID"); entity.Property(e => e.LastActivityReportSentDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<ClientCompanyCategory>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ClientCompanyCompliance>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AmlriskId).HasColumnName("AMLRiskID"); entity.Property(e => e.BalanceSheetGbp) .HasColumnName("BalanceSheetGBP") .HasColumnType("decimal(25, 2)"); entity.Property(e => e.ClassificationId).HasColumnName("ClassificationID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ExpectedFrequencyId).HasColumnName("ExpectedFrequencyID"); entity.Property(e => e.ExpectedMaxTradeSize).HasColumnType("decimal(25, 2)"); entity.Property(e => e.ExpectedTotalVolume).HasColumnType("decimal(25, 2)"); entity.Property(e => e.IsMiFid).HasColumnName("IsMiFID"); entity.Property(e => e.NatureId).HasColumnName("NatureID"); entity.Property(e => e.OwnFundsGbp) .HasColumnName("OwnFundsGBP") .HasColumnType("decimal(25, 2)"); entity.Property(e => e.ReasonId).HasColumnName("ReasonID"); entity.Property(e => e.RefreshDueDateTime).HasColumnType("datetime"); entity.Property(e => e.RegisteredDomicileCountryId).HasColumnName("RegisteredDomicileCountryID"); entity.Property(e => e.Ttca).HasColumnName("TTCA"); entity.Property(e => e.TurnoverGbp) .HasColumnName("TurnoverGBP") .HasColumnType("decimal(25, 2)"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.Amlrisk) .WithMany(p => p.ClientCompanyCompliance) .HasForeignKey(d => d.AmlriskId) .HasConstraintName("FK_ClientCompanyCompliance_AMLRisk"); entity.HasOne(d => d.Classification) .WithMany(p => p.ClientCompanyCompliance) .HasForeignKey(d => d.ClassificationId) .HasConstraintName("FK_ClientCompanyCompliance_ComplianceClassification"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyCompliance) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyCompliance_ClientCompany"); entity.HasOne(d => d.ExpectedFrequency) .WithMany(p => p.ClientCompanyCompliance) .HasForeignKey(d => d.ExpectedFrequencyId) .HasConstraintName("FK_ClientCompanyCompliance_ExpectedFrequency"); entity.HasOne(d => d.Nature) .WithMany(p => p.ClientCompanyCompliance) .HasForeignKey(d => d.NatureId) .HasConstraintName("FK_ClientCompanyCompliance_ComplianceNature"); entity.HasOne(d => d.Reason) .WithMany(p => 
p.ClientCompanyCompliance) .HasForeignKey(d => d.ReasonId) .HasConstraintName("FK_ClientCompanyCompliance_ComplianceReason"); entity.HasOne(d => d.RegisteredDomicileCountry) .WithMany(p => p.ClientCompanyCompliance) .HasForeignKey(d => d.RegisteredDomicileCountryId) .HasConstraintName("FK_ClientCompanyCompliance_Country"); }); modelBuilder.Entity<ClientCompanyComplianceCorporateSector>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.ComplianceCorporateSectorFinancialId).HasColumnName("ComplianceCorporateSectorFinancialID"); entity.Property(e => e.ComplianceCorporateSectorNonFinancialId).HasColumnName("ComplianceCorporateSectorNonFinancialID"); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.ClientCompanyCompliance) .WithMany(p => p.ClientCompanyComplianceCorporateSector) .HasForeignKey(d => d.ClientCompanyComplianceId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyComplianceCorporateSector_ClientCompanyCompliance"); entity.HasOne(d => d.ComplianceCorporateSectorFinancial) .WithMany(p => p.ClientCompanyComplianceCorporateSector) .HasForeignKey(d => d.ComplianceCorporateSectorFinancialId) .HasConstraintName("FK_ClientCompanyComplianceCorporateSector_ComplianceCorporateSectorFinancial"); entity.HasOne(d => d.ComplianceCorporateSectorNonFinancial) .WithMany(p => p.ClientCompanyComplianceCorporateSector) .HasForeignKey(d => d.ComplianceCorporateSectorNonFinancialId) .HasConstraintName("FK_ClientCompanyComplianceCorporateSector_ComplianceCorporateSectorNonFinancial"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanyComplianceCorporateSector) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyComplianceCorporateSector_AuthUser"); }); modelBuilder.Entity<ClientCompanyComplianceCurrency>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompanyCompliance) .WithMany(p => p.ClientCompanyComplianceCurrency) .HasForeignKey(d => d.ClientCompanyComplianceId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyComplianceCurrency_ClientCompanyCompliance"); entity.HasOne(d => d.Currency) .WithMany(p => p.ClientCompanyComplianceCurrency) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyComplianceCurrency_Currency"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanyComplianceCurrency) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyComplianceCurrency_AuthUser"); }); modelBuilder.Entity<ClientCompanyComplianceNote>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") 
.HasDefaultValueSql("(getdate())"); entity.Property(e => e.NoteText).IsRequired(); entity.Property(e => e.Title).HasMaxLength(100); entity.HasOne(d => d.AuthUser) .WithMany(p => p.ClientCompanyComplianceNote) .HasForeignKey(d => d.AuthUserId) .HasConstraintName("FK_ClientCompanyComplianceNote_AuthUser"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyComplianceNote) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyComplianceNote_ClientCompany"); }); modelBuilder.Entity<ClientCompanyContact>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AspcreationDate) .HasColumnName("ASPCreationDate") .HasColumnType("datetime"); entity.Property(e => e.Aspnumber) .HasColumnName("ASPNumber") .HasMaxLength(9); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.Birthday).HasColumnType("date"); entity.Property(e => e.BloombergGpi).HasMaxLength(255); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.Email).HasMaxLength(200); entity.Property(e => e.Forename) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Fullname) .IsRequired() .HasMaxLength(162) .HasComputedColumnSql("(((isnull([Title]+' ','')+[Forename])+' ')+[Surname])"); entity.Property(e => e.LastEmailChangeDate).HasColumnType("datetime"); entity.Property(e => e.LastTelephoneChangeDate).HasColumnType("datetime"); entity.Property(e => e.NiNumber).HasMaxLength(50); entity.Property(e => e.Position).HasMaxLength(50); entity.Property(e => e.RecAmreport).HasColumnName("RecAMReport"); entity.Property(e => e.Surname) .IsRequired() .HasMaxLength(100); entity.Property(e => e.TelephoneDirect).HasMaxLength(50); entity.Property(e => e.TelephoneMobile).HasMaxLength(50); entity.Property(e => e.TelephoneOther).HasMaxLength(50); entity.Property(e => e.Title).HasMaxLength(10); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.AuthUser) .WithMany(p => p.ClientCompanyContactAuthUser) .HasForeignKey(d => d.AuthUserId) .HasConstraintName("FK_ClientCompanyContact_AuthUser"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyContact) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyContact_ClientCompany"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanyContactUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyContact_UpdatedByAuthUser"); }); modelBuilder.Entity<ClientCompanyContactCategory>(entity => { entity.HasKey(e => new { e.ClientCompanyContactId, e.ContactCategoryId }); entity.Property(e => e.ClientCompanyContactId).HasColumnName("ClientCompanyContactID"); entity.Property(e => e.ContactCategoryId).HasColumnName("ContactCategoryID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.DateCreated) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); }); modelBuilder.Entity<ClientCompanyCreditType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50); }); 
modelBuilder.Entity<ClientCompanyCurrencyDefaultOpi>(entity => { entity.HasKey(e => new { e.ClientCompanyId, e.CurrencyId }); entity.ToTable("ClientCompanyCurrencyDefaultOPI"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.UpdateAuthUserId).HasColumnName("UpdateAuthUserID"); entity.Property(e => e.UpdateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyCurrencyDefaultOpi) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyCurrencyDefaultOPI_ClientCompany"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.ClientCompanyCurrencyDefaultOpi) .HasForeignKey(d => d.ClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyCurrencyDefaultOPI_ClientCompanyOPI"); entity.HasOne(d => d.Currency) .WithMany(p => p.ClientCompanyCurrencyDefaultOpi) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyCurrencyDefaultOPI_Currency"); entity.HasOne(d => d.UpdateAuthUser) .WithMany(p => p.ClientCompanyCurrencyDefaultOpi) .HasForeignKey(d => d.UpdateAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyCurrencyDefaultOPI_AuthUser"); }); modelBuilder.Entity<ClientCompanyIbrelationship>(entity => { entity.ToTable("ClientCompanyIBRelationship"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.IntroducingBrokerId).HasColumnName("IntroducingBrokerID"); entity.Property(e => e.Percentage).HasColumnType("decimal(6, 2)"); entity.Property(e => e.UpdateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyIbrelationship) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyIBRelationship_ClientCompany"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanyIbrelationship) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyIBRelationship_IntroducingBroker"); }); modelBuilder.Entity<ClientCompanyIndustrySector>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ClientCompanyLinkedGroup>(entity => { entity.HasIndex(e => e.Description) .HasName("IX_ClientCompanyLinkedGroup") .IsUnique(); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.LastUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanyLinkedGroup) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) 
.HasConstraintName("FK_ClientCompanyLinkedGroup_AuthUser"); }); modelBuilder.Entity<ClientCompanyNote>(entity => { entity.HasIndex(e => new { e.ClientCompanyId, e.CreateDateTime }) .HasName("_dta_index_ClientCompanyNote_52_1605580758__K2_K6"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CreateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.NoteText).IsRequired(); entity.Property(e => e.Title).HasMaxLength(100); entity.HasOne(d => d.AuthUser) .WithMany(p => p.ClientCompanyNote) .HasForeignKey(d => d.AuthUserId) .HasConstraintName("FK_ClientCompanyNote_AuthUser"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyNote) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyNote_ClientCompany"); }); modelBuilder.Entity<ClientCompanyOnlineDetails>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.Collateral).HasColumnType("decimal(25, 8)"); entity.Property(e => e.MaxOpen).HasColumnType("decimal(25, 8)"); entity.Property(e => e.MaxTenor).HasColumnType("datetime"); entity.Property(e => e.MaxTradeSize).HasColumnType("decimal(25, 8)"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyOnlineDetails) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompany_ClientCompanyOnlineDetails"); }); modelBuilder.Entity<ClientCompanyOnlineDetailsSkew>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyOnlineDetailsId).HasColumnName("ClientCompanyOnlineDetailsID"); entity.Property(e => e.Currency1Id).HasColumnName("Currency1ID"); entity.Property(e => e.Currency2Id).HasColumnName("Currency2ID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompanyOnlineDetails) .WithMany(p => p.ClientCompanyOnlineDetailsSkew) .HasForeignKey(d => d.ClientCompanyOnlineDetailsId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOnlineDetailsSkew_ClientCompanyOnlineDetails"); entity.HasOne(d => d.Currency1) .WithMany(p => p.ClientCompanyOnlineDetailsSkewCurrency1) .HasForeignKey(d => d.Currency1Id) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOnlineDetailsSkew_Currency1"); entity.HasOne(d => d.Currency2) .WithMany(p => p.ClientCompanyOnlineDetailsSkewCurrency2) .HasForeignKey(d => d.Currency2Id) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOnlineDetailsSkew_Currency2"); }); modelBuilder.Entity<ClientCompanyOnlineSpreadAdjustment>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyOnlineDetailsId).HasColumnName("ClientCompanyOnlineDetailsID"); entity.Property(e => e.Currency1Id).HasColumnName("Currency1ID"); entity.Property(e => e.Currency2Id).HasColumnName("Currency2ID"); 
entity.Property(e => e.ExpirationDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompanyOnlineDetails) .WithMany(p => p.ClientCompanyOnlineSpreadAdjustment) .HasForeignKey(d => d.ClientCompanyOnlineDetailsId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOnlineSpreadAdjustment_ClientCompanyOnlineDetails"); entity.HasOne(d => d.Currency1) .WithMany(p => p.ClientCompanyOnlineSpreadAdjustmentCurrency1) .HasForeignKey(d => d.Currency1Id) .HasConstraintName("FK_ClientCompanyOnlineSpreadAdjustment_Currency1"); entity.HasOne(d => d.Currency2) .WithMany(p => p.ClientCompanyOnlineSpreadAdjustmentCurrency2) .HasForeignKey(d => d.Currency2Id) .HasConstraintName("FK_ClientCompanyOnlineSpreadAdjustment_Currency2"); }); modelBuilder.Entity<ClientCompanyOpi>(entity => { entity.ToTable("ClientCompanyOPI"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AccountName) .IsRequired() .HasMaxLength(100); entity.Property(e => e.AccountNumber) .IsRequired() .HasMaxLength(50); entity.Property(e => e.AuthorisedByAuthUserId).HasColumnName("AuthorisedByAuthUserID"); entity.Property(e => e.AuthorisedDateTime).HasColumnType("datetime"); entity.Property(e => e.BankName) .IsRequired() .HasMaxLength(100); entity.Property(e => e.BeneficiaryName).HasMaxLength(250); entity.Property(e => e.ClearingCodePrefixId).HasColumnName("ClearingCodePrefixID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CountryId).HasColumnName("CountryID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Iban) .HasColumnName("IBAN") .HasMaxLength(50); entity.Property(e => e.Reference) .IsRequired() .HasMaxLength(50); entity.Property(e => e.RejectedByAuthUserId).HasColumnName("RejectedByAuthUserID"); entity.Property(e => e.RejectedDateTime).HasColumnType("datetime"); entity.Property(e => e.SortCode).HasMaxLength(50); entity.Property(e => e.SwiftCode) .HasMaxLength(11) .IsUnicode(false); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.ClearingCodePrefix) .WithMany(p => p.ClientCompanyOpi) .HasForeignKey(d => d.ClearingCodePrefixId) .HasConstraintName("FK_ClientCompanyOPI_ClearingCodePrefix"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyOpi) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPI_ClientCompany"); entity.HasOne(d => d.Country) .WithMany(p => p.ClientCompanyOpi) .HasForeignKey(d => d.CountryId) .HasConstraintName("FK_ClientCompanyOPI_Country"); entity.HasOne(d => d.Currency) .WithMany(p => p.ClientCompanyOpi) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPI_Currency"); entity.HasOne(d => d.RejectedByAuthUser) .WithMany(p => p.ClientCompanyOpi) 
.HasForeignKey(d => d.RejectedByAuthUserId) .HasConstraintName("FK_ClientCompanyOPI_AuthUser"); }); modelBuilder.Entity<ClientCompanyOpiduplicate>(entity => { entity.ToTable("ClientCompanyOPIDuplicate"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.DuplicateClientCompanyOpiid).HasColumnName("DuplicateClientCompanyOPIID"); entity.Property(e => e.IsOk).HasColumnName("IsOK"); entity.Property(e => e.IsOkupdatedByAuthUserId).HasColumnName("IsOKUpdatedByAuthUserID"); entity.Property(e => e.IsOkupdatedDateTime) .HasColumnName("isOKUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.Note).HasMaxLength(250); entity.Property(e => e.OriginalClientCompanyOpiid).HasColumnName("OriginalClientCompanyOPIID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.ClientCompanyOpiduplicateCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPIDuplicate_AuthUser1"); entity.HasOne(d => d.DuplicateClientCompanyOpi) .WithMany(p => p.ClientCompanyOpiduplicateDuplicateClientCompanyOpi) .HasForeignKey(d => d.DuplicateClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPIDuplicate_ClientCompanyOPI"); entity.HasOne(d => d.IsOkupdatedByAuthUser) .WithMany(p => p.ClientCompanyOpiduplicateIsOkupdatedByAuthUser) .HasForeignKey(d => d.IsOkupdatedByAuthUserId) .HasConstraintName("FK_ClientCompanyOPIDuplicate_AuthUser"); entity.HasOne(d => d.OriginalClientCompanyOpi) .WithMany(p => p.ClientCompanyOpiduplicateOriginalClientCompanyOpi) .HasForeignKey(d => d.OriginalClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPIDuplicate_ClientCompanyOPI1"); }); modelBuilder.Entity<ClientCompanyOpitransaction>(entity => { entity.ToTable("ClientCompanyOPITransaction"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Amount).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.OpiaccountName) .HasColumnName("OPIAccountName") .HasMaxLength(100); entity.Property(e => e.OpiaccountNumber) .HasColumnName("OPIAccountNumber") .HasMaxLength(50); entity.Property(e => e.OpibankAddress).HasColumnName("OPIBankAddress"); entity.Property(e => e.OpibankName) .HasColumnName("OPIBankName") .HasMaxLength(100); entity.Property(e => e.OpibeneficiaryAddress).HasColumnName("OPIBeneficiaryAddress"); entity.Property(e => e.OpibeneficiaryName) .HasColumnName("OPIBeneficiaryName") .HasMaxLength(250); entity.Property(e => e.OpicountryId).HasColumnName("OPICountryID"); entity.Property(e => e.Opidescription) .HasColumnName("OPIDescription") .HasMaxLength(50); entity.Property(e => e.OpidetailsUpdated).HasColumnName("OPIDetailsUpdated"); entity.Property(e => e.Opiiban) .HasColumnName("OPIIBAN") .HasMaxLength(50); entity.Property(e => e.Opireference) .HasColumnName("OPIReference") .HasMaxLength(50); entity.Property(e => e.OpisortCode) .HasColumnName("OPISortCode") .HasMaxLength(50); entity.Property(e => e.OpiswiftCode) .HasColumnName("OPISwiftCode") .HasMaxLength(11) .IsUnicode(false); entity.Property(e => 
e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.ClientCompanyOpitransaction) .HasForeignKey(d => d.ClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPITransaction_ClientCompanyOPI"); entity.HasOne(d => d.Currency) .WithMany(p => p.ClientCompanyOpitransaction) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPITransaction_Currency"); entity.HasOne(d => d.Opicountry) .WithMany(p => p.ClientCompanyOpitransaction) .HasForeignKey(d => d.OpicountryId) .HasConstraintName("FK_ClientCompanyOPITransaction_Country"); entity.HasOne(d => d.Payment) .WithMany(p => p.ClientCompanyOpitransaction) .HasForeignKey(d => d.PaymentId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOPITransaction_Payment"); }); modelBuilder.Entity<ClientCompanyOptionCount>(entity => { entity.HasKey(e => e.ClientCompanyId); entity.Property(e => e.ClientCompanyId) .HasColumnName("ClientCompanyID") .ValueGeneratedNever(); entity.Property(e => e.OptionCount).HasDefaultValueSql("((0))"); entity.HasOne(d => d.ClientCompany) .WithOne(p => p.ClientCompanyOptionCount) .HasForeignKey<ClientCompanyOptionCount>(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyOptionCount_ClientCompany"); }); modelBuilder.Entity<ClientCompanyPipeline>(entity => { entity.HasKey(e => e.ClientCompanyId); entity.Property(e => e.ClientCompanyId) .HasColumnName("ClientCompanyID") .ValueGeneratedNever(); entity.Property(e => e.LastCall).HasColumnType("datetime"); entity.Property(e => e.LastEmail).HasColumnType("datetime"); entity.Property(e => e.LastEmailFrom).HasMaxLength(256); entity.Property(e => e.LastEmailTo).HasMaxLength(256); entity.Property(e => e.LastLongCall).HasColumnType("datetime"); entity.Property(e => e.NextActionDueDate).HasColumnType("date"); entity.Property(e => e.NextActionUpdated).HasColumnType("datetime"); entity.Property(e => e.NextPipelineActionId).HasColumnName("NextPipelineActionID"); entity.Property(e => e.NextTradeDate).HasColumnType("datetime"); entity.Property(e => e.Progress).HasDefaultValueSql("((0))"); entity.Property(e => e.TotalCalls).HasDefaultValueSql("((0))"); entity.Property(e => e.UpdateAuthUserId).HasColumnName("UpdateAuthUserID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.ClientCompany) .WithOne(p => p.ClientCompanyPipeline) .HasForeignKey<ClientCompanyPipeline>(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanySalesPipeline_ClientCompany"); entity.HasOne(d => d.NextPipelineAction) .WithMany(p => p.ClientCompanyPipeline) .HasForeignKey(d => d.NextPipelineActionId) .HasConstraintName("FK_ClientCompanySalesPipeline_SalesPipelineAction"); entity.HasOne(d => d.UpdateAuthUser) .WithMany(p => p.ClientCompanyPipeline) .HasForeignKey(d => d.UpdateAuthUserId) .HasConstraintName("FK_ClientCompanySalesPipeline_AuthUser"); }); modelBuilder.Entity<ClientCompanySalesAppUser>(entity => { entity.HasKey(e => new { e.ClientCompanyId, e.SalesPersonAppUserId }); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.SalesPersonAppUserId).HasColumnName("SalesPersonAppUserID"); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) 
.HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanySalesAppUser) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanySalesAppUser_ClientCompany"); entity.HasOne(d => d.SalesPersonAppUser) .WithMany(p => p.ClientCompanySalesAppUser) .HasForeignKey(d => d.SalesPersonAppUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanySalesAppUser_AppUser"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanySalesAppUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanySalesAppUser_AuthUser"); }); modelBuilder.Entity<ClientCompanySalesRegion>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.PermissionId).HasColumnName("PermissionID"); }); modelBuilder.Entity<ClientCompanyStatus>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ClientCompanyTradeCount>(entity => { entity.HasKey(e => e.ClientCompanyId); entity.Property(e => e.ClientCompanyId) .HasColumnName("ClientCompanyID") .ValueGeneratedNever(); entity.Property(e => e.TradeCount).HasDefaultValueSql("((0))"); entity.HasOne(d => d.ClientCompany) .WithOne(p => p.ClientCompanyTradeCount) .HasForeignKey<ClientCompanyTradeCount>(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyTradeCount_ClientCompany"); }); modelBuilder.Entity<ClientCompanyType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ClientCompanyVirtualAccount>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.VirtualAccountTypeId).HasColumnName("VirtualAccountTypeID"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.ClientCompanyVirtualAccount) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyVirtualAccount_ClientCompany"); entity.HasOne(d => d.VirtualAccountType) .WithMany(p => p.ClientCompanyVirtualAccount) .HasForeignKey(d => d.VirtualAccountTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyVirtualAccount_VirtualAccountType"); }); modelBuilder.Entity<ClientCompanyVirtualAccountCurrencyBalance>(entity => { entity.HasKey(e => new { e.ClientCompanyVirtualAccountId, e.CurrencyId }); entity.Property(e => e.ClientCompanyVirtualAccountId).HasColumnName("ClientCompanyVirtualAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.Balance) .HasColumnType("decimal(25, 8)") .HasDefaultValueSql("((0))"); entity.Property(e => e.BalanceDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.ClientCompanyVirtualAccount) .WithMany(p => p.ClientCompanyVirtualAccountCurrencyBalance) .HasForeignKey(d => d.ClientCompanyVirtualAccountId) .OnDelete(DeleteBehavior.ClientSetNull) 
.HasConstraintName("FK_ClientCompanyVirtualAccountCurrencyBalance_ClientCompanyVirtualAccount"); entity.HasOne(d => d.Currency) .WithMany(p => p.ClientCompanyVirtualAccountCurrencyBalance) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyVirtualAccountCurrencyBalance_Currency"); }); modelBuilder.Entity<ClientCompanyVirtualAccountCurrencyBalanceHistory>(entity => { entity.HasKey(e => new { e.ClientCompanyVirtualAccountId, e.CurrencyId, e.TransactionCommitId }); entity.Property(e => e.ClientCompanyVirtualAccountId).HasColumnName("ClientCompanyVirtualAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.Balance).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BalanceDate).HasColumnType("datetime"); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.HasOne(d => d.ClientCompanyVirtualAccount) .WithMany(p => p.ClientCompanyVirtualAccountCurrencyBalanceHistory) .HasForeignKey(d => d.ClientCompanyVirtualAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyVirtualAccountCurrencyBalanceHistory_ClientCompanyVirtualAccount"); entity.HasOne(d => d.Currency) .WithMany(p => p.ClientCompanyVirtualAccountCurrencyBalanceHistory) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyVirtualAccountCurrencyBalanceHistory_Currency"); entity.HasOne(d => d.TransactionCommit) .WithMany(p => p.ClientCompanyVirtualAccountCurrencyBalanceHistory) .HasForeignKey(d => d.TransactionCommitId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientCompanyVirtualAccountCurrencyBalanceHistory_TransactionCommit"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientCompanyVirtualAccountCurrencyBalanceHistory) .HasForeignKey(d => d.UpdatedByAuthUserId) .HasConstraintName("FK_ClientCompanyVirtualAccountCurrencyBalanceHistory_AuthUser"); }); modelBuilder.Entity<ClientSiteAction>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionStatusId).HasColumnName("ClientSiteActionStatusID"); entity.Property(e => e.ClientSiteActionTypeId).HasColumnName("ClientSiteActionTypeID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Details) .IsRequired() .HasMaxLength(1000) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdatedTimestamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.ClientSiteActionStatus) .WithMany(p => p.ClientSiteAction) .HasForeignKey(d => d.ClientSiteActionStatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_ClientSiteActionStatus"); entity.HasOne(d => d.ClientSiteActionType) .WithMany(p => p.ClientSiteAction) .HasForeignKey(d => d.ClientSiteActionTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_ClientSiteActionType"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.ClientSiteActionCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_AuthUser_Client"); 
entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientSiteActionUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_AuthUser_Trader"); }); modelBuilder.Entity<ClientSiteAction2ClientCompanyOpi>(entity => { entity.ToTable("ClientSiteAction2ClientCompanyOPI"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.ClientSiteAction2ClientCompanyOpi) .HasForeignKey(d => d.ClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2ClientCompanyOPI_ClientCompanyOPI"); entity.HasOne(d => d.ClientSiteAction) .WithMany(p => p.ClientSiteAction2ClientCompanyOpi) .HasForeignKey(d => d.ClientSiteActionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2ClientCompanyOPI_ClientSiteAction"); }); modelBuilder.Entity<ClientSiteAction2FixFxforwardTrade>(entity => { entity.ToTable("ClientSiteAction2FixFXForwardTrade"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.Property(e => e.FxforwardTradeCode) .IsRequired() .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.HasOne(d => d.ClientSiteAction) .WithMany(p => p.ClientSiteAction2FixFxforwardTrade) .HasForeignKey(d => d.ClientSiteActionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FixFXForwardTrade_ClientSiteAction"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.ClientSiteAction2FixFxforwardTrade) .HasForeignKey(d => d.FxforwardTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FixFXForwardTrade_FXForwardTrade"); }); modelBuilder.Entity<ClientSiteAction2FxforwardTrade2Opi>(entity => { entity.ToTable("ClientSiteAction2FXForwardTrade2OPI"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.Property(e => e.FxforwardTrade2Opiid).HasColumnName("FXForwardTrade2OPIID"); entity.HasOne(d => d.ClientSiteAction) .WithMany(p => p.ClientSiteAction2FxforwardTrade2Opi) .HasForeignKey(d => d.ClientSiteActionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FXForwardTrade2OPI_ClientSiteAction"); entity.HasOne(d => d.FxforwardTrade2Opi) .WithMany(p => p.ClientSiteAction2FxforwardTrade2Opi) .HasForeignKey(d => d.FxforwardTrade2Opiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FXForwardTrade2OPI_FXForwardTrade2OPI"); }); modelBuilder.Entity<ClientSiteAction2Fxswap>(entity => { entity.ToTable("ClientSiteAction2FXSwap"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.Property(e => e.FxswapId).HasColumnName("FXSwapID"); }); modelBuilder.Entity<ClientSiteActionStatus>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Details) .IsRequired() .HasMaxLength(250) .IsUnicode(false); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<ClientSiteActionType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Details) .IsRequired() .HasMaxLength(250) 
.IsUnicode(false); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<Commission>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AppUserId).HasColumnName("AppUserID"); entity.Property(e => e.Commission1).HasColumnName("Commission"); entity.Property(e => e.CommissionTypeId).HasColumnName("CommissionTypeID"); entity.HasOne(d => d.AppUser) .WithMany(p => p.Commission) .HasForeignKey(d => d.AppUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Commission_AppUser"); entity.HasOne(d => d.CommissionType) .WithMany(p => p.Commission) .HasForeignKey(d => d.CommissionTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Commission_CommissionType"); }); modelBuilder.Entity<CommissionType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.DefaultCommissionRate).HasDefaultValueSql("((0.15))"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ComplianceClassification>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); entity.Property(e => e.Sequence).HasDefaultValueSql("((0))"); }); modelBuilder.Entity<ComplianceClassificationFile>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.ComplianceClassificationId).HasColumnName("ComplianceClassificationID"); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName) .IsRequired() .HasMaxLength(250); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompanyCompliance) .WithMany(p => p.ComplianceClassificationFile) .HasForeignKey(d => d.ClientCompanyComplianceId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceClassificationFile_ClientCompanyCompliance"); entity.HasOne(d => d.ComplianceClassification) .WithMany(p => p.ComplianceClassificationFile) .HasForeignKey(d => d.ComplianceClassificationId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceClassificationFile_ComplianceClassification"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ComplianceClassificationFileUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceClassificationFile_AuthUser"); entity.HasOne(d => d.UploadedByAuthUser) .WithMany(p => p.ComplianceClassificationFileUploadedByAuthUser) .HasForeignKey(d => d.UploadedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceClassificationFile_AuthUser1"); }); modelBuilder.Entity<ComplianceCorporateSectorFinancial>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(250); entity.Property(e => e.Sequence).HasDefaultValueSql("((0))"); entity.Property(e => e.Value) .IsRequired() .HasMaxLength(3); }); modelBuilder.Entity<ComplianceCorporateSectorNonFinancial>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() 
.HasMaxLength(250); entity.Property(e => e.Sequence).HasDefaultValueSql("((0))"); entity.Property(e => e.Value) .IsRequired() .HasMaxLength(3); }); modelBuilder.Entity<ComplianceIsincurrencyValueDate>(entity => { entity.ToTable("ComplianceISINCurrencyValueDate"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CurrencyPair) .IsRequired() .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.Isin) .IsRequired() .HasColumnName("ISIN") .HasMaxLength(12); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.ValueDate).HasColumnType("date"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ComplianceIsincurrencyValueDate) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceISIN_AuthUser"); }); modelBuilder.Entity<ComplianceNature>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); entity.Property(e => e.EmirValue) .IsRequired() .HasMaxLength(1); entity.Property(e => e.Sequence).HasDefaultValueSql("((0))"); }); modelBuilder.Entity<ComplianceQuestionnaire>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.ComplianceQuestionnaireAnswerId).HasColumnName("ComplianceQuestionnaireAnswerID"); entity.Property(e => e.ComplianceQuestionnaireQuestionId).HasColumnName("ComplianceQuestionnaireQuestionID"); entity.Property(e => e.IsFirstTimeSelect) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompanyCompliance) .WithMany(p => p.ComplianceQuestionnaire) .HasForeignKey(d => d.ClientCompanyComplianceId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceQuestionnaire_ClientCompanyCompliance"); entity.HasOne(d => d.ComplianceQuestionnaireAnswer) .WithMany(p => p.ComplianceQuestionnaire) .HasForeignKey(d => d.ComplianceQuestionnaireAnswerId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceQuestionnaire_ComplianceQuestionnaireAnswer"); entity.HasOne(d => d.ComplianceQuestionnaireQuestion) .WithMany(p => p.ComplianceQuestionnaire) .HasForeignKey(d => d.ComplianceQuestionnaireQuestionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceQuestionnaire_ComplianceQuestionnaireQuestion"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ComplianceQuestionnaire) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceQuestionnaire_AuthUser"); }); modelBuilder.Entity<ComplianceQuestionnaireAnswer>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ComplianceQuestionnaireQuestionId).HasColumnName("ComplianceQuestionnaireQuestionID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50); entity.HasOne(d => d.ComplianceQuestionnaireQuestion) .WithMany(p => p.ComplianceQuestionnaireAnswer) .HasForeignKey(d => 
d.ComplianceQuestionnaireQuestionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ComplianceQuestionnaireAnswer_ComplianceQuestionnaireQuestion"); }); modelBuilder.Entity<ComplianceQuestionnaireQuestion>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(1000); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ComplianceReason>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); entity.Property(e => e.Sequence).HasDefaultValueSql("((0))"); }); modelBuilder.Entity<ComplianceTradeReason>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ContactCategory>(entity => { entity.HasIndex(e => e.Description) .HasName("IX_ContactCategory") .IsUnique(); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<Country>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CodeIso2) .IsRequired() .HasColumnName("CodeISO2") .HasMaxLength(2); entity.Property(e => e.CodeIso3) .IsRequired() .HasColumnName("CodeISO3") .HasMaxLength(3); entity.Property(e => e.CodeIso3numeric).HasColumnName("CodeISO3Numeric"); entity.Property(e => e.CountryGroupId).HasColumnName("CountryGroupID"); entity.Property(e => e.FormalName) .IsRequired() .HasMaxLength(200); entity.Property(e => e.IsEea).HasColumnName("IsEEA"); entity.Property(e => e.LengthIban).HasColumnName("LengthIBAN"); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(100); entity.Property(e => e.PhoneCode).HasMaxLength(25); entity.Property(e => e.RegexBban) .HasColumnName("RegexBBAN") .HasMaxLength(250); entity.HasOne(d => d.CountryGroup) .WithMany(p => p.Country) .HasForeignKey(d => d.CountryGroupId) .HasConstraintName("FK_Country_CountryGroup"); }); modelBuilder.Entity<CountryClearingCodePrefix>(entity => { entity.HasKey(e => new { e.CountryId, e.ClearingCodePrefixId }); entity.Property(e => e.CountryId).HasColumnName("CountryID"); entity.Property(e => e.ClearingCodePrefixId).HasColumnName("ClearingCodePrefixID"); entity.HasOne(d => d.ClearingCodePrefix) .WithMany(p => p.CountryClearingCodePrefix) .HasForeignKey(d => d.ClearingCodePrefixId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_CountryClearingCodePrefix_ClearingCodePrefix"); entity.HasOne(d => d.Country) .WithMany(p => p.CountryClearingCodePrefix) .HasForeignKey(d => d.CountryId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_CountryClearingCodePrefix_Country"); }); modelBuilder.Entity<CountryGroup>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<Currency>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Code) .IsRequired() .HasMaxLength(3) .IsUnicode(false); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.SwiftAmountFormat) .IsRequired() .HasMaxLength(50) .HasDefaultValueSql("('####.00')"); entity.Property(e => e.UpdateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() 
.IsRowVersion(); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.CurrencyCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Currency_CreatedByAuthUserId"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.CurrencyUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Currency_UpdatedByAuthUserId"); }); modelBuilder.Entity<CurrencyFxrate>(entity => { entity.HasKey(e => new { e.LhsCcyid, e.RhsCcyid }); entity.ToTable("CurrencyFXRate"); entity.Property(e => e.LhsCcyid).HasColumnName("lhsCCYID"); entity.Property(e => e.RhsCcyid).HasColumnName("rhsCCYID"); entity.Property(e => e.Rate) .HasColumnName("rate") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<CurrencyPairPricing>(entity => { entity.ToTable("viewCurrencyPairPricing"); entity.HasKey(e => e.CurrencyPair); entity.Property(e => e.Rate).HasColumnName("Rate"); entity.Property(e => e.RateTimeStamp).HasColumnName("RateTimeStamp"); entity.Property(e => e.FeedTimeStamp).HasColumnName("FeedTimeStamp"); entity.Property(e => e.RateCurrencyPair).HasColumnName("RateCurrecyPair"); }); modelBuilder.Entity<CurrencyPairPriceHistory>(entity => { entity.HasKey(e => new { e.CurrencyPair, e.PriceDate }); entity.Property(e => e.CurrencyPair) .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.PriceDate).HasColumnType("date"); entity.Property(e => e.Price).HasColumnType("decimal(25, 8)"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<CurrencyPairValidation>(entity => { entity.HasIndex(e => e.CurrencyPair) .HasName("UQ__Currency__FA4F09C27CDB6CCB") .IsUnique(); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CurrencyPair) .IsRequired() .HasMaxLength(20); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); }); modelBuilder.Entity<Emirreport>(entity => { entity.ToTable("EMIRReport"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.EmirreportOutgoingFileId).HasColumnName("EMIRReportOutgoingFileID"); entity.HasOne(d => d.EmirreportOutgoingFile) .WithMany(p => p.Emirreport) .HasForeignKey(d => d.EmirreportOutgoingFileId) .HasConstraintName("FK_EMIRReport_EMIRReportOutgoingFile"); }); modelBuilder.Entity<EmirreportField>(entity => { entity.ToTable("EMIRReportField"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AppSettingKey).HasMaxLength(50); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(250); entity.Property(e => e.EmirreportTypeId).HasColumnName("EMIRReportTypeID"); entity.Property(e => e.FieldCode) .IsRequired() .HasMaxLength(100); entity.Property(e => e.FieldName) .IsRequired() .HasMaxLength(100); entity.Property(e => e.FieldValue).HasMaxLength(100); entity.Property(e => e.IsActive) .IsRequired() .HasDefaultValueSql("((1))"); entity.HasOne(d => d.EmirreportType) .WithMany(p => p.EmirreportField) .HasForeignKey(d => d.EmirreportTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportField_EMIRReportType"); }); modelBuilder.Entity<EmirreportFxforwardTrade>(entity => { 
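// Links an EMIRReport submission to an individual FXForwardTrade and tracks the
// per-trade report type and EMIR status via the FKs configured below.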
entity.ToTable("EMIRReportFXForwardTrade"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.EmirreportId).HasColumnName("EMIRReportID"); entity.Property(e => e.EmirreportTypeId).HasColumnName("EMIRReportTypeID"); entity.Property(e => e.EmirstatusId).HasColumnName("EMIRStatusID"); entity.Property(e => e.EmirstatusUpdatedDateTime) .HasColumnName("EMIRStatusUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.FxforwardTradeCode) .IsRequired() .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.HasOne(d => d.Emirreport) .WithMany(p => p.EmirreportFxforwardTrade) .HasForeignKey(d => d.EmirreportId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportFxForwardTrade_EMIRReport"); entity.HasOne(d => d.EmirreportType) .WithMany(p => p.EmirreportFxforwardTrade) .HasForeignKey(d => d.EmirreportTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportFxForwardTrade_EMIRReportType"); entity.HasOne(d => d.Emirstatus) .WithMany(p => p.EmirreportFxforwardTrade) .HasForeignKey(d => d.EmirstatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportFxForwardTrade_EMIRStatus"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.EmirreportFxforwardTrade) .HasForeignKey(d => d.FxforwardTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportFxForwardTrade_FXForwardTrade"); }); modelBuilder.Entity<EmirreportIncomingFile>(entity => { entity.ToTable("EMIRReportIncomingFile"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.EmirreportId).HasColumnName("EMIRReportID"); entity.Property(e => e.EmirreportIncomingFileContentId).HasColumnName("EMIRReportIncomingFileContentID"); entity.Property(e => e.Xmlfilename) .HasColumnName("XMLFilename") .HasMaxLength(255); entity.Property(e => e.Zipfilename) .IsRequired() .HasColumnName("ZIPFilename") .HasMaxLength(255); entity.HasOne(d => d.Emirreport) .WithMany(p => p.EmirreportIncomingFile) .HasForeignKey(d => d.EmirreportId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportIncomingFile_EMIRReport"); entity.HasOne(d => d.EmirreportIncomingFileContent) .WithMany(p => p.EmirreportIncomingFile) .HasForeignKey(d => d.EmirreportIncomingFileContentId) .HasConstraintName("FK_EMIRReportIncomingFile_EMIRReportIncomingFileContent"); }); modelBuilder.Entity<EmirreportIncomingFileContent>(entity => { entity.ToTable("EMIRReportIncomingFileContent"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.FileContent).IsRequired(); }); modelBuilder.Entity<EmirreportOutgoingFile>(entity => { entity.ToTable("EMIRReportOutgoingFile"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.EmirreportOutgoingFileContentId).HasColumnName("EMIRReportOutgoingFileContentID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedFilename).HasMaxLength(255); entity.Property(e => e.Xmlfilename) .IsRequired() .HasColumnName("XMLFilename") .HasMaxLength(255); entity.HasOne(d => d.EmirreportOutgoingFileContent) .WithMany(p => p.EmirreportOutgoingFile) .HasForeignKey(d => d.EmirreportOutgoingFileContentId) .HasConstraintName("FK_EMIRReportOutgoingFile_EMIRReportOutgoingFileContent"); }); modelBuilder.Entity<EmirreportOutgoingFileContent>(entity => { entity.ToTable("EMIRReportOutgoingFileContent"); entity.Property(e => 
e.Id).HasColumnName("ID"); entity.Property(e => e.FileContent).IsRequired(); }); modelBuilder.Entity<EmirreportResponseCode>(entity => { entity.ToTable("EMIRReportResponseCode"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(250); entity.Property(e => e.ErrorMessage) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<EmirreportTradeResponseError>(entity => { entity.ToTable("EMIRReportTradeResponseError"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.EmirreportFxforwardTradeId).HasColumnName("EMIRReportFXForwardTradeID"); entity.Property(e => e.EmirreportResponseCodeId).HasColumnName("EMIRReportResponseCodeID"); entity.Property(e => e.ResponseMessage).HasMaxLength(500); entity.Property(e => e.Source) .IsRequired() .HasMaxLength(100); entity.HasOne(d => d.EmirreportFxforwardTrade) .WithMany(p => p.EmirreportTradeResponseError) .HasForeignKey(d => d.EmirreportFxforwardTradeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_EMIRReportTradeResponseError_EMIRReportFXForwardTrade"); entity.HasOne(d => d.EmirreportResponseCode) .WithMany(p => p.EmirreportTradeResponseError) .HasForeignKey(d => d.EmirreportResponseCodeId) .HasConstraintName("FK_EMIRReportTradeResponseError_EMIRReportResponseCode"); }); modelBuilder.Entity<EmirreportType>(entity => { entity.ToTable("EMIRReportType"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<Emirstatus>(entity => { entity.ToTable("EMIRStatus"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ExpectedFrequency>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); entity.Property(e => e.Value).HasDefaultValueSql("((0))"); }); modelBuilder.Entity<FixApareportField>(entity => { entity.ToTable("FixAPAReportField"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AppSettingKey).HasMaxLength(250); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(250); entity.Property(e => e.IsActive) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(100); entity.Property(e => e.Value).HasMaxLength(100); }); modelBuilder.Entity<FixApatradeCapture>(entity => { entity.ToTable("FixAPATradeCapture"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ApastatusId).HasColumnName("APAStatusID"); entity.Property(e => e.ApastatusUpdatedDateTime) .HasColumnName("APAStatusUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.BloombergTradeId) .HasColumnName("BloombergTradeID") .HasMaxLength(255) .IsUnicode(false); entity.Property(e => e.ErrorMessage) .HasMaxLength(500) .IsUnicode(false); entity.Property(e => e.PublishDateTime).HasColumnType("datetime"); entity.Property(e => e.RejectReason) .HasMaxLength(5) .IsUnicode(false); entity.Property(e => e.TradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.TradeReportId) .HasColumnName("TradeReportID") .HasMaxLength(150) .IsUnicode(false); entity.HasOne(d => d.Apastatus) .WithMany(p => p.FixApatradeCapture) .HasForeignKey(d => d.ApastatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FixAPATradeCapture_APAStatus"); entity.HasOne(d => d.AuthUser) 
.WithMany(p => p.FixApatradeCapture) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FixAPATradeCapture_AuthUser"); entity.HasOne(d => d.TradeCodeNavigation) .WithMany(p => p.FixApatradeCapture) .HasForeignKey(d => d.TradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FixAPATradeCapture_FxForwardTrade"); }); modelBuilder.Entity<FixApatradeMessage>(entity => { entity.ToTable("FixAPATradeMessage"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.FixMessage) .IsRequired() .HasMaxLength(4000) .IsUnicode(false); entity.Property(e => e.MessageDate).HasColumnType("datetime"); entity.Property(e => e.TradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.HasOne(d => d.TradeCodeNavigation) .WithMany(p => p.FixApatradeMessage) .HasForeignKey(d => d.TradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FixAPATradeMessage_FxForwardTrade"); }); modelBuilder.Entity<FixFxforwardTradeOrder>(entity => { entity.ToTable("FixFXForwardTradeOrder"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.BarclaysAssignedId) .IsRequired() .HasMaxLength(256) .IsUnicode(false); entity.Property(e => e.BarclaysTradeId) .IsRequired() .HasMaxLength(256) .IsUnicode(false); entity.Property(e => e.ErrorMessage) .HasMaxLength(1000) .IsUnicode(false); entity.Property(e => e.FxforwardCode) .IsRequired() .HasColumnName("FXForwardCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.OrderDate).HasColumnType("datetime"); entity.Property(e => e.RejectReason) .HasMaxLength(1000) .IsUnicode(false); entity.HasOne(d => d.FxforwardCodeNavigation) .WithMany(p => p.FixFxforwardTradeOrder) .HasForeignKey(d => d.FxforwardCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FixTradeOrder_FXForwardTrade"); entity.HasOne(d => d.User) .WithMany(p => p.FixFxforwardTradeOrder) .HasForeignKey(d => d.UserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FixTradeOrder_AuthUser"); }); modelBuilder.Entity<FixQuote>(entity => { entity.HasKey(e => e.QuoteId); entity.Property(e => e.QuoteId) .HasMaxLength(256) .IsUnicode(false) .ValueGeneratedNever(); entity.Property(e => e.TradeId) .IsRequired() .HasMaxLength(100) .IsUnicode(false); }); modelBuilder.Entity<FixQuoteCancelled>(entity => { entity.HasKey(e => e.QuoteId); entity.Property(e => e.QuoteId) .HasMaxLength(256) .IsUnicode(false) .ValueGeneratedNever(); }); modelBuilder.Entity<FixTradeMessage>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.FixMessage) .IsRequired() .HasMaxLength(4000) .IsUnicode(false); entity.Property(e => e.MessageDate).HasColumnType("datetime"); entity.Property(e => e.TradeCode) .IsRequired() .HasMaxLength(256) .IsUnicode(false); }); modelBuilder.Entity<FxforwardTrade>(entity => { entity.HasKey(e => e.Code); entity.ToTable("FXForwardTrade"); entity.HasIndex(e => new { e.ClientCompanyId, e.Deleted, e.ContractDate }) .HasName("_dta_index_FXForwardTrade_52_562101043__K7_K31_K13"); entity.Property(e => e.Code) .HasMaxLength(100) .IsUnicode(false) .ValueGeneratedNever(); entity.Property(e => e.ApastatusId).HasColumnName("APAStatusID"); entity.Property(e => e.ApastatusUpdatedDateTime) .HasColumnName("APAStatusUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.Armreported).HasColumnName("ARMReported"); entity.Property(e => e.ArmstatusId).HasColumnName("ARMStatusID"); entity.Property(e => e.ArmstatusUpdatedDateTime) 
.HasColumnName("ARMStatusUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.AuthorisedByClientCompanyContactId).HasColumnName("AuthorisedByClientCompanyContactID"); entity.Property(e => e.BdpforwardPoints) .HasColumnName("BDPForwardPoints") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokenDatePrice).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerId).HasColumnName("BrokerID"); entity.Property(e => e.BrokerLhsamt) .HasColumnName("BrokerLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerRhsamt) .HasColumnName("BrokerRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokeredDate).HasColumnType("datetime"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.ClientLhsamt) .HasColumnName("ClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamt) .HasColumnName("ClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.CollateralPerc).HasColumnType("decimal(25, 8)"); entity.Property(e => e.CommPaidOutDate).HasColumnType("date"); entity.Property(e => e.ComplianceIsin) .HasColumnName("Compliance_ISIN") .HasMaxLength(12); entity.Property(e => e.ComplianceTradeReasonId).HasColumnName("ComplianceTradeReasonID"); entity.Property(e => e.ContractDate).HasColumnType("date"); entity.Property(e => e.ContractNoteSentToClientDateTime).HasColumnType("datetime"); entity.Property(e => e.ContractNoteSentToMyselfDateTime).HasColumnType("datetime"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.CurrencyPair) .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.DeliveredDate).HasColumnType("datetime"); entity.Property(e => e.EmirDelegatedReported).HasColumnName("EMIR_DelegatedReported"); entity.Property(e => e.EmirReported).HasColumnName("EMIR_Reported"); entity.Property(e => e.EmirReportedDateTime) .HasColumnName("EMIR_ReportedDateTime") .HasColumnType("datetime"); entity.Property(e => e.EmirUti) .HasColumnName("EMIR_UTI") .HasMaxLength(100); entity.Property(e => e.EmirdelegatedSubmissionId) .HasColumnName("EMIRDelegatedSubmissionID") .HasMaxLength(50); entity.Property(e => e.EmirstatusId).HasColumnName("EMIRStatusID"); entity.Property(e => e.EmirstatusUpdatedDateTime) .HasColumnName("EMIRStatusUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.EmirsubmissionId) .HasColumnName("EMIRSubmissionID") .HasMaxLength(50); entity.Property(e => e.FilledDateTime).HasColumnType("datetime"); entity.Property(e => e.FxforwardTradeStatusId).HasColumnName("FXForwardTradeStatusID"); entity.Property(e => e.IsApareportable) .IsRequired() .HasColumnName("IsAPAReportable") .HasDefaultValueSql("((1))"); entity.Property(e => e.IsArmreportable) .IsRequired() .HasColumnName("IsARMReportable") .HasDefaultValueSql("((1))"); entity.Property(e => e.IsComplianceRegulated) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.IsComplianceSupported) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.IsEmirreportable) .IsRequired() .HasColumnName("IsEMIRReportable") .HasDefaultValueSql("((1))"); entity.Property(e => e.IsOrder).HasDefaultValueSql("((0))"); 
entity.Property(e => e.IsRhsmajor).HasColumnName("IsRHSMajor"); entity.Property(e => e.Lhsccyid).HasColumnName("LHSCCYID"); entity.Property(e => e.MarkToMarketValue).HasColumnType("decimal(25, 8)"); entity.Property(e => e.MarkToMarketValueUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.MarketSideUti) .HasColumnName("MarketSideUTI") .HasMaxLength(100); entity.Property(e => e.OpenValueDate).HasColumnType("date"); entity.Property(e => e.OpiupdatedByAuthUserId).HasColumnName("OPIUpdatedByAuthUserId"); entity.Property(e => e.OpiupdatedDateTime) .HasColumnName("OPIUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.PrevDayMarktoMarket).HasColumnType("decimal(25, 8)"); entity.Property(e => e.PrevDayMarktoMarketUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.PrevailingRate2).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Profit).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ProfitConsolidatedDateTime).HasColumnType("datetime"); entity.Property(e => e.ProfitConsolidatedValue).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ProfitGbprate) .HasColumnName("ProfitGBPRate") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Reference).HasMaxLength(20); entity.Property(e => e.RemainingClientLhsamt) .HasColumnName("RemainingClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.RemainingClientRhsamt) .HasColumnName("RemainingClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Rhsccyid).HasColumnName("RHSCCYID"); entity.Property(e => e.SettledDate).HasColumnType("datetime"); entity.Property(e => e.TradeInstructionMethodId).HasColumnName("TradeInstructionMethodID"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.ValueDate).HasColumnType("date"); entity.Property(e => e.VerifiedByAuthUserId).HasColumnName("VerifiedByAuthUserID"); entity.HasOne(d => d.Apastatus) .WithMany(p => p.FxforwardTradeApastatus) .HasForeignKey(d => d.ApastatusId) .HasConstraintName("FK_FXForwardTrade_APAStatus"); entity.HasOne(d => d.Armstatus) .WithMany(p => p.FxforwardTradeArmstatus) .HasForeignKey(d => d.ArmstatusId) .HasConstraintName("FK_FXForwardTrade_ARMStatus"); entity.HasOne(d => d.AuthorisedByClientCompanyContact) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.AuthorisedByClientCompanyContactId) .HasConstraintName("FK_FXForwardTrade_ClientCompanyContact"); entity.HasOne(d => d.Broker) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.BrokerId) .HasConstraintName("FK_FXForwardTrade_Broker"); entity.HasOne(d => d.BrokeredByAuthUser) .WithMany(p => p.FxforwardTradeBrokeredByAuthUser) .HasForeignKey(d => d.BrokeredByAuthUserId) .HasConstraintName("FK_FXForwardTrade_BrokeredByAuthUser"); entity.HasOne(d => d.ClientCompanyNavigation) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade_ClientCompany"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.ClientCompanyOpiid) .HasConstraintName("FK_FXForwardTrade_ClientCompanyOPI"); entity.HasOne(d => d.ComplianceTradeReason) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.ComplianceTradeReasonId) 
.HasConstraintName("FK_FXForwardTrade_ComplianceTradeReason"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.FxforwardTradeCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade_AuthUser"); entity.HasOne(d => d.Emirstatus) .WithMany(p => p.FxforwardTradeEmirstatus) .HasForeignKey(d => d.EmirstatusId) .HasConstraintName("FK_FXForwardTrade_EMIRStatus"); entity.HasOne(d => d.FilledByAuthUser) .WithMany(p => p.FxforwardTradeFilledByAuthUser) .HasForeignKey(d => d.FilledByAuthUserId) .HasConstraintName("FK_FXForwardTrade_FilledByAuthUser"); entity.HasOne(d => d.FxforwardTradeStatus) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.FxforwardTradeStatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade_FXForwardTradeStatus"); entity.HasOne(d => d.Lhsccy) .WithMany(p => p.FxforwardTradeLhsccy) .HasForeignKey(d => d.Lhsccyid) .HasConstraintName("FK_FXForwardTrade_Currency"); entity.HasOne(d => d.OpiupdatedByAuthUser) .WithMany(p => p.FxforwardTradeOpiupdatedByAuthUser) .HasForeignKey(d => d.OpiupdatedByAuthUserId) .HasConstraintName("FK_FXForwardTrade_OPIUpdatedByAuthUser"); entity.HasOne(d => d.Rhsccy) .WithMany(p => p.FxforwardTradeRhsccy) .HasForeignKey(d => d.Rhsccyid) .HasConstraintName("FK_FXForwardTrade_Currency1"); entity.HasOne(d => d.TradeInstructionMethod) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.TradeInstructionMethodId) .HasConstraintName("FK_FXForwardTrade_TradeInstructionMethod"); entity.HasOne(d => d.TransactionCommit) .WithMany(p => p.FxforwardTrade) .HasForeignKey(d => d.TransactionCommitId) .HasConstraintName("FK_FXForwardTrade_TransactionCommit"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.FxforwardTradeUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade_AuthUser1"); entity.HasOne(d => d.VerifiedByAuthUser) .WithMany(p => p.FxforwardTradeVerifiedByAuthUser) .HasForeignKey(d => d.VerifiedByAuthUserId) .HasConstraintName("FK_FXForwardTrade_AuthUser2"); }); modelBuilder.Entity<FxforwardTrade2Opi>(entity => { entity.ToTable("FXForwardTrade2OPI"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Amount).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Details).HasMaxLength(100); entity.Property(e => e.FxforwardTradeCode) .IsRequired() .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.TradeValueDate).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.FxforwardTrade2Opi) .HasForeignKey(d => d.ClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade2OPI_ClientCompanyOPI"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.FxforwardTrade2Opi) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade2OPI_AuthUser"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.FxforwardTrade2Opi) .HasForeignKey(d => d.FxforwardTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTrade2OPI_FXForwardTrade"); }); 
modelBuilder.Entity<FxforwardTradeInvoice>(entity => { entity.ToTable("FXForwardTradeInvoice"); entity.HasIndex(e => new { e.TradeCode, e.FileName }) .HasName("IDX_TradeCodeFilename") .IsUnique() .HasFilter("([FileName] IS NOT NULL)"); entity.Property(e => e.Id) .HasColumnName("ID") .ValueGeneratedOnAdd(); entity.Property(e => e.Comment).HasMaxLength(500); entity.Property(e => e.DocumentId).HasColumnName("DocumentID"); entity.Property(e => e.FileName) .HasMaxLength(250) .IsUnicode(false); entity.Property(e => e.TradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.IdNavigation) .WithOne(p => p.InverseIdNavigation) .HasForeignKey<FxforwardTradeInvoice>(d => d.Id) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTradeInvoice_FXForwardTradeInvoice"); entity.HasOne(d => d.TradeCodeNavigation) .WithMany(p => p.FxforwardTradeInvoice) .HasForeignKey(d => d.TradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTradeInvoice_FXForwardTrade"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.FxforwardTradeInvoice) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXForwardTradeInvoice_AuthUser"); }); modelBuilder.Entity<FxforwardTradeStatus>(entity => { entity.ToTable("FXForwardTradeStatus"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<FxforwardTradeSwapCount>(entity => { entity.HasKey(e => e.FxforwardTradeCode); entity.ToTable("FXForwardTradeSwapCount"); entity.Property(e => e.FxforwardTradeCode) .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false) .ValueGeneratedNever(); entity.Property(e => e.SwapCount).HasDefaultValueSql("((0))"); }); modelBuilder.Entity<Fxoption>(entity => { entity.HasKey(e => e.Code); entity.ToTable("FXOption"); entity.Property(e => e.Code) .HasMaxLength(100) .IsUnicode(false) .ValueGeneratedNever(); entity.Property(e => e.AuthorisedByClientCompanyContactId).HasColumnName("AuthorisedByClientCompanyContactID"); entity.Property(e => e.Barrier).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BestCaseRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerId).HasColumnName("BrokerID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.ClientLhsamt) .HasColumnName("ClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientLhsamtNotional) .HasColumnName("ClientLHSAmtNotional") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamt) .HasColumnName("ClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamtNotional) .HasColumnName("ClientRHSAmtNotional") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.CommPaidOutDate).HasColumnType("date"); entity.Property(e => e.ContractDate).HasColumnType("date"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); 
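// FXOption mappings below: barrier/strike/leverage and extension fields, notionals and
// rates as decimal(25, 8), plus status/type references and the usual audit columns.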
entity.Property(e => e.CurrencyPair) .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.Deleted).HasDefaultValueSql("((0))"); entity.Property(e => e.DeliveredDate).HasColumnType("datetime"); entity.Property(e => e.ExpiryDate).HasColumnType("date"); entity.Property(e => e.ExtBarrier).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ExtBarrierDate).HasColumnType("datetime"); entity.Property(e => e.ExtStrike).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ExtValueDate).HasColumnType("datetime"); entity.Property(e => e.ForwardRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.FxoptionSettlementId).HasColumnName("FXOptionSettlementID"); entity.Property(e => e.FxoptionStatusId).HasColumnName("FXOptionStatusID"); entity.Property(e => e.FxoptionTypeId).HasColumnName("FXOptionTypeID"); entity.Property(e => e.GraphImgTemplateFile).HasMaxLength(150); entity.Property(e => e.IsBuy).HasDefaultValueSql("((0))"); entity.Property(e => e.IsExpired).HasDefaultValueSql("((0))"); entity.Property(e => e.IsExtended).HasDefaultValueSql("((0))"); entity.Property(e => e.IsKnockedIn).HasDefaultValueSql("((0))"); entity.Property(e => e.IsKnockedOut).HasDefaultValueSql("((0))"); entity.Property(e => e.IsLeveraged).HasDefaultValueSql("((0))"); entity.Property(e => e.IsRhsmajour).HasColumnName("IsRHSMajour"); entity.Property(e => e.KnockInRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.KnockOutRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevBarrier).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevBarrierDate).HasColumnType("datetime"); entity.Property(e => e.LevNotional).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevStrike).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevValueDate).HasColumnType("datetime"); entity.Property(e => e.Lhsccyid).HasColumnName("LHSCCYID"); entity.Property(e => e.OptionTrigger).HasColumnType("decimal(25, 8)"); entity.Property(e => e.OptionTriggerProtecLvl).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ParentCode) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.PercentagePart).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Premium).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Profit).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ProtectedLevel).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Rhsccyid).HasColumnName("RHSCCYID"); entity.Property(e => e.SettledDate).HasColumnType("datetime"); entity.Property(e => e.SettlementTradeId).HasColumnName("SettlementTradeID"); entity.Property(e => e.TradeInstructionMethodId).HasColumnName("TradeInstructionMethodID"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.ValueDate).HasColumnType("date"); entity.Property(e => e.Verified).HasDefaultValueSql("((0))"); entity.Property(e => e.VerifiedByAuthUserId).HasColumnName("VerifiedByAuthUserID"); entity.Property(e => e.WorstCaseRate).HasColumnType("decimal(25, 8)"); entity.HasOne(d => d.AuthorisedByClientCompanyContact) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.AuthorisedByClientCompanyContactId) .HasConstraintName("FK_FXOption_ClientCompanyContact"); entity.HasOne(d => d.Broker) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.BrokerId) 
.HasConstraintName("FK_FXOption_Broker"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXOption_ClientCompany"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.ClientCompanyOpiid) .HasConstraintName("FK_FXOption_ClientCompanyOPI"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.FxoptionCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXOption_AuthUser"); entity.HasOne(d => d.FxoptionStatus) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.FxoptionStatusId) .HasConstraintName("FK_FXOption_FXOptionStatus"); entity.HasOne(d => d.Lhsccy) .WithMany(p => p.FxoptionLhsccy) .HasForeignKey(d => d.Lhsccyid) .HasConstraintName("FK_FXOption_Currency"); entity.HasOne(d => d.Rhsccy) .WithMany(p => p.FxoptionRhsccy) .HasForeignKey(d => d.Rhsccyid) .HasConstraintName("FK_FXOption_Currency1"); entity.HasOne(d => d.TradeInstructionMethod) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.TradeInstructionMethodId) .HasConstraintName("FK_FXOption_TradeInstructionMethod"); entity.HasOne(d => d.TransactionCommit) .WithMany(p => p.Fxoption) .HasForeignKey(d => d.TransactionCommitId) .HasConstraintName("FK_FXOption_TransactionCommit"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.FxoptionUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXOption_AuthUser1"); entity.HasOne(d => d.VerifiedByAuthUser) .WithMany(p => p.FxoptionVerifiedByAuthUser) .HasForeignKey(d => d.VerifiedByAuthUserId) .HasConstraintName("FK_FXOption_AuthUser2"); }); modelBuilder.Entity<FxoptionOutputs>(entity => { entity.ToTable("FXOptionOutputs"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.ExternalTradeCode).HasMaxLength(50); entity.Property(e => e.FxoptionCode) .HasColumnName("FXOptionCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.FxoptionOutputsTemplateId).HasColumnName("FXOptionOutputsTemplateID"); entity.Property(e => e.Outputs).HasMaxLength(150); }); modelBuilder.Entity<FxoptionOutputsTemplate>(entity => { entity.ToTable("FXOptionOutputsTemplate"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.FxoptionTypeId).HasColumnName("FXOptionTypeID"); entity.Property(e => e.Template).HasMaxLength(150); }); modelBuilder.Entity<FxoptionSettlements>(entity => { entity.ToTable("FXOptionSettlements"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthorisedByClientCompanyContactId).HasColumnName("AuthorisedByClientCompanyContactID"); entity.Property(e => e.BrokerId).HasColumnName("BrokerID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ClientLhsamt) .HasColumnName("ClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamt) .HasColumnName("ClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Code).HasMaxLength(50); entity.Property(e => e.ContractDate).HasColumnType("date"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.CurrencyPair) .HasMaxLength(6) 
.IsUnicode(false); entity.Property(e => e.Description).HasMaxLength(150); entity.Property(e => e.FxoptionCode) .HasColumnName("FXOptionCode") .HasMaxLength(50); entity.Property(e => e.FxoptionSettlementsTemplateId).HasColumnName("FXOptionSettlementsTemplateID"); entity.Property(e => e.IsRhsmajour).HasColumnName("IsRHSMajour"); entity.Property(e => e.IsSettled).HasDefaultValueSql("((0))"); entity.Property(e => e.Lhsccyid).HasColumnName("LHSCCYID"); entity.Property(e => e.Notional).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Rhsccyid).HasColumnName("RHSCCYID"); entity.Property(e => e.TradeInstructionMethodId).HasColumnName("TradeInstructionMethodID"); entity.Property(e => e.ValueDate).HasColumnType("date"); }); modelBuilder.Entity<FxoptionSettlementsTemplate>(entity => { entity.ToTable("FXOptionSettlementsTemplate"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientRate).HasMaxLength(50); entity.Property(e => e.FxoptionTypeId).HasColumnName("FXOptionTypeID"); entity.Property(e => e.Notional).HasMaxLength(50); entity.Property(e => e.Template).HasMaxLength(150); entity.Property(e => e.TradeCodeSuffix).HasMaxLength(10); }); modelBuilder.Entity<FxoptionStatus>(entity => { entity.ToTable("FXOptionStatus"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<FxoptionType>(entity => { entity.ToTable("FXOptionType"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.ExtOptionTypeId).HasColumnName("ExtOptionTypeID"); entity.Property(e => e.LevOptionTypeId).HasColumnName("LevOptionTypeID"); entity.Property(e => e.TermSheetImg) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.VisibleInputs) .HasMaxLength(200) .IsUnicode(false); }); modelBuilder.Entity<Fxswap>(entity => { entity.ToTable("FXSwap"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedAuthUserId).HasColumnName("CreatedAuthUserID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.DeliveryLegTradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.ParentTradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.ReversalLegTradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.CreatedAuthUser) .WithMany(p => p.Fxswap) .HasForeignKey(d => d.CreatedAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXSwap_CreatedAuthUser"); entity.HasOne(d => d.DeliveryLegTradeCodeNavigation) .WithMany(p => p.FxswapDeliveryLegTradeCodeNavigation) .HasForeignKey(d => d.DeliveryLegTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXSwap_DeliveryLegFXForwardTrade"); entity.HasOne(d => d.ParentTradeCodeNavigation) .WithMany(p => p.FxswapParentTradeCodeNavigation) .HasForeignKey(d => d.ParentTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXSwap_ParentFXForwardTrade"); entity.HasOne(d => d.ReversalLegTradeCodeNavigation) .WithMany(p => p.FxswapReversalLegTradeCodeNavigation) .HasForeignKey(d => d.ReversalLegTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_FXSwap_ReversalLegFXForwardTrade"); }); 
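// GlobalSearchScope is a small lookup: each row names a search scope and the stored
// procedure that services it (Description, StoredProcName). A minimal read-only usage
// sketch -- hypothetical, assuming the scaffolded context exposes a
// DbSet<GlobalSearchScope> property named GlobalSearchScope:
//   var scopes = dbContext.GlobalSearchScope.AsNoTracking()
//       .Select(s => new { s.Id, s.Description, s.StoredProcName })
//       .ToList();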
modelBuilder.Entity<GlobalSearchScope>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.StoredProcName) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<IntroducingBroker>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Email).HasMaxLength(100); entity.Property(e => e.FullName) .IsRequired() .HasMaxLength(101) .HasComputedColumnSql("(([Name]+' ')+isnull([Surname],''))"); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Surname).HasMaxLength(50); entity.Property(e => e.Telephone).HasMaxLength(50); entity.Property(e => e.UpdateAuthUserId).HasColumnName("UpdateAuthUserID"); entity.Property(e => e.UpdateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); }); modelBuilder.Entity<LastWorkingDay>(entity => { entity.HasKey(e => new { e.Year, e.Month }); entity.Property(e => e.LastWorkingDay1) .HasColumnName("LastWorkingDay") .HasColumnType("date"); }); modelBuilder.Entity<LogAuthUser>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ApplicationId).HasColumnName("ApplicationID"); entity.Property(e => e.Comment).HasMaxLength(200); entity.Property(e => e.CreateDate).HasColumnType("datetime"); entity.Property(e => e.Email) .IsRequired() .HasMaxLength(100); entity.Property(e => e.FailedPasswordAttemptWindowStart).HasColumnType("datetime"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LastActivityDate).HasColumnType("datetime"); entity.Property(e => e.LastLockOutDate).HasColumnType("datetime"); entity.Property(e => e.LastLoginDate).HasColumnType("datetime"); entity.Property(e => e.LastPasswordChangeDate).HasColumnType("datetime"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(100); entity.Property(e => e.Password) .IsRequired() .HasMaxLength(255) .IsUnicode(false); entity.Property(e => e.UserName) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<LogBankAccountCurrencyDetails>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.BankAccountIban) .HasColumnName("BankAccountIBAN") .HasMaxLength(50); entity.Property(e => e.BankAccountId).HasColumnName("BankAccountID"); entity.Property(e => e.BankAccountName).HasMaxLength(100); entity.Property(e => e.BankAccountNumber).HasMaxLength(50); entity.Property(e => e.BankAccountSort).HasMaxLength(8); entity.Property(e => e.BankAccountSwift).HasMaxLength(11); entity.Property(e => e.BankAddress).HasMaxLength(400); entity.Property(e => e.BankName).HasMaxLength(100); entity.Property(e => e.BeneficiaryAddress).HasMaxLength(400); entity.Property(e => e.BeneficiaryName).HasMaxLength(100); entity.Property(e => e.ClearingCodePrefixId).HasColumnName("ClearingCodePrefixID"); entity.Property(e => e.CountryId).HasColumnName("CountryID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false) .HasDefaultValueSql("('CREATED')"); entity.Property(e => e.UpdateDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<LogBreach>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => 
e.BreachLevelId).HasColumnName("BreachLevelID"); entity.Property(e => e.BreachTypeId).HasColumnName("BreachTypeID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.Notes).HasMaxLength(500); entity.Property(e => e.OriginalLimit).HasMaxLength(250); entity.Property(e => e.OverrideValue).HasMaxLength(250); entity.Property(e => e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.TradeCode) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogBreachInvoice>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.Comment).HasMaxLength(500); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName).HasMaxLength(250); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(25); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogCassRecs>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CassRecsDate).HasColumnType("date"); entity.Property(e => e.CassRecsStatementFileId).HasColumnName("CassRecsStatementFileID"); entity.Property(e => e.Check1ByAuthUserId).HasColumnName("Check1ByAuthUserID"); entity.Property(e => e.Check1UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Check2ByAuthUserId).HasColumnName("Check2ByAuthUserID"); entity.Property(e => e.Check2UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.CompletedByAuthUserId).HasColumnName("CompletedByAuthUserID"); entity.Property(e => e.CompletedDateTime).HasColumnType("datetime"); entity.Property(e => e.CurrencyCode) .IsRequired() .HasMaxLength(3) .IsUnicode(false); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LastNightsClosingLedger).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogCassRecsPaymentFile>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CassRecsDate).HasColumnType("date"); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName) .IsRequired() .HasMaxLength(250); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => 
e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogCassRecsStatementFile>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CassRecsDate).HasColumnType("date"); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName) .IsRequired() .HasMaxLength(250); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyCompliance>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.AmlriskId).HasColumnName("AMLRiskID"); entity.Property(e => e.BalanceSheetGbp) .HasColumnName("BalanceSheetGBP") .HasColumnType("decimal(25, 2)"); entity.Property(e => e.ClassificationId).HasColumnName("ClassificationID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ExpectedFrequencyId).HasColumnName("ExpectedFrequencyID"); entity.Property(e => e.ExpectedMaxTradeSize).HasColumnType("decimal(25, 2)"); entity.Property(e => e.ExpectedTotalVolume).HasColumnType("decimal(25, 2)"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.IsMiFid).HasColumnName("IsMiFID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50); entity.Property(e => e.NatureId).HasColumnName("NatureID"); entity.Property(e => e.OwnFundsGbp) .HasColumnName("OwnFundsGBP") .HasColumnType("decimal(25, 2)"); entity.Property(e => e.ReasonId).HasColumnName("ReasonID"); entity.Property(e => e.RefreshDueDateTime).HasColumnType("datetime"); entity.Property(e => e.RegisteredDomicileCountryId).HasColumnName("RegisteredDomicileCountryID"); entity.Property(e => e.Ttca).HasColumnName("TTCA"); entity.Property(e => e.TurnoverGbp) .HasColumnName("TurnoverGBP") .HasColumnType("decimal(25, 2)"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(8); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyComplianceCorporateSector>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.ComplianceCorporateSectorFinancialId).HasColumnName("ComplianceCorporateSectorFinancialID"); entity.Property(e => e.ComplianceCorporateSectorNonFinancialId).HasColumnName("ComplianceCorporateSectorNonFinancialID"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(25); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyComplianceNote>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); 
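// Remaining LogClientCompanyComplianceNote columns: the note body (NoteText) is required,
// Title is capped at 100 characters, and LogAction records which audit action produced the row.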
entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50); entity.Property(e => e.NoteText).IsRequired(); entity.Property(e => e.Title).HasMaxLength(100); }); modelBuilder.Entity<LogClientCompanyContact>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.AspcreationDate) .HasColumnName("ASPCreationDate") .HasColumnType("datetime"); entity.Property(e => e.Aspnumber) .HasColumnName("ASPNumber") .HasMaxLength(9); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.Birthday).HasColumnType("date"); entity.Property(e => e.BloombergGpi).HasMaxLength(255); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.Email).HasMaxLength(200); entity.Property(e => e.Forename) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Fullname) .IsRequired() .HasMaxLength(162) .HasComputedColumnSql("(((isnull([Title]+' ','')+[Forename])+' ')+[Surname])"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LastEmailChangeDate).HasColumnType("datetime"); entity.Property(e => e.LastTelephoneChangeDate).HasColumnType("datetime"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(100); entity.Property(e => e.NiNumber).HasMaxLength(50); entity.Property(e => e.Position).HasMaxLength(50); entity.Property(e => e.RecAmreport).HasColumnName("RecAMReport"); entity.Property(e => e.Surname) .IsRequired() .HasMaxLength(100); entity.Property(e => e.TelephoneDirect).HasMaxLength(50); entity.Property(e => e.TelephoneMobile).HasMaxLength(50); entity.Property(e => e.TelephoneOther).HasMaxLength(50); entity.Property(e => e.Title).HasMaxLength(10); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyContactCategory>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyContactId).HasColumnName("ClientCompanyContactID"); entity.Property(e => e.ContactCategoryId).HasColumnName("ContactCategoryID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.DateCreated).HasColumnType("datetime"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false) .HasDefaultValueSql("('UPDATE')"); }); modelBuilder.Entity<LogClientCompanyLinkedGroup>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LastUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); }); modelBuilder.Entity<LogClientCompanyOnlineDetails>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.Collateral).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false) .HasDefaultValueSql("('UPDATE')"); entity.Property(e => 
e.MaxOpen).HasColumnType("decimal(25, 8)"); entity.Property(e => e.MaxTenor).HasColumnType("datetime"); entity.Property(e => e.MaxTradeSize).HasColumnType("decimal(25, 8)"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyOnlineDetailsSkew>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyOnlineDetailsId).HasColumnName("ClientCompanyOnlineDetailsID"); entity.Property(e => e.Currency1Id).HasColumnName("Currency1ID"); entity.Property(e => e.Currency2Id).HasColumnName("Currency2ID"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false) .HasDefaultValueSql("('UPDATE')"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyOpi>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogClientCompanyOPI"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.AccountName) .IsRequired() .HasMaxLength(100); entity.Property(e => e.AccountNumber) .IsRequired() .HasMaxLength(50); entity.Property(e => e.AuthorisedByAuthUserId).HasColumnName("AuthorisedByAuthUserID"); entity.Property(e => e.AuthorisedDateTime).HasColumnType("datetime"); entity.Property(e => e.BankName) .IsRequired() .HasMaxLength(100); entity.Property(e => e.BeneficiaryName).HasMaxLength(250); entity.Property(e => e.ClearingCodePrefixId).HasColumnName("ClearingCodePrefixID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CountryId).HasColumnName("CountryID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate).HasColumnType("datetime"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Iban) .HasColumnName("IBAN") .HasMaxLength(50); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.Reference) .IsRequired() .HasMaxLength(50); entity.Property(e => e.RejectedByAuthUserId).HasColumnName("RejectedByAuthUserID"); entity.Property(e => e.RejectedDateTime).HasColumnType("datetime"); entity.Property(e => e.SortCode).HasMaxLength(50); entity.Property(e => e.SwiftCode) .HasMaxLength(11) .IsUnicode(false); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(50); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDate).HasColumnType("datetime"); }); modelBuilder.Entity<LogClientCompanyOpiduplicate>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogClientCompanyOPIDuplicate"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.DuplicateClientCompanyOpiid).HasColumnName("DuplicateClientCompanyOPIID"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => 
e.IsOk).HasColumnName("IsOK"); entity.Property(e => e.IsOkupdatedByAuthUserId).HasColumnName("IsOKUpdatedByAuthUserID"); entity.Property(e => e.IsOkupdatedDateTime) .HasColumnName("isOKUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.Note).HasMaxLength(250); entity.Property(e => e.OriginalClientCompanyOpiid).HasColumnName("OriginalClientCompanyOPIID"); }); modelBuilder.Entity<LogClientCompanySalesAppUser>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50); entity.Property(e => e.SalesPersonAppUserId).HasColumnName("SalesPersonAppUserID"); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogComplianceClassificationFile>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.ComplianceClassificationId).HasColumnName("ComplianceClassificationID"); entity.Property(e => e.DocumentId) .IsRequired() .HasColumnName("DocumentID") .HasMaxLength(100); entity.Property(e => e.FileName) .IsRequired() .HasMaxLength(250); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedByAuthUserId).HasColumnName("UploadedByAuthUserID"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogComplianceIsincurrencyValueDate>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogComplianceISINCurrencyValueDate"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CurrencyPair) .IsRequired() .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Isin) .IsRequired() .HasColumnName("ISIN") .HasMaxLength(12); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.ValueDate).HasColumnType("date"); }); modelBuilder.Entity<LogComplianceQuestionnaire>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyComplianceId).HasColumnName("ClientCompanyComplianceID"); entity.Property(e => e.ComplianceQuestionnaireAnswerId).HasColumnName("ComplianceQuestionnaireAnswerID"); entity.Property(e => e.ComplianceQuestionnaireQuestionId).HasColumnName("ComplianceQuestionnaireQuestionID"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(25); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogCurrency>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.Code) .IsRequired() 
.HasMaxLength(3) .IsUnicode(false); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false) .HasDefaultValueSql("('CREATED')"); entity.Property(e => e.SwiftAmountFormat) .IsRequired() .HasMaxLength(50); entity.Property(e => e.UpdateDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<LogCurrencyPairPriceHistory>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CurrencyPair) .IsRequired() .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.Price).HasColumnType("decimal(25, 8)"); entity.Property(e => e.PriceDate).HasColumnType("date"); entity.Property(e => e.UpdateTimeStamp).HasMaxLength(8); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogFxforwardTrade>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogFXForwardTrade"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.AuthorisedByClientCompanyContactId).HasColumnName("AuthorisedByClientCompanyContactID"); entity.Property(e => e.BdpforwardPoints) .HasColumnName("BDPForwardPoints") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokenDatePrice).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerId).HasColumnName("BrokerID"); entity.Property(e => e.BrokerLhsamt) .HasColumnName("BrokerLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerRhsamt) .HasColumnName("BrokerRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokeredDate).HasColumnType("datetime"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.ClientLhsamt) .HasColumnName("ClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamt) .HasColumnName("ClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Code) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.CollateralPerc).HasColumnType("decimal(25, 8)"); entity.Property(e => e.CommPaidOutDate).HasColumnType("date"); entity.Property(e => e.ComplianceIsin) .HasColumnName("Compliance_ISIN") .HasMaxLength(12); entity.Property(e => e.ComplianceTradeReasonId).HasColumnName("ComplianceTradeReasonID"); entity.Property(e => e.ContractDate).HasColumnType("date"); entity.Property(e => e.ContractNoteSentToClientDateTime).HasColumnType("datetime"); entity.Property(e => e.ContractNoteSentToMyselfDateTime).HasColumnType("datetime"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate).HasColumnType("datetime"); entity.Property(e => e.CurrencyPair) .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.DeliveredDate).HasColumnType("datetime"); entity.Property(e => e.EmirReported).HasColumnName("EMIR_Reported"); entity.Property(e => e.EmirReportedDateTime) .HasColumnName("EMIR_ReportedDateTime") .HasColumnType("datetime"); entity.Property(e => e.EmirUti) .HasColumnName("EMIR_UTI") .HasMaxLength(104); entity.Property(e => 
e.EmirdelegatedSubmissionId) .HasColumnName("EMIRDelegatedSubmissionID") .HasMaxLength(50); entity.Property(e => e.EmirsubmissionId) .HasColumnName("EMIRSubmissionID") .HasMaxLength(50); entity.Property(e => e.FilledDateTime).HasColumnType("datetime"); entity.Property(e => e.FxforwardTradeStatusId).HasColumnName("FXForwardTradeStatusID"); entity.Property(e => e.IsRhsmajor).HasColumnName("IsRHSMajor"); entity.Property(e => e.Lhsccyid).HasColumnName("LHSCCYID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.MarkToMarketValue).HasColumnType("decimal(25, 8)"); entity.Property(e => e.MarkToMarketValueUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.MarketSideUti) .HasColumnName("MarketSideUTI") .HasMaxLength(100); entity.Property(e => e.OpenValueDate).HasColumnType("date"); entity.Property(e => e.OpiupdatedByAuthUserId).HasColumnName("OPIUpdatedByAuthUserId"); entity.Property(e => e.OpiupdatedDateTime) .HasColumnName("OPIUpdatedDateTime") .HasColumnType("datetime"); entity.Property(e => e.PrevDayMarktoMarket).HasColumnType("decimal(25, 8)"); entity.Property(e => e.PrevDayMarktoMarketUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.PrevailingRate2).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Profit).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ProfitConsolidatedDateTime).HasColumnType("datetime"); entity.Property(e => e.ProfitConsolidatedValue).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ProfitGbprate) .HasColumnName("ProfitGBPRate") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Reference).HasMaxLength(20); entity.Property(e => e.RemainingClientLhsamt) .HasColumnName("RemainingClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.RemainingClientRhsamt) .HasColumnName("RemainingClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Rhsccyid).HasColumnName("RHSCCYID"); entity.Property(e => e.SettledDate).HasColumnType("datetime"); entity.Property(e => e.TradeInstructionMethodId).HasColumnName("TradeInstructionMethodID"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateTimeStamp).HasMaxLength(8); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDate).HasColumnType("datetime"); entity.Property(e => e.ValueDate).HasColumnType("date"); entity.Property(e => e.VerifiedByAuthUserId).HasColumnName("VerifiedByAuthUserID"); }); modelBuilder.Entity<LogFxforwardTradeCcmlimitOverride>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogFXForwardTradeCCMLimitOverride"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClosedByAppUserId).HasColumnName("ClosedByAppUserID"); entity.Property(e => e.ClosedDateTime).HasColumnType("datetime"); entity.Property(e => e.ClosedNotes).HasMaxLength(500); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LimitName) .IsRequired() .HasMaxLength(50); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50); entity.Property(e => e.OriginalLimit) .IsRequired() .HasMaxLength(200); entity.Property(e => e.OverrideByAppUserId).HasColumnName("OverrideByAppUserID"); entity.Property(e => e.OverrideDateTime).HasColumnType("datetime"); entity.Property(e => e.OverrideValue) .IsRequired() .HasMaxLength(200); entity.Property(e => e.TradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => 
e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogFxforwardTradeInvoice>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogFXForwardTradeInvoice"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.Comment).HasMaxLength(500); entity.Property(e => e.DocumentId).HasColumnName("DocumentID"); entity.Property(e => e.FileName) .HasMaxLength(250) .IsUnicode(false); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(25); entity.Property(e => e.TradeCode) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(8); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UploadedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogFxoption>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogFXOption"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.AuthorisedByClientCompanyContactId).HasColumnName("AuthorisedByClientCompanyContactID"); entity.Property(e => e.Barrier).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BestCaseRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.BrokerId).HasColumnName("BrokerID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.ClientLhsamt) .HasColumnName("ClientLHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientLhsamtNotional) .HasColumnName("ClientLHSAmtNotional") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamt) .HasColumnName("ClientRHSAmt") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.ClientRhsamtNotional) .HasColumnName("ClientRHSAmtNotional") .HasColumnType("decimal(25, 8)"); entity.Property(e => e.Code) .IsRequired() .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.CommPaidOutDate).HasColumnType("date"); entity.Property(e => e.ContractDate).HasColumnType("date"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate).HasColumnType("datetime"); entity.Property(e => e.CurrencyPair) .HasMaxLength(6) .IsUnicode(false); entity.Property(e => e.DeliveredDate).HasColumnType("datetime"); entity.Property(e => e.ExpiryDate).HasColumnType("date"); entity.Property(e => e.ExtBarrier).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ExtBarrierDate).HasColumnType("datetime"); entity.Property(e => e.ExtStrike).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ExtValueDate).HasColumnType("datetime"); entity.Property(e => e.ForwardRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.FxoptionSettlementId).HasColumnName("FXOptionSettlementID"); entity.Property(e => e.FxoptionStatusId).HasColumnName("FXOptionStatusID"); entity.Property(e => e.FxoptionTypeId).HasColumnName("FXOptionTypeID"); entity.Property(e => e.GraphImgTemplateFile).HasMaxLength(300); entity.Property(e => e.IsRhsmajour).HasColumnName("IsRHSMajour"); entity.Property(e => e.KnockInRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.KnockOutRate).HasColumnType("decimal(25, 8)"); entity.Property(e => 
e.LevBarrier).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevBarrierDate).HasColumnType("datetime"); entity.Property(e => e.LevNotional).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevStrike).HasColumnType("decimal(25, 8)"); entity.Property(e => e.LevValueDate).HasColumnType("datetime"); entity.Property(e => e.Lhsccyid).HasColumnName("LHSCCYID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.OptionTrigger).HasColumnType("decimal(25, 8)"); entity.Property(e => e.OptionTriggerProtecLvl).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ParentCode) .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.PercentagePart).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Premium).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Profit).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ProtectedLevel).HasColumnType("decimal(25, 8)"); entity.Property(e => e.Rhsccyid).HasColumnName("RHSCCYID"); entity.Property(e => e.SettledDate).HasColumnType("datetime"); entity.Property(e => e.SettlementTradeId).HasColumnName("SettlementTradeID"); entity.Property(e => e.TradeInstructionMethodId).HasColumnName("TradeInstructionMethodID"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateTimeStamp).HasMaxLength(8); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDate).HasColumnType("datetime"); entity.Property(e => e.ValueDate).HasColumnType("date"); entity.Property(e => e.VerifiedByAuthUserId).HasColumnName("VerifiedByAuthUserID"); entity.Property(e => e.WorstCaseRate).HasColumnType("decimal(25, 8)"); }); modelBuilder.Entity<LogPayment>(entity => { entity.HasKey(e => e.LogId); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.Amount).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ApplicableRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.AppliedDateTime).HasColumnType("datetime"); entity.Property(e => e.AuthorisedByAuthUserId).HasColumnName("AuthorisedByAuthUserID"); entity.Property(e => e.AuthorisedDateTime).HasColumnType("datetime"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.Code) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate).HasColumnType("datetime"); entity.Property(e => e.CreditBankAccountId).HasColumnName("CreditBankAccountID"); entity.Property(e => e.CreditClientCompanyOpiid).HasColumnName("CreditClientCompanyOPIID"); entity.Property(e => e.CreditClientCompanyVirtualAccountId).HasColumnName("CreditClientCompanyVirtualAccountID"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.DebitBankAccountId).HasColumnName("DebitBankAccountID"); entity.Property(e => e.DebitClientCompanyVirtualAccountId).HasColumnName("DebitClientCompanyVirtualAccountID"); entity.Property(e => e.FxforwardTradeCode) .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.IsSwiftpayment) .IsRequired() .HasColumnName("IsSWIFTPayment") .HasDefaultValueSql("((1))"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.PaymentRecReasonId).HasColumnName("PaymentRecReasonID"); entity.Property(e => 
e.PaymentSwiftoutgoingStatusId).HasColumnName("PaymentSWIFTOutgoingStatusID"); entity.Property(e => e.PaymentTypeId).HasColumnName("PaymentTypeID"); entity.Property(e => e.Reference).HasMaxLength(255); entity.Property(e => e.SwiftAuth1ByAuthUserId).HasColumnName("SwiftAuth1ByAuthUserID"); entity.Property(e => e.SwiftAuth1DateTime).HasColumnType("datetime"); entity.Property(e => e.SwiftAuth2ByAuthUserId).HasColumnName("SwiftAuth2ByAuthUserID"); entity.Property(e => e.SwiftAuth2DateTime).HasColumnType("datetime"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimeStamp).HasMaxLength(8); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.ValueDate).HasColumnType("datetime"); }); modelBuilder.Entity<LogSwiftincomingFile>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogSWIFTIncomingFile"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Filename) .IsRequired() .HasMaxLength(250); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LaufileContent).HasColumnName("LAUFileContent"); entity.Property(e => e.Laufilename) .HasColumnName("LAUFilename") .HasMaxLength(250); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(100); entity.Property(e => e.SwiftincomingFileProcessingStatusId).HasColumnName("SWIFTIncomingFileProcessingStatusID"); entity.Property(e => e.SwiftincomingFileTypeId).HasColumnName("SWIFTIncomingFileTypeID"); }); modelBuilder.Entity<LogSwiftincomingFileStatement>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogSWIFTIncomingFileStatement"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.DisplayError).HasMaxLength(500); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(100); entity.Property(e => e.MatchingContent).HasMaxLength(500); entity.Property(e => e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.ProcessingError).HasMaxLength(1500); entity.Property(e => e.RawContentLine61) .IsRequired() .HasMaxLength(250); entity.Property(e => e.RawContentLine86) .IsRequired() .HasMaxLength(500); entity.Property(e => e.SwiftincomingFileId).HasColumnName("SWIFTIncomingFileID"); }); modelBuilder.Entity<LogSwiftincomingMatchedAccount>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogSWIFTIncomingMatchedAccount"); entity.Property(e => e.LogId).HasColumnName("LogID"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(100); entity.Property(e => e.MatchingContent) .IsRequired() .HasMaxLength(500); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(8); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); }); modelBuilder.Entity<LogSwiftintegrationService>(entity => { entity.HasKey(e => e.LogId); entity.ToTable("LogSWIFTIntegrationService"); entity.Property(e => e.LogId).HasColumnName("LogID"); 
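// Audit copy of SWIFTIntegrationService rows: each entry captures a service status change
// together with the AuthUser who made it (LastStatusChangeByAuthUserID) and the change timestamp.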
entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LastStatusChangeByAuthUserId).HasColumnName("LastStatusChangeByAuthUserID"); entity.Property(e => e.LastStatusChangeDateTime).HasColumnType("datetime"); entity.Property(e => e.LogAction) .IsRequired() .HasMaxLength(50); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<NavMenuItem>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthPermissionId).HasColumnName("AuthPermissionID"); entity.Property(e => e.DisplayText) .IsRequired() .HasMaxLength(50); entity.Property(e => e.NavMenuSectionId).HasColumnName("NavMenuSectionID"); entity.Property(e => e.NavigateUrl) .IsRequired() .HasColumnName("NavigateURL") .HasMaxLength(255); entity.HasOne(d => d.AuthPermission) .WithMany(p => p.NavMenuItem) .HasForeignKey(d => d.AuthPermissionId) .HasConstraintName("FK_NavMenuItem_AuthPermission"); entity.HasOne(d => d.NavMenuSection) .WithMany(p => p.NavMenuItem) .HasForeignKey(d => d.NavMenuSectionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_NavMenuItem_NavMenuSection"); }); modelBuilder.Entity<NavMenuSection>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<Payment>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Amount).HasColumnType("decimal(25, 8)"); entity.Property(e => e.ApplicableRate).HasColumnType("decimal(25, 8)"); entity.Property(e => e.AppliedDateTime).HasColumnType("datetime"); entity.Property(e => e.AuthorisedByAuthUserId).HasColumnName("AuthorisedByAuthUserID"); entity.Property(e => e.AuthorisedDateTime).HasColumnType("datetime"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.Code) .IsRequired() .HasMaxLength(50) .IsUnicode(false); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.FxforwardTradeCode) .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.IsDebitedForMfidaccounts).HasColumnName("IsDebitedForMFIDAccounts"); entity.Property(e => e.IsSwiftpayment) .IsRequired() .HasColumnName("IsSWIFTPayment") .HasDefaultValueSql("((1))"); entity.Property(e => e.NotifyClient).HasDefaultValueSql("((0))"); entity.Property(e => e.PaymentRecReasonId).HasColumnName("PaymentRecReasonID"); entity.Property(e => e.PaymentSwiftoutgoingStatusId) .HasColumnName("PaymentSWIFTOutgoingStatusID") .HasDefaultValueSql("((1))"); entity.Property(e => e.PaymentTypeId).HasColumnName("PaymentTypeID"); entity.Property(e => e.Reference).HasMaxLength(255); entity.Property(e => e.SwiftAuth1ByAuthUserId).HasColumnName("SwiftAuth1ByAuthUserID"); entity.Property(e => e.SwiftAuth1DateTime).HasColumnType("datetime"); entity.Property(e => e.SwiftAuth2ByAuthUserId).HasColumnName("SwiftAuth2ByAuthUserID"); entity.Property(e => e.SwiftAuth2DateTime).HasColumnType("datetime"); entity.Property(e => e.TransactionCommitId).HasColumnName("TransactionCommitID"); entity.Property(e => e.UpdateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => 
e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.ValueDate) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.AuthorisedByAuthUser) .WithMany(p => p.PaymentAuthorisedByAuthUser) .HasForeignKey(d => d.AuthorisedByAuthUserId) .HasConstraintName("FK_Payment_AuthUser2"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.Payment) .HasForeignKey(d => d.ClientCompanyId) .HasConstraintName("FK_Payment_ClientCompany"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.PaymentCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Payment_AuthUser"); entity.HasOne(d => d.Currency) .WithMany(p => p.Payment) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Payment_Currency"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.Payment) .HasForeignKey(d => d.FxforwardTradeCode) .HasConstraintName("FK_Payment_FXForwardTrade"); entity.HasOne(d => d.PaymentRecReason) .WithMany(p => p.Payment) .HasForeignKey(d => d.PaymentRecReasonId) .HasConstraintName("FK_Payment_PaymentRecReason"); entity.HasOne(d => d.PaymentSwiftoutgoingStatus) .WithMany(p => p.Payment) .HasForeignKey(d => d.PaymentSwiftoutgoingStatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Payment_PaymentSWIFTOutgoingStatus"); entity.HasOne(d => d.PaymentType) .WithMany(p => p.Payment) .HasForeignKey(d => d.PaymentTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Payment_PaymentType"); entity.HasOne(d => d.SwiftAuth1ByAuthUser) .WithMany(p => p.PaymentSwiftAuth1ByAuthUser) .HasForeignKey(d => d.SwiftAuth1ByAuthUserId) .HasConstraintName("FK_Payment_SwiftAuth1ByAuthUserID"); entity.HasOne(d => d.SwiftAuth2ByAuthUser) .WithMany(p => p.PaymentSwiftAuth2ByAuthUser) .HasForeignKey(d => d.SwiftAuth2ByAuthUserId) .HasConstraintName("FK_Payment_SwiftAuth2ByAuthUserID"); entity.HasOne(d => d.TransactionCommit) .WithMany(p => p.Payment) .HasForeignKey(d => d.TransactionCommitId) .HasConstraintName("FK_Payment_TransactionCommit"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.PaymentUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_Payment_AuthUser1"); }); modelBuilder.Entity<PaymentRecReason>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<PaymentSwiftoutgoingStatus>(entity => { entity.ToTable("PaymentSWIFTOutgoingStatus"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Status) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<PaymentSwiftoutgoingStatusTransitions>(entity => { entity.HasKey(e => new { e.FromStatusId, e.ToStatusId }); entity.ToTable("PaymentSWIFTOutgoingStatusTransitions"); entity.Property(e => e.FromStatusId).HasColumnName("FromStatusID"); entity.Property(e => e.ToStatusId).HasColumnName("ToStatusID"); entity.Property(e => e.CreateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); }); modelBuilder.Entity<PaymentType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<PipelineAction>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => 
e.DisplayOrder).HasDefaultValueSql("((1))"); entity.Property(e => e.PipelineActionTypeId).HasColumnName("PipelineActionTypeID"); entity.HasOne(d => d.PipelineActionType) .WithMany(p => p.PipelineAction) .HasForeignKey(d => d.PipelineActionTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_PipelineAction_PipelineActionType"); }); modelBuilder.Entity<PipelineActionType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<ReportProcessedLog>(entity => { entity.Property(e => e.Id) .HasColumnName("ID") .ValueGeneratedNever(); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.EndDateTime).HasColumnType("datetime"); entity.Property(e => e.ExceptionInfo).HasMaxLength(200); entity.Property(e => e.FunctionName) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(700); entity.Property(e => e.Parameters) .IsRequired() .HasColumnType("xml"); entity.Property(e => e.ReportStatusId).HasColumnName("ReportStatusID"); entity.Property(e => e.Result).HasColumnType("xml"); entity.Property(e => e.ResultPage) .IsRequired() .HasMaxLength(50); entity.Property(e => e.StartDateTime).HasColumnType("datetime"); entity.HasOne(d => d.AuthUser) .WithMany(p => p.ReportProcessedLog) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ReportProcessedLog_AuthUser"); entity.HasOne(d => d.ReportStatus) .WithMany(p => p.ReportProcessedLog) .HasForeignKey(d => d.ReportStatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ReportProcessedLog_ReportStatus"); }); modelBuilder.Entity<ReportQueueToProcess>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.FunctionName) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(700); entity.Property(e => e.Parameters) .IsRequired() .HasColumnType("xml"); entity.Property(e => e.ReportStatusId).HasColumnName("ReportStatusID"); entity.Property(e => e.ResultPage) .IsRequired() .HasMaxLength(50); entity.Property(e => e.StartDateTime).HasColumnType("datetime"); entity.HasOne(d => d.AuthUser) .WithMany(p => p.ReportQueueToProcess) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ReportQueueToProcess_AuthUser"); entity.HasOne(d => d.ReportStatus) .WithMany(p => p.ReportQueueToProcess) .HasForeignKey(d => d.ReportStatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ReportQueueToProcess_ReportStatus"); }); modelBuilder.Entity<ReportStatus>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Status) .IsRequired() .HasMaxLength(20); }); modelBuilder.Entity<ScheduledReportDummyPluginTable>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<SchemaVersions>(entity => { entity.Property(e => e.Applied).HasColumnType("datetime"); entity.Property(e => e.ScriptName) .IsRequired() .HasMaxLength(255); }); modelBuilder.Entity<SuspiciousActivityReport>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AcknowledgementReceivedDateTime).HasColumnType("datetime"); entity.Property(e => e.ClientName).HasMaxLength(200); entity.Property(e => 
e.Conlusions).IsUnicode(false); entity.Property(e => e.ConsentNcareceivedDescription) .HasColumnName("ConsentNCAReceivedDescription") .IsUnicode(false); entity.Property(e => e.CreateDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.CustomerInformation).IsUnicode(false); entity.Property(e => e.DateTimeReceivedByMlro) .HasColumnName("DateTimeReceivedByMLRO") .HasColumnType("datetime"); entity.Property(e => e.Description).IsRequired(); entity.Property(e => e.DocumentsInvestigatedInformation).IsUnicode(false); entity.Property(e => e.IsReportMadeToNca).HasColumnName("IsReportMadeToNCA"); entity.Property(e => e.IssueClosedDateTime).HasColumnType("datetime"); entity.Property(e => e.NcareportDateTime) .HasColumnName("NCAReportDateTime") .HasColumnType("datetime"); entity.Property(e => e.PaymentCode).HasMaxLength(50); entity.Property(e => e.ReasonNcareportNotMade) .HasColumnName("ReasonNCAReportNotMade") .IsUnicode(false); entity.Property(e => e.ResearchUnderTakenDescription).IsUnicode(false); entity.Property(e => e.TradeCode).HasMaxLength(100); entity.Property(e => e.UpdateDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimestamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.SuspiciousActivityReportCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .HasConstraintName("FK_SuspiciousActivityReport_AuthUser_CreatedBy"); entity.HasOne(d => d.IssueClosedByAuthUser) .WithMany(p => p.SuspiciousActivityReportIssueClosedByAuthUser) .HasForeignKey(d => d.IssueClosedByAuthUserId) .HasConstraintName("FK_SuspiciousActivityReport_AuthUser_IssueClosedBy"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.SuspiciousActivityReportUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .HasConstraintName("FK_SuspiciousActivityReport_AuthUser_UpdatedBy"); }); modelBuilder.Entity<SwiftincomingFile>(entity => { entity.ToTable("SWIFTIncomingFile"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Filename) .IsRequired() .HasMaxLength(250); entity.Property(e => e.LaufileContent).HasColumnName("LAUFileContent"); entity.Property(e => e.Laufilename) .HasColumnName("LAUFilename") .HasMaxLength(250); entity.Property(e => e.SwiftincomingFileProcessingStatusId).HasColumnName("SWIFTIncomingFileProcessingStatusID"); entity.Property(e => e.SwiftincomingFileTypeId).HasColumnName("SWIFTIncomingFileTypeID"); entity.HasOne(d => d.SwiftincomingFileProcessingStatus) .WithMany(p => p.SwiftincomingFile) .HasForeignKey(d => d.SwiftincomingFileProcessingStatusId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTIncomingFile_SWIFTIncomingFileProcessingStatus"); entity.HasOne(d => d.SwiftincomingFileType) .WithMany(p => p.SwiftincomingFile) .HasForeignKey(d => d.SwiftincomingFileTypeId) .HasConstraintName("FK_SWIFTIncomingFile_SWIFTIncomingFileType"); }); modelBuilder.Entity<SwiftincomingFileProcessingStatus>(entity => { entity.ToTable("SWIFTIncomingFileProcessingStatus"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Status) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<SwiftincomingFileStatement>(entity => { entity.ToTable("SWIFTIncomingFileStatement"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime) 
.HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.DisplayError).HasMaxLength(500); entity.Property(e => e.FilePartNumber).HasDefaultValueSql("((1))"); entity.Property(e => e.MatchingContent).HasMaxLength(500); entity.Property(e => e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.ProcessingError).HasMaxLength(1500); entity.Property(e => e.RawContentLine61) .IsRequired() .HasMaxLength(250); entity.Property(e => e.RawContentLine86) .IsRequired() .HasMaxLength(500); entity.Property(e => e.SwiftincomingFileId).HasColumnName("SWIFTIncomingFileID"); entity.HasOne(d => d.Payment) .WithMany(p => p.SwiftincomingFileStatement) .HasForeignKey(d => d.PaymentId) .HasConstraintName("FK_SWIFTIncomingFileStatement_Payment"); entity.HasOne(d => d.SwiftincomingFile) .WithMany(p => p.SwiftincomingFileStatement) .HasForeignKey(d => d.SwiftincomingFileId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTIncomingFileStatement_SWIFTIncomingFile"); }); modelBuilder.Entity<SwiftincomingFileType>(entity => { entity.ToTable("SWIFTIncomingFileType"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(100); }); modelBuilder.Entity<SwiftincomingMatchedAccount>(entity => { entity.ToTable("SWIFTIncomingMatchedAccount"); entity.HasIndex(e => e.ChecksumMatchingContent) .HasName("IX_SWIFTIncomingMatchedAccount_MatchingContent"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ChecksumMatchingContent).HasComputedColumnSql("(checksum([MatchingContent]))"); entity.Property(e => e.ClientCompanyId).HasColumnName("ClientCompanyID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.MatchingContent) .IsRequired() .HasMaxLength(500); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.HasOne(d => d.ClientCompany) .WithMany(p => p.SwiftincomingMatchedAccount) .HasForeignKey(d => d.ClientCompanyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTIncomingMatchedAccount_ClientCompany"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.SwiftincomingMatchedAccountCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTIncomingMatchedAccount_CreatedByAuthUser"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.SwiftincomingMatchedAccountUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .HasConstraintName("FK_SWIFTIncomingMatchedAccount_UpdatedByAuthUser"); }); modelBuilder.Entity<SwiftintegrationService>(entity => { entity.ToTable("SWIFTIntegrationService"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.LastStatusChangeByAuthUserId).HasColumnName("LastStatusChangeByAuthUserID"); entity.Property(e => e.LastStatusChangeDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.LastStatusChangeByAuthUser) .WithMany(p => p.SwiftintegrationService) .HasForeignKey(d => d.LastStatusChangeByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTIntegrationService_AuthUser"); }); modelBuilder.Entity<Swiftmessage>(entity => { 
entity.ToTable("SWIFTMessage"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.CreatedDateTime).HasColumnType("datetime"); entity.Property(e => e.FileName).HasMaxLength(50); entity.Property(e => e.HitErrorCode).HasMaxLength(100); entity.Property(e => e.HitUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.Laufile).HasColumnName("LAUFile"); entity.Property(e => e.LaufileName) .HasColumnName("LAUFileName") .HasMaxLength(50); entity.Property(e => e.NakErrorCode).HasMaxLength(100); entity.Property(e => e.NakUpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.SenderReference) .IsRequired() .HasMaxLength(16); entity.Property(e => e.Xmlfile).HasColumnName("XMLFile"); entity.HasOne(d => d.Payment) .WithMany(p => p.Swiftmessage) .HasForeignKey(d => d.PaymentId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTMessage_Payment"); }); modelBuilder.Entity<SwiftvalidationCurrencyCountry>(entity => { entity.HasKey(e => new { e.CurrencyId, e.CountryId, e.OptionId }); entity.ToTable("SWIFTValidationCurrencyCountry"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.CountryId).HasColumnName("CountryID"); entity.Property(e => e.OptionId).HasColumnName("OptionID"); entity.HasOne(d => d.Country) .WithMany(p => p.SwiftvalidationCurrencyCountry) .HasForeignKey(d => d.CountryId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationCurrencyCountry_Country"); entity.HasOne(d => d.Currency) .WithMany(p => p.SwiftvalidationCurrencyCountry) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationCurrencyCountry_Currency"); entity.HasOne(d => d.Option) .WithMany(p => p.SwiftvalidationCurrencyCountry) .HasForeignKey(d => d.OptionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationCurrencyCountry_SWIFTValidationOption"); }); modelBuilder.Entity<SwiftvalidationCurrencyMessageField>(entity => { entity.HasKey(e => new { e.CurrencyId, e.MessageId, e.MessageFieldId }); entity.ToTable("SWIFTValidationCurrencyMessageField"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.MessageId).HasColumnName("MessageID"); entity.Property(e => e.MessageFieldId).HasColumnName("MessageFieldID"); entity.HasOne(d => d.Currency) .WithMany(p => p.SwiftvalidationCurrencyMessageField) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationCurrencyMessageField_Currency"); entity.HasOne(d => d.MessageField) .WithMany(p => p.SwiftvalidationCurrencyMessageField) .HasForeignKey(d => d.MessageFieldId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationCurrencyMessageField_SWIFTValidationMessageField"); entity.HasOne(d => d.Message) .WithMany(p => p.SwiftvalidationCurrencyMessageField) .HasForeignKey(d => d.MessageId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationCurrencyMessageField_SWIFTValidationMessage"); }); modelBuilder.Entity<SwiftvalidationField>(entity => { entity.ToTable("SWIFTValidationField"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(250); entity.Property(e => e.PaymentTypeId).HasColumnName("PaymentTypeID"); entity.Property(e => e.Tag) .IsRequired() .HasMaxLength(20); entity.HasOne(d => d.PaymentType) .WithMany(p => p.SwiftvalidationField) .HasForeignKey(d => d.PaymentTypeId) 
.HasConstraintName("FK_SWIFTValidationField_PaymentTypeID"); }); modelBuilder.Entity<SwiftvalidationFieldComponent>(entity => { entity.ToTable("SWIFTValidationFieldComponent"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description).HasMaxLength(250); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<SwiftvalidationFieldFieldComponent>(entity => { entity.HasKey(e => new { e.FieldId, e.FieldComponentId }); entity.ToTable("SWIFTValidationFieldFieldComponent"); entity.Property(e => e.FieldId).HasColumnName("FieldID"); entity.Property(e => e.FieldComponentId).HasColumnName("FieldComponentID"); entity.Property(e => e.LineNumber).HasDefaultValueSql("((1))"); entity.Property(e => e.Sequence).HasDefaultValueSql("((1))"); entity.HasOne(d => d.FieldComponent) .WithMany(p => p.SwiftvalidationFieldFieldComponent) .HasForeignKey(d => d.FieldComponentId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationFieldFieldComponent_SWIFTValidationFieldComponent"); entity.HasOne(d => d.Field) .WithMany(p => p.SwiftvalidationFieldFieldComponent) .HasForeignKey(d => d.FieldId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationFieldFieldComponent_SWIFTValidationField"); }); modelBuilder.Entity<SwiftvalidationMessage>(entity => { entity.ToTable("SWIFTValidationMessage"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(500); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(250); }); modelBuilder.Entity<SwiftvalidationMessageField>(entity => { entity.ToTable("SWIFTValidationMessageField"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(500); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(250); }); modelBuilder.Entity<SwiftvalidationOption>(entity => { entity.ToTable("SWIFTValidationOption"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description).HasMaxLength(250); entity.Property(e => e.IsActive) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.Sequence).HasDefaultValueSql("((1))"); }); modelBuilder.Entity<SwiftvalidationOptionField>(entity => { entity.HasKey(e => new { e.OptionId, e.FieldId }); entity.ToTable("SWIFTValidationOptionField"); entity.Property(e => e.OptionId).HasColumnName("OptionID"); entity.Property(e => e.FieldId).HasColumnName("FieldID"); entity.Property(e => e.Sequence).HasDefaultValueSql("((1))"); entity.HasOne(d => d.Field) .WithMany(p => p.SwiftvalidationOptionField) .HasForeignKey(d => d.FieldId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationOptionField_SWIFTValidationField"); entity.HasOne(d => d.Option) .WithMany(p => p.SwiftvalidationOptionField) .HasForeignKey(d => d.OptionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_SWIFTValidationOptionField_SWIFTValidationOption"); }); modelBuilder.Entity<SystemEmailSenderAddress>(entity => { entity.HasIndex(e => e.EmailKeyName) .HasName("U_EmailKeyName") .IsUnique(); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.EmailAddressValue).HasMaxLength(500); entity.Property(e => e.EmailKeyName).HasMaxLength(255); }); modelBuilder.Entity<TelephoneCountryCode>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Iso) .IsRequired() .HasColumnName("ISO") .HasMaxLength(3); entity.Property(e => e.Iso3) .HasColumnName("ISO3") .HasMaxLength(3) 
.IsUnicode(false); entity.Property(e => e.Name) .IsRequired() .HasMaxLength(80); entity.Property(e => e.Nicename) .IsRequired() .HasMaxLength(80); }); modelBuilder.Entity<TradeInstructionMethod>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50) .IsUnicode(false); }); modelBuilder.Entity<TransactionCommit>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.CommitDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.HasOne(d => d.AuthUser) .WithMany(p => p.TransactionCommit) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_TransactionCommit_AuthUser"); }); modelBuilder.Entity<UserAuditLogChanges>(entity => { entity.Property(e => e.Id) .HasColumnName("ID") .ValueGeneratedNever(); entity.Property(e => e.ActionType) .IsRequired() .HasMaxLength(50); entity.Property(e => e.Data).IsRequired(); entity.Property(e => e.DateTime).HasColumnType("datetime"); entity.Property(e => e.IpAddress) .IsRequired() .HasMaxLength(50); entity.Property(e => e.UserName) .IsRequired() .HasMaxLength(50); entity.Property(e => e.UserRole) .IsRequired() .HasMaxLength(50); }); modelBuilder.Entity<UserAuditLogPageViews>(entity => { entity.Property(e => e.DateTime).HasColumnType("datetime"); entity.Property(e => e.IpAddress) .IsRequired() .HasMaxLength(50); entity.Property(e => e.PageViewName) .IsRequired() .HasMaxLength(50); entity.HasOne(d => d.AuthUser) .WithMany(p => p.UserAuditLogPageViews) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_UserAuditLogPageViews_AuthUser"); }); modelBuilder.Entity<UserChangeRequest>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.AuthUserId).HasColumnName("AuthUserID"); entity.Property(e => e.ChangeDateTime).HasColumnType("datetime"); entity.Property(e => e.ChangeStatus) .IsRequired() .HasMaxLength(20); entity.Property(e => e.ChangeValueType) .IsRequired() .HasMaxLength(15); entity.Property(e => e.ChangedByAuthUserId).HasColumnName("ChangedByAuthUserID"); entity.Property(e => e.CurrentValue) .IsRequired() .HasMaxLength(250); entity.Property(e => e.ProposedValue) .IsRequired() .HasMaxLength(250); entity.HasOne(d => d.AuthUser) .WithMany(p => p.UserChangeRequestAuthUser) .HasForeignKey(d => d.AuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_UserChangeRequest_AuthUser"); entity.HasOne(d => d.ChangedByAuthUser) .WithMany(p => p.UserChangeRequestChangedByAuthUser) .HasForeignKey(d => d.ChangedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_UserChangeRequest_AuthUser1"); }); modelBuilder.Entity<UserChangeRequestApproval>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ApprovedByAuthUserId).HasColumnName("ApprovedByAuthUserID"); entity.Property(e => e.ApprovedDateTime).HasColumnType("datetime"); entity.Property(e => e.IsActive) .IsRequired() .HasDefaultValueSql("((1))"); entity.Property(e => e.UserChangeRequestId).HasColumnName("UserChangeRequestID"); entity.HasOne(d => d.ApprovedByAuthUser) .WithMany(p => p.UserChangeRequestApproval) .HasForeignKey(d => d.ApprovedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_UserChangeRequestApproval_AuthUser"); entity.HasOne(d => d.UserChangeRequest) .WithMany(p => p.UserChangeRequestApproval) .HasForeignKey(d => 
d.UserChangeRequestId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_UserChangeRequestApproval_UserChangeRequest"); }); modelBuilder.Entity<VirtualAccountTransaction>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Amount) .HasColumnType("decimal(25, 8)") .HasDefaultValueSql("((0))"); entity.Property(e => e.CurrencyId).HasColumnName("CurrencyID"); entity.Property(e => e.FxforwardTradeCode) .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.Property(e => e.PaymentId).HasColumnName("PaymentID"); entity.Property(e => e.UpdateTimeStamp) .IsRequired() .IsRowVersion(); entity.Property(e => e.VirtualAccountId).HasColumnName("VirtualAccountID"); entity.HasOne(d => d.Currency) .WithMany(p => p.VirtualAccountTransaction) .HasForeignKey(d => d.CurrencyId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_VirtualAccountTransaction_Currency"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.VirtualAccountTransaction) .HasForeignKey(d => d.FxforwardTradeCode) .HasConstraintName("FK_VirtualAccountTransaction_FXForwardTrade"); entity.HasOne(d => d.Payment) .WithMany(p => p.VirtualAccountTransaction) .HasForeignKey(d => d.PaymentId) .HasConstraintName("FK_VirtualAccountTransaction_Payment"); entity.HasOne(d => d.VirtualAccount) .WithMany(p => p.VirtualAccountTransaction) .HasForeignKey(d => d.VirtualAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_VirtualAccountTransaction_ClientCompanyVirtualAccount"); }); modelBuilder.Entity<VirtualAccountType>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.Description) .IsRequired() .HasMaxLength(50); entity.Property(e => e.IsPaymentAllowed) .IsRequired() .HasDefaultValueSql("((1))"); }); modelBuilder.Entity<VirtualAccountTypeBankAccount>(entity => { entity.HasKey(e => new { e.VirtualAccountTypeId, e.BankAccountId }); entity.Property(e => e.VirtualAccountTypeId).HasColumnName("VirtualAccountTypeID"); entity.Property(e => e.BankAccountId).HasColumnName("BankAccountID"); entity.HasOne(d => d.BankAccount) .WithMany(p => p.VirtualAccountTypeBankAccount) .HasForeignKey(d => d.BankAccountId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_VirtualAccountTypeBankAccount_BankAccount"); entity.HasOne(d => d.VirtualAccountType) .WithMany(p => p.VirtualAccountTypeBankAccount) .HasForeignKey(d => d.VirtualAccountTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_VirtualAccountTypeBankAccount_VirtualAccountType"); }); modelBuilder.Entity<ClientSiteAction>(entity => { entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionStatusId).HasColumnName("ClientSiteActionStatusID"); entity.Property(e => e.ClientSiteActionTypeId).HasColumnName("ClientSiteActionTypeID"); entity.Property(e => e.CreatedByAuthUserId).HasColumnName("CreatedByAuthUserID"); entity.Property(e => e.CreatedDateTime) .HasColumnType("datetime") .HasDefaultValueSql("(getdate())"); entity.Property(e => e.Details) .IsRequired() .HasMaxLength(1000) .IsUnicode(false); entity.Property(e => e.UpdatedByAuthUserId).HasColumnName("UpdatedByAuthUserID"); entity.Property(e => e.UpdatedDateTime).HasColumnType("datetime"); entity.Property(e => e.UpdatedTimestamp) .IsRequired() .IsRowVersion(); entity.HasOne(d => d.ClientSiteActionStatus) .WithMany(p => p.ClientSiteAction) .HasForeignKey(d => d.ClientSiteActionStatusId) .OnDelete(DeleteBehavior.ClientSetNull) 
.HasConstraintName("FK_ClientSiteAction_ClientSiteActionStatus"); entity.HasOne(d => d.ClientSiteActionType) .WithMany(p => p.ClientSiteAction) .HasForeignKey(d => d.ClientSiteActionTypeId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_ClientSiteActionType"); entity.HasOne(d => d.CreatedByAuthUser) .WithMany(p => p.ClientSiteActionCreatedByAuthUser) .HasForeignKey(d => d.CreatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_AuthUser_Client"); entity.HasOne(d => d.UpdatedByAuthUser) .WithMany(p => p.ClientSiteActionUpdatedByAuthUser) .HasForeignKey(d => d.UpdatedByAuthUserId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction_AuthUser_Trader"); }); modelBuilder.Entity<ClientSiteAction2ClientCompanyOpi>(entity => { entity.ToTable("ClientSiteAction2ClientCompanyOPI"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientCompanyOpiid).HasColumnName("ClientCompanyOPIID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.HasOne(d => d.ClientCompanyOpi) .WithMany(p => p.ClientSiteAction2ClientCompanyOpi) .HasForeignKey(d => d.ClientCompanyOpiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2ClientCompanyOPI_ClientCompanyOPI"); entity.HasOne(d => d.ClientSiteAction) .WithMany(p => p.ClientSiteAction2ClientCompanyOpi) .HasForeignKey(d => d.ClientSiteActionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2ClientCompanyOPI_ClientSiteAction"); }); modelBuilder.Entity<ClientSiteAction2FixFxforwardTrade>(entity => { entity.ToTable("ClientSiteAction2FixFXForwardTrade"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.Property(e => e.FxforwardTradeCode) .IsRequired() .HasColumnName("FXForwardTradeCode") .HasMaxLength(100) .IsUnicode(false); entity.HasOne(d => d.ClientSiteAction) .WithMany(p => p.ClientSiteAction2FixFxforwardTrade) .HasForeignKey(d => d.ClientSiteActionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FixFXForwardTrade_ClientSiteAction"); entity.HasOne(d => d.FxforwardTradeCodeNavigation) .WithMany(p => p.ClientSiteAction2FixFxforwardTrade) .HasForeignKey(d => d.FxforwardTradeCode) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FixFXForwardTrade_FXForwardTrade"); }); modelBuilder.Entity<ClientSiteAction2FxforwardTrade2Opi>(entity => { entity.ToTable("ClientSiteAction2FXForwardTrade2OPI"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => e.ClientSiteActionId).HasColumnName("ClientSiteActionID"); entity.Property(e => e.FxforwardTrade2Opiid).HasColumnName("FXForwardTrade2OPIID"); entity.HasOne(d => d.ClientSiteAction) .WithMany(p => p.ClientSiteAction2FxforwardTrade2Opi) .HasForeignKey(d => d.ClientSiteActionId) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FXForwardTrade2OPI_ClientSiteAction"); entity.HasOne(d => d.FxforwardTrade2Opi) .WithMany(p => p.ClientSiteAction2FxforwardTrade2Opi) .HasForeignKey(d => d.FxforwardTrade2Opiid) .OnDelete(DeleteBehavior.ClientSetNull) .HasConstraintName("FK_ClientSiteAction2FXForwardTrade2OPI_FXForwardTrade2OPI"); }); modelBuilder.Entity<ClientSiteAction2Fxswap>(entity => { entity.ToTable("ClientSiteAction2FXSwap"); entity.Property(e => e.Id).HasColumnName("ID"); entity.Property(e => 
e.ClientSiteActionId).HasColumnName("ClientSiteActionID");

                entity.Property(e => e.FxswapId).HasColumnName("FXSwapID");

                entity.HasOne(d => d.ClientSiteAction)
                    .WithMany(p => p.ClientSiteAction2Fxswap)
                    .HasForeignKey(d => d.ClientSiteActionId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_ClientSiteAction2FXSwap_ClientSiteAction");

                entity.HasOne(d => d.Fxswap)
                    .WithMany(p => p.ClientSiteAction2Fxswap)
                    .HasForeignKey(d => d.FxswapId)
                    .OnDelete(DeleteBehavior.ClientSetNull)
                    .HasConstraintName("FK_ClientSiteAction2FXSwap_FXSwap");
            });

            modelBuilder.Entity<ClientSiteActionStatus>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.Details)
                    .IsRequired()
                    .HasMaxLength(250)
                    .IsUnicode(false);

                entity.Property(e => e.Name)
                    .IsRequired()
                    .HasMaxLength(50)
                    .IsUnicode(false);
            });

            modelBuilder.Entity<ClientSiteActionType>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.Details)
                    .IsRequired()
                    .HasMaxLength(250)
                    .IsUnicode(false);

                entity.Property(e => e.Name)
                    .IsRequired()
                    .HasMaxLength(50)
                    .IsUnicode(false);
            });

            modelBuilder.Entity<AppUserNotification>(entity =>
            {
                entity.Property(e => e.Id).HasColumnName("ID");

                entity.Property(e => e.AppUserId)
                    .HasColumnName("AppUserID")
                    .IsRequired();

                entity.Property(e => e.ClientCompanyId)
                    .HasColumnName("ClientCompanyID")
                    .IsRequired();

                entity.Property(e => e.TradeNotifications)
                    .IsRequired();

                entity.Property(e => e.InwardPaymentNotifications)
                    .IsRequired();

                entity.Property(e => e.OutwardPaymentNotifications)
                    .IsRequired();

                entity.Property(e => e.SettlementRequests)
                    .IsRequired();
            });
        }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Helpers/DateHelpersTests.cs
using System;
using System.Collections.Generic;
using Argentex.Core.Service.Helpers;
using Xunit;

namespace Argentex.Core.Service.Tests.Helpers
{
    public class DateHelpersTests
    {
        // xUnit runs this as a parameterised test, one invocation per row of TestData.
        [Theory]
        [MemberData(nameof(TestData))]
        public void GetDaysDifferencreBetween_Should_Give_The_Days_Difference_Between_Two_Dates(DateTime first, DateTime other, int expected)
        {
            // Given method parameters

            // When
            var result = DateHelpers.GetDaysDifferencreBetween(first, other);

            // Then
            Assert.Equal(expected, result);
        }

        public static IEnumerable<object[]> TestData => new List<object[]>
        {
            new object[] {DateTime.Today, DateTime.Today.AddDays(-5), 5},
            new object[] {DateTime.Today, DateTime.Today.AddDays(5), 5},
            // Assumes the coming year has 365 days; the expected value is off by one when the range spans 29 February.
            new object[] {DateTime.Today, DateTime.Today.AddYears(1), 365},
            new object[] {DateTime.Now, DateTime.Now, 0},
            new object[] {DateTime.Now, DateTime.Today, 0},
            new object[] {DateTime.Now, DateTime.Today.AddHours(18), 0}
        };
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Trades/DealRequestModel.cs
using Argentex.Core.Service.Attributes;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;

namespace Argentex.Core.Service.Models.Trades
{
    public class DealRequestModel
    {
        [Required]
        public int? AuthUserId { get; set; }

        [Required]
        public int?
ClientCompanyId { get; set; } [Required(ErrorMessage = "At least one deal must be supplied")] public ICollection<DealModel> DealModels { get; set; } } public class DealModel { [Required] public int TradeIndex { get; set; } [Required(ErrorMessage = "LHS currency cannot be empty")] [StringLength(3)] public string LhsCcy { get; set; } [Required(ErrorMessage = "RHS currency cannot be empty")] [StringLength(3)] public string RhsCcy { get; set; } [BoolRequired(ErrorMessage = "IsBuy must be boolean")] public bool IsBuy { get; set; } [DecimalRequired(ErrorMessage = "Amount must be 0 or greater")] public decimal Amount { get; set; } /// <summary> /// Client Rate /// </summary> [DecimalRequired(ErrorMessage = "Amount must be 0 or greater")] public decimal Rate { get; set; } [DecimalRequired(ErrorMessage = "Amount must be 0 or greater")] public decimal BrokerRate { get; set; } [DateRequired(ErrorMessage = "Value date cannot be empty")] public DateTime ValueDate { get; set; } [DateRequired(ErrorMessage = "Expiration Date Time cannot be empty")] public DateTime ExpirationDateTime { get; set; } [BoolRequired(ErrorMessage = "IsRhsMajor must be boolean")] public bool IsRhsMajor { get; set; } [Required] public string QuoteId { get; set; } [Required] public string QuoteReqId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyContactCategory.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyContactCategory { public int ClientCompanyContactId { get; set; } public int ContactCategoryId { get; set; } public DateTime DateCreated { get; set; } public int CreatedByAuthUserId { get; set; } public ClientCompanyContact ClientCompanyContact { get; set; } public ContactCategory ContactCategory { get; set; } public AuthUser CreatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/EmirreportResponseCode.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class EmirreportResponseCode { public EmirreportResponseCode() { EmirreportTradeResponseError = new HashSet<EmirreportTradeResponseError>(); } public int Id { get; set; } public int ResponseCode { get; set; } public string ErrorMessage { get; set; } public string Description { get; set; } public ICollection<EmirreportTradeResponseError> EmirreportTradeResponseError { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/ActivityLogModel.cs namespace Argentex.Core.Identity.DataAccess { public class ActivityLogModel : ActivityLog { public bool IsOnline { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyOpi.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyOpi { public ClientCompanyOpi() { Breach = new HashSet<Breach>(); ClientCompanyCurrencyDefaultOpi = new HashSet<ClientCompanyCurrencyDefaultOpi>(); ClientCompanyOpiduplicateDuplicateClientCompanyOpi = new HashSet<ClientCompanyOpiduplicate>(); ClientCompanyOpiduplicateOriginalClientCompanyOpi = new HashSet<ClientCompanyOpiduplicate>(); ClientCompanyOpitransaction = new HashSet<ClientCompanyOpitransaction>(); ClientSiteAction2ClientCompanyOpi = new HashSet<ClientSiteAction2ClientCompanyOpi>(); FxforwardTrade = new HashSet<FxforwardTrade>(); FxforwardTrade2Opi = new HashSet<FxforwardTrade2Opi>(); Fxoption = new 
HashSet<Fxoption>(); } public int Id { get; set; } public int ClientCompanyId { get; set; } public string Description { get; set; } public string AccountName { get; set; } public string BankName { get; set; } public string AccountNumber { get; set; } public string SortCode { get; set; } public string Reference { get; set; } public string SwiftCode { get; set; } public string Iban { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDate { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime CreatedDate { get; set; } public bool Authorised { get; set; } public int? AuthorisedByAuthUserId { get; set; } public DateTime? AuthorisedDateTime { get; set; } public int CurrencyId { get; set; } public bool IsCompanyAccount { get; set; } public string BeneficiaryAddress { get; set; } public int? CountryId { get; set; } public string BeneficiaryName { get; set; } public string BankAddress { get; set; } public int? ClearingCodePrefixId { get; set; } public bool Rejected { get; set; } public int? RejectedByAuthUserId { get; set; } public DateTime? RejectedDateTime { get; set; } public bool IsOwnAccount { get; set; } public bool IsDeleted { get; set; } public ClearingCodePrefix ClearingCodePrefix { get; set; } public ClientCompany ClientCompany { get; set; } public Country Country { get; set; } public Currency Currency { get; set; } public AuthUser RejectedByAuthUser { get; set; } public ICollection<Breach> Breach { get; set; } public ICollection<ClientCompanyCurrencyDefaultOpi> ClientCompanyCurrencyDefaultOpi { get; set; } public ICollection<ClientCompanyOpiduplicate> ClientCompanyOpiduplicateDuplicateClientCompanyOpi { get; set; } public ICollection<ClientCompanyOpiduplicate> ClientCompanyOpiduplicateOriginalClientCompanyOpi { get; set; } public ICollection<ClientCompanyOpitransaction> ClientCompanyOpitransaction { get; set; } public ICollection<ClientSiteAction2ClientCompanyOpi> ClientSiteAction2ClientCompanyOpi { get; set; } public ICollection<FxforwardTrade> FxforwardTrade { get; set; } public ICollection<FxforwardTrade2Opi> FxforwardTrade2Opi { get; set; } public ICollection<Fxoption> Fxoption { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClearingCodePrefix.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClearingCodePrefix { public ClearingCodePrefix() { BankAccountCurrencyDetails = new HashSet<BankAccountCurrencyDetails>(); ClientCompanyOpi = new HashSet<ClientCompanyOpi>(); CountryClearingCodePrefix = new HashSet<CountryClearingCodePrefix>(); } public int Id { get; set; } public string Code { get; set; } public string Description { get; set; } public ICollection<BankAccountCurrencyDetails> BankAccountCurrencyDetails { get; set; } public ICollection<ClientCompanyOpi> ClientCompanyOpi { get; set; } public ICollection<CountryClearingCodePrefix> CountryClearingCodePrefix { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogFxforwardTradeCcmlimitOverride.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogFxforwardTradeCcmlimitOverride { public long LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string TradeCode { get; set; } public string LimitName { get; set; } public string OriginalLimit { get; set; } public string OverrideValue { get; set; } public 
int OverrideByAppUserId { get; set; } public DateTime OverrideDateTime { get; set; } public bool Closed { get; set; } public int? ClosedByAppUserId { get; set; } public string ClosedNotes { get; set; } public DateTime? ClosedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime UpdatedDateTime { get; set; } public int? UpdatedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/PaymentType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class PaymentType { public PaymentType() { Payment = new HashSet<Payment>(); SwiftvalidationField = new HashSet<SwiftvalidationField>(); } public int Id { get; set; } public string Description { get; set; } public bool RequiresApproval { get; set; } public bool CanBeSwift { get; set; } public bool DefaultSendToSwift { get; set; } public ICollection<Payment> Payment { get; set; } public ICollection<SwiftvalidationField> SwiftvalidationField { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Settlements/AssignSettlementRequestModel.cs using Argentex.Core.Service.Models.Trade; using System.Collections.Generic; namespace Argentex.Core.Service.Models.Settlements { public class AssignSettlementRequestModel { public int AuthUserId { get; set; } public int ClientCompanyId { get; set; } public TradeModel Trade { get; set; } public IList<AssignSettlementModel> SettlementModels { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Sms/Models/TextMagicConfigModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Sms.Models { public class TextMagicConfigModel { public string UserName { get; set; } public string Token { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Commission.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Commission { public int Id { get; set; } public int CommissionTypeId { get; set; } public int AppUserId { get; set; } public double Commission1 { get; set; } public AppUser AppUser { get; set; } public CommissionType CommissionType { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Order/IOrderService.cs using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Order; using System.Collections.Generic; using System.Threading.Tasks; namespace Argentex.Core.Service.Order { public interface IOrderService { Task<IList<OrderResponseModel>> ExecuteOrdersAsync(OrderRequestModel ordersToExecute); IList<Models.Trade.TradeModel> GetOpenOrders(int clientCompanyId); IList<CancelOrderModel> GetExpiredValidityOrders(); Task<bool> CancelOrderAsync(string tradeCode); Task<bool> CancelOrderAsync(CancelOrderModel model); } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/Model/ApproveUserChangeResponse.cs using Argentex.Core.DataAccess.Entities; using Microsoft.AspNetCore.Identity; using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.UnitsOfWork.Users.Model { public class ApproveUserChangeResponse { public UserChangeRequest UserChangeRequest { get; set; } public bool SendNotification { get; set; } public IdentityResult Result { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CountryClearingCodePrefix.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public 
partial class CountryClearingCodePrefix { public int CountryId { get; set; } public int ClearingCodePrefixId { get; set; } public bool IsDefault { get; set; } public int Sequence { get; set; } public ClearingCodePrefix ClearingCodePrefix { get; set; } public Country Country { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Trades/QuoteRequestModel.cs using Argentex.Core.Service.Attributes; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Trades { public class QuoteRequestModel { [Required] public int? AuthUserId { get; set; } [Required] public int? ClientCompanyId { get; set; } [Required(ErrorMessage = "At least one order must be supplied")] public ICollection<QuoteModel> QuoteModels { get; set; } } public class QuoteModel { [Required] public int QuoteIndex { get; set; } [Required(ErrorMessage = "LHS currency cannot be empty")] [StringLength(3)] public string LhsCcy { get; set; } [Required(ErrorMessage = "RHS currency cannot be empty")] [StringLength(3)] public string RhsCcy { get; set; } [BoolRequired(ErrorMessage = "IsBuy must be boolean")] public bool IsBuy { get; set; } [DecimalRequired(ErrorMessage = "Amount must be 0 or greater")] public decimal Amount { get; set; } [DateRequired(ErrorMessage = "Value date cannot be empty")] public DateTime ValueDate { get; set; } [BoolRequired(ErrorMessage = "IsRhsMajor must be boolean")] public bool IsRhsMajor { get; set; } [DateRequired(ErrorMessage = "Contract date cannot be empty")] public DateTime ContractDate { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Filters/Models/ErrorResponse.cs using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Threading.Tasks; namespace Argentex.Core.Api.Filters.Models { public class ErrorResponse { public string Data { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Controllers/SettlementControllerTests.cs using Argentex.Core.Api.Controllers.Settlements; using Argentex.Core.Service.Models.Settlements; using Argentex.Core.Service.Settlements; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System; using System.Collections.Generic; using System.Net; using Xunit; namespace Argentex.Core.Service.Tests.Controllers { public class SettlementControllerTests { [Fact] public async void Assign_Should_Return_BadRequest_When_Model_Is_Not_Valid() { // Arrange AssignSettlementRequestModel settlementModel = new AssignSettlementRequestModel() { AuthUserId = 1, ClientCompanyId = 111, Trade = new Models.Trade.TradeModel(), SettlementModels = new List<AssignSettlementModel>() }; var settlementServiceMock = new Mock<ISettlementService>(); var loggerMock = new Mock<ILogWrapper>(); settlementServiceMock.Setup(x => x.AssignAsync(It.IsAny<AssignSettlementRequestModel>())).ThrowsAsync(new Exception()); var controller = new SettlementController(settlementServiceMock.Object, loggerMock.Object); controller.ModelState.AddModelError("Test", "Model is not valid"); var expectedStatusCode = HttpStatusCode.BadRequest; // Act var response = await controller.Assign(settlementModel); var result = response as StatusCodeResult; // Assert Assert.Equal((int)expectedStatusCode, result.StatusCode); Assert.False(controller.ModelState.IsValid); Assert.Single(controller.ModelState); Assert.True(controller.ModelState.ContainsKey("Test")); } [Fact] public async void Assign_Should_Return_An_Ok_Response_When_Passed_Object_Is_Valid() 
{ // Arrange AssignSettlementRequestModel settlementModel = new AssignSettlementRequestModel() { AuthUserId = 4, ClientCompanyId = 1, Trade = new Models.Trade.TradeModel(), SettlementModels = new List<AssignSettlementModel>() }; AssignSettlementModel assignSettlementModel = new AssignSettlementModel() { SettlementId = 1515, TradedCurrency = "GBPEUR", Account = new AccountModel() { }, Amount = 14700, ValueDate = DateTime.Today.ToString(), Reference = "Test Reference", IsPayTotal = false, Status = 0, IsWarning = false, WarningMessage = string.Empty }; var settlementServiceMock = new Mock<ISettlementService>(); var loggerMock = new Mock<ILogWrapper>(); settlementServiceMock.Setup(x => x.AssignAsync(It.IsAny<AssignSettlementRequestModel>())).ReturnsAsync(new List<AssignSettlementModel>() { assignSettlementModel }); var controller = new SettlementController(settlementServiceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedType = typeof(IList<AssignSettlementModel>); var expectedListCount = 1; var expectedAmount = 14700; var expectedsettlementId = 1515; var expectedTradedCurrency = "GBPEUR"; var expectedValueDate = DateTime.Today.ToString(); var expectedReference = "Test Reference"; var expectedIsPayTotal = false; var expectedStatus = 0; var expectedIsWarning = false; var expectedWarningMessage = string.Empty; // Act var response = await controller.Assign(settlementModel); var result = response as OkObjectResult; var returnedList = result.Value as IList<AssignSettlementModel>; // Assert Assert.NotNull(result); Assert.Equal((int)expectedStatusCode, result.StatusCode); Assert.NotNull(returnedList); Assert.Equal(expectedListCount, returnedList.Count); Assert.Equal(expectedAmount, returnedList[0].Amount); Assert.Equal(expectedsettlementId, returnedList[0].SettlementId); Assert.Equal(expectedTradedCurrency, returnedList[0].TradedCurrency); Assert.Equal(expectedValueDate, returnedList[0].ValueDate); Assert.Equal(expectedReference, returnedList[0].Reference); Assert.Equal(expectedIsPayTotal, returnedList[0].IsPayTotal); Assert.Equal(expectedStatus, returnedList[0].Status); Assert.Equal(expectedIsWarning, returnedList[0].IsWarning); Assert.Equal(expectedWarningMessage, returnedList[0].WarningMessage); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogCassRecs.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogCassRecs { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public int? CassRecsStatementFileId { get; set; } public DateTime CassRecsDate { get; set; } public string CurrencyCode { get; set; } public decimal LastNightsClosingLedger { get; set; } public int? Check1ByAuthUserId { get; set; } public DateTime? Check1UpdatedDateTime { get; set; } public int? Check2ByAuthUserId { get; set; } public DateTime? Check2UpdatedDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public int? CompletedByAuthUserId { get; set; } public DateTime? 
CompletedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.SignalRService/ITraderActionsHub.cs using System.Threading.Tasks; namespace Argentex.Core.SignalRService { public interface ITraderActionsHub { Task ContinueExecuteTrade(string model); Task ManageClientTrade(string model); } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.SignalRService/IMonitoringHub.cs using System.Threading.Tasks; namespace Argentex.Core.SignalRService { public interface IMonitoringHub { Task TradingStarted(string user); Task CheckExecuteTrade(string modelJson); Task RefreshClientDetails(); } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Email/EmailSender/IEmailSender.cs using Argentex.Core.Service.Email.EmailSender; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.Service { public interface IEmailSender { Task SendAsync(string appAccounLogin, string appAccountPass, string emailUserName, string recipient, string subject, string body, bool IsHtml, string sender, string fromName, int priority = 0, string bccEmail = null, string ccEmail = null); string CreateBody(EmailType type); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanySalesRegion.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanySalesRegion { public ClientCompanySalesRegion() { AppUserType = new HashSet<AppUserType>(); ClientCompany = new HashSet<ClientCompany>(); } public int Id { get; set; } public string Description { get; set; } public int? PermissionId { get; set; } public ICollection<AppUserType> AppUserType { get; set; } public ICollection<ClientCompany> ClientCompany { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftincomingFileProcessingStatus.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftincomingFileProcessingStatus { public SwiftincomingFileProcessingStatus() { SwiftincomingFile = new HashSet<SwiftincomingFile>(); } public int Id { get; set; } public string Status { get; set; } public ICollection<SwiftincomingFile> SwiftincomingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/Model/ApproveUserChangeRequests.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.UnitsOfWork.Users.Model { public class ApproveUserChangeRequest { public int UserChangeRequestID { get; set; } public int ApprovedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientSiteAction/CSANewOPIRequestDisplayModel.cs using System; namespace Argentex.Core.Service.Models.ClientSiteAction { [Serializable] public class CSANewOPIRequestDisplayModel { public string CompanyName { get; set; } public int CompanyID { get; set; } public string CurrencyCode { get; set; } public string OPIName { get; set; } public string Status { get; set; } public string CreatedByClientName { get; set; } public DateTime CreatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Settlements/ISettlementUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using System; using System.Collections.Generic; namespace Argentex.Core.UnitsOfWork.Settlements { public interface ISettlementUow : IBaseUow { IDictionary<FxforwardTrade, DataAccess.Entities.ClientSiteAction> GetTradeSwaps(string parentTradeCode); 
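        // The remaining members of this unit of work read and maintain settlement data for a single
        // parent trade: its linked swap legs and OPI assignments, new delivery/reversal swap pairs and
        // trade-to-OPI links, removal of assigned settlements, and settlement amount / latest-creation-date queries.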
IList<FxforwardTrade2Opi> GetTradeOpis(string parentTradeCode); FxforwardTradeSwapCount GetTradeSwapCount(string parentTradeCode); int Assign(FxforwardTrade deliveryLegTrade, FxforwardTrade reversalLegTrade, string parentTradeCode, int authUserID); void AddTrade2Opi(FxforwardTrade2Opi trade2opi); void DeleteAssignedSettlement(long settlementId); decimal GetSettlementAmountForTrade(string tradeCode); DateTime GetMaxCreateDateForTrade(string tradeCode); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientCompany/ClientCompanyModel.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service { public class ClientCompanyModel { public int ID { get; set; } public string Name { get; set; } public string Crn { get; set; } public int? DealerAppUserID { get; set; } public string Description { get; set; } public string TradingName { get; set; } public string TelephoneNumber { get; set; } public string FaxNumber { get; set; } public string WebsiteURL { get; set; } public string Address { get; set; } public int? ClientCompanyTypeID { get; set; } public int ClientCompanyStatusID { get; set; } public int UpdatedByAuthUserID { get; set; } public DateTime UpdatedDateTime { get; set; } public string ImportantNote { get; set; } public int? ClientCompanyCategoryID { get; set; } public bool IsHouseAccount { get; set; } public string PostCode { get; set; } public DateTime? ApprovedDateTime { get; set; } public bool? IsKYC { get; set; } public bool? IsTandCs { get; set; } public bool? IsRiskWarning { get; set; } public int? ClientCompanyOptionStatusID { get; set; } public DateTime? ApprovedOptionDateTime { get; set; } public bool IsPitched { get; set; } public int? PitchedByAppUserID { get; set; } public DateTime? PitchedDateTime { get; set; } public DateTime? AccountFormsSentDateTime { get; set; } public bool IsInternalAccount { get; set; } public string QualifiedNewTradeCode { get; set; } public string TradingAddress { get; set; } public int? MaxOpenGBP { get; set; } public int? MaxTradeSizeGBP { get; set; } public int? MaxTenorMonths { get; set; } public decimal? MaxCreditLimit { get; set; } public string TradingPostCode { get; set; } public string EMIR_LEI { get; set; } public bool? EMIR_EEA { get; set; } public bool? AssignNewTrades { get; set; } public int? ClientCompanyIndustrySectorID { get; set; } public int ClientCompanySalesRegionID { get; set; } public string SpreadsNote { get; set; } public int? ClientCompanyLinkedGroupID { get; set; } public bool IsExcludedFromEMoney { get; set; } public DateTime? 
FirstTradeDate { get; set; } public int ClientCompanyCreditTypeID { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ArmreportFxforwardTrade.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ArmreportFxforwardTrade { public int Id { get; set; } public int ArmreportId { get; set; } public string FxforwardTradeCode { get; set; } public int ArmstatusId { get; set; } public bool IsResubmited { get; set; } public DateTime ReportedDateTime { get; set; } public Armreport Armreport { get; set; } public Emirstatus Armstatus { get; set; } public FxforwardTrade FxforwardTradeCodeNavigation { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/Country.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class Country { [Key] public long Id { get; set; } [Required] [MaxLength(128)] public string Name { get; set; } [Required] [MaxLength(256)] public string FormalName { get; set; } [Required] [StringLength(2)] public string CodeISO2 { get; set; } [Required] [StringLength(3)] public string CodeISO3 { get; set; } [MaxLength(25)] public string PhoneCode { get; set; } public int CodeISO3Numeric { get; set; } [Required] public int Sequence { get; set; } public int CountryGroupId { get; set; } [ForeignKey(nameof(CountryGroupId))] public CountryGroup ContryGroup { get; set; } public int? LengthIBAN { get; set; } [MaxLength(256)] public string RegexBBAN { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftintegrationService.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftintegrationService { public int Id { get; set; } public bool IsActive { get; set; } public DateTime LastStatusChangeDateTime { get; set; } public int LastStatusChangeByAuthUserId { get; set; } public byte[] UpdateTimeStamp { get; set; } public AuthUser LastStatusChangeByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Helpers/DateHelpers.cs using System; namespace Argentex.Core.Service.Helpers { public static class DateHelpers { public static int GetDaysDifferencreBetween(DateTime date, DateTime other) { var dateValue = date.Date; var dateValueOther = other.Date; if (dateValue == dateValueOther) return 0; if (dateValue > dateValueOther) return (dateValue - dateValueOther).Days; return (dateValueOther - dateValue).Days; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/LoginModel.cs namespace Argentex.Core.Service.Models.Identity { public class LoginServiceModel { public string Username { get; set; } public string Password { get; set; } public string PrimaryIP { get; set; } public string SecondaryIP { get; set; } public string Grant_Type { get; set; } public string ClientId { get; set; } public string ClientSecret { get; set; } public string RefreshToken { get; set; } public string Resource { get; set; } public string Scope { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Identity/Services/SanitizeUrlStringConstants.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Identity { public class SanitizeForwardSlash { public const string Old = "/"; public const string New = "@$@"; } public class 
SanitizeDoubleEqual
    {
        public const string Old = "==";
        public const string New = "@-@";
    }

    public class SanitizePlus
    {
        public const string Old = "+";
        public const string New = "@*@";
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/TradeInstructionMethod.cs
using System;
using System.Collections.Generic;

namespace Argentex.Core.DataAccess.Entities
{
    public partial class TradeInstructionMethod
    {
        public TradeInstructionMethod()
        {
            FxforwardTrade = new HashSet<FxforwardTrade>();
            Fxoption = new HashSet<Fxoption>();
        }

        public int Id { get; set; }
        public string Description { get; set; }

        public ICollection<FxforwardTrade> FxforwardTrade { get; set; }
        public ICollection<Fxoption> Fxoption { get; set; }
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/AppSettings/AppSettingEnum.cs
namespace Argentex.Core.UnitsOfWork.AppSettings
{
    public enum AppSettingEnum
    {
        InternalUserChangeRequestApprovalsRequired,
        ExternalUserChangeRequestApprovalsRequired
    }
}<file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/AppSettings/IAppSettingUow.cs
using Argentex.Core.DataAccess.Entities;
using Synetec.Data.UnitOfWork.BaseUnitOfWork;
using System;
using System.Collections.Generic;
using System.Text;

namespace Argentex.Core.UnitsOfWork.AppSettings
{
    public interface IAppSettingUow : IBaseUow
    {
        AppSetting GetAppSetting(string key);
        AppSetting GetAppSetting(AppSettingEnum key);
    }
}
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Email/EmailSender/EmailSender.cs
using Argentex.Core.Service.Email.EmailSender;
using Argentex.Core.Service.Exceptions;
using Argentex.Core.Service.Helpers;
using EQService;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;

namespace Argentex.Core.Service
{
    public class EmailSender : IEmailSender
    {
        private List<EmailAttachment> _emailAttachments;
        private IServiceEmail _eqsService;
        private IConfigWrapper _config;

        public EmailSender(IServiceEmail emailService, IConfigWrapper config)
        {
            _eqsService = emailService;
            _config = config;
        }

        public async Task SendAsync(
            string appAccounLogin,
            string appAccountPass,
            string emailUserName,
            string recipient,
            string subject,
            string body,
            bool IsHtml,
            string sender,
            string fromName,
            int priority = 0,
            string bccEmail = null,
            string ccEmail = null)
        {
            List<EmailAttachment> emailAttachments = new List<EmailAttachment>();
            EmailAttachment[] arrayOfAttachments = emailAttachments.ToArray();

            // If a default email address is configured, recipient, bcc and cc are all overridden with it.
            string defaultEmail = _config.Get("Emails:DefaultEmail");
            if (!string.IsNullOrWhiteSpace(defaultEmail))
            {
                recipient = defaultEmail;
                if (!string.IsNullOrWhiteSpace(bccEmail))
                {
                    bccEmail = defaultEmail;
                }
                if (!string.IsNullOrWhiteSpace(ccEmail))
                {
                    ccEmail = defaultEmail;
                }
            }

            AddRequest request = CreateRequest(appAccounLogin, appAccountPass, emailUserName, recipient, subject, body,
                IsHtml, sender, fromName, priority, bccEmail, ccEmail, arrayOfAttachments);

            var emailId = await _eqsService.AddAsync(request);
        }

        private static AddRequest CreateRequest(string appAccounLogin, string appAccountPass, string emailUserName,
            string recipient, string subject, string body, bool IsHtml, string sender, string fromName, int priority,
            string bccEmail, string ccEmail, EmailAttachment[] arrayOfAttachments)
        {
            return new AddRequest
            {
                appAccountLogin = appAccounLogin,
                appAccountPassword = appAccountPass,
                emailUserName = emailUserName,
                toAddress = recipient,
                subject = subject,
                body = body,
                isHTML = IsHtml,
                attachments = arrayOfAttachments,
                fromAddress = sender,
                fromName = fromName,
                bccAddress = bccEmail,
                ccAddress = ccEmail,
                priority = priority,
sendDateTime = DateTime.UtcNow }; } public async Task SendWithAttachmentsAsync( string appAccounLogin, string appAccountPass, string emailUserName, string recipient, string subject, string body, bool IsHtml, string sender, string fromName, int priority = 0, string bccEmail = null, string ccEmail = null) { //TODO implement as property List<EmailAttachment> emailAttachments = new List<EmailAttachment>(); EmailAttachment[] arrayOfAttachments = emailAttachments.ToArray(); string defaultEmail = _config.Get("Emails:DefaultEmail"); if (!string.IsNullOrWhiteSpace(defaultEmail)) { recipient = defaultEmail; if (!string.IsNullOrWhiteSpace(bccEmail)) { bccEmail = defaultEmail; } if (!string.IsNullOrWhiteSpace(ccEmail)) { ccEmail = defaultEmail; } } var emailId = await _eqsService.AddAsync(CreateRequest (appAccounLogin, appAccountPass, emailUserName, recipient, subject, body, IsHtml, sender, fromName, priority, bccEmail, ccEmail, arrayOfAttachments)); } public string CreateBody(EmailType type) { string url = AppDomain.CurrentDomain.BaseDirectory; switch(type) { case EmailType.NewUser: url += Path.Combine(url, "/Email/EmailSender/Templates/setNewPassword.html"); break; case EmailType.ResetPassword: url += Path.Combine(url, "/Email/EmailSender/Templates/resetPassword.html"); break; case EmailType.PasswordChanged: url += Path.Combine(url, "/Email/EmailSender/Templates/passwordChanged.html"); break; case EmailType.TradeNote: url += Path.Combine(url, "/Email/EmailSender/Templates/tradeNote.html"); break; case EmailType.BrokerTradeNote: url += Path.Combine(url, "/Email/EmailSender/Templates/brokerTradeNote.html"); break; case EmailType.FailedFIXTrades: url += Path.Combine(url, "/Email/EmailSender/Templates/failedFIXTrades.html"); break; case EmailType.OrderNote: url += Path.Combine(url, "/Email/EmailSender/Templates/orderNote.html"); break; case EmailType.DealerOrderNote: url += Path.Combine(url, "/Email/EmailSender/Templates/dealerOrderNote.html"); break; case EmailType.CancelOrder: url += Path.Combine(url, "/Email/EmailSender/Templates/cancelOrder.html"); break; case EmailType.SettlementAssigned: url += Path.Combine(url, "/Email/EmailSender/Templates/settlementAssigned.html"); break; // Payment Emails case EmailType.InwardPayment: url += Path.Combine(url, "/Email/EmailSender/Templates/inwardPayment.html"); break; case EmailType.OutwardPayment: url += Path.Combine(url, "/Email/EmailSender/Templates/outwardPayment.html"); break; // User Change Request Email case EmailType.UserChangeRequestAlert: url += Path.Combine(url, "/Email/EmailSender/Templates/UserChangeRequiresApprovalAlert.html"); break; case EmailType.MobileChangeEmailAlert: url += Path.Combine(url, "/Email/EmailSender/Templates/phoneChanged.html"); break; default: throw new NoSuchEmailTemplate("Please specify EmailType and define email body template"); } using (StreamReader sourceReader = File.OpenText(url)) { return sourceReader.ReadToEnd(); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Currencies/CurrencyController.cs using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Exceptions; using Microsoft.AspNetCore.Mvc; using SynetecLogger; namespace Argentex.Core.Api.Controllers.Currencies { [Produces("application/json")] [Route("api/currencies")] public class CurrencyController : Controller { private readonly ICurrencyService _currencyService; private readonly ILogWrapper _logger; public CurrencyController(ICurrencyService currencyService, ILogWrapper logger) { _currencyService = currencyService; _logger = 
logger; } [HttpGet("currency-pair-rate")] public IActionResult GetCurrencyPairRate(string currencyPair) { try { var rate = _currencyService.GetCurrencyPairRate(currencyPair); return Ok(rate); } catch (CurrencyPairPricingNotFoundException e) { _logger.Error(e); return BadRequest(e.Message); } } [HttpGet("")] public IActionResult GetCurrencies() { return Ok(_currencyService.GetCurrencies()); } /// <summary> /// This is currently in trade and should be updated in client site to use this one /// </summary> /// <returns></returns> //[HttpGet] //[Route("currency-codes")] //public IActionResult GetCurrencyCodes() //{ // return Ok(_tradeService.GetCurrencyCodes()); //} protected override void Dispose(bool disposing) { if (disposing) { _currencyService.Dispose(); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CountryGroup.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class CountryGroup { public CountryGroup() { Country = new HashSet<Country>(); } public int Id { get; set; } public string Description { get; set; } public int? Sequence { get; set; } public ICollection<Country> Country { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyCurrencyDefaultOpi.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyCurrencyDefaultOpi { public int ClientCompanyId { get; set; } public int CurrencyId { get; set; } public int ClientCompanyOpiid { get; set; } public byte[] UpdateTimeStamp { get; set; } public DateTime UpdateDateTime { get; set; } public int UpdateAuthUserId { get; set; } public ClientCompany ClientCompany { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } public Currency Currency { get; set; } public AuthUser UpdateAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/ClientApplicationUow.cs using System.Threading.Tasks; using Argentex.Core.Identity.DataAccess; using OpenIddict.Core; using OpenIddict.EntityFrameworkCore.Models; using Synetec.Data.UnitOfWork.BaseUnitOfWork; namespace Argentex.Core.UnitsOfWork.Users { public class ClientApplicationUow : BaseUow, IClientApplicationUow { private readonly OpenIddictApplicationManager<OpenIddictApplication> _applicationManager; public ClientApplicationUow(SecurityDbContext context, OpenIddictApplicationManager<OpenIddictApplication> applicationManager) : base(context) { _applicationManager = applicationManager; } public async Task<OpenIddictApplication> GetClientCredentialsAsync(string clientId) => await _applicationManager.FindByClientIdAsync(clientId); } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ArmfxForwardTradeStatusesHistory.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ArmfxForwardTradeStatusesHistory { public int Id { get; set; } public int? 
ArmreportId { get; set; } public string FxForwardTradeCode { get; set; } public DateTime ArmstatusUpdatedDateTime { get; set; } public string ErrorDescription { get; set; } public int ArmstatusId { get; set; } public Armreport Armreport { get; set; } public Emirstatus Armstatus { get; set; } public FxforwardTrade FxForwardTradeCodeNavigation { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientSiteAction/ClientSiteActionUow.cs using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using Argentex.Core.DataAccess.Entities; using System.Linq; using Microsoft.EntityFrameworkCore; namespace Argentex.Core.UnitsOfWork.ClientSiteAction { public class ClientSiteActionUow : BaseUow, IClientSiteActionUow { private IGenericRepo<DataAccess.Entities.ClientSiteAction> _clientSiteActionRepo; private IGenericRepo<ClientSiteActionStatus> _clientSiteActionStatusRepo; private IGenericRepo<ClientSiteActionType> _clientSiteActionTypeRepo; private IGenericRepo<ClientSiteAction2FxforwardTrade2Opi> _clientSiteAction2FxforwardTrade2OpiRepo; private IGenericRepo<ClientSiteAction2Fxswap> _clientSiteAction2FxswapRepo; private IGenericRepo<ClientSiteAction2ClientCompanyOpi> _clientSiteAction2ClientCompanyOpiRepo; private IGenericRepo<ClientSiteAction2FixFxforwardTrade> _clientSiteAction2FixFxforwardTradeRepo; public ClientSiteActionUow(FXDB1Context context) : base(context) { } #region Repo Initializing private IGenericRepo<DataAccess.Entities.ClientSiteAction> ClientSiteActionRepo { get { return _clientSiteActionRepo = _clientSiteActionRepo ?? new GenericRepo<DataAccess.Entities.ClientSiteAction>(Context); } } private IGenericRepo<ClientSiteActionStatus> ClientSiteActionStatusRepo { get { return _clientSiteActionStatusRepo = _clientSiteActionStatusRepo ?? new GenericRepo<ClientSiteActionStatus>(Context); } } private IGenericRepo<ClientSiteActionType> ClientSiteActionTypeRepo { get { return _clientSiteActionTypeRepo = _clientSiteActionTypeRepo ?? new GenericRepo<ClientSiteActionType>(Context); } } private IGenericRepo<ClientSiteAction2ClientCompanyOpi> ClientSiteAction2ClientCompanyOpiRepo { get { return _clientSiteAction2ClientCompanyOpiRepo = _clientSiteAction2ClientCompanyOpiRepo ?? new GenericRepo<ClientSiteAction2ClientCompanyOpi>(Context); } } private IGenericRepo<ClientSiteAction2FixFxforwardTrade> ClientSiteAction2FixFxforwardTradeRepo { get { return _clientSiteAction2FixFxforwardTradeRepo = _clientSiteAction2FixFxforwardTradeRepo ?? new GenericRepo<ClientSiteAction2FixFxforwardTrade>(Context); } } private IGenericRepo<ClientSiteAction2FxforwardTrade2Opi> ClientSiteAction2FxforwardTrade2OpiRepo { get { return _clientSiteAction2FxforwardTrade2OpiRepo = _clientSiteAction2FxforwardTrade2OpiRepo ?? new GenericRepo<ClientSiteAction2FxforwardTrade2Opi>(Context); } } private IGenericRepo<ClientSiteAction2Fxswap> ClientSiteAction2FxswapRepo { get { return _clientSiteAction2FxswapRepo = _clientSiteAction2FxswapRepo ?? 
new GenericRepo<ClientSiteAction2Fxswap>(Context); } } #endregion public IQueryable<DataAccess.Entities.ClientSiteAction> GetClientSiteAction(long clientSiteActionID) { return ClientSiteActionRepo.GetQueryable(x => x.Id == clientSiteActionID, orderBy: null, includeProperties: "CreatedByAuthUser,UpdatedByAuthUser,ClientSiteActionType,ClientSiteActionStatus"); } public IQueryable<ClientSiteAction2ClientCompanyOpi> GetClientSiteActionByOPIID(int clientCompanyOPIID) { return ClientSiteAction2ClientCompanyOpiRepo.GetQueryable(x => x.ClientCompanyOpiid == clientCompanyOPIID) .Include(x => x.ClientSiteAction) .Include(x => x.ClientSiteAction.ClientSiteActionStatus) .Include(x => x.ClientSiteAction.ClientSiteActionType) .Include(x => x.ClientSiteAction.CreatedByAuthUser) .Include(x => x.ClientSiteAction.UpdatedByAuthUser); } public void LogAction(DataAccess.Entities.ClientSiteAction action, string Id) { ClientSiteActionRepo.Insert(action); switch (action.ClientSiteActionType.Name) { case "RequestSwap": var Csa2Swap = new ClientSiteAction2Fxswap { FxswapId = int.Parse(Id), ClientSiteActionId = action.Id }; ClientSiteAction2FxswapRepo.Insert(Csa2Swap); break; case "RequestOPIAssignedToTrades": var Csa2Pay = new ClientSiteAction2FxforwardTrade2Opi { FxforwardTrade2Opiid = long.Parse(Id), ClientSiteActionId = action.Id }; ClientSiteAction2FxforwardTrade2OpiRepo.Insert(Csa2Pay); break; case "RequestNewOPI": var Csa2Opi = new ClientSiteAction2ClientCompanyOpi { ClientCompanyOpiid = int.Parse(Id), ClientSiteActionId = action.Id }; ClientSiteAction2ClientCompanyOpiRepo.Insert(Csa2Opi); break; case "RequestTradesNoFIXConfirmation": var Csa2FixTrade = new ClientSiteAction2FixFxforwardTrade { FxforwardTradeCode = Id, ClientSiteActionId = action.Id }; ClientSiteAction2FixFxforwardTradeRepo.Insert(Csa2FixTrade); break; default: break; } SaveContext(); } public ClientSiteActionStatus GetClientSiteActionStatusFromName(string actionStatusName) { var actionStatus = ClientSiteActionStatusRepo.GetQueryable(x => x.Name == actionStatusName) .FirstOrDefault(); return actionStatus; } public ClientSiteActionType GetClientSiteActionTypeFromName(string actionTypeName) { var actionType = ClientSiteActionTypeRepo.GetQueryable(x => x.Name == actionTypeName) .FirstOrDefault(); return actionType; } public IQueryable<ClientSiteAction2FxforwardTrade2Opi> GetOPIsAssignedToTrades() { const string ClientSiteAction_Type_OpisAssignedToTrades = "RequestOPIAssignedToTrades"; var actionType = GetClientSiteActionTypeFromName(ClientSiteAction_Type_OpisAssignedToTrades); return ClientSiteAction2FxforwardTrade2OpiRepo .GetQueryable(x => x.ClientSiteAction.ClientSiteActionType.Id == actionType.Id) .Include(x => x.ClientSiteAction) .Include(x => x.ClientSiteAction.ClientSiteActionStatus) .Include(x => x.ClientSiteAction.ClientSiteActionType) .Include(x => x.FxforwardTrade2Opi) .Include(x => x.FxforwardTrade2Opi.FxforwardTradeCodeNavigation) .Include(x => x.FxforwardTrade2Opi.ClientCompanyOpi) .Include(x => x.FxforwardTrade2Opi.ClientCompanyOpi.ClientCompany); } public IQueryable<ClientSiteAction2ClientCompanyOpi> GetNewOPIRequested() { const string ClientSiteAction_Type_RequestNewOPI = "RequestNewOPI"; var actionType = GetClientSiteActionTypeFromName(ClientSiteAction_Type_RequestNewOPI); return ClientSiteAction2ClientCompanyOpiRepo .GetQueryable(x => x.ClientSiteAction.ClientSiteActionType.Id == actionType.Id) .Include(x => x.ClientSiteAction) .Include(x => x.ClientSiteAction.ClientSiteActionStatus) .Include(x => 
x.ClientSiteAction.ClientSiteActionType) .Include(x => x.ClientCompanyOpi); } public IQueryable<ClientSiteAction2FixFxforwardTrade> GetTradesWithoutFIXConfirmation() { const string ClientSiteAction_Type_NoFIXConfirmation = "RequestTradesNoFIXConfirmation"; var actionType = GetClientSiteActionTypeFromName(ClientSiteAction_Type_NoFIXConfirmation); return ClientSiteAction2FixFxforwardTradeRepo .GetQueryable(x=>x.ClientSiteAction.ClientSiteActionType.Id == actionType.Id) .Include(x => x.ClientSiteAction) .Include(x => x.ClientSiteAction.ClientSiteActionStatus) .Include(x => x.ClientSiteAction.ClientSiteActionType) .Include(x => x.FxforwardTradeCodeNavigation); } public IQueryable<ClientSiteAction2Fxswap> GetSwaps() { const string ClientSiteAction_Type_SwapCreation = "RequestSwap"; var actionType = GetClientSiteActionTypeFromName(ClientSiteAction_Type_SwapCreation); return ClientSiteAction2FxswapRepo .GetQueryable(x => x.ClientSiteAction.ClientSiteActionType.Id == actionType.Id) .Include(x => x.ClientSiteAction) .Include(x => x.ClientSiteAction.ClientSiteActionStatus) .Include(x => x.ClientSiteAction.ClientSiteActionType) .Include(x => x.Fxswap) .Include(x => x.Fxswap.ParentTradeCodeNavigation) .Include(x => x.Fxswap.ParentTradeCodeNavigation.ClientCompanyNavigation) .Include(x => x.Fxswap.DeliveryLegTradeCodeNavigation) .Include(x => x.Fxswap.ReversalLegTradeCodeNavigation) .Include(x => x.ClientSiteAction.CreatedByAuthUser.ClientCompanyContactAuthUser); } public void UpdateClientSiteAction(DataAccess.Entities.ClientSiteAction action) { var actionEntity = ClientSiteActionRepo.GetByPrimaryKey(action.Id); actionEntity.UpdatedByAuthUserId = action.UpdatedByAuthUserId; actionEntity.UpdatedDateTime = action.UpdatedDateTime; actionEntity.ClientSiteActionStatusId = action.ClientSiteActionStatusId; ClientSiteActionRepo.Update(actionEntity); SaveContext(); } public void DeleteAction2AssignedSettlementLink(long settlementId) { var csa2Trade2Opi = ClientSiteAction2FxforwardTrade2OpiRepo .GetQueryable(x => x.FxforwardTrade2Opiid == settlementId) .Select(x => new ClientSiteAction2FxforwardTrade2Opi { Id = x.Id, ClientSiteActionId = x.ClientSiteActionId, FxforwardTrade2Opiid = x.FxforwardTrade2Opiid }).SingleOrDefault(); // Only delete the client site action if it was found // Client site actions are just created from client site, if opi was assigned from // trader site then client site action is not created. 
if (csa2Trade2Opi != null) { ClientSiteAction2FxforwardTrade2OpiRepo.Delete(csa2Trade2Opi); SaveContext(); } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/BreachType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class BreachType { public BreachType() { Breach = new HashSet<Breach>(); } public int Id { get; set; } public string Name { get; set; } public string Description { get; set; } public int Sequence { get; set; } public int DefaultBreachLevelId { get; set; } public BreachLevel DefaultBreachLevel { get; set; } public ICollection<Breach> Breach { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/ClientSiteAction/ClientSiteActionController.cs using Argentex.Core.Service.ClientSiteAction; using Microsoft.AspNetCore.Mvc; using SynetecLogger; using Microsoft.AspNetCore.Authorization; using Argentex.Core.Service.Models.ClientSiteAction; namespace Argentex.Core.Api.Controllers.ClientSiteAction { [Produces("application/json")] [Route("api/csa")] public class ClientSiteActionController : Controller { private readonly IClientSiteActionService _clientSiteActionService; private readonly ILogWrapper _logger; public ClientSiteActionController(IClientSiteActionService clientSiteActionService, ILogWrapper logger) { _clientSiteActionService = clientSiteActionService; _logger = logger; } [HttpGet] [Route("{clientSiteActionID:long}")] public IActionResult GetClientSiteAction(long clientSiteActionID) { return Ok(_clientSiteActionService.GetClientSiteAction(clientSiteActionID)); } [HttpGet] [Route("trades-no-fix-confirmation")] public IActionResult GetTradesWithoutFIXConfirmation() { return Ok(_clientSiteActionService.GetTradesWithoutFIXConfirmation()); } [HttpPut] public IActionResult UpdateClientSiteAction([FromBody] ClientSiteActionModel model) { _clientSiteActionService.UpdateClientSiteAction(model); return Ok(); } [HttpGet] [Route("action-status/{actionStatusName}")] public IActionResult GetClientSiteActionStatus(string actionStatusName) { return Ok(_clientSiteActionService.GetClientSiteActionStatus(actionStatusName)); } [HttpGet("opis-assigned-trade")] public IActionResult GetOPIsAssignedToTrades() { return Ok(_clientSiteActionService.GetOPIsAssignedToTrades()); } [HttpGet("new-opi-requested")] public IActionResult GetNewOPIRequested() { return Ok(_clientSiteActionService.GetNewOPIRequested()); } [HttpGet] [Route("csa-opi/{clientCompanyOPIID:int}")] public IActionResult GetClientSiteActionByOPIID(int clientCompanyOPIID) { return Ok(_clientSiteActionService.GetClientSiteActionByOPIID(clientCompanyOPIID)); } [HttpGet("swaps")] public IActionResult GetSwaps() { return Ok(_clientSiteActionService.GetSwaps()); } protected override void Dispose(bool disposing) { if (disposing) { _clientSiteActionService?.Dispose(); } } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/AppSettings/AppSettingUow.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System; using System.Collections.Generic; using System.Linq; using Microsoft.EntityFrameworkCore; namespace Argentex.Core.UnitsOfWork.AppSettings { public class AppSettingUow : BaseUow, IAppSettingUow { private IGenericRepo<AppSetting> _appSettingRepo; private readonly IList<AppSetting> _appSettings; public AppSettingUow(FXDB1Context context) : base(context) { _appSettings = AppSettingRepo.GetQueryable().AsNoTracking().ToList(); } private 
IGenericRepo<AppSetting> AppSettingRepo => _appSettingRepo = _appSettingRepo ?? new GenericRepo<AppSetting>(Context); public AppSetting GetAppSetting(string key) { return _appSettings .FirstOrDefault(x => x.SettingKey == key); } public AppSetting GetAppSetting(AppSettingEnum key) => GetAppSetting(Enum.GetName(typeof(AppSettingEnum), key)); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Email/SettlementNoteModel.cs using System; namespace Argentex.Core.Service.Models.Email { public class SettlementNoteModel { public string ParentTradeCode { get; set; } public string TradedCurrency { get; set; } public decimal Amount { get; set; } public string InstructedBy { get; set; } public DateTime InstructedDateTime { get; set; } public DateTime ValueDate { get; set; } public string AccountName { get; set; } public decimal? SettlementAmount { get; set; } public string AccountCurrency { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Statements/StatementModel.cs using System; namespace Argentex.Core.Service.Models.Statements { public class StatementModel { public string TradeCode { get; set; } public string PaymentCode { get; set; } public int BankAccountId { get; set; } public DateTime ValueDate { get; set; } public string Event { get; set; } public bool IsDebit { get; set; } public decimal Amount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Synetec.Data.UnitOfWork/GenericRepo/GenericRepo.cs using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using Microsoft.EntityFrameworkCore; namespace Synetec.Data.UnitOfWork.GenericRepo { public class GenericRepo<TEntity> : IGenericRepo<TEntity> where TEntity : class { private readonly DbContext _context; private readonly DbSet<TEntity> _dbSet; public GenericRepo(DbContext context) { _context = context; _dbSet = context.Set<TEntity>(); } public virtual IEnumerable<TEntity> Get( Expression<Func<TEntity, bool>> filter = null, Func<IQueryable<TEntity>, IOrderedQueryable<TEntity>> orderBy = null, string includeProperties = "") { var query = GetQueryable(filter, includeProperties); return orderBy != null ? orderBy(query).ToList() : query.ToList(); } public virtual IQueryable<TEntity> GetQueryable( Expression<Func<TEntity, bool>> filter = null, Func<IQueryable<TEntity>, IOrderedQueryable<TEntity>> orderBy = null, string includeProperties = "") { var query = GetQueryable(filter, includeProperties); return orderBy != null ? 
orderBy(query): query; } public virtual IList<TEntity> GetAllAsList() { return _dbSet.ToList(); } //public virtual IQueryable<TEntity> GetAllAsQueryable() //{ // return _dbSet; //} public virtual TEntity GetByPrimaryKey(object id) { return _dbSet.Find(id); } public virtual void Insert(TEntity entity) { _dbSet.Add(entity); } public virtual void Delete(object id) { TEntity entityToDelete = _dbSet.Find(id); Delete(entityToDelete); } public virtual void Delete(TEntity entityToDelete) { if (_context.Entry(entityToDelete).State == EntityState.Detached) { _dbSet.Attach(entityToDelete); } _dbSet.Remove(entityToDelete); } public virtual void Update(TEntity entityToUpdate) { if (_context.Entry(entityToUpdate).State == EntityState.Detached) { _dbSet.Attach(entityToUpdate); } _context.Entry(entityToUpdate).State = EntityState.Modified; } public virtual void IgnoreProperty(TEntity entity, Expression<Func<TEntity, object>> property ) { //var prop = nameof(property); string prop = ConvertPropertyToString(property); _context.Entry(entity).Property(prop).IsModified = false; } /// <summary> /// This is legacy. Use C# 6 nameOf([property]) /// </summary> /// <param name="property"></param> /// <returns></returns> private string ConvertPropertyToString(Expression<Func<TEntity, object>> property) { var body = property.Body as MemberExpression; if (body == null) { body = ((UnaryExpression) property.Body).Operand as MemberExpression; } string prop = body.Member.Name; return prop; } private IQueryable<TEntity> GetQueryable(Expression<Func<TEntity, bool>> filter, string includeProperties) { IQueryable<TEntity> query = _dbSet; if (filter != null) { query = query.Where(filter); } foreach (var includeProperty in includeProperties.Split (new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries)) { query = query.Include(includeProperty); } return query; } //*****************************Context disposal****************************** //protected virtual void Dispose(bool disposing) //{ // _context = null; // _dbSet = null; //} //public void Dispose() //{ // Dispose(true); // GC.SuppressFinalize(this); //} } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ArmreportOutgoingFile.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ArmreportOutgoingFile { public ArmreportOutgoingFile() { Armreport = new HashSet<Armreport>(); } public int Id { get; set; } public string Csvfilename { get; set; } public string UploadedFilename { get; set; } public DateTime? UploadedDateTime { get; set; } public int? ArmreportOutgoingFileContentId { get; set; } public ArmreportOutgoingFileContent ArmreportOutgoingFileContent { get; set; } public ICollection<Armreport> Armreport { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Notifications/NotificationUow.cs using Argentex.Core.DataAccess.Entities; using Microsoft.EntityFrameworkCore; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System; using System.Data; using System.Data.SqlClient; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.UnitsOfWork.Notifications { public class NotificationUow : BaseUow, INotificationUow { #region Properties V1 private IGenericRepo<AppUserNotification> _appUserNotificationRepo; private IGenericRepo<AppUserNotification> AppUserNotificationRepo => _appUserNotificationRepo = _appUserNotificationRepo ?? 
new GenericRepo<AppUserNotification>(Context); #endregion #region Properties V2 //private IGenericRepo<NotificationType> _notificationTypeRepo; //private IGenericRepo<AppUserNotificationType> _appUserNotificationType; //private IGenericRepo<NotificationType> NotificationTypeRepo => // _notificationTypeRepo = _notificationTypeRepo ?? new GenericRepo<NotificationType>(Context); //private IGenericRepo<AppUserNotificationType> AppUserNotificationTypeRepo => // _appUserNotificationType = _appUserNotificationType ?? new GenericRepo<AppUserNotificationType>(Context); #endregion public NotificationUow(FXDB1Context context) : base(context) { } public IQueryable<AppUserNotification> GetCompanyAppUserNotification(int clientCompanyID) { return this.AppUserNotificationRepo .GetQueryable(e => e.ClientCompanyId == clientCompanyID); } public bool SaveAppUserNotification(AppUserNotification model) { var notificationItem = AppUserNotificationRepo.GetQueryable(e => e.AppUserId == model.AppUserId && e.ClientCompanyId == model.ClientCompanyId).SingleOrDefault(); if (notificationItem != null && notificationItem.Id > 0) { notificationItem.InwardPaymentNotifications = model.InwardPaymentNotifications; notificationItem.OutwardPaymentNotifications = model.OutwardPaymentNotifications; notificationItem.TradeNotifications = model.TradeNotifications; notificationItem.SettlementRequests = model.SettlementRequests; AppUserNotificationRepo.Update(notificationItem); } else { model.Id = 0; AppUserNotificationRepo.Insert(model); } SaveContext(); return true; } #region V2 //public IQueryable<NotificationType> GetNotificationTypes() //{ // return NotificationTypeRepo.GetQueryable(); //} //public IQueryable<AppUserNotificationType> GetCompanyNotifications(int clientCompanyID) //{ // return AppUserNotificationTypeRepo.GetQueryable(x => x.ClientCompanyID == clientCompanyID); //} //public bool SaveUserNotification(AppUserNotificationType model) //{ // AppUserNotificationTypeRepo.Insert(model); // SaveContext(); // return true; //} #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/Users/Model/UserChangeRequestResponse.cs using System; using System.Collections.Generic; namespace Argentex.Core.UnitsOfWork.Users.Model { public class UserChangeRequestResponse { public bool InsertOrUpdateUserChangeRequest { get; set; } public bool SendUserChangeAlerts { get; set; } public string WarningMessage { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/SynetecLogger/Log4Net/Log4NetWrapper.cs using log4net; using log4net.Appender; using log4net.Config; using log4net.Core; using System; using System.IO; using System.Reflection; using System.Xml; [assembly: log4net.Config.XmlConfigurator(Watch = true)] namespace SynetecLogger { /// <summary> /// This is log4net wrapper which log to file to executing assembly location. /// Pass in fileName and/or hosting directory which will be created inside current directory. 
/// </summary> public class Log4NetWrapper : ILogWrapper { private static readonly ILog _logger = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType); private bool _disposed; private FileAppender appender; public Log4NetWrapper(string fileName) { Directory.SetCurrentDirectory(System.AppDomain.CurrentDomain.BaseDirectory); XmlDocument log4netConfig = new XmlDocument(); log4netConfig.Load(File.OpenRead(Path.Combine(Directory.GetCurrentDirectory(), "log4net.config"))); var repo = log4net.LogManager.CreateRepository( Assembly.GetEntryAssembly(), typeof(log4net.Repository.Hierarchy.Hierarchy)); log4net.Config.XmlConfigurator.Configure(repo, log4netConfig["log4net"]); appender = (log4net.Appender.RollingFileAppender)repo.GetAppenders()[0]; appender.File = Path.Combine(Directory.GetCurrentDirectory(), fileName); //appender.ActivateOptions(); } public void Debug(string message) { throw new NotImplementedException(); } /// <summary> /// Logs exception to the file and shuts down repository, i.e. does not lock a file. /// Suitable for singleton class, but for any other class/project. /// If you need disposable (scoped) please implement the same interface with the same code /// and activate logger in constructor and shut down repository in Dispose() as commented out code below. /// </summary> /// <param name="ex"></param> public void Error(Exception ex) { ActivateLogger(); if (_logger.IsErrorEnabled) { var exception = ex; _logger.Error(exception.Message, exception); while (exception.InnerException != null) { exception = exception.InnerException; _logger.Error(exception.Message, exception); } } _logger.Logger.Repository.Shutdown(); } public void Fatal(Exception ex) { throw new NotImplementedException(); } public void Info(string info) { throw new NotImplementedException(); } private void ActivateLogger() { appender.ActivateOptions(); } //public ILogger GetLogger() //{ // return _logger.Logger; //} //protected virtual void Dispose(bool disposing) //{ // if (!_disposed) // { // if (disposing) // { // _logger.Logger.Repository.Shutdown(); // } // } // _disposed = true; //} //public void Dispose() //{ // Dispose(true); // GC.SuppressFinalize(this); //} } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/ClientCompanies/ClientCompanyAccountsController.cs using System; using System.Linq; using Argentex.Core.Api.Models.ClientCompanies; using Argentex.Core.Service; using Argentex.Core.Service.ClientCompanies; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Models.ClientCompany; using Argentex.Core.Service.Settlements; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using SynetecLogger; namespace Argentex.Core.Api.Controllers.ClientCompanies { [Produces("application/json")] [Route("api/client-company-accounts")] public class ClientCompanyAccountsController : Controller { private readonly IClientCompanyAccountsService _clientCompanyAccountsService; private readonly ISettlementService _settlementService; private readonly ILogWrapper _logger; public ClientCompanyAccountsController(IClientCompanyAccountsService clientCompanyAccountsService, ISettlementService settlementService, ILogWrapper logger) { _clientCompanyAccountsService = clientCompanyAccountsService; _settlementService = settlementService; _logger = logger; } [HttpGet("{clientCompanyId:int}")] public IActionResult GetClientCompanyAccounts(int clientCompanyId) { try { var accounts = _clientCompanyAccountsService.GetClientCompanyAccounts(clientCompanyId); if 
(!accounts.Any()) return NoContent(); var mappedAccounts = accounts.Select(MapClientCompanyAccount); return Ok(mappedAccounts); } catch (ClientCompanyNotFoundException e) { _logger.Error(e); return BadRequest(e.Message); } } [HttpGet("client-company-account/{clientCompanyOpiId:int}")] public IActionResult GetClientCompanyAccount(int clientCompanyOpiId) { return Ok(_clientCompanyAccountsService.GetClientCompanyAccount(clientCompanyOpiId)); } [HttpPost] [Route("add")] public IActionResult AddClientCompanyAccount([FromBody] SettlementAccountModel model) { if (!ModelState.IsValid) { return BadRequest(ModelState); } if (String.IsNullOrEmpty(model.AccountNumber.ToString()) && String.IsNullOrEmpty(model.Iban)) { return BadRequest("Both Account name and IBAN are empty."); } _clientCompanyAccountsService.AddSettlementAccount(model); return Ok(); } [HttpPost("edit")] public IActionResult EditClientCompanyAccount([FromBody] SettlementAccountModel model) { if (!ModelState.IsValid) { return BadRequest(ModelState); } if (string.IsNullOrEmpty(model.AccountNumber.ToString()) && string.IsNullOrEmpty(model.Iban)) { return BadRequest("Both Account name and IBAN are empty."); } _clientCompanyAccountsService.EditSettlementAccount(model); return Ok(); } [HttpGet] [Route("clearing-code-prefixes")] public IActionResult GetClearingCodePrefixes() { return Ok(_clientCompanyAccountsService.GetClearingCodePrefixes()); } [HttpPost("set-as-default")] public IActionResult SetAccountAsDefault([FromBody] SetDefaultAccountModel model) { _clientCompanyAccountsService.SetAccountAsDefault(model); return Ok(); } [HttpGet("trade-opis/{*tradeCode}")] public IActionResult GetTradeOPIs(string tradeCode) { return Ok(_clientCompanyAccountsService.GetTradeOPIs(tradeCode)); } [HttpPost("trade-opis/add")] public IActionResult AddTradeOPI([FromBody] FXForwardTrade2OPIModel model) { _clientCompanyAccountsService.AddTradeOPI(model); return Ok(); } [HttpDelete("trade-opis/delete/{opiTradeAllocationID:long}")] public IActionResult DeleteTradeOPIAllocation(long opiTradeAllocationID) { _settlementService.DeleteAssignedSettlements(opiTradeAllocationID); return Ok(); } private static ClientCompanyAccountDto MapClientCompanyAccount(ClientCompanyAccountModel account) { return new ClientCompanyAccountDto { ClientCompanyOpiId = account.ClientCompanyOpiId, ClientCompanyId = account.ClientCompanyId, AccountName = account.AccountName, AccountNumber = account.AccountNumber, Currency = account.Currency }; } [HttpDelete("assigned-settlements/{clientCompanyOpiId:int}")] public IActionResult DeleteSettlementAccount(int clientCompanyOpiId, [FromQuery] int authUserId) { if (clientCompanyOpiId.Equals(null) || clientCompanyOpiId.Equals(0)) return BadRequest("There is no Client Company OPI related to the provided ID."); _clientCompanyAccountsService.DeleteSettlementAccount(clientCompanyOpiId, authUserId); return Ok(); } [HttpGet("assigned-settlements/count/{*clientCompanyOpiId}")] public IActionResult GetNumberOfAssignedSettlements(int clientCompanyOpiId) { if (clientCompanyOpiId.Equals(null) || clientCompanyOpiId.Equals(0)) return BadRequest("There is no Client Company OPI related to the provided ID."); int numberOfAssignedSettlements = _clientCompanyAccountsService.GetNumberOfAssociatedTrades(clientCompanyOpiId); return Ok(numberOfAssignedSettlements); } protected override void Dispose(bool disposing) { if (disposing) { _clientCompanyAccountsService.Dispose(); } } } } 
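// ---------------------------------------------------------------------------------
// Illustrative sketch only (not part of the original source). It shows how a couple of
// the ClientCompanyAccountsController routes defined above might be exercised from a
// plain HttpClient. The base address, the example ids and the Examples namespace are
// placeholder assumptions; the sketch is not wired into the API project itself.
// ---------------------------------------------------------------------------------
namespace Argentex.Core.Api.Examples
{
    using System;
    using System.Net.Http;
    using System.Threading.Tasks;

    public static class ClientCompanyAccountsClientSketch
    {
        public static async Task RunAsync()
        {
            using (var client = new HttpClient { BaseAddress = new Uri("https://localhost:5001/") })
            {
                // GET api/client-company-accounts/{clientCompanyId:int}
                HttpResponseMessage accounts = await client.GetAsync("api/client-company-accounts/1");
                Console.WriteLine($"GetClientCompanyAccounts returned {(int)accounts.StatusCode}");

                // GET api/client-company-accounts/assigned-settlements/count/{clientCompanyOpiId}
                HttpResponseMessage count = await client.GetAsync("api/client-company-accounts/assigned-settlements/count/1");
                Console.WriteLine($"GetNumberOfAssignedSettlements returned {(int)count.StatusCode}");
            }
        }
    }
}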
<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Email/PaymentNotificationModel.cs using System; using Argentex.Core; using Argentex.Core.DataAccess.Entities; namespace Argentex.Core.Service.Models.Email { public class PaymentNotificationModel { public string PaymentTypeDescription { get; set; } public string PaymentCode { get; set; } public decimal PaymentAmount { get; set; } public DateTime ValueDate { get; set; } public string Reference { get; set; } public Currency Currency { get; set; } public Argentex.Core.DataAccess.Entities.ClientCompany ClientCompany { get; set; } public ClientCompanyOpi ClientCompanyOpi { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientCompanyContacts/ClientCompanyUow.cs using Argentex.Core.DataAccess.Entities; using Microsoft.EntityFrameworkCore; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System; using System.Collections.Generic; using System.Linq; using SynetecLogger; using Argentex.Core.UnitsOfWork.ClientCompanyContacts.Model; namespace Argentex.Core.UnitsOfWork.ClientCompanyContacts { public class ClientCompanyUow : BaseUow, IClientCompanyUow { private IGenericRepo<ClientCompanyContact> _clientCompanyContactRepository; private IGenericRepo<ClientCompany> _clientCompanyRepository; private IGenericRepo<ClientCompanyOpi> _clientCompanyOpiRepository; private IGenericRepo<ClientCompanyOnlineDetails> _clientCompanyOnlineDetailsRepository; private IGenericRepo<ClientCompanyOnlineDetailsSkew> _clientCompanyOnlineDetailsSkewRepository; private IGenericRepo<ClientCompanyOnlineSpreadAdjustment> _clientCompanyOnlineSpreadAdjustmentRepository; private IGenericRepo<ClientCompanyContactCategory> _clientCompanyContactCategoryRepository; private IGenericRepo<ContactCategory> _contactCategoryRepository; private IGenericRepo<LogClientCompanyContact> _logClientCompanyContactRepository; private IGenericRepo<LogClientCompanyContactCategory> _logClientCompanyContactCategoryRepository; private readonly ILogWrapper _logger; private IGenericRepo<ClientCompany> ClientCompanyRepository => _clientCompanyRepository = _clientCompanyRepository ?? new GenericRepo<ClientCompany>(Context); private IGenericRepo<ClientCompanyOpi> ClientCompanyOpiRepository => _clientCompanyOpiRepository = _clientCompanyOpiRepository ?? new GenericRepo<ClientCompanyOpi>(Context); public IGenericRepo<ClientCompanyContact> ClientCompanyContactRepository => _clientCompanyContactRepository = _clientCompanyContactRepository ?? new GenericRepo<ClientCompanyContact>(Context); private IGenericRepo<ClientCompanyOnlineDetails> ClientCompanyOnlineDetailsRepository => _clientCompanyOnlineDetailsRepository = _clientCompanyOnlineDetailsRepository ?? new GenericRepo<ClientCompanyOnlineDetails>(Context); private IGenericRepo<ClientCompanyOnlineDetailsSkew> ClientCompanyOnlineDetailsSkewRepository => _clientCompanyOnlineDetailsSkewRepository = _clientCompanyOnlineDetailsSkewRepository ?? new GenericRepo<ClientCompanyOnlineDetailsSkew>(Context); private IGenericRepo<ClientCompanyOnlineSpreadAdjustment> ClientCompanyOnlineSpreadAdjustmentRepository => _clientCompanyOnlineSpreadAdjustmentRepository = _clientCompanyOnlineSpreadAdjustmentRepository ?? new GenericRepo<ClientCompanyOnlineSpreadAdjustment>(Context); private IGenericRepo<ClientCompanyContactCategory> ClientCompanyContactCategoryRepository => _clientCompanyContactCategoryRepository = _clientCompanyContactCategoryRepository ?? 
new GenericRepo<ClientCompanyContactCategory>(Context); private IGenericRepo<ContactCategory> ContactCategoryRepository => _contactCategoryRepository = _contactCategoryRepository ?? new GenericRepo<ContactCategory>(Context); public IGenericRepo<LogClientCompanyContact> LogClientCompanyContactRepository => _logClientCompanyContactRepository = _logClientCompanyContactRepository ?? new GenericRepo<LogClientCompanyContact>(Context); public IGenericRepo<LogClientCompanyContactCategory> LogClientCompanyContactCategoryRepository => _logClientCompanyContactCategoryRepository = _logClientCompanyContactCategoryRepository ?? new GenericRepo<LogClientCompanyContactCategory>(Context); public ClientCompanyUow(FXDB1Context context) : base (context) { } public ClientCompanyUow(FXDB1Context context, ILogWrapper logger) : base(context) { _logger = logger; } public IQueryable<ClientCompany> GetClientCompany(int clientCompanyId) { return ClientCompanyRepository .GetQueryable(x => x.Id == clientCompanyId); } public IQueryable<ClientCompany> GetClientCompanies() { return ClientCompanyRepository.Get().AsQueryable(); } public IQueryable<ClientCompanyOpi> GetClientCompanyAccounts(int clientCompanyId) { return ClientCompanyOpiRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId && !x.IsDeleted); } public IQueryable<ClientCompanyContact> GetClientCompanyContact(int clientCompanyId) { return ClientCompanyContactRepository .GetQueryable(x => x.ClientCompanyId == clientCompanyId); } public void UpdateCompanyQualifiedTradeDetails(int clientCompanyId, string qualifiedTradeCode, int authUserId) { ClientCompany clientCompany = GetClientCompany(clientCompanyId).SingleOrDefault(); clientCompany.QualifiedNewTradeCode = qualifiedTradeCode; clientCompany.UpdatedByAuthUserId = authUserId; clientCompany.UpdatedDateTime = DateTime.Now; ClientCompanyRepository.Update(clientCompany); SaveContext(); } public void UpdateCompanyFirstTradeDate(int clientCompanyId, int authUserId) { ClientCompany clientCompany = GetClientCompany(clientCompanyId).SingleOrDefault(); if (clientCompany.FirstTradeDate == null) { clientCompany.FirstTradeDate = DateTime.Now; clientCompany.UpdatedByAuthUserId = authUserId; clientCompany.UpdatedDateTime = DateTime.Now; ClientCompanyRepository.Update(clientCompany); SaveContext(); } } public void UpdateCompanyLastContractDate(int clientCompanyId, DateTime? 
tradeContractDate, int authUserId)
        {
            ClientCompany clientCompany = GetClientCompany(clientCompanyId).SingleOrDefault();

            if (tradeContractDate.HasValue)
            {
                clientCompany.LastContractDate = tradeContractDate;
                clientCompany.UpdatedByAuthUserId = authUserId;
                clientCompany.UpdatedDateTime = DateTime.Now;

                ClientCompanyRepository.Update(clientCompany);
                SaveContext();
            }
        }

        public IQueryable<ClientCompanyOnlineDetails> GetClientCompanyOnlineDetails(int clientCompanyId)
        {
            return ClientCompanyOnlineDetailsRepository
                .GetQueryable(x => x.ClientCompanyId == clientCompanyId);
        }

        public IQueryable<ClientCompanyOnlineDetailsSkew> GetClientCompanyOnlineDetailsSkew(int clientCompanyId, int currency1Id, int currency2Id, bool isBuy)
        {
            return ClientCompanyOnlineDetailsSkewRepository
                .GetQueryable(x => x.ClientCompanyOnlineDetails.ClientCompanyId == clientCompanyId
                                   && x.ClientCompanyOnlineDetails.AllowOnlineTrading
                                   && x.Currency1Id == currency1Id
                                   && x.Currency2Id == currency2Id
                                   && x.IsBuy == isBuy,
                    orderBy: null,
                    includeProperties: "ClientCompanyOnlineDetails")
                .OrderByDescending(x => x.UpdatedDateTime);
        }

        public IQueryable<ClientCompanyOnlineSpreadAdjustment> GetClientCompanyOnlineSpreadAdjustment(int clientCompanyId, int currency1Id, int currency2Id, bool isBuy)
        {
            return ClientCompanyOnlineSpreadAdjustmentRepository
                .GetQueryable(x => x.ClientCompanyOnlineDetails.ClientCompanyId == clientCompanyId
                                   && x.ClientCompanyOnlineDetails.AllowOnlineTrading
                                   && x.Currency1Id == currency1Id
                                   && x.Currency2Id == currency2Id
                                   && x.IsBuy == isBuy,
                    orderBy: null,
                    includeProperties: "ClientCompanyOnlineDetails")
                .OrderByDescending(x => x.UpdatedDateTime);
        }

        public void AddClientCompanyOnlineSpreadAdjustment(ClientCompanyOnlineSpreadAdjustment model)
        {
            ClientCompanyOnlineSpreadAdjustmentRepository.Insert(model);
            SaveContext();
        }

        public void SetClientCompanyOnlineKicked(int clientCompanyId)
        {
            var clientCompanyOnlineDetails = GetClientCompanyOnlineDetails(clientCompanyId).SingleOrDefault();
            clientCompanyOnlineDetails.Kicked = true;

            ClientCompanyOnlineDetailsRepository.Update(clientCompanyOnlineDetails);
            SaveContext();
        }

        public IQueryable<ClientCompanyContactCategory> GetClientCompanyContactCategories(int clientCompanyContactId)
        {
            // Include the ContactCategory navigation itself; including one of its scalar
            // properties (e.g. ContactCategory.Description) is not a valid Include expression.
            return ClientCompanyContactCategoryRepository
                .GetQueryable(x => x.ClientCompanyContactId == clientCompanyContactId)
                .Include(x => x.ContactCategory);
        }

        public IQueryable<ContactCategory> GetContactCategories()
        {
            return ContactCategoryRepository
                .GetQueryable();
        }

        public void AddContactCategory(ContactCategory entity)
        {
            ContactCategoryRepository.Insert(entity);
            SaveContext();
        }

        public IQueryable<ContactCategory> GetContactCategory(int contactCategoryId)
        {
            return ContactCategoryRepository.Get(x => x.Id == contactCategoryId).AsQueryable();
        }

        public IQueryable<ContactCategory> GetContactCategory(string contactCategoryDescription)
        {
            return ContactCategoryRepository.Get(x => x.Description == contactCategoryDescription).AsQueryable();
        }

        public bool ProcessClientCompanyContactCategories(List<int> unassignClientCompanyContactCategoryIds, List<int> assignClientCompanyContactCategoryIds, int clientCompanyContactId, int authUserId)
        {
            using (var transaction = Context.Database.BeginTransaction())
            {
                try
                {
                    foreach (int unassignContactCategoryId in unassignClientCompanyContactCategoryIds)
                    {
                        LogClientCompanyContactCategoryRepository.Insert(new LogClientCompanyContactCategory()
                        {
                            LogAction = "UNASSIGN",
                            ClientCompanyContactId = clientCompanyContactId,
                            ContactCategoryId = unassignContactCategoryId,
                            DateCreated =
DateTime.Now, CreatedByAuthUserId = authUserId }); ClientCompanyContactCategoryRepository.Delete(new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = unassignContactCategoryId, }); } foreach (int assignContactCategoryId in assignClientCompanyContactCategoryIds) { ClientCompanyContactCategoryRepository.Insert(new ClientCompanyContactCategory() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = assignContactCategoryId, DateCreated = DateTime.Now, CreatedByAuthUserId = authUserId, }); LogClientCompanyContactCategoryRepository.Insert(new LogClientCompanyContactCategory() { LogAction = "ASSIGN", ClientCompanyContactId = clientCompanyContactId, ContactCategoryId = assignContactCategoryId, DateCreated = DateTime.Now, CreatedByAuthUserId = authUserId }); } SaveContext(); transaction.Commit(); return true; } catch (Exception ex) { transaction.Rollback(); _logger.Error(ex); return false; } } } public ClientCompanyContact GetCurrentClientCompanyContact(ClientCompanyContactSearchModel clientCompanyContactSearchContext) { ClientCompanyContact clientCompanyContact = new ClientCompanyContact(); if (clientCompanyContactSearchContext != null) { // Check if ClientCompanyContactId is passed and get data by it, else get data by AuthUserId clientCompanyContact = clientCompanyContactSearchContext.ClientCompanyContactId != null ? ClientCompanyContactRepository.GetQueryable().Include(c => c.AuthUser) .Include(c => c.ClientCompany).SingleOrDefault(c => c.Id == clientCompanyContactSearchContext.ClientCompanyContactId) : ClientCompanyContactRepository.GetQueryable().Include(c => c.AuthUser) .Include(c => c.ClientCompany).SingleOrDefault(c => c.AuthUserId == clientCompanyContactSearchContext.AuthUsertId); } return clientCompanyContact; } public IQueryable<ClientCompanyContact> GetClientCompanyContactList(int clientCompanyID) { var clientCompanyContactList = ClientCompanyContactRepository.GetQueryable(x => x.ClientCompanyId == clientCompanyID && !x.IsDeleted); return clientCompanyContactList; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Migrations/20180725161908_UniqueAuthUserId.cs using Microsoft.EntityFrameworkCore.Migrations; namespace Argentex.Core.Identity.DataAccess.Migrations { public partial class UniqueAuthUserId : Migration { protected override void Up(MigrationBuilder migrationBuilder) { migrationBuilder.AddUniqueConstraint( name: "AK_User_AuthUserId", table: "User", column: "AuthUserId"); migrationBuilder.AddUniqueConstraint( name: "AK_User_ClientCompanyContactId", table: "User", column: "ClientCompanyContactId"); } protected override void Down(MigrationBuilder migrationBuilder) { migrationBuilder.DropUniqueConstraint( name: "AK_User_AuthUserId", table: "User"); migrationBuilder.DropUniqueConstraint( name: "AK_User_ClientCompanyContactId", table: "User"); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AuthPermission.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AuthPermission { public AuthPermission() { AuthRolePermission = new HashSet<AuthRolePermission>(); NavMenuItem = new HashSet<NavMenuItem>(); } public int Id { get; set; } public string Description { get; set; } public ICollection<AuthRolePermission> AuthRolePermission { get; set; } public ICollection<NavMenuItem> NavMenuItem { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Emirstatus.cs using System; using 
System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Emirstatus { public Emirstatus() { ArmfxForwardTradeStatusesHistory = new HashSet<ArmfxForwardTradeStatusesHistory>(); ArmreportFxforwardTrade = new HashSet<ArmreportFxforwardTrade>(); EmirreportFxforwardTrade = new HashSet<EmirreportFxforwardTrade>(); FixApatradeCapture = new HashSet<FixApatradeCapture>(); FxforwardTradeApastatus = new HashSet<FxforwardTrade>(); FxforwardTradeArmstatus = new HashSet<FxforwardTrade>(); FxforwardTradeEmirstatus = new HashSet<FxforwardTrade>(); } public int Id { get; set; } public string Description { get; set; } public int Sequence { get; set; } public ICollection<ArmfxForwardTradeStatusesHistory> ArmfxForwardTradeStatusesHistory { get; set; } public ICollection<ArmreportFxforwardTrade> ArmreportFxforwardTrade { get; set; } public ICollection<EmirreportFxforwardTrade> EmirreportFxforwardTrade { get; set; } public ICollection<FixApatradeCapture> FixApatradeCapture { get; set; } public ICollection<FxforwardTrade> FxforwardTradeApastatus { get; set; } public ICollection<FxforwardTrade> FxforwardTradeArmstatus { get; set; } public ICollection<FxforwardTrade> FxforwardTradeEmirstatus { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/Armreport.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class Armreport { public Armreport() { ArmfxForwardTradeStatusesHistory = new HashSet<ArmfxForwardTradeStatusesHistory>(); ArmreportFxforwardTrade = new HashSet<ArmreportFxforwardTrade>(); } public int Id { get; set; } public DateTime CreatedDateTime { get; set; } public int? ArmreportOutgoingFileId { get; set; } public ArmreportOutgoingFile ArmreportOutgoingFile { get; set; } public ICollection<ArmfxForwardTradeStatusesHistory> ArmfxForwardTradeStatusesHistory { get; set; } public ICollection<ArmreportFxforwardTrade> ArmreportFxforwardTrade { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/Report.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class Report { [Key] [DatabaseGenerated(DatabaseGeneratedOption.Identity)] public long ReportId { get; set; } [MaxLength(128)] public string Description { get; set; } //[ForeignKey(nameof(UserReport.ReportId))] public virtual ICollection<UserReport> UserReports { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ClientCompanyContact.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ClientCompanyContact { public ClientCompanyContact() { FxforwardTrade = new HashSet<FxforwardTrade>(); Fxoption = new HashSet<Fxoption>(); ClientCompanyContactCategory = new HashSet<ClientCompanyContactCategory>(); } public int Id { get; set; } public int ClientCompanyId { get; set; } public string Title { get; set; } public string Forename { get; set; } public string Surname { get; set; } public string Email { get; set; } public string TelephoneDirect { get; set; } public string TelephoneMobile { get; set; } public string TelephoneOther { get; set; } public DateTime? 
Birthday { get; set; } public bool Authorized { get; set; } public byte[] UpdateTimeStamp { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public string Notes { get; set; } public string Fullname { get; set; } public bool RecNotifications { get; set; } public bool RecAmreport { get; set; } public int? AuthUserId { get; set; } public string Position { get; set; } public bool? PrimaryContact { get; set; } public bool RecActivityReport { get; set; } public bool IsDeleted { get; set; } public string Aspnumber { get; set; } public DateTime? AspcreationDate { get; set; } public DateTime? LastTelephoneChangeDate { get; set; } public DateTime? LastEmailChangeDate { get; set; } public string BloombergGpi { get; set; } public string NiNumber { get; set; } public AuthUser AuthUser { get; set; } public ClientCompany ClientCompany { get; set; } public AuthUser UpdatedByAuthUser { get; set; } public ICollection<FxforwardTrade> FxforwardTrade { get; set; } public ICollection<Fxoption> Fxoption { get; set; } public ICollection<ClientCompanyContactCategory> ClientCompanyContactCategory { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxoptionOutputsTemplate.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxoptionOutputsTemplate { public int Id { get; set; } public int? FxoptionTypeId { get; set; } public string Template { get; set; } public bool? IsBuy { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/Statements/StatementDto.cs using System; namespace Argentex.Core.Api.Models.Statements { public class StatementDto { public string TradeCode { get; set; } public string PaymentCode { get; set; } public int BankAccountId { get; set; } public DateTime ValueDate { get; set; } public string Event { get; set; } public bool IsDebit { get; set; } public decimal Amount { get; set; } public decimal Balance { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AppUser.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AppUser { public AppUser() { AppUserEmailAlternative = new HashSet<AppUserEmailAlternative>(); ClientCompanySalesAppUser = new HashSet<ClientCompanySalesAppUser>(); Commission = new HashSet<Commission>(); } public int Id { get; set; } public string Forename { get; set; } public string Surname { get; set; } public int AppUserTypeId { get; set; } public int AuthUserId { get; set; } public byte[] UpdateTimeStamp { get; set; } public string FullName { get; set; } public bool IsDirector { get; set; } public string Ipaddress { get; set; } public string Extension { get; set; } public string Aspnumber { get; set; } public DateTime? AspcreationDate { get; set; } public string TelephoneNumber { get; set; } public int? TelephoneCountryCodeId { get; set; } public DateTime? UserStartDate { get; set; } public bool Is2Famember { get; set; } public bool IsUserManager { get; set; } public DateTime? LastTelephoneChangeDate { get; set; } public DateTime? 
LastEmailChangeDate { get; set; } public string BloombergGpi { get; set; } public bool OnlineTradingNotifications { get; set; } public AppUserType AppUserType { get; set; } public AuthUser AuthUser { get; set; } public TelephoneCountryCode TelephoneCountryCode { get; set; } public ICollection<AppUserEmailAlternative> AppUserEmailAlternative { get; set; } public ICollection<ClientCompanySalesAppUser> ClientCompanySalesAppUser { get; set; } public ICollection<Commission> Commission { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LastWorkingDay.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LastWorkingDay { public int Year { get; set; } public int Month { get; set; } public DateTime LastWorkingDay1 { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/ClientCompanyContact/ClientCompanyControllerTests.cs using System; using System.Collections.Generic; using Argentex.Core.Service; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System.Net; using System.Threading.Tasks; using Argentex.Core.Api.Controllers.Client; using Argentex.Core.Api.Models; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Models.ClientCompany; using FluentAssertions; using Xunit; using Microsoft.AspNetCore.Http; using Argentex.Core.Api.Controllers; namespace Argentex.Core.Api.Tests.ClientCompanyContact { public class ClientCompanyControllerTests { [Fact] public void Add_Contact_Category_Success() { // Given var model = new ContactCategoryModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var returnValue = true; serviceMock.Setup(x => x.AddContactCategory(It.IsAny<ContactCategoryModel>())).Returns(returnValue); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.AddContactCategory(model); var result = response as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Info"); } [Fact] public void Add_Contact_Category_Failed() { // Given var model = new ContactCategoryModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var returnValue = false; serviceMock.Setup(x => x.AddContactCategory(It.IsAny<ContactCategoryModel>())).Returns(returnValue); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.AddContactCategory(model); var result = response as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); } [Fact] public void 
Add_Contact_Category_Failed_ModelState_Invalid() { // Given var model = new ContactCategoryModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var returnValue = true; serviceMock.Setup(x => x.AddContactCategory(It.IsAny<ContactCategoryModel>())).Returns(returnValue); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); controller.ModelState.AddModelError("Error", "An error has occurred"); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.AddContactCategory(model); var result = response as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); var responseModel = (ResponseModel)result.Value; responseModel.ResponseMessages.Should().NotBeEmpty(); responseModel.ResponseMessages.Should().ContainKey("Errors"); responseModel.ResponseMessages.Should().HaveCount(1); } [Fact] public void Get_Contact_Categories_Success() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); const string contactCategoryDescription1 = "EUR/USD"; const string contactCategoryDescription2 = "EUR/GBP"; const string contactCategoryDescription3 = "GBP/USD"; const string contactCategoryDescription4 = "GBP/EUR"; const int contactCategoryId1 = 1; const int contactCategoryId2 = 2; const int contactCategoryId3 = 3; const int contactCategoryId4 = 4; const int contactCategorySequence1 = 1; const int contactCategorySequence2 = 2; const int contactCategorySequence3 = 3; const int contactCategorySequence4 = 4; IEnumerable<ContactCategoryModel> contactCategoryModels = new List<ContactCategoryModel>() { new ContactCategoryModel() { Id = contactCategoryId1, Description = contactCategoryDescription1, Sequence = contactCategorySequence1 }, new ContactCategoryModel() { Id = contactCategoryId2, Description = contactCategoryDescription2, Sequence = contactCategorySequence2 }, new ContactCategoryModel() { Id = contactCategoryId3, Description = contactCategoryDescription3, Sequence = contactCategorySequence3 }, new ContactCategoryModel() { Id = contactCategoryId4, Description = contactCategoryDescription4, Sequence = contactCategorySequence4 } }; serviceMock.Setup(x => x.GetContactCategories()).Returns(Task.FromResult(contactCategoryModels)); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedResultValueType = typeof(List<ContactCategoryModel>); // When var response = controller.GetContactCategories(); var result = response.Result as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedResultValueType); result.Value.Should().BeEquivalentTo(contactCategoryModels); } [Fact] public void Get_Contact_Categories_Failed() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); serviceMock.Setup(x => x.GetContactCategories()).Throws(new Exception("An error has occurred")); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = 
HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.GetContactCategories(); var result = response.Result as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void Get_Client_Company_Contact_Categories_Success() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); int clientCompanyContactId = 44; const string contactCategoryDescription1 = "EUR/USD"; const string contactCategoryDescription2 = "EUR/GBP"; const string contactCategoryDescription3 = "GBP/USD"; const string contactCategoryDescription4 = "GBP/EUR"; const int contactCategoryId1 = 1; const int contactCategoryId2 = 2; const int contactCategoryId3 = 3; const int contactCategoryId4 = 4; IEnumerable<ClientCompanyContactCategoryModel> contactCategoryModels = new List<ClientCompanyContactCategoryModel>() { new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription1, ContactCategoryId = contactCategoryId1 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription2, ContactCategoryId = contactCategoryId2 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription3, ContactCategoryId = contactCategoryId3 }, new ClientCompanyContactCategoryModel() { ClientCompanyContactId = clientCompanyContactId, ContactCategoryDescription = contactCategoryDescription4, ContactCategoryId = contactCategoryId4 }, }; serviceMock.Setup(x => x.GetClientCompanyContactCategories(It.IsAny<int>())).Returns(Task.FromResult(contactCategoryModels)); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedResultValueType = typeof(List<ClientCompanyContactCategoryModel>); // When var response = controller.GetClientCompanyContactCategories(clientCompanyContactId); var result = response.Result as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedResultValueType); result.Value.Should().BeEquivalentTo(contactCategoryModels); } [Fact] public void Get_Client_Company_Contact_Categories_Failed() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); int clientCompanyContactId = 44; serviceMock.Setup(x => x.GetClientCompanyContactCategories(It.IsAny<int>())).Throws(new Exception("An error has occurred")); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.GetClientCompanyContactCategories(clientCompanyContactId); var 
result = response.Result as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void Get_Contact_Category_By_Id_Success() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); int contactCategoryId = 44; string contactCategoryDescription = "EUR/USD"; var contactCategorySequence = 1; ContactCategory contactCategory = new ContactCategory() { Id = contactCategoryId, Description = contactCategoryDescription, Sequence = contactCategorySequence }; serviceMock.Setup(x => x.GetContactCategory(It.IsAny<int>())).Returns(contactCategory); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedResultValueType = typeof(ContactCategory); // When var response = controller.GetContactCategory(contactCategoryId); var result = response as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedResultValueType); result.Value.Should().BeEquivalentTo(contactCategory); } [Fact] public void Get_Contact_Category_By_Id_Failed() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); int contactCategoryId = 44; serviceMock.Setup(x => x.GetContactCategory(It.IsAny<int>())).Throws(new Exception("An error has occurred")); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.GetContactCategory(contactCategoryId); var result = response as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void Get_Contact_Category_By_Id_Not_Found() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); int contactCategoryId = 44; serviceMock.Setup(x => x.GetContactCategory(It.IsAny<int>())).Returns((ContactCategory) null); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); // When var response = controller.GetContactCategory(contactCategoryId); var result = response as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeNull(); } [Fact] public void Get_Contact_Category_By_Description_Success() { // Given var serviceMock = new Mock<IClientCompanyService>(); var 
loggerMock = new Mock<ILogWrapper>(); int contactCategoryId = 44; string contactCategoryDescription = "EUR/USD"; var contactCategorySequence = 1; ContactCategory contactCategory = new ContactCategory() { Id = contactCategoryId, Description = contactCategoryDescription, Sequence = contactCategorySequence }; serviceMock.Setup(x => x.GetContactCategory(It.IsAny<string>())).Returns(contactCategory); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedResultValueType = typeof(ContactCategory); // When var response = controller.GetContactCategory(contactCategoryDescription); var result = response as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedResultValueType); result.Value.Should().BeEquivalentTo(contactCategory); } [Fact] public void Get_Contact_Category_By_Description_Failed() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); string contactCategoryDescription = "EUR/USD"; serviceMock.Setup(x => x.GetContactCategory(It.IsAny<string>())).Throws(new Exception("An error has occurred")); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.GetContactCategory(contactCategoryDescription); var result = response as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void Get_Contact_Category_By_Description_NotFound() { // Given var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); string contactCategoryDescription = "EUR/USD"; serviceMock.Setup(x => x.GetContactCategory(It.IsAny<string>())).Returns((ContactCategory)null); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); // When var response = controller.GetContactCategory(contactCategoryDescription); var result = response as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeNull(); } [Fact] public void Process_Client_Company_Contact_Categories_Success() { // Given var model = new ClientCompanyContactBulkCategoryModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var returnValue = true; serviceMock.Setup(x => x.ProcessClientCompanyContactCategories(It.IsAny<ClientCompanyContactBulkCategoryModel>())).Returns(returnValue); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedResultType = typeof(OkObjectResult); var expectedType = typeof(ResponseModel); // When var 
response = controller.ProcessClientCompanyContactCategories(model); var result = response as OkObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Info"); } [Fact] public void Process_Client_Company_Contact_Categories_Failed() { // Given var model = new ClientCompanyContactBulkCategoryModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var returnValue = false; serviceMock.Setup(x => x.ProcessClientCompanyContactCategories(It.IsAny<ClientCompanyContactBulkCategoryModel>())).Returns(returnValue); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.ProcessClientCompanyContactCategories(model); var result = response as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); } [Fact] public void Process_Client_Company_Contact_Categories_Failed_ModelState_Invalid() { // Given var model = new ClientCompanyContactBulkCategoryModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var returnValue = true; serviceMock.Setup(x => x.ProcessClientCompanyContactCategories(It.IsAny<ClientCompanyContactBulkCategoryModel>())).Returns(returnValue); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); var errorMessage = "An error has occurred"; controller.ModelState.AddModelError("Error", errorMessage); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ResponseModel); // When var response = controller.ProcessClientCompanyContactCategories(model); var result = response as BadRequestObjectResult; // Then result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void GetClientCompanyContact_Success_With_Valid_ClientCompanyContactId_Input() { //Arrange var responseModel = new ClientCompanyContactResponseModel() { CompanyContactModel = new ClientCompanyContactModel() { ID = 1, ContactForename = "Test", ContactSurname = "Tester" } }; var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 1, AuthUsertId = 0 }; var serviceMock = new Mock<IClientCompanyService>(); serviceMock.Setup(x => x.GetClientCompanyContact(It.IsAny<ClientCompanyContactSearchContext>())) .Returns(responseModel); var controller = new ClientCompanyContactController(serviceMock.Object, null); controller.ControllerContext = new 
ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var response = controller.GetClientCompanyContact(clientCompanyContactSearchContext); var expectedResultType = typeof(OkObjectResult); var result = response as OkObjectResult; //Assert result.Should().BeOfType<OkObjectResult>().And.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.Should().NotBeNull(); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.ID.Should().Be(1); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.ContactForename.Should().Be("Test"); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.ContactSurname.Should().Be("Tester"); } [Fact] public void GetClientCompanyContact_Success_With_Valid_AuthUsertId_Input() { //Arrange var responseModel = new ClientCompanyContactResponseModel() { CompanyContactModel = new ClientCompanyContactModel() { ID = 1, ContactForename = "Test", ContactSurname = "Tester" } }; var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 0, AuthUsertId = 1 }; var serviceMock = new Mock<IClientCompanyService>(); serviceMock.Setup(x => x.GetClientCompanyContact(It.IsAny<ClientCompanyContactSearchContext>())) .Returns(responseModel); var controller = new ClientCompanyContactController(serviceMock.Object, null); controller.ControllerContext = new ControllerContext(); controller.ControllerContext.HttpContext = new DefaultHttpContext(); controller.ControllerContext.HttpContext.Request.Scheme = "test"; //Act var response = controller.GetClientCompanyContact(clientCompanyContactSearchContext); var expectedResultType = typeof(OkObjectResult); var result = response as OkObjectResult; //Assert result.Should().BeOfType<OkObjectResult>().And.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.Should().NotBeNull(); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.ID.Should().Be(1); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.ContactForename.Should().Be("Test"); ((ClientCompanyContactResponseModel)result.Value).CompanyContactModel.ContactSurname.Should().Be("Tester"); } [Fact] public void GetClientCompanyContact_Not_Found() { //Arrange var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var errorMessage = new ClientCompanyContactResponseModel { ResponseMessages = new Dictionary<string, string[]> { { "Errors", new string[] {$"Client Company Contact with ID 1 could not be retrieved."} } } }; var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 0, AuthUsertId = 0 }; serviceMock.Setup(x => x.GetClientCompanyContact(It.IsAny<ClientCompanyContactSearchContext>())) .Returns(new ClientCompanyContactResponseModel()); serviceMock.Setup(x => x.GetErrorMessages(It.IsAny<HttpStatusCode>(), It.IsAny<Exception>(), It.IsAny<ClientCompanyContactSearchContext>())) .Returns(errorMessage); var controller = new ClientCompanyContactController(serviceMock.Object, loggerMock.Object); //Act var response = controller.GetClientCompanyContact(clientCompanyContactSearchContext); var expectedResultType = typeof(NotFoundObjectResult); var result = response as NotFoundObjectResult; //Assert result.Should().NotBeNull(); 
result.Should().BeOfType(expectedResultType); ((ClientCompanyContactResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ClientCompanyContactResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ClientCompanyContactResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void GetClientCompanyContact_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var service = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var controller = new ClientCompanyContactController(service.Object, loggerMock.Object); var errorMessage = new ClientCompanyContactResponseModel { ResponseMessages = new Dictionary<string, string[]> { { "Errors", new string[] {$"Client Company Contact with ID 1 could not be retrieved."} } } }; var clientCompanyContactSearchContext = new ClientCompanyContactSearchContext() { ClientCompanyContactId = 0, AuthUsertId = 0 }; service.Setup(x => x.GetClientCompanyContact(It.IsAny<ClientCompanyContactSearchContext>())) .Throws(new Exception("An error has occurred")); service.Setup(x => x.GetErrorMessages(It.IsAny<HttpStatusCode>(), It.IsAny<Exception>(), It.IsAny<ClientCompanyContactSearchContext>())) .Returns(errorMessage); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ClientCompanyContactResponseModel); //Act var response = controller.GetClientCompanyContact(clientCompanyContactSearchContext); var result = response as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ClientCompanyContactResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ClientCompanyContactResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ClientCompanyContactResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } [Fact] public void GetCompanyContactList_Success_With_Valid_ClientCompanyContactId_Input() { //Arrange var responseModel = new ClientCompanyContactListResponseModel() { CompanyContactListModel = new List<ClientCompanyContactList>() { new ClientCompanyContactList() { ID = 1, ContactForename = "Test", ContactSurname = "Tester" } }, Succeeded = true }; var applicationServiceUserList = new ClientCompanyContactListResponseModel(); var serviceMock = new Mock<IClientCompanyService>(); serviceMock.Setup(x => x.GetCompanyContactList(It.IsAny<int>())) .Returns(responseModel); serviceMock.Setup(x => x.GetCompanyContactList(It.IsAny<int>())) .Returns(responseModel); var controller = new ClientCompanyController(serviceMock.Object, null); //Act var response = controller.GetCompanyContactList(1); var expectedResultType = typeof(OkObjectResult); var result = response as OkObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); ((ClientCompanyContactListResponseModel)result.Value).CompanyContactListModel.Should().NotBeNull(); ((ClientCompanyContactListResponseModel)result.Value).CompanyContactListModel.Count.Should().Be(1); ((ClientCompanyContactListResponseModel)result.Value).Succeeded.Should().Be(true); } [Fact] public void GetCompanyContactList_Returns_Not_Found() { //Arrange var applicationServiceUserList = new ClientCompanyContactListResponseModel(); var serviceMock = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); serviceMock.Setup(x => 
x.GetCompanyContactList(It.IsAny<int>())) .Returns(applicationServiceUserList); var controller = new ClientCompanyController(serviceMock.Object, loggerMock.Object); //Act var result = controller.GetCompanyContactList(1); var expectedResultType = typeof(OkObjectResult); //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); } [Fact] public void GetCompanyContactList_Failed_When_InvalidModel_With_Bad_Request() { //Arrange var service = new Mock<IClientCompanyService>(); var loggerMock = new Mock<ILogWrapper>(); var controller = new ClientCompanyController(service.Object, loggerMock.Object); var errorMessage = new ClientCompanyContactListResponseModel { ResponseMessages = new Dictionary<string, string[]> { { "Errors", new string[] {$"Client Company Contact with ID 1 could not be retrieved."} } } }; service.Setup(x => x.GetCompanyContactList(It.IsAny<int>())) .Throws(new Exception("An error has occurred")); service.Setup(x => x.GetErrorMessagesForContactList(It.IsAny<HttpStatusCode>(), It.IsAny<Exception>(), It.IsAny<int>())) .Returns(errorMessage); var expectedStatusCode = HttpStatusCode.BadRequest; var expectedResultType = typeof(BadRequestObjectResult); var expectedType = typeof(ClientCompanyContactListResponseModel); //Act var response = controller.GetCompanyContactList(1); var result = response as BadRequestObjectResult; //Assert result.Should().NotBeNull(); result.Should().BeOfType(expectedResultType); result.StatusCode.Should().Be((int)expectedStatusCode); result.Value.Should().BeOfType(expectedType); ((ClientCompanyContactListResponseModel)result.Value).ResponseMessages.Should().NotBeEmpty(); ((ClientCompanyContactListResponseModel)result.Value).ResponseMessages.Should().ContainKey("Errors"); ((ClientCompanyContactListResponseModel)result.Value).ResponseMessages.Should().HaveCount(1); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/ComplianceQuestionnaire.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class ComplianceQuestionnaire { public int Id { get; set; } public int ClientCompanyComplianceId { get; set; } public int ComplianceQuestionnaireQuestionId { get; set; } public int ComplianceQuestionnaireAnswerId { get; set; } public bool? IsFirstTimeSelect { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public ClientCompanyCompliance ClientCompanyCompliance { get; set; } public ComplianceQuestionnaireAnswer ComplianceQuestionnaireAnswer { get; set; } public ComplianceQuestionnaireQuestion ComplianceQuestionnaireQuestion { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/ClientCompanies/ClientCompanyAccountDto.cs namespace Argentex.Core.Api.Models.ClientCompanies { public class ClientCompanyAccountDto { public int ClientCompanyOpiId { get; set; } public int ClientCompanyId { get; set; } public string AccountName { get; set; } public string AccountNumber { get; set; } public string Currency { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/CassRecs.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class CassRecs { public int Id { get; set; } public int? CassRecsStatementFileId { get; set; } public DateTime CassRecsDate { get; set; } public string CurrencyCode { get; set; } public decimal LastNightsClosingLedger { get; set; } public int? 
Check1ByAuthUserId { get; set; } public DateTime? Check1UpdatedDateTime { get; set; } public int? Check2ByAuthUserId { get; set; } public DateTime? Check2UpdatedDateTime { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } public int? CompletedByAuthUserId { get; set; } public DateTime? CompletedDateTime { get; set; } public CassRecsStatementFile CassRecsStatementFile { get; set; } public AuthUser Check1ByAuthUser { get; set; } public AuthUser Check2ByAuthUser { get; set; } public AuthUser CompletedByAuthUser { get; set; } public AuthUser UpdatedByAuthUser { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/AppUserNotification.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class AppUserNotification { public int Id { get; set; } public int AppUserId { get; set; } public int ClientCompanyId { get; set; } public bool TradeNotifications { get; set; } public bool InwardPaymentNotifications { get; set; } public bool OutwardPaymentNotifications { get; set; } public bool SettlementRequests { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Identity.DataAccess/Models/Token.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Text; namespace Argentex.Core.Identity.DataAccess { public class Token { #region Properties [Key] [Required] public int Id { get; set; } public string ClientId { get; set; } public int? Type { get; set; } [Required] public string Value { get; set; } [Required] public long UserId { get; set; } [Required] public DateTime CreatedDate { get; set; } [Required] public DateTime LastModifiedDate { get; set; } #endregion #region Lazy-Load Properties /// <summary> /// The user related to this token /// </summary> [ForeignKey(nameof(UserId))] public virtual ApplicationUser User { get; set; } #endregion } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.SignalRService/TraderActionsHub.cs using Microsoft.AspNetCore.SignalR; using System; using System.Threading.Tasks; namespace Argentex.Core.SignalRService { public class TraderActionsHub : Hub, ITraderActionsHub { private readonly IHubContext<TraderActionsHub> _context; public TraderActionsHub(IHubContext<TraderActionsHub> context) { _context = context; } public override async Task OnConnectedAsync() { await Groups.AddToGroupAsync(Context.ConnectionId, "SignalR Users"); await base.OnConnectedAsync(); } public override async Task OnDisconnectedAsync(Exception exception) { await Groups.RemoveFromGroupAsync(Context.ConnectionId, "SignalR Users"); await base.OnDisconnectedAsync(exception); } public async Task ContinueExecuteTrade(string model) { await _context.Clients.All.SendAsync("ContinueExecuteTrade", model); } public async Task ManageClientTrade(string model) { await _context.Clients.All.SendAsync("ManageClientTrade", model); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Order/OrderRequestModel.cs using Argentex.Core.Service.Attributes; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Service.Models.Order { public class OrderRequestModel { [Required] public int? AuthUserId { get; set; } [Required] public int? 
ClientCompanyId { get; set; } [Required(ErrorMessage = "At least one order must be supplied")] public ICollection<OrderModel> OrderModels { get; set; } } public class OrderModel { [Required] public int OrderIndex { get; set; } [DateRequired(ErrorMessage = "Value date cannot be empty")] public DateTime ValueDate { get; set; } public DateTime? ValidityDate { get; set; } [DecimalRequiredAttribute(ErrorMessage = "Client rate cannot be empty")] public decimal ClientRate { get; set; } [Required(ErrorMessage = "LHS currency cannot be empty")] public string LhsCcy { get; set; } [Required(ErrorMessage = "RHS currency cannot be empty")] public string RhsCcy { get; set; } [DecimalRequired(ErrorMessage ="Client amount must be 0 or greater")] public decimal ClientAmount { get; set; } [BoolRequired(ErrorMessage = "IsBuy must be boolean")] public bool IsBuy { get; set; } [BoolRequired(ErrorMessage="IsRhsMajor must be boolean")] public bool IsRhsMajor { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Monitoring/MonitoringController.cs using Argentex.Core.Service.Models.Trades; using Argentex.Core.Service.Monitoring; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using System.Threading.Tasks; namespace Argentex.Core.Api.Controllers.Monitoring { [Route("api/monitoring")] [ApiController] public class MonitoringController : ControllerBase { private readonly IMonitoringService _monitoringService; public MonitoringController(IMonitoringService monitoringService) { _monitoringService = monitoringService; } [HttpGet] [Route("notify-trade-started/{authUserId:int}")] public async Task<IActionResult> NotifyTradeStarted(int authUserId) { var success = await _monitoringService.NotifyTradeStarted(authUserId); if (!success) return BadRequest(); return Ok(); } [HttpPost] [Route("check-execute-trade")] public async Task<IActionResult> CheckExecuteTrade([FromBody] TradeNotificationModel model) { if (!ModelState.IsValid) return BadRequest(); return Ok(await _monitoringService.CheckExecuteTrade(model)); } [HttpGet] [Route("refresh-client-details")] public async Task<IActionResult> RefreshClientDetails() { if (!ModelState.IsValid) return BadRequest(); await _monitoringService.RefreshClientDetails(); return Ok(); } } }<file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/Quotes/QuoteRequestDto.cs using Argentex.Core.Service.Attributes; using System; using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Api.Models.Quotes { public class QuoteRequestDto { [Range(1, int.MaxValue)] public int ClientCompanyId { get; set; } [Required] [StringLength(3)] public string LeftCurrency { get; set; } [Required] [StringLength(3)] public string RightCurrency { get; set; } public bool IsBuy { get; set; } [Required] public decimal Amount { get; set; } [DateRequired] public DateTime ValueDate { get; set; } [BoolRequired] public bool IsRhsMajor { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/ClientSiteAction/CSASwapsModel.cs using System; namespace Argentex.Core.Service.Models.ClientSiteAction { public class CSASwapsModel { public long ActionID { get; set; } public int ClientCompanyID { get; set; } public string ClientCompanyName { get; set; } public string CreatedByClientName { get; set; } public string FXForwardTradeCode { get; set; } public DateTime? ValueDate { get; set; } public decimal? SellAmount { get; set; } public decimal? 
BuyAmount { get; set; } public decimal Rate { get; set; } public string CurrencyPair { get; set; } public string ActionStatus { get; set; } public int ActionStatusID { get; set; } public DateTime ActionCreatedDateTime { get; set; } public bool IsParentTrade { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Email/IEmailService.cs using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Trades; using Microsoft.AspNetCore.Identity; using System.Collections.Generic; using System.Threading.Tasks; namespace Argentex.Core.Service { public interface IEmailService { Task<IdentityResult> SendUserNewPasswordEmailAsync(string userName, string clientCompanyName); Task<IdentityResult> SendResetPasswordEmailAsync(string email); Task<IdentityResult> SendPasswordChangedEmailAsync(string email); Task<IdentityResult> SendEmailToDirectorsForApproval(); Task SendTradeReceiptEmailAsync(FxForwardTradeInformationModel trade); Task SendBrokerTradeNoteEmailAsync(BrokerTradeNoteModel trade); Task SendFailedFIXTradeEmailAsync(FailedFIXTradeModel model); Task SendOrderNoteEmailAsync(OrderNoteModel model); Task SendDealerOrderNoteEmailAsync(OrderNoteModel model); Task SendCancelOrderEmailAsync(CancelOrderModel model); Task SendSettlementEmailsAsync(IList<SettlementNoteModel> modelList, List<string> emailList); Task SendInwardPaymentEmailAsync(PaymentNotificationModel model, IEnumerable<string> emailList); Task SendOutwardPaymentEmailAsync(PaymentNotificationModel model, IEnumerable<string> emailList); Task SendMobileChangeEmailAsync(string email, string proposedValue); } } <file_sep>/agfx-fxdb-core-api-dmz/UnitsOfWork/ClientCompanies/IClientCompanyAccountsUoW.cs using Argentex.Core.DataAccess.Entities; using Synetec.Data.UnitOfWork.BaseUnitOfWork; using Synetec.Data.UnitOfWork.GenericRepo; using System.Collections.Generic; using System.Linq; namespace Argentex.Core.UnitsOfWork.ClientCompanies { public interface IClientCompanyAccountsUoW : IBaseUow { IGenericRepo<ClientCompany> ClientCompanyRepository { get; } IGenericRepo<ClientCompanyOpi> ClientCompanyOpiRepository { get; } IGenericRepo<Currency> CurrencyRepository { get; } IEnumerable<ClientCompanyOpi> GetClientCompanyAccounts(int clientCompanyId); IQueryable<ClientCompanyOpi> GetClientCompanyAccountQueryable(int clientCompanyOpiId); void AddClientCompanyOpi(ClientCompanyOpi clientCompanyOpi); IQueryable<ClearingCodePrefix> GetClearingPrefixCodes(); ClientCompanyOpi GetClientCompanyAccount(int opiId); void UpdateAccount(ClientCompanyOpi clientCompanyOpi); IQueryable<ClientCompanyCurrencyDefaultOpi> GetClientCompanyDefaultAccount(int clientCompanyId, int currencyId); void RemoveDefaultAccount(ClientCompanyCurrencyDefaultOpi account); void AddDefaultAccount(ClientCompanyCurrencyDefaultOpi defaultAccount); IEnumerable<VirtualAccountType> GetVirtualAccountType(string description); IEnumerable<ClientCompanyVirtualAccount> GetClientCompanyVirtualAccount(ClientCompany company, VirtualAccountType vat); IEnumerable<VirtualAccountTypeBankAccount> GetVirtualAccountTypeBankAccount(VirtualAccountType vat); IQueryable<FxforwardTrade2Opi> GetTradeOPIs(string tradeCode); void AddTradeOPI(FxforwardTrade2Opi fxforwardTrade2Opi); IList<long> GetSettlementIDs(int clientCompanyOpiId); int GetAssociatedTradesCount(int clientCompanyOpiId, int statusDeliveredID); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Enums/PaymentTypes.cs namespace Argentex.Core.Service.Enums { public enum PaymentTypes { In = 1, Out = 2, BankToBank = 3, 
InterVirtualAccount = 4, Reconciliation = 5, MtMCalc = 6 } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Trade/TradeService.cs using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.ClientSiteAction; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Enums; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Models.Email; using Argentex.Core.Service.Models.Fix; using Argentex.Core.Service.Models.Trade; using Argentex.Core.Service.Models.Trades; using Argentex.Core.Service.User; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.Trades; using System; using System.Collections.Generic; using System.Data; using System.Linq; using System.Threading.Tasks; namespace Argentex.Core.Service.Trade { public class TradeService : ITradeService { private const int DecimalPlaces = 2; private readonly ITradeUow _tradeUow; private readonly IClientCompanyAccountsUoW _clientCompanyAccountsUow; private readonly IBarxFxService _fixQuoteService; private readonly IEmailService _emailService; private readonly IConfigWrapper _config; private readonly IUserService _userService; private readonly ICurrencyService _currencyService; private readonly IAppSettingService _appSettingService; private readonly IClientCompanyUow _clientCompanyUow; private readonly IClientCompanyService _clientCompanyService; private readonly IClientSiteActionService _clientSiteActionService; private bool _disposed; public TradeService(ITradeUow tradeUow, IBarxFxService fixQuoteService, IEmailService emailService, IConfigWrapper config, IClientCompanyAccountsUoW clientCompanyAccountsUow, IUserService userService, ICurrencyService currencyService, IAppSettingService appSettingService, IClientCompanyUow clientCompanyUow, IClientCompanyService clientCompanyService, IClientSiteActionService clientSiteActionService) { _tradeUow = tradeUow; _clientCompanyAccountsUow = clientCompanyAccountsUow; _fixQuoteService = fixQuoteService; _emailService = emailService; _config = config; _userService = userService; _currencyService = currencyService; _appSettingService = appSettingService; _clientCompanyUow = clientCompanyUow; _clientCompanyService = clientCompanyService; _clientSiteActionService = clientSiteActionService; } public IList<TradeModel> GetUnsettledTrades(int clientCompanyId) { var table = _tradeUow.GetUnsettledTrades(clientCompanyId); var list = new List<TradeModel>(); for (int i = 0; i < table.Rows.Count; i++) { var row = table.Rows[i]; TradeModel tradeModel = MapTradeModelFromDataRow(row); list.Add(tradeModel); } list.Sort((x, y) => Nullable.Compare(y.ValueDate, x.ValueDate)); return list; } public IEnumerable<string> GetCurrencyCodes() { return _tradeUow.GetCurrencies() .Select(x => x.Code) .ToList(); } public IEnumerable<string> GetAllowedCurrencyPairs() { var pairs = _tradeUow.GetCurrencyPairValidation() .Select(x => x.CurrencyPair) .ToList(); return pairs; } public async Task<FxForwardTradeInformationModel> GetTradeNote(string tradeCode) { // getting trade data var tradeNote = GetTradeInformation(tradeCode); // getting broker data var brokerTradeNote = GetBrokerTradeInformation(tradeCode); // sending email to client await _emailService.SendTradeReceiptEmailAsync(tradeNote); // sending email to broker and dealer await _emailService.SendBrokerTradeNoteEmailAsync(brokerTradeNote); return 
tradeNote; } public FxForwardTradeInformationModel GetTradeInformation(string tradeCode) { var tradeInformation = _tradeUow .GetTrade(tradeCode) .Select(trade => new FxForwardTradeInformationModel { InstructedBy = trade.AuthorisedByClientCompanyContactId.HasValue ? trade.AuthorisedByClientCompanyContact.Fullname : string.Empty, InstructedDateTime = trade.CreatedDate, Method = Enum.GetName(typeof(TradeInstructionMethods), trade.TradeInstructionMethodId.Value), TradeRef = trade.Code, SellCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, SellAmount = trade.IsBuy ? trade.ClientRhsamt.Value : trade.ClientLhsamt.Value, BuyCcy = trade.IsBuy ? trade.Lhsccy.Code : trade.Rhsccy.Code, BuyAmount = trade.IsBuy ? trade.ClientLhsamt.Value : trade.ClientRhsamt.Value, ValueDate = trade.ValueDate.Value, Rate = (double)trade.ClientRate.Value, Collateral = trade.IsBuy ? CalculateCollateralValue(trade.ClientRhsamt.Value, trade.CollateralPerc.Value) : CalculateCollateralValue(trade.ClientLhsamt.Value, trade.CollateralPerc.Value), CollateralCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, CurrencyPair = trade.CurrencyPair, ClientEmail = trade.AuthorisedByClientCompanyContactId.HasValue ? trade.AuthorisedByClientCompanyContact.Email : string.Empty, ClientCompany = trade.AuthorisedByClientCompanyContact.ClientCompany, //Must be reworked to allow multiple OPIs SettlementAccountDetails = GetClientCompanyOpi(trade) }).SingleOrDefault(); if (tradeInformation == null) throw new TradeNotFoundException($"Trade with code {tradeCode} does not exist"); return tradeInformation; } public BrokerTradeNoteModel GetBrokerTradeInformation(string tradeCode) { var tradeInformation = _tradeUow .GetTrade(tradeCode) .Select(trade => new BrokerTradeNoteModel { InstructedBy = trade.AuthorisedByClientCompanyContactId.HasValue ? trade.AuthorisedByClientCompanyContact.Fullname : string.Empty, InstructedDateTime = trade.CreatedDate, Method = Enum.GetName(typeof(TradeInstructionMethods), trade.TradeInstructionMethodId.Value), TradeCode = trade.Code, SellCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code, SellAmount = trade.IsBuy ? trade.ClientRhsamt.Value : trade.ClientLhsamt.Value, BuyCcy = trade.IsBuy ? trade.Lhsccy.Code : trade.Rhsccy.Code, BuyAmount = trade.IsBuy ? trade.ClientLhsamt.Value : trade.ClientRhsamt.Value, ValueDate = trade.ValueDate.Value, Rate = (double)trade.BrokerRate.Value, ClientCompany = trade.AuthorisedByClientCompanyContact.ClientCompany, SettlementAccountDetails = trade.ClientCompanyOpi, Broker = trade.Broker, Collateral = trade.IsBuy ? CalculateCollateralValue(trade.ClientRhsamt.Value, trade.CollateralPerc.Value) : CalculateCollateralValue(trade.ClientLhsamt.Value, trade.CollateralPerc.Value), CollateralCcy = trade.IsBuy ? 
trade.Rhsccy.Code : trade.Lhsccy.Code, CurrencyPair = trade.CurrencyPair }).SingleOrDefault(); if (tradeInformation == null) { throw new TradeNotFoundException($"Trade with code {tradeCode} does not exist"); } else if (tradeInformation.ClientCompany != null && tradeInformation.ClientCompany.DealerAppUserId > 0) { // getting app user from fxdb var appUser = _userService.GetFXDBAppUserById((int)tradeInformation.ClientCompany.DealerAppUserId); if(appUser != null) { // getting auth user from fxdb var authUser = _userService.GetAuthUserById(appUser.AuthUserId); tradeInformation.DealerAuthUser = authUser; } } return tradeInformation; } private string GetGeneratedTradeCode(int clientCompanyId) { var tradeCount = _tradeUow.GetClientCompanyTradeCount(clientCompanyId) .Select(x => x.TradeCount) .SingleOrDefault(); return CodeBuilder.FormatTradeCode(tradeCount + 1, clientCompanyId); } public async Task<IList<FixQuoteResponseModel>> GetQuotesAsync(QuoteRequestModel quoteRequest) { List<FixQuoteResponseModel> responseList = new List<FixQuoteResponseModel>(); foreach (var request in quoteRequest.QuoteModels) { var tradeCode = GetGeneratedTradeCode(quoteRequest.ClientCompanyId.Value); var quoteRequestModel = CreateFixQuoteRequestModel(request, tradeCode); var response = await _fixQuoteService.GetQuoteAsync(quoteRequestModel); if (response == null) throw new FixQuoteException("Cannot get quote"); response.QuoteIndex = request.QuoteIndex; response.ClientRate = CalculateClientRate(quoteRequest.ClientCompanyId.Value, request, response.BrokerRate); responseList.Add(response); } return responseList; } private decimal CalculateClientRate(int clientCompanyId, QuoteModel request, decimal brokerRate) { string currency1 = string.Empty; string currency2 = string.Empty; //determine the Buy and Sell to get the correct spread if (request.IsRhsMajor) { if (request.IsBuy) { currency1 = request.RhsCcy; currency2 = request.LhsCcy; } else { currency1 = request.LhsCcy; currency2 = request.RhsCcy; } } else { if (request.IsBuy) { currency1 = request.LhsCcy; currency2 = request.RhsCcy; } else { currency1 = request.RhsCcy; currency2 = request.LhsCcy; } } int spread = _clientCompanyService.GetClientCompanySpread(clientCompanyId, currency1, currency2, request.IsBuy, request.ValueDate, request.ContractDate); decimal clientRate = brokerRate; if (request.IsBuy) { clientRate = brokerRate + ((decimal)spread / 10000 * brokerRate); } else { clientRate = brokerRate - ((decimal)spread / 10000 * brokerRate); } return clientRate; } public async Task<IList<DealResponseModel>> Deal(DealRequestModel dealRequest) { List<DealResponseModel> responseList = new List<DealResponseModel>(); foreach (var deal in dealRequest.DealModels) { DealResponseModel response = await GetDealResponse(deal, dealRequest.AuthUserId.Value, dealRequest.ClientCompanyId.Value); response.TradeIndex = deal.TradeIndex; responseList.Add(response); } return responseList; } private async Task<DealResponseModel> GetDealResponse(DealModel deal, int authUserId, int clientCompanyId) { var dealResponse = new DealResponseModel { IsSuccessful = false, ErrorMessage = "" }; if (deal.ExpirationDateTime.AddMinutes(2) <= DateTime.UtcNow) //TODO Remove the AddMinutes and modify the Fix Acceptor instead. 
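            // (The check below treats a quote as expired only once two minutes have passed beyond its
            // ExpirationDateTime, i.e. a short grace period; see the TODO above.)
            //
            // Illustrative note on CalculateClientRate above, with assumed figures that are not taken from the
            // source: the stored spread is applied proportionally as spread / 10000 * brokerRate. With a broker
            // rate of 1.2500 and a spread of 25, a client buy would be priced at 1.2500 + (25m / 10000) * 1.2500
            // = 1.253125, and a client sell at 1.2500 - 0.003125 = 1.246875.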
            {
                dealResponse.ErrorMessage = "Quote has expired.";
                return dealResponse;
            }

            var tradeCountObject = _tradeUow.GetTradeCountByPrimaryKey(clientCompanyId);
            tradeCountObject.TradeCount++;

            dealResponse.Code = CodeBuilder.FormatTradeCode(tradeCountObject.TradeCount, clientCompanyId);

            var filledtradeEntity = CreateFilledTradeEntity(deal, dealResponse.Code, clientCompanyId, authUserId);

            dealResponse.IsSuccessful = _tradeUow.CreateDeal(filledtradeEntity, tradeCountObject);

            if (!dealResponse.IsSuccessful)
            {
                dealResponse.ErrorMessage = "Database error while creating Deal";
                return dealResponse;
            }

            bool isFixSuccessful = await FixExecuteQuote(deal, dealResponse, filledtradeEntity);

            FxforwardTrade brokerdtradeEntity = null;

            if (isFixSuccessful)
            {
                brokerdtradeEntity = CreateBrokeredTradeEntity(dealResponse.Code, authUserId);
                dealResponse.IsSuccessful = _tradeUow.BrokerDeal(brokerdtradeEntity, tradeCountObject);

                if (!dealResponse.IsSuccessful)
                    dealResponse.ErrorMessage = "Database error while creating Deal";
            }
            else
            {
                // FIX execution failed: log a client site action, notify by email and clear the trade code
                var failedFIXTradeModel = GetFailedFIXTradeInfo(filledtradeEntity.Code);

                _clientSiteActionService.LogActionUnconfirmedTrade(authUserId, failedFIXTradeModel.TradeCode);

                await _emailService.SendFailedFIXTradeEmailAsync(failedFIXTradeModel);

                dealResponse.Code = "";
            }

            if (dealResponse.IsSuccessful)
            {
                SetClientCompanyTradeDetails(brokerdtradeEntity, clientCompanyId, authUserId);
            }

            return dealResponse;
        }

        /// <summary>
        /// Gets the trade information used in the failed-FIX notification email.
        /// </summary>
        /// <param name="tradeCode">Unique Id for Order/Trade</param>
        /// <returns>The populated notification model</returns>
        private FailedFIXTradeModel GetFailedFIXTradeInfo(string tradeCode)
        {
            var tradeInformation = _tradeUow
                .GetTrade(tradeCode)
                .Select(trade => new FailedFIXTradeModel
                {
                    TradeCode = trade.Code,
                    SellCcy = trade.IsBuy ? trade.Rhsccy.Code : trade.Lhsccy.Code,
                    BuyCcy = trade.IsBuy ? trade.Lhsccy.Code : trade.Rhsccy.Code,
                    ValueDate = trade.ValueDate.Value,
                    Rate = (double?)trade.ClientRate ?? 0,
                    ClientCompany = trade.AuthorisedByClientCompanyContact.ClientCompany,
                }).SingleOrDefault();

            // Check for a missing trade before touching the model (the original assigned CurrencyPair first,
            // which would throw a NullReferenceException instead of the intended TradeNotFoundException)
            if (tradeInformation == null)
                throw new TradeNotFoundException($"Trade with code {tradeCode} does not exist");

            tradeInformation.CurrencyPair = $"{tradeInformation.SellCcy}/{tradeInformation.BuyCcy}";

            return tradeInformation;
        }

        private async Task<bool> FixExecuteQuote(DealModel deal, DealResponseModel dealResponse, FxforwardTrade tradeEntity)
        {
            try
            {
                var dealRequestModel = CreateFixNewOrderRequestModel(deal, tradeEntity);

                var fixResponse = await _fixQuoteService.NewOrderSingleAsync(dealRequestModel);

                if (fixResponse == null)
                {
                    dealResponse.ErrorMessage = "Deal not done due to an unexpected error, please try again";
                }
                else if (!string.IsNullOrEmpty(fixResponse.ErrorMessage))
                {
                    dealResponse.ErrorMessage = fixResponse.ErrorMessage;
                    dealResponse.IsSuccessful = false;
                }
                else
                {
                    dealResponse.BarclaysAssignedId = fixResponse.BarclaysAssignedId;
                    dealResponse.BarclaysTradeId = fixResponse.BarclaysTradeId;
                    //dealResponse.IsSuccessful comes up as true
                }
            }
            catch (Exception)
            {
                //notify user and/or send email
                dealResponse.ErrorMessage = "Deal not done due to an unexpected error, please try again";
                dealResponse.IsSuccessful = false;
            }

            return dealResponse.IsSuccessful;
        }

        private FxforwardTrade CreateFilledTradeEntity(DealModel dealModel, string code, int clientCompanyId, int authUserId)
        {
            var pendingStatus = _tradeUow.GetEmirStatus("Pending");
            var filledStatus = _tradeUow.GetFxForwardStatus("Filled");
            var tradeInstructionMethod = _tradeUow.GetTradeInstructionMethod("Online");

            var tradeModel = new FxforwardTrade()
            {
                Code = code,
                CreatedDate = DateTime.Now,
                CreatedByAuthUserId = authUserId,
                ClientCompanyId = clientCompanyId,
                AuthorisedByClientCompanyContactId = GetClientCompanyContactId(authUserId),
                ContractDate = DateTime.Now,
                ContractNoteSentToClientDateTime = DateTime.Now,
                ValueDate = dealModel.ValueDate,
                IsOrder = false,
                CurrencyPair = (dealModel.LhsCcy + dealModel.RhsCcy).ToUpper(),
                IsBuy = dealModel.IsBuy,
                Lhsccyid = _currencyService.GetCurrencyId(dealModel.LhsCcy),
                Rhsccyid = _currencyService.GetCurrencyId(dealModel.RhsCcy),
                ClientRate = dealModel.Rate,
                BrokerRate = dealModel.BrokerRate,
                CollateralPerc = 0,
                UpdatedByAuthUserId = authUserId,
                IsRhsmajor = dealModel.IsRhsMajor,
                ProfitConsolidated = false,
                Deleted = false,
                EmirReported = false,
                IsComplianceSupported = true,
                IsComplianceRegulated = false,
                EmirDelegatedReported = false,
                IsEmirreportable = true,
                IsApareportable = true,
                IsArmreportable = true,
                Armreported = false,
                EmirstatusId = pendingStatus.Id,
                ApastatusId = pendingStatus.Id,
                ArmstatusId = pendingStatus.Id,
                TradeInstructionMethodId = tradeInstructionMethod.Id,
                FxforwardTradeStatusId = filledStatus.Id,
                FilledByAuthUserId = authUserId,
                FilledDateTime = DateTime.Now
            };

            //determine the default account currency needed
            int?
currencyId = null; if (tradeModel.IsRhsmajor.HasValue) { if (tradeModel.IsRhsmajor.Value) { if (tradeModel.IsBuy) { currencyId = tradeModel.Rhsccyid; } else { currencyId = tradeModel.Lhsccyid; } } else { if (tradeModel.IsBuy) { currencyId = tradeModel.Lhsccyid; } else { currencyId = tradeModel.Rhsccyid; } } } if (currencyId.HasValue) { var clientCompanyDefaultAccount = _clientCompanyService.GetClientCompanyDefaultAccount(clientCompanyId, currencyId.Value); if (clientCompanyDefaultAccount != null) { tradeModel.ClientCompanyOpiid = clientCompanyDefaultAccount.ClientCompanyOpiId; } } CalculateAmountsAndProfit(dealModel, tradeModel); return tradeModel; } private FxforwardTrade CreateBrokeredTradeEntity(string code, int authUserId) { var brokeredStatus = _tradeUow.GetFxForwardStatus("Brokered"); var broker = _tradeUow.GetBroker("Barclays"); FxforwardTrade trade = _tradeUow.GetTrade(code).FirstOrDefault(); trade.FxforwardTradeStatusId = brokeredStatus.Id; trade.EmirUti = _appSettingService.GetEmirUtiCode() + code.Replace("-", string.Empty).Replace("/", ""); trade.BrokeredDate = DateTime.Now; trade.BrokerId = broker.Id; trade.Verified = true; trade.VerifiedByAuthUserId = authUserId; return trade; } private int GetClientCompanyContactId(int authUserId) { return _userService .GetApplicationUserByAuthUserId(authUserId) .ClientCompanyContactId; } private void CalculateAmountsAndProfit(DealModel dealModel, FxforwardTrade tradeModel) { if (dealModel.IsRhsMajor) { tradeModel.ClientLhsamt = decimal.Round(dealModel.Amount / tradeModel.ClientRate.Value, DecimalPlaces); tradeModel.BrokerLhsamt = decimal.Round(dealModel.Amount / tradeModel.BrokerRate.Value, DecimalPlaces); tradeModel.ClientRhsamt = dealModel.Amount; tradeModel.BrokerRhsamt = dealModel.Amount; tradeModel.Profit = tradeModel.IsBuy ? tradeModel.BrokerLhsamt - tradeModel.ClientLhsamt : tradeModel.ClientLhsamt - tradeModel.BrokerLhsamt; } else { tradeModel.ClientLhsamt = dealModel.Amount; tradeModel.BrokerLhsamt = dealModel.Amount; tradeModel.ClientRhsamt = decimal.Round(dealModel.Amount * tradeModel.ClientRate.Value, DecimalPlaces); tradeModel.BrokerRhsamt = decimal.Round(dealModel.Amount * tradeModel.BrokerRate.Value, DecimalPlaces); tradeModel.Profit = tradeModel.IsBuy ? tradeModel.ClientRhsamt - tradeModel.BrokerRhsamt : tradeModel.BrokerRhsamt - tradeModel.ClientRhsamt; } tradeModel.RemainingClientLhsamt = tradeModel.ClientLhsamt; tradeModel.RemainingClientRhsamt = tradeModel.ClientRhsamt; } private FixQuoteRequestModel CreateFixQuoteRequestModel(QuoteModel quoteRequest, string tradeCode) { var model = new FixQuoteRequestModel { TradeCode = tradeCode, LHSCCY = quoteRequest.LhsCcy, RHSCCY = quoteRequest.RhsCcy, MajorCurrency = quoteRequest.IsRhsMajor ? quoteRequest.RhsCcy : quoteRequest.LhsCcy, Side = quoteRequest.IsBuy ? 1 : 2, BrokerMajorAmount = quoteRequest.Amount, ValueDate = quoteRequest.ValueDate.ToString("yyyy-MM-dd"), TimeOut = _appSettingService.GetFixTimeout(), Duration = _appSettingService.GetStreamingDuration() }; return model; } private FixNewOrderRequestModel CreateFixNewOrderRequestModel(DealModel deal, FxforwardTrade trade) { var model = new FixNewOrderRequestModel { TradeCode = trade.Code, QuoteId = deal.QuoteId, QuoteReqId = deal.QuoteReqId, Price = trade.BrokerRate.Value, ClientPrice = trade.ClientRate.Value, LHSCCY = deal.LhsCcy, RHSCCY = deal.RhsCcy, MajorCurrency = trade.IsRhsmajor.Value ? deal.RhsCcy : deal.LhsCcy, Side = trade.IsBuy ? 1 : 2, BrokerMajorAmount = trade.IsRhsmajor.Value ? 
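            // Worked example for CalculateAmountsAndProfit above, continuing the illustrative figures from the
            // note on CalculateClientRate (assumed values, not from the source): with IsRhsMajor = true,
            // IsBuy = true, Amount = 100,000 on the RHS, ClientRate = 1.253125 and BrokerRate = 1.25, the code
            // produces ClientLhsamt = 100,000 / 1.253125 = 79,800.50 and BrokerLhsamt = 100,000 / 1.25 = 80,000.00
            // (both RHS amounts stay at 100,000), giving Profit = BrokerLhsamt - ClientLhsamt = 199.50 in the
            // LHS currency for a buy.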
trade.BrokerRhsamt.Value : trade.BrokerLhsamt.Value, ValueDate = deal.ValueDate.ToString("yyyy-MM-dd"), TimeOut = _appSettingService.GetFixTimeout(), Duration = _appSettingService.GetStreamingDuration() }; return model; } public ClientCompanyOpi GetClientCompanyOpi(FxforwardTrade trade) { return trade.FxforwardTrade2Opi.FirstOrDefault()?.ClientCompanyOpi ?? null; } public bool SetTradeDefaultOPI(string tradeCode, int clientCompanyId, bool setAsDefault) { bool isSuccessful = false; bool updateTrade = false; FxforwardTrade trade = _tradeUow.GetTrade(tradeCode, true); Currency buyCCY = !trade.IsBuy ? trade.Rhsccy : trade.Lhsccy; ClientCompanyCurrencyDefaultOpi clientCompanyCurrencyDefaultOpi = _clientCompanyAccountsUow.GetClientCompanyDefaultAccount(clientCompanyId, buyCCY.Id).SingleOrDefault(); if (clientCompanyCurrencyDefaultOpi != null) { if (setAsDefault) { trade.ClientCompanyOpi = clientCompanyCurrencyDefaultOpi.ClientCompanyOpi; updateTrade = true; } else { //check if the default OPI is assigned //if assigned, remove it if (trade.ClientCompanyOpi != null && trade.ClientCompanyOpi.Id == clientCompanyCurrencyDefaultOpi.ClientCompanyOpi.Id) { trade.ClientCompanyOpi = null; updateTrade = true; } } } if (updateTrade) { _tradeUow.UpdateTrade(trade); isSuccessful = true; } return isSuccessful; } public IList<TradeModel> GetClosedTrades(int clientCompanyId) { var table = _tradeUow.GetClosedTrades(clientCompanyId); var list = new List<TradeModel>(); for (int i = 0; i < table.Rows.Count; i++) { var row = table.Rows[i]; TradeModel tradeModel = MapTradeModelFromDataRow(row); list.Add(tradeModel); } return list; } private static TradeModel MapTradeModelFromDataRow(DataRow row) { var tradeModel = new TradeModel() { TradeId = (string)row["Code"], ContractDate = (DateTime)row["ContractDate"], ValueDate = (DateTime)row["ValueDate"], ClientRate = (decimal)row["ClientRate"], SellCcy = (string)row["SellCurrencyCode"], BuyCcy = (string)row["BuyCurrencyCode"], ClientBuyAmount = (decimal)row["BuyClientAmount"], ClientSellAmount = (decimal)row["SellClientAmount"], IsFullPayment = true }; if (row.Table.Columns.Contains("MajorCCYCode")) { tradeModel.MajorCcy = (string)row["MajorCCYCode"]; } if (row.Table.Columns.Contains("Reference")) { tradeModel.Reference = row["Reference"] == DBNull.Value ? string.Empty : (string)row["Reference"]; } if (row.Table.Columns.Contains("RemainingVolume")) { tradeModel.Balance = (decimal)row["RemainingVolume"]; } if (row.Table.Columns.Contains("FXForwardTradeStatusClientDescription")) { tradeModel.Status = row["FXForwardTradeStatusClientDescription"] == DBNull.Value ? string.Empty : (string)row["FXForwardTradeStatusClientDescription"]; } else { if (row.Table.Columns.Contains("FXForwardTradeStatusDescription")) { tradeModel.Status = row["FXForwardTradeStatusDescription"] == DBNull.Value ? 
string.Empty : (string)row["FXForwardTradeStatusDescription"]; } } if (row.Table.Columns.Contains("IsDefaultOPI")) { tradeModel.PayToDefaultOPI = (bool)row["IsDefaultOPI"]; } return tradeModel; } private static decimal CalculateCollateralValue(decimal value, decimal collateralPercentage) { return value * (collateralPercentage / 100); } private void SetClientCompanyTradeDetails(FxforwardTrade trade, int clientCompanyId, int authUserId) { //update the client company Qualified Trade (if needed) ClientCompany clientCompany = _clientCompanyUow.GetClientCompany(clientCompanyId).SingleOrDefault(); if (clientCompany.AssignNewTrades.HasValue && clientCompany.AssignNewTrades.Value) { IList<Models.Currencies.CurrencyModel> currencies = _currencyService.GetCurrencies().ToList(); if (CheckTradeValueThreshold(trade, currencies)) { if(string.IsNullOrWhiteSpace(clientCompany.QualifiedNewTradeCode)) { _clientCompanyUow.UpdateCompanyQualifiedTradeDetails(clientCompanyId, trade.Code, authUserId); } } } //Update client company first trade date if necessary _clientCompanyUow.UpdateCompanyFirstTradeDate(clientCompanyId, authUserId); //Update client company Last Contract Date _clientCompanyUow.UpdateCompanyLastContractDate(clientCompanyId, trade.ContractDate, authUserId); } /// <summary> /// Check if the value of the given trade is over £100k /// </summary> /// <param name="trade"></param> /// <param name="currencies"></param> /// <returns></returns> private bool CheckTradeValueThreshold(FxforwardTrade trade, IList<Models.Currencies.CurrencyModel> currencies) { const decimal ThresholdValue = 100000m; const string GBP = "GBP"; decimal? valueGBP; string valueCCYCode = null; int valueCCYID; decimal? valueAmount; decimal? valueGBPRate; valueCCYID = trade.IsRhsmajor.HasValue ? trade.IsRhsmajor.Value ? trade.Rhsccyid.Value : trade.Lhsccyid.Value : 0; valueCCYCode = currencies.Where(x => x.Id == valueCCYID).SingleOrDefault().Code; valueAmount = trade.IsRhsmajor.HasValue ? trade.IsRhsmajor.Value ? trade.ClientRhsamt : trade.ClientLhsamt : null; valueGBPRate = trade.PrevailingRate2; if (string.Equals(valueCCYCode, GBP, StringComparison.InvariantCultureIgnoreCase)) { valueGBP = valueAmount ?? 0; } else { if (valueGBPRate == null) { double? currencyPairRate = _currencyService.GetCurrencyPairRate(GBP + valueCCYCode); valueGBPRate = currencyPairRate.HasValue ? Convert.ToDecimal(currencyPairRate) : (decimal?)null; } valueGBP = valueAmount / (valueGBPRate ?? 1); } return valueGBP.HasValue ? 
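            // Worked example for CheckTradeValueThreshold (assumed figures, not from the source): for a trade
            // whose major-side amount is 150,000 USD with no PrevailingRate2, the code falls back to
            // GetCurrencyPairRate("GBPUSD"); at an assumed rate of 1.25 the GBP value is 150,000 / 1.25 = 120,000,
            // which meets the 100,000 threshold, so SetClientCompanyTradeDetails above may record the trade as
            // the company's qualified new trade.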
valueGBP.Value >= ThresholdValue : false; } private IList<TradeModel> GetUnsettledTradesForBalanceCalculation(int clientCompanyId) { var table = _tradeUow.GetUnsettledTradesForBalanceCalculation(clientCompanyId); var list = new List<TradeModel>(); for (int i = 0; i < table.Rows.Count; i++) { var row = table.Rows[i]; TradeModel tradeModel = MapTradeModelFromDataRow(row); list.Add(tradeModel); } list.Sort((x, y) => Nullable.Compare(y.ValueDate, x.ValueDate)); return list; } public decimal GetTradeBalance(int clientCompanyId, string tradeCode) => GetUnsettledTradesForBalanceCalculation(clientCompanyId).Where(x => x.TradeId == tradeCode).Sum(x => x.Balance); protected virtual void Dispose(bool disposing) { if (!_disposed) { if (disposing) { _tradeUow?.Dispose(); _fixQuoteService?.Dispose(); } } _disposed = true; } public void Dispose() { Dispose(true); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Identity/ApproveUserChangeRequests.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Models.Identity { public class ApproveUserChangeRequests { public int UserChangeRequestID { get; set; } public int ApprovedByAuthUserId { get; set; } public string AuthApplicationName { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftincomingFileType.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftincomingFileType { public SwiftincomingFileType() { SwiftincomingFile = new HashSet<SwiftincomingFile>(); } public int Id { get; set; } public string Description { get; set; } public ICollection<SwiftincomingFile> SwiftincomingFile { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api.Tests/Payments/PaymentControllerTests.cs using Argentex.Core.Api.Controllers.Settlements; using Argentex.Core.Service.Models.Payments; using Argentex.Core.Service.Settlements; using Microsoft.AspNetCore.Mvc; using Moq; using SynetecLogger; using System.Net; using Xunit; namespace Argentex.Core.Api.Tests.Payments { public class PaymentControllerTests { [Fact] public void Given_A_Payment_Is_Returned_An_Ok_Object_Result_Should_Be_Returned() { // Given var paymentCode = "PC 42"; var paymentServiceMock = new Mock<ISettlementService>(); var loggerMock = new Mock<ILogWrapper>(); paymentServiceMock.Setup(x => x.GetPaymentInformation(It.IsAny<string>(), It.IsAny<bool>())).Returns(new PaymentInformationModel()); var controller = new SettlementController(paymentServiceMock.Object, loggerMock.Object); var expectedStatusCode = HttpStatusCode.OK; var expectedType = typeof(PaymentInformationModel); // When var response = controller.GetPaymentOutInformation(paymentCode); var result = response as OkObjectResult; // Then Assert.NotNull(result); Assert.Equal((int) expectedStatusCode, result.StatusCode); Assert.Equal(expectedType, result.Value.GetType()); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Service.Tests/Currencies/CurrencyServiceTests.cs using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Exceptions; using Argentex.Core.Service.Models.Currencies; using Argentex.Core.UnitsOfWork.Currencies; using Moq; using Synetec.Data.UnitOfWork.GenericRepo; using Xunit; namespace Argentex.Core.Service.Tests.Currencies { public class CurrencyServiceTests { [Fact] public void 
Given_Currency_Pair_Does_Not_Exist_An_Exception_Should_Be_Thrown() { // Given var currencyPair = "GBPUSD"; var currencyPairPricingRepositoryMock = new Mock<IGenericRepo<CurrencyPairPricing>>(); var currencyUoWMock = new Mock<ICurrencyUoW>(); currencyPairPricingRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<CurrencyPairPricing, bool>>>(), It.IsAny<Func<IQueryable<CurrencyPairPricing>, IOrderedQueryable<CurrencyPairPricing>>>(), "")) .Returns(new List<CurrencyPairPricing>()); currencyUoWMock.Setup(x => x.CurrencyPairPricingRepository) .Returns(currencyPairPricingRepositoryMock.Object); var currencyService = new CurrencyService(currencyUoWMock.Object); var expectedMessage = $"{currencyPair} does not exist"; // When var result = Assert.Throws<CurrencyPairPricingNotFoundException>(() => currencyService.GetCurrencyPairRate(currencyPair)); // Then Assert.NotNull(result); Assert.Equal(expectedMessage, result.Message); } [Fact] public void Given_That_Currency_Pair_Exists_The_Rate_Should_Be_Returned() { // Given var currencyPair = "GBPUSD"; var currencyPairPricing = new CurrencyPairPricing() { CurrencyPair = currencyPair, Rate = 1.5 }; var currencyPairPricingRepositoryMock = new Mock<IGenericRepo<CurrencyPairPricing>>(); var currencyUoWMock = new Mock<ICurrencyUoW>(); currencyPairPricingRepositoryMock.Setup(x => x.Get(It.IsAny<Expression<Func<CurrencyPairPricing, bool>>>(), It.IsAny<Func<IQueryable<CurrencyPairPricing>, IOrderedQueryable<CurrencyPairPricing>>>(), "")) .Returns(new List<CurrencyPairPricing>{ currencyPairPricing }); currencyUoWMock.Setup(x => x.CurrencyPairPricingRepository) .Returns(currencyPairPricingRepositoryMock.Object); var currencyService = new CurrencyService(currencyUoWMock.Object); var expectedResult = 1.5; // When var result = currencyService.GetCurrencyPairRate(currencyPair); // Then Assert.Equal(expectedResult, result); } [Fact] public void Given_There_Is_No_Currency_Associated_With_The_Id_An_Exception_Should_Be_Thrown() { // Given var currencyId = 42; var currencies = new List<Currency>(); var currencyRepositoryMock = new Mock<IGenericRepo<Currency>>(); var currencyUoWMock = new Mock<ICurrencyUoW>(); currencyUoWMock.Setup(x => x.GetCurrency(It.IsAny<int>())).Returns(currencies.AsQueryable); var service = new CurrencyService(currencyUoWMock.Object); var expectedMessage = $"Currency with id {currencyId} does not exist"; // When var result = Assert.Throws<CurrencyNotFoundException>(() => service.GetCurrency(currencyId)); // Then Assert.NotNull(result); Assert.Equal(expectedMessage, result.Message); } [Fact] public void Given_A_Currency_Is_Found_A_Currency_Model_Should_Be_Returned() { // Given var currency = new Currency { Id = 42, Code = "GBP" }; var currencies = new List<Currency> { currency }; var currencyRepositoryMock = new Mock<IGenericRepo<Currency>>(); var currencyUoWMock = new Mock<ICurrencyUoW>(); currencyUoWMock.Setup(x => x.GetCurrency(It.IsAny<int>())).Returns(currencies.AsQueryable); var service = new CurrencyService(currencyUoWMock.Object); var expectedType = typeof(CurrencyModel); var expectedCode = "GBP"; // When var result = service.GetCurrency(currency.Id); // Then Assert.NotNull(result); Assert.Equal(expectedType, result.GetType()); Assert.Equal(expectedCode, result.Code); } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Helpers/SystemConstant.cs namespace Argentex.Core.Service.Helpers { public class SystemConstant { public const string ClientSiteAction_Type_OPIPayment = "RequestOPIAssignedToTrades"; public const string 
ClientSiteAction_Type_SwapCreation = "RequestSwap"; public const string ClientSiteAction_Type_NoFIXConfirmation = "RequestTradesNoFIXConfirmation"; public const string ClientSiteAction_Type_NewOPI = "RequestNewOPI"; public const string ClientSiteAction_Status_New = "New"; public const string ClientSiteAction_Status_Requested = "Requested"; public const string ClientSiteAction_Status_Pending = "Pending"; public const string Setting_UserChangeDaysRequiresForApproval = "UserChangeDaysRequiresForApproval"; public const int Setting_UserChangeDaysRequiresForApproval_Default = 10; } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Startup.cs using System.Collections.Generic; using Argentex.ClientSite.Service.Http; using Argentex.Core.Api.Automapper; using Argentex.Core.Api.Config; using Argentex.Core.Api.Filters; using Argentex.Core.DataAccess.Entities; using Argentex.Core.Identity.DataAccess; using Argentex.Core.Service; using Argentex.Core.Service.AppSettings; using Argentex.Core.Service.ClientCompanies; using Argentex.Core.Service.ClientSiteAction; using Argentex.Core.Service.Country; using Argentex.Core.Service.Currencies; using Argentex.Core.Service.Fix; using Argentex.Core.Service.Helpers; using Argentex.Core.Service.Identity; using Argentex.Core.Service.Identity.Services; using Argentex.Core.Service.Monitoring; using Argentex.Core.Service.Order; using Argentex.Core.Service.Payments; using Argentex.Core.Service.Settlements; using Argentex.Core.Service.Sms.SmsSender; using Argentex.Core.Service.Statements; using Argentex.Core.Service.Trade; using Argentex.Core.Service.User; using Argentex.Core.SignalRService; using Argentex.Core.UnitsOfWork.AppSettings; using Argentex.Core.UnitsOfWork.ClientCompanies; using Argentex.Core.UnitsOfWork.ClientCompanyContacts; using Argentex.Core.UnitsOfWork.ClientSiteAction; using Argentex.Core.UnitsOfWork.Countries; using Argentex.Core.UnitsOfWork.Currencies; using Argentex.Core.UnitsOfWork.Notifications; using Argentex.Core.UnitsOfWork.Payments; using Argentex.Core.UnitsOfWork.Settlements; using Argentex.Core.UnitsOfWork.Statements; using Argentex.Core.UnitsOfWork.Trades; using Argentex.Core.UnitsOfWork.Users; using AutoMapper; using EQService; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc.Authorization; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using SynetecLogger; using System.Net.Http; using Argentex.Core.Api.ClientAuthentication; using Microsoft.EntityFrameworkCore.Internal; using OpenIddict.Abstractions; namespace Argentex.Core.Api { public class Startup { public Startup(IHostingEnvironment env) { var builder = new ConfigurationBuilder() .SetBasePath(env.ContentRootPath) .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true) .AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true, reloadOnChange: true) .AddJsonFile(@"ClientAuthentication/ClientCredentials.json", optional:true) .AddEnvironmentVariables(); Configuration = builder.Build(); Configuration.Bind("ClientCredentials", _clientCredentials); } public IConfiguration Configuration { get; } private readonly IList<ClientConfig> _clientCredentials = new List<ClientConfig>(); // This method gets called by the runtime. Use this method to add services to the container. 
public void ConfigureServices(IServiceCollection services) { services.AddDbContext<SecurityDbContext>(options => { options.UseSqlServer(Configuration.GetConnectionString("IdentityDB")); options.UseOpenIddict(); }); services.AddIdentity<ApplicationUser, ApplicationRole>() .AddEntityFrameworkStores<SecurityDbContext>() .AddDefaultTokenProviders(); services.AddEntityFrameworkSqlServer() .AddDbContext<FXDB1Context>(options => options.UseSqlServer(Configuration.GetConnectionString("FXDB1"))); IdentityConfig.ConfigureIdentityOptions(services); // Register the OpenIddict services. services.AddOpenIddict() .AddCore(o => o.UseEntityFrameworkCore() .UseDbContext<SecurityDbContext>()) .AddServer(o => { o.UseMvc(); o.EnableTokenEndpoint("/api/security/token"); o.AllowPasswordFlow(); o.AllowRefreshTokenFlow(); o.AllowClientCredentialsFlow(); o.DisableHttpsRequirement(); o.AcceptAnonymousClients(); o.RegisterScopes(OpenIddictConstants.Scopes.Email, OpenIddictConstants.Scopes.OpenId, OpenIddictConstants.Scopes.Phone, OpenIddictConstants.Scopes.Profile, OpenIddictConstants.Scopes.OfflineAccess, OpenIddictConstants.Scopes.Roles); }); JwtConfig.ConfigureJwt(services, Configuration); //CORS services.AddCors(options => options.AddPolicy("CorsPolicy", builder => { //URL slashes must be consistent, no double slashes builder.AllowAnyMethod().AllowAnyHeader() .WithOrigins(Configuration["Urls:FXDBTraderUrl"], Configuration["Urls:ClientSiteUrl"]) .AllowCredentials(); })); // Add application services. //transiet //scoped services.AddScoped<HttpClient, HttpClient>(); services.AddScoped<IHttpService, HttpService>(); services.AddScoped<IBarxFxService, BarxFxService>(); services.AddScoped<ITradeService, TradeService>(); services.AddScoped<ITradeUow, TradeUow>(); services.AddScoped<IClientCompanyService, ClientCompanyService>(); services.AddScoped<IClientCompanyUow, ClientCompanyUow>(); services.AddScoped<ICountryService, CountryService>(); services.AddScoped<ICountryUow, CountryUow>(); services.AddScoped<IIdentityService, IdentityService>(); services.AddScoped<IUserService, UserService>(); services.AddScoped<IUserUow, UserUow>(); services.AddScoped<IConfigWrapper, ConfigWrapper>(); services.AddScoped<ILogWrapper, NLogWrapper>((ctx) => new NLogWrapper(Configuration.GetConnectionString("NLogWrapperDB"))); services.AddScoped<IStatementUoW, StatementUoW>(); services.AddScoped<IStatementService, StatementService>(); services.AddScoped<ICurrencyUoW, CurrencyUoW>(); services.AddScoped<ICurrencyService, CurrencyService>(); services.AddScoped<IClientCompanyAccountsUoW, ClientCompanyAccountsUoW>(); services.AddScoped<IClientCompanyAccountsService, ClientCompanyAccountsService>(); services.AddScoped<IPaymentUoW, PaymentUoW>(); services.AddScoped<ISettlementService, SettlementService>(); services.AddScoped<ISettlementUow, SettlementUow>(); services.AddScoped<IOrderService, OrderService>(); services.AddScoped<IAppSettingService, AppSettingService>(); services.AddScoped<IAppSettingUow, AppSettingUow>(); services.AddScoped<IMonitoringService, MonitoringService>(); services.AddScoped<IServiceEmail, ServiceEmailClient>((ctx) => new ServiceEmailClient(ServiceEmailClient.EndpointConfiguration.Basic, Configuration["EQS:EQSEndPointUrl"])); services.AddScoped<IEmailSender, EmailSender>(); services.AddScoped<IEmailService, EmailService>(); services.AddScoped<IMonitoringHub, MonitoringHub>(); services.AddScoped<ITraderActionsHub, TraderActionsHub>(); services.AddScoped<IClientSiteActionService, ClientSiteActionService>(); 
services.AddScoped<IClientSiteActionUow, ClientSiteActionUow>(); services.AddScoped<IPaymentsService, PaymentsService>(); services.AddScoped<ISmsSender, SmsSender>(); services.AddScoped<ISmsService, SmsService>(); services.AddScoped<ITextMagicService, TextMagicService>(); services.AddScoped<INotificationService, NotificationService>(); services.AddScoped<INotificationUow, NotificationUow>(); services.AddScoped<IClientApplicationUow, ClientApplicationUow>(); services.AddAutoMapper(x => { x.AddProfile<MappingProfiles>(); }); services.AddMvc(options => { options.Filters.Add(typeof(GlobalExceptionFilter)); var policy = new AuthorizationPolicyBuilder() .RequireAuthenticatedUser() .Build(); options.Filters.Add(new AuthorizeFilter(policy)); }); services.AddSignalR(options => { options.EnableDetailedErrors = true; }); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. public void Configure(IApplicationBuilder app, IHostingEnvironment env) { if (env.IsDevelopment()) { app.UseBrowserLink(); app.UseDeveloperExceptionPage(); } else { app.UseExceptionHandler("/error"); } app.Use((context, next) => { context.Response.Headers.Remove("x-powered-by"); return next(); }); app.UseAuthentication(); //Configure Cors app.UseCors("CorsPolicy"); app.UseMvc(); app.UseWebSockets(); app.UseSignalR(routes => { //slashes must be consistent, no double slashes //and hub URLs should always start with a slash routes.MapHub<MonitoringHub>("/hubs/monitoring"); routes.MapHub<TraderActionsHub>("/hubs/trader-actions"); }); if (_clientCredentials.Any()) { using (var updater = new ClientUpdater(app.ApplicationServices)) { updater.SynchroniseClients(_clientCredentials); } } } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Models/SecurityModels/UpdateUserModel.cs using System.ComponentModel.DataAnnotations; namespace Argentex.Core.Api.Models.SecurityModels { public class UpdateUserModel { [Display(Name = "Id")] public long Id { get; set; } [Required] [MaxLength(128)] [Display(Name = "Username")] public string Username { get; set; } [Required] [EmailAddress] [Display(Name = "Email")] public string Email { get; set; } [Required] [MaxLength(256)] public string Forename { get; set; } [Required] [MaxLength(256)] public string Surname { get; set; } [Required] public int ClientCompanyId { get; set; } [Required] public int UpdatedByAuthUserId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/VirtualAccountTransaction.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class VirtualAccountTransaction { public int Id { get; set; } public int VirtualAccountId { get; set; } public int CurrencyId { get; set; } public decimal? Amount { get; set; } public bool IsDebit { get; set; } public int? 
PaymentId { get; set; } public string FxforwardTradeCode { get; set; } public byte[] UpdateTimeStamp { get; set; } public bool IsProfitTransaction { get; set; } public Currency Currency { get; set; } public FxforwardTrade FxforwardTradeCodeNavigation { get; set; } public Payment Payment { get; set; } public ClientCompanyVirtualAccount VirtualAccount { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/FxoptionSettlements.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class FxoptionSettlements { public int Id { get; set; } public string Code { get; set; } public string FxoptionCode { get; set; } public string Description { get; set; } public string CurrencyPair { get; set; } public decimal? ClientRate { get; set; } public decimal? ClientLhsamt { get; set; } public decimal? ClientRhsamt { get; set; } public int? Lhsccyid { get; set; } public int? Rhsccyid { get; set; } public int? ClientCompanyId { get; set; } public int? AuthorisedByClientCompanyContactId { get; set; } public int? TradeInstructionMethodId { get; set; } public int? CreatedByAuthUserId { get; set; } public DateTime? ContractDate { get; set; } public DateTime? ValueDate { get; set; } public bool? IsBuy { get; set; } public DateTime? CreatedDate { get; set; } public bool? IsDeleted { get; set; } public decimal? Notional { get; set; } public int? GroupNum { get; set; } public int? IsRhsmajour { get; set; } public bool? IsSettled { get; set; } public int? FxoptionSettlementsTemplateId { get; set; } public int? BrokerId { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/ClientCompanyOpiTransactionNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class ClientCompanyOpiTransactionNotFoundException : Exception { public ClientCompanyOpiTransactionNotFoundException() : base() { } public ClientCompanyOpiTransactionNotFoundException(string message) : base(message) { } public ClientCompanyOpiTransactionNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Synetec.Data.UnitOfWork/GenericRepo/IGenericRepo.cs using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; namespace Synetec.Data.UnitOfWork.GenericRepo { public interface IGenericRepo<TEntity> where TEntity : class { IEnumerable<TEntity> Get(Expression<Func<TEntity, bool>> filter = null, Func<IQueryable<TEntity>, IOrderedQueryable<TEntity>> orderBy = null, string includeProperties = ""); IQueryable<TEntity> GetQueryable( Expression<Func<TEntity, bool>> filter = null, Func<IQueryable<TEntity>, IOrderedQueryable<TEntity>> orderBy = null, string includeProperties = ""); IList<TEntity> GetAllAsList(); //IQueryable<TEntity> GetAllAsQueryable(); TEntity GetByPrimaryKey(object id); void Insert(TEntity entity); void Update(TEntity entityToUpdate); void Delete(object id); void Delete(TEntity entityToDelete); void IgnoreProperty(TEntity entity, Expression<Func<TEntity, object>> property); } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogCurrencyPairPriceHistory.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogCurrencyPairPriceHistory { public int LogId { get; set; } public string LogAction { get; set; } public string CurrencyPair { get; set; } public DateTime PriceDate { get; set; } public decimal? Price { get; set; } public int? 
UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } public byte[] UpdateTimeStamp { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Exceptions/PaymentNotFoundException.cs using System; namespace Argentex.Core.Service.Exceptions { public class PaymentNotFoundException : Exception { public PaymentNotFoundException() : base() { } public PaymentNotFoundException(string message) : base(message) { } public PaymentNotFoundException(string message, Exception inner) : base(message, inner) { } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Payments/PaymentOutInformationModel.cs namespace Argentex.Core.Service.Models.Payments { public class PaymentOutInformationModel : PaymentInformationModel { public string OpiDescription { get; set; } public string OpiAccountName { get; set; } public string OpiSortCode { get; set; } public string OpiAccountNumber { get; set; } public string OpiBankName { get; set; } public string OpiSwiftCode { get; set; } public string OpiReference { get; set; } public string OpiIban { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/SwiftvalidationOption.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class SwiftvalidationOption { public SwiftvalidationOption() { SwiftvalidationCurrencyCountry = new HashSet<SwiftvalidationCurrencyCountry>(); SwiftvalidationOptionField = new HashSet<SwiftvalidationOptionField>(); } public int Id { get; set; } public string Description { get; set; } public int Sequence { get; set; } public bool? IsActive { get; set; } public ICollection<SwiftvalidationCurrencyCountry> SwiftvalidationCurrencyCountry { get; set; } public ICollection<SwiftvalidationOptionField> SwiftvalidationOptionField { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Identity/Helpers/Constants.cs using System; using System.Collections.Generic; using System.Text; namespace Argentex.Core.Service.Identity.Helpers { public static class GrantType { public const string Password = "<PASSWORD>"; public const string RefreshToken = "<PASSWORD>"; } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Services/Models/Trades/TradeNotificationModel.cs using System; namespace Argentex.Core.Service.Models.Trades { public class TradeNotificationModel { public int ClientCompanyID { get; set; } public string ClientCompanyName { get; set; } public int AuthUserID { get; set; } public string AuthUserName { get; set; } public DateTime LastLoginDate { get; set; } public decimal ClientRate { get; set; } public string SellCcy { get; set; } public string BuyCcy { get; set; } public decimal ClientSellAmount { get; set; } public decimal ClientBuyAmount { get; set; } public bool IsBuy { get; set; } public DateTime ValueDate { get; set; } public DateTime ContractDate { get; set; } public decimal Spread { get; set; } /// <summary> /// Dealer ID for the specific company /// </summary> public int? 
DealerAppUserID { get; set; } public int TraderNotificationCounter { get; set; } public bool SendNotification { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogClientCompanySalesAppUser.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogClientCompanySalesAppUser { public long LogId { get; set; } public string LogAction { get; set; } public int ClientCompanyId { get; set; } public int SalesPersonAppUserId { get; set; } public int SalesOrder { get; set; } public int UpdatedByAuthUserId { get; set; } public DateTime UpdatedDateTime { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.DataAccess/Entities/LogSwiftincomingMatchedAccount.cs using System; using System.Collections.Generic; namespace Argentex.Core.DataAccess.Entities { public partial class LogSwiftincomingMatchedAccount { public int LogId { get; set; } public string LogAction { get; set; } public int Id { get; set; } public string MatchingContent { get; set; } public int ClientCompanyId { get; set; } public DateTime CreatedDateTime { get; set; } public int CreatedByAuthUserId { get; set; } public DateTime? UpdatedDateTime { get; set; } public int? UpdatedByAuthUserId { get; set; } public byte[] UpdateTimeStamp { get; set; } public int? ChecksumMatchingContent { get; set; } } } <file_sep>/agfx-fxdb-core-api-dmz/Argentex.Core.Api/Controllers/Client/ClientCompanyController.cs using Argentex.Core.Service; using Argentex.Core.Service.Models.ClientCompany; using Microsoft.AspNetCore.Mvc; using SynetecLogger; using System; using System.Net; namespace Argentex.Core.Api.Controllers { [Produces("application/json")] [Route("api/client-company")] public class ClientCompanyController : Controller { private readonly IClientCompanyService _clientCompanyService; private readonly ILogWrapper _logger; public ClientCompanyController(IClientCompanyService clientCompanyService, ILogWrapper logger) { _clientCompanyService = clientCompanyService; _logger = logger; } [HttpGet] [Route("{clientCompanyId:int}")] public IActionResult GetCompanyName(int clientCompanyId) { return Ok(_clientCompanyService.GetClientCompany(clientCompanyId)); } [HttpGet] [Route("accounts/{clientCompanyId:int}")] public IActionResult GetClientCompanyAccounts(int clientCompanyId) { return Ok(_clientCompanyService.GetClientCompanyAccounts(clientCompanyId)); } [HttpGet] [Route("online-details/{clientCompanyId:int}")] public IActionResult GetClientCompanyOnlineDetails(int clientCompanyId) { return Ok(_clientCompanyService.GetClientCompanyOnlineDetailsModel(clientCompanyId)); } [HttpPost] [Route("spread-adjustment/add")] public IActionResult AddClientCompanyAccount([FromBody] SpreadAdjustmentModel model) { if (!ModelState.IsValid) { return BadRequest(ModelState); } _clientCompanyService.AddSpredAdjustment(model); return Ok(); } [HttpPost] [Route("set-kicked/{clientCompanyId:int}")] public IActionResult SetKicked(int clientCompanyId) { if (!ModelState.IsValid) { return BadRequest(ModelState); } _clientCompanyService.SetKicked(clientCompanyId); return Ok(); } [HttpGet] [Route("contacts/{clientCompanyId:int}")] public IActionResult GetCompanyContactList(int clientCompanyId) { try { var contactList = _clientCompanyService.GetCompanyContactList(clientCompanyId); return Ok(contactList); } catch (Exception exception) { _logger.Error(exception); return BadRequest(_clientCompanyService.GetErrorMessagesForContactList(HttpStatusCode.BadRequest, exception, clientCompanyId)); } } 
protected override void Dispose(bool disposing) { if (disposing) { _clientCompanyService.Dispose(); base.Dispose(disposing); } } } }
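// Illustrative only: a minimal JavaScript sketch of calling the contacts endpoint exposed by
// the ClientCompanyController above (GET api/client-company/contacts/{clientCompanyId}).
// The API_BASE host value and the way the bearer token is obtained are assumptions, not part of
// this repository; the token would normally come from the OpenIddict token endpoint configured in Startup.cs.
const API_BASE = 'https://localhost:5001'; // assumed host, not defined anywhere in the repository

async function getCompanyContacts(clientCompanyId, accessToken) {
    // route comes from [Route("api/client-company")] + [Route("contacts/{clientCompanyId:int}")]
    const response = await fetch(`${API_BASE}/api/client-company/contacts/${clientCompanyId}`, {
        headers: { Authorization: `Bearer ${accessToken}` } // endpoints require an authenticated user by default
    });
    if (!response.ok) {
        // the controller returns BadRequest with error messages when the lookup fails
        throw new Error(`Request failed with status ${response.status}`);
    }
    return response.json(); // list of company contacts
}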
cea6351cc4f9d33027be236bd1981a958baf00e4
[ "Markdown", "C#" ]
424
C#
ranjitmenon/agfx-fxdb-core-api-dmz
0799cae7fc8b81c281bd0e5e844d0f2c1e577f9f
74c3b5b5b969d6acb2e5b71fd742adb861bdfb73
refs/heads/master
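// Illustrative only: a minimal @microsoft/signalr client sketch for the hubs mapped in the
// Startup.cs above ("/hubs/monitoring" and "/hubs/trader-actions"). The host URL and the event
// name 'monitoringUpdate' are assumptions - the hub method names are not shown in this excerpt.
import * as signalR from '@microsoft/signalr';

const connection = new signalR.HubConnectionBuilder()
    .withUrl('https://localhost:5001/hubs/monitoring') // assumed host; the route is taken from UseSignalR in Startup.cs
    .build();

connection.on('monitoringUpdate', payload => {
    // handle a server-pushed message; the real event name depends on IMonitoringHub, which is not shown here
    console.log(payload);
});

connection.start().catch(err => console.error('SignalR connection failed', err));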
<repo_name>kg-bot/kg-bot.github.io<file_sep>/styles/bgimages/Grees_test.user.js // ==UserScript== // @name AntiSpam // @namespace erep_test // @description Testna eRep skripta // @include http://www.erepublik.com/* // @include http://*.erepublik.com/* // @version 1 // @grant none // ==/UserScript== function obrisiSpamera() { var divs = document.getElementById("notification_area"); divs.removeChild(divs.childNodes[0]); divs.removeChild(divs.childNodes[1]); divs.removeChild(divs.childNodes[2]); divs.removeChild(divs.childNodes[3]); divs.removeChild(divs.childNodes[4]); divs.removeChild(divs.childNodes[5]); divs.removeChild(divs.childNodes[6]); } function obrisiCupidDonacije() { if (document.getElementsByClassName("cupid_2014_gift_a_friend")) { var cupid = document.getElementsByClassName("cupid_2014_gift_a_friend"); cupid[0].parentNode.removeChild(cupid[0]); } } function obrisiDonjiBaner() { if (document.getElementsByClassName("sidebar_banners_area")) { var banner = document.getElementsByClassName("sidebar_banners_area"); banner[0].parentNode.removeChild(banner[0]); } } function obrisiGiftSliku() { if (document.getElementById("donate_to_friend")) { var gift = document.getElementById("donate_to_friend"); var gift_slika = gift.childNodes[1]; gift_slika.style.backgroundImage='none'; gift_slika.style.backgroundColor='transparent'; gift_slika.style.backgroundClip='inherit'; gift_slika.style.border='inherit'; gift_slika.style.boxShadow='inherit'; gift_slika.innerHTML='Send a gift<img width="13" height="21" alt="" src="http://s2.www.erepublik.net/images/modules/citizenprofile/left_arrow.png">'; } } function napraviListu() { var dete = document.getElementById("content"); var prvoDete = dete.childNodes[6].childNodes[1].childNodes[1]; var novoDete = document.createElement("div"); prvoDete.appendChild(novoDete); var novoDete_text = document.createTextNode("Whats uuuuuuuup"); novoDete.appendChild(novoDete_text); novoDete.id="moja_lista"; novoDete.style.backgroundColor="transparent"; var dugme = document.createElement("button"); var dugme_text = document.createTextNode("Please Click Me"); novoDete.appendChild(dugme); dugme.appendChild(dugme_text); dugme.type="button"; dugme.id="dugmeZaFirme"; console.log("Now going to add events"); dugme.addEventListener("dblclick", dbklik, true); console.log("dblclick added"); dugme.addEventListener("click", jedanklik, true); console.log("click added"); } function napraviListuAko() { if (document.URL == "http://www.erepublik.com/en/economy/myCompanies") { if (document.getElementById("moja_lista")) { return } else { napraviListu() } } } function alertMe() { alert("Hellooooo"); } function dbklik(){ var lista = document.getElementById("moja_lista"); lista.style.backgroundColor='transparent'; } function jedanklik() { var lista = document.getElementById("moja_lista"); lista.style.backgroundColor='red'; } document.body.addEventListener("load", obrisiSpamera, true); document.body.addEventListener("load", obrisiCupidDonacije, true); document.body.addEventListener("load", obrisiDonjiBaner, true); document.body.addEventListener("load", obrisiGiftSliku, true); document.body.addEventListener("load", napraviListuAko, true); document.addEventListener("load", alertMe, false);<file_sep>/scripts/index.js var p = document.createElement("p");var p_text = document.createTextNode("This is my first web page, created with JavaScript");var div = document.createElement("div"); var a_first = document.createElement("a"); var a_second = document.createElement("a"); var a_second_text = 
document.createTextNode("IRC Channel"); var img = document.createElement("img"); var button = document.createElement("button"); var button_text = document.createTextNode("Please click me."); var br1 = document.createElement("br"); var br = document.createElement("br"); button.id="button_changes"; button.type="button"; button.onclick= function(){alertmeplease();}; button.ondblclick= function(){getItBack();}; button.appendChild(button_text); a_second.href="irc.html"; a_second.appendChild(a_second_text); img.id="erep_image_2"; img.alt="Go to eRepublik"; img.title="Go to eRep"; img.src="styles/small_images/32/2.png"; a_first.class="erep_redirect"; a_first.href="http://www.erepublik.com"; a_first.appendChild(img); p.appendChild(p_text); p.id="p_elem"; p.style="color:red"; div.id = "first_paragraph"; div.appendChild(p); document.body.appendChild(div); document.body.appendChild(a_first); document.body.appendChild(br1); document.body.appendChild(a_second); document.body.appendChild(br); document.body.appendChild(button);
6700e6a15710f131c6074e2858f5e9155de2fb7e
[ "JavaScript" ]
2
JavaScript
kg-bot/kg-bot.github.io
11b389295f224b880dde131e4fb36aabedd55080
757eafbd26e7a1125d964e8757282e1901ee49f8
refs/heads/main
<repo_name>AdrianValenzuela/prueba-tecnica-front-2021<file_sep>/todo-app/src/components/Layout/Footer/Footer.js // local imports import './Footer.css'; function Footer() { return ( <footer className='pageFooter'>© 2021 Product Hackers</footer> ); } export default Footer;<file_sep>/todo-app/src/App.js // local imports import { TaskPage } from './components/TaskList'; import './App.css'; function App({ initialTasks }) { return ( <div className="App"> <TaskPage initialTasks={initialTasks}/> </div> ); } export default App; <file_sep>/todo-app/src/components/TaskList/TaskPage/TaskPage.js // libraries imports import React from 'react'; // local imports import TaskList from '../TaskList'; import { Layout } from '../../Layout'; import storage from '../../../utils/storage.js'; import './TaskPage.css'; function TaskPage({ initialTasks }) { const [tasks, setTasks] = React.useState(initialTasks); const [newTask, setNewTask] = React.useState([]); const [storedTask, setStoredTask] = React.useState([]); const onTaskRemove = task => { const index = tasks.indexOf(task); tasks.splice(index, 1); setTasks(tasks => { return [ ...tasks ]; }); storage.set('tasks', tasks); }; const onNewTask = event => { event.preventDefault(); setTasks(tasks => { return [ ...tasks, newTask ]; }); setNewTask(''); storage.set('tasks', [...tasks, newTask]); }; const onTaskStore = task => { setStoredTask(tasks); onTaskRemove(task); }; const handleChange = event => { setNewTask(event.target.value); }; return ( <div> <Layout> <TaskList tasks={tasks} onTaskRemove={onTaskRemove} onTaskStore={onTaskStore}/> <form className='new-task' onSubmit={onNewTask}> <input className='input' name='task' value={newTask} onChange={handleChange}></input> <button className='button is-primary'>New Task</button> </form> </Layout> </div> ); } export default TaskPage;<file_sep>/todo-app/src/components/TaskList/TaskList/TaskList.js // local imports import Task from '../Task'; import './TaskList.css'; function TaskList({ tasks, onTaskRemove, onTaskStore }) { return ( <div className='task-list'> {tasks.map(task => { return ( <Task task={task} onTaskRemove={onTaskRemove} onTaskStore={onTaskStore}/> ); })} </div> ); } export default TaskList;<file_sep>/todo-app/src/components/TaskList/Task/Task.js //libraries imports import React from 'react'; // local imports import './Task.css'; function Task({ task, onTaskRemove, onTaskStore }) { const [done, setDone] = React.useState(false); const handleDone = () => { setDone(true); }; const handleRemove = event => { onTaskRemove(event.target.name); } const handleStore = event => { onTaskStore(event.target.name); } return ( <div className="card"> <div className="card-content"> {done ? <p className="title doit"> {task} </p> : <p className="title"> {task} </p> } </div> <footer className="card-footer"> <button className="card-footer-item button is-primary" onClick={handleDone}> <span> Done! </span> </button> <button className="card-footer-item button is-danger" onClick={handleRemove} name={task}> <span> Remove </span> </button> {done && <button className="card-footer-item button is-info" onClick={handleStore} name={task}> <span> Store </span> </button> } </footer> </div> ); } export default Task;
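// Illustrative only: TaskPage.js above imports a storage helper from '../../../utils/storage.js'
// that is not included in this excerpt, and only its set() method is visible in the code shown.
// A minimal localStorage-backed implementation consistent with that usage might look like the
// sketch below - this is an assumption about the helper, not the repository's actual code.
const storage = {
    // persist a value under a key as JSON (matches the usage storage.set('tasks', tasks))
    set(key, value) {
        localStorage.setItem(key, JSON.stringify(value));
    },
    // read a previously stored value, falling back to a default when the key is missing
    get(key, defaultValue = []) {
        const raw = localStorage.getItem(key);
        return raw !== null ? JSON.parse(raw) : defaultValue;
    }
};

export default storage;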
a3eb9df478b69f4e7fb694a4caaa27b19d1c5e23
[ "JavaScript" ]
5
JavaScript
AdrianValenzuela/prueba-tecnica-front-2021
3ef5f09f10b03c9fcdc3af863d24d2449f50e142
78270e6c00fba1c31b82e520bc9fe7cf73fe42b2
refs/heads/master
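// Illustrative only: the entry point that mounts <App/> is not part of this excerpt. A typical
// index.js for the todo-app above, reading the persisted tasks through the storage helper
// sketched earlier and passing them as the initialTasks prop that App.js expects, could look
// like this - the file name, paths, and initial read are assumptions, not the repository's code.
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import storage from './utils/storage';

// seed the app with whatever TaskPage previously persisted via storage.set('tasks', ...)
const initialTasks = storage.get('tasks', []);

ReactDOM.render(
    <React.StrictMode>
        <App initialTasks={initialTasks} />
    </React.StrictMode>,
    document.getElementById('root')
);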
<file_sep>jQuery(document).ready(function() { $(function () { var array=[1,2,7]; var map_json={}; for(const i of array){ map_json[i] = new Date(2022,11,i,10); } var currdate=new Date(); var a=currdate.getDate(); console.log(map_json[1]); if(a<1){ $('#defaultCountdown').countdown({until:map_json[1]}); $('#main-title').html("INTEGRAL UNIVERSITY <br> <span class='id-color' style='color:var(--secondary-color) !important'>Lucknow</span>"); } else if(a<2){ $('#defaultCountdown').countdown({until:map_json[2]}); $('#main-title').html("Bennett University <br> <span class='id-color' style='color:var(--secondary-color) !important'>Greater Noida</span>"); } else{ $('#defaultCountdown').countdown({until:map_json[7]}); $('#main-title').html("Delhi Technological <br> University <br> <span class='id-color' style='color:var(--secondary-color) !important'>New Delhi</span>"); } $('#defaultCountdown').countdown({until:map_json[7]}); $('#main-title').html("Coming <br> <span class='id-color' style='color:var(--secondary-color) !important'>Soon</span>"); }); }); <file_sep>function on() { document.getElementById("overlay").style.display = "block"; document.body.scroll="no"; document.body.style.overflow="hidden"; } function off() { document.getElementById("overlay").style.display = "none"; document.body.scroll="yes"; document.body.style.overflow="visible"; } function show_story(){ $('.story').css('display','block'); $('.show-button').addClass('hide'); } // ScrollReveal().reveal('.scroll-animation',{distance:"100px",opacity:0,duration:2000}); ScrollReveal().reveal('.scroll-animation',{distance:"50px",opacity:0,duration:1000}); ScrollReveal().reveal('.scroll-delay-1',{distance:"50px",opacity:0,duration:1000,delay:500}); ScrollReveal().reveal('.scroll-delay-2',{distance:"50px",opacity:0,duration:1000,delay:1000}); ScrollReveal().reveal('.scroll-delay-3',{distance:"50px",opacity:0,duration:1000,delay:1500}); ScrollReveal().reveal('.scroll-delay-4',{distance:"50px",opacity:0,duration:1000,delay:2000}); ScrollReveal().reveal('.scroll-delay-5',{distance:"50px",opacity:0,duration:1000,delay:2500});<file_sep># ajaystark.github.io My portfolio Design inspired by https://www.nooneswatching.me https://bepatrickdavid.com https://www.nksh.io/ Unicorn png : Kartikaye azad <file_sep>jQuery(window).on("load", function () { "use strict"; (function(){ 'use strict'; if($('.top-bar').length>0) var t = $('.top-bar').height(); else t=0; $('.megamenu .arrow').on("click", function (){ if($(this).parent().hasClass('hover')){ $(this).parent().removeClass("hover"); $('.sub-arrow').toggleClass('inner-arrow'); }else{ $(this).parent().addClass("hover"); $('.sub-arrow').toggleClass('inner-arrow'); } }); var k=0; $(window).on('scroll', function (){ if($(window).width()>1000){ if($(window).scrollTop()>200+t){ $('.megamenu').removeAttr('style').addClass('pin'); }else{ $('.megamenu').css({top:-$(window).scrollTop()}).removeClass('pin'); }if($(window).scrollTop()>150+t){ $('.megamenu').addClass('before'); }else{ $('.megamenu').removeClass('before'); } }else{ //$('.megamenu').css({top:$(window).scrollTop()}) if($(window).scrollTop()<k){ $('.megamenu').addClass('off').removeClass('woff').removeAttr('style'); $('#menu').removeClass('in'); k=0; } } if($(window).scrollTop()>t){ if(!$('.megamenu').hasClass('woff')){ $('.megamenu').addClass('pin-start').addClass('off'); } }else{ $('.megamenu').removeClass('pin-start').removeClass('off'); } }); if($(window).scrollTop()>150+t){ $('.megamenu').addClass('pin'); }else{ 
$('.megamenu').removeAttr('style').removeClass('pin'); } $(window).on("resize", function () { if($(window).width()>1000){ $('.megamenu').removeAttr('style'); } }); if($(window).scrollTop()>t){ $('.megamenu').addClass('off').addClass('pin-start'); }else{ $('.megamenu').removeClass('off').removeClass('pin-start'); } $('.menu-icon').on("click", function (){ if($('#menu').hasClass('in')){ $('.megamenu').addClass('off').removeClass('woff').removeAttr('style'); if($(window).scrollTop()>t){ if(!$('.megamenu').hasClass('woff')){ $('.megamenu').addClass('pin-start').addClass('off'); } }else{ $('.megamenu').removeClass('pin-start').removeClass('off'); } }else{ k=$(window).scrollTop(); $('.megamenu').removeClass('off').addClass('woff').css({top:$(window).scrollTop()}); } }) })(); jQuery(function ($) { "use strict"; /* =================================== Multipage Side Menu ====================================== */ if ($("#sidemenu_toggle").length) { /* Multipage SideNav */ /* Multi Items Main Menu */ $(".multi-item1, .multi-item2, .multi-item3, .multi-item4, .multi-item5, .multi-item6, .multi-item7, .multi-item8, .multi-item9, .multi-item10").on("click", function () { $(".side-main-menu").addClass("toggle"),$(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".multi-item1").on("click", function () {$(".sub-multi-item1").addClass("toggle")}), $(".multi-item2").on("click", function () {$(".sub-multi-item2").addClass("toggle")}), $(".multi-item3").on("click", function () {$(".sub-multi-item3").addClass("toggle")}), $(".multi-item4").on("click", function () {$(".sub-multi-item4").addClass("toggle")}), $(".multi-item5").on("click", function () {$(".sub-multi-item5").addClass("toggle")}), $(".multi-item6").on("click", function () {$(".sub-multi-item6").addClass("toggle")}), $(".multi-item7").on("click", function () {$(".sub-multi-item7").addClass("toggle")}), $(".multi-item8").on("click", function () {$(".sub-multi-item8").addClass("toggle")}), $(".multi-item9").on("click", function () {$(".sub-multi-item9").addClass("toggle")}), $(".multi-item10").on("click", function () {$(".sub-multi-item10").addClass("toggle")}), /* Multi Items 1 */ $(".sub-multi-item1 .item1, .sub-multi-item1 .item2, .sub-multi-item1 .item3, .sub-multi-item1 .item4, .sub-multi-item1 .item5, .sub-multi-item1 .item6, .sub-multi-item1 .item7, .sub-multi-item1 .item8, .sub-multi-item1 .item9, .sub-multi-item1 .item10").on("click", function () { $(".sub-multi-item1").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item1 .item1").on("click", function () {$(".inner-multi-item1.item1").addClass("toggle")}), $(".sub-multi-item1 .item2").on("click", function () {$(".inner-multi-item1.item2").addClass("toggle")}), $(".sub-multi-item1 .item3").on("click", function () {$(".inner-multi-item1.item3").addClass("toggle")}), $(".sub-multi-item1 .item4").on("click", function () {$(".inner-multi-item1.item4").addClass("toggle")}), $(".sub-multi-item1 .item5").on("click", function () {$(".inner-multi-item1.item5").addClass("toggle")}), $(".sub-multi-item1 .item6").on("click", function () {$(".inner-multi-item1.item6").addClass("toggle")}), $(".sub-multi-item1 .item7").on("click", function () {$(".inner-multi-item1.item7").addClass("toggle")}), $(".sub-multi-item1 .item8").on("click", function () {$(".inner-multi-item1.item8").addClass("toggle")}), $(".sub-multi-item1 
.item9").on("click", function () {$(".inner-multi-item1.item9").addClass("toggle")}), $(".sub-multi-item1 .item10").on("click", function () {$(".inner-multi-item1.item10").addClass("toggle")}), /* Multi Items 2 */ $(".sub-multi-item2 .item1, .sub-multi-item2 .item2, .sub-multi-item2 .item3, .sub-multi-item2 .item4, .sub-multi-item2 .item5, .sub-multi-item2 .item6, .sub-multi-item2 .item7, .sub-multi-item2 .item8, .sub-multi-item2 .item9, .sub-multi-item2 .item10").on("click", function () { $(".sub-multi-item2").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item2 .item1").on("click", function () {$(".inner-multi-item2.item1").addClass("toggle")}), $(".sub-multi-item2 .item2").on("click", function () {$(".inner-multi-item2.item2").addClass("toggle")}), $(".sub-multi-item2 .item3").on("click", function () {$(".inner-multi-item2.item3").addClass("toggle")}), $(".sub-multi-item2 .item4").on("click", function () {$(".inner-multi-item2.item4").addClass("toggle")}), $(".sub-multi-item2 .item5").on("click", function () {$(".inner-multi-item2.item5").addClass("toggle")}), $(".sub-multi-item2 .item6").on("click", function () {$(".inner-multi-item2.item6").addClass("toggle")}), $(".sub-multi-item2 .item7").on("click", function () {$(".inner-multi-item2.item7").addClass("toggle")}), $(".sub-multi-item2 .item8").on("click", function () {$(".inner-multi-item2.item8").addClass("toggle")}), $(".sub-multi-item2 .item9").on("click", function () {$(".inner-multi-item2.item9").addClass("toggle")}), $(".sub-multi-item2 .item10").on("click", function () {$(".inner-multi-item2.item10").addClass("toggle")}), /* Multi Items 3 */ $(".sub-multi-item3 .item1, .sub-multi-item3 .item2, .sub-multi-item3 .item3, .sub-multi-item3 .item4, .sub-multi-item3 .item5, .sub-multi-item3 .item6, .sub-multi-item3 .item7, .sub-multi-item3 .item8, .sub-multi-item3 .item9, .sub-multi-item3 .item10").on("click", function () { $(".sub-multi-item3").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item3 .item1").on("click", function () {$(".inner-multi-item3.item1").addClass("toggle")}), $(".sub-multi-item3 .item2").on("click", function () {$(".inner-multi-item3.item2").addClass("toggle")}), $(".sub-multi-item3 .item3").on("click", function () {$(".inner-multi-item3.item3").addClass("toggle")}), $(".sub-multi-item3 .item4").on("click", function () {$(".inner-multi-item3.item4").addClass("toggle")}), $(".sub-multi-item3 .item5").on("click", function () {$(".inner-multi-item3.item5").addClass("toggle")}), $(".sub-multi-item3 .item6").on("click", function () {$(".inner-multi-item3.item6").addClass("toggle")}), $(".sub-multi-item3 .item7").on("click", function () {$(".inner-multi-item3.item7").addClass("toggle")}), $(".sub-multi-item3 .item8").on("click", function () {$(".inner-multi-item3.item8").addClass("toggle")}), $(".sub-multi-item3 .item9").on("click", function () {$(".inner-multi-item3.item9").addClass("toggle")}), $(".sub-multi-item3 .item10").on("click", function () {$(".inner-multi-item3.item10").addClass("toggle")}), /* Multi Items 4 */ $(".sub-multi-item4 .item1, .sub-multi-item4 .item2, .sub-multi-item4 .item3, .sub-multi-item4 .item4, .sub-multi-item4 .item5, .sub-multi-item4 .item6, .sub-multi-item4 .item7, .sub-multi-item4 .item8, .sub-multi-item4 .item9, .sub-multi-item4 .item10").on("click", 
function () { $(".sub-multi-item4").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item4 .item1").on("click", function () {$(".inner-multi-item4.item1").addClass("toggle")}), $(".sub-multi-item4 .item2").on("click", function () {$(".inner-multi-item4.item2").addClass("toggle")}), $(".sub-multi-item4 .item3").on("click", function () {$(".inner-multi-item4.item3").addClass("toggle")}), $(".sub-multi-item4 .item4").on("click", function () {$(".inner-multi-item4.item4").addClass("toggle")}), $(".sub-multi-item4 .item5").on("click", function () {$(".inner-multi-item4.item5").addClass("toggle")}), $(".sub-multi-item4 .item6").on("click", function () {$(".inner-multi-item4.item6").addClass("toggle")}), $(".sub-multi-item4 .item7").on("click", function () {$(".inner-multi-item4.item7").addClass("toggle")}), $(".sub-multi-item4 .item8").on("click", function () {$(".inner-multi-item4.item8").addClass("toggle")}), $(".sub-multi-item4 .item9").on("click", function () {$(".inner-multi-item4.item9").addClass("toggle")}), $(".sub-multi-item4 .item10").on("click", function () {$(".inner-multi-item4.item10").addClass("toggle")}), /* Multi Items 5 */ $(".sub-multi-item5 .item1, .sub-multi-item5 .item2, .sub-multi-item5 .item3, .sub-multi-item5 .item4, .sub-multi-item5 .item5, .sub-multi-item5 .item6, .sub-multi-item5 .item7, .sub-multi-item5 .item8, .sub-multi-item5 .item9, .sub-multi-item5 .item10").on("click", function () { $(".sub-multi-item5").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item5 .item1").on("click", function () {$(".inner-multi-item5.item1").addClass("toggle")}), $(".sub-multi-item5 .item2").on("click", function () {$(".inner-multi-item5.item2").addClass("toggle")}), $(".sub-multi-item5 .item3").on("click", function () {$(".inner-multi-item5.item3").addClass("toggle")}), $(".sub-multi-item5 .item4").on("click", function () {$(".inner-multi-item5.item4").addClass("toggle")}), $(".sub-multi-item5 .item5").on("click", function () {$(".inner-multi-item5.item5").addClass("toggle")}), $(".sub-multi-item5 .item6").on("click", function () {$(".inner-multi-item5.item6").addClass("toggle")}), $(".sub-multi-item5 .item7").on("click", function () {$(".inner-multi-item5.item7").addClass("toggle")}), $(".sub-multi-item5 .item8").on("click", function () {$(".inner-multi-item5.item8").addClass("toggle")}), $(".sub-multi-item5 .item9").on("click", function () {$(".inner-multi-item5.item9").addClass("toggle")}), $(".sub-multi-item5 .item10").on("click", function () {$(".inner-multi-item5.item10").addClass("toggle")}), /* Multi Items 6 */ $(".sub-multi-item6 .item1, .sub-multi-item6 .item2, .sub-multi-item6 .item3, .sub-multi-item6 .item4, .sub-multi-item6 .item5, .sub-multi-item6 .item6, .sub-multi-item6 .item7, .sub-multi-item6 .item8, .sub-multi-item6 .item9, .sub-multi-item6 .item10").on("click", function () { $(".sub-multi-item6").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item6 .item1").on("click", function () {$(".inner-multi-item6.item1").addClass("toggle")}), $(".sub-multi-item6 .item2").on("click", function () {$(".inner-multi-item6.item2").addClass("toggle")}), $(".sub-multi-item6 .item3").on("click", function () 
{$(".inner-multi-item6.item3").addClass("toggle")}), $(".sub-multi-item6 .item4").on("click", function () {$(".inner-multi-item6.item4").addClass("toggle")}), $(".sub-multi-item6 .item5").on("click", function () {$(".inner-multi-item6.item5").addClass("toggle")}), $(".sub-multi-item6 .item6").on("click", function () {$(".inner-multi-item6.item6").addClass("toggle")}), $(".sub-multi-item6 .item7").on("click", function () {$(".inner-multi-item6.item7").addClass("toggle")}), $(".sub-multi-item6 .item8").on("click", function () {$(".inner-multi-item6.item8").addClass("toggle")}), $(".sub-multi-item6 .item9").on("click", function () {$(".inner-multi-item6.item9").addClass("toggle")}), $(".sub-multi-item6 .item10").on("click", function () {$(".inner-multi-item6.item10").addClass("toggle")}), /* Multi Items 7 */ $(".sub-multi-item7 .item1, .sub-multi-item7 .item2, .sub-multi-item7 .item3, .sub-multi-item7 .item4, .sub-multi-item7 .item5, .sub-multi-item7 .item6, .sub-multi-item7 .item7, .sub-multi-item7 .item8, .sub-multi-item7 .item9, .sub-multi-item7 .item10").on("click", function () { $(".sub-multi-item7").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item7 .item1").on("click", function () {$(".inner-multi-item7.item1").addClass("toggle")}), $(".sub-multi-item7 .item2").on("click", function () {$(".inner-multi-item7.item2").addClass("toggle")}), $(".sub-multi-item7 .item3").on("click", function () {$(".inner-multi-item7.item3").addClass("toggle")}), $(".sub-multi-item7 .item4").on("click", function () {$(".inner-multi-item7.item4").addClass("toggle")}), $(".sub-multi-item7 .item5").on("click", function () {$(".inner-multi-item7.item5").addClass("toggle")}), $(".sub-multi-item7 .item6").on("click", function () {$(".inner-multi-item7.item6").addClass("toggle")}), $(".sub-multi-item7 .item7").on("click", function () {$(".inner-multi-item7.item7").addClass("toggle")}), $(".sub-multi-item7 .item8").on("click", function () {$(".inner-multi-item7.item8").addClass("toggle")}), $(".sub-multi-item7 .item9").on("click", function () {$(".inner-multi-item7.item9").addClass("toggle")}), $(".sub-multi-item7 .item10").on("click", function () {$(".inner-multi-item7.item10").addClass("toggle")}), /* Multi Items 8 */ $(".sub-multi-item8 .item1, .sub-multi-item8 .item2, .sub-multi-item8 .item3, .sub-multi-item8 .item4, .sub-multi-item8 .item5, .sub-multi-item8 .item6, .sub-multi-item8 .item7, .sub-multi-item8 .item8, .sub-multi-item8 .item9, .sub-multi-item8 .item10").on("click", function () { $(".sub-multi-item8").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item8 .item1").on("click", function () {$(".inner-multi-item8.item1").addClass("toggle")}), $(".sub-multi-item8 .item2").on("click", function () {$(".inner-multi-item8.item2").addClass("toggle")}), $(".sub-multi-item8 .item3").on("click", function () {$(".inner-multi-item8.item3").addClass("toggle")}), $(".sub-multi-item8 .item4").on("click", function () {$(".inner-multi-item8.item4").addClass("toggle")}), $(".sub-multi-item8 .item5").on("click", function () {$(".inner-multi-item8.item5").addClass("toggle")}), $(".sub-multi-item8 .item6").on("click", function () {$(".inner-multi-item8.item6").addClass("toggle")}), $(".sub-multi-item8 .item7").on("click", function () {$(".inner-multi-item8.item7").addClass("toggle")}), 
$(".sub-multi-item8 .item8").on("click", function () {$(".inner-multi-item8.item8").addClass("toggle")}), $(".sub-multi-item8 .item9").on("click", function () {$(".inner-multi-item8.item9").addClass("toggle")}), $(".sub-multi-item8 .item10").on("click", function () {$(".inner-multi-item8.item10").addClass("toggle")}), /* Multi Items 9 */ $(".sub-multi-item9 .item1, .sub-multi-item9 .item2, .sub-multi-item9 .item3, .sub-multi-item9 .item4, .sub-multi-item9 .item5, .sub-multi-item9 .item6, .sub-multi-item9 .item7, .sub-multi-item9 .item8, .sub-multi-item9 .item9, .sub-multi-item9 .item10").on("click", function () { $(".sub-multi-item9").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item9 .item1").on("click", function () {$(".inner-multi-item9.item1").addClass("toggle")}), $(".sub-multi-item9 .item2").on("click", function () {$(".inner-multi-item9.item2").addClass("toggle")}), $(".sub-multi-item9 .item3").on("click", function () {$(".inner-multi-item9.item3").addClass("toggle")}), $(".sub-multi-item9 .item4").on("click", function () {$(".inner-multi-item9.item4").addClass("toggle")}), $(".sub-multi-item9 .item5").on("click", function () {$(".inner-multi-item9.item5").addClass("toggle")}), $(".sub-multi-item9 .item6").on("click", function () {$(".inner-multi-item9.item6").addClass("toggle")}), $(".sub-multi-item9 .item7").on("click", function () {$(".inner-multi-item9.item7").addClass("toggle")}), $(".sub-multi-item9 .item8").on("click", function () {$(".inner-multi-item9.item8").addClass("toggle")}), $(".sub-multi-item9 .item9").on("click", function () {$(".inner-multi-item9.item9").addClass("toggle")}), $(".sub-multi-item9 .item10").on("click", function () {$(".inner-multi-item9.item10").addClass("toggle")}), /* Multi Items 10 */ $(".sub-multi-item10 .item1, .sub-multi-item10 .item2, .sub-multi-item10 .item3, .sub-multi-item10 .item4, .sub-multi-item10 .item5, .sub-multi-item10 .item6, .sub-multi-item10 .item7, .sub-multi-item10 .item8, .sub-multi-item10 .item9, .sub-multi-item10 .item10").on("click", function () { $(".sub-multi-item10").addClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".sub-multi-item10 .item1").on("click", function () {$(".inner-multi-item10.item1").addClass("toggle")}), $(".sub-multi-item10 .item2").on("click", function () {$(".inner-multi-item10.item2").addClass("toggle")}), $(".sub-multi-item10 .item3").on("click", function () {$(".inner-multi-item10.item3").addClass("toggle")}), $(".sub-multi-item10 .item4").on("click", function () {$(".inner-multi-item10.item4").addClass("toggle")}), $(".sub-multi-item10 .item5").on("click", function () {$(".inner-multi-item10.item5").addClass("toggle")}), $(".sub-multi-item10 .item6").on("click", function () {$(".inner-multi-item10.item6").addClass("toggle")}), $(".sub-multi-item10 .item7").on("click", function () {$(".inner-multi-item10.item7").addClass("toggle")}), $(".sub-multi-item10 .item8").on("click", function () {$(".inner-multi-item10.item8").addClass("toggle")}), $(".sub-multi-item10 .item9").on("click", function () {$(".inner-multi-item10.item9").addClass("toggle")}), $(".sub-multi-item10 .item10").on("click", function () {$(".inner-multi-item10.item10").addClass("toggle")}), /* Single Items */ $(".side-main-menu .single-item1, .side-main-menu .single-item2, .side-main-menu .single-item3, .side-main-menu 
.single-item4, .side-main-menu .single-item5, .side-main-menu .single-item6, .side-main-menu .item7, .side-main-menu .single-item8, .side-main-menu .single-item9, .side-main-menu .single-item10").on("click", function () { $(".side-main-menu").addClass("toggle"),$(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), $(".side-main-menu .single-item1").on("click", function () {$(".side-sub-menu.single-item1").addClass("toggle")}), $(".side-main-menu .single-item2").on("click", function () {$(".side-sub-menu.single-item2").addClass("toggle")}), $(".side-main-menu .single-item3").on("click", function () {$(".side-sub-menu.single-item3").addClass("toggle")}), $(".side-main-menu .single-item4").on("click", function () {$(".side-sub-menu.single-item4").addClass("toggle")}), $(".side-main-menu .single-item5").on("click", function () {$(".side-sub-menu.single-item5").addClass("toggle")}), $(".side-main-menu .single-item6").on("click", function () {$(".side-sub-menu.single-item6").addClass("toggle")}), $(".side-main-menu .single-item7").on("click", function () {$(".side-sub-menu.single-item7").addClass("toggle")}), $(".side-main-menu .single-item8").on("click", function () {$(".side-sub-menu.single-item8").addClass("toggle")}), $(".side-main-menu .single-item9").on("click", function () {$(".side-sub-menu.single-item9").addClass("toggle")}), $(".side-main-menu .single-item10").on("click", function () {$(".side-sub-menu.single-item10").addClass("toggle")}), /* Back To Main Button Toggle */ $(".back-main").on("click", function () { $(".side-main-menu, .side-sub-menu, .sub-multi-item1, .sub-multi-item2, .sub-multi-item3, .sub-multi-item4, .sub-multi-item5, .sub-multi-item6, .sub-multi-item7, .sub-multi-item8, .sub-multi-item9, .sub-multi-item10, .inner-multi-item1, .inner-multi-item2, .inner-multi-item3, .inner-multi-item4, .inner-multi-item5, .inner-multi-item6, .inner-multi-item7, .inner-multi-item8, .inner-multi-item9, .inner-multi-item10").removeClass("toggle"), $(".sub-multi-item1, .sub-multi-item2, .sub-multi-item3, .sub-multi-item4, .sub-multi-item5, .sub-multi-item6, .sub-multi-item7, .sub-multi-item8, .sub-multi-item9, .sub-multi-item10").removeClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }), /* Back Button Toggle */ $(".back").on("click", function () { $(".inner-multi-item1, .inner-multi-item2, .inner-multi-item3, .inner-multi-item4, .inner-multi-item5, .inner-multi-item6, .inner-multi-item7, .inner-multi-item8, .inner-multi-item9, .inner-multi-item10").removeClass("toggle"), $(".sub-multi-item1, .sub-multi-item2, .sub-multi-item3, .sub-multi-item4, .sub-multi-item5, .sub-multi-item6, .sub-multi-item7, .sub-multi-item8, .sub-multi-item9, .sub-multi-item10").removeClass("toggle-inner"), $(".side-menu").addClass("side-menu-active"), $("#close_side_menu").fadeOut(200), $(".pushwrap").removeClass("active") }); } }); });
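// Illustrative only: in the countdown script near the top of this repository, the target date is
// chosen from map_json by comparing the current day of the month, and the final two statements then
// unconditionally fall back to map_json[7] with a "Coming Soon" title. A small helper that instead
// picks the first event date still in the future could look like this sketch; it assumes the same
// map_json shape (day-of-month keys mapped to Date objects) and the same jQuery countdown plugin.
function nextEventDate(map_json) {
    var now = new Date();
    var upcoming = Object.keys(map_json)
        .map(function (key) { return map_json[key]; })
        .filter(function (date) { return date > now; })
        .sort(function (a, b) { return a - b; });
    return upcoming.length ? upcoming[0] : null;
}

// usage inside the ready() callback above, where map_json has already been built
var target = nextEventDate(map_json);
if (target) {
    $('#defaultCountdown').countdown({ until: target });
}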
3bd10be1b3da7d0c67760c0f4f2373b038b7ba40
[ "JavaScript", "Markdown" ]
4
JavaScript
ajaystark/ajaystark.github.io
ae1292cfc8edc2ee9f4cb2867237f207d294d730
39467b0c0ab451517f49e7748c96291df4220b3a
refs/heads/master
<file_sep>/** * */ package com.mulodo.miniblog.service.impl; import java.sql.Timestamp; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.mulodo.miniblog.dao.PostDAO; import com.mulodo.miniblog.pojo.Post; import com.mulodo.miniblog.pojo.User; import com.mulodo.miniblog.service.PostService; import com.mulodo.miniblog.service.UserService; /** * @author TriLe */ @Service public class PostServiceImpl implements PostService { private static final Logger logger = LoggerFactory.getLogger(PostServiceImpl.class); @Autowired private PostDAO postDAO; @Autowired private UserService userSer; /** * {@inheritDoc} */ @Override @Transactional public Post add(Post post) { // If setted user then not get from Db // Else setted userId then get from Db if (null == post.getUser()) { // Get user of post User user = userSer.get(post.getUserId()); // Set referent to create Fk post.setUser(user); } // Add to Db and return return postDAO.add(post); } /** * {@inheritDoc} */ @Override @Transactional public Post update(Post updatePost) { // Load user info from db Post post = get(updatePost.getId()); // Return null if post doesn't exist if (null == post) { return null; } // Check have any change then set new post info boolean changeFlg = false; // Title if (null != updatePost.getTitle() && !updatePost.getTitle().equals(post.getTitle())) { post.setTitle(updatePost.getTitle()); changeFlg = true; } // Description if (null != updatePost.getDescription() && !updatePost.getDescription().equals(post.getDescription())) { post.setDescription(updatePost.getDescription()); changeFlg = true; } // Content if (null != updatePost.getContent() && !updatePost.getContent().equals(post.getContent())) { post.setContent(updatePost.getContent()); changeFlg = true; } // If have any change then update in Db if (changeFlg) { // Edit time post.setEditTime(new Timestamp(System.currentTimeMillis())); // Call hibernate to update post = postDAO.update(post); } return post; } /** * {@inheritDoc} */ @Override @Transactional public void delete(Post deletePost) { // Get post from Db to delete Post post = get(deletePost.getId()); if (null != post) { postDAO.delete(post); } } /** * {@inheritDoc} */ @Override @Transactional public Post load(int id) { return postDAO.load(id); } /** * {@inheritDoc} */ @Override @Transactional public Post get(int id) { return postDAO.get(id); } /** * {@inheritDoc} */ @Override @Transactional public void deleteAll() { postDAO.deleteAll(); } /** * {@inheritDoc} */ @Override @Transactional(readOnly = true) public List<Post> search(String query) { return postDAO.search(query); } /** * {@inheritDoc} */ @Override @Transactional public Post activeDeactive(int post_id, boolean activeFlg) { // Get post Post post = get(post_id); if (null == post) { logger.warn("Post with id ={} does not exist", post_id); return null; } // If activeFlg = TRUE and post not publicized then change Db if (activeFlg && null == post.getPublicTime()) { post.setPublicTime(new Timestamp(System.currentTimeMillis())); // Update Db post = postDAO.update(post); // If activeFlg = FALSE and post publicized then change } else if (!activeFlg && null != post.getPublicTime()) { post.setPublicTime(null); // Update Db post = postDAO.update(post); } return post; } /** * {@inheritDoc} */ @Override @Transactional(readOnly = true) public boolean checkOwner(int 
postId, int userId) { return postDAO.checkOwner(postId, userId); } /** * {@inheritDoc} */ @Transactional(readOnly = true) @Override public List<Post> list() { return postDAO.list(); } /** * {@inheritDoc} */ @Transactional(readOnly = true) @Override public List<Post> getByUserId(int userId, boolean showUnpublic) { return postDAO.getByUserId(userId, showUnpublic); } /** * {@inheritDoc} */ @Transactional(readOnly = true) @Override public List<Post> top() { return postDAO.top(); } } <file_sep>miniBlog.directive('isActiveNav', [ '$location', function($location) { return { restrict : 'A', link : function(scope, element) { scope.location = $location; scope.$watch('location.path()', function(currentPath) { if ('#' + currentPath === element[0].attributes['href'].nodeValue) { element.parent().addClass('active'); } else { element.parent().removeClass('active'); } }); } }; } ]); miniBlog.directive("compareTo", function() { return { require : "ngModel", scope : { otherModelValue : "=compareTo" }, link : function(scope, element, attributes, ngModel) { ngModel.$validators.compareTo = function(modelValue) { return modelValue == scope.otherModelValue; }; scope.$watch("otherModelValue", function() { ngModel.$validate(); }); } }; }); miniBlog.directive('toggle', function() { return { restrict : 'A', link : function(scope, element, attrs) { if (attrs.toggle == "tooltip") { $(element).tooltip(); } if (attrs.toggle == "popover") { $(element).popover(); } } }; }); <file_sep>'use strict'; miniBlog.factory('commentSer', [ '$rootScope', '$http', '$location', '$cookies', '$q', 'toaster', 'authSer', 'util', function($rootScope, $http, $location, $cookies, $q, toaster, authSer, util) { // interface var service = { comments : [], getComments : getComments, addComment : addComment, editComment : editComment, deleteComment : deleteComment }; return service; // implementation // Get all comments by post id function getComments(postId) { } // Add comment function addComment(postId, content) { var def = $q.defer(); // Prepare param var comment = {}; comment.content = content; comment.post_id = postId; // User id and token comment.user_id = $rootScope.currentUser.user_id; comment.token = $rootScope.token; // Call rest $http({ method : 'POST', url : REST_API_URL + 'comments', data : $.param(comment), // pass in data as strings headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } // set the headers so angular passing info as form data }).success(function(data) { // Notify toaster.pop('success', data.meta.message); def.resolve(data.data); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); def.reject("Failed to get albums"); }); return def.promise; } // Edit comment function editComment(commentId, content) { var def = $q.defer(); // Prepare param var comment = {}; comment.content = content; comment.comment_id = commentId; // User id and token comment.user_id = $rootScope.currentUser.user_id; comment.token = $rootScope.token; // Call rest $http({ method : 'PUT', url : REST_API_URL + 'comments', data : $.param(comment), // pass in data as strings headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } // set the headers so angular passing info as form data }).success(function(data) { // Notify toaster.pop('success', data.meta.message); def.resolve(data.data); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); def.reject("Failed to get albums"); }); return def.promise; } // Delete comment function 
deleteComment(commentId) { var def = $q.defer(); // Call rest $http({ method : 'DELETE', url : REST_API_URL + 'comments', headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8', // User id and token 'user_id' : $rootScope.currentUser.user_id, 'token' : $rootScope.token, // Comment Id 'comment_id' : commentId } // set the headers so angular passing info as form data }).success(function(data) { // Notify toaster.pop('success', data.meta.message); def.resolve(data.meta.message); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); def.reject("Failed to get albums"); }); return def.promise; } } ]); <file_sep>/** * */ package com.mulodo.miniblog.rest.controller; import java.util.List; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.FormParam; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.hibernate.HibernateException; import org.jboss.resteasy.plugins.validation.hibernate.ValidateRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import com.mulodo.miniblog.common.Contants; import com.mulodo.miniblog.message.ResultMessage; import com.mulodo.miniblog.pojo.Post; import com.mulodo.miniblog.service.PostService; import com.mulodo.miniblog.service.TokenService; /** * @author TriLe */ @Controller @Path(Contants.URL_POST) @Produces(MediaType.APPLICATION_JSON) @ValidateRequest public class PostController { private static final Logger logger = LoggerFactory.getLogger(PostController.class); @Autowired private TokenService tokenSer; @Autowired private PostService postSer; @SuppressWarnings("rawtypes") @Path(Contants.URL_ADD) @POST @Consumes(MediaType.APPLICATION_FORM_URLENCODED) public Response add( @NotNull(message = "{user_id.NotNull}") @FormParam(value = "user_id") @Min(value = 0) Integer user_id, @NotNull(message = "{token.NotNull}") @Size(min = 64, max = 64, message = "{token.Size}") @FormParam(value = "token") String token, @NotNull(message = "{title.NotNull}") @Size(min = 1, max = 128, message = "{title.Size}") @FormParam(value = "title") String title, @NotNull(message = "{description.NotNull}") @Size(min = 1, max = 512, message = "{description.Size}") @FormParam(value = "description") String description, @NotNull(message = "{content.NotNull}") @Size(min = 1, max = 8192, message = "{content.Size}") @FormParam(value = "content") String content) { // Check token if (!tokenSer.checkToken(user_id, token)) { // Log logger.warn("Token {} invaild or expired", token); // Unauthorized ResultMessage unauthorizedMsg = new ResultMessage(Contants.CODE_TOKEN_ERR, Contants.MSG_TOKEN_ERR, String.format(Contants.FOR_TOKEN_ERR, token)); return Response.status(Contants.CODE_UNAUTHORIZED).entity(unauthorizedMsg).build(); } // Create new post to call service Post post = new Post(); post.setTitle(title); post.setDescription(description); post.setContent(content); // Set userId post.setUserId(user_id); // Call service to insert into Db try { postSer.add(post); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg 
= new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<Post> result = new ResultMessage<Post>(Contants.CODE_CREATED, Contants.MSG_CREATE_POST_SCC, post); return Response.status(Contants.CODE_CREATED).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_UPDATE) @PUT @Consumes(MediaType.APPLICATION_FORM_URLENCODED) public Response update( @NotNull(message = "{user_id.NotNull}") @FormParam(value = "user_id") @Min(value = 0) Integer user_id, @NotNull(message = "{token.NotNull}") @Size(min = 64, max = 64, message = "{token.Size}") @FormParam(value = "token") String token, @NotNull(message = "{post_id.NotNull}") @FormParam(value = "post_id") @Min(value = 0) Integer post_id, @Size(min = 1, max = 128, message = "{title.Size}") @FormParam(value = "title") String title, @Size(min = 1, max = 512, message = "{description.Size}") @FormParam(value = "description") String description, @Size(min = 1, max = 8192, message = "{content.Size}") @FormParam(value = "content") String content) { // Check have any field change? if (null == title && null == description && null == content) { // Log logger.warn(Contants.MSG_MISS_ALL_FIELDS); // Miss all fields ResultMessage missAllFieldsMsg = new ResultMessage(Contants.CODE_INPUT_ERR, Contants.MSG_MISS_ALL_FIELDS, Contants.MSG_MISS_ALL_FIELDS_DTL); return Response.status(Contants.CODE_BAD_REQUEST).entity(missAllFieldsMsg).build(); } // Check token if (!tokenSer.checkToken(user_id, token)) { // Log logger.warn("Token {} invaild or expired", token); // Unauthorized ResultMessage unauthorizedMsg = new ResultMessage(Contants.CODE_TOKEN_ERR, Contants.MSG_TOKEN_ERR, String.format(Contants.FOR_TOKEN_ERR, token)); return Response.status(Contants.CODE_UNAUTHORIZED).entity(unauthorizedMsg).build(); } // Check owner if (!postSer.checkOwner(post_id, user_id)) { logger.warn("Token in request invaild or expired"); // Response username or password invalid ResultMessage forbiddenMsg = new ResultMessage(Contants.CODE_FORBIDDEN, Contants.MSG_FORBIDDEN, String.format(Contants.FOR_FORBIDDEN_POST, user_id, post_id)); return Response.status(Contants.CODE_FORBIDDEN).entity(forbiddenMsg).build(); } // Create new post to call service Post post = new Post(); post.setTitle(title); post.setDescription(description); post.setContent(content); // Set postId post.setId(post_id); // Call service to update into Db try { post = postSer.update(post); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<Post> result = new ResultMessage<Post>(Contants.CODE_OK, Contants.MSG_UPDATE_POST_SCC, post); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_DELETE) @DELETE @Consumes(MediaType.APPLICATION_FORM_URLENCODED) public Response delete( @NotNull(message = "{user_id.NotNull}") @HeaderParam(value = "user_id") @Min(value = 0) Integer user_id, @NotNull(message = "{token.NotNull}") @Size(min = 64, max = 64, message = "{token.Size}") @HeaderParam(value = "token") String token, @NotNull(message = "{post_id.NotNull}") @HeaderParam(value = "post_id") @Min(value = 0) 
Integer post_id) { // Check token if (!tokenSer.checkToken(user_id, token)) { // Log logger.warn("Token {} invaild or expired", token); // Unauthorized ResultMessage unauthorizedMsg = new ResultMessage(Contants.CODE_TOKEN_ERR, Contants.MSG_TOKEN_ERR, String.format(Contants.FOR_TOKEN_ERR, token)); return Response.status(Contants.CODE_UNAUTHORIZED).entity(unauthorizedMsg).build(); } // Check owner if (!postSer.checkOwner(post_id, user_id)) { logger.warn("Token in request invaild or expired"); // Response username or password invalid ResultMessage forbiddenMsg = new ResultMessage(Contants.CODE_FORBIDDEN, Contants.MSG_FORBIDDEN, String.format(Contants.FOR_FORBIDDEN_POST, user_id, post_id)); return Response.status(Contants.CODE_FORBIDDEN).entity(forbiddenMsg).build(); } // Create new post to call service Post post = new Post(); // Set postId post.setId(post_id); // Call service to delete from Db try { postSer.delete(post); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage result = new ResultMessage(Contants.CODE_OK, Contants.MSG_DELETE_POST_SCC); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_PUBLICT) @PUT @Consumes(MediaType.APPLICATION_FORM_URLENCODED) public Response activeDeactive( @NotNull(message = "{user_id.NotNull}") @FormParam(value = "user_id") @Min(value = 0) Integer user_id, @NotNull(message = "{token.NotNull}") @Size(min = 64, max = 64, message = "{token.Size}") @FormParam(value = "token") String token, @NotNull(message = "{post_id.NotNull}") @FormParam(value = "post_id") @Min(value = 0) Integer post_id, @NotNull(message = "{active.NotNull}") @FormParam(value = "active") Boolean active) { // Check token if (!tokenSer.checkToken(user_id, token)) { // Log logger.warn("Token {} invaild or expired", token); // Unauthorized ResultMessage unauthorizedMsg = new ResultMessage(Contants.CODE_TOKEN_ERR, Contants.MSG_TOKEN_ERR, String.format(Contants.FOR_TOKEN_ERR, token)); return Response.status(Contants.CODE_UNAUTHORIZED).entity(unauthorizedMsg).build(); } // Check owner if (!postSer.checkOwner(post_id, user_id)) { logger.warn("Token in request invaild or expired"); // Response username or password invalid ResultMessage forbiddenMsg = new ResultMessage(Contants.CODE_FORBIDDEN, Contants.MSG_FORBIDDEN, String.format(Contants.FOR_FORBIDDEN_POST, user_id, post_id)); return Response.status(Contants.CODE_FORBIDDEN).entity(forbiddenMsg).build(); } Post post = null; // Call service to insert into Db try { post = postSer.activeDeactive(post_id, active); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<Post> result = new ResultMessage<Post>(Contants.CODE_OK, Contants.MSG_ACT_DEACT_SCC, post); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_GET) @GET public Response allPost() { List<Post> posts = null; // Call service to get all public post from Db try { posts = postSer.list(); } 
catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<List<Post>> result = new ResultMessage<List<Post>>(Contants.CODE_OK, String.format(Contants.FOR_GET_ALL_POST_SCC, posts.size()), posts); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_TOP) @GET public Response topPost() { List<Post> posts = null; // Call service to get top public post from Db try { posts = postSer.top(); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<List<Post>> result = new ResultMessage<List<Post>>(Contants.CODE_OK, String.format(Contants.FOR_GET_TOP_POST_SCC, posts.size()), posts); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_GET_BY_USER) @POST public Response getByUserId(@PathParam("user_id") int userId, @NotNull(message = "{token.NotNull}") @Size(min = 64, max = 64, message = "{token.Size}") @FormParam(value = "token") String token) { // Check token if (!tokenSer.checkToken(userId, token)) { // Log logger.warn("Token {} invaild or expired", token); // Unauthorized ResultMessage unauthorizedMsg = new ResultMessage(Contants.CODE_TOKEN_ERR, Contants.MSG_TOKEN_ERR, String.format(Contants.FOR_TOKEN_ERR, token)); return Response.status(Contants.CODE_UNAUTHORIZED).entity(unauthorizedMsg).build(); } List<Post> posts = null; // Call service to get all public post of user from Db try { posts = postSer.getByUserId(userId, true); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<List<Post>> result = new ResultMessage<List<Post>>(Contants.CODE_OK, String.format(Contants.FOR_GET_ALL_POST_SCC, posts.size()), posts); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_GET_BY_USER) @GET public Response getByUserId(@PathParam("user_id") int userId) { List<Post> posts = null; // Call service to get all public post of user from Db try { posts = postSer.getByUserId(userId, false); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<List<Post>> result = new ResultMessage<List<Post>>(Contants.CODE_OK, String.format(Contants.FOR_GET_ALL_POST_SCC, posts.size()), posts); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_SEARCH) @GET public Response search( @NotNull(message = "{query.NotNull}") @Size(min = 1, max = 
64, message = "{query.Size}") @PathParam(value = "query") String query) { List<Post> posts = null; try { posts = postSer.search(query); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<List<Post>> result = new ResultMessage<List<Post>>(Contants.CODE_OK, String.format(Contants.FOR_SEARCH_SCC, posts.size()), posts); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_GET_BY_ID) @GET public Response getById(@PathParam(value = "id") int postId) { Post post = null; try { post = postSer.get(postId); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } if (null == post) { // log logger.warn("User with id={} does not exist", postId); // Response error ResultMessage userNotExistMsg = new ResultMessage(Contants.CODE_POST_NOT_EXIST, Contants.MSG_POST_NOT_EXIST, String.format(Contants.FOR_POST_NOT_EXIST, postId)); return Response.status(Contants.CODE_BAD_REQUEST).entity(userNotExistMsg).build(); } // Response success ResultMessage<Post> result = new ResultMessage<Post>(Contants.CODE_OK, Contants.MSG_GET_POST_SCC, post); return Response.status(Contants.CODE_OK).entity(result).build(); } } <file_sep>package com.mulodo.miniblog.pojo; import java.sql.Timestamp; import java.util.Calendar; import java.util.Date; import java.util.Set; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import org.codehaus.jackson.annotate.JsonIgnore; import org.codehaus.jackson.annotate.JsonProperty; import org.codehaus.jackson.annotate.JsonPropertyOrder; import org.codehaus.jackson.map.annotate.JsonDeserialize; import org.hibernate.annotations.Cascade; import org.hibernate.annotations.CascadeType; import org.hibernate.annotations.ForeignKey; import com.mulodo.miniblog.config.CustomerTimestampDeserialize; @Entity @Table(name = "posts") @JsonPropertyOrder({ "user_id", "post_id", "title", "description", "content", "create_time", "edit_time", "public_time" }) public class Post { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @JsonProperty("post_id") private int id; @Column(name = "title", length = 128, nullable = false) @JsonProperty("title") private String title; @Column(name = "description", length = 512, nullable = false) @JsonProperty("description") private String description; @Column(name = "content", length = 8192, columnDefinition = "TEXT", nullable = false) @Basic(fetch = FetchType.LAZY) @JsonProperty("content") private String content; @Column(name = "create_time", columnDefinition = "TIMESTAMP", nullable = false) @JsonProperty("create_time") @JsonDeserialize(using = CustomerTimestampDeserialize.class) private Timestamp createTime; @Column(name = "edit_time", 
columnDefinition = "TIMESTAMP", nullable = true) @JsonProperty("edit_time") @JsonDeserialize(using = CustomerTimestampDeserialize.class) private Timestamp editTime; @Column(name = "public_time", columnDefinition = "TIMESTAMP", nullable = true) @JsonProperty("public_time") @JsonDeserialize(using = CustomerTimestampDeserialize.class) private Timestamp publicTime; @ManyToOne @JoinColumn(name = "user_id", referencedColumnName = "id", nullable = false) @Cascade(CascadeType.SAVE_UPDATE) @ForeignKey(name = "fk_posts_users") @JsonIgnore private User user; @OneToMany(mappedBy = "post", targetEntity = Comment.class) @Cascade(CascadeType.ALL) @JsonIgnore private Set<Comment> comments; // @Transient @Column(name = "user_id", updatable = false, insertable = false) @JsonProperty("user_id") private int userId; public Post() { // Current time this.createTime = new Timestamp(System.currentTimeMillis()); } /** * @return the id */ public int getId() { return id; } /** * @param id * the id to set */ public void setId(int id) { this.id = id; } /** * @return the title */ public String getTitle() { return title; } /** * @param title * the title to set */ public void setTitle(String title) { this.title = title; } /** * @return the description */ public String getDescription() { return description; } /** * @param description * the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the content */ public String getContent() { return content; } /** * @param content * the content to set */ public void setContent(String content) { this.content = content; } /** * @return the createTime */ public Timestamp getCreateTime() { return createTime; } /** * @param createTime * the createTime to set */ public void setCreateTime(Timestamp createTime) { this.createTime = createTime; } /** * @return the editTime */ public Timestamp getEditTime() { return editTime; } /** * @param editTime * the editTime to set */ public void setEditTime(Timestamp editTime) { this.editTime = editTime; } /** * @return the publicTime */ public Timestamp getPublicTime() { return publicTime; } /** * @param publicTime * the publicTime to set */ public void setPublicTime(Timestamp publicTime) { this.publicTime = publicTime; } /** * @return the user */ public User getUser() { return user; } /** * @param user * the user to set */ public void setUser(User user) { this.user = user; } /** * @return the comments */ public Set<Comment> getComments() { return comments; } /** * @param comments * the comments to set */ public void setComments(Set<Comment> comments) { this.comments = comments; } /** * @return the userId */ public int getUserId() { return userId; } /** * @param userId * the userId to set */ public void setUserId(int userId) { this.userId = userId; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (null == obj) return false; if (getClass() != obj.getClass()) return false; Post other = (Post) obj; if (null == content) { if (other.content != null) { // System.out.println("content"); return false; } } else if (!content.equals(other.content)) { // System.out.println("content"); return false; } if (null == createTime) { if (other.createTime != null) { // System.out.println("createTime"); return false; } } else if (!timestampEqualIgnoreSecord(createTime, other.createTime)) { // System.out.println("createTime"); return false; } if (null == description) { if (other.description != null) { // System.out.println("description"); return false; } } else if 
(!description.equals(other.description)) { // System.out.println("description"); return false; } if (null == editTime) { if (other.editTime != null) { // System.out.println("editTime"); return false; } } else if (!timestampEqualIgnoreSecord(editTime, other.editTime)) { // System.out.println("editTime"); return false; } if (null == publicTime) { if (other.publicTime != null) { // System.out.println("publicTime"); return false; } } else if (!timestampEqualIgnoreSecord(publicTime, other.publicTime)) { // System.out.println("publicTime"); return false; } if (null == title) { if (other.title != null) { // System.out.println("title"); return false; } } else if (!title.equals(other.title)) { // System.out.println("title"); return false; } // if (userId != other.userId) { // // System.out.println("userId"); // return false; // } return true; } private boolean timestampEqualIgnoreSecord(Timestamp a, Timestamp b) { if (null == a && null == b) { return true; } if (null == a || null == b) { return false; } Calendar cal = Calendar.getInstance(); // A Timestamp cal.setTimeInMillis(a.getTime()); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); Date aDate = cal.getTime(); // B Timestamp cal.setTimeInMillis(b.getTime()); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); Date bDate = cal.getTime(); return aDate.equals(bDate); } } <file_sep>'use strict'; miniBlog.service('authSer', [ '$rootScope', '$http', '$location', '$cookies', '$cookieStore', 'toaster', 'util', function($rootScope, $http, $location, $cookies, $cookieStore, toaster, util) { // Check login status is logged this.isLogged = function() { // Check in cookie if (null != $cookies.token && undefined != $cookies.token) { return true; } // Check in cache if (null != $rootScope.currentUser && undefined != $rootScope.currentUser) { return true; } // Not login return false; } // Authenticate user is NOT logged. If logged then redirect to home // page and show notify this.authNotLogged = function() { if (true == this.isLogged()) { this.errorHandler('User logged'); return true; } return false; }; // Authenticate user is logged. 
If NOT logged then redirect to home // page and show notify this.authLogged = function() { if (false == this.isLogged()) { this.errorHandler('User not logged yet'); return true; } return false; }; // redirect to home page and show error notify this.errorHandler = function(message) { // redirect to home page $location.path('#/'); $location.replace(); // show notify toaster.pop('error', message); }; // Login this.login = function(user, rememberLogin) { $http({ method : 'POST', url : REST_API_URL + 'tokens/login', data : $.param(user), // pass in data as strings // set the headers so angular passing info as form data headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } }).success(function(data) { var user = data.data; // Save login data $rootScope.isLogged = true; $rootScope.currentUser = user; $rootScope.token = user.token; // Get user info // getUserInfo(user.user_id); // Set to cookies if (true === rememberLogin) { $cookies.user_id = user.user_id; $cookies.username = user.username; $cookies.token = user.token; } // redirect to home page $location.path('#/'); $location.replace(); // // back to previous page // window.history.back(); // Show notify // toaster.pop('success', 'Login success'); toaster.pop('success', data.meta.message); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); }); }; // Logout this.logout = function() { // Check is logged to call rest if (this.isLogged()) { // Prepare data to call logout rest var user = {}; user.token = $rootScope.token; // Call rest $http({ method : 'POST', url : REST_API_URL + 'tokens/logout', data : $.param(user), // pass in data as strings // set the headers so angular passing info as form data headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } }); // Clean scope $rootScope.isLogged = false; $rootScope.currentUser = null; // Clean cookies $cookieStore.remove("user_id"); $cookieStore.remove("username"); $cookieStore.remove("token"); // Clean nav items list $rootScope.navItems = null; // Clean preview post // postSer.setPreviewPost(null); // Show notify toaster.pop('success', 'Logout success'); } else { // Show notify toaster.pop('error', 'User not logged'); } } // Change password this.changePassword = function(user) { $http({ method : 'PUT', url : REST_API_URL + 'users/pass', data : $.param(user), // pass in data as strings // set the headers so angular passing info as form data headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } }).success(function(data) { var token = data.data.token; // Set to cookies if (null !== $cookies.token && undefined !== $cookies.token) { $cookies.token = token; } // Set token to cache $rootScope.token = token // redirect to home page $location.path('#/'); $location.replace(); // // back to previous page // window.history.back(); // Show notify // toaster.pop('success', 'Change password success'); toaster.pop('success', data.meta.message); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); }); }; // Change password this.register = function(user) { $http({ method : 'POST', url : REST_API_URL + 'users', data : $.param(user), // pass in data as strings headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } // set the headers so angular passing info as form data }).success(function(data) { var user = data.data; // Save registered user data $rootScope.isLogged = true; $rootScope.currentUser = user; $rootScope.token = user.token; // Save user info to 
cache $rootScope.users[user.user_id] = user; // Set to cookies $cookies.user_id = user.user_id; $cookies.username = user.username; $cookies.token = user.token; // redirect to home page $location.path('#/'); $location.replace(); // Show notify // toaster.pop('success', 'Register success'); toaster.pop('success', data.meta.message); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); }); }; // Edit profile this.editProfile = function(user) { $http({ method : 'PUT', url : REST_API_URL + 'users', data : $.param(user), // pass in data as strings headers : { 'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8' } // set the headers so angular passing info as form data }).success(function(data) { var user = data.data; // Save updated user data from rest to global variable $rootScope.currentUser = user; // redirect to home page $location.path('#/'); $location.replace(); // Show notify // toaster.pop('success', 'Update profile success'); toaster.pop('success', data.meta.message); }).error(function(data) { // Show notify util.showErrorMsgs(data.meta.messages); }); }; // Get user info by id. if user info doesn't exist in cache the call // rest else call rest to get // this function user callback to handle success and error event this.getUserInfo = function(user_id, successCallBack, errorCallBack) { var users = $rootScope.users; // Not existed in cache if (null === users[user_id] || undefined === users[user_id]) { $http.get(REST_API_URL + 'users/' + user_id) // Success .success(function(data, status, headers, config) { var user = data.data; // Set user to list users users[user.user_id] = user; // Callback successCallBack && successCallBack(user); }).error(function(data) { util.showErrorMsgs(data.meta.messages) // Callback errorCallBack && errorCallBack(data); }); // existed in cache } else { successCallBack && successCallBack(users[user_id]); } } // Search username, firstname, lastname this.search = function(query, scope) { $http.get(REST_API_URL + 'users/' + query) // Success .success(function(data, status, headers, config) { // Set user to list users scope.searchResultUsers = data.data; }); } } ]); <file_sep>'use strict'; var REST_API_URL = "/miniblog.api/" var miniBlog = angular.module('MiniBlog', [ 'ngRoute', 'ngCookies', 'summernote', 'toaster', 'postControllers', 'userControllers' ]); miniBlog.constant('REST_API_URL', "/miniblog.api/"); miniBlog.constant('RICHTEXTOPTION', { height : 300, toolbar : [ // [groupname, [button list]] [ 'style', [ 'bold', 'italic', 'underline', 'clear' ] ], // font [ 'font', [ 'strikethrough' ] ], [ 'fontsize', [ 'fontsize' ] ], // color [ 'color', [ 'color' ] ], // param [ 'para', [ 'ul', 'ol', 'paragraph' ] ], // insert [ 'insert', [ 'link', 'picture' ] ], // view [ 'view', [ 'fullscreen', 'codeview' ] ], // help [ 'help', [ 'help' ] ] ] }); miniBlog.run(function($rootScope, $cookies, $location) { // Create user hashmap $rootScope.users = {}; // logged flag var token = $cookies.token; var username = $cookies.username; var user_id = $cookies.user_id; if (null != token) { // get cookie info and set to root scope $rootScope.isLogged = true; $rootScope.currentUser = {}; $rootScope.currentUser.username = username; $rootScope.currentUser.user_id = user_id; $rootScope.token = token; } // Create routing function $rootScope.search = function(url) { $location.path('search/' + url); $location.replace(); }; }); miniBlog.config([ '$routeProvider', function($routeProvider) { $routeProvider.when('/', { // Top 10 posts newest templateUrl : 
'partials/posts.html', controller : 'topPostCtrl' }).when('/all_posts', { // All posts templateUrl : 'partials/posts.html', controller : 'allPostCtrl' }).when('/users/:userId', { // List all posts of user templateUrl : 'partials/posts.html', controller : 'postOfUserCtrl' }).when('/posts/:postId', { // Detail of post templateUrl : 'partials/post_detail.html', controller : 'postDetailCtrl' }).when('/login', { // Login templateUrl : 'partials/login.html', controller : 'loginCtrl' }).when('/logout', { // Logout controller : 'logoutCtrl', template : " ", redirectTo : '/' }).when('/register', { // Register new user templateUrl : 'partials/register.html', controller : 'registerCtrl' }).when('/profile', { // Register new user templateUrl : 'partials/profile.html', controller : 'profileCtrl' }).when('/change_password', { // Register new user templateUrl : 'partials/change_password.html', controller : 'chpwdCtrl' }).when('/search/:query', { // Register new user templateUrl : 'partials/search.html', controller : 'searchCtrl' }).when('/new_post', { // Register new user templateUrl : 'partials/post.html', controller : 'newPostCtrl' }).when('/preview_post', { // Register new user templateUrl : 'partials/post_detail.html', controller : 'previewPostCtrl' }).when('/edit_post/:postId', { // Register new user templateUrl : 'partials/post.html', controller : 'editPostCtrl' }).otherwise({ redirectTo : '/' }); } ]); <file_sep>'use strict'; miniBlog.service('util', [ '$rootScope', '$http', '$location', '$cookies', '$cookieStore', 'toaster', function($rootScope, $http, $location, $cookies, $cookieStore, toaster) { // Create upload element and handle upload success event this.uploadCompleteEvent = function(scope) { // event to create upload element scope.$on('$routeChangeSuccess', function() { $("#fileUploader").fileinput({ uploadUrl : REST_API_URL + "users/upload", uploadAsync : true, // dropZoneEnabled:false, previewFileType : "image", browseClass : "btn btn-success", browseLabel : "Pick Image", browseIcon : '<i class="glyphicon glyphicon-picture"></i>', removeClass : "btn btn-danger", removeLabel : "Delete", removeIcon : '<i class="glyphicon glyphicon-trash"></i>', uploadClass : "btn btn-info", uploadLabel : "Upload", uploadIcon : '<i class="glyphicon glyphicon-upload"></i>' }); // get file name when upload success $("#fileUploader").on('fileuploaded', function(event, data) { // Call delegate var meta = data.response.meta; if (200 === meta.code) { // add path of image scope.user.avatarlink = 'img/' + meta.messages[0]; // Show notify toaster.pop('success', 'Upload success'); } else { showErrorMsgs(meta.messages) } }); }); } // Add new item into navigation bar this.addNavItems = function(title, url, type) { // Define item var navItem = {}; navItem.title = title; navItem.url = url; navItem.type = type; // Add to nav items list var navItems = $rootScope.navItems; if (null === navItems || undefined === navItems) { // Define nav items list if not exist $rootScope.navItems = [ navItem ]; } else { // Remove existed item for (var i = 0; i < navItems.length; i++) { // Check existed if (navItems[i].url === url) { // Remove $rootScope.navItems.splice(i, 1); break; } } // Add to top $rootScope.navItems.unshift(navItem); } } // Show error messages. 
input is array error messages this.showErrorMsgs = function(messages) { for (var i = 0; i < messages.length; i++) { toaster.pop('error', messages[i]); } } } ]);<file_sep>/** * */ package com.mulodo.miniblog.common; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.sql.Timestamp; import java.util.Calendar; import java.util.Date; import javax.ws.rs.core.MultivaluedMap; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author TriLe */ public class Util { private static final Logger logger = LoggerFactory.getLogger(Util.class); /** * Hash input string * * @param value * string which need to hash * @return hash value of input */ public static String hashSHA256(String value) { // Check null and empty if (StringUtils.isEmpty(value)) { return null; } MessageDigest md; try { md = MessageDigest.getInstance(Contants.HASH_ALGORITHM); } catch (NoSuchAlgorithmException e) { // Log exception logger.error("HASH_ALGORITHM=" + Contants.HASH_ALGORITHM, e); return null; } md.update(value.getBytes()); byte byteData[] = md.digest(); // convert the byte to hex format method 1 StringBuffer sb = new StringBuffer(); for (int i = 0; i < byteData.length; i++) { sb.append(Integer.toString((byteData[i] & 0xff) + 0x100, 16).substring(1)); } // convert the byte to hex format method 2 StringBuffer hexString = new StringBuffer(); for (int i = 0; i < byteData.length; i++) { String hex = Integer.toHexString(0xff & byteData[i]); if (hex.length() == 1) hexString.append('0'); hexString.append(hex); } // Log result String result = hexString.toString(); logger.debug("Value: [{}]; Hash: [{}]", value, result); return result; } /** * Create new token by hash user ID and current date time (to unsure token * is unique) * * @param userID * ID of user * @return new token if userID > 0 and NULL otherwise */ public static String createToken(int userID) { // Validate userID if (0 > userID) { return null; } // Get current ms long currentMs = System.currentTimeMillis(); // Append username and current date time and then hash to create new // token return hashSHA256(userID + "@" + currentMs); } /** * Create current <b>Date</b> without <b>Time</b> * * @return Current <b>Date</b> without <b>Time</b> */ public static Date createDateIgnoreTime() { Calendar cal = Calendar.getInstance(); cal.set(Calendar.HOUR, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * Create current <b>Timestamp</b> without <b>Secord</b> and * <b>Milisecord</b> * * @return Current <b>Timestamp</b> without <b>Secord</b> and * <b>Milisecord</b> */ public static Timestamp createTimestampIgnoreSecord() { Calendar cal = Calendar.getInstance(); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return new Timestamp(cal.getTimeInMillis()); } /** * header sample { Content-Type=[image/png], Content-Disposition=[form-data; * name="file"; filename="filename.extension"] } **/ // get uploaded filename, is there a easy way in RESTEasy? 
public static String getFileName(MultivaluedMap<String, String> header) { String[] contentDisposition = header.getFirst("Content-Disposition").split(";"); for (String filename : contentDisposition) { if ((filename.trim().startsWith("filename"))) { String[] name = filename.split("="); String finalFileName = name[1].trim().replaceAll("\"", ""); return finalFileName; } } return Contants.UNKNOWN_FILE_NAME; } // save to somewhere public static void writeFile(byte[] content, String filename) throws IOException { File file = new File(filename); if (!file.exists()) { file.createNewFile(); } // auto close resource try (FileOutputStream fop = new FileOutputStream(file)) { fop.write(content); fop.flush(); } // fop.close(); } } <file_sep># trile.miniblog trile.miniblog :|<file_sep>/** * */ package com.mulodo.miniblog.rest.controller; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.Map; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import javax.ws.rs.Consumes; import javax.ws.rs.FormParam; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import org.apache.commons.io.IOUtils; import org.hibernate.HibernateException; import org.jboss.resteasy.plugins.providers.multipart.InputPart; import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput; import org.jboss.resteasy.plugins.validation.hibernate.ValidateRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import com.mulodo.miniblog.common.Contants; import com.mulodo.miniblog.common.Util; import com.mulodo.miniblog.message.ResultMessage; import com.mulodo.miniblog.pojo.User; import com.mulodo.miniblog.service.TokenService; import com.mulodo.miniblog.service.UserService; /** * @author TriLe */ @Controller @Path(Contants.URL_USER) @Produces(MediaType.APPLICATION_JSON) @ValidateRequest public class UserController { private static final Logger logger = LoggerFactory.getLogger(UserController.class); @Autowired private UserService userSer; @Autowired private TokenService tokenSer; @SuppressWarnings("rawtypes") @Path(Contants.URL_ADD) @POST public Response add( @NotNull(message = "{username.NotNull}") @Pattern(regexp = Contants.WORDS_VALID_REGEX, message = "{username.Invalid}") @Size(min = 4, max = 64, message = "{username.Size}") @FormParam(value = "username") String username, @NotNull(message = "{password.NotNull}") @Size(min = 4, max = 999, message = "{password.Size}") @FormParam(value = "password") String password, @NotNull(message = "{firstname.NotNull}") @Pattern(regexp = Contants.WORDS_VALID_REGEX, message = "{firstname.Invalid}") @Size(min = 1, max = 64, message = "{firstname.Size}") @FormParam(value = "firstname") String firstname, @NotNull(message = "{lastname.NotNull}") @Pattern(regexp = Contants.WORDS_VALID_REGEX, message = "{lastname.Invalid}") @Size(min = 1, max = 64, message = "{lastname.Size}") @FormParam(value = "lastname") String lastname, @Size(min = 1, max = 256, message = "{avatarlink.Size}") @FormParam(value = "avatarlink") String avatarlink) { // Check username existed in db if (userSer.checkUserNameExist(username)) 
{ // Log logger.warn("Username [{}] existed", username); ResultMessage errorMsg = new ResultMessage(Contants.CODE_INPUT_ERR, Contants.MSG_USER_EXIST, Contants.MSG_USER_EXIST); return Response.status(Contants.CODE_BAD_REQUEST).entity(errorMsg).build(); } User user = new User(); // Set username user.setUserName(username); // Set password user.setPassHash(<PASSWORD>); // Set firstname user.setFirstName(firstname); // Set lastname user.setLastName(lastname); // Set Avatarlink user.setAvatarLink(avatarlink); // Call user service to insert into db try { user = userSer.add(user); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<User> resultMsg = new ResultMessage<User>(Contants.CODE_CREATED, Contants.MSG_CREATE_USER_SCC, user); return Response.status(Contants.CODE_CREATED).entity(resultMsg).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_UPDATE) @PUT public Response update( @NotNull(message = "{user_id.NotNull}") @FormParam(value = "user_id") @Min(value = 0) Integer user_id, @Pattern(regexp = Contants.WORDS_VALID_REGEX, message = "{firstname.Invalid}") @Size(min = 1, max = 64, message = "{firstname.Size}") @FormParam(value = "firstname") String firstname, @Pattern(regexp = Contants.WORDS_VALID_REGEX, message = "{lastname.Invalid}") @Size(min = 1, max = 64, message = "{lastname.Size}") @FormParam(value = "lastname") String lastname, @Size(min = 1, max = 256, message = "{avatarlink.Size}") @FormParam(value = "avatarlink") String avatarlink, @NotNull(message = "{token.NotNull}") @Size(min = 64, max = 64, message = "{token.Size}") @FormParam(value = "token") String token) { // Check have any field change? 
if (null == firstname && null == lastname && null == avatarlink) { // Log logger.warn(Contants.MSG_MISS_ALL_FIELDS); // Miss all fields ResultMessage missAllFieldsMsg = new ResultMessage(Contants.CODE_INPUT_ERR, Contants.MSG_MISS_ALL_FIELDS, Contants.MSG_MISS_ALL_FIELDS_DTL); return Response.status(Contants.CODE_BAD_REQUEST).entity(missAllFieldsMsg).build(); } // Check token if (!tokenSer.checkToken(user_id, token)) { // Log logger.warn("Token {} invaild or expired", token); // Unauthorized ResultMessage unauthorizedMsg = new ResultMessage(Contants.CODE_TOKEN_ERR, Contants.MSG_TOKEN_ERR, String.format(Contants.FOR_TOKEN_ERR, token)); return Response.status(Contants.CODE_UNAUTHORIZED).entity(unauthorizedMsg).build(); } // Create new user object to call user service User user = new User(); // Set input user.setId(user_id); user.setFirstName(firstname); user.setLastName(lastname); user.setAvatarLink(avatarlink); // Update user try { user = userSer.update(user); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<User> successMsg = new ResultMessage<User>(Contants.CODE_OK, Contants.MSG_UPDATE_USER_SCC, user); return Response.status(Contants.CODE_OK).entity(successMsg).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_SEARCH) @GET public Response search( @NotNull(message = "{query.NotNull}") @Size(min = 1, max = 64, message = "{query.Size}") @PathParam(value = "query") String query) { List<User> users = null; try { users = userSer.search(query); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<List<User>> result = new ResultMessage<List<User>>(Contants.CODE_OK, String.format(Contants.FOR_SEARCH_SCC, users.size()), users); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_GET_BY_ID) @GET public Response getById(@PathParam(value = "id") int userId) { User user = null; try { user = userSer.get(userId); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } if (null == user) { // log logger.warn("User with id={} does not exist", userId); // Response error ResultMessage userNotExistMsg = new ResultMessage(Contants.CODE_USER_NOT_EXIST, Contants.MSG_USER_NOT_EXIST, String.format(Contants.FOR_USER_NOT_EXIST, userId)); return Response.status(Contants.CODE_BAD_REQUEST).entity(userNotExistMsg).build(); } // Response success ResultMessage<User> result = new ResultMessage<User>(Contants.CODE_OK, Contants.MSG_GET_USER_SCC, user); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @Path(Contants.URL_CHPWD) @PUT public Response changePassword( @NotNull(message = "{user_id.NotNull}") @FormParam(value = "user_id") @Min(value = 0) Integer user_id, 
@NotNull(message = "{password.NotNull}") @Size(min = 4, max = 999, message = "{password.Size}") @FormParam(value = "currentpassword") String currentpassword, @NotNull(message = "{newpassword.NotNull}") @Size(min = 4, max = 999, message = "{newpassword.Size}") @FormParam(value = "newpassword") String newpassword) { if (!userSer.checkPassword(user_id, currentpassword)) { // log logger.warn("User id={} or password invalid", user_id); // Response error ResultMessage invalidMsg = new ResultMessage(Contants.CODE_PWD_INVALID, Contants.MSG_PWD_INVALID, Contants.MSG_PWD_INVALID); return Response.status(Contants.CODE_BAD_REQUEST).entity(invalidMsg).build(); } // Call service to change password User user = null; try { user = userSer.changePassword(user_id, newpassword); } catch (HibernateException e) { // Log logger.warn(Contants.MSG_DB_ERR, e); // Response error ResultMessage dbErrMsg = new ResultMessage(Contants.CODE_DB_ERR, Contants.MSG_DB_ERR, String.format(Contants.FOR_DB_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(dbErrMsg).build(); } // Response success ResultMessage<User> result = new ResultMessage<User>(Contants.CODE_OK, Contants.MSG_CHANGE_PWD_SCC, user); return Response.status(Contants.CODE_OK).entity(result).build(); } @SuppressWarnings("rawtypes") @POST @Path("/upload") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response uploadFile(MultipartFormDataInput input) { Map<String, List<InputPart>> uploadForm = input.getFormDataMap(); List<InputPart> inputParts = uploadForm.get("uploadedFile"); if (null == inputParts) { // Log logger.warn(Contants.MSG_INVALID_UPLOAD_DATA_ERR); // Response error ResultMessage fileUploadErrMsg = new ResultMessage(Contants.CODE_FILE_UPLOAD_ERR, Contants.MSG_INVALID_UPLOAD_DATA_ERR, Contants.MSG_INVALID_UPLOAD_DATA_ERR); return Response.status(Contants.CODE_INTERNAL_ERR).entity(fileUploadErrMsg).build(); } String fileName = null; for (InputPart inputPart : inputParts) { try { MultivaluedMap<String, String> header = inputPart.getHeaders(); fileName = Util.getFileName(header); // convert the uploaded file to inputstream InputStream inputStream = inputPart.getBody(InputStream.class, null); byte[] bytes = IOUtils.toByteArray(inputStream); // constructs upload file path (append current ms to create // unique path) fileName = System.currentTimeMillis() + "_" + fileName; String filePath = Contants.UPLOADED_FILE_PATH + fileName; Util.writeFile(bytes, filePath); logger.debug("Upload file [{}] success!", filePath); } catch (IOException e) { // Log logger.warn(Contants.MSG_FILE_UPLOAD_ERR, e); // Response error ResultMessage fileUploadErrMsg = new ResultMessage( Contants.CODE_INVALID_UPLOAD_DATA_ERR, Contants.MSG_FILE_UPLOAD_ERR, String.format(Contants.FOR_FILE_UPLOAD_ERR, e.getMessage())); return Response.status(Contants.CODE_INTERNAL_ERR).entity(fileUploadErrMsg).build(); } } ResultMessage result = new ResultMessage(Contants.CODE_OK, Contants.MSG_UPLOAD_SCC, fileName); return Response.status(Contants.CODE_OK).entity(result).build(); } } <file_sep>/** * */ package com.mulodo.miniblog.exception; /** * Thrown when User is <b>not</b> owner of resource * * @author TriLe */ public class NotAllowException extends RuntimeException { /** * */ private static final long serialVersionUID = 594248027136829863L; /** * */ public NotAllowException() { super(); } /** * @param message */ public NotAllowException(String message) { super(message); } /** * @param cause */ public NotAllowException(Throwable cause) { super(cause); } /** * @param message * 
@param cause */ public NotAllowException(String message, Throwable cause) { super(message, cause); } /** * @param message * @param cause * @param enableSuppression * @param writableStackTrace */ public NotAllowException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } }
1819543c2df398c7dcde938b89356c440d0f9632
[ "JavaScript", "Java", "Markdown" ]
12
Java
le-tri-mulodo/trile.miniblog
cf227788c21243a4532ed67bb6df7a672490ff53
0e840ac9534aa1bd9b23814ca1c41abf168dc801
refs/heads/master
<repo_name>SabinPruna/bdd-crud-testing<file_sep>/tema2_jpa_jse/src/main/java/facultate/bdd/tema2/dao/impl/BuyerDAOImpl.java package facultate.bdd.tema2.dao.impl; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root; import facultate.bdd.tema2.entities.Book; import facultate.bdd.tema2.entities.Buyer; public class BuyerDAOImpl implements facultate.bdd.tema2.dao.interfaces.BuyerDAO { public EntityManagerFactory emFactory; public EntityManager entityManager; public BuyerDAOImpl(String persistenceUnitName) { emFactory = Persistence.createEntityManagerFactory(persistenceUnitName); } @Override public void close() { emFactory.close(); } @Override public Buyer createOrUpdate(Buyer entity) { try { entityManager = emFactory.createEntityManager(); try { entityManager.getTransaction().begin(); entityManager.persist(entity); entityManager.getTransaction().commit(); } catch (Exception ex) { entityManager.getTransaction().rollback(); entityManager.getTransaction().begin(); entity = entityManager.merge(entity); entityManager.getTransaction().commit(); } return entity; } catch (Exception ex) { System.out.println(ex.getMessage()); entityManager.getTransaction().rollback(); return null; } finally { entityManager.close(); } } @Override public Buyer findById(int id) { try { entityManager = emFactory.createEntityManager(); return entityManager.find(Buyer.class, id); } catch (Exception ex) { System.out.println(ex.getMessage()); return null; } finally { entityManager.close(); } } @Override public Buyer update(Buyer entity) { try { entityManager = emFactory.createEntityManager(); entityManager.getTransaction().begin(); entityManager.merge(entity); entityManager.getTransaction().commit(); return entity; } catch (Exception ex) { System.out.println(ex.getMessage()); entityManager.getTransaction().rollback(); return null; } finally { entityManager.close(); } } @Override public void delete(Buyer entity) { try { entityManager = emFactory.createEntityManager(); entityManager.getTransaction().begin(); entity = entityManager.find(Buyer.class, entity.getId()); entityManager.remove(entity); entityManager.getTransaction().commit(); } catch (Exception ex) { System.out.println(ex.getMessage()); entityManager.getTransaction().rollback(); } finally { entityManager.close(); } } @Override public void deleteAll() { try { for (Buyer b : readAll()) { delete(b); } } catch (Exception ex) { System.out.println(ex.getMessage()); } } @Override public List<Buyer> readAll() { try { entityManager = emFactory.createEntityManager(); return entityManager.createQuery("from Buyer", Buyer.class).getResultList(); } catch (Exception ex) { System.out.println(ex.getMessage()); return null; } finally { entityManager.close(); } } } <file_sep>/tema2_jpa_jse/src/main/java/facultate/bdd/tema2/dao/interfaces/GenreDAO.java package facultate.bdd.tema2.dao.interfaces; import facultate.bdd.tema2.entities.Genre; /** TODO: Implement this interface **/ public interface GenreDAO extends GenericDAO<Genre> { } <file_sep>/tema2_jpa_jse/src/main/java/facultate/bdd/tema2/dao/interfaces/OrderDAO.java package facultate.bdd.tema2.dao.interfaces; import facultate.bdd.tema2.entities.Order; /** TODO: Implement this interface **/ public interface OrderDAO extends GenericDAO<Order> { }
06db35c125502345e0289f8ae8720da9c9721eee
[ "Java" ]
3
Java
SabinPruna/bdd-crud-testing
af7607ee317671dac691411dbf989333d9aef692
b23398ecf6c1cd831eeea09ab8d5c4587d38fd2b
refs/heads/main
<file_sep>NAME = "[name]" with open("./Input/Names/invited_names.txt") as names_file: names = names_file.readlines() with open("./Input/Letters/starting_letter.docx") as new_letter: contents = new_letter.read() for name in names: name_strip = name.strip() new_let = contents.replace(NAME, name_strip) with open(f"./Output/ReadyToSend/letter_for_{name_strip}.docx", mode="w") as completed: completed.write(new_let)
83b7b675bcb9ad9cea64fc9d8f942f0a501e06a1
[ "Python" ]
1
Python
cmlohr/py-mail-merge
0de8305fd8a6cb4061f6f8d17e9e77f0399dd2e5
8b9449b1aa324e1589da7726dd248d146ec25f71
refs/heads/master
<file_sep>Salve salve família ycarozinho na área eu fiz um projeto de jogo da velha para testar minha lógica de programação então basicamente o "index.html" vai direcionar o usuário para a página que escolher, caso ele escolha jogar dois jogadores o "index.html" direciona ele para o "jogarsozinho.html" que na verdade é para duas pessoas diferentes jogarem, caso ele escolher jogar contra computador o "index.html" direciona o usuário para o "jogaria.html" que tem uma espécie de inteligência artifical bem básica. O código inteiro foi feito usando HTML5,CSS3 e Javascript básicos. Arquivo upado no Git <file_sep>var espnum img="_imagens/letra_x_modificada.png" auxiliar="X" var c=1 var espacos=[] var jogadorx=[] var jogadoro=[] function ganhou(){ var win=false var aux=0 for(var i=1;i<=3;i++){ if(jogadorx.indexOf(i)!=-1){ aux++ } } if(aux!=3){ aux=0 for(var i=1;i<=3;i++){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=4;i<=6;i++){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=4;i<=6;i++){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=7;i<=9;i++){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=7;i<=9;i++){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=1;i<=7;i+=3){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=1;i<=7;i+=3){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=2;i<=8;i+=3){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=2;i<=8;i+=3){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=3;i<=9;i+=3){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=3;i<=9;i+=3){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=1;i<=9;i+=4){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=1;i<=9;i+=4){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=3;i<=7;i+=2){ if(jogadorx.indexOf(i)!=-1){ aux++ } } } if(aux!=3){ aux=0 for(var i=3;i<=7;i+=2){ if(jogadoro.indexOf(i)!=-1){ aux++ } } } if(aux==3){ win=true } if(win==true){ alert(`JOGADOR ${auxiliar} GANHOU`) window.location.reload() } if(aux!=3){ var velha=0 for(var i=1;i<=9;i++){ if(espacos.indexOf(i)!=-1){ velha++ } } if(velha==9){ alert("DEU VELHA") vencer=true window.location.reload() } } } function verificar(v){ var aux aux=espacos.indexOf(v) if(aux==-1){ return true }else{ return false } } function botarimg(v){ var imagem=document.getElementById(`espaco${v}`) imagem.setAttribute("src",img) } function mudarjogador(){ if(c==1){ img="_imagens/letra_x_modificada.png" auxiliar="O" c=0 }else{ img="_imagens/letra_o_modificado.png" auxiliar="X" c=1 } } function clicou(v){ var aprovado=verificar(v) if(aprovado==false){ alert("Espaço já marcado") }else{ switch(v){ case 1: espacos.push(1) if(auxiliar=="X"){ jogadorx.push(1) }else{ jogadoro.push(1) } espnum=1 break case 2: espacos.push(2) if(auxiliar=="X"){ jogadorx.push(2) }else{ jogadoro.push(2) } espnum=2 break case 3: espacos.push(3) if(auxiliar=="X"){ jogadorx.push(3) }else{ jogadoro.push(3) } espnum=3 break case 4: espacos.push(4) if(auxiliar=="X"){ jogadorx.push(4) }else{ jogadoro.push(4) } espnum=4 break case 5: espacos.push(5) if(auxiliar=="X"){ jogadorx.push(5) }else{ jogadoro.push(5) } espnum=5 break case 6: espacos.push(6) if(auxiliar=="X"){ jogadorx.push(6) }else{ jogadoro.push(6) } espnum=6 break case 7: espacos.push(7) if(auxiliar=="X"){ jogadorx.push(7) }else{ jogadoro.push(7) } espnum=7 break case 8: espacos.push(8) 
if(auxiliar=="X"){ jogadorx.push(8) }else{ jogadoro.push(8) } espnum=8 break case 9: espacos.push(9) if(auxiliar=="X"){ jogadorx.push(9) }else{ jogadoro.push(9) } espnum=9 break } ganhou() mudarjogador() botarimg(espnum) escrever() } } function escrever(){ var escrever=document.getElementById("teste") escrever.innerHTML=`<p>Vez do jogador ${auxiliar}</p>` }
156d9863c3a84ecfbe48cadceeb1460ea24fe41b
[ "Markdown", "JavaScript" ]
2
Markdown
ycarotrindade/jogo-da-velha
b108dc4613a078227b295d75ebecc5ab04d4c071
b8b314a39c89317b0e7792ac76b79cc802c5a144
refs/heads/master
<repo_name>roclet/angular-card-slider<file_sep>/src/app/carousel/models/SliderBaseModel.ts import { SliderCarouselModel } from "./SliderCarouseModel"; export interface SliderBaseModel { isHovered: boolean, imageUrl: null, data: Array<SliderCarouselModel> }<file_sep>/src/app/carousel/models/SliderCarouseModel.ts // we can add more properties if we wish export interface SliderCarouselModel { title: string; imageUrl: string; } <file_sep>/src/app/draggable-picker/draggable-picker.component.ts import { AfterViewInit, Component, Input, OnInit } from '@angular/core'; import { gsap } from 'gsap'; import Draggable from 'gsap/Draggable'; import { SliderCarouselModel } from '../carousel/models/SliderCarouseModel'; @Component({ selector: 'app-draggable-picker', templateUrl: './draggable-picker.component.html', styleUrls: ['./draggable-picker.component.scss'], }) export class DraggablePickerComponent implements OnInit, AfterViewInit { @Input() slidesData: SliderCarouselModel[]; state = { draggable: null, index: 1, data: [ { title: 'Home Internet' }, { title: 'Add a Phone-line' }, { title: 'Home Internet' }, { title: 'Get a device' }, { title: 'Upgrade' }, { title: 'Mobile Internet' }, { title: 'Home Internet' }, { title: 'Add a Phone-line' }, ], wrapWidth: 0, }; // variables declaration picker; cells; proxy; cellWidth: number = 0; draggable: any; baseTl: any; cellStep: number = 0; cells0: any; wrapWidth: any; numCells: any; wrapProgress: any; animation: any; x: number; status: boolean = true; constructor() {} ngOnInit(): void {} ngAfterViewInit() { this.initCarouselSlider(); } initCarouselSlider() { gsap.registerPlugin(Draggable); gsap.defaults({ ease: 'none' }); this.picker = document.querySelector('.picker'); this.cells = gsap.utils.toArray('.cell'); this.proxy = document.createElement('div'); this.cellWidth = 320; this.numCells = this.cells.length; this.cellStep = 1 / this.numCells; this.wrapWidth = this.cellWidth * this.numCells; this.baseTl = gsap.timeline({ paused: true }); const baseTl11 = gsap.timeline({ paused: true }); this.wrapProgress = gsap.utils.wrap(0, 1); gsap.set(this.picker, { //perspective: 1100, width: this.wrapWidth - this.cellWidth, }); for (var i = 0; i < this.cells.length; i++) { this.initCell(this.cells[i], i); } this.animation = gsap .timeline({ repeat: -1, paused: true }) .add(this.baseTl.tweenFromTo(1, 2, { immediateRender: true })); this.draggable = new Draggable(this.proxy, { // allowContextM enu: true, type: 'x', trigger: this.picker, inertia: true, onDrag: this.updateProgress, onThrowUpdate: this.updateProgress, snap: { x: this.snapX, }, onThrowComplete: function () { let biggestElement = this.cells .slice(0) .sort( (a, b) => Number(gsap.getProperty(a, 'scaleX')) - Number(gsap.getProperty(b, 'scaleX')) ) .pop(); }, }); } snapX(x) { return Math.round(x / this.cellWidth) * this.cellWidth; } updateProgress() { const baseTl11 = gsap.timeline({ paused: true }); const animation1 = gsap .timeline({ repeat: -1, paused: true }) .add(baseTl11.tweenFromTo(1, 2, { immediateRender: true })); const wrapProgress1 = gsap.utils.wrap(0, 1); animation1.progress(wrapProgress1(this.x / this.wrapWidth)); } initCell(element, index) { let i = index; gsap.set(element, { width: this.cellWidth, scale: 0.6, //rotationX: rotationX, x: -this.cellWidth, }); const tl = gsap .timeline({ repeat: 1 }) .to( element, 1, { x: '+=' + this.wrapWidth /*, rotationX: -rotationX*/ }, 0 ) .to( element, this.cellStep, { color: '#000000', height: '600px', scale: 1, repeat: 1, yoyo: true }, 0.5 - this.cellStep ); 
this.baseTl.add(tl, i * -this.cellStep); } backSlide() { let i = this.state.index; let nextPosition = i - 0.1; let element = this.baseTl.tweenFromTo(i, nextPosition, { immediateRender: true, duration: 2, }); let tl = gsap .timeline({ repeat: 100 }) .to(element, 1, { x: '+=' + this.state.wrapWidth }, 0) .to(element, this.cellStep, { repeat: 100, yoyo: false }, i); i = i - 0.1; let baseTl = this.baseTl.add(tl, i); this.status = false; } nextSlide() { let i = this.state.index; let nextPosition = i + 0.1; let el = this.cells[i]; let element = this.baseTl.tweenFromTo(i, nextPosition, { immediateRender: true, duration: 2, }); let tl = gsap .timeline({ repeat: 100 }) .to(element, 1, { x: '+=' + this.state.wrapWidth }, 0) .to(element, this.cellStep, { repeat: 100, yoyo: false }, i); i = i + 0.1; let baseTl = this.baseTl.add(tl, i); this.status = false; } } <file_sep>/src/app/draggable-picker/draggable-picker.component.spec.ts import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { DraggablePickerComponent } from './draggable-picker.component'; describe('DraggablePickerComponent', () => { let component: DraggablePickerComponent; let fixture: ComponentFixture<DraggablePickerComponent>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ DraggablePickerComponent ] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(DraggablePickerComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); }); <file_sep>/src/app/helpers/data.ts import { CarouselModel } from '../carousel/models/CarouselModel'; import { SliderCarouselModel } from '../carousel/models/SliderCarouseModel'; export const data: CarouselModel[] = [ { title: '<NAME>', url: './assets/2.jpg' }, { title: 'Unknown', url: './assets/1.png' }, { title: '<NAME>', url: './assets/4.jpg' }, { title: '<NAME>', url: './assets/5.jpg' }, { title: 'Unknown', url: './assets/6.jpg' }, { title: 'Unknown', url: './assets/7.jpg' }, { title: 'Unknown', url: './assets/8.jpg' }, ]; // SliderBaseModel export const sliderData: SliderCarouselModel[] = [ { title: 'Home Internet', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Add a Phone-line', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Home Internet', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Get a device', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Upgrade', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Mobile Internet', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Home Internet', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, { title: 'Add a Phone-line', imageUrl: 'http://www.bilingo.co.za/mtn-digital/assets/small-card.png', }, ]; <file_sep>/src/app/carousel/models/CarouselModel.ts // we can add more properties if we wish export interface CarouselModel { url: string; title: string; }
e17fcc20b390d26cc38ad7bafb51f21542ddd3a7
[ "TypeScript" ]
6
TypeScript
roclet/angular-card-slider
2e97674e293dd2e1f0b3db6f03c57accad2c0482
e3331e94983cb7f708883ffaaadbecd91f399717
refs/heads/master
<file_sep>//
// ThirdViewController.swift
// BuggyApp
//
// Created by <NAME> on 28/6/19.
// Copyright © 2019 scbeasy. All rights reserved.
//

import UIKit

class ThirdViewController: UIViewController {

    @IBOutlet weak var mBtn: UIButton!
    @IBOutlet weak var mText: UITextField!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    @IBAction func onClick(_ sender: AnyObject) {
        // let storyboard = UIStoryboard(name: "Main", bundle: nil)
        // let secondController = storyboard.instantiateViewController(withIdentifier: "secondary") as! SecondaryViewController
        // SecondaryViewController.mL.text = "Hello!"
        present(secondController, animated: true)
    }
}
57880915cd673c9d42bd75a05dbf98810e814072
[ "Swift" ]
1
Swift
plueksaporn/BuggyApp-iOS
23ded0db5815cefe0adbe145b3164a24d6f5ec9f
753da84b0d8ea6f8621feb4de3a7c70b10080ef1
refs/heads/master
<file_sep>var fs = require('fs');
var path = require('path');

var file = process.argv[2];
var type = '.' + process.argv[3];

fs.readdir(file, function Addone(err, list) {
    list.forEach(function (file) {
        if (path.extname(file) === type) {
            console.log(file);
        }
    });
});
f35fbcf61e7144f00b4f441b4138b617b4d494e6
[ "JavaScript" ]
1
JavaScript
narayananvbaradwaj/nodeschool-nodejs
0cd23ed46fbcd2b7ff9298a79f8d0d5104fd7be0
521eb1c616013424c7bf6ae8ab50a7c2e1681aa2
refs/heads/master
<file_sep>const router = require('express').Router(); const { getAll, getById, add, update, remove } = require('../controllers/MemoryController'); router.get('/', getAll); router.get('/:id', getById); router.post('/', add); router.put('/:id', update); router.delete('/:id', remove); module.exports = router; <file_sep>const mongoose = require('mongoose'); const options = { useNewUrlParser: true, useUnifiedTopology: true } mongoose.connect(process.env.MONGODB_CONNECTION_STRING, options) .then(_ => console.log('Veritabanı bağlantısı yapıldı...!')) .catch((err) => { console.error(err) }); <file_sep>const MemoryModel = require('../models/MemoryModel'); /** * Get All Memories * @param request * @param response * @param next * @returns {Promise<void>} */ const getAll = async function(request, response, next) { try { const memories = await MemoryModel.find({}); if(memories.length > 0) { await response.status(200).json({ status: 200, data: memories, result: 'OK' }); } else { await response.status(404).json({ status: 404, data: [], result: 'FAIL' }); } }catch(err) { throw err; } } /** * Get a memory by id * @param request * @param response * @param next * @returns {Promise<void>} */ const getById = async function(request, response, next) { try { const id = request.params.id; const memory = await MemoryModel.find({_id: id}); if(memory.length > 0) { await response.status(200).json({ status: 200, data: memory, result: 'OK' }); } else { await response.status(404).json({ status: 404, data: [], result: 'FAIL' }); } }catch(err) { throw err; } } /** * Add a new memory * @param request * @param response * @param next * @returns {Promise<void>} */ const add = async function(request, response, next) { try { const {title, content, creator, image} = request.body; const memory = new MemoryModel({title, content, creator, image}); await memory.save(); await response.status(201).json({ status: 201, data: memory, result: 'OK' }) }catch(err) { throw err; } } /** * Update a memory by new values * @param request * @param response * @param next * @returns {Promise<void>} */ const update = async function(request, response, next) { try { const id = request.params.id; const {title, content, creator, image} = request.body; const updatedMemory = await MemoryModel.findByIdAndUpdate(id, {title, content, creator, image}, {new: true}); await response.status(200).json({ status: 200, data: updatedMemory, result: 'OK' }) }catch(err) { throw err; } } /** * Remove a memory by id * @param request * @param response * @param next * @returns {Promise<void>} */ const remove = async function(request, response, next) { try { const id = request.params.id; const deleteMemory = await MemoryModel.findByIdAndRemove(id); await response.status(200).json({ status: 200, data: deleteMemory, result: 'OK' }) }catch(err) { throw err; } } module.exports = { getAll, getById, add, update, remove } <file_sep>const mongoose = require('mongoose'); const Schema = mongoose.Schema; const MemorySchema = new Schema({ title: { type: String, required: [true, '{PATH} alanı boş bırakılamaz'] }, content: { type: String, required: [true, '{PATH} alanı boş bırakılamaz'] }, creator: { type: String, required: [true, '{PATH} alanı boş bırakılamaz'] }, image: { type: String } }, { collection: 'memories', timestamps: true }); const MemoryModel = mongoose.model('memories', MemorySchema); module.exports = MemoryModel; <file_sep>const express = require('express'); const app = express(); app.use(express.json()); app.use(express.urlencoded({extended: true})); const dotenv = 
require('dotenv');
const port = process.env.PORT || 3000;

dotenv.config();

require('./config/database.config');

const MemoryRouter = require('./routers/MemoryRouter');
app.use('/memory', MemoryRouter);

app.listen(port, async () => {
    console.log(`Uygulama, ${port} portundan dinleniyor...`);
});
dfad63eb1be4c672f81df48253ae2f4354959146
[ "JavaScript" ]
5
JavaScript
biyonik/MemoryBox
1d98d25da540b1673aa73e106733f6259f6e2c84
153ee38dabbe4196bb79211f36cfbcb930cb2bd9
refs/heads/master
<repo_name>jeswa/DNA<file_sep>/core/transaction/payload/BookKeeping.go package payload import "io" type BookKeeping struct { } func (a *BookKeeping) Data() []byte { return []byte{0} } func (a *BookKeeping) Serialize(w io.Writer) error { return nil } func (a *BookKeeping) Deserialize(r io.Reader) error { return nil } <file_sep>/net/node/infoUpdate.go package node import ( "DNA/common/config" "DNA/common/log" "DNA/core/ledger" . "DNA/net/message" . "DNA/net/protocol" "fmt" "net" "strconv" "time" ) func keepAlive(from *Noder, dst *Noder) { // Need move to node function or keep here? } func (node *node) GetBlkHdrs() { if node.local.GetNbrNodeCnt() < MINCONNCNT { return } noders := node.local.GetNeighborNoder() for _, n := range noders { if uint64(ledger.DefaultLedger.Store.GetHeaderHeight()) < n.GetHeight() { if n.LocalNode().IsSyncFailed() == false { SendMsgSyncHeaders(n) n.StartRetryTimer() break } } } } func (node *node) SyncBlk() { headerHeight := ledger.DefaultLedger.Store.GetHeaderHeight() currentBlkHeight := ledger.DefaultLedger.Blockchain.BlockHeight if currentBlkHeight >= headerHeight { return } var dValue int32 var reqCnt uint32 var i uint32 noders := node.local.GetNeighborNoder() for _, n := range noders { n.RemoveFlightHeightLessThan(currentBlkHeight) count := MAXREQBLKONCE - uint32(n.GetFlightHeightCnt()) dValue = int32(headerHeight - currentBlkHeight - reqCnt) flights := n.GetFlightHeights() if count == 0 { for _, f := range flights { hash := ledger.DefaultLedger.Store.GetHeaderHashByHeight(f) ReqBlkData(n, hash) } } for i = 1; i <= count && dValue >= 0; i++ { hash := ledger.DefaultLedger.Store.GetHeaderHashByHeight(currentBlkHeight + reqCnt) ReqBlkData(n, hash) n.StoreFlightHeight(currentBlkHeight + reqCnt) reqCnt++ dValue-- } } } func (node *node) SendPingToNbr() { noders := node.local.GetNeighborNoder() for _, n := range noders { t := n.GetLastRXTime() if time.Since(t).Seconds() > PERIODUPDATETIME { if n.GetState() == ESTABLISH { buf, err := NewPingMsg() if err != nil { log.Error("failed build a new ping message") } else { go n.Tx(buf) } } } } } func (node *node) HeartBeatMonitor() { noders := node.local.GetNeighborNoder() for _, n := range noders { if n.GetState() == ESTABLISH { t := n.GetLastRXTime() if time.Since(t).Seconds() > (PERIODUPDATETIME * KEEPALIVETIMEOUT) { log.Warn("keepalive timeout!!!") n.SetState(INACTIVITY) n.CloseConn() } } } } func (node *node) ReqNeighborList() { buf, _ := NewMsg("getaddr", node.local) go node.Tx(buf) } func (node *node) ConnectSeeds() { if node.nbrNodes.GetConnectionCnt() == 0 { seedNodes := config.Parameters.SeedList for _, nodeAddr := range seedNodes { go node.Connect(nodeAddr) } } } func getNodeAddr(n *node) NodeAddr { var addr NodeAddr addr.IpAddr, _ = n.GetAddr16() addr.Time = n.GetTime() addr.Services = n.Services() addr.Port = n.GetPort() addr.ID = n.GetID() return addr } func (node *node) reconnect(peer *node) error { isTls := config.Parameters.IsTLS addr := getNodeAddr(peer) var ip net.IP ip = addr.IpAddr[:] nodeAddr := ip.To16().String() + ":" + strconv.Itoa(int(addr.Port)) log.Info("try to reconnect peer, peer addr is ", nodeAddr) var conn net.Conn var err error if isTls { conn, err = TLSDial(nodeAddr) if err != nil { return nil } } else { conn, err = NonTLSDial(nodeAddr) if err != nil { return nil } } t := time.Now() peer.UpdateRXTime(t) peer.tryTimes = 0 peer.conn = conn peer.addr, err = parseIPaddr(conn.RemoteAddr().String()) peer.local = node log.Info(fmt.Sprintf("Reconnect node %s connect with %s with %s", 
conn.LocalAddr().String(), conn.RemoteAddr().String(), conn.RemoteAddr().Network())) go peer.rx() if node.GetID() > peer.GetID() { peer.SetState(HAND) buf, _ := NewVersion(node) peer.Tx(buf) } return nil } func (node *node) TryConnect() { node.nbrNodes.RLock() defer node.nbrNodes.RUnlock() for _, n := range node.nbrNodes.List { if n.GetState() == INACTIVITY && n.tryTimes < 3 { //try to connect n.tryTimes++ node.reconnect(n) } } } // FIXME part of node info update function could be a node method itself intead of // a node map method // Fixme the Nodes should be a parameter func (node *node) updateNodeInfo() { ticker := time.NewTicker(time.Second * PERIODUPDATETIME) quit := make(chan struct{}) for { select { case <-ticker.C: node.ConnectSeeds() node.SendPingToNbr() node.GetBlkHdrs() node.SyncBlk() node.HeartBeatMonitor() node.TryConnect() case <-quit: ticker.Stop() return } } // TODO when to close the timer //close(quit) } <file_sep>/net/httprestful/server.go package httprestful import ( . "DNA/common/config" "DNA/common/log" "DNA/core/ledger" "DNA/events" "DNA/net/httprestful/common" Err "DNA/net/httprestful/error" . "DNA/net/httprestful/restful" . "DNA/net/protocol" "strconv" ) const OAUTH_SUCCESS_CODE = "r0000" func StartServer(n Noder) { common.SetNode(n) ledger.DefaultLedger.Blockchain.BCEvents.Subscribe(events.EventBlockPersistCompleted, SendBlock2NoticeServer) func() common.ApiServer { rest := InitRestServer(checkAccessToken) go rest.Start() return rest }() } func SendBlock2NoticeServer(v interface{}) { if len(Parameters.NoticeServerAddr) == 0 || !common.CheckPushBlock() { return } go func() { req := make(map[string]interface{}) req["Height"] = strconv.FormatInt(int64(ledger.DefaultLedger.Blockchain.BlockHeight), 10) req = common.GetBlockByHeight(req) repMsg, _ := common.PostRequest(req, Parameters.NoticeServerAddr) if repMsg[""] == nil { //TODO } }() } func checkAccessToken(auth_type, access_token string) (cakey string, errCode int64, result interface{}) { if len(Parameters.OauthServerAddr) == 0 { return "", Err.SUCCESS, "" } req := make(map[string]interface{}) req["token"] = access_token req["auth_type"] = auth_type repMsg, err := common.OauthRequest("GET", req, Parameters.OauthServerAddr) if err != nil { log.Error("Oauth timeout:", err) return "", Err.OAUTH_TIMEOUT, repMsg } if repMsg["code"] == OAUTH_SUCCESS_CODE { msg, ok := repMsg["msg"].(map[string]interface{}) if !ok { return "", Err.INVALID_TOKEN, repMsg } if CAkey, ok := msg["cakey"].(string); ok { return CAkey, Err.SUCCESS, repMsg } } return "", Err.INVALID_TOKEN, repMsg } <file_sep>/core/transaction/payload/IssueAsset.go package payload import "io" type IssueAsset struct { } func (a *IssueAsset) Data() []byte { //TODO: implement IssueAsset.Data() return []byte{0} } func (a *IssueAsset) Serialize(w io.Writer) error { return nil } func (a *IssueAsset) Deserialize(r io.Reader) error { return nil } <file_sep>/net/httprestful/restful/server.go package restful import ( . "DNA/common/config" "DNA/common/log" . 
"DNA/net/httprestful/common" Err "DNA/net/httprestful/error" "context" "crypto/tls" "encoding/json" "io/ioutil" "net" "net/http" "strconv" "strings" "sync" "time" ) type handler func(map[string]interface{}) map[string]interface{} type Action struct { sync.RWMutex name string handler handler } type restServer struct { router *Router listener net.Listener server *http.Server postMap map[string]Action getMap map[string]Action checkAccessToken func(auth_type, access_token string) (string, int64, interface{}) } const ( Api_Getconnectioncount = "/api/v1/node/connectioncount" Api_Getblockbyheight = "/api/v1/block/details/height/:height" Api_Getblockbyhash = "/api/v1/block/details/hash/:hash" Api_Getblockheight = "/api/v1/block/height" Api_Gettransaction = "/api/v1/transaction/:hash" Api_Getasset = "/api/v1/asset/:hash" Api_GetUnspendOutput = "/api/v1/asset/unspendoutput" Api_Restart = "/api/v1/restart" Api_SendRawTransaction = "/api/v1/transaction" Api_SendCustomRecordTxByTransfer = "/api/v1/custom/transaction/record" Api_OauthServerAddr = "/api/v1/config/oauthserver/addr" Api_NoticeServerAddr = "/api/v1/config/noticeserver/addr" Api_NoticeServerState = "/api/v1/config/noticeserver/state" ) func InitRestServer(checkAccessToken func(string, string) (string, int64, interface{})) ApiServer { rt := &restServer{} rt.checkAccessToken = checkAccessToken rt.router = NewRouter() rt.registryMethod() rt.initGetHandler() rt.initPostHandler() return rt } func (rt *restServer) Start() error { if Parameters.HttpRestPort == 0 { log.Fatal("Not configure HttpRestPort port ") return nil } tlsFlag := false if tlsFlag || Parameters.HttpRestPort%1000 == 443 { var err error rt.listener, err = rt.initTlsListen() if err != nil { log.Error("Https Cert: ", err.Error()) return err } } else { var err error rt.listener, err = net.Listen("tcp", ":"+strconv.Itoa(Parameters.HttpRestPort)) if err != nil { log.Fatal("net.Listen: ", err.Error()) return err } } rt.server = &http.Server{Handler: rt.router} err := rt.server.Serve(rt.listener) if err != nil { log.Fatal("ListenAndServe: ", err.Error()) return err } return nil } func (rt *restServer) registryMethod() { getMethodMap := map[string]Action{ Api_Getconnectioncount: {name: "getconnectioncount", handler: GetConnectionCount}, Api_Getblockbyheight: {name: "getblockbyheight", handler: GetBlockByHeight}, Api_Getblockbyhash: {name: "getblockbyhash", handler: GetBlockByHash}, Api_Getblockheight: {name: "getblockheight", handler: GetBlockHeight}, Api_Gettransaction: {name: "gettransaction", handler: GetTransactionByHash}, Api_Getasset: {name: "getasset", handler: GetAssetByHash}, Api_GetUnspendOutput: {name: "getunspendoutput", handler: GetUnspendOutput}, Api_OauthServerAddr: {name: "getoauthserveraddr", handler: GetOauthServerAddr}, Api_NoticeServerAddr: {name: "getnoticeserveraddr", handler: GetNoticeServerAddr}, Api_Restart: {name: "restart", handler: rt.Restart}, } postMethodMap := map[string]Action{ Api_SendRawTransaction: {name: "sendrawtransaction", handler: SendRawTransaction}, Api_SendCustomRecordTxByTransfer: {name: "sendrecord", handler: SendRecorByTransferTransaction}, Api_OauthServerAddr: {name: "setoauthserveraddr", handler: SetOauthServerAddr}, Api_NoticeServerAddr: {name: "setnoticeserveraddr", handler: SetNoticeServerAddr}, Api_NoticeServerState: {name: "setpostblock", handler: SetPushBlockFlag}, } rt.postMap = postMethodMap rt.getMap = getMethodMap } func (rt *restServer) getPath(url string) string { if strings.Contains(url, strings.TrimRight(Api_Getblockbyheight, 
":height")) { return Api_Getblockbyheight } else if strings.Contains(url, strings.TrimRight(Api_Getblockbyhash, ":hash")) { return Api_Getblockbyhash } else if strings.Contains(url, strings.TrimRight(Api_Gettransaction, ":hash")) { return Api_Gettransaction } else if strings.Contains(url, strings.TrimRight(Api_Getasset, ":hash")) { if url != Api_GetUnspendOutput { return Api_Getasset } } return url } func (rt *restServer) initGetHandler() { for k, _ := range rt.getMap { rt.router.Get(k, func(w http.ResponseWriter, r *http.Request) { var reqMsg = make(map[string]interface{}) var data []byte var err error var resp map[string]interface{} access_token := r.FormValue("access_token") auth_type := r.FormValue("auth_type") CAkey, errCode, result := rt.checkAccessToken(auth_type, access_token) if errCode > 0 && r.URL.Path != Api_OauthServerAddr { resp = ResponsePack(errCode) resp["Result"] = result goto ResponseWrite } if h, ok := rt.getMap[rt.getPath(r.URL.Path)]; ok { reqMsg["Height"] = getParam(r, "height") reqMsg["Hash"] = getParam(r, "hash") reqMsg["CAkey"] = CAkey reqMsg["Raw"] = r.FormValue("raw") reqMsg["Addr"] = r.FormValue("addr") reqMsg["Assetid"] = r.FormValue("assetid") resp = h.handler(reqMsg) resp["Action"] = h.name } else { resp = ResponsePack(Err.INVALID_METHOD) } ResponseWrite: resp["Desc"] = Err.ErrMap[resp["Error"].(int64)] data, err = json.Marshal(resp) if err != nil { log.Fatal("HTTP Handle - json.Marshal: %v", err) return } w.Header().Add("Access-Control-Allow-Headers", "Content-Type") w.Header().Set("content-type", "application/json") w.Header().Set("Access-Control-Allow-Origin", "*") w.Write([]byte(data)) }) } } func (rt *restServer) initPostHandler() { for k, _ := range rt.postMap { rt.router.Post(k, func(w http.ResponseWriter, r *http.Request) { body, _ := ioutil.ReadAll(r.Body) defer r.Body.Close() var reqMsg = make(map[string]interface{}) var data []byte var err error access_token := r.FormValue("access_token") auth_type := r.FormValue("auth_type") var resp map[string]interface{} CAkey, errCode, result := rt.checkAccessToken(auth_type, access_token) if errCode > 0 && r.URL.Path != Api_OauthServerAddr { resp = ResponsePack(errCode) resp["Result"] = result goto ResponseWrite } if h, ok := rt.postMap[rt.getPath(r.URL.Path)]; ok { if err = json.Unmarshal(body, &reqMsg); err == nil { reqMsg["CAkey"] = CAkey reqMsg["Raw"] = r.FormValue("raw") resp = h.handler(reqMsg) resp["Action"] = h.name } else { resp = ResponsePack(Err.ILLEGAL_DATAFORMAT) resp["Action"] = h.name data, _ = json.Marshal(resp) } } ResponseWrite: resp["Desc"] = Err.ErrMap[resp["Error"].(int64)] data, err = json.Marshal(resp) if err != nil { log.Fatal("HTTP Handle - json.Marshal: %v", err) return } w.Header().Add("Access-Control-Allow-Headers", "Content-Type") w.Header().Set("content-type", "application/json") w.Header().Set("Access-Control-Allow-Origin", "*") w.Write([]byte(data)) }) } //Options for k, _ := range rt.postMap { rt.router.Options(k, func(w http.ResponseWriter, r *http.Request) { w.Header().Add("Access-Control-Allow-Headers", "Content-Type") w.Header().Set("content-type", "application/json") w.Header().Set("Access-Control-Allow-Origin", "*") w.Write([]byte{}) }) } } func (rt *restServer) Stop() { rt.server.Shutdown(context.Background()) } func (rt *restServer) Restart(cmd map[string]interface{}) map[string]interface{} { go func() { time.Sleep(time.Second) rt.Stop() time.Sleep(time.Second) go rt.Start() }() var resp = ResponsePack(Err.SUCCESS) return resp } func (rt *restServer) initTlsListen() 
(net.Listener, error) { CertPath := Parameters.RestCertPath KeyPath := Parameters.RestKeyPath // load cert cert, err := tls.LoadX509KeyPair(CertPath, KeyPath) if err != nil { log.Error("load keys fail", err) return nil, err } tlsConfig := &tls.Config{ Certificates: []tls.Certificate{cert}, } log.Info("TLS listen port is ", strconv.Itoa(Parameters.HttpRestPort)) listener, err := tls.Listen("tcp", ":"+strconv.Itoa(Parameters.HttpRestPort), tlsConfig) if err != nil { log.Error(err) return nil, err } return listener, nil } <file_sep>/core/transaction/payload/DeployCode.go package payload import ( . "DNA/core/code" "DNA/common/serialization" "io" ) type DeployCode struct { Code *FunctionCode Name string CodeVersion string Author string Email string Description string } func (dc *DeployCode) Data() []byte { // TODO: Data() return []byte{0} } func (dc *DeployCode) Serialize(w io.Writer) error { err := dc.Code.Serialize(w) if err != nil { return err } err = serialization.WriteVarString(w,dc.Name) if err != nil { return err } err = serialization.WriteVarString(w,dc.CodeVersion) if err != nil { return err } err = serialization.WriteVarString(w,dc.Author) if err != nil { return err } err = serialization.WriteVarString(w,dc.Email) if err != nil { return err } err = serialization.WriteVarString(w,dc.Description) if err != nil { return err } return nil } func (dc *DeployCode) Deserialize(r io.Reader) error { err := dc.Code.Deserialize(r) if err != nil { return err } dc.Name,err = serialization.ReadVarString(r) if err != nil { return err } dc.CodeVersion,err = serialization.ReadVarString(r) if err != nil { return err } dc.Author,err = serialization.ReadVarString(r) if err != nil { return err } dc.Email,err = serialization.ReadVarString(r) if err != nil { return err } dc.Description,err = serialization.ReadVarString(r) if err != nil { return err } return nil } <file_sep>/core/transaction/payload/TransferAsset.go package payload import "io" type TransferAsset struct { } func (a *TransferAsset) Data() []byte { //TODO: implement TransferAsset.Data() return []byte{0} } func (a *TransferAsset) Serialize(w io.Writer) error { return nil } func (a *TransferAsset) Deserialize(r io.Reader) error { return nil } <file_sep>/net/httprestful/common/common.go package common import ( . "DNA/common" . "DNA/common/config" "DNA/core/ledger" tx "DNA/core/transaction" . "DNA/net/httpjsonrpc" Err "DNA/net/httprestful/error" . 
"DNA/net/protocol" "bytes" "encoding/hex" "encoding/json" "io/ioutil" "net" "net/http" "regexp" "strconv" "time" ) var node Noder var pushBlockFlag bool = true var oauthClient = NewOauthClient() type ApiServer interface { Start() error Stop() } func SetNode(n Noder) { node = n } func CheckPushBlock() bool { return pushBlockFlag } //Node func GetConnectionCount(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) if node != nil { resp["Result"] = node.GetConnectionCnt() } return resp } //Block func GetBlockHeight(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) resp["Result"] = ledger.DefaultLedger.Blockchain.BlockHeight return resp } func getBlock(hash Uint256, getTxBytes bool) (interface{}, int64) { block, err := ledger.DefaultLedger.Store.GetBlock(hash) if err != nil { return "", Err.UNKNOWN_BLOCK } if getTxBytes { w := bytes.NewBuffer(nil) block.Serialize(w) return hex.EncodeToString(w.Bytes()), Err.SUCCESS } blockHead := &BlockHead{ Version: block.Blockdata.Version, PrevBlockHash: ToHexString(block.Blockdata.PrevBlockHash.ToArray()), TransactionsRoot: ToHexString(block.Blockdata.TransactionsRoot.ToArray()), Timestamp: block.Blockdata.Timestamp, Height: block.Blockdata.Height, ConsensusData: block.Blockdata.ConsensusData, NextBookKeeper: ToHexString(block.Blockdata.NextBookKeeper.ToArray()), Program: ProgramInfo{ Code: ToHexString(block.Blockdata.Program.Code), Parameter: ToHexString(block.Blockdata.Program.Parameter), }, Hash: ToHexString(hash.ToArray()), } trans := make([]*Transactions, len(block.Transactions)) for i := 0; i < len(block.Transactions); i++ { trans[i] = TransArryByteToHexString(block.Transactions[i]) } b := BlockInfo{ Hash: ToHexString(hash.ToArray()), BlockData: blockHead, Transactions: trans, } return b, Err.SUCCESS } func GetBlockByHash(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) param := cmd["Hash"].(string) if len(param) == 0 { resp["Error"] = Err.INVALID_PARAMS return resp } var getTxBytes bool = false if raw, ok := cmd["Raw"].(string); ok && raw == "1" { getTxBytes = true } var hash Uint256 hex, err := hex.DecodeString(param) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } if err := hash.Deserialize(bytes.NewReader(hex)); err != nil { resp["Error"] = Err.INVALID_TRANSACTION return resp } resp["Result"], resp["Error"] = getBlock(hash, getTxBytes) return resp } func GetBlockByHeight(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) param := cmd["Height"].(string) if len(param) == 0 { resp["Error"] = Err.INVALID_PARAMS return resp } var getTxBytes bool = false if raw, ok := cmd["Raw"].(string); ok && raw == "1" { getTxBytes = true } height, err := strconv.ParseInt(param, 10, 64) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } index := uint32(height) hash, err := ledger.DefaultLedger.Store.GetBlockHash(index) if err != nil { resp["Error"] = Err.UNKNOWN_BLOCK return resp } resp["Result"], resp["Error"] = getBlock(hash, getTxBytes) return resp } //Asset func GetAssetByHash(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) str := cmd["Hash"].(string) hex, err := hex.DecodeString(str) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } var hash Uint256 err = hash.Deserialize(bytes.NewReader(hex)) if err != nil { resp["Error"] = Err.INVALID_TRANSACTION return resp } asset, err := ledger.DefaultLedger.Store.GetAsset(hash) if err != nil { 
resp["Error"] = Err.UNKNOWN_TRANSACTION return resp } resp["Result"] = asset return resp } func GetUnspendOutput(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) addr := cmd["Addr"].(string) assetid := cmd["Assetid"].(string) var programHash Uint160 var assetHash Uint256 bys, err := hex.DecodeString(addr) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } if err := programHash.Deserialize(bytes.NewReader(bys)); err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } bys, err = hex.DecodeString(assetid) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } if err := assetHash.Deserialize(bytes.NewReader(bys)); err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } type TxOutputInfo struct { AssetID string Value int64 ProgramHash string } outputs := make(map[string]*TxOutputInfo) height := ledger.DefaultLedger.GetLocalBlockChainHeight() var i uint32 // construct global UTXO table for i = 0; i <= height; i++ { block, err := ledger.DefaultLedger.GetBlockWithHeight(i) if err != nil { resp["Error"] = Err.INTERNAL_ERROR return resp } // skip the bookkeeping transaction for _, t := range block.Transactions[1:] { // skip the register transaction if t.TxType == tx.RegisterAsset { continue } txHash := t.Hash() txHashHex := ToHexString(txHash.ToArray()) for i, output := range t.Outputs { if output.AssetID.CompareTo(assetHash) == 0 && output.ProgramHash.CompareTo(programHash) == 0 { key := txHashHex + ":" + strconv.Itoa(i) asset := ToHexString(output.AssetID.ToArray()) pHash := ToHexString(output.ProgramHash.ToArray()) value := int64(output.Value) info := &TxOutputInfo{ asset, value, pHash, } outputs[key] = info } } } } // delete spent output from global UTXO table height = ledger.DefaultLedger.GetLocalBlockChainHeight() for i = 0; i <= height; i++ { block, err := ledger.DefaultLedger.GetBlockWithHeight(i) if err != nil { return DnaRpcInternalError } // skip the bookkeeping transaction for _, t := range block.Transactions[1:] { // skip the register transaction if t.TxType == tx.RegisterAsset { continue } for _, input := range t.UTXOInputs { refer := ToHexString(input.ReferTxID.ToArray()) index := strconv.Itoa(int(input.ReferTxOutputIndex)) key := refer + ":" + index delete(outputs, key) } } } resp["Result"] = outputs return resp } //Transaction func GetTransactionByHash(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) str := cmd["Hash"].(string) bys, err := hex.DecodeString(str) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } var hash Uint256 err = hash.Deserialize(bytes.NewReader(bys)) if err != nil { resp["Error"] = Err.INVALID_TRANSACTION return resp } tx, err := ledger.DefaultLedger.Store.GetTransaction(hash) if err != nil { resp["Error"] = Err.UNKNOWN_TRANSACTION return resp } if raw, ok := cmd["Raw"].(string); ok && raw == "1" { w := bytes.NewBuffer(nil) tx.Serialize(w) resp["Result"] = hex.EncodeToString(w.Bytes()) return resp } tran := TransArryByteToHexString(tx) resp["Result"] = tran return resp } func SendRawTransaction(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) str, ok := cmd["Data"].(string) if !ok { resp["Error"] = Err.INVALID_PARAMS return resp } hex, err := hex.DecodeString(str) if err != nil { resp["Error"] = Err.INVALID_PARAMS return resp } var txn tx.Transaction if err := txn.Deserialize(bytes.NewReader(hex)); err != nil { resp["Error"] = Err.INVALID_TRANSACTION return resp } var hash Uint256 hash = txn.Hash() 
if err := VerifyAndSendTx(&txn); err != nil { resp["Error"] = Err.INTERNAL_ERROR return resp } resp["Result"] = ToHexString(hash.ToArray()) return resp } //record func getRecordData(cmd map[string]interface{}) ([]byte, int64) { if raw, ok := cmd["Raw"].(string); ok && raw == "1" { str, ok := cmd["RecordData"].(string) if !ok { return nil, Err.INVALID_PARAMS } bys, err := hex.DecodeString(str) if err != nil { return nil, Err.INVALID_PARAMS } return bys, Err.SUCCESS } type Data struct { Algrithem string `json:Algrithem` Desc string `json:Desc` Hash string `json:Hash` Text string `json:Text` Signature string `json:Signature` } type RecordData struct { CAkey string `json:CAkey` Data Data `json:Data` SeqNo string `json:SeqNo` Timestamp float64 `json:Timestamp` //TrdPartyTimestamp float64 `json:TrdPartyTimestamp` } tmp := &RecordData{} reqRecordData, ok := cmd["RecordData"].(map[string]interface{}) if !ok { return nil, Err.INVALID_PARAMS } reqBtys, err := json.Marshal(reqRecordData) if err != nil { return nil, Err.INVALID_PARAMS } if err := json.Unmarshal(reqBtys, tmp); err != nil { return nil, Err.INVALID_PARAMS } tmp.CAkey, ok = cmd["CAkey"].(string) if !ok || tmp.Timestamp == 0 || len(tmp.Data.Hash) == 0 || len(tmp.Data.Algrithem) == 0 || len(tmp.Data.Desc) == 0 { return nil, Err.INVALID_PARAMS } repBtys, err := json.Marshal(tmp) if err != nil { return nil, Err.INVALID_PARAMS } return repBtys, Err.SUCCESS } func SendRecorByTransferTransaction(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) var recordData []byte recordData, resp["Error"] = getRecordData(cmd) if recordData == nil { return resp } var inputs []*tx.UTXOTxInput var outputs []*tx.TxOutput transferTx, _ := tx.NewTransferAssetTransaction(inputs, outputs) record := tx.NewTxAttribute(tx.Description, recordData) transferTx.Attributes = append(transferTx.Attributes, &record) hash := transferTx.Hash() resp["Result"] = ToHexString(hash.ToArray()) if err := VerifyAndSendTx(transferTx); err != nil { resp["Error"] = Err.INTERNAL_ERROR return resp } return resp } func SendRecodTransaction(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) var recordData []byte recordData, resp["Error"] = getRecordData(cmd) if recordData == nil { return resp } recordType := "record" recordTx, _ := tx.NewRecordTransaction(recordType, recordData) hash := recordTx.Hash() resp["Result"] = ToHexString(hash.ToArray()) if err := VerifyAndSendTx(recordTx); err != nil { resp["Error"] = Err.INTERNAL_ERROR return resp } return resp } //config func GetOauthServerAddr(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) resp["Result"] = Parameters.OauthServerAddr return resp } func SetOauthServerAddr(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) addr, ok := cmd["Addr"].(string) if !ok { resp["Error"] = Err.INVALID_PARAMS return resp } if len(addr) > 0 { var reg *regexp.Regexp pattern := `((http|https)://)(([a-zA-Z0-9\._-]+\.[a-zA-Z]{2,6})|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}))(:[0-9]{1,4})*(/[a-zA-Z0-9\&%_\./-~-]*)?` reg = regexp.MustCompile(pattern) if !reg.Match([]byte(addr)) { resp["Error"] = Err.INVALID_PARAMS return resp } } Parameters.OauthServerAddr = addr resp["Result"] = Parameters.OauthServerAddr return resp } func GetNoticeServerAddr(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) resp["Result"] = Parameters.NoticeServerAddr return resp } func SetPushBlockFlag(cmd 
map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) start, ok := cmd["Open"].(bool) if !ok { resp["Error"] = Err.INVALID_PARAMS return resp } if start { pushBlockFlag = true } else { pushBlockFlag = false } resp["Result"] = pushBlockFlag return resp } func SetNoticeServerAddr(cmd map[string]interface{}) map[string]interface{} { resp := ResponsePack(Err.SUCCESS) addr, ok := cmd["Addr"].(string) if !ok || len(addr) == 0 { resp["Error"] = Err.INVALID_PARAMS return resp } var reg *regexp.Regexp pattern := `((http|https)://)(([a-zA-Z0-9\._-]+\.[a-zA-Z]{2,6})|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}))(:[0-9]{1,4})*(/[a-zA-Z0-9\&%_\./-~-]*)?` reg = regexp.MustCompile(pattern) if !reg.Match([]byte(addr)) { resp["Error"] = Err.INVALID_PARAMS return resp } Parameters.NoticeServerAddr = addr resp["Result"] = Parameters.NoticeServerAddr return resp } func ResponsePack(errCode int64) map[string]interface{} { resp := map[string]interface{}{ "Action": "", "Result": "", "Error": errCode, "Desc": "", "Version": "1.0.0", } return resp } func PostRequest(cmd map[string]interface{}, url string) (map[string]interface{}, error) { var repMsg = make(map[string]interface{}) data, err := json.Marshal(cmd) if err != nil { return repMsg, err } reqData := bytes.NewBuffer(data) transport := http.Transport{ Dial: func(netw, addr string) (net.Conn, error) { conn, err := net.DialTimeout(netw, addr, time.Second*10) if err != nil { return nil, err } conn.SetDeadline(time.Now().Add(time.Second * 10)) return conn, nil }, DisableKeepAlives: false, } client := &http.Client{Transport: &transport} request, err := http.NewRequest("POST", url, reqData) if err != nil { return repMsg, err } request.Header.Set("Content-type", "application/json") response, err := client.Do(request) if response != nil { defer response.Body.Close() if response.StatusCode == 200 { body, _ := ioutil.ReadAll(response.Body) if err := json.Unmarshal(body, &repMsg); err == nil { return repMsg, err } } } if err != nil { return repMsg, err } return repMsg, err } func NewOauthClient() *http.Client { c := &http.Client{ Transport: &http.Transport{ Dial: func(netw, addr string) (net.Conn, error) { conn, err := net.DialTimeout(netw, addr, time.Second*10) if err != nil { return nil, err } conn.SetDeadline(time.Now().Add(time.Second * 10)) return conn, nil }, DisableKeepAlives: false, }, } return c } func OauthRequest(method string, cmd map[string]interface{}, url string) (map[string]interface{}, error) { var repMsg = make(map[string]interface{}) var response *http.Response var err error switch method { case "GET": req, err := http.NewRequest("GET", url, nil) if err != nil { return repMsg, err } response, err = oauthClient.Do(req) case "POST": data, err := json.Marshal(cmd) if err != nil { return repMsg, err } reqData := bytes.NewBuffer(data) req, err := http.NewRequest("POST", url, reqData) if err != nil { return repMsg, err } req.Header.Set("Content-type", "application/json") response, err = oauthClient.Do(req) default: return repMsg, err } if response != nil { defer response.Body.Close() body, _ := ioutil.ReadAll(response.Body) if err := json.Unmarshal(body, &repMsg); err == nil { return repMsg, err } } if err != nil { return repMsg, err } return repMsg, err } <file_sep>/consensus/dbft/consensusState.go package dbft type ConsensusState byte const ( Initial ConsensusState = 0x00 Primary ConsensusState = 0x01 Backup ConsensusState = 0x02 RequestSent ConsensusState = 0x04 RequestReceived ConsensusState = 0x08 SignatureSent 
ConsensusState = 0x10 BlockSent ConsensusState = 0x20 ) func (state ConsensusState) HasFlag(flag ConsensusState) bool{ return (state & flag) == flag }
114ccbdcb9b0abb03c98dec8b4a79154743a4b52
[ "Go" ]
9
Go
jeswa/DNA
64e0522d2ca13f57afeb399981bbb062b2634ee7
3700361fe82f42f6b912da589e4e0ae1e9dc1c90
refs/heads/master
<repo_name>kjannenga/playlist-assessment<file_sep>/index.js
document.addEventListener('DOMContentLoaded', function () {

    function getAlbums(url) {
        return new Promise((resolve, reject) => {
            fetch(url)
                .then(res => {
                    if (!res.ok) {
                        if (res.status == 404) {
                            throw new Error("albums not found")
                        }
                    }
                    return res.json()
                })
                .then(json => {
                    resolve(json)
                })
                .catch(err => {
                    reject(err)
                    console.log(err)
                })
        })
    }

    getAlbums('https://lit-fortress-6467.herokuapp.com/object')
        .then(albums => {
            getThreeAlbums(albums)
            getallAlbums(albums)
        })
        .catch(err => {
            console.log(err)
        })

    function getThreeAlbums(albums) {
        for (let i = 0; i < 3; i++) {
            let max = Math.floor(albums.results.length);
            let num = Math.floor(Math.random() * max);
            var t = document.getElementById("over")
            var d = document.createElement('DIV');
            var x = document.createElement('IMG')
            x.setAttribute("src", "../images/" + albums.results[num].cover_art);
            x.setAttribute("width", "200");
            x.setAttribute("height", "200");
            d.append(x)
            t.append(d)
        }
    }

    function getallAlbums(albums) {
        for (let i = 0; i < albums.results.length; i++) {
            var t = document.getElementById("albumList")
            var d = document.createElement('DIV');
            var x = document.createElement('IMG')
            x.setAttribute("src", "../images/" + albums.results[i].cover_art);
            x.setAttribute("width", "200");
            x.setAttribute("height", "200");
            d.append(x)
            t.append(d)
        }
    }
})
fac602ae7247cb110694598ee7e4a99385a0d67b
[ "JavaScript" ]
1
JavaScript
kjannenga/playlist-assessment
0395ec42a00e5d7badc598b725f7ffcc6e6ca244
a131480cbd321857a655d69857d8475024fe01c5
refs/heads/master
<repo_name>fjh1997/alien_auto_login<file_sep>/README.md
# alien_auto_login
Original thread: https://keylol.com/forum.php?mod=viewthread&tid=567722&page=1#pid10946064
<br>Logging in to https://na.alienwarearena.com for 28 consecutive days earns an ARP reward, but logging in every day is a hassle. Since I happen to have an idle Linux server, I wrote a script to log in automatically.
## install dependency
First install Node.js and puppeteer (which bundles Chromium)
```bash
curl -sL https://deb.nodesource.com/setup_13.x | sudo -E bash -
sudo apt-get install -y nodejs # install nodejs
npm i puppeteer # install puppeteer; this downloads Chromium, so it takes a while. A US proxy can help, configured as follows:
# npm config set proxy http://127.0.0.1:8118
# npm config set https-proxy http://127.0.0.1:8118
```
## download and test
Then download the script and save it as alien_login.js
```bash
wget https://github.com/fjh1997/alien_auto_login/raw/master/alien_login.js
```
Then test it directly
```bash
node alien_login.js
```
## task schedule
If it reports that the login succeeded, it works.
You can then add a scheduled task to crontab; the format is: minute hour day-of-month month day-of-week command
```bash
# save the current crontab to the file mycron
crontab -l > mycron
# append the new command to mycron
echo "0 0 * * * node ~/alien_login.js" >> mycron
# install the new cron file
crontab mycron
```
fa2c62aada974b15460d72417ed3ab35789feeb2
[ "Markdown", "JavaScript" ]
2
Markdown
fjh1997/alien_auto_login
36ce4e1e858d89d79aaa51094a79b685c648cf8f
531aef4d24c6288c1be3792f3d2ef660061525c2
refs/heads/master
<file_sep>/** * Created by komal.chaudhary on 2/22/2016. */ //Service_Train_Fare_Enquiry angular.module("myApp").factory('Service_Train_Fare_Enquiry', function ($http, Key_Train_API) { return function getObjectJSON(train_name_no, source_code, dest_code, age, doj, quota_code) { var key = Key_Train_API(); var url = "http://api.railwayapi.com/fare/train/" + train_name_no + "/source/" + source_code + "/dest/" + dest_code + "/age/" + age + "/quota/" + quota_code + "/doj/" + doj + "/apikey/" + key + "/"; console.log(url); return $http.get(url); } }); <file_sep>/** * Created by komal.chaudhary on 2/21/2016. */ //Service_Seat_Availability /* * @name Service_Seat_Availability * return : Function * @param : train_name_no, source_code, dest_code, doj, class_code, quota_code * train_name_no :- Train Number * source_code :- Source Name / Code * dest_code :- Destination Name * doj :- Date of Joining * class_code :- Seat Class * quota_code :- Quota Codes */ angular.module("myApp").factory('Service_Seat_Availability', function ($http, Key_Train_API) { return function getObjectJSON(train_name_no, source_code, dest_code, doj, class_code, quota_code) { var key = Key_Train_API(); var url = "http://api.railwayapi.com/route/train/" + train_name_no + "/apikey/" + key + "/"; url="http://api.railwayapi.com/check_seat/train/" + train_name_no + "/source/" + source_code + "/dest/" + dest_code + "/date/" + doj + "/class/" + class_code + "/quota/" + quota_code + "/apikey/" + key + "/"; console.log(url); return $http.get(url); } }); <file_sep>/** * Created by komal.chaudhary on 2/19/2016. */ /* * Key_Train_API * return : Railway API Key */ angular.module("myApp").factory('Key_Train_API', function () { return function getKey() { // komal pcedq5108 // komal <EMAIL> wokwg8584 // pravin <EMAIL> wsjak4280 <-- // Narayan <EMAIL> hnvbp5114 // shashank <EMAIL> vpeoo7497 // Mohak <EMAIL> ocvhq2524 // <NAME> <EMAIL> ktgbb6932 // <EMAIL> xhwbb4814 // <EMAIL> kuoef4046 var key = "<KEY>"; return key; } });<file_sep>/** * Created by komal.chaudhary on 2/23/2016. */ //@name : Ctrl_Train_Between_Stations angular.module("myApp").controller('Ctrl_Train_Between_Stations', [ '$scope', '$injector' ,'$rootScope', Ctrl_Train_Between_Stations] ); function Ctrl_Train_Between_Stations( $scope, $injector, $rootScope ) { $scope.optSuggSrcCode = ""; $scope.optSuggDesCode = ""; $scope.txtSuggTrainDate = ""; $scope.betweens_form = true; // Used for Display Between Form, true->display false->hide $scope.availableStations = []; // Used for Auto Complete Station Names $scope.stationSrc = []; // used for holding all json data station name with it's code $scope.stationDes = []; // used for holding all json data station name with it's code $scope.Trains_Between_Station = []; // used for holding Trains_Between_Station $scope.iv_error = false; //display Error $scope.iv_error_msg = ""; // holds error message. $scope.displayLoaderAutocomplete_src = false; $scope.displayLoaderAutocomplete_des = false; $scope.displayMainLoader = false; $scope.blankDesResult = false; $rootScope.home_page = false; // hide Home Page var Service_Station_List = $injector.get("Service_Station_List"); //inject Service Manually... var Service_Intermediator = $injector.get("Service_Intermediator"); if ( Service_Intermediator.Train_Route.length !== 0 ) { $scope.routes = Service_Intermediator.Train_Route; } else { } /* * @name : txtStationName_keyUp * @type : Function * @desc : invoke on keyup event of both textbox i.e. 
source & destination station name * @param : evt, object, text * evt -> Event Object * obj -> Control Object on which event ocuures * text -> typed text in textbox */ $scope.txtStationName_keyUp = function(evt, obj, text){ if(text.length > 2) { $scope.displayLoaderAutocomplete_src = true; var promise = Service_Station_List(text); promise.then(function (response) { $scope.displayLoaderAutocomplete_src = false; $scope.stationSrc = response.data.station; for (var i = 0; i < response.data.station.length; i++) { $scope.availableStations[i] = response.data.station[i].fullname + " " + response.data.station[i].code; } $("#" + evt.target.id).autocomplete({ source: $scope.availableStations }); }); } } $scope.txtStationNameDes_keyUp = function(evt, obj, text){ if(text.length > 2) { $scope.displayLoaderAutocomplete_des = true; var promise = Service_Station_List(text); promise.then(function (response) { $scope.displayLoaderAutocomplete_des = false; $scope.stationDes = response.data.station; for (var i = 0; i < response.data.station.length; i++) { $scope.availableStations[i] = response.data.station[i].fullname + " " + response.data.station[i].code; } $("#" + evt.target.id).autocomplete({ source: $scope.availableStations }); }); } } /* @name : btnGetDetails * @type : Function * @desc : Click Event Function on Button Get Details..... * invoke Service_Trains_between_stations Service & get data from API */ $scope.btnGetDetails = function(){ var optSuggSrcCode = $scope.optSuggSrcCode; var optSuggDesCode = $scope.optSuggDesCode; var txtSuggTrainDate = $scope.txtSuggTrainDate; optSuggSrcCode = document.getElementById("optSuggSrcCode").value; optSuggDesCode = document.getElementById("optSuggDesCode").value; txtSuggTrainDate = document.getElementById("txtSuggDatePicker").value; var inputDate = new Date(txtSuggTrainDate); var sysDate = new Date(); $scope.optSuggSrcCode = optSuggSrcCode; $scope.optSuggDesCode = optSuggDesCode; var mydate = txtSuggTrainDate.split("-"); var mydate = txtSuggTrainDate.split("-"); var dd = $scope.txtSuggTrainDate.getDate(); var mm = $scope.txtSuggTrainDate.getMonth() + 1; var yyyy = $scope.txtSuggTrainDate.getFullYear(); txtSuggTrainDate = dd + "-" + mm; $scope.txtSuggTrainDate = txtSuggTrainDate + "-" + yyyy; //Validation var d = new Date(); if(optSuggSrcCode.trim().length === 0 ) { $scope.iv_error = true; $scope.iv_error_msg = "Error : Invalid Source Station Name"; } else if(optSuggDesCode.trim().length === 0) { $scope.iv_error = true; $scope.iv_error_msg = "Error : Invalid Destination Station Name"; } else if(txtSuggTrainDate.trim().length === 0){ $scope.iv_error = true; $scope.iv_error_msg = "Error : Enter Valid Date"; } else if(!mydate[0] || !mydate[1] || !mydate[2]){ $scope.iv_error = true; $scope.iv_error_msg = "Error : Enter Valid Date"; } else if( inputDate < sysDate ){ if((inputDate.getDay() === sysDate.getDay() && inputDate.getMonth() === sysDate.getMonth() && inputDate.getFullYear() === sysDate.getFullYear() )){ $scope.Trains_between_stations(optSuggSrcCode, optSuggDesCode, txtSuggTrainDate); } else{ $scope.iv_error = true; $scope.iv_error_msg = "Error : Date must be greater than equals to Today's Date"; } } else { $scope.iv_error_msg = ""; $scope.blankDesResult = false; //convert station name into station code from API var SrcName = optSuggSrcCode ; var DesName = optSuggDesCode ; optSuggSrcCode = optSuggSrcCode.split(" ").splice(-1)[0]; optSuggDesCode = optSuggDesCode.split(" ").splice(-1)[0]; $scope.Trains_between_stations(optSuggSrcCode,SrcName, optSuggDesCode, DesName, 
txtSuggTrainDate); } } $scope.Trains_between_stations = function(optSuggSrcCode, SrcName, optSuggDesCode, DesName, txtSuggTrainDate){ $scope.blankDesResult = false; ///manually Inject Service_Trains_between_stations Sewrvice //param : source, dest, date // Source : Source Station name // Destination : Destination Station name // date : Journey Date in DD-MM Format $scope.displayMainLoader = true; var Service_Trains_between_stations = $injector.get("Service_Trains_between_stations"); var promise = Service_Trains_between_stations(optSuggSrcCode, optSuggDesCode, txtSuggTrainDate); promise.then(function (response) { $scope.displayMainLoader = false; $scope.Trains_Between_Station = response.data.train; if(response.data.train.length === 0){ //display error alert("There is no direct reserved train"); $scope.blankDesResult = true; $scope.blankDesResultMag = "There is no direct reserved train from " + SrcName + " to " + DesName; $scope.betweens_form = true; } else $scope.betweens_form = false; }) .catch( function (reason) { $scope.betweens_form = true; $scope.displayMainLoader = false; }); } /* * @name : GoTrainRoute * @type : Function * @desc : Invoked when result row clicked * @param : train No will be send to Train Route View */ $scope.GoTrainRoute = function(train_no){ var location = $injector.get("$location"); location.path("/landing_page:" + train_no); } } <file_sep>/** * Created by komal.chaudhary on 2/20/2016. */ angular.module("myApp").controller('Ctrl_landing_page', [ '$scope', '$injector','$rootScope', Ctrl_landing_page] ); function Ctrl_landing_page( $scope, $injector, $rootScope ) { //Manually Inject all requied things var routeParams = $injector.get("$routeParams"); var Service_Train_Route_Information = $injector.get("Service_Train_Route_Information"); $rootScope.home_page = false; // hide Home Page //Declare Required Variable in Landing Page Controller $scope.train = ""; //contains info about selected train with $scope.days = ["-"]; //contain info about train avaiability on days $scope.routes = []; //contain train route info $scope.JN_Names = []; //contain train route info $scope.sourceCode = ""; // used for passing it to Service Seat Availability $scope.desCode = ""; // used for passing it to Service Seat Availability $scope.seat_availability = []; // used for holding seat_availability API $scope.Train_Fare_Enquiry = []; // used for hloding Train Fare Enquiry $rootScope.showGoogleMap = false; //Request for Train Toute Information JSON using train no var train_no = routeParams.train_no.replace(":",""); $rootScope.train_route_loader = true; var promise = Service_Train_Route_Information(train_no); promise.then(function(response) { var Service_Intermediator = $injector.get("Service_Intermediator"); var Service_Intermediator_Selected_Train = $injector.get("Service_Intermediator_Selected_Train"); $scope.train = response.data.train; $scope.routes = response.data.route; for(var i = 0; i < response.data.train.days.length; i++){ if(response.data.train.days[i].runs === "Y") { $scope.days[i] = response.data.train.days[i]["day-code"]; } } $scope.txtTrainNo = $scope.train.number; $rootScope.train_route_loader = false; // hides the loader $rootScope.showGoogleMap = true; // show google map options Service_Intermediator.Train_Route = $scope.routes; //store Route info into the Intermediator Service Service_Intermediator_Selected_Train.Train = $scope.train; //store Train Into into Intermediator Service //$scope.displayMap(); } ); } //Ctrl_landing_page<file_sep>/** * Created by komal.chaudhary on 
2/19/2016. */ //Factory For Get Autocomplete JSON /* * Service_TrainAutoComplete * Inject : $http & Ket_Train_API which contain kEy * return function getObjectJSON(train_name_no) {} * accept train No. or name and return JSON Object */ angular.module("myApp").factory('Service_TrainAutoComplete', function ($http, Key_Train_API) { return function getObjectJSON(train_name_no) { var key = Key_Train_API(); var url = "http://api.railwayapi.com/suggest_train/trains/" + train_name_no + "/apikey/" + key + "/"; //console.log(url); url = "json/AllTrains.json"; return $http.get(url); } }); //C:\Users\komal.chaudhary\WebstormProjects\Train_Journey\app\json\AllTrains.json<file_sep>/** * Created by komal.chaudhary on 2/24/2016. */ angular.module("myApp").controller('Ctrl_Fare_Enquiry', [ '$scope', '$injector','$rootScope', Ctrl_Fare_Enquiry] ); function Ctrl_Fare_Enquiry( $scope, $injector, $rootScope ) { $rootScope.home_page = false; // hide Home Page //manually inject Service_Intermediator_Selected_Train var Service_Intermediator_Selected_Train = $injector.get("Service_Intermediator_Selected_Train"); $scope.train = Service_Intermediator_Selected_Train.Train; var Service_Intermediator = $injector.get("Service_Intermediator"); $scope.routes = Service_Intermediator.Train_Route; if($scope.routes.length === 0) { var location = $injector.get("$location"); location.path("#" ); $rootScope.btnHome_Click(); } $scope.errorMsg = ""; $scope.displayError = false; $scope.txtTrainNo = $scope.train.number; $scope.displayMainLoader = false; $scope.displayResult = false; /* * @name : btnGetFare * @desc : Invoked on Click Event of Button Get Fare */ $scope.btnGetFare = function(){ var txtTrainNo = $scope.txtTrainNo; var txtTrainDate = $scope.txtTrainDate; var optFareSrcCode = $scope.optFareSrcCode; var optFareDesCode = $scope.optFareDesCode; var txtAge = $scope.txtAge; var optFareQuota = $scope.optFareQuota; var inputDate = new Date(txtTrainDate); var sysDate = new Date(); txtTrainDate = document.getElementById("txtDatePicker").value; var date = txtTrainDate.split("-"); //var dd = date[2]; //var mm = date[1]; //var yyyy = date[0]; var dd = $scope.txtTrainDate.getDate(); var mm = $scope.txtTrainDate.getMonth() + 1; var yyyy = $scope.txtTrainDate.getFullYear(); txtTrainDate = dd + "-" + mm + "-" + yyyy; $scope.displayError = false; $scope.displayResult = false; if(txtAge === undefined || parseInt(txtAge) == 0) { $scope.displayError = true; $scope.errorMsg = "Error : Enter Valid Age."; } else if(optFareSrcCode === undefined) { $scope.displayError = true; $scope.errorMsg = "Error : Select Source & Destination Station."; } else if(optFareDesCode === undefined) { $scope.displayError = true; $scope.errorMsg = "Error : Select Source & Destination Station."; } else if(!parseInt(date[0]) || !parseInt(date[1]) || !parseInt(date[2])){ $scope.displayError = true; $scope.errorMsg = "Error : Enter Valid Date"; } else if( inputDate < sysDate ){ if((inputDate.getDay() === sysDate.getDay() && inputDate.getMonth() === sysDate.getMonth() && inputDate.getFullYear() === sysDate.getFullYear() )){ $scope.getTrain_Fare_Enquiry(txtTrainNo, optFareSrcCode, optFareDesCode, txtAge, txtTrainDate, optFareQuota); } else{ $scope.displayError = true; $scope.errorMsg = "Error : Date must be greater than equals to Today's Date"; } } else { $scope.getTrain_Fare_Enquiry(txtTrainNo, optFareSrcCode, optFareDesCode, txtAge, txtTrainDate, optFareQuota); } //alert("txtTrainNo = " + txtTrainNo + "\n" + // "txtTrainDate = " + txtTrainDate + "\n" + // "optFareSrcCode 
= " + optFareSrcCode.length + "\n" + // "optFareDesCode = " + optFareDesCode + "\n" + // "txtAge = " + txtAge + "\n" + // "optFareQuota = " + optFareQuota + "\n" ); } $scope.getTrain_Fare_Enquiry = function (txtTrainNo, optFareSrcCode, optFareDesCode, txtAge, txtTrainDate, optFareQuota) { //alert("txtTrainNo = " + txtTrainNo + "\n" + // "txtTrainDate = " + txtTrainDate + "\n" + // "optFareSrcCode = " + optFareSrcCode + "\n" + // "optFareDesCode = " + optFareDesCode + "\n" + // "txtAge = " + txtAge + "\n" + // "optFareQuota = " + optFareQuota + "\n" ); $scope.displayMainLoader = true; // inject service manually // Service :- Service_Train_Fare_Enquiry // train_name_no, source_code, dest_code, age, doj, quota_code var Service_Train_Fare_Enquiry = $injector.get("Service_Train_Fare_Enquiry"); var promise = Service_Train_Fare_Enquiry(txtTrainNo, optFareSrcCode, optFareDesCode, txtAge, txtTrainDate, optFareQuota); promise.then(function(response) { $scope.Train_Fare_Enquiry = response.data; $scope.txtTrainDate = txtTrainDate; $scope.displayMainLoader = false; $scope.displayResult = true; } ) .catch( function (reason) { $scope.betweens_form = true; $scope.displayMainLoader = false; $scope.displayResult = false; alert(reason); }); } }<file_sep>/** * Created by komal.chaudhary on 2/23/2016. */ //used to display Train Route Information on the Map angular.module("myApp").controller('Ctrl_Google_Map', [ '$scope', '$injector','$rootScope', Ctrl_Google_Map] ); function Ctrl_Google_Map( $scope, $injector, $rootScope ) { //Initialize & Display Google Map // $scope.displayMap = function () { var Service_Intermediator = $injector.get("Service_Intermediator"); $rootScope.home_page = false; // hide Home Page if ( Service_Intermediator.Train_Route.length !== 0) { var routes = Service_Intermediator.Train_Route; var bound = new google.maps.LatLngBounds(); for (var i = 0; i < routes.length; i++) { bound.extend( new google.maps.LatLng( routes[i].lat , routes[i].lng ) ); //$scope.JN_Names[i] = $scope.routes[i].fullname; // OTHER CODE } var lati = bound.getCenter().lat(); var lngi = bound.getCenter().lng(); //console.log( tempdata ); var map = new google.maps.Map(document.getElementById('map1'), { zoom: 6, center: {lat: lati, lng: lngi}, mapTypeId: google.maps.MapTypeId.TERRAIN }); var infowindow = new google.maps.InfoWindow(); var marker, i, prelat, prelng, mrkColor; prelat = routes[0].lat; prelng = routes[0].lng; for (i = 0; i < routes.length; i++) { mrkColor = 'http://maps.google.com/mapfiles/ms/icons/green-dot.png'; mrkColor = 'http://maps.google.com/mapfiles/ms/micons/flag.png'; if(i === 0 || i === routes.length - 1) mrkColor = 'http://maps.google.com/mapfiles/ms/icons/red-dot.png'; var line = new google.maps.Polyline({ path: [ new google.maps.LatLng(prelat, prelng), new google.maps.LatLng( routes[i].lat , routes[i].lng ) ], strokeColor: "blue", strokeOpacity: 1.0, strokeWeight: 2, map: map }); prelat = routes[i].lat; prelng = routes[i].lng; marker = new google.maps.Marker({ position: new google.maps.LatLng( routes[i].lat , routes[i].lng ), icon : mrkColor, animation: google.maps.Animation.DROP, map: map }); google.maps.event.addListener(marker, 'click', (function(marker, i) { return function() { infowindow.setContent( routes[i].fullname ); infowindow.open(map, marker); } })(marker, i)); } //Animation var marker1 = new google.maps.Marker({ position: new google.maps.LatLng( routes[0].lat , routes[0].lng ), icon : "http://maps.google.com/mapfiles/kml/paddle/blu-blank.png", animation: google.maps.Animation.DROP, 
map: map }); counter = 1; var route_Length = routes.length; interval = window.setInterval(function () { counter++; if (counter >= route_Length) { //window.clearInterval(interval); counter = 0; } var pos = new google.maps.LatLng(routes[counter].lat , routes[counter].lng); marker1.setPosition(pos); }, 520); //http://maps.google.com/mapfiles/kml/pal4/icon49.png } else{ var location = $injector.get("$location"); location.path("#" ); $rootScope.btnHome_Click(); } } <file_sep>/** * Created by komal.chaudhary on 2/19/2016. */ /* * Key_GMap_API Factory * Return Google Map Key */ angular.module("myApp").factory('Key_GMap_API', function () { return function getKey() { var key = "<KEY>"; return key; } });<file_sep>/** * Created by komal.chaudhary on 2/23/2016. */ // Factory For Hold Data between Controllers // This Factory Hold Train Route API returned Information angular.module("myApp").factory('Service_Intermediator', function () { return { Train_Route: [] }; }); <file_sep>/** * Created by komal.chaudhary on 2/24/2016. */ angular.module("myApp").controller('Ctrl_Seat_Availability', [ '$scope', '$injector' ,'$rootScope', Ctrl_Seat_Availability] ); function Ctrl_Seat_Availability( $scope, $injector, $rootScope, Service_TrainAutoComplete ) { $scope.stationSrc = []; //used for hold list of Names of Source Station List $scope.stationDes = []; //used for hold list of Names of Destination Station List $scope.train = []; // used for Selected train info $scope.routes = []; // used for holding routes info for a train $scope.displatResultTable = false; // True -> display false -> No display $rootScope.home_page = false; // hide Home Page $scope.iv_error_msg = ""; // display error message $scope.DisplayResultError = false; $scope.displayMainLoader = false; $scope.iv_error = false; //manually inject Service_Intermediator_Selected_Train var Service_Intermediator_Selected_Train = $injector.get("Service_Intermediator_Selected_Train"); $scope.train = Service_Intermediator_Selected_Train.Train; var Service_Intermediator = $injector.get("Service_Intermediator"); $scope.routes = Service_Intermediator.Train_Route; if($scope.routes.length === 0 || $scope.train.length === 0){ var location = $injector.get("$location"); location.path("#" ); $rootScope.btnHome_Click(); } /* * @name : txtStationName_keyUp($event,this, optSrcCode) * @param : $event,this, optSrcCode * $event Object * this current object * optSrcCode ; Types text * @desc : Invoked on keyup event of source textbox */ $scope.txtStationName_keyUp = function(evt, obj, text){ if(text.length > 2) { var promise = new Service_Station_List(text); promise.then(function (response) { $scope.stationSrc = response.data.station; for (var i = 0; i < response.data.station.length; i++) { $scope.availableStations[i] = response.data.station[i].fullname; } $("#" + evt.target.id).autocomplete({ source: $scope.availableStations }); }); } } $scope.txtStationNameDes_keyUp = function(evt, obj, text){ if(text.length > 2) { var promise = Service_Station_List(text); promise.then(function (response) { $scope.stationDes = response.data.station; for (var i = 0; i < response.data.station.length; i++) { $scope.availableStations[i] = response.data.station[i].fullname; } $("#" + evt.target.id).autocomplete({ source: $scope.availableStations }); }); } } /* * @name btnSeatAvailability * @desc : Invoke on Button Click Event for Seat Availability * Used for Display Seat Availability for a train on given date with class & category */ $scope.btnSeatAvailability = function() { $scope.displatResultTable 
= false; var train_no = $scope.train.number; var optSrcCode = $scope.optSrcCode; var optDesCode = $scope.optDesCode; var doj = $scope.doj; doj = document.getElementById("datepicker").value; var optClass = $scope.optClass; var optQuota = $scope.optQuota; var msec = Date.parse(doj); var inputDate = new Date(msec); var sysDate = new Date(); var date = doj.split("-"); /*var dd = date[2]; var mm = date[1]; var yyyy = date[0];*/ var dd = $scope.doj.getDate(); var mm = $scope.doj.getMonth() + 1; var yyyy = $scope.doj.getFullYear(); doj = dd + "-" + mm + "-" + yyyy; //$scope.doj = doj; if(!date[0] || !date[1] || !date[2]){ $scope.iv_error = true; $scope.iv_error_msg = "Error : Enter Valid Date"; } else if( inputDate < sysDate ){ if((inputDate.getDay() === sysDate.getDay() && inputDate.getMonth() === sysDate.getMonth() && inputDate.getFullYear() === sysDate.getFullYear() )){ $scope.check_Seat_Availability(train_no, optSrcCode, optDesCode, doj, optClass, optQuota); } else{ $scope.iv_error = true; $scope.iv_error_msg = "Error : Date must be greater than equals to Today's Date"; } } else{ $scope.check_Seat_Availability(train_no, optSrcCode, optDesCode, doj, optClass, optQuota); } } /* inject Service_Seat_Availability factory * @param : train_name_no, source_code, dest_code, doj, class_code, quota_code */ $scope.check_Seat_Availability = function(train_no, optSrcCode, optDesCode, doj, optClass, optQuota){ $scope.iv_error = false; $scope.displayMainLoader = true; $scope.DisplayResultError = false; //alert("optSrcCode = " + optSrcCode+ "\n" + // "optDesCode = " + optDesCode + "\n" + // "doj = " + doj + "\n" + // "optClass = " + optClass + "\n" + // "optQuota = " + optQuota + "\n" ); var Service_Seat_Availability = $injector.get("Service_Seat_Availability"); var promise = Service_Seat_Availability(train_no, optSrcCode, optDesCode, doj, optClass, optQuota); promise.then(function(response) { $scope.seat_availability = response.data; $scope.seat_availability = response.data; $scope.displatResultTable = true; if($scope.seat_availability.availability.length === 0){ $scope.displatResultTable = false; $scope.DisplayResultError = true; } $scope.displayMainLoader = false; $scope.doj = doj; } ) .catch( function(reason) { $scope.displatResultTable = false; }); } }<file_sep>/** * Created by komal.chaudhary on 2/19/2016. 
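 * Ctrl_Search: home-page controller. It wires the #tags search box to jQuery UI autocomplete (backed by Service_TrainAutoComplete) and, on search, extracts the train number from the selected text and navigates to the landing-page view.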
*/ angular.module("myApp").controller('Ctrl_Search', [ '$scope', '$injector' ,'$rootScope' ,'Service_TrainAutoComplete' , Ctrl_Search] ); function Ctrl_Search( $scope, $injector, $rootScope, Service_TrainAutoComplete ) { //alert("Ctrl_Search"); $scope.availableTags = [ ]; $rootScope.showGoogleMap = false; //hide google Map Option $rootScope.home_page = true; // display Home Page $scope.displayLoaderAutocomplete = false; //Display Loader Autocomplete //$scope.btnHome_Click = function(){ // $rootScope.home_page = true; // //alert("$rootScope.home_page = " + $rootScope.home_page); //} /** * @name complete * @desc Invoke on text change event on Search Textbox & ng-model is txtSearch * used for autocomplete */ var promise = Service_TrainAutoComplete(); $scope.tarinAutoCompleteLoaded = false; $scope.complete = function (myEvent) { if(myEvent.keyCode == 13) { var text = $scope.txtSearch; if(text.length > 0) { $scope.goLandPage(); } } else { var text = $scope.txtSearch; if( text ) { if (text.length > 2) { // check string length $scope.displayLoaderAutocomplete = true; //var promise = Service_TrainAutoComplete(text); //promise.then(function(response) { // $scope.displayLoaderAutocomplete = false; // $scope.availableTags = response.data.train; // $("#tags").autocomplete({ // source: $scope.availableTags // }); //} ); if (!$scope.tarinAutoCompleteLoaded) { promise.then(function (response) { $scope.displayLoaderAutocomplete = false; $scope.availableTags = response.data; $("#tags").autocomplete({ source: $scope.availableTags }); $scope.tarinAutoCompleteLoaded = true; }); } else $scope.displayLoaderAutocomplete = false; } } } } /* * @name btnSearch_Click * @desc call on click event of Search button from Index.html */ $scope.btnSearch_Click = function() { var text = $scope.txtSearch; if(text.length > 0) { $scope.goLandPage(); } } /* * @name goLandPage * @desc Used for display next view */ $scope.goLandPage = function(){ $rootScope.train_route_loader = true; //logic for extract number from String var txt = $scope.txtSearch; txt = document.getElementById("tags").value; var numb = txt.match(/\d/g); numb = numb.join(""); //alert("Train No " + numb); var location = $injector.get("$location"); location.path("/landing_page:" + numb); } } <file_sep>/** * Created by komal.chaudhary on 2/24/2016. */ //used for holding selected train Info angular.module("myApp").factory('Service_Intermediator_Selected_Train', function () { return { Train : [] }; });
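// Minimal usage sketch: both intermediator factories above are plain shared objects, so any
// controller can read the train chosen on the route view simply by injecting them (this mirrors
// how Ctrl_Fare_Enquiry and Ctrl_Seat_Availability consume them in the controllers above).
// 'Ctrl_Usage_Example' is a hypothetical controller name used only for illustration.
angular.module("myApp").controller('Ctrl_Usage_Example', ['$injector', function ($injector) {
    var selectedTrain = $injector.get("Service_Intermediator_Selected_Train"); // holds { Train: {...} }
    var routeHolder   = $injector.get("Service_Intermediator");                // holds { Train_Route: [...] }
    if (selectedTrain.Train && selectedTrain.Train.number) {
        console.log("Selected train", selectedTrain.Train.number,
                    "has", routeHolder.Train_Route.length, "route stops");
    }
}]);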
82320554224852555f20ab8b15837fdad5c3c34a
[ "JavaScript" ]
13
JavaScript
nkomalc/Train_Journey
f017d7b04409ac4302e29548c447f5cc4593e919
2e2d72f58621904bc0c56a2d95bc691ed84baec7
refs/heads/master
<file_sep>\name{Portfolio} \alias{Portfolio} \docType{data} \title{Portfolio Data %% ~~ data name/kind ... ~~ } \description{A simple simulated data set containing 100 returns for each of two assets, X and Y. The data is used to estimate the optimal fraction to invest in each asset to minimize investment risk of the combined portfolio. One can then use the Bootstrap to estimate the standard error of this estimate. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{Portfolio} \format{ A data frame with 100 observations on the following 2 variables. \describe{ \item{\code{X}}{Returns for Asset X} \item{\code{Y}}{Returns for Asset Y} } } \source{Simulated data %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Portfolio) attach(Portfolio) plot(X,Y) } \keyword{datasets} <file_sep>\name{Khan} \alias{Khan} \docType{data} \title{Khan Gene Data %% ~~ data name/kind ... ~~ } \description{The data consists of a number of tissue samples corresponding to four distinct types of small round blue cell tumors. For each tissue sample, 2308 gene expression measurements are available. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{Khan} \format{ The format is a list containing four components: \code{xtrain}, \code{xtest}, \code{ytrain}, and \code{ytest}. \code{xtrain} contains the 2308 gene expression values for 63 subjects and \code{ytrain} records the corresponding tumor type. \code{ytrain} and \code{ytest} contain the corresponding testing sample information for a further 20 subjects. } \source{This data were originally reported in: <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. Classification and diagnostic prediction of cancers using gene expression profiling and artificial neural networks. Nature Medicine, v.7, pp.673-679, 2001. The data were also used in: <NAME>, <NAME>, <NAME>, and <NAME>. Diagnosis of Multiple Cancer Types by Shrunken Centroids of Gene Expression. Proceedings of the National Academy of Sciences of the United States of America, v.99(10), pp.6567-6572, May 14, 2002. %They are also available in the pamr package. %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ table(Khan$ytrain) table(Khan$ytest) } \keyword{datasets} <file_sep>var mongoose=require('mongoose'); module.exports=mongoose.model('activity',{ username:String, timestamp:{type:Date,default:Date.now}, post_id:String, type:String }); <file_sep>\name{Default} \alias{Default} \docType{data} \title{Credit Card Default Data %% ~~ data name/kind ... ~~ } \description{A simulated data set containing information on ten thousand customers. The aim here is to predict which customers will default on their credit card debt. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{Default} \format{ A data frame with 10000 observations on the following 4 variables. 
\describe{ \item{\code{default}}{A factor with levels \code{No} and \code{Yes} indicating whether the customer defaulted on their debt} \item{\code{student}}{A factor with levels \code{No} and \code{Yes} indicating whether the customer is a student} \item{\code{balance}}{The average balance that the customer has remaining on their credit card after making their monthly payment} \item{\code{income}}{Income of customer} } } \source{Simulated data %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Default) glm(default~student+balance+income,family="binomial",data=Default) } \keyword{datasets} <file_sep>$(document).ready(function(){ var socket=io.connect('http://localhost:3000'); var timestamp; var commentno={}; var msgstamp; var msgcount=1; var arr=new Array; var noticount=1; var globalu; /* Function working fine */ $.ajax({ url:'http://127.0.0.1:3000/timeline', type:'POST', dataType:'text', cache:true, success:function(data){ data=JSON.parse(data); var user=data['user']; var onlineHtml = data['onlineHtml']; globalu=user; timestamp=data['timestamp']; var msg=data['messages']; data=data['html']; console.log(msg); $("#onlinePeople").append(onlineHtml); for(var i=0;i<data.length;i++){ var x=data[i]; var string=''; string+='<h3 style="color:#3B5999;margin-left:1%;margin-top:1%;">'+x['type']+'</h3><br><h5 style="margin-left:1%;">'+x['status']+'</h5><img src="./images/'+x['images']+'" class="img-responsive" style="width:90%;margin:auto;">'; string+='<div class="panel-body"><div class="col-sm-3"><div style="float:left;"> <a href="#" style="float:right;" class="love_react" id="'+x['_id']+'_love">'+(x['love'].length).toString()+' Loves </a> </div><div class="love_list" id="'+x['_id']+'_love_list" style="float:right;">'; for(var j=0;j<x['love'].length;j++){ string+='<p>'+x['love'][j]+'</p>'; } if(x['love_more']>0){ string+='<p>'+x['love_more']+' More</p>'; } string+='</div></div>'; string+='<div class="panel-body"><div class="col-sm-3"><div style="float:left;"> <a href="#" style="float:right;" class="haha_react" id="'+x['_id']+'_haha">'+(x['haha'].length).toString()+' Haha </a> </div><div class="haha_list" id="'+x['_id']+'_haha_list" style="float:right;">'; for(var j=0;j<x['haha'].length;j++){ string+='<p>'+x['haha'][j]+'</p>'; } if(x['haha_more']>0){ string+='<p>'+x['haha_more']+' More</p>'; } string+='</div></div></div><div class="panel-body"><div class="col-sm-1"><img src="./images/'+user+'.jpg" width="28px"></div><div class="col-sm-9"><form class="navbar-form navbar-left comment_form" id="'+x['_id']+'_commentform"><div class="input-group input-group-sm"><input class="form-control" placeholder="Comment" name="comment" id="'+x['_id']+'_comment" type="text">'; string+='<div class="input-group-btn"><button class="btn btn-primary" type="submit">Comment</button></div></div></form></div></div>'; string+='<div class="panel-body comment_list" id="'+x['_id']+'_commentlist"></div><div class="panel-body"><div class="col-sm-6"><a href="#"> <p class="loadcomments" id="'+x['_id']+'loadcomments"> Load More comments </p> </a></div></div></div>'; commentno[x['_id']]=new Date(); $("#mainarea").append(string); } console.log(commentno); if(msg.length<5){ msgcount=0; } for(var i=0;i<msg.length;i++){ var rawdata=msg[i]; var string=''; var seen=''; if(rawdata['seen']=='FALSE'){ seen='Not 
seen'; } else if(rawdata['seen']!='TRUE'){ seen='Seen '; } string+="<li class='msg_list' id='li_"+rawdata['friend']+"'><a href='#' class='msg_link' id='"+rawdata['friend']+"'><p style='width:320px;'><img src='./images/"+rawdata['friend']+".jpg' height='28px' width='28px'><span style='float:right;font-size:20px;'> "+rawdata['friend']+"</span> </p><p> <span style='float:left;' id='msg_"+rawdata['friend']+"'>"; string+=rawdata['message']+"</span> <span style='float:right;' id='seen_"+rawdata['friend']+"'>"+seen+" </span> </p></a></li>"; msgstamp=rawdata['timestamp']; arr.push(rawdata['timestamp']); $("#load_message").append(string); } }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); /* Function working fine */ $("#mainarea").on('click','.love_react',function(){ var id=this.id; id=id.split("_love")[0]; react('love',id); }); /* Function working fine */ $("#mainarea").on('click','.haha_react',function(){ var id=this.id; id=id.split("_haha")[0]; react('haha',id); }); /* Function working fine */ var react=function(type,post_id){ console.log(type); var haha; var love; if(type=='love'){ love = true; haha = false; } else if(type=='haha'){ love = false; haha = true; } $.ajax({ url:'http://127.0.0.1:3000/react', data:{'love':love,'haha':haha,'id':post_id}, type:'POST', dataType:'text/html', cache:true, success:function(data){ console.log(data); }, error:function(){ console.log('error'); } }); } /* Function working fine */ socket.on('activity',function(data){ $("#activity_list").prepend('<h5 class="rightbar" id="'+data['_id']+'_activity">'+data['act']+'</h5>'); }); socket.on('online',function(data){ $("#onlinePeople").prepend('<h5 id="user_'+data['user']+'">'+data['user']+'</h5>'); }); socket.on('offline',function(data){ console.log('hello'); $("#user_"+data['user']).remove(); }); /* Function working fine */ $("#activity_list").on('click','.rightbar',function(){ var id=this.id; id=id.split("_activity")[0]; $.ajax({ url:'http://127.0.0.1:3000/viewpost', data:{'post_id':id}, type:'POST', dataType:'text', cache:true, success:function(data){ $("html").html(data); }, error:function(){ console.log('error'); } }); }); /*setInterval(function(){ $("#hidden").hide(); }, 5000); */ /* Function working fine */ $("#search_form #srch-term").bind('input',function(){ var term=$("#srch-term").val(); if(term!='') search(term); else{ document.getElementById('search_results').innerHTML=''; } }); /* Function working fine */ var search=function(data){ $.ajax({ url:'http://127.0.0.1:3000/search', data:{'term':data}, type:'POST', dataType:'text', cache:true, success:function(data){ document.getElementById('search_results').innerHTML=data; $("#hidden").show(); }, error:function(){ console.log('error'); } }); } /* Function working fine */ $("#search_form").submit(function(e){ e.preventDefault(); var term=$("#srch-term").val(); $("#srch-term").val(''); search(term); }); socket.on('seen',function(data){ data=JSON.parse(data); var seen=data['time']; var user=data['second']; $("#load_message").find('#seen_'+user).html('Seen '+seen); }); socket.on('newmsg',function(data){ var v=$("#top_message").text(); if(v==''){ v=1; } else{ v=parseInt(v); v=v+1; } $("#top_message").html(v.toString()); var msg=data['data']; msg=msg.split(":"); var string; string+="<li class='msg_list' id='li_"+data['second']+"'><a href='#' class='msg_link' id='"+data['second']+"'><p style='width:320px;'><img src='./images/"+data['second']+".jpg' height='28px' width='28px'><span style='float:right;font-size:20px;'> 
"+data['second']+"</span> </p><p> <span style='float:left;' id='msg_"+data['second']+"'>"; string+=msg[1]+"</span> <span style='float:right;' id='seen_"+data['second']+"'> </span> </p></a></li>"; if($("#load_message").find("#li_"+data['second']).length!=0){ $("#load_message").find("#li_"+data['second']).remove(); $("#load_message").prepend(string); } else{ if(msgcount==1){ $("#load_message").children().last().remove(); arr.splice(-1,1); arr.unshift(new Date()); msgstamp=arr[arr.length-1]; } $("#load_message").prepend(string); } }); /* Function working fine */ $("#mainarea").on('submit','.comment_form',function(){ console.log('registered'); var id=this.id; var postid=id.split("_commentform"); postid=postid[0]; var comment=$("#mainarea").find('#'+postid+'_comment').text(); console.log(comment); $.ajax({ url:'http://127.0.0.1:3000/comment', data:{'id':postid,'comment':comment}, type:'POST', dataType:'text/html', cache:true, success:function(data){ console.log('success'); }, error:function(){ console.log('error'); } }); }); socket.on('newreply',function(data){ data=JSON.parse(data); var string=''; string+='<div class="col-sm-12"><div class="col-sm-1"><img src="./images/'+data['user']+'" height="28px" width="28px"></div><div class="col-sm-11"><p class="form-control">'+data['reply']+' </p></div></div>'; $("#mainarea").find("#"+data['commentid']+'_mainreplydiv').append(string); }); socket.on('newcomment',function(data){ data=JSON.parse(data); var string=''; string+='<div class="col-sm-1"><img src="./images/'+data['user']+'" height="28px" width="28px"></div><div class="col-sm-11"><p class="form-control">'+data['comment']+'</p><div class="col-sm-12"><button class="loadreplylist btn btn-primary" id="'+data['comment_id']+'" type="button">View Replies </button></div><div><div class="col-sm-12"><div class="col-sm-1"><img src="./images/'+globalu+'" height="28px" width="28px"></div><div class="col-sm-11"><form class="reply_form" id="'+data['comment_id']+'_replyform"><input type="text" name="reply" id="'+data['comment_id']+'_reply" class="form-control" placeholder="Reply"><button class="btn btn-primary pull-right" type="submit">Reply</button></form></div></div></div><div class="reply_list" id="'+data['comment_id']+'_mainreplydiv"></div>'; $("#mainarea").find("#"+data['postid']+'_commentlist').prepend(string); }); /* Function working fine */ socket.on('newnotification',function(data){ console.log(data); var v=$("#top_notification").text(); if(v==''){ v=1; } else{ v=parseInt(v); v=v+1; } $("#top_notification").html(v.toString()); $("#load_notifications").prepend('<li class="notification_list" id="'+data['id']+'_post"><a href="#"><p style="width:320px;"><img src="./images/'+data['user']+'" height="28px" width="28px"><span>'+data['noti']+' </span> </p></a></li>'); }); $("#load_notifications").on('click','.notification_list',function(){ var id=this.id; id=id.split("_post")[0]; window.location.href='/viewpost?post_id='+id; }); /* Function working fine */ socket.on('newreact',function(data){ var post_id=data['post_id']; if(data['react'] == 'love'){ var x=$("#mainarea").find("#"+post_id+'_love').text(); x=x.split(" "); x=parseInt(x[0]); x=x+1; $("#mainarea").find("#"+post_id+'_love').html(x.toString() + " Love"); } else if(data['react'] == 'haha'){ var x=$("#mainarea").find("#"+post_id+'_haha').text(); x=x.split(" "); x=parseInt(x[0]); x=x+1; $("#mainarea").find("#"+post_id+'_haha').html(x.toString()+" Haha"); } }); /* Function working fine */ $("#mainarea").on('click','.loadcomments',function(){ var id=this.id; 
id=id.split("loadcomments")[0]; $.ajax({ url:'http://127.0.0.1:3000/loadcomment', data:{'commentstamp':commentno[id],'post_id':id}, type:'POST', dataType:'text', cache:true, success:function(data){ console.log('success'); data=JSON.parse(data); if(data['timestamp'] != null){ console.log("changed"); commentno[id]=data['timestamp']; } var string=''; data=data['comments']; console.log(data); for(var i=0;i<data.length;i++){ var temp=data[i]; string+='<div class="col-sm-1"><img src="./images/'+temp['user']+'.jpg" height="28px" width="28px"></div><div class="col-sm-11"><p class="form-control">'+temp['comment']+'</p><div class="col-sm-12"><button class="loadreplylist btn btn-primary" id="'+temp['comment_id']+'" type="button">View Replies </button></div><div><div class="col-sm-12"><div class="col-sm-1"><img src="./images/'+globalu+'.jpg" height="28px" width="28px"></div><div class="col-sm-11"><form class="reply_form" id="'+temp['comment_id']+'_replyform"><input type="text" name="reply" id="'+temp['comment_id']+'_reply" class="form-control" placeholder="Reply"><button class="btn btn-primary pull-right" type="submit">Reply</button></form></div></div></div><div class="reply_list" id="'+temp['comment_id']+'_mainreplydiv"></div></div>'; } var appendid = "#"+id+"_commentlist"; $("#mainarea").find(appendid).append(string); }, error:function (xhr, status, error) { console.log('Error: ' + error.message); } }); }); $("#mainarea").on('click','.loadreplylist',function(){ var comment_id=this.id; $.ajax({ url:'http://127.0.0.1:3000/loadreply', data:{'id':comment_id}, type:'POST', dataType:'text/html', cache:true, success:function(data){ for(var i=0;i<data.length;i++){ var temp=data[i]; temp=temp.split(":"); var user=temp[0]; var string=''; string+='<div class="col-sm-12"><div class="col-sm-1"><img src="./images/'+user+'" height="28px" width="28px"></div><div class="col-sm-11"><p class="form-control">'+temp[1]+' </p></div></div>'; $("#mainarea").find("#"+comment_id+'_mainreplydiv').html(string); } }, error:function(){ console.log('error'); } }); }); /* Function working fine */ $("#load_more_stories").on('click',function(data){ console.log(timestamp); $.ajax({ url:'http://127.0.0.1:3000/load', type:'POST', data:{'mainstamp':timestamp}, dataType:'text', cache:true, success:function(data) { data=JSON.parse(data); var user=data['user']; if(data['timestamp'] != null){ timestamp=data['timestamp']; } data=data['html']; for(var i=0;i<data.length;i++){ var x=data[i]; var string=''; string+='<h3 style="color:#3B5999;margin-left:1%;margin-top:1%;">'+x['type']+'</h3><br><h5 style="margin-left:1%;">'+x['status']+'</h5><img src="./images/'+x['images']+'" class="img-responsive" style="width:90%;margin:auto;">'; string+='<div class="panel-body"><div class="col-sm-3"><div style="float:left;"> <a href="#" style="float:right;" class="love_react" id="'+x['_id']+'_love">'+(x['love'].length).toString()+'Loves </a> </div><div class="love_list" id="'+x['_id']+'_love_list" style="float:right;">'; for(var j=0;j<x['love'].length;j++){ string+='<p>'+x['love'][j]+'</p>'; } if(x['love_more']>0){ string+='<p>'+x['love_more']+' More</p>'; } string+='</div></div>'; string+='<div class="panel-body"><div class="col-sm-3"><div style="float:left;"> <a href="#" style="float:right;" class="haha_react" id="'+x['_id']+'_haha">'+(x['haha'].length).toString()+'Haha </a> </div><div class="haha_list" id="'+x['_id']+'_haha_list" style="float:right;">'; for(var j=0;j<x['haha'].length;j++){ string+='<p>'+x['haha'][j]+'</p>'; } if(x['haha_more']>0){ 
string+='<p>'+x['haha_more']+' More</p>'; }
string+='</div></div></div><div class="panel-body"><div class="col-sm-1"><img src="./images/'+user+'.jpg" width="28px"></div><div class="col-sm-9"><form class="navbar-form navbar-left comment_form" id="'+x['_id']+'_commentform"><div class="input-group input-group-sm"><input class="form-control" placeholder="Comment" name="comment" id="'+x['_id']+'_comment" type="text">';
string+='<div class="input-group-btn"><button class="btn btn-primary" type="submit">Comment</button></div></div></form></div></div>';
string+='<div class="panel-body comment_list" id="'+x['_id']+'_commentlist"></div><div class="panel-body"><div class="col-sm-6"><a href="#"> <p class="loadcomments" id="'+x['_id']+'loadcomments"> Load More comments </p> </a></div></div></div>';
commentno[x['_id']]=new Date();
$("#mainarea").append(string);
}
},
error:function(){
    console.log('error');
}
});
});
/* Function working fine */
$("#load_more_messages").on('click',function(){
    console.log(msgstamp);
    $.ajax({
        url:'http://127.0.0.1:3000/fillmsg',
        data:{'msgstamp':msgstamp},
        type:'POST',
        dataType:'text',
        cache:true,
        success:function(data){
            if(data != null){
                var msg=JSON.parse(data);
                for(var i=0;i<msg.length;i++){
                    console.log('yes');
                    var rawdata=msg[i];
                    var string='';
                    var seen='';
                    if(rawdata['seen']=='FALSE'){
                        seen='Not seen';
                    }
                    else if(rawdata['seen']!='TRUE'){
                        seen='Seen ';
                    }
                    string+="<li class='msg_list' id='li_"+rawdata['friend']+"'><a href='#' class='msg_link' id='"+rawdata['friend']+"'><p style='width:320px;'><img src='./images/"+rawdata['friend']+".jpg' height='28px' width='28px'><span style='float:right;font-size:20px;'> "+rawdata['friend']+"</span> </p><p> <span style='float:left;' id='msg_"+rawdata['friend']+"'>";
                    string+=rawdata['message']+"</span> <span style='float:right;' id='seen_"+rawdata['friend']+"'>"+seen+" </span> </p></a></li>";
                    msgstamp=rawdata['timestamp'];
                    arr.push(rawdata['timestamp']);
                    $("#load_message").append(string);
                }
            }
        },
        error:function(){
            console.log('error');
        }
    });
});
$("#mainarea").on('submit','.reply_form',function(e){
    e.preventDefault();
    var id=this.id;
    var comment_id=id.split("_replyform")[0];
    var reply=$("#mainarea").find("#"+comment_id+'_reply').val();
    $.ajax({
        url:'http://127.0.0.1:3000/reply',
        data:{'id':comment_id,'reply':reply},
        type:'POST',
        dataType:'text',
        cache:true,
        success:function(data){
            console.log('success');
        },
        error:function(){
            console.log('error');
        }
    });
});
});
<file_sep>\name{Credit}
\alias{Credit}
\docType{data}
\title{Credit Card Balance Data
%% ~~ data name/kind ... ~~
}
\description{A simulated data set containing credit card balance and demographic information for 400 customers. The aim here is to predict the average credit card balance that each customer carries.
%% ~~ A concise (1-5 lines) description of the dataset. ~~
}
\usage{Credit}
\format{
  A data frame with 400 observations on the following 12 variables.
\describe{ \item{\code{ID}}{Identification} \item{\code{Income}}{Income in $10,000's} \item{\code{Limit}}{Credit limit} \item{\code{Rating}}{Credit rating} \item{\code{Cards}}{Number of credit cards} \item{\code{Age}}{Age in years} \item{\code{Education}}{Number of years of education} \item{\code{Gender}}{A factor with levels \code{Male} and \code{Female}} \item{\code{Student}}{A factor with levels \code{No} and \code{Yes} indicating whether the individual was a student} \item{\code{Married}}{A factor with levels \code{No} and \code{Yes} indicating whether the individual was married} \item{\code{Ethnicity}}{A factor with levels \code{African American}, \code{Asian}, and \code{Caucasian} indicating the individual's ethnicity} \item{\code{Balance}}{Average credit card balance in $.} } } \source{Simulated data, with thanks to <NAME> for pointing out that this was omitted, and supplying the data and man documentation page on Oct 19, 2017 %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Credit) lm(Balance ~ Student + Limit, data=Credit) } \keyword{datasets} <file_sep>\name{Auto} \alias{Auto} \docType{data} \title{ Auto Data Set } \description{Gas mileage, horsepower, and other information for 392 vehicles.} \usage{Auto} \format{ A data frame with 392 observations on the following 9 variables. \describe{ \item{\code{mpg}}{miles per gallon} \item{\code{cylinders}}{Number of cylinders between 4 and 8} \item{\code{displacement}}{Engine displacement (cu. inches)} \item{\code{horsepower}}{Engine horsepower} \item{\code{weight}}{Vehicle weight (lbs.)} \item{\code{acceleration}}{Time to accelerate from 0 to 60 mph (sec.)} \item{\code{year}}{Model year (modulo 100)} \item{\code{origin}}{Origin of car (1. American, 2. European, 3. Japanese)} \item{\code{name}}{Vehicle name}} The orginal data contained 408 observations but 16 observations with missing values were removed.} \source{This dataset was taken from the StatLib library which is maintained at Carnegie Mellon University. The dataset was used in the 1983 American Statistical Association Exposition. } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ pairs(Auto) attach(Auto) hist(mpg) } \keyword{datasets} <file_sep>\name{College} \alias{College} \docType{data} \title{U.S. News and World Report's College Data %% ~~ data name/kind ... ~~ } \description{Statistics for a large number of US Colleges from the 1995 issue of US News and World Report. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{College} \format{ A data frame with 777 observations on the following 18 variables. \describe{ \item{\code{Private}}{A factor with levels \code{No} and \code{Yes} indicating private or public university} \item{\code{Apps}}{Number of applications received} \item{\code{Accept}}{Number of applications accepted} \item{\code{Enroll}}{Number of new students enrolled} \item{\code{Top10perc}}{Pct. new students from top 10\% of H.S. class} \item{\code{Top25perc}}{Pct. new students from top 25\% of H.S. 
class} \item{\code{F.Undergrad}}{Number of fulltime undergraduates} \item{\code{P.Undergrad}}{Number of parttime undergraduates} \item{\code{Outstate}}{Out-of-state tuition} \item{\code{Room.Board}}{Room and board costs} \item{\code{Books}}{Estimated book costs} \item{\code{Personal}}{Estimated personal spending} \item{\code{PhD}}{Pct. of faculty with Ph.D.'s} \item{\code{Terminal}}{Pct. of faculty with terminal degree} \item{\code{S.F.Ratio}}{Student/faculty ratio} \item{\code{perc.alumni}}{Pct. alumni who donate} \item{\code{Expend}}{Instructional expenditure per student} \item{\code{Grad.Rate}}{Graduation rate} } } \source{This dataset was taken from the StatLib library which is maintained at Carnegie Mellon University. The dataset was used in the ASA Statistical Graphics Section's 1995 Data Analysis Exposition. %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(College) lm(Apps~Private+Accept,data=College) } \keyword{datasets} <file_sep>\name{Carseats} \alias{Carseats} \docType{data} \title{Sales of Child Car Seats %% ~~ data name/kind ... ~~ } \description{A simulated data set containing sales of child car seats at 400 different stores. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{Carseats} \format{ A data frame with 400 observations on the following 11 variables. \describe{ \item{\code{Sales}}{Unit sales (in thousands) at each location} \item{\code{CompPrice}}{Price charged by competitor at each location} \item{\code{Income}}{Community income level (in thousands of dollars)} \item{\code{Advertising}}{Local advertising budget for company at each location (in thousands of dollars)} \item{\code{Population}}{Population size in region (in thousands)} \item{\code{Price}}{Price company charges for car seats at each site} \item{\code{ShelveLoc}}{A factor with levels \code{Bad}, \code{Good} and \code{Medium} indicating the quality of the shelving location for the car seats at each site} \item{\code{Age}}{Average age of the local population} \item{\code{Education}}{Education level at each location} \item{\code{Urban}}{A factor with levels \code{No} and \code{Yes} to indicate whether the store is in an urban or rural location} \item{\code{US}}{A factor with levels \code{No} and \code{Yes} to indicate whether the store is in the US or not} } } \source{Simulated data %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Carseats) lm.fit=lm(Sales~Advertising+Price,data=Carseats) } \keyword{datasets} <file_sep>\name{OJ} \alias{OJ} \docType{data} \title{Orange Juice Data %% ~~ data name/kind ... ~~ } \description{The data contains 1070 purchases where the customer either purchased Citrus Hill or Minute Maid Orange Juice. A number of characteristics of the customer and product are recorded. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{OJ} \format{ A data frame with 1070 observations on the following 18 variables. 
\describe{
  \item{\code{Purchase}}{A factor with levels \code{CH} and \code{MM} indicating whether the customer purchased Citrus Hill or Minute Maid Orange Juice}
% \item{\code{HouseholdID}}{Household ID}
  \item{\code{WeekofPurchase}}{Week of purchase}
  \item{\code{StoreID}}{Store ID}
% \item{\code{Buy}}{0/1 dummy variable where 1 corresponds to CH purchase}
  \item{\code{PriceCH}}{Price charged for CH}
  \item{\code{PriceMM}}{Price charged for MM}
  \item{\code{DiscCH}}{Discount offered for CH}
  \item{\code{DiscMM}}{Discount offered for MM}
  \item{\code{SpecialCH}}{Indicator of special on CH}
  \item{\code{SpecialMM}}{Indicator of special on MM}
  \item{\code{LoyalCH}}{Customer brand loyalty for CH}
% \item{\code{LoyalMM}}{Customer brand loyalty for MM}
  \item{\code{SalePriceMM}}{Sale price for MM}
  \item{\code{SalePriceCH}}{Sale price for CH}
  \item{\code{PriceDiff}}{Sale price of MM less sale price of CH}
  \item{\code{Store7}}{A factor with levels \code{No} and \code{Yes} indicating whether the sale is at Store 7}
  \item{\code{PctDiscMM}}{Percentage discount for MM}
  \item{\code{PctDiscCH}}{Percentage discount for CH}
  \item{\code{ListPriceDiff}}{List price of MM less list price of CH}
  \item{\code{STORE}}{Which of 5 possible stores the sale occurred at}
 }
}
\source{Stine, <NAME>., Foster, <NAME>., <NAME>. Business Analysis Using Regression (1998). Published by Springer.
%% ~~ reference to a publication or URL from which the data were obtained ~~
}
\references{
  <NAME>., <NAME>., <NAME>., and <NAME>. (2013)
 \emph{An Introduction to Statistical Learning with applications in R},
  \url{www.StatLearning.com}, Springer-Verlag, New York
}
\examples{
summary(OJ)
plot(OJ$Purchase,OJ$PriceCH)
}
\keyword{datasets}
<file_sep>$(document).ready(function(){
    $("#friend").on('click','li',function(){
        var id=this.id;
        id=id.slice(0, id.length-2);
        $.ajax({
            url: 'http://127.0.0.1:3000',
            // dataType: "jsonp",
            data: {id:id},
            type: 'POST',
            jsonpCallback: 'callback', // this is not relevant to the POST anymore
            success: function (data) {
                var ret = jQuery.parseJSON(data);
                console.log('Success: ', ret);
            },
            error: function (xhr, status, error) {
                console.log('Error: ' + error.message);
            }
        });
    });
});
<file_sep>var mongoose=require('mongoose');
module.exports=mongoose.model('messages',{
    username:{ type: String},
    seconduser:{ type: String},
    seen_now:{ type: String},
    message:Array,
    timestamp:{type:Date,default:Date.now}
});
<file_sep>\name{Hitters}
\alias{Hitters}
\docType{data}
\title{Baseball Data
%% ~~ data name/kind ... ~~
}
\description{Major League Baseball Data from the 1986 and 1987 seasons.
%% ~~ A concise (1-5 lines) description of the dataset. ~~
}
\usage{Hitters}
\format{
  A data frame with 322 observations of major league players on the following 20 variables.
\describe{ \item{\code{AtBat}}{Number of times at bat in 1986} \item{\code{Hits}}{Number of hits in 1986} \item{\code{HmRun}}{Number of home runs in 1986} \item{\code{Runs}}{Number of runs in 1986} \item{\code{RBI}}{Number of runs batted in in 1986} \item{\code{Walks}}{Number of walks in 1986} \item{\code{Years}}{Number of years in the major leagues} \item{\code{CAtBat}}{Number of times at bat during his career} \item{\code{CHits}}{Number of hits during his career} \item{\code{CHmRun}}{Number of home runs during his career} \item{\code{CRuns}}{Number of runs during his career} \item{\code{CRBI}}{Number of runs batted in during his career} \item{\code{CWalks}}{Number of walks during his career} \item{\code{League}}{A factor with levels \code{A} and \code{N} indicating player's league at the end of 1986} \item{\code{Division}}{A factor with levels \code{E} and \code{W} indicating player's division at the end of 1986} \item{\code{PutOuts}}{Number of put outs in 1986} \item{\code{Assists}}{Number of assists in 1986} \item{\code{Errors}}{Number of errors in 1986} \item{\code{Salary}}{1987 annual salary on opening day in thousands of dollars} \item{\code{NewLeague}}{A factor with levels \code{A} and \code{N} indicating player's league at the beginning of 1987} } } \source{This dataset was taken from the StatLib library which is maintained at Carnegie Mellon University. This is part of the data that was used in the 1988 ASA Graphics Section Poster Session. The salary data were originally from Sports Illustrated, April 20, 1987. The 1986 and career statistics were obtained from The 1987 Baseball Encyclopedia Update published by Collier Books, Macmillan Publishing Company, New York. %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. 
(2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Hitters) lm(Salary~AtBat+Hits,data=Hitters) } \keyword{datasets} <file_sep> var mongoose = require('mongoose'); module.exports = mongoose.model('notifications',{ username:String, notifications:Array }); <file_sep>var http=require('http'); var express=require('express'); var path = require('path'); var session=require('express-session'); var passport=require('passport'); var cookieparser=require('cookie-parser'); var app=express(); var MemoryStore=session.MemoryStore; var http=require('http').Server(app); var fs = require('fs'); var favicon = require('static-favicon'); app.use(favicon()); var messages=require('./models/messages'); var flash=require('connect-flash'); app.use(flash()); var mongoose=require('mongoose'); var io=require('socket.io').listen(http); app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'jade'); mongoose.connect('mongodb://localhost/messapp'); var bodyParser=require('body-parser'); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ keepExtensions: true, extended: false })); app.use(cookieparser()); var storep=new MemoryStore(); app.use(session({ secret: 'InterIITTechMeet17', store:storep, cookie:{ key:'connect.sid', maxAge: 1000 * 24 * 60 } })); app.use(express.static(__dirname + '/views')); app.use(passport.initialize()); app.use(passport.session()); var routes = require('./routes/index')(passport,io); app.use('/', routes); var initpassport=require('./passport/init'); initpassport(passport); http.listen(3000); <file_sep>$(document).ready(function(){ $(".love").on('click',function(){ var id = this.id; $.ajax({ url: 'http://127.0.0.1:3000', // dataType: "jsonp", data: {'id':id}, type: 'POST', dataType: 'html', cache: false, success: function (data) { document.getElementById('final').innerHTML=data; console.log(data); }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); }); $("#final").on('click',function(){ document.getElementById('final').innerHTML=''; }); $(".loves").on('click',function(){ var id = this.id; $.ajax({ url: 'http://127.0.0.1:3000/friend', // dataType: "jsonp", data: {'id':id}, type: 'POST', dataType: 'text/html', cache: false, success: function (data) { console.log(data); }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); }); }); <file_sep>\name{NCI60} \alias{NCI60} \docType{data} \title{NCI 60 Data %% ~~ data name/kind ... ~~ } \description{NCI microarray data. The data contains expression levels on 6830 genes from 64 cancer cell lines. Cancer type is also recorded. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{NCI60} \format{ The format is a list containing two elements: \code{data} and \code{labs}. \code{data} is a 64 by 6830 matrix of the expression values while \code{labs} is a vector listing the cancer types for the 64 cell lines. } \source{The data come from Ross et al. (Nat Genet., 2000). More information can be obtained at http://genome-www.stanford.edu/nci60/ % http://www-stat.stanford.edu/~tibs/ElemStatLearn/ %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. 
(2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ table(NCI60$labs) } \keyword{datasets} <file_sep>$(document).ready(function(){ var socket=io.connect('http://localhost:3000'); var friend; var seen=''; var index; var not_seen=new Array(); var windows=$(window); var title=$("title"); var flag=1; $.ajax({ url:'http://127.0.0.1:3000/fill', type:'POST', dataType:'text', cache:true, success:function(data){ data=JSON.parse(data); var friends='<h6>Friends</h6>'; var users='<h6>All users</h6>'; var temp=data['friends'][0]; if(temp!=undefined){ temp=temp['friends']; for(var i=0;i<temp.length;i++){ friends+="<div class='friend' id="+temp[i]+"> <div class='user'> <div class='avatar'><img src='./images/"+temp[i]+".jpg' alt='./images/user.jpg'> <div class='status off'></div> </div><div class='name' >"+temp[i]+"</div></div></div>"; } } for(var i=0;i<data['users'].length;i++){ users+="<div class='tobefriends' id="+data['users'][i]['username']+"> <div class='user'> <div class='avatar'><img src='./images/"+data['users'][i]['username']+".jpg' alt='./images/user.jpg'> <div class='status off'></div> </div><div class='name'>"+data['users'][i]['username']+"</div></div></div>"; } document.getElementById('friends_list').innerHTML=friends; document.getElementById('users_list').innerHTML=users; }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }) $("body").on('click','.friend',function(){ var id = this.id; friend=id; index=10; $.ajax({ url: 'http://127.0.0.1:3000/getmsg', // dataType: "jsonp", data: {'id':id}, type: 'POST', dataType: 'html', cache: true, success: function (data) { data=JSON.parse(data); var temp='<h6 id="fixed">'+friend+'</h6> '; for(var i=0;i<data['html'].length;i++){ var x=data['html'][i]; x=x.split(":"); if(x[0]=="You "){ temp+="<div class='answer right'> <div class='avatar'><img class='image' src='./images/"+data['owner']+".jpg' alt='./images/user.jpg'> <div class='status online'></div></div> <div class='name'>You</div><div class='text'>"+x[1]+"</div> </div>"; } else{ temp+="<div class='answer left'> <div class='avatar'><img class='image' src='./images/"+friend+".jpg' alt='./images/user.jpg'> <div class='status online'></div></div> <div class='name'>"+friend+"</div><div class='text'>"+x[1]+"</div> </div>"; } } document.getElementById('msgs_coming').innerHTML=temp; if(!(data['seen']=='FALSE' || data['seen']=='TRUE')){ if(data['seen']!=undefined){ document.getElementById('seen').innerHTML='Seen at '+data['seen']; seen='Seen at'+data['seen']; } console.log(data['seen']); } if(data['verify']=='FALSE'){ socket.emit('seen',{'friend':friend}); } $("#msg_form").show(); }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); }); $("body").on('click','.tobefriends',function(){ console.log('hello'); var id = this.id; $.ajax({ url: 'http://127.0.0.1:3000/friend', // dataType: "jsonp", data: {'id':id}, type: 'POST', dataType: 'text/html', cache: true, success: function (data) { }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); }); socket.on('new_friend',function(data){ var frnd=data['friend']; var string="<div class='friend' id="+frnd+"><div class='user'> <div class='avatar'><img src='./images/"+frnd+".jpg' alt='./images/user.jpg'> <div class='status off'></div> </div><div class='name'>"+frnd+"</div></div></div>"; $("#friends_list").append(string); }); socket.on('seen',function(data){ if(friend==data['second']){ 
seen='Seen at '+data['time']; document.getElementById('seen').innerHTML='Seen at '+data['time']; } }); setInterval(function(){ var i=0; for(var i=0;i<not_seen.length;i++){ if(friend==not_seen[i] && flag==0){ console.log(not_seen[i]); socket.emit('seen',{'friend':not_seen[i]}); not_seen.splice(i,1); } } }, 500); windows.focus(function(){ title.text("VAAD SAMVAAD"); flag=0; }); windows.blur(function(){ flag=1; }); socket.on('newmsg',function(data){ var x=data['data'].split(":"); document.getElementById('seen').innerHTML=''; if(x[0]=='You '){ $("#msgs_coming").append("<div class='answer right'> <div class='avatar'><img class='image' src='./images/"+data['second']+".jpg' alt='./images/user.jpg'> <div class='status online'></div></div> <div class='name'>You</div><div class='text'>"+x[1]+"</div> </div>"); } else{ if(flag==1){ title.text(data['second']+" messaged you!"); } if(flag==1||friend!=data['second']){ if($.inArray(data['second'], not_seen)==-1){ not_seen.push(data['second']); } console.log(not_seen); } else if(friend==data['second']){ socket.emit('seen',{'friend':friend}); } if(friend==data['second']){ $("#msgs_coming").append("<div class='answer left'> <div class='avatar'><img class='image' src='./images/"+data['second']+".jpg' alt='./images/user.jpg'> <div class='status online'></div></div> <div class='name'>"+data['second']+"</div><div class='text'>"+x[1]+"</div> </div>"); } } }); socket.on('typing',function(data){ if(friend==data['friend']){ initial=document.getElementById('seen').value; document.getElementById('seen').innerHTML=friend+' is typing!!'; } }); $("#msg").blur(function(){ socket.emit('blur',{friend:friend}); }); socket.on('blur',function(data){ if(friend==data['friend']){ document.getElementById('seen').innerHTML=''; } }); $("#msg_form #msg").bind('input',function(){ console.log('love'); socket.emit('typing',{friend:friend}); }); $("#signout").on('click',function(){ $.ajax({ url:'http://127.0.0.1:3000/signout', data: {}, type: 'GET', dataType: 'text/html', cache: true, success: function (data) { }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); window.location.href='/'; }); socket.on('initial',function(data){ console.log(data['friend']); friend=data['friend']; $.ajax({ url: 'http://127.0.0.1:3000/getmsg', // dataType: "jsonp", data: {'id':data['friend']}, type: 'POST', dataType: 'html', cache: true, success: function (data) { data=JSON.parse(data); var temp='<h6 id="fixed">'+friend+'</h6> '; for(var i=0;i<data['html'].length;i++){ var x=data['html'][i]; x=x.split(":"); if(x[0]=="You "){ temp+="<div class='answer right'> <div class='avatar'><img class='image' src='./images/"+data['owner']+".jpg' alt='./images/user.jpg'> <div class='status online'></div></div> <div class='name'>You</div><div class='text'>"+x[1]+"</div> </div>"; } else{ temp+="<div class='answer left'> <div class='avatar'><img class='image' src='./images/"+friend+".jpg' alt='./images/user.jpg'> <div class='status online'></div></div> <div class='name'>"+friend+"</div><div class='text'>"+x[1]+"</div> </div>"; } } document.getElementById('msgs_coming').innerHTML=temp; if(!(data['seen']=='FALSE' || data['seen']=='TRUE')){ if(data['seen']!=undefined){ document.getElementById('seen').innerHTML='Seen at '+data['seen']; seen='Seen at'+data['seen']; } console.log(data['seen']); } if(data['verify']=='FALSE'){ socket.emit('seen',{'friend':friend}); } $("#msg_form").show(); }, error: function (xhr, status, error) { console.log('Error: ' + error.message); } }); }); 
$("#msg_form").submit(function(e){ e.preventDefault(); var msg=$("#msg").val(); $("#msg").val(''); document.getElementById('seen').innerHTML=''; $.ajax({ url:'http://127.0.0.1:3000/sendmsg', data:{'id':friend,'msg':msg}, type:'POST', dataType:'text/html', cache:true, success:function(data){ console.log(data); }, error:function(){ console.log('error'); } }); }); }); <file_sep>load(file = "Wage.rda")<file_sep>\name{Weekly} \alias{Weekly} \docType{data} \title{Weekly S&P Stock Market Data %% ~~ data name/kind ... ~~ } \description{Weekly percentage returns for the S&P 500 stock index between 1990 and 2010. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{Weekly} \format{ A data frame with 1089 observations on the following 9 variables. \describe{ \item{\code{Year}}{The year that the observation was recorded} \item{\code{Lag1}}{Percentage return for previous week} \item{\code{Lag2}}{Percentage return for 2 weeks previous} \item{\code{Lag3}}{Percentage return for 3 weeks previous} \item{\code{Lag4}}{Percentage return for 4 weeks previous} \item{\code{Lag5}}{Percentage return for 5 weeks previous} \item{\code{Volume}}{Volume of shares traded (average number of daily shares traded in billions)} \item{\code{Today}}{Percentage return for this week} \item{\code{Direction}}{A factor with levels \code{Down} and \code{Up} indicating whether the market had a positive or negative return on a given week} } } \source{Raw values of the S&P 500 were obtained from Yahoo Finance and then converted to percentages and lagged. %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Weekly) lm(Today~Lag1+Lag2,data=Weekly) } \keyword{datasets} <file_sep> var mongoose = require('mongoose'); module.exports = mongoose.model('friend',{ username:String, friends:Array }); <file_sep>var express = require('express'); var router = express.Router(); var cookieparser=require('cookie'); var messages=require('../models/messages'); var multer=require('multer'); var friend=require('../models/friend'); var user=require('../models/user'); var bodyParser=require('body-parser'); var sockets={}; var imagename; var authenticated={}; var reverse={}; var chat={}; var reference={}; var activity=require('../models/activity'); var posts=require('../models/posts'); var comments=require('../models/comments'); var notifications=require('../models/notifications'); var isAuthenticated = function (req, res, next) { if (req.isAuthenticated()){ var agent=req.header('user-agent'); var ip=req.connection.remoteAddress; var string=ip+agent; authenticated[string]=req.user.username; return next(); } res.redirect('/timeline'); } var makename = function (req,res,next){ imagename = req.user.username + new Date(); return next(); } var istrue=function(req,res,next){ if(!req.isAuthenticated()) return next(); res.redirect('/timeline'); } module.exports=function(passport,io) { router.get('/',istrue,function(req,res){ res.render('index'); }); router.post('/login',passport.authenticate('signin',{ successRedirect: '/timeline', failureRedirect: '/signup' })); router.get('/signup',function(req,res){ if(!req.isAuthenticated()){ res.render('index'); } else { res.redirect('/'); } }); var storage=multer.diskStorage( {destination:function(req,file,cb){ cb(null,'/home/vinayak/Desktop/facebook_copy/myself/views/images/'); }, 
filename:function(req,file,cb){ cb(null,req.user.username+'.jpg'); } }); var storagenew = multer.diskStorage( {destination:function(req,file,cb){ cb(null,'/home/vinayak/Desktop/facebook_copy/myself/views/images/'); }, filename:function(req,file,cb){ cb(null,imagename); } }); var filter=function(req,file,cb){ if (!file.originalname.match(/\.(jpg|jpeg|png|gif)$/)) { return cb(new Error('Only image files are allowed!'), false); } cb(null, true); }; var size=function(req,file,cb){ if(file.size>102400){ return cb(new Error('Max Limit crossed!'),false); } cb(null,true); } var uploadnew=multer({storage:storagenew,fileFilter:filter,limits:size}); var upload=multer({storage:storage,fileFilter:filter,limits:size}); router.post('/upload',isAuthenticated, upload.single('profile_pic'),function(req,res){ res.redirect('/timeline'); }); router.post('/signup',passport.authenticate('signup',{ successRedirect: '/timeline', failureRedirect: '/signup' })); router.post('/sendmsg',isAuthenticated,function(req,res){ var a; var id=req.body.id; var msg=req.body.msg; var msg1="You : "+msg; var msg2=req.user.username+" : "+msg; var time=new Date(); console.log(msg1+msg2); var query=messages.find({username:req.user.username,seconduser:id}).select('message'); query.exec(function(err,msg){ if(msg[0]!=undefined){ a=msg[0]['message']; a.push(msg1); messages.update({username:req.user.username,seconduser:id},{'message':a,'seen_now':'FALSE','timestamp':time},{multi:true},function(err){ if(err){ throw err; } }); } else{ a=new Array(); a.push(msg1); var x=new messages(); x.username=req.user.username; x.seconduser=id; x.message=a; x.seen_now='FALSE'; x.save(function(err){ if(err){ console.log(err); } }); } }); query=messages.find({username:id,seconduser:req.user.username}).select('message'); query.exec(function(err,msg){ if(msg[0]!=undefined){ a=msg[0]['message']; a.push(msg2); messages.update({username:id,seconduser:req.user.username},{'message':a,'seen_now':'TRUE','timestamp':time},{multi:true},function(err){ if(err){ throw err; } }); } else{ a=new Array(); a.push(msg2); var x=new messages(); x.username=id; x.seconduser=req.user.username; x.message=a; x.seen_now='TRUE'; x.save(function(err){ }); } }); if(sockets[id]!=undefined){ io.to(sockets[id]).emit('newmsg',{'data':msg2,'first':id,'second':req.user.username}); } io.to(sockets[req.user.username]).emit('newmsg',{'data':msg1,'first':id,'second':req.user.username}); res.end('success'); }); io.sockets.on('connection',function(socket){ var agent=socket.handshake.headers['user-agent']; var ip= socket.request.connection.remoteAddress; if(ip=='::1'){ ip='::ffff:127.0.0.1'; } var string=ip+agent; if(authenticated[string]!=undefined){ console.log('connected'); var x=authenticated[string]; sockets[x]=socket.id; reference[x]=socket.request.headers.referer; if(reference[x]=='http://127.0.0.1:3000/home'){ if(chat[x]!=undefined){ io.to(socket.id).emit('initial',{friend:chat[x]}); } } reverse[socket.id]=x; } socket.on('seen',function(data){ var user=reverse[socket.id]; var date=new Date(); console.log(date); messages.update({username:data['friend'],seconduser:user},{'seen_now':date},{multi:true},function(err){ if(err){ throw err; } }); if(sockets[data['friend']]!=undefined){ io.to(sockets[data['friend']]).emit('seen',{'time':date.toLocaleTimeString(),'second':user}); } }); socket.on('typing',function(data){ var user=reverse[socket.id]; if(sockets[data['friend']]!=undefined&&reference[user]=='http://127.0.0.1:3000/home'){ io.to(sockets[data['friend']]).emit('typing',{friend:user}); } }); 
socket.on('blur',function(data){ var user=reverse[socket.id]; if(sockets[data['friend']]!=undefined&&reference[user]=='http://127.0.0.1:3000/home'){ io.to(sockets[data['friend']]).emit('blur',{friend:user}); } }); socket.on('disconnect',function(data){ if(!socket.id){ return; } var user=reverse[socket.id]; friend.find({'username':user}).select('friends').exec(function(err,result){ if(result[0] != undefined) result = result[0]['friends']; result.forEach(function(value){ if(sockets[value] != undefined){ io.to(sockets[value]).emit('offline',{'user':user}); } }); }); delete sockets[user]; delete reverse[socket.id]; }); }); router.get('/home',isAuthenticated,function(req,res){ var options = { maxAge: 1000 * 60 * 15, // would expire after 15 minutes httpOnly: true, // The cookie only accessible by the web server// Indicates if the cookie should be signed } res.cookie('user',req.user.username,options); res.render('home'); }); router.post('/fill',isAuthenticated,function(req,res){ var query=friend.find({'username':req.user.username}).select('friends'); var sql=user.find().select('username'); query.exec(function(err,friend){ if(!err){ sql.exec(function(err,users_ava){ if(!err){ res.send({users:users_ava,friends:friend}) } }); } }); }); /* Function working fine */ router.post('/friend',isAuthenticated,function(req,res){ var id=req.body.id; if(id==req.user.username){ res.redirect('/home'); } else{ var a; var query=friend.find({'username':req.user.username}); query.select('friends'); query.exec(function(err,frien){ if(frien[0]!=undefined){ a=frien[0]['friends']; if(a.indexOf(id)>-1){ res.redirect('/home'); } a.push(id); friend.update({'username':req.user.username},{'friends':a},{multi:true},function(err){ if(err){ throw err; } }); } else{ a=new Array(); a.push(id); var x=new friend(); x.username=req.user.username; x.friends=a; x.save(function(err){ }); } }); query=friend.find({'username':id}); query.select('friends'); query.exec(function(err,frien){ if(frien[0]!=undefined){ a=frien[0]['friends']; a.push(req.user.username); friend.update({'username':id},{'friends':a},{multi:true},function(err){ if(err){ throw err; } }); } else{ a=new Array(); a.push(req.user.username); var x=new friend(); x.username=id; x.friends=a; x.save(function(err){ }); } if(sockets[id]!=undefined){ io.to(sockets[id]).emit('new_friend',{'friend':req.user.username}); } io.to(sockets[req.user.username]).emit('new_friend',{'friend':id}); }); res.send('success'); } }); /* Function working fine */ router.post('/getmsg',isAuthenticated,function(req,res){ var a; var seen; var temp; var limit='FALSE'; var id=req.body.id; chat[req.user.username]=id; var html=''; var query=messages.find({username:req.user.username,seconduser:id}).select('message seen_now'); var verify=messages.find({username:id,seconduser:req.user.username}).select('seen_now'); verify.exec(function(err,result){ if(result[0]!=undefined){ a=result[0]['seen_now']; } }); query.exec(function(err,msg){ if(err){ console.log(err); } else{ if(msg[0]!=undefined){ html=msg[0]['message']; seen=msg[0]['seen_now']; } if(html.length>20){ html=html.splice(0,20); limit='TRUE'; } console.log(html); res.send({'html':html,'seen':seen,'verify':a,'owner':req.user.username,'limit':limit}); } }); }); router.get('/timeline',isAuthenticated,function(req,res){ res.render('timeline'); }); /* Function working fine */ var post_find=function(answers,callback){ var timestamp=new Date(); posts.find({_id:answers['post_id']}).exec(function(err,resp){ var x={}; resp=resp[0]; x['haha_more']=0; 
x['love_more']=0; x['username']=answers['username']; x['type']=answers['type']; x['_id']=resp['_id']; timestamp=answers['timestamp']; x['timestamp']=answers['timestamp']; x['owner']=resp['username']; x['images']=resp['image']; x['status']=resp['status']; x['love']=resp['love']; x['haha']=resp['haha']; if(x['haha'].length>10){ x['haha_more']=x['haha'].length-10; x['haha']=x['haha'].splice(0,10); } if(x['love'].length>10){ X['love_more']=x['love'].length-10; x['love']=x['love'].splice(0,10); } callback(x,timestamp); }); } /* Function working fine */ router.post('/timeline',isAuthenticated,function(req,res){ var q=friend.find({username:req.user.username}); var timestamp=new Date(); var onlineHtml = ''; q.exec(function(err,result){ if(result[0]!=undefined){ var friends=result[0]['friends']; friends.forEach(function(value){ if(sockets[value] != undefined){ onlineHtml += '<h5 id="user_'+value+'">'+value+'</h5>'; io.sockets.to(sockets[value]).emit('online',{'user':req.user.username}); } }); var query=activity.find({'username':{$in:friends},'timestamp':{$lt:timestamp}}).sort({'timestamp':1}).limit(10); var tempfunc=function(req,answers,callback,secondcallback,onlineHtml){ var html=new Array; var timestamp=new Date(); if(answers!=undefined){ var j=0; for(var i=0;i<answers.length;i++){ var value=answers[i]; post_find(value,function(data,r_time){ html.push(data); timestamp=r_time; j++; }); } setTimeout(function(){ callback(req,html,timestamp,secondcallback,onlineHtml); },1000); } }; var fn2=function(req,html,timestamp,callback,onlineHtml){ var msgs=new Array; var msgstamp=new Date(); messages.find({username:req.user.username,'timestamp':{$lt:msgstamp}}).sort({'timestamp':1}).limit(5).exec(function(err,results){ if(results!=undefined){ console.log('yes'); for(var i=0;i<results.length;i++){ var x={}; x['friend']=results[i]['seconduser']; x['seen']=results[i]['seen_now']; var msg=results[i]['message']; var l=msg.length; x['message']=msg[l-1].split(":")[1]; x['timestamp']=results[i]['timestamp']; msgs[i]=x; } } else{ console.log('problem'); } callback(html,req,msgs,timestamp,onlineHtml); }); }; var fn3=function(html,req,msgs,timestamp,onlineHtml){ var respond={}; respond['timestamp']=timestamp; respond['html']=html; respond['user']=req.user.username; respond['messages']=msgs; respond['onlineHtml'] = onlineHtml; console.log(respond); res.send(respond); } query.exec(function(err,answers){ tempfunc(req,answers,fn2,fn3,onlineHtml); }); } }); }); /* Function working fine */ router.post('/comment',isAuthenticated,function(req,response){ console.log('received'); var post_id=req.body.id; var comment=req.body.comment; console.log(comment); var comment_id; var friends; console.log(post_id); friend.find({username:req.user.username}).select('friends').exec(function(err,result){ friends=result[0]['friends']; console.log(friends); posts.find({_id:post_id}).exec(function(err,res){ console.log('res'+res[0]); if(res[0]!=undefined){ if(friends.indexOf(res[0]['username'])==-1){ console.log('fatal'); response.end('error'); } else{ console.log('another'); var x=new comments(); x.post_id=post_id; x.description=comment; x.replies=new Array; x.username=res[0]['username']; x.name=req.user.username; x.save(function(err,id){ comment_id=id['_id']; console.log('done'); }); var n=new activity(); n.username=req.user.username; n.post_id=post_id; n.type=req.user.username+" commented on "+res[0]['username']+"'s post"; n.save(function(err){ }); 
io.sockets.emit('newcomment',{'postid':post_id,'comment':comment,'user':req.user.username,comment_id:comment_id}); friends.forEach(function(value){ if(sockets[value]!=undefined){ io.to(sockets[value]).emit('activity',{'act':req.user.username+" commnted on "+res[0]['username']+"'s Post",'_id':post_id}); } }); comments.find({post_id:post_id}).select('name').exec(function(err,ans){ var key={}; for(var i=0;i<ans.length;i++){ var temp=ans[i]['name']; if(key[temp]==undefined){ if(sockets[temp]!=undefined && temp!=res[0]['username'] && temp!=req.user.username){ io.to(sockets[temp]).emit('newnotification',{id:post_id,noti:req.user.username+" also commnted on "+res[0]['username']+"'s Post",'_id':post_id}); } if(res[0]['username']!=temp){ key[temp] = true; var s=req.user.username+"also commnted on "+res[0]['username']+"'s Post"; notifications.find({username:temp}).exec(function(err,r){ if(r[0]==undefined){ var x=new notifications(); x.username=temp; var arr=new Array; arr.push(s); x.notifications=arr; x.save(function(err,cb){ }); } else { var noti=r[0]['notifications']; noti.push(s); notifications.update({username:temp},{notifications:noti},{multi:true},function(err){ }); } }); } } } }); var main=res[0]['username']; var s=req.user.username+" also commnted on your post"; notifications.find({username:main}).exec(function(err,r){ if(r[0]==undefined){ var x=new notifications(); x.username=main; var arr=new Array; arr.push(s); x.notifications=arr; x.save(function(err,cb){ }); } else { var noti=r[0]['notifications']; noti.push(s); notifications.update({username:main},{notifications:noti},{multi:true},function(err){ }); } if(sockets[main]!=undefined && main != req.user.username){ io.to(sockets[main]).emit('newnotification',{id:post_id,user:req.user.username,noti:req.user.username+" commented on your post"}); } }); } } }); }); }); /* Function working fine */ router.post('/react',isAuthenticated,function(req,res){ var love=req.body.love; var haha=req.body.haha; var react; if(love == true){ react= 'love'; } else{ react = 'haha'; } console.log(haha+love); console.log(react); var post_id=req.body.id; console.log(post_id); posts.find({_id:post_id}).exec(function(err,results){ var lovelist=results[0]['love']; var hahalist=results[0]['haha']; if(lovelist.indexOf(req.user.username)==-1 && hahalist.indexOf(req.user.username)==-1){ console.log('reached'); if(love == true){ lovelist.push(req.user.username); posts.update({_id:post_id},{love:lovelist},{multi:true},function(err){ }); } else{ hahalist.push(req.user.username); posts.update({_id:post_id},{haha:hahalist},{multi:true},function(err){ }); } var n=new activity(); n.username=req.user.username; n.post_id=post_id; n.type=req.user.username+" reacted "+react+" on "+results[0]['username']+"'s post"; n.save(function(err){ }); io.sockets.emit('newreact',{'react':react,'post_id':post_id}); friend.find({'username':req.user.username}).select('friends').exec(function(err,friends){ friends = friends[0]['friends']; friends.forEach(function(value){ if(sockets[value] != undefined){ console.log('socket for new activity found'); io.to(sockets[value]).emit('activity',{'act':req.user.username+" reacted "+react+" on "+results[0]['username']+"'s Post",'_id':post_id}); } }); if(sockets[results[0]['username']]!=undefined && results[0]['username'] != req.user.username){ io.to(sockets[results[0]['username']]).emit('newnotification',{'id':post_id,'user':req.user.username,'noti':req.user.username+" reacted "+react+" on your post"}); } }); var s=req.user.username+" reacted "+react+" on your 
post"; var main=results[0]['username']; notifications.find({username:main}).exec(function(err,r){ if(r[0]==undefined){ var x=new notifications(); x.username=main; var arr=new Array; arr.push(s); x.notifications=arr; x.save(function(err,cb){ }); } else { var noti=r[0]['notifications']; noti.push(s); notifications.update({username:main},{notifications:noti},{multi:true},function(err){ }); } }); } else{ res.end('error'); } }); }); /* Function working fine */ router.post('/loadcomment',isAuthenticated,function(req,res){ var commentstamp=req.body.commentstamp; console.log('I am'+commentstamp); var html=new Array; var post_id=req.body.post_id; var newstamp=new Date(); comments.find({post_id:post_id,timestamp:{$lt:commentstamp}}).sort({'timestamp':-1}).limit(10).exec(function(err,result){ result.forEach(function(value){ var x={}; newstamp=value['timestamp']; x['user']=value['name']; x['comment']=value['description']; x['comment_id']=value['_id']; html.push(x); }); if(result[0] == undefined){ console.log('undef'); newstamp = null; } setTimeout(function(){ console.log(html); var t={}; t['comments']=html; t['timestamp']=newstamp; res.send(t); },50); }); }); router.post('/reply',isAuthenticated,function(req,response){ var comment_id=req.body.id; var reply=req.body.reply; var friends; friend.find({username:req.user.username}).select('friends').exec(function(err,result){ friends=result[0]['friends']; comments.find({_id:comment_id}).exec(function(err,res){ if(friends.indexOf(res[0]['username'])==-1){ response.end('error'); } else{ var replies=res[0]['replies']; replies.push(req.user.username+': '+reply); comments.update({_id:comment_id},{replies:replies},{multi:true},function(err){ if(err){ console.log(err); response.end('error'); } }); var n=new activity(); n.username=req.user.username; n.post_id=res[0]['post_id']; n.type=req.user.username+" replied to a comment on "+res[0]['username']+"'s post"; n.save(function(err){ }); io.sockets.emit('newreply',{'commentid':comment_id,'reply':reply,'user':req.user.username}); friends.forEach(function(value){ if(sockets[value]!=undefined){ io.to(sockets[value]).emit('activity',{'act':req.user.username+"replied to a comment on "+res[0]['username']+"'s Post",'_id':res[0]['post_id']}); } }); var ans=res[0]['replies']; var post_id=res[0]['post_id']; var main=res[0]['username']; var passive=res[0]['name']; var key={}; for(var i=0;i<ans.length;i++){ var temp=ans[i].split(":")[0]; if(key[temp]==undefined){ if(sockets[temp] != undefined && temp != main && temp != passive){ io.to(sockets[temp]).emit('newnotification',{id:post_id,user:req.user.username,noti:req.user.username+"also replied to a comment on "+main+"'s Post"}); } if(main!=temp && temp!=passive){ key[temp]=TRUE; var s=req.user.username+"also replied to a comment on "+main+"'s Post"; notifications.find({username:temp}).exec(function(err,r){ if(r[0]==undefined){ var x=new notifications(); x.username=temp; var arr=new Array; arr.push(s); x.notifications=arr; x.save(function(err,cb){ }); } else { var noti=r[0]['notifications']; noti.push(s); notifications.update({username:temp},{notifications:noti},{multi:true},function(err){ }); } }); } } } var s=req.user.username+" replied to a comment on your post"; notifications.find({username:main}).exec(function(err,r){ if(r[0]==undefined){ var x=new notifications(); x.username=main; var arr=new Array; arr.push(s); x.notifications=arr; x.save(function(err,cb){ }); } else { var noti=r[0]['notifications']; noti.push(s); 
notifications.update({username:main},{notifications:noti},{multi:true},function(err){ }); } if(sockets[main]!=undefined && main != req.user.username){ io.to(sockets[main]).emit('newnotification',{id:post_id,user:req.user.username,noti:req.user.username+"also replied to a comment on your status"}); } }); s=req.user.username+" replied to a your comment on "+main+"'s post"; notifications.find({username:passive}).exec(function(err,r){ if(r[0]==undefined){ var x=new notifications(); x.username=passive; var arr=new Array; arr.push(s); x.notifications=arr; x.save(function(err,cb){ }); } else { var noti=r[0]['notifications']; noti.push(s); notifications.update({username:passive},{notifications:noti},{multi:true},function(err){ }); } if(sockets[passive]!=undefined && passive != req.user.username){ io.to(sockets[passive]).emit('newnotification',{id:post_id,user:req.user.username,noti:req.user.username+"also replied to your comment on "+main+"'s post"}); } }); } }); }); }); /* Function working fine */ router.post('/fillmsg',isAuthenticated,function(req,res){ var msgstamp=req.body.msgstamp; console.log(msgstamp); var messagesArray = new Array; messages.find({username:req.body.username,timestamp:{$lt:msgstamp}}).sort({timestamp:1}).limit(5).exec(function(err,results){ results.forEach(function(value){ var x={}; x['friend']=value['seconduser']; x['seen']=value['seen_now']; var msg=value['message']; var l = msg.length; x['message']=msg[l-1].split(":")[1]; x['timestamp']=value['timestamp']; messagesArray.push(x); }); if(results[0] == undefined){ messagesArray = null; } setTimeout(function(){ res.send(messagesArray); },100); }); }); router.post('/load',isAuthenticated,function(req,res){ var mainstamp=req.body.mainstamp; var timestamp = null; var html=new Array; var q=friend.find({username:req.user.username}); q.exec(function(err,result){ var friends=result[0]['friends']; var query=activity.find({'username':{$in:friends},'timestamp':{$lt:mainstamp}}).sort({'timestamp':1}).limit(10); query.exec(function(err,answers){ answers.forEach(function(value){ post_find(value,function(data,time){ timestamp=time; html.push(data); }); }); setTimeout(function(){ res.send({timestamp:timestamp,html:html,user:req.user.username}); console.log(timestamp); },100); }); }); }); /* Function working fine */ router.post('/updatestatus',isAuthenticated, makename, uploadnew.single('status_pic'), function(req,res){ var statustext=req.body.status; console.log(JSON.stringify(req.body)+' avadeka dabra'); x=new posts(); x.username=req.user.username; x.image=imagename; x.status=statustext; x.love=new Array; x.haha=new Array; x.save(function(err,e){ var post_id=e['_id']; var y = new activity(); y.username = req.user.username; y.post_id = post_id; y.type=req.user.username+" updated their status!"; y.save(function(err){ }); friend.find({username:req.user.username}).exec(function(err,results){ var friends = []; if(results[0] != undefined){ friends=results[0]['friends']; } friends.forEach(function(value){ if(sockets[value]!=undefined){ io.to(sockets[value]).emit('activity',{'act':req.user.username+" updated their status!",'_id':post_id}); } }); res.redirect('/home'); }); }); }); router.post('/loadreply',isAuthenticated,function(req,res){ var comment_id=req.body.id; comments.find({_id:comment_id}).select('replies').exec(function(err,result){ res.send(result[0]['replies']); }); }); /* Function working fine */ router.post('/search',isAuthenticated,function(req,res){ var term=req.body.term; var html=''; user.find({username:new 
RegExp("^"+term+"(.*)")}).select('username').limit(5).exec(function(err,results){ if(results[0] == undefined){ html = '<li> No results found </li>'; } else{ for(var i=0;i<results.length;i++){ html+= '<li> <a href="#"> <p>'+results[i]['username']+'</p> </a> </li>'; } } res.send(html); }); }); /* Function working fine */ router.post('/viewpost',isAuthenticated,function(req,res){ var id=req.body.post_id; console.log(req.body); res.end('Welcome'+id); }); /* Function working fine */ router.get('/signout',function(req,res){ var agent=req.header('user-agent'); var ip=req.ip; var string=ip+agent; delete authenticated[string]; console.log(authenticated); req.logout(); res.send('success'); }); return router; } <file_sep>\name{Wage} \alias{Wage} \docType{data} \title{Mid-Atlantic Wage Data %% ~~ data name/kind ... ~~ } \description{Wage and other data for a group of 3000 male workers in the Mid-Atlantic region. %% ~~ A concise (1-5 lines) description of the dataset. ~~ } \usage{Wage} \format{ A data frame with 3000 observations on the following 11 variables. \describe{ \item{\code{year}}{Year that wage information was recorded} \item{\code{age}}{Age of worker} \item{\code{maritl}}{A factor with levels \code{1. Never Married} \code{2. Married} \code{3. Widowed} \code{4. Divorced} and \code{5. Separated} indicating marital status} \item{\code{race}}{A factor with levels \code{1. White} \code{2. Black} \code{3. Asian} and \code{4. Other} indicating race} \item{\code{education}}{A factor with levels \code{1. < HS Grad} \code{2. HS Grad} \code{3. Some College} \code{4. College Grad} and \code{5. Advanced Degree} indicating education level} \item{\code{region}}{Region of the country (mid-atlantic only)} \item{\code{jobclass}}{A factor with levels \code{1. Industrial} and \code{2. Information} indicating type of job} \item{\code{health}}{A factor with levels \code{1. <=Good} and \code{2. >=Very Good} indicating health level of worker} \item{\code{health_ins}}{A factor with levels \code{1. Yes} and \code{2. No} indicating whether worker has health insurance} \item{\code{logwage}}{Log of workers wage} \item{\code{wage}}{Workers raw wage} } } \source{Data was manually assembled by <NAME>, of Open BI (www.openbi.com), from the March 2011 Supplement to Current Population Survey data. \url{http://thedataweb.rm.census.gov/TheDataWeb} %% ~~ reference to a publication or URL from which the data were obtained ~~ } \references{ <NAME>., <NAME>., <NAME>., and <NAME>. (2013) \emph{An Introduction to Statistical Learning with applications in R}, \url{www.StatLearning.com}, Springer-Verlag, New York } \examples{ summary(Wage) lm(wage~year+age,data=Wage) ## maybe str(Wage) ; plot(Wage) ... } \keyword{datasets}
c440e7862f7b93a7e2282aadccd70eec192d3fdf
[ "JavaScript", "R" ]
23
R
vinayaktrivedi/facebook-copy
c1eefe344b003f9432217097cf7f537a54fe69c8
0bf3f5f37c35473202b003757024a261c843e6ce
refs/heads/master
<repo_name>zhangweilian/verbose-guide<file_sep>/readme.md
# markdown

__this text will be wonderful__

~~this text will be deleted~~

_you **can** combine them_

- <NAME>

- [ ] finish my changes

```c
#include <stdio.h>
int main(void){
    for(int i=0;i<10;i++)
        printf("%d",i);
}
```
>hello.c

[github](http://www.github.com)
[baidu](http://www.baidu.com)

if you want to do something, you can read this book:

![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png)
![Image of Labtocat](https://octodex.github.com/images/labtocat.png)
![Image of zhaoliying](http://sc.jb51.net/uploads/allimg/140708/10-140FR34P2391.jpg)
<file_sep>/readme.c
#include <stdio.h>
int main(void){
    printf("hello my name is william,i am from china.i like \n");
    return 0;
}
a89803b564013200c8396871dc53c059945ecf0b
[ "Markdown", "C" ]
2
Markdown
zhangweilian/verbose-guide
e2d32094aeb5bdf1ac1fe0a378ecf0b46f20a148
5a6d327b37ecec4124d1d92526536c87e0ff78d7
refs/heads/master
<repo_name>iaaron-green/shihov<file_sep>/src/main/java/DeveloperView.java import java.util.Scanner; /** * Created by Roma on 27.06.2017. */ public class DeveloperView { public static DeveloperController developerController; public static Scanner scanner = new Scanner(System.in); public static void main(String[] args) { int count =0; while (true) { System.out.println("Виберете действие: "); System.out.println("1. Создание разработчика"); System.out.println("2. Редактирование"); System.out.println("3. Получение данных (чтение)"); System.out.println("4. Удаление"); System.out.println("5. Завершыть роботу"); count = scanner.nextInt(); developerController = new DeveloperController(count); } } } <file_sep>/README.md "# shihov"
016e6f7934c557ea3177b09d48c92dcee49c3bd3
[ "Markdown", "Java" ]
2
Java
iaaron-green/shihov
e765ff4a17b14cd6d72cf1e80edb5722f687ae29
eb764fdad60748b88250bbe2cd6f0caaaa5b0d3b
refs/heads/master
<repo_name>triedal/countdown<file_sep>/src/countdown.c #include <pebble.h> #define KEY_TEMPERATURE 0 static Window *window; static TextLayer *timeLayer; //static TextLayer *batteryLayer; static TextLayer *weatherLayer; static Layer *time_ring_display_layer; // Try 10 minute increments // 68 = radius + fudge; 3 = 68*tan(2.5 degrees); 2.5 degrees per 10 minutes; const GPathInfo TIME_RING_SEGMENT_PATH_POINTS = { 3, (GPoint []) { {0, 0}, {-18, -68}, // 68 = radius + fudge; 18 = 68*tan(15 degrees); 15 degrees per hour; {18, -68}, } }; static GPath *time_ring_segment_path; static void time_ring_display_layer_update_callback(Layer *layer, GContext *ctx) { // Get a tm structure time_t temp = time(NULL); struct tm *tick_time = localtime(&temp); unsigned int max_angle = (tick_time->tm_hour) * 15; GRect bounds = layer_get_bounds(layer); GPoint center = grect_center_point(&bounds); graphics_context_set_fill_color(ctx, GColorClear); graphics_fill_circle(ctx, center, 65); graphics_context_set_fill_color(ctx, GColorBlack); for (; max_angle > 0; max_angle -= 15) { gpath_rotate_to(time_ring_segment_path, (TRIG_MAX_ANGLE / 360) * max_angle); gpath_draw_filled(ctx, time_ring_segment_path); } graphics_fill_circle(ctx, center, 60); } /* static void handle_battery(BatteryChargeState charge_state) { static char battery_text[] = "100%"; snprintf(battery_text, sizeof(battery_text), "%d%%", charge_state.charge_percent); text_layer_set_text(batteryLayer, battery_text); } */ static void update_time() { // Get a tm structure time_t temp = time(NULL); struct tm *tick_time = localtime(&temp); // Create a long-lived buffer static char timeText[] = "00:00"; char *time_format; if (clock_is_24h_style()) { time_format = "%R"; } else { time_format = "%I:%M"; } strftime(timeText, sizeof(timeText), time_format, tick_time); // Handle lack of non-padded hour format string for twelve hour clock. if (!clock_is_24h_style() && (timeText[0] == '0')) { memmove(timeText, &timeText[1], sizeof(timeText) - 1); } text_layer_set_text(timeLayer, timeText); } static void handle_minute_tick (struct tm *tick_time, TimeUnits units_changed) { update_time(); //handle_battery(battery_state_service_peek()); layer_mark_dirty(time_ring_display_layer); // Get weather update every 30 minutes if(tick_time->tm_min % 30 == 0) { // Begin dictionary DictionaryIterator *iter; app_message_outbox_begin(&iter); // Add a key-value pair dict_write_uint8(iter, 0, 0); // Send the message! app_message_outbox_send(); } } static void inbox_received_callback(DictionaryIterator *iterator, void *context) { // Store incoming information static char temperature_buffer[8]; static char weather_layer_buffer[32]; // Read first item Tuple *t = dict_read_first(iterator); // For all items while(t != NULL) { // Which key was received? 
switch(t->key) { case KEY_TEMPERATURE: snprintf(temperature_buffer, sizeof(temperature_buffer), "%d°", (int)t->value->int32); break; default: APP_LOG(APP_LOG_LEVEL_ERROR, "Key %d not recognized!", (int)t->key); break; } // Look for next item t = dict_read_next(iterator); } // Assemble full string and display snprintf(weather_layer_buffer, sizeof(weather_layer_buffer), "%s", temperature_buffer); text_layer_set_text(weatherLayer, weather_layer_buffer); } static void inbox_dropped_callback(AppMessageResult reason, void *context) { APP_LOG(APP_LOG_LEVEL_ERROR, "Message dropped!"); } static void outbox_failed_callback(DictionaryIterator *iterator, AppMessageResult reason, void *context) { APP_LOG(APP_LOG_LEVEL_ERROR, "Outbox send failed!"); } static void outbox_sent_callback(DictionaryIterator *iterator, void *context) { APP_LOG(APP_LOG_LEVEL_INFO, "Outbox send success!"); } static void window_load(Window *window) { Layer *window_layer = window_get_root_layer(window); GRect bounds = layer_get_frame(window_layer); // Init layer for time display timeLayer = text_layer_create(GRect(0, 55, bounds.size.w, 50)); text_layer_set_background_color(timeLayer, GColorClear); text_layer_set_text_color(timeLayer, GColorClear); text_layer_set_font(timeLayer, fonts_get_system_font(FONT_KEY_BITHAM_42_MEDIUM_NUMBERS)); text_layer_set_text_alignment(timeLayer, GTextAlignmentCenter); /* // Init layer for battery level display batteryLayer = text_layer_create(GRect(0, 0, bounds.size.w, 24)); text_layer_set_background_color(batteryLayer, GColorClear); text_layer_set_text_color(batteryLayer, GColorClear); text_layer_set_font(batteryLayer, fonts_get_system_font(FONT_KEY_GOTHIC_18_BOLD)); text_layer_set_text_alignment(batteryLayer, GTextAlignmentRight); */ // Init layer for weather display weatherLayer = text_layer_create(GRect(0, 100, 144, 30)); text_layer_set_background_color(weatherLayer, GColorClear); text_layer_set_text_color(weatherLayer, GColorClear); text_layer_set_text_alignment(weatherLayer, GTextAlignmentCenter); text_layer_set_font(weatherLayer, fonts_get_system_font(FONT_KEY_GOTHIC_24_BOLD)); text_layer_set_text(weatherLayer, ""); // Init the time left segment path time_ring_segment_path = gpath_create(&TIME_RING_SEGMENT_PATH_POINTS); gpath_move_to(time_ring_segment_path, grect_center_point(&bounds)); // Init layer for time left display time_ring_display_layer = layer_create(bounds); layer_set_update_proc(time_ring_display_layer, time_ring_display_layer_update_callback); // Add child layers to window layer layer_add_child(window_layer, time_ring_display_layer); layer_add_child(window_layer, text_layer_get_layer(timeLayer)); //layer_add_child(window_layer, text_layer_get_layer(batteryLayer)); layer_add_child(window_layer, text_layer_get_layer(weatherLayer)); } static void window_unload(Window *window) { text_layer_destroy(timeLayer); //text_layer_destroy(batteryLayer); text_layer_destroy(weatherLayer); layer_destroy(time_ring_display_layer); gpath_destroy(time_ring_segment_path); } static void init(void) { window = window_create(); window_set_background_color(window, GColorBlack); window_set_window_handlers(window, (WindowHandlers) { .load = window_load, .unload = window_unload, }); const bool animated = true; window_stack_push(window, animated); update_time(); tick_timer_service_subscribe(MINUTE_UNIT, handle_minute_tick); //battery_state_service_subscribe(handle_battery); // Register callbacks app_message_register_inbox_received(inbox_received_callback); 
app_message_register_inbox_dropped(inbox_dropped_callback); app_message_register_outbox_failed(outbox_failed_callback); app_message_register_outbox_sent(outbox_sent_callback); // Open AppMessage app_message_open(app_message_inbox_size_maximum(), app_message_outbox_size_maximum()); } static void deinit(void) { tick_timer_service_unsubscribe(); //battery_state_service_unsubscribe(); window_destroy(window); } int main(void) { init(); APP_LOG(APP_LOG_LEVEL_DEBUG, "Done initializing, pushed window: %p", window); app_event_loop(); deinit(); }
18fdac4e7009f1ff832af7f3d6547185a938d10d
[ "C" ]
1
C
triedal/countdown
7acf59a756a53655c14c6d85a1e68d572c93a7a5
7e71e4b3b9892c2f18752a299aa21f4dfbab384f
refs/heads/master
<repo_name>kashyp18/WEATHER<file_sep>/app/src/main/java/com/example/android/weather/Utility.java
package com.example.android.weather;

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;

import java.text.DateFormat;
import java.util.Date;

/**
 * Created by kashyap on 21/12/16.
 */
public class Utility {

    public static String getPreferredLocation(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString("location", "110091");
    }

    public static boolean isMetric(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString("units", "metric").equals("metric");
    }

    static String formatTemprature(double temprature, boolean isMetric) {
        double temp;
        if (!isMetric) {
            temp = 9 * temprature / 5 + 32;
        } else {
            temp = temprature;
        }
        return String.format("%.0f", temp);
    }

    static String formatDate(long dateInMillis) {
        Date date = new Date(dateInMillis);
        return DateFormat.getDateInstance().format(date);
    }
}
7335cb99f68adda63919061def8f1d7c82519fe0
[ "Java" ]
1
Java
kashyp18/WEATHER
187f7311e4524b40b7031dee7f03feaa790c22e5
6c8022bc6af1f435305e6a7b08b99759e9dc7caa
refs/heads/master
<file_sep>#!/usr/bin/env bash
ELK_VERSION=latest

#mvn clean package -Dmaven.test.skip=true -U
docker build -t registry.cn-hangzhou.aliyuncs.com/engine/redisson:$ELK_VERSION .
docker push registry.cn-hangzhou.aliyuncs.com/engine/redisson:$ELK_VERSION

#docker run -e SPRING_PROFILES_ACTIVE="prod" -p 7000:7000 -d elk-engine:$ELK_VERSION

# pull the image
#docker pull registry.cn-hangzhou.aliyuncs.com/engine/redisson:$ELK_VERSION

# start from the jar
#java -jar /Volumes/CodeFile/GitHub/1346735074/elk-engine/es-scheduled/target/es-scheduled-0.0.1-SNAPSHOT.jar --spring.profiles.active=prod
<file_sep># redis-start
<file_sep>FROM hub.c.163.com/library/java:8-alpine
MAINTAINER noly <EMAIL>

ADD target/*.jar redisson.jar

EXPOSE 7000

ENTRYPOINT ["java", "-jar", "-Dspring.profiles.active=${SPRING_PROFILES_ACTIVE}", "/redisson.jar"]
<file_sep>package com.purge.redisson.config;

import org.redisson.Redisson;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.util.StringUtils;

@Configuration
public class RedissonConfig implements EnvironmentAware {

    private static final String PREFIX = "redis://";
    private static final String HOST_KEY = "spring.redis.host";
    private static final String PORT_KEY = "spring.redis.port";
    private static final String CLUSTER_KEY = "spring.redis.cluster.nodes";
    private static final String PASSWORD_KEY = "spring.redis.password";

    private Environment environment;

//    @Value("${spring.redis.host:}")
//    private String host;
//
//    @Value("${spring.redis.port:}")
//    private String port;
//
//    @Value("${spring.redis.cluster.nodes:}")
//    private String cluster;
//
//    @Value("${spring.redis.password:}")
//    private String password;

    @Bean
    public RedissonClient getRedisson() {
        String host = environment.getProperty(HOST_KEY);
        String password = environment.getProperty(PASSWORD_KEY);
        Config config = new Config();
        if (!StringUtils.isEmpty(host)) {
            // Single-server mode: host and port are both required.
            String port = environment.getProperty(PORT_KEY);
            if (StringUtils.isEmpty(port)) {
                throw new IllegalArgumentException("[getRedisson] port is null.");
            }
            config.useSingleServer().setAddress(PREFIX + host + ":" + port);
            if (!StringUtils.isEmpty(password)) {
                config.useSingleServer().setPassword(password);
            }
        } else {
            // Cluster mode: fall back to the comma-separated node list.
            String cluster = environment.getProperty(CLUSTER_KEY);
            if (StringUtils.isEmpty(cluster)) {
                throw new IllegalArgumentException("[getRedisson] cluster is null.");
            }
            String[] nodes = cluster.split(",");
            for (int i = 0; i < nodes.length; i++) {
                nodes[i] = PREFIX + nodes[i];
            }
            config.useClusterServers().setScanInterval(2000).addNodeAddress(nodes);
            if (!StringUtils.isEmpty(password)) {
                config.useClusterServers().setPassword(password);
            }
        }
        return Redisson.create(config);
    }

    @Override
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }
}
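The RedissonConfig class above only consumes the four `spring.redis.*` properties named by HOST_KEY, PORT_KEY, CLUSTER_KEY and PASSWORD_KEY. As a minimal sketch (the host, port and password values below are placeholders, not taken from this repository), an `application.yml` for the single-server branch could look like:

```yaml
# Hypothetical application.yml consumed by RedissonConfig (values are examples only).
spring:
  redis:
    host: 127.0.0.1      # read via environment.getProperty("spring.redis.host")
    port: 6379           # required whenever host is set, otherwise getRedisson() throws
    password: changeme   # optional; applied through setPassword(...)
    # For the cluster branch, omit host/port and set instead:
    # cluster:
    #   nodes: 10.0.0.1:6379,10.0.0.2:6379
```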
3241ebbecbc96103c2e93accebf2ca510a2015c0
[ "Markdown", "Java", "Dockerfile", "Shell" ]
4
Shell
purgeteam/redis-start
7c768c6a3aa363f31fc25e85dc5c6ed13dea4872
8c53d72e48e2517fa8aaaa0ff10adad9307f6124
refs/heads/master
<repo_name>AugustinBrocquet/symfony_rest_api<file_sep>/README.md
# Symfony REST API + Oauth2

[![N|Solid](https://www.shareicon.net/data/256x256/2016/06/19/603895_prog_512x512.png)](https://symfony.com/)

# REST API built with the Symfony Framework - Version 4.2

## Installation

Clone the project

```sh
$ git clone https://github.com/AugustinBrocquet/symfony_rest_api.git
```

Install the PHP dependencies.

```sh
$ composer install
```

### Required PHP dependencies

| Dependency | Command |
| ------ | ------ |
| server | composer require server --dev |
| FOSRestBundle | composer require friendsofsymfony/rest-bundle |
| JMSSerializer | composer require jms/serializer-bundle |
| SensioFrameworkExtraBundle | composer require sensio/framework-extra-bundle |
| SymfonyValidator | composer require symfony/validator |
| SymfonyOrmPack | composer require symfony/orm-pack |
| FOSUserBundle | composer require friendsofsymfony/user-bundle |
| FOSOauthServerBundle | composer require friendsofsymfony/oauth-server-bundle |
| SwiftmailerBundle | composer require swiftmailer-bundle |
| SymfonyTranslation | composer require symfony/translation |

### Development

Copy the .env.dist file and rename it to .env
```
$ ./.env
$ ./.env.dist
```
Replace db_user, db_password and db_name with your database credentials

Start your local server
```
$ php bin/console server:run
```
Create the database
```
$ php bin/console doctrine:database:create
```
Create the tables from the Entities
```
$ php bin/console doctrine:schema:create
```

## Test

Create a user for the API
```
$ php bin/console fos:user:create
```
Create the Client
```
$ http://127.0.0.1:8000/createClient
{
    "redirect-uri": "url.test.dev",
    "grant-type": "password"
}

Response:
{
    "client_id": "2_1qwrj2u31u4koscs4ckk4cog8ooo4o8o4kgkg8s4gokgg04g8s",
    "client_secret": "<KEY>"
}
```
Get the access_token (Bearer Token)
```
$ http://127.0.0.1:8000/oauth/v2/token
{
    "client_id": "1_4r6gtm58xq0w4og84gc08k80s40gw80c4c4wcsgw04sskk80go",
    "client_secret": "<KEY>",
    "grant_type": "password",
    "username": "user",
    "password": "<PASSWORD>"
}
(user and user_password are the credentials of the user created with php bin/console fos:user:create)

Response:
{
    "access_token": "<KEY>",
    "expires_in": 86400,
    "token_type": "bearer",
    "scope": null,
    "refresh_token": "<KEY>"
}
```
Add a product
```
POST : http://127.0.0.1:8000/api/product
Set Bearer Token as the authorization type and paste your access_token

{
    "name": "Samsung Galaxy Note9",
    "description": "Android operating system"
}
```
Get the products
```
GET : http://127.0.0.1:8000/api/products
```
Get a product
```
GET : http://127.0.0.1:8000/api/product/1
```
<file_sep>/src/Controller/ProductController.php
<?php

namespace App\Controller;

use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use FOS\RestBundle\Controller\FOSRestController;
use FOS\RestBundle\Controller\Annotations as Rest;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
use App\Entity\Product;

/**
 * Product controller.
 * @Route("/api", name="api_")
 */
class ProductController extends FOSRestController
{
    /**
     * Lists all Products.
     * @Rest\Get("/products")
     *
     * @return Response
     */
    public function getProductsAction()
    {
        $repository = $this->getDoctrine()->getRepository(Product::class);
        $products = $repository->findAll();

        return $this->handleView($this->view($products));
    }

    /**
     * Return a Product.
     * @Rest\Get("/product/{product_id}")
     *
     * @return Response
     */
    public function getProductAction(Request $request)
    {
        $repository = $this->getDoctrine()->getRepository(Product::class);
        $product = $repository->find($request->get('product_id'));

        if ($product) {
            return $this->handleView($this->view($product), Response::HTTP_OK);
        } else {
            return $this->handleView($this->view([]), Response::HTTP_OK);
        }
    }

    /**
     * Create Product.
     * @Rest\Post("/product")
     *
     * @return Response
     */
    public function addProductAction(Request $request)
    {
        $product = new Product();
        $data = json_decode($request->getContent(), true);

        try {
            $product->setName($data['name']);
            $product->setDescription($data['description']);

            $em = $this->getDoctrine()->getManager();
            $em->persist($product);
            $em->flush();

            return $this->handleView($this->view(['success' => true], Response::HTTP_CREATED));
        } catch (\Exception $e) {
            return $this->handleView($this->view(['success' => false, 'message' => $e->getMessage()], Response::HTTP_BAD_REQUEST));
        }
    }
}
be3d0b0524f8ff903ca53551f0a0f7d6c12dd8fa
[ "Markdown", "PHP" ]
2
Markdown
AugustinBrocquet/symfony_rest_api
61eb646c9396f4cf901411466a182c3a394122bb
899b6f7895d66ca8ec43c1991f2c27b1a01e89e6
refs/heads/master
<repo_name>open-switch/chef-opx-examples<file_sep>/cookbooks/route/metadata.rb name 'route' maintainer '<NAME>' maintainer_email '<EMAIL>' license 'All Rights Reserved' description 'Installs/Configures route' long_description 'Installs/Configures route' version '0.1.0' chef_version '>= 12.1' if respond_to?(:chef_version) <file_sep>/cookbooks/route/recipes/default.rb # # Cookbook:: route # Recipe:: default # # Copyright:: 2018, The Authors, All Rights Reserved. # execute 'show_route' do command 'ip route show' live_stream true end route 'add_route' do target '10.18.0.0/24' gateway '10.16.138.12' device 'eth0' ignore_failure true notifies :run, 'execute[show_route]', :immediately end route '10.18.0.0/24' do gateway '10.16.138.12' action :delete notifies :run, 'execute[show_route]', :immediately end <file_sep>/clos-fabric/roles/spine1.rb name "spine1" description "The configurations for spine1" run_list "recipe[quagga]" override_attributes "quagga" => { "clos_node" => "spine1" } <file_sep>/clos-fabric/roles/leaf1.rb name "leaf1" description "The configurations for leaf1" run_list "recipe[quagga]" override_attributes "quagga" => { "clos_node" => "leaf1" } <file_sep>/cookbooks/port-mirroring/recipes/default.rb # # Cookbook:: port-mirroring # Recipe:: default # # Copyright:: 2018, The Authors, All Rights Reserved. # execute 'delete_mirror' do command "cps_config_mirror.py delete 10" live_stream true end execute 'create_mirror' do command "cps_config_mirror.py create span e101-003-0 e101-004-0 both" live_stream true end <file_sep>/cookbooks/lldp/recipes/default.rb # # Cookbook:: lldp # Recipe:: default # # Copyright:: 2018, The Authors, All Rights Reserved. # execute 'show_configuration' do command 'sudo lldpcli show configuration' live_stream true end cookbook_file '/etc/lldpd.conf' do source 'lldpd.conf' mode '0755' action :create end service 'lldpd' do #action :nothing subscribes :restart, 'cookbook_file[/etc/lldpd.conf]', :immediately notifies :run, 'execute[show_configuration]', :immediately end <file_sep>/cookbooks/port-mirroring/metadata.rb name 'port-mirroring' maintainer 'The Authors' maintainer_email '<EMAIL>' license 'All Rights Reserved' description 'Installs/Configures port-mirroring' long_description 'Installs/Configures port-mirroring' version '0.1.0' chef_version '>= 12.1' if respond_to?(:chef_version) <file_sep>/clos-fabric/README.md # Provision CLOS fabric with Chef using Quagga example The sample cookbook/role facilitates the configuration of routing in OpenSwitch OPX using quagga. It requires the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device. The sample topology is a two-tier CLOS fabric with two spines and two leafs connected as mesh. EBGP is running between the two tiers. All switches in spine have the same AS number, and each leaf switch has a unique AS number. All AS number used are private. Below is the example for configuring BGP and interace using Quagga. ## Routing using Quagga > **NOTE**: See Routing using Quagga in the OpenSwitch OPX Configuration Guide Release 2.1.0 for more information. ![Alt text](./../_static/quagga-routing.png?raw=true "Title") ## Dependencies The sample recipe inside the cookbook is built on resources included in the core Chef code. The resources are available in Chef 12.17.15. ## Sample cookbook - CLOS fabric configuration The sample cookbook installs quagga and copies node specific quagga configurations. 
Below two configuration files must be present in the ``files/default/quagga`` folder in the cookbook. - daemons - vtysh.conf Quagga configurations for all leaf-spines must be present in the files directory of the cookbook - ``leaf1.conf``, ``leaf2.conf``, ``spine1.conf``, and ``spine2.conf``. The node specific configuration file (for example ``leaf1.conf``) will be copied as ``/etc/quagga/Quagga.conf`` in the target node (leaf1). **Example - files/leaf1.conf** hostname leaf1 interface e101-049-0 ip address 10.1.1.1/24 no shutdown interface e101-051-0 ip address 172.16.31.10/24 no shutdown interface e101-001-0 ip address 172.16.31.10/24 no shutdown router bgp 64501 neighbor 10.1.1.2 remote-as 64555 neighbor 192.168.127.12 remote-as 64555 network 10.1.1.0/24 network 192.168.127.12/24 network 172.16.58.3/24 maximum-paths 16 **Example - recipes/default.rb** cookbook_file '/etc/quagga/Quagga.conf' do source "#{node['quagga']['clos_node']}.conf" mode '0755' action :create notifies :restart, "service[quagga]", :immediately end **Upload cookbook** Copy quagga folder to ~/chef-repo/cookbooks directory. knife cookbook upload quagga ## Sample role - CLOS fabric configuration The sample roles contains four roles - ``leaf1.rb``, ``leaf2.rb``, ``spine1.rb``, and ``spine2.rb``. **Example - leaf1.rb** name "leaf1" description "The configurations for leaf1" run_list "recipe[quagga]" override_attributes "quagga" => { "clos_node" => "leaf1" } **Upload role** Copy all four rb files to the ``~/chef-repo/roles`` directory. knife role from file roles/leaf1.rb knife role from file roles/leaf2.rb knife role from file roles/spine1.rb knife role from file roles/spine2.rb **Execute** > **NOTE**: Add only the specific role (such as leaf1) to the run list of the node. Do not add the cookbook. knife node run_list add leaf1 role[leaf1] knife ssh 'name:leaf1' 'sudo chef-client' -x admin ## References - https://docs.chef.io/resource_reference.html - https://docs.chef.io/roles.html (c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved. <file_sep>/cookbooks/interface/recipes/default.rb # # Cookbook:: interface # Recipe:: default # # Copyright:: 2018, The Authors, All Rights Reserved. # execute 'show_intf' do command 'ifconfig e101-001-0' live_stream true end ifconfig 'add_intf' do target '7.7.7.7' mtu '777' bootproto 'dhcp' device 'e101-001-0' action [:add] notifies :run, 'execute[show_intf]', :immediately end ifconfig 'disable_intf' do device 'e101-001-0' action :disable notifies :run, 'execute[show_intf]', :immediately end ifconfig 'enable_intf' do device 'e101-001-0' target '7.7.7.7' action :enable notifies :run, 'execute[show_intf]', :immediately end ifconfig 'delete_intf' do device 'e101-001-0' action :delete notifies :run, 'execute[show_intf]', :immediately end <file_sep>/cookbooks/interface/metadata.rb name 'interface' maintainer '<NAME>' maintainer_email '<EMAIL>' license 'All Rights Reserved' description 'Installs/Configures interface' long_description 'Installs/Configures interface' version '0.1.0' chef_version '>= 12.1' if respond_to?(:chef_version) <file_sep>/cookbooks/bgp-quagga/recipes/default.rb # # Cookbook:: bgp-quagga # Recipe:: default # # Copyright:: 2018, The Authors, All Rights Reserved. 
# apt_package 'quagga' do action :install options '--yes' end remote_directory "/etc/quagga" do source "bgp" notifies :restart, "service[quagga]", :immediately end service "quagga" do supports :restart => true action [ :enable ] end execute "check status" do command 'service quagga status' live_stream true end execute "show configurations" do command "vtysh -c 'show running-config'" live_stream true end <file_sep>/cookbooks/lldp/README.md # Sample LLDP cookbook for OpenSwitch OPX The sample LLDP cookbook facilitates the configuration of link layer discovery protocol (LLDP) using ``lldpcli``. It requires the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device. ## Dependencies The sample LLDP recipe inside the cookbook is built on resources included in the core Chef code. The resources are available in chef 12.17.15. ## Sample LLDP configuration The sample LLDP recipe uses the file push mechanism where an ``lldpd.conf`` file containing all ``lldpcli`` commands is first pushed to ``/etc`` in the target device, and the ``lldpd`` service is restarted for the change to take effect. The cookbook contains two files - ``files/lldpd.conf`` and ``recipes/default.rb``. **Example conf file - files/lldpd.conf** configure lldp tx-interval 33 configure lldp tx-hold 3 configure med fast-start enable configure med fast-start tx-interval 3 **Example recipe - recipes/default.rb** execute 'show_configuration' do command 'sudo lldpcli show configuration' live_stream true end cookbook_file '/etc/lldpd.conf' do source 'lldpd.conf' mode '0755' action :create end service 'lldpd' do subscribes :restart, 'cookbook_file[/etc/lldpd.conf]', :immediately notifies :run, 'execute[show_configuration]', :immediately end **Upload cookbook and execute** Copy the ``lldp`` folder to the ``~/chef-repo/cookbooks`` directory. knife cookbook upload lldp knife node run_list add opx_node lldp knife ssh 'name:opx_node' 'sudo chef-client' ## References - https://docs.chef.io/resource_reference.html - https://docs.chef.io/resource_execute.html - https://docs.chef.io/resource_cookbook_file.html - https://docs.chef.io/resource_service.html (c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved. <file_sep>/cookbooks/interface/README.md # Sample interface cookbook for OpenSwitch OPX The sample interface cookbook facilitates the configuration of interface (IPv4) using Chef the resource ``ifconfig``. It requires the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device. ## Dependencies The sample interface recipe inside the cookbook is built on resources included in the core Chef code. The resources are available in chef 12.17.15. ## Sample interface configuration The sample interface recipe uses the Chef built-in resource ``ifconfig`` to configure an interface. 
**Example recipe - recipes/default.rb**

    execute 'show_intf' do
      command 'ifconfig e101-001-0'
      live_stream true
    end

    ifconfig 'add_intf' do
      target '7.7.7.7'
      mtu '777'
      bootproto 'dhcp'
      device 'e101-001-0'
      action [:add]
      notifies :run, 'execute[show_intf]', :immediately
    end

    ifconfig 'disable_intf' do
      device 'e101-001-0'
      action :disable
      notifies :run, 'execute[show_intf]', :immediately
    end

    ifconfig 'enable_intf' do
      device 'e101-001-0'
      target '7.7.7.7'
      action :enable
      notifies :run, 'execute[show_intf]', :immediately
    end

    ifconfig 'delete_intf' do
      device 'e101-001-0'
      action :delete
      notifies :run, 'execute[show_intf]', :immediately
    end

**Upload cookbook and execute**

Copy the ``interface`` folder to the ``~/chef-repo/cookbooks`` directory.

    knife cookbook upload interface
    knife node run_list add opx_node interface
    knife ssh 'name:opx_node' 'sudo chef-client' -x admin

## References

- https://docs.chef.io/resource_reference.html
- https://docs.chef.io/resource_ifconfig.html
- https://docs.chef.io/resource_execute.html

(c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved.
<file_sep>/cookbooks/bgp-quagga/README.md
# Sample BGP cookbook for OpenSwitch OPX

The sample BGP cookbook facilitates the configuration of BGP using quagga. It requires the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device.

## Dependencies

The sample BGP recipe inside the cookbook is built on resources included in the core Chef code. The resources are available in Chef 12.17.15.

## Sample BGP configuration

The sample BGP recipe for quagga first installs quagga, copies all configurations to the target device, and then restarts the quagga service. Apart from the recipe, these configuration files must be present in the ``files/default/bgp`` folder in the cookbook.

- daemons
- Quagga.conf
- vtysh.conf

**Example - files/default/bgp/daemons**

    zebra=yes
    bgpd=yes
    ospfd=no
    ospf6d=no
    ripd=no
    ripngd=no
    isisd=no
    babeld=no

**Example - files/default/bgp/Quagga.conf**

    hostname bgpd
    password <PASSWORD>
    router bgp 7675
     bgp router-id 10.0.0.1
     network 10.0.0.0/8
     neighbor 10.0.0.4 remote-as 7675
     neighbor 10.0.0.2 ebgp-multihop

**Example - files/default/bgp/vtysh.conf**

    !
    ! Sample configuration file for vtysh.
    !
    service integrated-vtysh-config
    hostname quagga-router
    username root nopassword

**Example recipe - recipes/default.rb**

    apt_package 'quagga' do
      action :install
      options '--yes'
    end

    remote_directory "/etc/quagga" do
      source "bgp"
      notifies :restart, "service[quagga]"
    end

    service "quagga" do
      supports :restart => true
      action [ :enable ]
    end

    execute "check status" do
      command 'service quagga status'
      live_stream true
    end

    execute "show configurations" do
      command "vtysh -c 'show running-config'"
      live_stream true
    end

**Upload cookbook and execute**

Copy the ``bgp-quagga`` folder to the ``~/chef-repo/cookbooks`` directory.

    knife cookbook upload bgp-quagga
    knife node run_list add opx_node bgp-quagga
    knife ssh 'name:opx_node' 'sudo chef-client' -x admin

## References

- https://docs.chef.io/resource_reference.html
- https://docs.chef.io/resource_apt_package.html
- https://docs.chef.io/resource_remote_directory.html
- https://docs.chef.io/resource_execute.html
- https://docs.chef.io/resource_service.html

(c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved.
<file_sep>/cookbooks/route/README.md
# Sample route cookbook for OpenSwitch OPX

The sample route cookbook facilitates the configuration of a static route (IPv4) using the Chef resource ``route``.
It requires the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device.

## Dependencies

The sample route recipe inside the cookbook is built on resources included in the core Chef code. The resources are available in Chef 12.17.15.

## Sample route configuration

The sample route recipe uses the Chef built-in resource ``route`` to configure static route entries in the routing table.

**Example recipe - recipes/default.rb**

    execute 'show_route' do
      command 'ip route show'
      live_stream true
    end

    route 'add_route' do
      target '10.18.0.0/24'
      gateway '10.16.138.12'
      device 'eth0'
      ignore_failure true
      notifies :run, 'execute[show_route]', :immediately
    end

    route '10.18.0.0/24' do
      gateway '10.16.138.12'
      action :delete
      notifies :run, 'execute[show_route]', :immediately
    end

**Upload cookbook and execute**

Copy the ``route`` folder to the ``~/chef-repo/cookbooks`` directory.

    knife cookbook upload route
    knife node run_list add opx_node route
    knife ssh 'name:opx_node' 'sudo chef-client' -x admin

## References

- https://docs.chef.io/resource_reference.html
- https://docs.chef.io/resource_route.html
- https://docs.chef.io/resource_execute.html

(c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved.
<file_sep>/clos-fabric/roles/leaf2.rb
name "leaf2"
description "The configurations for leaf2"
run_list "recipe[quagga]"
override_attributes "quagga" => {
    "clos_node" => "leaf2"
}
<file_sep>/README.md
# Sample Chef cookbooks for OpenSwitch OPX

Chef is a powerful automation platform that transforms infrastructure into code. Whether you are operating in the cloud, on-premises, or in a hybrid environment, Chef automates how infrastructure is configured, deployed, and managed across your network - no matter its size.

https://docs.chef.io/
https://docs.chef.io/chef_overview.html

Chef has three main components:

- The Chef DK workstation is the location where users interact with Chef. On the workstation, users author and test cookbooks using tools such as Test Kitchen and interact with the Chef server using the knife and Chef command line tools.

- Chef client nodes are the machines (such as OPX devices) that are managed by Chef. The Chef client is installed on each node and is used to configure the node to its desired state.

- The Chef server acts as a hub for configuration data. The Chef server stores cookbooks, the policies that are applied to nodes, and metadata that describes each registered node that is being managed by Chef. Nodes use the Chef client to ask the Chef server for configuration details such as recipes, templates, and file distributions.

This directory contains sample Chef cookbooks that facilitate the configuration of various features such as LLDP, interface, route, BGP (quagga), and port mirroring.

## Dependencies

The sample recipes inside the cookbooks are built on resources included in the core Chef code. The resources are available in Chef 12.17.15. The cookbooks require the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device (client).

## Reference - Chef setup and execution

**Chef setup**

Refer to these links for the Chef (server-client-workstation) setup:

https://docs.chef.io/install_server.html
https://docs.chef.io/workstation.html
https://docs.chef.io/chef_client_overview.html
https://docs.chef.io/knife.html

**Cookbooks/roles**

Cookbooks are written, tested, and maintained on the Chef workstation. The cookbooks are uploaded to the Chef server from the workstation.
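As an illustrative sketch (the cookbook name ``my_cookbook`` is only a placeholder, not part of this repository), a new cookbook skeleton can be generated on the workstation with the Chef DK and then uploaded with knife:

    chef generate cookbook cookbooks/my_cookbook
    knife cookbook upload my_cookbook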
https://docs.chef.io/cookbooks.html
https://docs.chef.io/resource_reference.html
https://docs.chef.io/roles.html

> **NOTE**: Various sample cookbooks are available at https://supermarket.chef.io/cookbooks.

**Example - upload cookbook and execute**

Copy the sample cookbooks in the cookbooks directory to ``~/chef-repo/cookbooks`` on the workstation. The ``clos-fabric`` folder alone contains both the cookbook and the role to configure a leaf-spine topology.

**NOTE**: ``opx_node`` is the Chef client created for an OpenSwitch OPX device.

Upload the cookbook (for example, lldp) from the local repository to the Chef server.

    knife cookbook upload lldp

Include the cookbook/role in the run_list of the managed node.

    knife node run_list add opx_node lldp

(and/or)

    knife node run_list add opx_node role[leaf1]

Execute the run_list of the node (Chef client).

    knife ssh 'name:opx_node' 'sudo chef-client'

(c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved.
<file_sep>/cookbooks/port-mirroring/README.md
# Sample port mirroring cookbook for OpenSwitch OPX

The sample cookbook facilitates the configuration of port mirroring. It requires the Chef setup (workstation-server-client) to be done and working with an SSH connection to an OpenSwitch OPX device.

## Dependencies

The sample port mirroring recipe inside the cookbook is built on resources included in the core Chef code. The resources are available in Chef 12.17.15.

## Sample port mirroring configuration

The sample port mirroring recipe uses the Chef built-in resource ``execute`` to run the Python script provided by OpenSwitch OPX that configures port mirroring through CPS.

**Example recipe - recipes/default.rb**

    execute 'delete_mirror' do
      command "cps_config_mirror.py delete 10"
      live_stream true
    end

    execute 'create_mirror' do
      command "cps_config_mirror.py create span e101-003-0 e101-004-0 both"
      live_stream true
    end

**Upload cookbook and execute**

Copy the ``port-mirroring`` folder to the ``~/chef-repo/cookbooks`` directory.

**NOTE**: For the port mirroring CPS commands to work, the ``opx_node`` node must be configured with root login.

    knife cookbook upload port-mirroring
    knife node run_list add opx_node port-mirroring
    knife ssh 'name:opx_node' 'sudo chef-client'

## References

- https://docs.chef.io/resource_reference.html
- https://docs.chef.io/resource_execute.html

(c) 2018 Dell Inc. or its subsidiaries. All Rights Reserved.
<file_sep>/clos-fabric/roles/spine2.rb
name "spine2"
description "The configurations for spine2"
run_list "recipe[quagga]"
override_attributes "quagga" => {
    "clos_node" => "spine2"
}
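# The spine2 role can be uploaded and applied following the same knife
# command pattern shown for leaf1 in the clos-fabric README (the node name
# 'spine2' below is assumed to match the registered Chef client name):
#   knife role from file roles/spine2.rb
#   knife node run_list add spine2 role[spine2]
#   knife ssh 'name:spine2' 'sudo chef-client' -x admin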
<file_sep># aws

AWS services, CloudFormation, and the AWS CLI.
<file_sep>import boto3

s3 = boto3.client('s3')
ec2 = boto3.client('ec2')
iam = boto3.client('iam')

### Run (aws configure) for your account in the terminal.
### Run (pip install boto3).
### Copy this into a new file and name it aws.py.
### Run (python aws.py).

def getbuckets():
    buckets = s3.list_buckets()
    bucketlist = []
    for i in buckets['Buckets']:
        bucket = i['Name']
        bucketlist.append(bucket)
    return {"buckets": bucketlist}

print(getbuckets())

## Use this link >>> https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#client
## and add more functions and test it.
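## Illustrative sketch of an additional function (not part of the original
## script): list IAM user names using the 'iam' client created above. It uses
## only the documented iam.list_users() call and the same credentials set up
## with 'aws configure'; the function name is a placeholder.
def getusers():
    users = iam.list_users()
    userlist = []
    for u in users['Users']:
        userlist.append(u['UserName'])
    return {"users": userlist}

## Uncomment to test:
## print(getusers())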