branch_name
stringclasses 15
values | target
stringlengths 26
10.3M
| directory_id
stringlengths 40
40
| languages
sequencelengths 1
9
| num_files
int64 1
1.47k
| repo_language
stringclasses 34
values | repo_name
stringlengths 6
91
| revision_id
stringlengths 40
40
| snapshot_id
stringlengths 40
40
| input
stringclasses 1
value |
---|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>shouchangfayi/mvc<file_sep>/Mvc.Web.Android/Controllers/BookController.cs
using Mvc.Web.Android.Helper;
using Mvc.Web.Android.Models.Book;
using NapiService;
using NapiService.Model;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class BookController : BaseController
{
    #region Fields
    /// <summary>
    /// Book service shared by every action in this controller.
    /// </summary>
    BookService _bookservice = new BookService();
    /// <summary>
    /// Intro text of the lead book in a recommendation block;
    /// populated as an out-argument by the service calls below.
    /// </summary>
    string _fristinfo = "";
    #endregion
    #region Main pages
    /// <summary>
    /// Featured page (site home).
    /// </summary>
    public ActionResult Index()
    {
        return View();
    }
    /// <summary>
    /// Free books page.
    /// </summary>
    public ActionResult MianFei()
    {
        return View();
    }
    /// <summary>
    /// Rankings page.
    /// </summary>
    public ActionResult Bank()
    {
        return View();
    }
    /// <summary>
    /// Published-works page.
    /// </summary>
    public ActionResult Publish()
    {
        return View();
    }
    /// <summary>
    /// "Guess you like" page.
    /// </summary>
    public ActionResult GuessLike()
    {
        return View();
    }
    /// <summary>
    /// "More featured" page; passes the raw type/sort pair to the view.
    /// </summary>
    public ActionResult MoreJingPin(int type = (int)RecommendClass.Fine, int sort = (int)SearchSort.VipSameDayDescAndUpdateTimeDesc)
    {
        int[] types = { type, sort };
        return View(types);
    }
    /// <summary>
    /// Category page.
    /// </summary>
    public ActionResult Category(int Type = (int)CategoryClass.None, int Sort = (int)CategorySort.New)
    {
        var model = new CategoryModel();
        model.Type = (CategoryClass)Type;
        model.Sort = (CategorySort)Sort;
        return View(model);
    }
    /// <summary>
    /// Special-topic page.
    /// </summary>
    public ActionResult ZhuanTi()
    {
        return View();
    }
    #endregion
    #region Partial views
    /// <summary>
    /// Recommendation block, variant 1 (with cover images).
    /// Any service failure degrades to an empty result on purpose.
    /// </summary>
    public ActionResult GetEffectRecommend(int count, int maxcount, int recommendType)
    {
        try
        {
            var booksearchlist = _bookservice.GetEffectRecommendtion((RecommendClass)recommendType, count, maxcount, out _fristinfo);
            if (booksearchlist.Count == 0)
                return new EmptyResult();
            // NOTE(review): "_PartEffectList" is also rendered by GetEffect with a
            // Tuple&lt;List&lt;Bookeffect&gt;, string, int&gt; model — confirm the view handles both shapes.
            return PartialView("_PartEffectList", new Tuple<List<Booksearch>, string, int>(booksearchlist, _fristinfo.ShortIntro(40), 0));
        }
        catch (Exception)
        {
            // Best-effort block: render nothing rather than break the page.
            return new EmptyResult();
        }
    }
    /// <summary>
    /// Recommendation block without images.
    /// </summary>
    public ActionResult GetEffectRecommendNoImg(int count, int effecttype)
    {
        try
        {
            var bookeffectlist = _bookservice.GetEffects(effecttype, count, out _fristinfo);
            if (bookeffectlist.Count == 0)
                return new EmptyResult();
            return PartialView("_PartEffectNoImgList", bookeffectlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// Recommendation block, variant 2.
    /// </summary>
    public ActionResult GetEffect(int count, int effecttype)
    {
        try
        {
            var bookeffectlist = _bookservice.GetEffects(effecttype, count, out _fristinfo);
            if (bookeffectlist.Count == 0)
                return new EmptyResult();
            return PartialView("_PartEffectList", new Tuple<List<Bookeffect>, string, int>(bookeffectlist, _fristinfo.ShortIntro(40), 1));
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// Recommendation block showing images only, in random order.
    /// </summary>
    public ActionResult GetEffectImg(int count, int effecttype)
    {
        try
        {
            var bookeffectlist = _bookservice.GetEffects(effecttype, count, out _fristinfo);
            if (bookeffectlist.Count == 0)
                return new EmptyResult();
            // Shuffle by sorting on fresh GUIDs.
            bookeffectlist = bookeffectlist.OrderBy(x => System.Guid.NewGuid()).ToList();
            return PartialView("_PartEffectImgList", bookeffectlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// Rankings list block (paged).
    /// </summary>
    public ActionResult GetRankings(int rankingtype, int index, int size)
    {
        try
        {
            var total = 0;
            var booksearchlist = _bookservice.GetRankings((NapiService.RankingClass)rankingtype, index, size, out total);
            if (booksearchlist.Count == 0)
                return new EmptyResult();
            ViewBag.rankingtype = rankingtype;
            ViewBag.rankindex = index;
            ViewBag.ranktotal = total;
            return PartialView("_PartRankingsList", booksearchlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// Category list block. Some pseudo-categories/sorts are flags rather than
    /// real enum values, so they are translated into SearchObject switches first.
    /// </summary>
    public ActionResult GetCategoryView(int classid = (int)CategoryClass.None, int sort = (int)CategorySort.New, int index = 0, int size = 10)
    {
        try
        {
            ViewBag.IsShow = false;
            var obj = new SearchObject();
            switch (classid)
            {
                case (int)CategoryClass.None: ViewBag.IsShow = true; break;
                case (int)CategoryClass.Monthly: ViewBag.IsShow = true; classid = (int)CategoryClass.None; obj.IsMonthly = true; break;
            }
            switch (sort)
            {
                case (int)CategorySort.Free: sort = (int)CategorySort.Hot; obj.IsClientFree = true; break;
                case (int)CategorySort.SpecialOffer: sort = (int)CategorySort.Hot; obj.IsSpecialoffer = true; break;
                case (int)CategorySort.Finished: sort = (int)CategorySort.Hot; obj.Status = SearchBookStatus.Finished; break;
            }
            obj.Class = (SearchClass)classid;
            obj.Sort = (SearchSort)sort;
            obj.Index = index;
            obj.Size = size;
            var total = 0;
            var booksearchlist = _bookservice.Search(obj, out total);
            if (total == 0 || booksearchlist.Count == 0)
                return new EmptyResult();
            ViewBag.total = total;
            ViewBag.size = size;
            return PartialView("_PartCategoryList", booksearchlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// "Guess you like" list block.
    /// </summary>
    /// <param name="count">Number of books requested.</param>
    /// <param name="effecttype">Effect category passed to the service.</param>
    /// <returns>Partial view, or an empty result when nothing is available.</returns>
    public ActionResult GetGuessLikeView(int count, int effecttype)
    {
        try
        {
            var bookeffectlist = _bookservice.GetEffects(effecttype, count, out _fristinfo);
            if (bookeffectlist.Count == 0)
                return new EmptyResult();
            return PartialView("_PartGuessLikeList", bookeffectlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// "More featured" list block: maps the recommendation type onto search filters.
    /// </summary>
    /// <param name="type">Recommendation class (see RecommendClass).</param>
    /// <param name="sort">Search sort order.</param>
    /// <param name="index">Page index.</param>
    /// <param name="size">Page size.</param>
    /// <returns>Partial view, or an empty result when nothing matches.</returns>
    public ActionResult GetMoreJingPinView(int type = (int)RecommendClass.Fine, int sort = (int)SearchSort.VipSameDay_Desc, int index = 0, int size = 10)
    {
        try
        {
            var obj = new SearchObject();
            switch ((RecommendClass)type)
            {
                case RecommendClass.Finished: obj.Status = SearchBookStatus.Finished; break;
                case RecommendClass.Ancient: obj.Class = SearchClass.Aerial | SearchClass.History | SearchClass.PassThrough; break;
                case RecommendClass.Modern: obj.Class = SearchClass.Urban | SearchClass.Youth | SearchClass.PowerfulFamily; break;
                case RecommendClass.Occult: obj.Class = SearchClass.Ability | SearchClass.Magical | SearchClass.Occult; break;
                case RecommendClass.Free: obj.IsClientFree = true; break;
            }
            obj.Sort = (SearchSort)sort;
            obj.Index = index;
            obj.Size = size;
            var total = 0;
            var booksearchlist = _bookservice.Search(obj, out total);
            if (total == 0 || booksearchlist.Count == 0)
                return new EmptyResult();
            ViewBag.total = total;
            ViewBag.size = size;
            return PartialView("_PartMoreJingPinList", booksearchlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    /// <summary>
    /// Published-works list block. Mirrors GetMoreJingPinView (same filters,
    /// same "_PartMoreJingPinList" view) — kept separate to preserve routes.
    /// </summary>
    /// <param name="type">Recommendation class (see RecommendClass).</param>
    /// <param name="sort">Search sort order.</param>
    /// <param name="index">Page index.</param>
    /// <param name="size">Page size.</param>
    /// <returns>Partial view, or an empty result when nothing matches.</returns>
    public ActionResult GetPublishView(int type = (int)RecommendClass.Fine, int sort = (int)SearchSort.VipSameDay_Desc, int index = 0, int size = 10)
    {
        try
        {
            var obj = new SearchObject();
            switch ((RecommendClass)type)
            {
                case RecommendClass.Finished: obj.Status = SearchBookStatus.Finished; break;
                case RecommendClass.Ancient: obj.Class = SearchClass.Aerial | SearchClass.History | SearchClass.PassThrough; break;
                case RecommendClass.Modern: obj.Class = SearchClass.Urban | SearchClass.Youth | SearchClass.PowerfulFamily; break;
                case RecommendClass.Occult: obj.Class = SearchClass.Ability | SearchClass.Magical | SearchClass.Occult; break;
                case RecommendClass.Free: obj.IsClientFree = true; break;
            }
            obj.Sort = (SearchSort)sort;
            obj.Index = index;
            obj.Size = size;
            var total = 0;
            var booksearchlist = _bookservice.Search(obj, out total);
            if (total == 0 || booksearchlist.Count == 0)
                return new EmptyResult();
            ViewBag.total = total;
            ViewBag.size = size;
            return PartialView("_PartMoreJingPinList", booksearchlist);
        }
        catch (Exception)
        {
            return new EmptyResult();
        }
    }
    #endregion
}
}
<file_sep>/Mvc.Web.Android/Controllers/HomeController.cs
using Mvc.Web.Android.Helper;
using Mvc.Web.Android.Settings;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class HomeController : BaseController
{
    /// <summary>
    /// Site entry point: hands off to the Book controller's featured page.
    /// </summary>
    public ActionResult Index()
    {
        return RedirectToAction("Index", "Book");
    }
}
}
<file_sep>/Mvc.Web.Android/Settings/SiteSettings.cs
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Web;
namespace Mvc.Web.Android.Settings
{
/// <summary>
/// 网站设置信息
/// </summary>
/// <summary>
/// Site-wide settings read from appSettings in the application config.
/// </summary>
public static class SiteSettings
{
    /// <summary>
    /// Client free-book feature switch (appSettings key "FreeBookON").
    /// </summary>
    public static int FreeBookON = ConfigurationManager.AppSettings["FreeBookON"].To<int>();
    /// <summary>
    /// Subscription channel id (appSettings key "SubscribeChannel", default 1).
    /// </summary>
    public static int SubscribeChannel = ConfigurationManager.AppSettings["SubscribeChannel"].To<int>(1);
    /// <summary>
    /// Payment channels parsed from appSettings key "PayChannels",
    /// formatted "key1|value1,key2|value2". Re-parsed on every access.
    /// </summary>
    public static Dictionary<string, string> PayChannel
    {
        get
        {
            var channels = new Dictionary<string, string>();
            var items = ConfigurationManager.AppSettings["PayChannels"].To<string>(string.Empty).Split(new[] { "," }, StringSplitOptions.RemoveEmptyEntries);
            foreach (var item in items)
            {
                var arr = item.Split(new[] { "|" }, StringSplitOptions.RemoveEmptyEntries);
                if (arr.Length == 2)
                {
                    // Indexer (last-wins) instead of Add: a duplicated key in the
                    // config file must not throw an ArgumentException here.
                    channels[arr[0]] = arr[1];
                }
            }
            return channels;
        }
    }
}
}<file_sep>/Mvc.Web.Android/Core/LoginAuthorizeAttribute.cs
using NapiService;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android
{
/// <summary>
/// 用户登陆验证
/// </summary>
public partial class LoginAuthorizeAttribute : AuthorizeAttribute
{
    private const string DEFAULT_LOGIN_URL = "/Member/Login";
    private const string DEFAULT_COOKIE_KEY = "USER_FLAG";
    private string _Url = string.Empty;
    private string _Key = string.Empty;
    /// <summary>
    /// Reads "LoginUrl" / "LoginKey" from appSettings, falling back to defaults.
    /// </summary>
    public LoginAuthorizeAttribute()
    {
        _Url = ConfigurationManager.AppSettings["LoginUrl"];
        _Key = ConfigurationManager.AppSettings["LoginKey"];
        if (string.IsNullOrEmpty(_Url))
        {
            _Url = DEFAULT_LOGIN_URL;
        }
        if (string.IsNullOrEmpty(_Key))
        {
            _Key = DEFAULT_COOKIE_KEY;
        }
    }
    /// <summary>
    /// Uses a custom login URL; the cookie key keeps the configured default.
    /// </summary>
    public LoginAuthorizeAttribute(string url)
        : this()
    {
        _Url = url;
    }
    /// <summary>
    /// Uses a custom login URL and cookie key.
    /// </summary>
    public LoginAuthorizeAttribute(string url, string key)
        : this(url)
    {
        _Key = key;
    }
    /// <summary>
    /// Redirects to the login page when the auth cookie is absent or empty,
    /// unless the action or controller is marked [AllowAnonymous].
    /// </summary>
    public override void OnAuthorization(AuthorizationContext filterContext)
    {
        // Guard the argument itself (the original tested HttpContext but
        // reported "filterContext" in the exception).
        if (filterContext == null)
        {
            throw new ArgumentNullException("filterContext");
        }
        if (filterContext.ActionDescriptor.IsDefined(typeof(AllowAnonymousAttribute), true) ||
            filterContext.ActionDescriptor.ControllerDescriptor.IsDefined(typeof(AllowAnonymousAttribute), true))
        {
            return;
        }
        string value = string.Empty;
        HttpCookie cookie = filterContext.HttpContext.Request.Cookies[_Key];
        if (cookie != null)
        {
            value = WebHelper.Decrypt(cookie.Value);
        }
        if (string.IsNullOrWhiteSpace(value))
        {
            // NOTE(review): setting filterContext.Result = new RedirectResult(...)
            // is the canonical MVC way (see the original commented line);
            // Response.Redirect is kept to preserve existing behavior.
            filterContext.HttpContext.Response.Redirect(_Url + "?referer=" + filterContext.HttpContext.Request.RawUrl);
        }
    }
}
}<file_sep>/Mvc.Web.Android/Controllers/MemberController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class MemberController : Controller
{
    /// <summary>
    /// Default referer is the site root; each action may override it.
    /// </summary>
    public MemberController()
    {
        ViewBag.RefererUrl = "/";
    }
    /// <summary>
    /// Stores the return URL (when supplied) and renders the current action's view.
    /// View resolution uses route data, so the caller's action name still applies.
    /// </summary>
    private ActionResult ViewWithReferer(string referer)
    {
        if (!string.IsNullOrWhiteSpace(referer))
            ViewBag.RefererUrl = referer;
        return View();
    }
    /// <summary>
    /// Login page.
    /// <para>@referer: URL to return to after login.</para>
    /// </summary>
    public ActionResult Login(string referer)
    {
        return ViewWithReferer(referer);
    }
    /// <summary>
    /// Registration page.
    /// <para>@referer: URL to return to after registration.</para>
    /// </summary>
    public ActionResult Register(string referer)
    {
        return ViewWithReferer(referer);
    }
    /// <summary>
    /// Password-recovery page.
    /// <para>@referer: URL to return to afterwards.</para>
    /// </summary>
    public ActionResult FindPassword(string referer)
    {
        return ViewWithReferer(referer);
    }
}
}<file_sep>/Mvc.Web.Android/Controllers/MasterController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
/// <summary>
/// 母版页
/// </summary>
/// <summary>
/// Layout (master page) partials.
/// </summary>
public class MasterController : BaseController
{
    /// <summary>
    /// Common settings partial (version, device id).
    /// </summary>
    public ActionResult SettingsCommon()
    {
        return PartialView();
    }
    /// <summary>
    /// Promotion partial: rendered only for a logged-in user (ID != 0)
    /// who also qualifies as a new member; otherwise nothing.
    /// </summary>
    public ActionResult XuanChuan()
    {
        bool loggedIn = ViewBag.User.ID != 0;
        if (loggedIn && IsNewMember())
            return PartialView();
        return new EmptyResult();
    }
}
}
<file_sep>/Mvc.Web.Android/Scripts/index.js
$(document).ready(function () {
    (function ($) {
        // Activate the tab strip and highlight its first tab link.
        $('.tab ul.tabs').addClass('active').find('> a:eq(0)').addClass('current');
        // On tap, highlight the tapped tab and show the content pane at the
        // same index, hiding all other panes. (The old commented-out
        // "touchstart" handler was dead code and has been removed.)
        $('.tab ul.tabs li a').tap(function (g) {
            var tab = $(this).closest('.tab'),
                index = $('.tab ul.tabs li a').index($(this));
            $('ul.tabs > li > a').removeClass('current');
            $(this).addClass('current');
            tab.find('.tab_content').find('div.tabs_item').not('div.tabs_item:eq(' + index + ')').hide();
            tab.find('.tab_content').find('div.tabs_item:eq(' + index + ')').show();
            g.preventDefault();
        });
    })(jQuery);
});
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace Mvc.Web.Android.Models
{
/// <summary>
/// Http响应结果
/// </summary>
/// <summary>
/// Generic HTTP response envelope returned to the client.
/// </summary>
public partial class HttpResponseModel
{
    /// <summary>
    /// Status code; client script treats non-zero as failure — confirm convention.
    /// </summary>
    public int Code = 0;
    /// <summary>
    /// Human-readable message accompanying the code.
    /// </summary>
    public string Message = string.Empty;
    /// <summary>
    /// Primary payload of the response.
    /// </summary>
    public Object Data = null;
    /// <summary>
    /// Extra data attached alongside the payload.
    /// </summary>
    public Object Attachments = null;
}
}<file_sep>/Mvc.Web.Android/Controllers/BaseController.cs
using Mvc.Web.Android.Models;
using NapiService;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;
namespace Mvc.Web.Android.Controllers
{
public class BaseController : Controller
{
    /// <summary>
    /// Member service shared by derived controllers.
    /// </summary>
    public NapiService.MemberService _memberService = new NapiService.MemberService();
    /// <summary>
    /// Identity info of the current member; stays an empty model when not logged in.
    /// </summary>
    public MemberTagsModel _memberTags = new MemberTagsModel();
    /// <summary>
    /// Decrypts the USER_FLAG cookie, deserializes the member tags and
    /// exposes them to views as ViewBag.User.
    /// </summary>
    protected override void Initialize(RequestContext requestContext)
    {
        base.Initialize(requestContext);
        var value = WebHelper.GetCookie("USER_FLAG");
        if (!string.IsNullOrEmpty(value))
        {
            value = WebHelper.Decrypt(value);
            var memberTags = WebHelper.Json<MemberTagsModel>(value);
            if (memberTags != null && !string.IsNullOrEmpty(memberTags.Password))
                _memberTags = memberTags; // reuse the instance — the original deserialized the same JSON twice
        }
        ViewBag.User = _memberTags;
    }
    /// <summary>
    /// Whether the current member counts as "new". A MEMBER_FLAG_VALUE cookie of
    /// "1" short-circuits to false; otherwise asks the member service and caches
    /// a negative answer in the cookie (~900 days) to avoid repeat service calls.
    /// </summary>
    /// <returns>true when the service reports the member as new/uncharged.</returns>
    public bool IsNewMember()
    {
        string key = "MEMBER_FLAG_VALUE";
        string userFlag = WebHelper.GetCookie(key);
        if (userFlag == "1")
            return false;
        bool result = _memberService.IsCharged(_memberTags.ID, this.DeviceNumber);
        if (!result)
            WebHelper.SetCookie(key, "1", 60 * 24 * 900);
        return result;
    }
    /// <summary>
    /// Redirects to the login page, passing the current URL as referer.
    /// NOTE(review): the target "Member?referer=" is relative and lacks the
    /// "/Member/Login" action segment used elsewhere — confirm intended route.
    /// </summary>
    public void Login()
    {
        string returnUrl = Request.Url.OriginalString;
        Response.Redirect("Member?referer=" + HttpUtility.UrlEncode(returnUrl));
    }
    /// <summary>
    /// Device number decrypted from the AUTH_ID cookie.
    /// </summary>
    public string DeviceNumber
    {
        get
        {
            return WebHelper.Decrypt(WebHelper.GetCookie("AUTH_ID"));
        }
    }
    /// <summary>
    /// Client version number decrypted from the VerXxsy cookie.
    /// </summary>
    public double Version
    {
        get
        {
            return WebHelper.Decrypt(WebHelper.GetCookie("VerXxsy")).To<double>();
        }
    }
    // Removed: ~60 lines of commented-out SetPayment / IsSpecifiedTimeMember
    // dead code (pricing rules from 2015). Recover from source control if needed.
}
/// <summary>
/// Nxp专用字典
/// </summary>
/// <typeparam name="TKey">键类型</typeparam>
/// <typeparam name="TValue">值类型</typeparam>
/// <summary>
/// Dictionary with typed, default-tolerant accessors.
/// </summary>
/// <typeparam name="TKey">Key type.</typeparam>
/// <typeparam name="TValue">Value type.</typeparam>
public partial class NxpDictionary<TKey, TValue> : Dictionary<TKey, TValue>
{
    /// <summary>
    /// Gets the value for <paramref name="key"/>, or <paramref name="defValue"/> when absent.
    /// </summary>
    /// <param name="key">Key name.</param>
    /// <param name="defValue">Default value.</param>
    /// <returns>The stored value or the default.</returns>
    public TValue Get(TKey key, TValue defValue = default(TValue))
    {
        // TryGetValue: single lookup instead of ContainsKey + indexer.
        TValue value;
        if (this.TryGetValue(key, out value))
            return value;
        return defValue;
    }
    /// <summary>
    /// Gets the value converted to int.
    /// </summary>
    /// <param name="key">Key name.</param>
    /// <param name="defValue">Default value.</param>
    /// <returns>int</returns>
    public int GetInt32(TKey key, int defValue = default(int))
    {
        TValue value = Get(key);
        return value.To<int>(defValue);
    }
    /// <summary>
    /// Gets the value as bool:
    /// 0 - false
    /// 1 - true
    /// </summary>
    /// <param name="key">Key name.</param>
    /// <returns>bool</returns>
    public bool GetBoolean(TKey key)
    {
        TValue value = Get(key);
        return (value.To<int>() == 1);
    }
    /// <summary>
    /// Gets the value converted to DateTime.
    /// </summary>
    /// <param name="key">Key name.</param>
    /// <param name="defValue">Default value.</param>
    /// <returns>DateTime</returns>
    public DateTime GetDateTime(TKey key, DateTime defValue = default(DateTime))
    {
        TValue value = Get(key);
        return value.To<DateTime>(defValue);
    }
    /// <summary>
    /// Gets the value converted to double.
    /// </summary>
    /// <param name="key">Key name.</param>
    /// <param name="defValue">Default value.</param>
    /// <returns>double</returns>
    public double GetDouble(TKey key, double defValue = default(double)) // was default(int) — wrong type for a double parameter
    {
        TValue value = Get(key);
        return value.To<double>(defValue);
    }
    /// <summary>
    /// Gets the value converted to string.
    /// </summary>
    /// <param name="key">Key name.</param>
    /// <param name="defValue">Default value.</param>
    /// <returns>string</returns>
    public string GetString(TKey key, string defValue = default(string))
    {
        TValue value = Get(key);
        return value.To<string>(defValue);
    }
}
}
<file_sep>/Mvc.Web.Android/Controllers/VercodeController.cs
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class VercodeController : BaseController
{
    /// <summary>
    /// Single random source for the code, colors, fonts, lines and noise.
    /// </summary>
    Random _random = new Random((int)DateTime.Now.Ticks);
    /// <summary>
    /// Number of random noise lines.
    /// </summary>
    const int RANDOM_LINE_COUNT = 10;
    /// <summary>
    /// Number of random noise pixels.
    /// </summary>
    const int RANDOM_PRINT_COUNT = 80;
    /// <summary>
    /// Fonts the captcha characters are drawn with.
    /// </summary>
    string[] Fonts = new[] {
        "Helvetica", "Geneva", "Times New Roman", "Courier New", "Arial"
    };
    /// <summary>
    /// Generates a new captcha: stores the answer in session and
    /// writes the expression image directly to the response.
    /// </summary>
    public ActionResult Index()
    {
        string exp = string.Empty;
        var code = GetRandomCalc(out exp);
        Session["VALIDATE_CODE"] = code;
        CreateImage(exp);
        return new EmptyResult();
    }
    /// <summary>
    /// Produces a random add/subtract expression with a non-negative answer.
    /// </summary>
    /// <param name="exp">Receives the rendered expression, e.g. "12+7=?".</param>
    /// <returns>The numeric answer.</returns>
    public int GetRandomCalc(out string exp)
    {
        int value = 0;
        int n1 = 0;
        int n2 = 0;
        string n3 = _random.Next(0, 10) % 2 == 0 ? "+" : "-";
        if (n3 == "+")
        {
            n1 = _random.Next(0, 30);
            n2 = _random.Next(1, 30);
            value = n1 + n2;
        }
        else
        {
            n2 = _random.Next(0, 30);
            // Keep re-rolling n1 until it exceeds n2 so the result stays positive.
            while (n1 <= n2) n1 = _random.Next(1, 60);
            value = n1 - n2;
        }
        exp = string.Format("{0}{1}{2}=?"
            , n1, n3, n2);
        return value;
    }
    /// <summary>
    /// Random color biased so red+green+blue stays fairly high (readable noise).
    /// </summary>
    public Color GetRandomColor()
    {
        var red = _random.Next(255);
        var green = _random.Next(255);
        var blue = (red + green > 400) ? 0 : 400 - red - green;
        if (blue > 255) blue = 255;
        return Color.FromArgb(red, green, blue);
    }
    /// <summary>
    /// Renders the expression as a PNG and writes it to the current response.
    /// All GDI+ objects are disposed even if drawing throws.
    /// </summary>
    /// <param name="validateCode">The expression text to draw.</param>
    public void CreateImage(string validateCode)
    {
        int width = validateCode.Length * 30;
        using (Bitmap bmp = new Bitmap(width + validateCode.Length * 3, 50))
        using (Graphics g = Graphics.FromImage(bmp))
        {
            g.Clear(Color.FromArgb(255, 255, 255));
            DrawLine(g, bmp, RANDOM_LINE_COUNT);
            DrawPrint(bmp, RANDOM_PRINT_COUNT);
            for (int i = 0; i < validateCode.Length; i++)
            {
                // Shear each glyph slightly for distortion.
                Matrix matrix = new Matrix();
                matrix.Shear((float)_random.Next(0, 300) / 1000 - 0.25f, (float)_random.Next(0, 100) / 1000 - 0.05f);
                g.Transform = matrix;
                string str = validateCode.Substring(i, 1);
                Point pos = new Point(i * 30 + 1 + _random.Next(3), 1 + _random.Next(5));
                using (var brush = new LinearGradientBrush(new Rectangle(0, 0, bmp.Width, bmp.Height), Color.Blue, Color.DarkRed, 1.2f, true))
                // Fonts.Length (exclusive upper bound), not Length - 1:
                // the original off-by-one made the last font unselectable.
                using (var font = new Font(Fonts[_random.Next(Fonts.Length)], _random.Next(24, 30), FontStyle.Bold))
                {
                    g.DrawString(str, font, brush, pos);
                }
            }
            using (MemoryStream ms = new MemoryStream())
            {
                bmp.Save(ms, ImageFormat.Png);
                Response.ClearContent();
                Response.ContentType = "image/png";
                Response.BinaryWrite(ms.ToArray());
            }
        }
        Response.End();
    }
    /// <summary>
    /// Draws random noise lines across the bitmap.
    /// </summary>
    /// <param name="graphics">Target drawing surface.</param>
    /// <param name="bmp">Bitmap providing the bounds.</param>
    /// <param name="count">Number of lines.</param>
    private void DrawLine(Graphics graphics, Bitmap bmp, int count)
    {
        for (var i = 0; i < count; i++)
        {
            var x1 = _random.Next(bmp.Width);
            var y1 = _random.Next(bmp.Height);
            var x2 = _random.Next(bmp.Width);
            var y2 = _random.Next(bmp.Height);
            using (var pen = new Pen(GetRandomColor()))
            {
                graphics.DrawLine(pen, x1, y1, x2, y2);
            }
        }
    }
    /// <summary>
    /// Scatters random noise pixels over the bitmap.
    /// </summary>
    /// <param name="bmp">Bitmap to modify.</param>
    /// <param name="count">Number of pixels.</param>
    private void DrawPrint(Bitmap bmp, int count)
    {
        for (var i = 0; i < count; i++)
        {
            var x = _random.Next(bmp.Width);
            var y = _random.Next(bmp.Height);
            bmp.SetPixel(x, y, GetRandomColor());
        }
    }
}
}<file_sep>/Mvc.Web.Android/Script/Site.js
/// <reference path="jQuery.Min.js" />
/// <reference path="JX.js" />
(function () {
window.Site = {
/*接口地址*/
Host: "/Service",
/*公共操作数*/
options: {
codeFlag: 0,
delay: 60,
timer: null,
checkTimer: null
},
/*设置窗体标题*/
_setBoxTitle: function (title) {
$("#_winbox .win-header span").html(title);
},
/*重置窗体Top位置*/
_resetBoxTop: function (top) {
$("#_winbox").css({ top: top + "px" });
},
/*重置位置*/
_resetPos: function () {
var box = $("#_winbox");
var top = ($(window).height() - box.height()) * 0.4;
if (top < 0) top = 0;
box.css({ top: top + "px" });
},
/*显示错误信息*/
showMsg: function (id, message, flag) {
var color = flag ? "#00F" : "#F00";
$(id).html('<span style="color:' + color + '">' + message + '</span>');
setTimeout(function () { $(id).html(""); }, 1500);
},
getTalk: function () {
var s = [
'亲,还能再送一张么?',
'作者360°旋转跳跃鞠躬: 亲,还能再送一张么?',
'作者爱你哟!么么哒!',
'亲的投票,真真是极好的!',
'如果在我的生命当中曾经有那张票票出现过,那么其他一切的票票都会成为将就。而我不愿意将就,就是你了~',
'神马才是潇湘最壕?从今天起坐拥男神后宫!拿起你手中的票票!爱他就宣他宣他!',
'票虽说少了些,但这份心意,真真是极好的~',
'亲的投票,真真是极好的!如若能有幸再来一张,本宫便率皇帝陛下来耍个宝给亲看看',
'亲投票累了吗?来,我煮碗面你吃,吃完继续投~',
'给我一张票,我将告诉全世界,这个鱼塘被你承包了!',
'十年修得同船渡,百年修得亲投票!',
'山无棱,天地合,依旧要开口:亲,再来一张吧!',
'多不多投,我都在这里不离不弃!',
'看书容易,投票不易,且投且珍惜!',
'开文这么久,终于等来了你,亲,再投一张呗\(^o^)/~',
'终于等到你,还好我没放弃,投票来得好不容易,才会让人更加珍惜',
'多谢亲的慷慨,大明湖畔的容嬷嬷替作者送个香吻给您~',
'此文是我开,此字是我写,既然看我书,赶紧投票来!',
'多订阅少订阅多少都订阅,早来投晚来投早晚都来投。【横批:订阅投票】',
'亲,我若在你心上,再投一张又何妨?你若在我身旁,每天万更又怎样!',
'自从得了亲的票,整个人精神多了!',
'曾经有一张票摆在我面前我没有珍惜,等到失去时才后悔莫及。人生最悲哀的事情莫过于此。如果上天能再给我一次重来的机会,我会说:再给我一张!',
'谢谢亲的票票,男主表示已经洗白白躺到了床上,请笑纳~',
'我能想到最浪漫的事,就是你投票来,我狂更',
'向来缘浅,奈何情深,缘浅的请投一张票!情深的,请再来N张!',
'我一直很清醒,清醒地看着自己的沉沦 。沉沦在你的票票中!',
'票票诚可贵,基情价更高;若有土豪在,多少都可抛!',
'亲的每一张票,在我的心中,都是一句我爱你。',
'女人,投了票就想走?好好呆在这,我陪你一起看完这本书。',
'一切不以投票为目的的阅读都是耍流氓!来,再耍一次',
'如果世界上曾经有那个人出现过,其他人都会变成将就。可我愿意将就在你的投票深情中!',
'此票终需要一投,或投给你爱的我,或投个爱你的我。',
'看到你的票票,我只想说:谢谢你爱我,谢谢你出现在我的世界里,谢谢你让我的生命里充满了温暖:可以再来一张吗',
'我终究没能飙得过那辆宝马,只能眼看着它在夕阳中绝尘而去,不是我的引擎不好,而是我脚蹬子坏了!再投一票吧,好让我尽快换个脚蹬子。',
'再来一发,看看会出现什么提示?' ];
var index = Math.floor(Math.random() * s.length);
return s[index];
},
/*显示浮层*/
showWinbox: function (title, html, cover, goHis) {
var htm = [];
htm.push('<div id="_winbox" class="win-box">');
htm.push(' <div class="win-padding">');
htm.push(' <div class="win-main">');
htm.push(' <div class="win-header">');
htm.push(' <span>' + title + '</span>');
htm.push(' <button class="win-box-close">×</button>');
htm.push(' </div>');
htm.push(' <div class="win-line"></div>');
htm.push(' <div id="_winBody">' + html + '</div>');
htm.push(' </div>');
htm.push(' </div>');
htm.push('</div>');
$("#_winbox").remove();
$(htm.join("")).appendTo("body");
Site._resetPos();
$("button[class=win-box-close]").click(function () {
Site.closeWinbox(goHis);
});
if (cover) {
$("#_winCover").remove();
$('<div id="_winCover" class="win-cover"></div>').appendTo("body");
$("#_winCover").height($(document).height());
}
},
/*关闭浮层*/
closeWinbox: function (goHis) {
$("#_winbox").remove();
$("#_winCover").remove();
if (Site.options.timer) clearTimeout(Site.options.timer);
if (Site.options.checkTimer) clearInterval(Site.options.checkTimer);
if (typeof goHis != "undefined" && goHis == 1)
history.go(-1);
},
/*登陆框*/
showLoginbox: function (callback) {
var htm = [
'<ul class="group2 m10">',
' <li><input id="_txtLoginName" class="input r3" type="text" data-text="账号:手机号/潇湘账号" value="账号:手机号/潇湘账号" /></li>',
' <li><input id="_txtPassword" class="input r3" type="text" data-text="密码" value="密码" /></li>',
' <li style="text-align:right;"><a id="_lnkFindPassword" href="javascript:;">忘记密码?</a></li>',
' <li><a id="_lnkLogin" class="button blue r3" href="javascript:;" onclick="login();">登录</a></li>',
' <li><a id="_lnkQQLogin" class="button color1 r3" href="javascript:;">QQ号登录</a></li>',
' <li><a id="_lnkRegister" class="button color2 r3" href="javascript:;">注册账户</a></li>',
'</ul>'
];
var goHis = location.href.toLocaleLowerCase().indexOf("\content") >= 0 ? 1 : 0;
this.showWinbox('用户登陆', htm.join(""), true, goHis);
$(".input").click(
function () {
if ($(this).val() == $(this).attr("data-text")) {
$(this).val("");
$(this).css({ color: "#555" });
}
}).blur(
function () {
if ($(this).val() == "") {
$(this).val($(this).attr("data-text"));
$(this).css({ color: "#999" });
}
});
$("#_lnkLogin").click(
function () {
var userName = $("#_txtLoginName").val();
var userPass = $("#_txtPassword").val();
if (userName == $("#_txtLoginName").attr("data-text")
|| userName == "") {
Util.Alert("请输入账户");
return;
}
if (userPass == $("#_txtPassword").attr("data-text")
|| userPass == "") {
Util.Alert("请输入密码");
return;
}
Site._setBoxTitle("登录中....");
ajaxLocalService('user_login', { username: userName, userpassword: <PASSWORD>Pass }
, function (response) {
Site._setBoxTitle("用户登录");
if (response.Code != 0) {
Util.Alert(response.Message);
try {
XiaoXiangJS.loginFailed();
} catch (e) { };
return;
}
var res = response.Data;
Util.Alert("登录成功了");
setTimeout(function () {
if (typeof callback == "function") callback(res.Id);
Site.closeWinbox();
}, 1500);
try {
XiaoXiangJS.loginSuccess(res.Id, res.Name);
} catch (e) { };
});
});
$("#_lnkRegister").click(
function () {
Site.showRegisterbox(callback);
});
$("#_lnkFindPassword").click(
function () {
Site.showFindPasswordbox(callback);
});
// QQ登陆支持
$("#_lnkQQLogin").click(function () {
location.href = "https://graph.qq.com/oauth2.0/authorize?response_type=code&client_id=100389624&state=androidxxsynet&scope=get_user_info&redirect_uri=" + encodeURI("http://android.xxsy.net/pages2/QQAuthorizeLogin.aspx");
Util.CookieWrite("QQ_LOGIN_RETURN_LINK", _referer, 999);
});
},
/*注册框*/
showRegisterbox: function (callback) {
var htm = [
'<div id="_registerBox">',
' <div class="tab t3">',
' <ul><li class="animbg"></li></ul>',
' <ul id="_tabRegister">',
' <li data-index="0" class="curr">一键注册</li>',
' <li data-index="1">手机号注册</li>',
' <li data-index="2">昵称注册</li>',
' </ul>',
' </div>',
' <div id="_panels" style="border-top:2px solid #efefef;">',
' <div class="box" style="padding:10px;">',
' <div class="m8">',
' <a target="_blank" href="SMS:12114?body=8888;" class="button blue r3">发送短信一键注册</a>',
' </div>',
' <div class="wenxin ww bgwhite m15">',
' <h3>一键注册说明</h3>',
' <ul>',
' <li>1、如点击发送按钮无效,请手工编辑短信内容<em>潇湘</em>发送到号码<em>12114</em>即可完成注册。</li>',
' <li>2、发送一条短信即可完成注册。</li>',
' <li>3、注册完成将收到成功提示短信。</li>',
' <li>4、账号为手机号,密码为手机号后六位。</li>',
' </ul>',
' </div>',
' <div class="m10">',
' <a data-action="gologin" href="javascript:;" class="button color1 r3">已有账号前往登陆</a>',
' </div>',
' </div>',
' <div class="box" style=" padding:10px; display:none;">',
' <div data-step="1">',
' <ul class="group2 ww">',
' <li><input id="_txtMobile" class="input r3" type="tel" data-text="请输入手机号" value="请输入手机号" /></li>',
' <li>',
' <ul class="group g2 ww">',
' <li><input id="_txtResult" class="input r3" type="tel" data-text="输入答案" value="输入答案" /></li>',
' <li><img src="/Vercode?r=' + new Date() + '" onclick="this.src = \'/Vercode?r=\' + new Date()" style="width:100%; height:38px;" /></li>',
' </ul>',
' </li>',
' <li><a id="_lnkCode" class="button blue r3">获取短信验证码</a></li>',
' </ul>',
' </div>',
' <div data-step="2" style="display:none;">',
' <ul class="group2 ww">',
' <li><input id="_txtVerifyCode" class="input r3" type="tel" data-text="请输入短信验证码" value="请输入短信验证码" /></li>',
' <li><a id="_lnkRegister" class="button blue r3">确定注册</a></li>',
' </ul>',
' <div class="wenxin bgwhite ww">',
' <h3>温馨提示</h3>',
' <ul>',
' <li>1、账号的密码为手机号最后6位数字。</li>',
' </ul>',
' </div>',
' </div>',
' <ul class="group2 ww">',
' <li><a data-action="gologin" href="javascript:;" class="button color2 r3">已有账号前往登陆</a></li>',
' </ul>',
' </div>',
' <div class="box" style="padding:10px;display:none;">',
' <ul class="group2 ww">',
' <li><input id="_txtNickname" class="input r3" type="text" data-text="昵称" value="昵称" /></li>',
' <li><input id="_txtPass2" class="input r3" type="text" data-text="密码" value="密码" /></li>',
' <li><input id="_txtNickMobile" class="input r3" type="tel" data-text="手机号(错误手机号将不能取回密码)" value="手机号(错误手机号将不能取回密码)" /></li>',
' <li><a id="_lnkRegister2" href="javascript:;" class="button blue r3">确定注册</a></li>',
' <li><a data-action="gologin" href="javascript:;" class="button color1 r3">已有账号前往登陆</a></li>',
' </ul>',
' </div>',
' </div>',
'</div>'];
this.showWinbox("账户注册", htm.join(""), true);
$("#_tabRegister").find("li").click(function () {
var sel = $(this);
var index = parseInt(sel.attr("data-index"));
var v = 33.3;
sel.parent().siblings("ul").find(".animbg").animate({ marginLeft: (index * v) + "%" }, 200
, function () {
sel.addClass("curr")
.siblings().removeClass("curr");
});
$("#_panels > div").eq(index)
.show()
.siblings("div").hide();
});
// 输入框单击事件
$("#_registerBox .input").click(
function () {
if ($(this).val() == $(this).attr("data-text")) {
$(this).val("");
}
}).blur(
function () {
if ($(this).val() == "") {
$(this).val($(this).attr("data-text"));
}
});
// 获取验证码
$("#_lnkCode").click(
function () {
if (Site.options.codeFlag == 1)
return;
var rv = $("#_txtResult").val();
if (rv == "" || rv == $("#_txtResult").attr("data-text")) {
Util.Alert("答案不能为空");
return;
}
var mobile = $("#_txtMobile").val();
if (/\d{11}/gi.test(mobile) == false) {
Util.Alert("手机号不正确");
return;
}
ajaxLocalService('user_sendregistercode', { mobile: mobile, rv: rv },
function (responseData) {
if (responseData.Code != 0) {
Util.Alert(responseData.Message);
return;
}
var res = responseData.Data;
Site.options.codeFlag = 1;
Site.options.delay = 60; //60秒之内不能重复发送
Site.options.timer = setInterval(
function () {
if (Site.options.delay <= 0) {
clearInterval(Site.options.timer);
Site.options.codeFlag = 0;
$("#lnkCode").html("获取验证码");
return;
}
$("#lnkCode").html("获取验证码(" + (Site.options.delay--) + ")");
}, 1000);
$("div[data-step='1']").hide()
.siblings("div[data-step='2']").show();
Util.Alert(res);
});
});
// 手机号注册
$("#_lnkRegister").click(function () {
var mobile = $("#_txtMobile").val();
var verifyCode = $("#_txtVerifyCode").val();
if (/\d{11}/gi.test(mobile) == false) {
Util.Alert("密码不能少于6个字符");
return;
}
if (verifyCode == "") {
Util.Alert("请输入验证");
return;
}
Site._setBoxTitle("正在注册....");
ajaxService2({
method: 'user.register',
username: mobile, usrepassword: <PASSWORD>.substring(mobile.length - 6),
usermail: mobile + <EMAIL>", usrmobile: mobile, channel: 6// XiaoXiangJS.getChannel()
}
, function (response) {
Site._setBoxTitle("账户注册");
if (response.Code != 0) {
Util.Alert(response.Message);
try {
XiaoXiangJS.loginFailed();
}
catch (e) { }
return;
}
ajaxLocalService('user_login', { username: mobile, userpassword: mobile.substring(mobile.length - 6) },
function (responseData) {
if (responseData.Code != 0) {
Util.Alert(responseData.Message);
try {
XiaoXiangJS.loginFailed();
}
catch (e) { }
return;
}
var res = responseData.Data;
Util.Alert("注册成功了");
setTimeout(function () {
if (typeof callback == "function") {
callback(res.Id);
}
Site.closeWinbox();
}, 1500);
try {
XiaoXiangJS.loginSuccess(res.Id, res.Name);
}
catch (e) { }
});
});
});
// 昵称注册
$("#_lnkRegister2").click(function () {
var nickname = $("#_txtNickname").val();
var userPass = $("#_txtPass2").val();
var mobile = $("#_txtNickMobile").val();
if (nickname == "" || nickname == $("#_txtNickname").attr("data-text")) {
Util.Alert("昵称不能为空");
return;
}
if (/^[(0-9A-Za-z)(\u4e00-\u9fa5)]{2,}$/ig.test(nickname) == false) {
Util.Alert("昵称只能包含数字、字母、汉字!");
return;
}
if (userPass.length < 6) {
Util.Alert("密码不能少于6个字符");
return;
}
if (/^1\d{10}$/gi.test(mobile) == false) {
Util.Alert("不是一个有效的手机号");
return;
}
Site._setBoxTitle("正在注册....");
ajaxService2({
method: "user.register",
username: nickname, usrepassword: <PASSWORD>, usrmobile: mobile, channel: 6//XiaoXiangJS.getChannel()
}
, function (response) {
Site._setBoxTitle("账户注册");
if (response.Code != 0) {
Util.Alert(response.Message);
try {
XiaoXiangJS.loginFailed();
}
catch (e) { }
return;
}
ajaxLocalService('user_login', { username: nickname, userpassword: <PASSWORD> },
function (responseData) {
if (responseData.Code != 0) {
Util.Alert(responseData.Message);
try {
XiaoXiangJS.loginFailed();
}
catch (e) { }
return;
}
var res = responseData.Data;
Util.Alert("注册成功了");
setTimeout(function () {
if (typeof callback == "function") callback(res.Id);
Site.closeWinbox();
}, 1500);
try {
XiaoXiangJS.loginSuccess(res.Id, res.Name);
}
catch (e) { }
});
});
});
// 已有账号登陆
$("a[data-action=gologin]").click(function () {
Site.showLoginbox(callback);
});
},
/*找回密码框*/
showFindPasswordbox: function (callback) {
var htm = [
'<ul id="_step1" class="group2 m10">',
' <li><input id="_txtUserName" class="input r3" type="text" data-text="请输入账号" value="请输入账号" /></li>',
' <li>',
' <ul class="group g2 ww">',
' <li><input id="_txtResult" class="input r3" type="tel" data-text="输入答案" value="输入答案" /></li>',
' <li><img src="/Vercode?r=' + new Date() + '" onclick="this.src = \'/Vercode?r=\' + new Date()" style="width:100%; height:38px;" /></li>',
' </ul>',
' </li>',
' <li><a id="_lnkCode" class="button blue r3">获取短信验证码</a></li>',
'</ul>',
'<ul id="_step2" class="group2 m10" style="display:none;">',
' <li><input id="_txtPass" class="input r3" type="text" data-text="请输入新密码" value="请输入新密码" /></li>',
' <li><input id="_txtVerifyCode" class="input r3" type="tel" data-text="输入验证码" value="输入验证码" /></li>',
' <li><a id="_lnkUpdatePassword" class="button blue r3">确定修改密码</a></li>',
'</ul>',
'<ul id="_step1" class="group2 m10">',
' <li><a id="_lnkRegister" class="button color2 r3">前往注册</a></li>',
'</ul>'
];
this.showWinbox("找回密码", htm.join(""), true);
var _userName = "";
$(".input").click(
function () {
if ($(this).val() == $(this).attr("data-text")) {
$(this).val("");
$(this).css({ color: "#555" });
}
}).blur(
function () {
if ($(this).val() == "") {
$(this).val($(this).attr("data-text"));
$(this).css({ color: "#999" });
}
});
$("#_lnkCode").click(
function () {
var rv = $("#_txtResult").val();
if (rv == "" || rv == $("#_txtResult").attr("data-text")) {
Util.Alert("答案不能为空");
return;
}
var userName = $("#_txtUserName").val();
if (userName == "" || userName == $("#_txtUserName").attr("data-text")) {
Util.Alert("账户不能为空")
return;
}
ajaxLocalService("user_sendfindpasswordcode", { username: userName, rv: rv }
, function (response) {
if (response.Code != 0) {
Util.Alert(response.Message);
return;
}
$("#_step1").hide();
$("#_step2").show();
_userName = userName;
});
});
$("#_lnkUpdatePassword").click(function () {
var userName = _userName;
var userPass = $("#_txtPass").val();
var verifyCode = $("#_txtVerifyCode").val();
if (userPass.length < 6) {
Util.Alert("密码不能少于6个字符");
return;
}
if (verifyCode == "") {
Util.Alert("请输入验证");
return;
}
ajaxLocalService("user_findpassword", { username: userName, userpass: userPass, verifycode: verifyCode }
, function (response) {
if (response.Code != 0) {
try {
XiaoXiangJS.loginFailed();
} catch (e) { }
Util.Alert(response.Message);
return;
}
var res = response.Data;
Util.Alert("成功修改了密码");
setTimeout(function () {
if (typeof callback == "function") callback(res.Id);
Site.closeWinbox();
}, 1500);
try {
XiaoXiangJS.loginSuccess(res.Id, res.Name);
} catch (e) { };
});
});
// 前往注册
$("#_lnkRegister").click(
function () {
Site.showRegisterbox(callback);
});
},
    /* Chapter-subscription dialog: loads the chapter's price and the user's
       balance, lets the user buy the single chapter, and exposes whole-book
       (tejia) and monthly-pass shortcuts when the book supports them.
       `callback` fires after a successful subscription, just before the
       dialog closes. */
    showChapterSubscribebox: function (userid, bookid, chapterid, callback) {
        var htm = [];
        htm.push('<div class="box">');
        htm.push('    <ul class="list">');
        htm.push('        <li>章节名:<span id="_spnChapterName"></span></li>');
        htm.push('        <li>订阅该章节需:<span id="_spnChapterPrice"></span><em>(潇湘币)</em></li>');
        htm.push('        <li>账户余额:<span id="_spanAccount"></span><em>(潇湘币)</em></li>');
        htm.push('    </ul>');
        htm.push('    <ul class="group2">');
        htm.push('        <li><a href="javascript:;" id="_lnkSubscribe" class="button blue r3">订阅本章节</a></li>');
        htm.push('        <li><a href="javascript:;" id="_lnkTejia" class="button color1 r3" style="display:none;">全本订阅只需100潇湘币</a></li>');
        htm.push('        <li><a href="javascript:;" id="_lnkBaoyue" class="button color2 r3" style="display:none;">包月作品点此订购包月</a></li>');
        htm.push('    </ul>');
        htm.push('</div>');
        Site.showWinbox("Loading....", htm.join(""), true, 1);
        // Fetch price/balance details, then fill in the placeholders above.
        // NOTE(review): eval("(" + data + ")") parses the loosely-formatted
        // response; prefer JSON.parse if the endpoint returns strict JSON.
        $.get(Site.Host, $.param({ action: "chapter_getsubscribedetail", userid: userid, bookid: bookid, chapterid: chapterid })
            , function (data) {
                var res = eval("(" + data + ")");
                var i = res.result;
                Site._setBoxTitle(i.BookName + " - 章节订阅");
                $("#_spnChapterName").html(i.ChapterName);
                $("#_spnChapterPrice").html(i.Price);
                $("#_spanAccount").html(i.UserAccount);
                // Whole-book special price available: reveal the shortcut.
                if (i.Tejia > 0) {
                    $("#_lnkTejia").html("全本订阅只需" + i.Tejia + "潇湘币");
                    $("#_lnkTejia").show().click(function () {
                        Site.showQuanbenrSubscribebox(userid, bookid, callback);
                    });
                }
                // Book participates in the monthly-pass program.
                if (i.IsBaoyue == 1) {
                    $("#_lnkBaoyue").show().click(function () {
                        Site.showBaoyuebox(userid, callback);
                    });
                }
                Site._resetPos();
                // Actual single-chapter purchase.
                $("#_lnkSubscribe").click(
                    function (data) {
                        Site._setBoxTitle("订阅中请稍后....");
                        $.get(Site.Host, $.param({ action: "chapter_subscribe", userid: userid, chapterid: chapterid })
                            , function (data) {
                                Site._setBoxTitle("章节订阅");
                                var res = eval("(" + data + ")");
                                if (res.result == 0) {
                                    Util.Alert("订阅时发生了错误")
                                    return;
                                }
                                // Purchases can award bonus monthly/review tickets.
                                var str = "订阅成功";
                                if (res.mon > 0) str += ",本次订阅获取月票1张";
                                if (res.assess > 0) str += ",本次订阅获取评价票1张";
                                Util.Alert(str);
                                setTimeout(
                                    function () {
                                        if (typeof callback == "function") callback();
                                        Site.closeWinbox();
                                    }, 1400);
                            });
                    });
            });
    },
/*包月框*/
showBaoyuebox: function (userid, callback) {
var htm = [
'<div class="box ww" style="padding:10px 0;">',
' <div class="wenxin bgwhite">',
' <h3>包月说明</h3>',
' <ul>',
' <li>包月书库内有超过700多部优秀作品,购买包月服务,期间可无限制阅读任何包月书库作品任何章节。</li>',
' </ul>',
' </div>',
' <ul id="_monCount" class="group-price m12">',
' <li><a href="javascript:;" data-value="1"><span>包1个月</span><em>1500潇湘币</em></a></li>',
' <li><a href="javascript:;" data-value="3"><span>包3个月</span><em>4000潇湘币</em></a></li>',
' <li><a href="javascript:;" data-value="6"><span>包6个月</span><em>7000潇湘币</em></a></li>',
' <li><a href="javascript:;" data-value="12"><span>包12个月</span><em>12000潇湘币</em></a></li>',
' <li><a href="javascript:;" data-value="24"><span>包24个月</span><em>20000潇湘币</em></a></li>',
' </ul>',
' <ul class="group2">',
' <li><a id="_lnkBaoyue" href="javascript:;" class="button blue r3">确定包月</a></li>',
' </ul>',
'</div>'
];
var goHis = location.href.toLocaleLowerCase().indexOf("content.aspx") >= 0 ? 1 : 0;
Site.showWinbox("包月服务", htm.join(""), true, goHis);
var _count = 0;
var _site = 6;
$("#_monCount a").click(function () {
$("#_monCount a").removeClass("current");
$(this).addClass("current");
_count = parseInt($(this).attr("data-value"));
});
$("#_lnkBaoyue").click(function () {
Site._setBoxTitle("正在提交");
ajaxService2({ method: "book.buy.monthly", userid: _userid, buymons: _count, site: _site }
, function (response) {
Site._setBoxTitle("包月服务");
if (response.Code != 0) {
Util.Alert(response.Message);
XiaoXiangJS.purchaseFailed();
return;
}
Util.Alert("包月成功了");
setTimeout(function () {
if (typeof callback == "function") callback();
Site.closeWinbox();
}, 1500);
XiaoXiangJS.purchaseSuccess();
});
});
},
/*显示发表评论框*/
showSendReviewbox: function (userid, bookid, callback) {
var htm = [
' <ul class="group2">',
' <li><textarea class="area" id="_txtReviewText"></textarea></li>',
' <li style="text-align:right;"><span id="_spnLength">0/1000</span></li>',
' <li><a href="javascript:;" id="_lnkSendReview" class="button blue r3">确定发表评论</a></li>',
' </ul>'
];
Site.showWinbox("发表评论", htm.join(""), true);
Site.options.checkTimer = setInterval(
function () {
var len = $("#_txtReviewText").val().length;
var spn = $("#_spnLength");
spn.css({ color: (len > 1000 || len < 6) ? "#f00" : "#555" });
spn.html(len + "/1000");
}, 500);
$("#_lnkSendReview").click(
function () {
var txt = $("#_txtReviewText").val();
if (txt.length < 6) {
Util.Alert("评论内容不能少于6个字");
return;
}
if (txt.length > 1000) {
Util.Alert("评论内容不能1000个字");
return;
}
Site._setBoxTitle("发送中....");
$.get(Site.Host, $.param({ action: "book_sendreview", userid: userid, bookid: bookid, content: txt })
, function (data) {
Site._setBoxTitle("发表评论");
var res = eval("(" + data + ")");
if (res.result == 0) {
Util.Alert("成功发表了一条评论");
clearInterval(Site.options.checkTimer);
setTimeout(function () {
if (typeof callback == "function") callback();
Site.closeWinbox();
}, 1500);
return;
}
var err = "发表失败";
if (res.result == 1) err = "注册未满48小时不能发表";
if (res.result == 2) err = "被永久禁言";
if (res.result == 3) err = "禁言中";
if (res.result == 4) err = "您发表的太快了,休息片刻后再发表";
Util.Alert(err);
});
});
},
/*粉丝互动(送鲜花、钻石、打赏等)*/
showPropsbox: function (userid, bookid, type) {
var htm = [
'<div id="_propsBox" class="box ww">',
' <div class="tab t5" >',
' <ul><li class="animbg"></li></ul>',
' <ul>',
' <li data-index="0" data-props="2">鲜花</li>',
' <li data-index="1" data-props="1">钻石</li>',
' <li data-index="2" data-props="5">打赏</li>',
' <li data-index="3" data-props="4">月票</li>',
' <li data-index="4" data-props="8">评价票</li>',
' </ul>',
' </div>',
' <div id="_propsPanels">',
' <div>',
' <ul class="group-count">',
' <li><a href="javascript:;" data-type="2" data-count="1">1(朵)</a></li>',
' <li><a href="javascript:;" data-type="2" data-count="5">5(朵)</a></li>',
' <li><a href="javascript:;" data-type="2" data-count="10">10(朵)</a></li>',
' <li><a href="javascript:;" data-type="2" data-count="50">50(朵)</a></li>',
' <li><a href="javascript:;" data-type="2" data-count="100">100(朵)</a></li>',
' <li><a id="_btnCustom2" href="javascript:;" data-type="2" data-count="0" contenteditable="true">自定义</a></li>',
' </ul>',
' <ul class="group2"><li><a href="javascript:;" class="button blue r3" data-value="2">确定送鲜花</a></li></ul>',
' <div class="wenxin bgwhite m5">',
' <h3>温馨提示</h3>',
' <ul>',
' <li>1、赠送一朵鲜花需要消耗20点潇湘币</li>',
' </ul>',
' </div>',
' </div>',
' <div style="display:none">',
' <ul class="group-count">',
' <li><a href="javascript:;" data-type="1" data-count="1">1(颗)</a></li>',
' <li><a href="javascript:;" data-type="1" data-count="5">5(颗)</a></li>',
' <li><a href="javascript:;" data-type="1" data-count="10">10(颗)</a></li>',
' <li><a href="javascript:;" data-type="1" data-count="50">50(颗)</a></li>',
' <li><a href="javascript:;" data-type="1" data-count="100">100(颗)</a></li>',
' <li><a id="_btnCustom1" href="javascript:;" data-type="1" data-count="0" contenteditable="true">自定义</a></li>',
' </ul>',
' <ul class="group2"><li><a href="javascript:;" class="button blue r3" data-value="1">确定送钻石</a></li></ul>',
' <div class="wenxin bgwhite m5">',
' <h3>温馨提示</h3>',
' <ul>',
' <li>1、赠送一颗钻石需要消耗100点潇湘币</li>',
' </ul>',
' </div>',
' </div>',
' <div style="display:none">',
' <ul class="group-count">',
' <li><a href="javascript:;" data-type="5" data-count="188">188</a></li>',
' <li><a href="javascript:;" data-type="5" data-count="388">388</a></li>',
' <li><a href="javascript:;" data-type="5" data-count="888">888</a></li>',
' <li><a href="javascript:;" data-type="5" data-count="1888">1888</a></li>',
' <li><a href="javascript:;" data-type="5" data-count="8888">8888</a></li>',
' <li><a id="_btnCustom5" href="javascript:;" data-type="5" data-count="0" contenteditable="true">自定义</a></li>',
' </ul>',
' <ul class="group2"><li><a href="javascript:;" class="button blue r3" data-value="5">确定打赏</a></li></ul>',
' <div class="wenxin bgwhite m5">',
' <h3>温馨提示</h3>',
' <ul>',
' <li>1、打赏单位为潇湘币</li>',
' </ul>',
' </div>',
' </div>',
' <div style="display:none">',
' <ul class="group-count">',
' <li><a href="javascript:;" data-type="4" data-count="1">1(张)</a></li>',
' <li><a href="javascript:;" data-type="4" data-count="2">2(张)</a></li>',
' <li><a href="javascript:;" data-type="4" data-count="3">3(张)</a></li>',
' <li><a href="javascript:;" data-type="4" data-count="4">4(张)</a></li>',
' <li><a href="javascript:;" data-type="4" data-count="5">5(张)</a></li>',
' <li><a id="_btnCustom4" href="javascript:;" data-type="4" data-count="0" contenteditable="true">自定义</a></li>',
' </ul>',
' <div class="box" style="font-size:14px; color:#0B3985; padding-bottom:5px;">当前月票:<span id="_shengyu4">0</span>(张)</div>',
' <ul class="group2"><li><a href="javascript:;" class="button blue r3" data-value="4">确定投月票</a></li></ul>',
' <div class="wenxin bgwhite m5">',
' <h3>温馨提示</h3>',
' <ul>',
' <li>1、vip订阅消费每满10元即可获得1张月票,月票当月有效.</li>',
' </ul>',
' </div>',
' </div>',
' <div style="display:none">',
' <ul class="group-count">',
' <li><a href="javascript:;" data-type="8" data-count="1">不知所云</a></li>',
' <li><a href="javascript:;" data-type="8" data-count="2">随便看看</a></li>',
' <li><a href="javascript:;" data-type="8" data-count="3">值得一看</a></li>',
' <li><a href="javascript:;" data-type="8" data-count="4">不容错过</a></li>',
' <li><a href="javascript:;" data-type="8" data-count="5">经典必看</a></li>',
' </ul>',
' <div class="box" style="font-size:14px; color:#0B3985; padding-bottom:5px;">当前评价票:<span id="_shengyu8">0</span>(张)</div>',
' <ul class="group2"><li><a href="javascript:;" class="button blue r3" data-value="8">确定投评价票</a></li></ul>',
' <div class="wenxin bgwhite m5">',
' <h3>温馨提示</h3>',
' <ul>',
' <li>1、每月消费满10元即可获赠1张评价票,每月只可获赠一次.</li>',
' <li>2、可以每张200点潇湘币购买(购买无限制)</li>',
' <li>3、每次评价消耗1张评价票</li>',
' </ul>',
' </div>',
' </div>',
' </div>',
'</div>'];
Site.showWinbox("粉丝互动", htm.join(""), true);
var _count = 0;
var _integral = 0;
var _propsType = 1;
var _userid = userid;
var _bookid = bookid;
var _iscus = 0;
var _site = 6;//XiaoXiangJS.getChannel();
var current = $("#_propsBox li[data-props=" + type + "]").addClass("curr");
var idx = parseInt(current.attr("data-index"));
current.parent().siblings().find(".animbg").css({ marginLeft: idx * 20 + "%" });
$("#_propsPanels > div").eq(idx).show().siblings().hide();
_propsType = type;
ajaxService2({ method : "user.get", userid: _userid }
, function (response) {
var user = response.Data;
if (user.Id > 0) {
$("#_shengyu8").html(user.Assesstickets);
$("#_shengyu4").html(user.Montickets);
}
});
// 绑定选项卡单击事件
$("#_propsBox li[data-props]").click(function () {
var sel = $(this);
var index = parseInt(sel.attr("data-index"));
sel.parent().siblings().find(".animbg").animate({ marginLeft: index * 20 + "%" }, 200
, function () { sel.addClass("curr").siblings().removeClass("curr"); });
$("#_propsPanels > div").eq(index).show().siblings().hide();
_count = 0;
_propsType = parseInt(sel.attr("data-props"));
Site._resetPos();
});
// 绑定自定义输入框单击、失去焦点事件
$("#_propsBox a[contenteditable]").click(
function () {
var value = $(this).html();
value = value.replace(/<[^>]+>|\s+/gi, "");
if (/\d+/gi.test(value) == false)
$(this).html("");
}).blur(
function () {
var value = $(this).html();
value = value.replace(/<[^>]+>|\s+/gi, "");
if (/\d+/gi.test(value) == false)
$(this).html("自定义");
});
$("#_propsBox a[data-type]").click(
function () {
$("#_propsBox a[data-type]").removeClass("curr");
$(this).addClass("curr");
_propsType = parseInt($(this).attr("data-type"));
_count = parseInt($(this).attr("data-count"));
_iscus = (_count > 0) ? 0 : 1;
if (_propsType == 8) {
_integral = _count;
_count = 1;
}
});
// 绑定发送按钮事件
$("#_propsBox .button").click(
function () {
if (_propsType == 8 && _integral == 0) {
Util.Alert("请选择评论信息");
return;
}
if (_count == 0) {
if (_iscus == 1) {
var temp = $("#_btnCustom" + _propsType).html();
temp = temp.replace(/<[^>]+>|\s+/gi, "");
if (/\d+/gi.test(temp)) {
_count = parseInt(temp);
}
}
}
if (_propsType == 1 && _count == 0) {
Util.Alert("请选择钻石数量或自定义数量");
return;
}
if (_propsType == 2 && _count == 0) {
Util.Alert("请选择鲜花数量或自定义数量");
return;
}
if (_propsType == 4 && _count == 0) {
Util.Alert("请选择月票数量或自定义数量");
return;
}
if (_propsType == 5 && _count == 0) {
Util.Alert("请选择打赏数量或自定义数量");
return;
}
if (_propsType == 5 && _iscus == 1 && _count < 100) {
Util.Alert("打赏至少100潇湘币");
return;
}
Site._setBoxTitle("提交中....");
ajaxService2({ method: 'user.use.gift', userid: _userid, bookid: _bookid, gifttype: _propsType, count: _count, score: _integral, site: _site },
function (response) {
Site._setBoxTitle("粉丝互动");
if (response.Code != 0) {
Util.Alert(response.Message);
return;
}
if (_propsType == 4) {
var message = '成功投了张' + _count + '月票,' + Site.getTalk();
Util.Alert(message);
} else {
Util.Alert("操作成功了");
}
if (_propsType == 4 || _propsType == 8) {
var shengyu = $("#_shengyu" + _propsType);
var sh = parseInt(shengyu.html());
sh = sh - _count;
if (sh < 0) sh = 0;
shengyu.html(sh);
}
});
});
},
/*修改密码*/
showUpdatePasswordbox: function (userid, callback) {
var htm = [];
htm.push('<div>');
htm.push(' <ul class="group2">');
htm.push(' <li><input id="_txtOldPassword" class="input" type="text" data-text="输入旧密码" value="输入旧密码" /></li>');
htm.push(' <li><input id="_txtNewPassword" class="input" type="text" data-text="新密码" value="新密码" /></li>');
htm.push(' <li><a href="javascript:;" id="_lnkUpdate" class="button blue r3">确定修改</a></li>');
htm.push(' </ul>');
htm.push('</div>');
this.showWinbox('密码修改', htm.join(""), true);
$(".input").click(
function () {
if ($(this).val() == $(this).attr("data-text")) {
$(this).val("");
$(this).css({ color: "#555" });
}
}).blur(
function () {
if ($(this).val() == "") {
$(this).val($(this).attr("data-text"));
$(this).css({ color: "#999" });
}
});
$("#_lnkUpdate").click(
function () {
var oldPassword = $("#_txtOldPassword").val();
var newPassword = $("#_txtNewPassword").val();
if (newPassword.length < 6) {
Util.Alert("密码至少6个字符");
return;
}
Site._setBoxTitle("发送请求中....");
ajaxService2({ method: "user.update.password", userid: userid, oldpassword: oldPassword, newpassword: <PASSWORD> }
, function (response) {
Site._setBoxTitle("密码修改");
if (response.Code != 0) {
Util.Alert(response.Message);
return;
}
var res = response.Data;
ajaxLocalService("user_login", { username: res.Name, userpassword: res.Password },
function (response) {
if (response.Code == 0) {
Util.Alert("成功修改了密码");
if (typeof callback == "function") callback();
setTimeout(function () {
Site.closeWinbox();
}, 1500);
}
});
});
});
},
    /* Whole-book (special-price) subscription dialog. If no user is logged
       in, shows the login dialog first and re-enters with the user id.
       Loads price/balance via book_tejia_detail, then purchases via
       book_quanben2. `callback` fires after a successful purchase, just
       before the dialog closes. */
    showQuanbenrSubscribebox: function (userid, bookid, callback) {
        // Require login first; re-open this dialog once logged in.
        if (userid == 0 || userid == null) {
            Site.showLoginbox(function (uid) {
                setTimeout(function () {
                    Site.showQuanbenrSubscribebox(uid, bookid);
                }, 300);
            });
            return;
        }
        var htm = [
            '<div class="box">',
            '    <ul id="_subscribeInfoUL" class="list ww">',
            '        <li>全本订阅价格:<span id="_spnPrice">0</span>(潇湘币)</li>',
            '        <li>当前余额:<span id="_spnAccount">0</span>(潇湘币)</li>',
            '    </ul>',
            '    <div id="_successDiv" style="display:none;color:#1C3FD4;padding:10px 0 5px;text-align:center;">订阅成功了!</div>',
            '    <ul id="_qbDingyueUL" class="group2 m10"><li><a id="_lnkDingyue" href="javascript:;" class="button blue r3">确定订阅</a></li></ul>',
            '    <div class="wenxin bgwhite m5">',
            '        <h3>全本订阅说明</h3>',
            '        <ul>',
            '            <li>1、一次性订阅特定作品的全部VIP章节,可享受特价优惠。</li>',
            '            <li>2、订阅后可永久阅读已订阅的作品。</li>',
            '        </ul>',
            '    </div>',
            '</div>'
        ];
        // On the reader page, closing the dialog should go back in history.
        var goHis = location.href.toLocaleLowerCase().indexOf("content.aspx") >= 0 ? 1 : 0;
        this.showWinbox('全本订阅', htm.join(""), true, goHis);
        Site._setBoxTitle("加载中.....");
        // Fetch the special price and the user's balance for this book.
        // NOTE(review): responses are parsed with eval("(" + data + ")");
        // prefer JSON.parse if the endpoint returns strict JSON.
        $.get(Site.Host, $.param({ action: "book_tejia_detail", userid: userid, bookid: bookid })
            , function (data) {
                var res = eval("(" + data + ")");
                if (res.code == 0) {
                    Site._setBoxTitle(res.bookName + " - 全本订阅");
                    $("#_spnPrice").html(res.price);
                    $("#_spnAccount").html(res.account);
                    var isbuy = res.isbuy;
                    // Already purchased: replace the buy button with a notice.
                    if (isbuy == 1) {
                        $("#_qbDingyueUL").html('<li><a href="javascript:Site.closeWinbox();" class="button color3 r3">该作品已订阅!</a></li>');
                        return;
                    }
                    // Ingot (元宝) payment path: the mixed-payment branches are
                    // retired (commented out); only the coin-balance button is
                    // rebuilt, and only when the balance covers the price.
                    if (res.isIngot == 1) {
                        var maxDeduction = res.maxDeduction;
                        var canget = res.canget;
                        var ingot = res.ingot;
                        var sli = [];
                        //if ((ingot + res.account) >= res.price) {
                        //    if (ingot > 0) {
                        //        var strbtn = "使用" + maxDeduction + "元宝";
                        //        if (res.price > maxDeduction &&
                        //            res.account > 0)
                        //            strbtn += "+" + (res.price - maxDeduction) + "潇湘币";
                        //        strbtn += "订阅";
                        //        sli.push('<li><a id="_lnkDingyueByIngot" href="javascript:;" class="button blue r3">' + strbtn + '</a></li>');
                        //    }
                        //} else {
                        //    sli.push('<li><a id="_gotoPay" href="javascript:;" class="button blue r3">余额不足前往充值</a></li>');
                        //}
                        //用户余额大于订阅点数才显示
                        if (res.account >= res.price) {
                            var str = '使用' + res.price + '潇湘币订阅';
                            //if (canget > 0)
                            //    str += '(可获赠' + canget + '元宝)';
                            sli.push('<li><a id="_lnkDingyue" href="javascript:;" class="button color1 r3">' + str + '</a></li>');
                        }
                        $("#_qbDingyueUL").html(sli.join(""));
                        //$("#_subscribeInfoUL").append('<li>剩余体验币:' + ingot + '</li>');
                    }
                    $("#_gotoPay").click(function () {
                        location.href = 'Chongzhi.aspx';
                    });
                    // Purchase with coin balance; negative result codes index
                    // into the error table below via Math.abs.
                    $("#_lnkDingyue").click(function () {
                        var strbtn2 = $("#_lnkDingyue").html();
                        $("#_lnkDingyue").html("提交中.....");
                        $.get(Site.Host, $.param({ action: "book_quanben2", userid: userid, bookid: bookid, siteid: 2 })
                            , function (data2) {
                                $("#_lnkDingyue").html(strbtn2);
                                var res2 = eval("(" + data2 + ")");
                                if (res2.result == 1) {
                                    $("#_successDiv").show();
                                    setTimeout(function () {
                                        if (typeof callback == "function") callback();
                                        Site.closeWinbox();
                                    }, 2000);
                                    XiaoXiangJS.purchaseSuccess();
                                    return;
                                }
                                var errs = ["缺少参数", "余额不足", "已经订阅过了不能重复点阅", "不是特价书"];
                                Util.Alert(errs[Math.abs(res2.result)]);
                                XiaoXiangJS.purchaseFailed();
                            });
                    });
                    //$("#_lnkDingyueByIngot").click(function () {
                    //    var strbtn2 = $("#_lnkDingyueByIngot").html();
                    //    $("#_lnkDingyueByIngot").html("提交中.....");
                    //    $.get(Site.Host, $.param({ action: "book_quanben2", userid: userid, bookid: bookid, ingotnum: res.maxDeduction })
                    //        , function (data2) {
                    //            $("#_lnkDingyueByIngot").html(strbtn2);
                    //            var res2 = eval("(" + data2 + ")");
                    //            if (res2.result == 1) {
                    //                $("#_successDiv").show();
                    //                setTimeout(function () {
                    //                    if (typeof callback == "function") callback();
                    //                    Site.closeWinbox();
                    //                }, 2000);
                    //                XiaoXiangJS.purchaseSuccess();
                    //                return;
                    //            }
                    //            var errs = ["缺少参数", "余额不足", "已经订阅过了不能重复点阅", "不是特价书"];
                    //            Util.Alert(errs[Math.abs(res2.result)]);
                    //            XiaoXiangJS.purchaseFailed();
                    //        });
                    //});
                    return;
                }
            });
    },
    /* Mobile binding dialog with two tabs: bind a mobile number (captcha +
       SMS code flow) and unbind (SMS code to the bound number). An already
       bound account replaces the bind panel with an informational message.
       `callback` fires after a successful bind. Both code buttons share the
       global Site.options.codeFlag cooldown flag but use local timers. */
    showBindingMobilebox: function (userid, callback) {
        var htm = [
            '<div class="box">',
            '    <div class="tab t2">',
            '        <ul><li class="animbg"></li></ul>',
            '        <ul id="_ulBinding">',
            '            <li data-index="0" class="curr">手机绑定</li>',
            '            <li data-index="1">解除绑定</li>',
            '        </ul>',
            '    </div>',
            '    <div id="_bindingPanels" style="border-top:2px solid #efefef; padding:10px 0;">',
            '        <div>',
            '            <div data-step="1">',
            '                <ul class="group2">',
            '                    <li><input id="_txtMobile" class="input r3" type="tel" data-text="输入手机号" value="输入手机号" /></li>',
            '                    <li>',
            '                        <ul class="group g2 ww">',
            '                            <li><input id="_txtResult" class="input r3" type="tel" data-text="输入答案" value="输入答案" /></li>',
            '                            <li><img src="/Vercode?r=' + new Date() + '" onclick="this.src = \'/Vercode?r=\' + new Date()" style="width:100%; height:38px;" /></li>',
            '                        </ul>',
            '                    </li>',
            '                    <li><a id="_lnkCode" class="button color1 r3">获取短信验证码</a></li>',
            '                </ul>',
            '            </div>',
            '            <div id="_hasBinding" style="text-align:center; font-size:14px; display:none;"></div>',
            '            <div data-step="2" style="display:none;">',
            '                <ul class="group2">',
            '                    <li><input id="_txtCode" class="input r3" type="tel" data-text="输入短信验证码" value="输入短信验证码" /></li>',
            '                    <li><a id="_lnkBinding" class="button blue r3">确定绑定</a></li>',
            '                </ul>',
            '                <div class="wenxin bgwhite m10">',
            '                    <h3>温馨提示</h3>',
            '                    <ul>',
            '                        <li>1.绑定手机后可以通过手机号取回密码!</li>',
            '                    </ul>',
            '                </div>',
            '            </div>',
            '        </div>',
            '        <div style="display:none;">',
            '            <ul class="group2">',
            '                <li>',
            '                    <ul class="group g2 ww">',
            '                        <li><input id="_txtSmsMobile" class="input r3" type="tel" data-text="输入绑定的手机号" value="输入绑定的手机号" /></li>',
            '                        <li><a id="_bntSendSmsCode" class="button blue r3">获取验证码</a></li>',
            '                    </ul>',
            '                </li>',
            '                <li><input id="_txtSmsCode" class="input r3" type="tel" data-text="输入收到的短信验证码" value="输入收到的短信验证码" /></li>',
            '                <li><a id="_lnkUnbinding" class="button blue r3">确定解除手机绑定</a></li>',
            '            </ul>',
            '            <div class="wenxin bgwhite m10">',
            '                <h3>温馨提示</h3>',
            '                <ul>',
            '                    <li>1.解除手机绑定后将不能通过手机号取回密码!</li>',
            '                </ul>',
            '            </div>',
            '        </div>',
            '    </div>',
            '</div>'
        ];
        this.showWinbox('手机绑定', htm.join(""), true);
        // Local countdown state for the two SMS-code buttons.
        var _timer;
        var _delay = 60;
        // If the account is already bound, show the info text instead of step 1.
        ajaxLocalService("user_getbinding", { userid: userid },
            function (response) {
                var res = response.Data;
                var attements = response.Attachments;
                if (res == 1) {
                    $("#_hasBinding").html(attements);
                    $("#_hasBinding").show().siblings().hide();
                }
            });
        // Tab switching between bind / unbind panels (each tab is 50% wide).
        $("#_ulBinding").find("li").click(function () {
            var sel = $(this);
            var index = parseInt(sel.attr("data-index"));
            sel.parent().siblings().find(".animbg").animate({ marginLeft: index * 50 + "%" }, 200
                , function () { sel.addClass("curr").siblings().removeClass("curr"); });
            $("#_bindingPanels > div").eq(index).show().siblings().hide();
        });
        // Placeholder emulation for the dialog inputs.
        $(".input").click(
            function () {
                if ($(this).val() == $(this).attr("data-text")) {
                    $(this).val("");
                }
            }).blur(
            function () {
                if ($(this).val() == "") {
                    $(this).val($(this).attr("data-text"));
                }
            });
        // Submit the bind request (step 2 of the bind panel).
        $("#_lnkBinding").click(
            function () {
                var mobile = $("#_txtMobile").val();
                var code = $("#_txtCode").val();
                ajaxLocalService("user_binding", { userid: userid, code: code, mobile: mobile } ,
                    function (response) {
                        if (response.Code == 0) {
                            var res = response.Data;
                            Util.Alert("绑定成功");
                            if (typeof callback == "function") callback();
                            setTimeout(function () { Site.closeWinbox(); }, 1500)
                        }
                        else {
                            Util.Alert(response.Message);
                        }
                    });
            });
        // Submit the unbind request.
        $("#_lnkUnbinding").click(function () {
            var code = $("#_txtSmsCode").val();
            var mobile = $("#_txtSmsMobile").val();
            ajaxLocalService("user_unbinding", { userid: userid, code: code, mobile: mobile },
                function (response) {
                    if (response.Code == 0) {
                        var res = response.Data;
                        Util.Alert("成功解除绑定");
                        setTimeout(function () {
                            Site.closeWinbox();
                        }, 1500);
                    }
                    else {
                        Util.Alert(response.Message);
                    }
                });
        });
        // Send the unbind SMS code, then run a 60s resend cooldown.
        $('#_bntSendSmsCode').click(
            function () {
                if (Site.options.codeFlag == 1)
                    return;
                var mobile = $('#_txtSmsMobile').val();
                if (/^1\d{10}$/gi.test(mobile) == false) {
                    Util.Alert('请输入正确的手机号');
                    return;
                }
                ajaxLocalService("user_sendunbindingcode", { mobile: mobile, userid: userid }
                    , function (response) {
                        if (response.Code == 0) {
                            var res = response.Data;
                            Util.Alert("验证码已发送");
                            Site.options.codeFlag = 1;
                            _delay = 60;
                            _timer = setInterval(
                                function () {
                                    if (_delay <= 0) {
                                        clearInterval(_timer);
                                        Site.options.codeFlag = 0;
                                        $("#_bntSendSmsCode").html("获取验证码");
                                        return;
                                    }
                                    $("#_bntSendSmsCode").html("获取验证码(" + (_delay--) + ")");
                                }, 1000);
                        }
                        else {
                            Util.Alert(response.Message);
                        }
                    });
            });
        // Send the bind SMS code (requires the captcha answer), then advance
        // the bind panel from step 1 to step 2 and run the cooldown.
        $("#_lnkCode").click(function () {
            var mobile = $("#_txtMobile").val();
            if (/^\d{11}$/gi.test(mobile) == false) {
                Util.Alert("请输入正确的手机号")
                return;
            }
            var rv = $("#_txtResult").val();
            if (rv == "" || rv == $("#_txtResult").attr("data-text")) {
                Util.Alert("答案不能为空")
                return;
            }
            ajaxLocalService("user_sendbindingcode", {mobile: mobile, rv: rv }
                , function (response) {
                    var res = response.Data;
                    if (response.Code == 0) {
                        Util.Alert("验证码已发送");
                        Site.options.codeFlag = 1;
                        _delay = 60;
                        _timer = setInterval(
                            function () {
                                if (_delay <= 0) {
                                    clearInterval(_timer);
                                    Site.options.codeFlag = 0;
                                    $("#_lnkCode").html("获取验证码");
                                    return;
                                }
                                $("#_lnkCode").html("获取验证码(" + (_delay--) + ")");
                            }, 1000);
                        $("div[data-step='1']").hide()
                            .siblings("div[data-step='2']").show();
                    }
                    else {
                        Util.Alert(response.Message);
                    }
                });
        });
    },
/*显示确认框*/
showConfirmbox: function (msg, callback) {
var htm = [
'<div id="_confirmBox" class="box" style="padding:10px 0;">',
' <div class="box" style="padding-bottom:10px; color:#555; line-height:24px; text-indent:28px; ">',
msg,
' </div>',
' <div class="line"></div>',
' <ul class="group g2 m10">',
' <li><a href="javascript:;" data-value="1" class="button blue r3">确定</a></li>',
' <li><a href="javascript:;" data-value="0" class="button color2 r3">取消</a></li>',
' </ul>',
'</div>'
];
this.showWinbox('提示', htm.join(""), true);
$("#_confirmBox .button").click(function () {
var value = parseInt($(this).attr("data-value"));
if (typeof callback == "function")
callback(value);
Site.closeWinbox();
});
},
/*回顶部*/
initGotoTop: function () {
$("#_goTop").remove();
$('<a id="_goTop" href="javascript:;" class="gotop"></a>').appendTo("body");
$("#_goTop").click(function () {
$("body").animate({ scrollTop: 0 }, 200);
});
setInterval(function () {
var top = $("body").scrollTop();
top > 10 ? $("#_goTop").show() : $("#_goTop").hide();
}, 200);
},
/*显示更新*/
showUpdate: function () {
var html = [
'<div>',
' <p class="box" style="',
' padding:10px;',
' text-indent:28px;',
' font-size:14px;',
' color:#555;',
' line-height:150%;',
' text-shadow:0 1px 0 #fff;">更新到最新版本才可以签到!</p>',
' <ul class="group2">',
' <li><a class="button blue" href="javascript:XiaoXiangJS.checkVersion()">更新至最新版本</a></li>',
' </ul>',
'</div>'
];
this.showWinbox("提示", html.join(""), true);
},
/* Mobile-number binding dialog used by the sign-in/check-in flow.
   Step 1: enter mobile + captcha answer, request an SMS code;
   Step 2: enter the SMS code and confirm the binding.
   `callback` is invoked after a successful bind. */
showBindingMobileboxBySingin: function (userid, callback) {
    var htm = [
        '<div class="box">',
        ' <div style="padding:15px 10px; text-align:center; color:#555; font-size:15px; ">您需绑定手机后才能正常使用签到功能!</div>',
        ' <div id="_bindingPanels" style=" padding:0 0 10px 0;">',
        ' <div data-step="1">',
        ' <ul class="group2">',
        ' <li><input id="_txtMobile" class="input r3" type="tel" data-text="输入手机号" value="输入手机号" /></li>',
        ' <li>',
        ' <ul class="group g2 ww">',
        ' <li><input id="_txtResult" class="input r3" type="tel" data-text="输入答案" value="输入答案" /></li>',
        ' <li><img src="/Vercode?r=' + new Date() + '" onclick="this.src = \'/Vercode?r=\' + new Date()" style="width:100%; height:38px;" /></li>',
        ' </ul>',
        ' </li>',
        ' <li><a id="_lnkCode" class="button color1 r3">获取验证码</a></li>',
        ' </ul>',
        ' </div>',
        ' <div data-step="2" style="display:none;">',
        ' <ul class="group2">',
        ' <li><input id="_txtCode" class="input r3" type="tel" data-text="输入短信验证码" value="输入短信验证码" /></li>',
        ' <li><a id="_lnkBinding" class="button blue r3">确定绑定</a></li>',
        ' </ul>',
        ' </div>',
        ' <div class="wenxin bgwhite m10">',
        ' <h3>温馨提示</h3>',
        ' <ul>',
        ' <li>1. 绑定手机后可以通过手机号取回密码!</li>',
        ' </ul>',
        ' </div>',
        ' </div>',
        '</div>'
    ];
    this.showWinbox('手机绑定', htm.join(""), true);
    // Countdown state for the "resend code" button.
    var _timer;
    var _delay = 60;
    // Query the current binding state; Data == 1 means already bound.
    // NOTE(review): '#_hasBinding' is not part of the markup built above —
    // presumably it exists elsewhere on the page; confirm.
    ajaxLocalService("user_getbinding", { userid: userid },
        function (response) {
            var res = response.Data;
            var attements = response.Attachments;
            if (res == 1) {
                $("#_hasBinding").html(attements);
                $("#_hasBinding").show().siblings().hide();
            }
        });
    // Placeholder emulation: the input's value doubles as its hint text
    // (data-text), cleared on focus and restored when left empty.
    $(".input").click(
        function () {
            if ($(this).val() == $(this).attr("data-text")) {
                $(this).val("");
            }
        }).blur(
        function () {
            if ($(this).val() == "") {
                $(this).val($(this).attr("data-text"));
            }
        });
    // Step 2: submit mobile + SMS code to complete the binding.
    $("#_lnkBinding").click(
        function () {
            var mobile = $("#_txtMobile").val();
            var code = $("#_txtCode").val();
            ajaxLocalService("user_binding", {userid: userid, code: code, mobile: mobile }
            , function (response) {
                var res = response.Data;
                if (response.Code == 0) {
                    Util.Alert("绑定成功");
                    if (typeof callback == "function") callback();
                    setTimeout(function () { Site.closeWinbox(); }, 1500)
                }
                else {
                    Util.Alert(response.Message);
                }
            });
        });
    // Step 1: validate mobile + captcha locally, request the SMS code,
    // then start the 60-second resend countdown and reveal step 2.
    $("#_lnkCode").click(function () {
        var mobile = $("#_txtMobile").val();
        if (/^\d{11}$/gi.test(mobile) == false) {
            Util.Alert("请输入正确的手机号")
            return;
        }
        var rv = $("#_txtResult").val();
        if (rv == "" || rv == $("#_txtResult").attr("data-text")) {
            Util.Alert("答案不能为空")
            return;
        }
        ajaxLocalService("user_sendbindingcode",{ mobile: mobile, rv: rv }
        , function (response) {
            var res = response.Data;
            if (response.Code == 0) {
                Util.Alert("验证码已发送");
                Site.options.codeFlag = 1;
                _delay = 60;
                _timer = setInterval(
                    function () {
                        if (_delay <= 0) {
                            clearInterval(_timer);
                            Site.options.codeFlag = 0;
                            $("#_lnkCode").html("获取验证码");
                            return;
                        }
                        $("#_lnkCode").html("获取验证码(" + (_delay--) + ")");
                    }, 1000);
                $("div[data-step='1']").hide()
                    .siblings("div[data-step='2']").show();
            }
            else {
                Util.Alert(response.Message);
            }
        });
    });
}
};
})();
// Attach the back-to-top control once the page has fully loaded.
window.onload = function () {
    Site.initGotoTop();
};
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace Mvc.Web.Android.Models
{
/// <summary>
/// Base view model: carries the record identity shared by all page models.
/// </summary>
public partial class BaseModel
{
    // Record identifier; "ID" is also the label rendered by Html.LabelFor.
    [Display(Name = "ID")]
    public int ID { get; set; }
}
}<file_sep>/Mvc.Web.Android/Helper/Extensions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Web;
using System.Web.Security;
namespace Mvc.Web.Android
{
/// <summary>
/// Common extension helpers: null/default checks, best-effort conversion,
/// MD5 hashing and text shortening.
/// </summary>
public static class Extensions
{
    /// <summary>
    /// Returns true when a nullable value is null or equal to the type's default value.
    /// </summary>
    /// <typeparam name="T">value type</typeparam>
    /// <param name="source">source value</param>
    /// <returns>bool</returns>
    public static bool IsNullOrDefault<T>(this T? source) where T : struct
    {
        return default(T).Equals(
            source.GetValueOrDefault());
    }
    /// <summary>
    /// Best-effort conversion of <paramref name="source"/> to <typeparamref name="T"/>.
    /// Returns the caller-supplied default on null, DBNull or conversion failure;
    /// swallowing the exception is deliberate — this is a "try convert" helper.
    /// </summary>
    public static T To<T>(this object source, T value = default(T))
    {
        if (source != null
            && !(source is DBNull))
        {
            try
            {
                value = (T)Convert.ChangeType(source, typeof(T));
            }
            catch
            {
                // intentional: fall back to the supplied default
            }
        }
        return value;
    }
    /// <summary>
    /// MD5 hash of the string as lower-case hex.
    /// NOTE(review): HashPasswordForStoringInConfigFile is obsolete; kept because
    /// other code (DES key derivation, service signing) depends on this exact output.
    /// </summary>
    /// <param name="source">text to hash</param>
    /// <param name="bit">32 (default) for the full digest; 16 returns the middle 16 hex chars</param>
    /// <returns>String</returns>
    public static string MD5(this string source, int bit = 32)
    {
        var result = FormsAuthentication.HashPasswordForStoringInConfigFile(source, "md5");
        if (bit == 16)
            result = result.Substring(8, 16);
        return result.ToLower();
    }
    /// <summary>
    /// Shortens an intro text: strips HTML tags, whitespace, URLs, HTML entities
    /// and '*' runs, then truncates to <paramref name="len"/> chars with "..." appended.
    /// </summary>
    public static string ShortIntro(this string str, int len = 30)
    {
        if (!string.IsNullOrWhiteSpace(str))
        {
            var pattern = @"<[^>]+>|\s+|http[a-z0-9\\\/\&\?\:\.\%]+|\&[\w\d]{2,6}\;|\*+";
            str = Regex.Replace(str, pattern, "", RegexOptions.Compiled | RegexOptions.Multiline | RegexOptions.IgnoreCase);
            if (str.Length > len)
            {
                str = str.Substring(0, len) + "...";
            }
        }
        return str;
    }
    /// <summary>
    /// Formats a count using the "万" (10,000) unit, e.g. 123456 -> "12万".
    /// Values of 10000 or less are returned unchanged (strict '>' kept from the
    /// original to preserve the boundary behavior). The original also round-tripped
    /// the quotient through int.Parse((..).ToString()) for no effect — removed.
    /// </summary>
    public static string ToWan(this int num)
    {
        return num > 10000
            ? (num / 10000).ToString() + "万"
            : num.ToString();
    }
}
}
<file_sep>/Mvc.Web.Android/Models/Book/CategoryModel.cs
using NapiService;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace Mvc.Web.Android.Models.Book
{
/// <summary>
/// View model for the category page: selected class filter and sort order.
/// Converted from public fields to auto-properties: MVC's DefaultModelBinder
/// and data-annotation metadata only work with properties, not fields.
/// Defaults (New / None) are preserved via the constructor.
/// </summary>
public class CategoryModel
{
    public CategoryModel()
    {
        Sort = CategorySort.New;
        Type = CategoryClass.None;
    }
    /// <summary>
    /// Sort order; defaults to newest first.
    /// </summary>
    public CategorySort Sort { get; set; }
    /// <summary>
    /// Category filter; defaults to all categories.
    /// </summary>
    public CategoryClass Type { get; set; }
}
/// <summary>
/// Sort orders for the category page. Values 96-99 are local codes;
/// the others mirror NapiService.SearchSort values.
/// </summary>
public enum CategorySort
{
    /// <summary>
    /// Popular (same-day VIP reads, descending).
    /// </summary>
    Hot = (int)SearchSort.VipSameDay_Desc,
    /// <summary>
    /// Free books (local code).
    /// </summary>
    Free=96,
    /// <summary>
    /// Completed books (local code).
    /// </summary>
    Finished=99,
    /// <summary>
    /// Newest (same-day VIP desc, then update time desc).
    /// </summary>
    New = (int)SearchSort.VipSameDayDescAndUpdateTimeDesc,
    /// <summary>
    /// Special offer (local code).
    /// </summary>
    SpecialOffer = 97,
}
/// <summary>
/// Book categories for the category page; values mirror NapiService.SearchClass
/// except Monthly (98), which is a local code.
/// </summary>
public enum CategoryClass
{
    /// <summary>
    /// All categories.
    /// </summary>
    None = (int)SearchClass.None,
    /// <summary>
    /// Time travel (穿越).
    /// </summary>
    PassThrough = (int)SearchClass.PassThrough,
    /// <summary>
    /// Alternate history (架空).
    /// </summary>
    Aerial = (int)SearchClass.Aerial,
    /// <summary>
    /// Urban (都市).
    /// </summary>
    Urban = (int)SearchClass.Urban,
    /// <summary>
    /// Youth (青春).
    /// </summary>
    Youth = (int)SearchClass.Youth,
    /// <summary>
    /// Fantasy (魔幻).
    /// </summary>
    Magical = (int)SearchClass.Magical,
    /// <summary>
    /// Occult fantasy (玄幻).
    /// </summary>
    Occult = (int)SearchClass.Occult,
    /// <summary>
    /// Wealthy-family drama (豪门).
    /// </summary>
    PowerfulFamily = (int)SearchClass.PowerfulFamily,
    /// <summary>
    /// History (历史).
    /// </summary>
    History = (int)SearchClass.History,
    /// <summary>
    /// Supernatural powers (异能).
    /// </summary>
    Ability = (int)SearchClass.Ability,
    /// <summary>
    /// Short stories (短篇).
    /// </summary>
    ShortArticle = (int)SearchClass.ShortArticle,
    /// <summary>
    /// Tanbi / BL (耽美).
    /// </summary>
    Tanbi = (int)SearchClass.Tanbi,
    /// <summary>
    /// Monthly subscription books (local code).
    /// </summary>
    Monthly = 98,
}
}<file_sep>/Mvc.Web.Android/Helper/SiteHelper.cs
using Mvc.Web.Android.Settings;
using NapiService;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
namespace Mvc.Web.Android.Helper
{
/// <summary>
/// Site-wide helpers: pay-channel lookup, intro filtering, a home-grown
/// nibble-substitution cipher (XEncrypt/XDecrypt), DES encode/decode and
/// remote service invocation.
/// </summary>
public static class SiteHelper
{
    /// <summary>
    /// Resolves the recharge channel name from an order number: the channel
    /// code is the last '_'-separated segment of the order string.
    /// </summary>
    /// <param name="order">order number</param>
    /// <returns>channel display name, or "未知渠道" when unrecognized</returns>
    public static string GetPayChannel(object order)
    {
        var channelName = "未知渠道";
        var arr = order.To<string>(string.Empty).Split(new[] { "_" }, StringSplitOptions.RemoveEmptyEntries);
        if (arr.Length > 0)
        {
            var number = arr[arr.Length - 1];
            if (SiteSettings.PayChannel.Keys.Contains(number))
                channelName = SiteSettings.PayChannel[number];
        }
        return channelName;
    }
    /// <summary>
    /// Cleans a book intro: converts &lt;p&gt; to &lt;br&gt;, then strips URLs
    /// (up to the first CJK char) and HTML entities.
    /// </summary>
    public static string IntroFilter(string intro)
    {
        var str = intro;
        str = Regex.Replace(str, "<p>", "<br>", RegexOptions.IgnoreCase | RegexOptions.Multiline);
        var pattern = @"http[^\u4e00-\u9fa5]+|\&.*?\;";
        var regex = new Regex(pattern, RegexOptions.IgnoreCase | RegexOptions.Multiline | RegexOptions.Compiled);
        str = regex.Replace(str, "");
        return str;
    }
    /// <summary>
    /// Builds the 16-character substitution alphabet for XEncrypt/XDecrypt:
    /// the distinct hex digits of MD5(key) (upper-cased, in first-seen order),
    /// padded from "0123456789ABCDEF" until 16 entries.
    /// </summary>
    public static string GetEncryptDictionary(string key)
    {
        string s = "0123456789ABCDEF";
        if (string.IsNullOrEmpty(key)) key = "";
        string str = key.MD5().ToUpper();
        List<string> list = str.ToCharArray()
            .Distinct()
            .Select(o => o.ToString())
            .ToList();
        // Pad with unused hex digits so every nibble value 0..15 has a slot.
        if (list.Count < 16)
        {
            foreach (char c in s.ToCharArray())
            {
                if (!list.Contains(c.ToString()))
                {
                    list.Add(c.ToString());
                }
                if (list.Count >= 16)
                {
                    break;
                }
            }
        }
        return string.Join("", list);
    }
    /// <summary>
    /// Encrypts a string with the key-derived substitution alphabet: each char
    /// becomes 16 bits, and every 4-bit nibble maps to one alphabet character
    /// (so the output is 4x the input length).
    /// </summary>
    /// <param name="original">plaintext</param>
    /// <param name="key">encryption key</param>
    /// <returns>ciphertext</returns>
    public static string XEncrypt(string original, string key)
    {
        string dic = GetEncryptDictionary(key);
        StringBuilder str = new StringBuilder();
        if (!string.IsNullOrEmpty(original))
        {
            char[] chars = original.ToCharArray();
            string bytes = string.Join("", chars.Select(o => Convert.ToString(o, 2).PadLeft(16, '0')));
            for (int i = 0; i < bytes.Length; i += 4)
            {
                int index = Convert.ToInt32(bytes.Substring(i, 4), 2);
                str.Append(dic[index].ToString());
            }
        }
        return str.ToString();
    }
    /// <summary>
    /// Reverses <see cref="XEncrypt"/>: maps each ciphertext character back to
    /// its 4-bit nibble, then reassembles 16-bit characters.
    /// </summary>
    /// <param name="encrypted">ciphertext produced by XEncrypt with the same key</param>
    /// <param name="key">encryption key</param>
    /// <returns>plaintext</returns>
    public static string XDecrypt(string encrypted, string key)
    {
        string dic = GetEncryptDictionary(key);
        if (string.IsNullOrEmpty(encrypted))
        {
            return "";
        }
        StringBuilder str = new StringBuilder();
        foreach (char ch in encrypted.ToCharArray())
        {
            int index = dic.IndexOf(ch);
            string bytes = Convert.ToString(index, 2).PadLeft(4, '0');
            str.Append(bytes);
        }
        string s = str.ToString();
        str.Clear();
        for (int i = 0; i < s.Length; i += 16)
        {
            char ch = (char)Convert.ToInt32(s.Substring(i, 16), 2);
            str.Append(ch.ToString());
        }
        return str.ToString();
    }
    /// <summary>
    /// DES-decrypts a hex-encoded string. Key and IV are both the middle
    /// 8 chars of MD5(key) — must stay in sync with <see cref="DESEncode"/>.
    /// Fix: streams and the DES provider are now disposed (the original
    /// leaked the CryptoStream and provider on every call).
    /// </summary>
    /// <param name="str">hex ciphertext</param>
    /// <param name="key">decryption key</param>
    /// <returns>plaintext, or empty string on any failure (best-effort by design)</returns>
    public static string DESDecode(string str, string key)
    {
        string value = string.Empty;
        str = str ?? string.Empty;
        try
        {
            byte[] keyBytes = Encoding.UTF8.GetBytes(key.MD5().ToLower().Substring(8, 8));
            // Hex string -> byte buffer.
            byte[] buffer = new byte[str.Length / 2];
            for (int i = 0; i < (str.Length / 2); i++)
            {
                buffer[i] = (byte)Convert.ToInt32(str.Substring(i * 2, 2), 0x10);
            }
            using (DESCryptoServiceProvider provider = new DESCryptoServiceProvider())
            using (MemoryStream stream = new MemoryStream())
            {
                provider.Key = keyBytes;
                provider.IV = keyBytes;
                using (CryptoStream stream2 = new CryptoStream(stream, provider.CreateDecryptor(), CryptoStreamMode.Write))
                {
                    stream2.Write(buffer, 0, buffer.Length);
                    stream2.FlushFinalBlock();
                    value = Encoding.UTF8.GetString(stream.ToArray());
                }
            }
        }
        catch
        {
            // best-effort: malformed input yields the empty string
        }
        return value;
    }
    /// <summary>
    /// DES-encrypts a string and returns upper-case hex.
    /// Fix: streams and the DES provider are now disposed.
    /// </summary>
    /// <param name="str">plaintext</param>
    /// <param name="key">encryption key</param>
    /// <returns>hex ciphertext, or empty string on failure</returns>
    public static string DESEncode(string str, string key)
    {
        string value = string.Empty;
        try
        {
            byte[] keyBytes = Encoding.UTF8.GetBytes(key.MD5().ToLower().Substring(8, 8));
            byte[] bytes = Encoding.UTF8.GetBytes(str);
            using (DESCryptoServiceProvider provider = new DESCryptoServiceProvider())
            using (MemoryStream stream = new MemoryStream())
            {
                provider.Key = keyBytes;
                provider.IV = keyBytes;
                using (CryptoStream stream2 = new CryptoStream(stream, provider.CreateEncryptor(), CryptoStreamMode.Write))
                {
                    stream2.Write(bytes, 0, bytes.Length);
                    stream2.FlushFinalBlock();
                    StringBuilder builder = new StringBuilder();
                    foreach (byte num in stream.ToArray())
                    {
                        builder.AppendFormat("{0:X2}", num);
                    }
                    value = builder.ToString();
                }
            }
        }
        catch
        {
            // best-effort: failures yield the empty string
        }
        return value;
    }
    /// <summary>
    /// Invokes the legacy services endpoint with a JSON payload; returns the
    /// deserialized Data, or <paramref name="defaultValue"/> on any failure.
    /// </summary>
    /// <typeparam name="T">result type</typeparam>
    /// <param name="requestData_Json">request payload (JSON)</param>
    /// <param name="defaultValue">value returned on failure</param>
    /// <returns>T</returns>
    public static T GetServices<T>(string requestData_Json, T defaultValue = default(T))
    {
        T value = defaultValue;
        try
        {
            string urlData = HttpUtility.UrlEncode(requestData_Json);
            string url = "http://napi.xxsy.net/services?requestData=" + urlData;
            string service_back = WebHelper.QuickGet(url);
            HttpResponseModel<T> model = WebHelper.Json<HttpResponseModel<T>>(service_back);
            if (model.Code == 0)
                value = model.Data;
        }
        catch
        {
            // best-effort: network/parse failures fall back to defaultValue
        }
        return value;
    }
    /// <summary>
    /// Http response envelope. Kept as public fields (not properties): these
    /// are JSON deserialization targets for WebHelper.Json.
    /// </summary>
    public partial class HttpResponseModel<T>
    {
        /// <summary>
        /// Response code (0 = success).
        /// </summary>
        public int Code = 0;
        /// <summary>
        /// Response message.
        /// </summary>
        public string Message = string.Empty;
        /// <summary>
        /// Response payload.
        /// </summary>
        public T Data = default(T);
        /// <summary>
        /// Extra data.
        /// </summary>
        public object Attachments = default(Object);
    }
    /// <summary>
    /// Invokes the services2 endpoint with query-string parameters and a
    /// signed "method" value; returns Data and outputs Attachments.
    /// </summary>
    /// <typeparam name="T">T</typeparam>
    /// <typeparam name="TAttach">TAttach</typeparam>
    /// <param name="httpParameters">parameter dictionary (must contain "method")</param>
    /// <param name="attach">Attachments output</param>
    /// <returns>T</returns>
    public static T InvokeService<T, TAttach>(Dictionary<string, object> httpParameters, out TAttach attach)
    {
        if (!httpParameters.ContainsKey("method"))
            // Fix: was `throw new Exception("method")` — a bare Exception with a
            // one-word message; ArgumentException keeps catch(Exception) callers working.
            throw new ArgumentException("httpParameters must contain a 'method' entry", "httpParameters");
        T value = default(T);
        attach = default(TAttach);
        string host = "http://napi.xxsy.net/services2?";
        StringBuilder sb = new StringBuilder();
        foreach (var k in httpParameters.Keys)
            sb.AppendFormat("{0}={1}&", k, HttpUtility.UrlEncode(httpParameters[k].To<string>(string.Empty)));
        // Signature: MD5(method + shared secret) — must match the server side.
        string sign = (httpParameters["method"] + "vage~244@$9234sdfnsdf~!").MD5();
        sb.AppendFormat("sign={0}", sign);
        host += sb.ToString();
        sb.Clear();
        string responseData = WebHelper.QuickGet(host);
        var httpModel = WebHelper.Json<HttpResModel<T, TAttach>>(responseData);
        if (httpModel != null)
        {
            value = httpModel.Data;
            attach = httpModel.Attachments;
        }
        return value;
    }
    /// <summary>
    /// Convenience overload of <see cref="InvokeService{T, TAttach}"/> that
    /// discards the attachments.
    /// </summary>
    /// <typeparam name="T">T</typeparam>
    /// <param name="httpParameters">parameter dictionary</param>
    /// <returns>T</returns>
    public static T InvokeService<T>(Dictionary<string, object> httpParameters)
    {
        Object attch;
        return InvokeService<T, Object>(httpParameters, out attch);
    }
    /// <summary>
    /// Typed response envelope (JSON deserialization target — keep as fields).
    /// </summary>
    /// <typeparam name="T">T</typeparam>
    /// <typeparam name="TAttach">TAttach</typeparam>
    public class HttpResModel<T, TAttach>
    {
        /// <summary>
        /// Response code (0 = success).
        /// </summary>
        public int Code = 0;
        /// <summary>
        /// Response message.
        /// </summary>
        public string Message = string.Empty;
        /// <summary>
        /// Response payload.
        /// </summary>
        public T Data = default(T);
        /// <summary>
        /// Extra data.
        /// </summary>
        public TAttach Attachments = default(TAttach);
    }
}
}
<file_sep>/Mvc.Web.Android/Helper/SmsHelper.cs
using NapiService;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Security;
namespace Mvc.Web.Android.Helper
{
/// <summary>
/// SMS sending support.
/// </summary>
public static class SmsHelper
{
    /// <summary>
    /// SMS gateway url (basic form).
    /// </summary>
    private static string HOST = "http://link.xxsy.net/svc/sms.aspx?mobile={0}&message={1}&token={2}";
    /// <summary>
    /// SMS gateway url (with caller ip and purpose).
    /// </summary>
    private static string HOST2 = "http://link.xxsy.net/svc/sms.aspx?mobile={0}&message={1}&token={2}&ip={3}&forwhat={4}";
    /// <summary>
    /// Sends an SMS message.
    /// Fix: the message is now url-encoded before being placed in the query
    /// string (the other overload already did this); raw '&amp;', '+' or spaces
    /// in the message corrupted the request.
    /// </summary>
    /// <param name="mobile">mobile number</param>
    /// <param name="message">message body (must match an approved template)</param>
    /// <returns>true when the gateway reports success</returns>
    public static bool Send(string mobile, string message)
    {
        var value = false;
        // NOTE(review): shared secret embedded in source; should live in config.
        var secret = "d6e3ea3679d28199c60b61dfeba204ac";
        // Daily token: MD5("xxsynet" + mobile + yyyyMMdd + secret), lower hex.
        var token = FormsAuthentication.HashPasswordForStoringInConfigFile(
            string.Format("xxsynet{0}{1}{2}", mobile, DateTime.Now.ToString("yyyyMMdd"), secret), "MD5").ToLower();
        using (var client = new WebClient())
        {
            var temp = client.DownloadString(string.Format(HOST, mobile, HttpUtility.UrlEncode(message), token));
            var res = 0;
            int.TryParse(temp, out res);
            // Gateway returns a positive integer on success.
            value = (res > 0);
        }
        return value;
    }
    /// <summary>
    /// Sends an SMS message, reporting the caller IP and purpose to the gateway.
    /// </summary>
    /// <param name="mobile">mobile number</param>
    /// <param name="message">message body</param>
    /// <param name="forwhat">purpose tag</param>
    /// <returns>bool</returns>
    public static bool Send(string mobile, string message, string forwhat)
    {
        var value = false;
        var secret = "d6e3ea3679d28199c60b61dfeba204ac";
        var token = FormsAuthentication.HashPasswordForStoringInConfigFile(
            string.Format("xxsynet{0}{1}{2}", mobile, DateTime.Now.ToString("yyyyMMdd"), secret), "MD5").ToLower();
        using (var client = new WebClient())
        {
            var temp = client.DownloadString(string.Format(HOST2, mobile, HttpUtility.UrlEncode(message), token, WebHelper.IP(), HttpUtility.UrlEncode(forwhat)));
            var res = 0;
            int.TryParse(temp, out res);
            value = (res > 0);
        }
        return value;
    }
}
}<file_sep>/Mvc.Web.Android/Models/HttpRequestArgsModel.cs
using Mvc.Web.Android.Controllers;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace Mvc.Web.Android.Models
{
/// <summary>
/// Incoming service request: method name, parameter bag and receive time.
/// </summary>
public class HttpRequestArgsModel
{
    /// <summary>
    /// Initializes an empty request stamped with the current time.
    /// </summary>
    public HttpRequestArgsModel()
    {
        Method = string.Empty;
        Parameters = new NxpDictionary<string, object>();
        Date = DateTime.Now;
    }
    /// <summary>
    /// Name of the service method to dispatch.
    /// </summary>
    public string Method { get; set; }
    /// <summary>
    /// Named arguments for the method.
    /// </summary>
    public NxpDictionary<string, object> Parameters { get; set; }
    /// <summary>
    /// Time the request object was created.
    /// </summary>
    public DateTime Date { get; set; }
}
}<file_sep>/Mvc.Web.Android/Models/MemberTagsModel.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Web;
namespace Mvc.Web.Android.Models
{
/// <summary>
/// MemberTags view model: login name and password with validation rules.
/// </summary>
/// Not mapped to any database table, hence [NotMapped].
[NotMapped]
public partial class MemberTagsModel : BaseModel
{
    // Login name, 4-20 chars; "×" is the compact inline validation marker.
    [Display(Name = "登录名", Description = "4-20个字符")]
    [Required(ErrorMessage = "×")]
    [StringLength(20, MinimumLength = 4, ErrorMessage = "×")]
    public string Name { get; set; }
    // Password, 6-20 chars.
    [Display(Name = "密码", Description = "6-20个字符")]
    [Required(ErrorMessage = "×")]
    [StringLength(20, MinimumLength = 6, ErrorMessage = "×")]
    public string Password { get; set; }
}
}<file_sep>/Mvc.Web.Android/Script/tiyanbi.js
/// <reference path="JX.js" />
/* One-time promotional overlay for trial coins ("tiyanbi"), shown only to
   new users (isNew == 1) and at most once per day (cookie-gated). */
function showTiyanbiAds(isNew) {
    if (isNew != 1) return;
    // Cookie gate: a non-empty value means the overlay was already shown.
    if (Util.CookieValue('showtiyanbiads') != '') return;
    var markup = [
        '<div class="tiyanbi">',
        ' <div class="tiyanbi_cover"></div>',
        ' <div class="tiyanbi_ads">',
        ' <div class="tiyanbi_main">',
        ' <img src="http://images.xxsy.net/mxxsynet/tiyanbi_ads.png" />',
        ' <a class="tiyanbi_button" href="Chongzhi.aspx">去充值</a>',
        ' <a class="tiyanbi_help" href="MyTiyanbiMark.aspx">体验币的秘密,你造吗?</a>',
        ' </div>',
        ' </div>',
        '</div>'
    ].join('');
    $('.tiyanbi').remove();
    $(markup).appendTo('body');
    // Remember for 24 hours (minutes).
    Util.CookieWrite('showtiyanbiads', '1', 60 * 24);
    // Position the panel only after the image has loaded, so its height is known.
    $('.tiyanbi_main img').load(function () {
        $('.tiyanbi_cover').height($(document).height()).click(function () { $('.tiyanbi').remove(); });
        var panel = $('.tiyanbi_ads');
        var offset = ($(window).height() - panel.height()) * 0.5 - 20;
        if (offset < 0) offset = 0;
        panel.css({ top: offset + 'px' });
    });
}
using Mvc.Web.Android.Helper;
using NapiService;
using NapiService.Model;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class Info_OLDController : BaseController
{
    /// <summary>
    /// Detail payload rendered by the view.
    /// </summary>
    public InfoDetail Detail = new InfoDetail();
    // Service clients. NOTE(review): _memberService is used below but not
    // declared here — presumably inherited from BaseController; confirm.
    NapiService.BookService _bookService = new NapiService.BookService();
    NapiService.BookcaseService _bookcaseService = new NapiService.BookcaseService();
    NapiService.ChapterService _bookChapterService = new NapiService.ChapterService();
    /// <summary>
    /// Page entry point: loads the book detail and prepares the special-offer /
    /// download button markup in the ViewBag.
    /// </summary>
    public ActionResult Index(int bookid = 0)
    {
        ViewBag.BID = bookid;
        if (bookid == 0)
            return RedirectToAction("NotFound", "Error", new { err = "该作品不存在或者已暂时被下架" });
        // Always true after the guard above — redundant check kept (doc-only pass).
        if (bookid != 0)
        {
            ViewBag.IsKeep = _bookcaseService.Exists(ViewBag.User.ID, bookid);
        }
        Detail = GetInfoDetail(bookid);
        // Check = 0 means the book is published/visible.
        if (Detail.Book.Id != bookid || Detail.Book.Check != 0)
            return RedirectToAction("NotFound", "Error", new { err = "该作品不存在或者已暂时被下架" });
        if (Detail.Book.SpecialOffer > 0)
        {
            // Special-offer book: show download + "buy at special price" buttons.
            // SpecialOffer is stored in cents; divided by 100 for display.
            ViewBag.Tejia_Class = "group g2";
            ViewBag.Tejia_InnerHtml = string.Format(
                "<li><a href=\"javascript:XiaoXiangJS.download({1},'{3}')\" class=\"button color1 r3\">下载到本地离线阅读</a></li><li><a href=\"javascript:;\" onclick=\"Site.showQuanbenrSubscribebox({0},{1})\" class=\"button color3 r3\">{2}元特价</a></li>"
                , ViewBag.UserID, Detail.Book.Id, Detail.Book.SpecialOffer / 100, Detail.Book.Title);
            // If the user has an active monthly subscription and the book is
            // covered by it, drop the "special price" button.
            if (ViewBag.User.ID > 0)
            {
                DateTime baoyueTime = _memberService.GetMonthlyEndTime(ViewBag.User.ID);
                if (baoyueTime > DateTime.Now)
                {
                    if (Detail.Book.IsMonthly == 1)
                    {
                        ViewBag.Tejia_Class = "group g1";
                        ViewBag.Tejia_InnerHtml = string.Format(
                            "<li><a href=\"javascript:XiaoXiangJS.download({0},'{1}')\" class=\"button color1 r3\">下载到本地离线阅读</a></li>"
                            , Detail.Book.Id, Detail.Book.Title);
                    }
                }
            }
        }
        else
        {
            // Regular book: download button only.
            ViewBag.Tejia_Class = "group g1";
            ViewBag.Tejia_InnerHtml = string.Format(
                "<li><a href=\"javascript:XiaoXiangJS.download({0},'{1}')\" class=\"button color1 r3\">下载到本地离线阅读</a></li>"
                , Detail.Book.Id, Detail.Book.Title);
        }
        return View(Detail);
    }
    /// <summary>
    /// Loads the detail payload: book record plus its latest chapter.
    /// (Reviews / similar books / author's other books are disabled below.)
    /// </summary>
    InfoDetail GetInfoDetail(int bookid)
    {
        var detail = new InfoDetail();
        var book = _bookService.Get(bookid);
        var lastChapterTags = _bookChapterService.GetLast(bookid);
        if (book != null)
        {
            detail.Book = book;
            // Latest 5 reviews (disabled):
            //detail.Revies.AddRange(_bookReviewService.GetReviews(bookid, 0, 5, 0, out iTotal));
            // Same-category recommendations (disabled):
            //detail.SimilarBooks.AddRange(_cacheManager.Get<List<Nxp.Framework.Model.Books.Booksearch>>(string.Format("TongleiTuijian_{0}", book.ClassId), 180, () => { return _bookSearchService.SameRecommendtion(bookid, 5, 50); }));
            // Author's other books (disabled):
            //detail.AuthorOtherBooks.AddRange(_bookSearchService.GetAuthorOtherBooks(bookid));
            // Latest chapter.
            detail.LastChapterTags = lastChapterTags;
        }
        return detail;
    }
    // Aggregate view model for the detail page.
    public class InfoDetail
    {
        public Book Book = new Book();
        public Bookgift Props = new Bookgift();
        public List<Bookreview> Revies = new List<Bookreview>();
        public List<Booksearch> AuthorOtherBooks = new List<Booksearch>();
        public List<Booksearch> SimilarBooks = new List<Booksearch>();
        public Chapter LastChapterTags = new Chapter();
    }
}
}
<file_sep>/Mvc.Web.Android/Controllers/ServicesController.cs
using Mvc.Web.Android.Helper;
using Mvc.Web.Android.Models;
using Mvc.Web.Android.Settings;
using NapiService;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class ServicesController : BaseController
{
#region MyRegion
//private MemberService _memberService;
public ServicesController()
{
    // NOTE(review): _memberService construction is commented out — it is
    // presumably provided by BaseController; confirm.
    //_memberService = new MemberService();
}
#endregion
/// <summary>
/// JSONP callback function name captured from the request.
/// </summary>
string _NxpCallback = string.Empty;
/// <summary>
/// Service entry point: parses the JSON request and dispatches on its
/// lower-cased method name.
/// <para>@requestData: JSON payload (HttpRequestArgsModel shape)</para>
/// <para>@nxpCallback: JSONP callback function name</para>
/// </summary>
public ActionResult Index(string requestData = "", string nxpCallback = "")
{
    this._NxpCallback = nxpCallback;
    if (!string.IsNullOrEmpty(requestData))
    {
        var data = WebHelper.Json<HttpRequestArgsModel>(requestData);
        if (data != null)
        {
            switch (data.Method.ToLower())
            {
                #region 用户相关
                // user login
                case "user_login": return Login(data.Parameters);
                // user status check
                case "user_check": return CheckUser(data.Parameters);
                // send registration verification code
                case "user_sendregistercode": return SendRegisterCode(data.Parameters);
                // send password-recovery verification code
                case "user_sendfindpasswordcode": return SendFindPasswordCode(data.Parameters);
                // send mobile-binding verification code
                case "user_sendbindingcode": return SendBindingCode(data.Parameters);
                // send mobile-unbinding verification code
                case "user_sendunbindingcode": return SendUnBindingCode(data.Parameters);
                // recover password
                case "user_findpassword": return FindPassword(data.Parameters);
                // get binding info
                case "user_getbinding": return GetBinding(data.Parameters);
                // bind mobile number
                case "user_binding": return MemberBinding(data.Parameters);
                // unbind mobile number
                case "user_unbinding": return MemberUnbinding(data.Parameters);
                #endregion
            }
        }
    }
    // Unknown method / empty request: fall through to the default view.
    return View();
}
/// <summary>
/// Sends the mobile-unbinding SMS verification code.
/// <para>@mobile: mobile number (must match the account's bound number)</para>
/// <para>@userid: user id</para>
/// </summary>
private ActionResult SendUnBindingCode(NxpDictionary<string, object> nxpDictionary)
{
    var mobile = nxpDictionary.GetString("mobile", string.Empty);
    var userid = nxpDictionary.GetInt32("userid", 0);
    var member = _memberService.GetById(userid);
    // Fix: GetById can return null for an unknown id; the original dereferenced
    // member.Mobile unconditionally and crashed with a NullReferenceException.
    // Guard mirrors the one in MemberUnbinding.
    if (member == null || member.Id == 0)
    {
        return LogError("用户不存在");
    }
    if (member.Mobile != mobile)
    {
        return LogError("请输入正确的手机号");
    }
    if (string.IsNullOrWhiteSpace(member.Mobile))
    {
        return LogError("手机号已解除绑定");
    }
    // NOTE(review): unlike the other user_* handlers, no Security(userid)
    // check is performed here — confirm whether that is intentional.
    var code = new Random().Next(1000, 9999);
    var res = _memberService.SendSmsUnbindingMobileMessage(mobile, code.To<string>());
    if (!res)
    {
        return LogError("发送失败或使用次数已满");
    }
    // Store the code and reset the failed-attempt counter in Session.
    Session["UNBINDING_CODE_" + WebHelper.Md5(mobile)] = code;
    Session["UNBINDING_CODE_COUNT_" + WebHelper.Md5(mobile)] = 0;
    return ShowResult("验证码已发送");
}
/// <summary>
/// Sends the mobile-binding SMS verification code.
/// <para>@mobile: mobile number</para>
/// <para>@rv: captcha answer</para>
/// </summary>
private ActionResult SendBindingCode(NxpDictionary<string, object> nxpDictionary)
{
    var mobile = nxpDictionary.GetString("mobile", string.Empty);
    var rv = nxpDictionary.GetString("rv", string.Empty);
    if (string.IsNullOrEmpty(rv) || rv != Session["VALIDATE_CODE"].To<string>(string.Empty))
    {
        return LogError("输入的答案不正确");
    }
    // Fix: the pattern was unanchored ("\\d{11}"), so any string merely
    // CONTAINING 11 consecutive digits (e.g. a 12-digit number) passed.
    // Anchored to match the client-side check /^\d{11}$/ exactly.
    if (!Regex.IsMatch(mobile, "^\\d{11}$"))
    {
        return LogError("非法手机号");
    }
    if (_memberService.GetByMobile(mobile) != null)
    {
        return LogError("手机号已被绑定");
    }
    var code = new Random().Next(1000, 9999);
    var res = _memberService.SendSmsBindingMobileMessage(mobile, code.To<string>());
    if (!res)
    {
        return LogError("发送失败或使用次数已满");
    }
    // Store the code and reset the failed-attempt counter in Session.
    Session["BINDING_CODE_" + WebHelper.Md5(mobile)] = code;
    Session["BINDING_CODE_COUNT_" + WebHelper.Md5(mobile)] = 0;
    return ShowResult("验证码已发送");
}
/// <summary>
/// Unbinds a mobile number from the account after verifying the SMS code.
/// <para>@userid: user id</para>
/// <para>@code: SMS verification code</para>
/// <para>@mobile: mobile number</para>
/// </summary>
private ActionResult MemberUnbinding(NxpDictionary<string, object> nxpDictionary)
{
    var userid = nxpDictionary.GetInt32("userid", 0);
    var code = nxpDictionary.GetString("code", string.Empty);
    var mobile = nxpDictionary.GetString("mobile", string.Empty);
    if (!Security(userid))
        return LogError("验证用户失败请重新登陆!", -1);
    // Lock out after 3 wrong codes for this mobile.
    int errorNumber = Session["UNBINDING_CODE_COUNT_" + WebHelper.Md5(mobile)].To<int>();
    if (errorNumber >= 3)
    {
        return LogError("验证码不正确");
    }
    var myCode = Session["UNBINDING_CODE_" + WebHelper.Md5(mobile)].To<string>("");
    if (code == "" || code != myCode)
    {
        // Fix: was `errorNumber++` (post-increment), which stored the OLD value
        // back into Session — the counter never advanced and the 3-attempt
        // lockout never triggered. FindPassword already used `+ 1` correctly.
        Session["UNBINDING_CODE_COUNT_" + WebHelper.Md5(mobile)] = errorNumber + 1;
        return LogError("验证码不正确");
    }
    var member = _memberService.GetById(userid);
    if (member == null || member.Id == 0)
        return LogError("用户不存在");
    if (member.IsBindingMobile == 0)
    {
        return LogError("用户已解除绑定");
    }
    // Third argument 0 = clear the binding flag.
    var httpModel = _memberService.UpdateMobileHttpModel(userid, mobile, 0);
    if (httpModel.Code == 0)
        return ShowResult(httpModel.Data, httpModel.Attachments);
    else
        return LogError(httpModel.Message);
}
/// <summary>
/// Binds a mobile number to the account after verifying the SMS code.
/// <para>@userid: user id</para>
/// <para>@mobile: mobile number</para>
/// <para>@code: SMS verification code</para>
/// </summary>
private ActionResult MemberBinding(NxpDictionary<string, object> nxpDictionary)
{
    var userid = nxpDictionary.GetInt32("userid", 0);
    var mobile = nxpDictionary.GetString("mobile", string.Empty);
    var code = nxpDictionary.GetString("code", string.Empty);
    if (!Security(userid))
        return LogError("验证用户失败请重新登陆!", -1);
    // Lock out after 3 wrong codes for this mobile.
    int errorNumber = Session["BINDING_CODE_COUNT_" + WebHelper.Md5(mobile)].To<int>();
    if (errorNumber >= 3)
    {
        return LogError("验证码不正确");
    }
    var myCode = Session["BINDING_CODE_" + WebHelper.Md5(mobile)].To<string>("");
    if (code == "" || code != myCode)
    {
        // Fix: was `errorNumber++` (post-increment), which wrote the OLD value
        // back to Session so the lockout counter never advanced.
        Session["BINDING_CODE_COUNT_" + WebHelper.Md5(mobile)] = errorNumber + 1;
        return LogError("验证码不正确");
    }
    var member = _memberService.GetById(userid);
    // Fix: null guard added for consistency with MemberUnbinding — the original
    // dereferenced member.IsBindingMobile without checking for null.
    if (member == null || member.Id == 0)
        return LogError("用户不存在");
    if (member.IsBindingMobile == 1)
    {
        return LogError("用户已绑定");
    }
    // Third argument 1 = set the binding flag.
    var httpModel = _memberService.UpdateMobileHttpModel(userid, mobile, 1);
    if (httpModel.Code == 0)
        return ShowResult(httpModel.Data, httpModel.Attachments);
    else
        return LogError(httpModel.Message);
}
/// <summary>
/// Returns the member's mobile-binding flag plus a masked hint of the bound number.
/// <para>@userid: user id</para>
/// </summary>
private ActionResult GetBinding(NxpDictionary<string, object> nxpDictionary)
{
    var userid = nxpDictionary.GetInt32("userid", 0);
    if (!Security(userid))
        return LogError("验证用户失败请重新登陆!", -1);
    var member = _memberService.GetById(userid);
    // Mask the middle digits of the bound number, e.g. "1380***123".
    var hint = "";
    if (!string.IsNullOrEmpty(member.Mobile))
    {
        hint = string.Format("你已绑定手机号为:{0}***{1}",
            member.Mobile.Substring(0, 4),
            member.Mobile.Substring(member.Mobile.Length - 3));
    }
    return ShowResult(member.IsBindingMobile, hint);
}
/// <summary>
/// Returns the member's status flag; 0 when the user does not exist.
/// <para>@userid: user id</para>
/// </summary>
private ActionResult CheckUser(NxpDictionary<string, object> nxpDictionary)
{
    var member = _memberService.GetById(nxpDictionary.GetInt32("userid", 0));
    return ShowResult(member == null ? 0 : member.Status);
}
/// <summary>
/// Recovers a password: verifies the SMS code, updates the password,
/// then logs the user in with the new credentials.
/// <para>@username: login name</para>
/// <para>@verifycode: SMS verification code</para>
/// <para>@userpass: new password</para>
/// </summary>
private ActionResult FindPassword(NxpDictionary<string, object> nxpDictionary)
{
    var userName = nxpDictionary.GetString("username", string.Empty);
    var verifyCode = nxpDictionary.GetString("verifycode", string.Empty);
    var userPass = nxpDictionary.GetString("userpass", string.Empty);
    if (userPass.Length < 6)
    {
        return LogError("密码不能少于6个字符");
    }
    // Lock out after 3 wrong codes for this user name.
    int errorNumber = Session["FINDPASSWORD_CODE_COUNT_" + WebHelper.Md5(userName)].To<int>();
    if (errorNumber >= 3)
    {
        return LogError("验证码不正确");
    }
    var myCode = Session["FINDPASSWORD_CODE_" + WebHelper.Md5(userName)].To<string>("");
    if (verifyCode == "" || verifyCode != myCode)
    {
        Session["FINDPASSWORD_CODE_COUNT_" + WebHelper.Md5(userName)] = errorNumber + 1;
        return LogError("验证码不正确");
    }
    var member = _memberService.GetByName(userName);
    if (member == null)
    {
        return LogError("用户名不存在或手机未绑定");
    }
    var httpModel = _memberService.UpdatePasswordHttpModel(userName, userPass);
    if (httpModel.Code == 0)
    {
        // Password updated — sign the user in with the new credentials.
        return Login(new NxpDictionary<string, object>
        {
            {"username",userName},
            {"userpassword",userPass}
        });
    }
    else
    {
        return LogError("修改密码失败");
    }
}
/// <summary>
/// Sends the password-recovery SMS verification code to the member's
/// bound mobile number.
/// <para>@username: login name</para>
/// <para>@rv: captcha answer</para>
/// </summary>
private ActionResult SendFindPasswordCode(NxpDictionary<string, object> nxpDictionary)
{
    var userName = nxpDictionary.GetString("username", string.Empty);
    var rv = nxpDictionary.GetString("rv", string.Empty);
    if (string.IsNullOrEmpty(rv) || rv != Session["VALIDATE_CODE"].To<string>(string.Empty))
    {
        return LogError("输入的答案不正确");
    }
    var member = _memberService.GetByName(userName);
    if (member == null)
    {
        return LogError("用户名不存在");
    }
    if (member.IsBindingMobile != 1)
    {
        return LogError("用户未绑定手机号");
    }
    // Status != 0 means the account is frozen.
    if (member.Status != 0)
    {
        return LogError("用户账号被冻结");
    }
    var code = new Random().Next(1000, 9999);
    // NOTE(review): reuses the *registration* SMS template for recovery —
    // confirm this is intentional.
    var res = _memberService.SendSmsRegisterMessage(member.Mobile, code.To<string>());
    if (!res)
    {
        return LogError("发送失败或使用次数已满");
    }
    // Store the code and reset the failed-attempt counter in Session.
    Session["FINDPASSWORD_CODE_" + WebHelper.Md5(userName)] = code;
    Session["FINDPASSWORD_CODE_COUNT_" + WebHelper.Md5(userName)] = 0;
    return ShowResult("验证码已发送");
}
/// <summary>
/// Sends the registration SMS verification code.
/// <para>@mobile: mobile number</para>
/// <para>@rv: captcha answer</para>
/// </summary>
private ActionResult SendRegisterCode(NxpDictionary<string, object> nxpDictionary)
{
    // (Stray double semicolon removed from the original.)
    var mobile = nxpDictionary.GetString("mobile", string.Empty);
    var rv = nxpDictionary.GetString("rv", string.Empty);
    if (string.IsNullOrEmpty(rv) || rv != Session["VALIDATE_CODE"].To<string>(string.Empty))
    {
        return LogError("输入的答案不正确");
    }
    // Fix: pattern was unanchored ("\\d{11}") and accepted any string merely
    // containing 11 consecutive digits; anchored to require exactly 11 digits.
    if (!Regex.IsMatch(mobile, "^\\d{11}$"))
    {
        return LogError("非法手机号");
    }
    var member = _memberService.GetByName(mobile);
    if (member != null && member.Id > 0)
    {
        return LogError("手机号已存在");
    }
    var tempMember = _memberService.GetByMobile(mobile);
    if (tempMember != null && tempMember.Id > 0)
    {
        return LogError("该手机号已被其他账号绑定");
    }
    var code = new Random().Next(1000, 9999);
    var res = _memberService.SendSmsRegisterMessage(mobile, code.To<string>());
    if (!res)
    {
        return LogError("发送失败或使用次数已满");
    }
    // Store the code and reset the failed-attempt counter in Session.
    Session["REGISTER_CODE_" + WebHelper.Md5(mobile)] = code;
    Session["REGISTER_CODE_COUNT_" + WebHelper.Md5(mobile)] = 0;
    return ShowResult("成功发送了验证码");
}
/// <summary>
/// Logs a user in and persists the login as cookies.
/// <para>@username: user name</para>
/// <para>@userpassword: password</para>
/// <para>@imei: device id (currently unused, see disabled block below)</para>
/// <para>@devicetype: device type</para>
/// </summary>
private ActionResult Login(NxpDictionary<string, object> nxpDictionary)
{
    var userName = nxpDictionary.GetString("username", string.Empty);
    var userPass = nxpDictionary.GetString("userpassword", string.Empty);
    // added 2014-07-07
    var imei = nxpDictionary.GetString("imei", string.Empty);
    var deviceType = nxpDictionary.GetString("devicetype", string.Empty);
    var httpModel = _memberService.LoginHttpModel(userName, userPass, SiteSettings.SubscribeChannel);
    var member = httpModel.Data;
    if (httpModel.Code != 0)
    {
        return LogError(httpModel.Message);
    }
    // Persist an encrypted user ticket plus the server session code in cookies.
    // NOTE(review): the Password property below was reconstructed from a
    // redacted source — verify against the original USER_FLAG cookie format.
    string attachments = WebHelper.Json(new { Id = member.Id, Name = member.Name, Password = member.Password });
    attachments = WebHelper.Encrypt(attachments);
    int expires = 999 * 24 * 60;
    WebHelper.SetCookie("Session_Code", httpModel.Attachments, expires);
    WebHelper.SetCookie("USER_FLAG", attachments, expires);
    return ShowResult(member);
    // IMEI recording (disabled)
    //imei = WebHelper.Decrypt(imei);
    //if (!string.IsNullOrEmpty(imei))
    //{
    //    _ingotService.InsertDevice(member.Id, imei, deviceType);
    //}
}
/// <summary>
/// Checks that the given user id belongs to the currently signed-in user and
/// that the stored password still matches (guards against stale login cookies).
/// </summary>
/// <returns>true when the id matches the active user and credentials agree</returns>
private bool Security(int userid)
{
    if (userid == 0)
        return false;
    var member = _memberService.GetById(userid);
    // NOTE(review): the right-hand side of the password comparison was
    // reconstructed from a redacted source — confirm ViewBag.User exposes
    // the password used here.
    var value = (userid == ViewBag.User.ID && member.Password == ViewBag.User.Password);
    return value;
}
/// <summary>
/// Serializes a success payload for the client, honouring an optional JSONP
/// callback (_NxpCallback). Dates are formatted as "yyyy-MM-dd HH:mm:ss".
/// </summary>
/// <param name="data">result object</param>
/// <param name="attachments">extra info</param>
private ActionResult ShowResult(object data, object attachments = null)
{
    var model = new HttpResponseModel
    {
        Data = data,
        Attachments = attachments
    };
    if (!string.IsNullOrEmpty(_NxpCallback))
        return Content(string.Format("{0}({1})", _NxpCallback, WebHelper.Json(model, "yyyy-MM-dd HH:mm:ss")));
    return Json(model);
}
/// <summary>
/// Serializes an error payload for the client, honouring an optional JSONP
/// callback (_NxpCallback).
/// </summary>
/// <param name="error">error message</param>
/// <param name="code">error code (non-zero means failure)</param>
private ActionResult LogError(string error, int code = 1)
{
    // Same unqualified type and initializer shape as ShowResult, for consistency
    // (previously "new Models.HttpResponseModel()" here vs "new HttpResponseModel()" there).
    var model = new HttpResponseModel
    {
        Code = code,
        Message = error
    };
    if (string.IsNullOrEmpty(_NxpCallback))
        return Json(model);
    return Content(string.Format("{0}({1})", _NxpCallback, WebHelper.Json(model, "yyyy-MM-dd HH:mm:ss")));
}
}
}
<file_sep>/Mvc.Web.Android/Controllers/ErrorController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Mvc.Web.Android.Controllers
{
public class ErrorController : Controller
{
    /// <summary>
    /// Generic error page. Null-safe: errorUrl may arrive as null when bound
    /// from a route (the previous errorUrl.Length check would have thrown).
    /// </summary>
    public ActionResult Error(string errorUrl = "")
    {
        if (!string.IsNullOrEmpty(errorUrl))
        {
            //SmsManager.Send(errorUrl, string.Empty);
        }
        return View();
    }

    /// <summary>
    /// 404 page; shows the supplied message when present, otherwise a default.
    /// </summary>
    public ActionResult NotFound(string err = "")
    {
        ViewBag.NotFoundMessage = "您所访问的页面没找到!";
        if (!string.IsNullOrEmpty(err))
        {
            ViewBag.NotFoundMessage = err;
        }
        return View();
    }
}
}
| b72252d8cef12eddbd5734cb7d4aabb8431730cb | [
"JavaScript",
"C#"
] | 22 | C# | shouchangfayi/mvc | 6e454a65f7fe1a4364986c74d3762c2a5cbbfb7a | 1c22592e0a89cbc00a075addce12cc05d3fbba47 | |
refs/heads/master | <file_sep>package com.twu.biblioteca;
import org.junit.Test;
import static org.junit.Assert.*;
public class MovieTest {
@Test
public void testToString() {
Movie movie = new Movie("The Mooovie", "<NAME>", "2014", "10");
assertEquals("The Mooovie\tCow, Arthur\t2014\t10", movie.toString());
}
@Test
public void testEquals() {
Movie movie = new Movie("The Mooovie", "<NAME>", "2014", "10");
assertTrue(movie.equals(new Movie("The Mooovie", "<NAME>", "2014", "10")));
}
}
<file_sep>package com.twu.biblioteca;
import java.util.*;
/*
This is the CONSOLE class. Here, we read in any message typed in by the user,
perform appropriate operations, and then return a message indicating the result.
*/
class Console {
// Catalogue of books and movies available for checkout/return.
private Library library;
// Hard-coded demo accounts, seeded in the constructor.
private ArrayList<User> userList;
// True once login(...) has succeeded.
private boolean isLoggedIn;
// The user whose credentials matched; only meaningful while isLoggedIn is true.
private User activeUser;
// Builds the library and seeds three fixed demo users.
// (Names/emails appear redacted as "<NAME>"/"<EMAIL>" in this copy of the source.)
Console() {
library = new Library();
isLoggedIn = false;
userList = new ArrayList<User>();
userList.add(new User("101-3345", "letmein", "<NAME>",
"<EMAIL>","(979) 606-5684"));
userList.add(new User("777-0987", "sevens",
"<NAME>", "<EMAIL>", "(978) 960-2396"));
userList.add(new User("123-4567", "password",
"<NAME>", "<EMAIL>", "(661) 574-7566"));
}
// Greeting printed once at startup.
String getWelcomeMessage() {
return "Hello!\nWelcome to Biblioteca.\nPlease type your commands below.";
}
// Menu text; checkout/return/user-info entries are only offered when logged in.
String getMainMenu() {
if (isLoggedIn)
return "\nMAIN MENU:\n" +
"List Books\n" +
"List Movies\n" +
"Checkout <Title>\n" +
"Return <Title>\n" +
"View User Info\n" +
"Quit";
else
return "\nMAIN MENU:\n" +
"List Books\n" +
"List Movies\n" +
"Login\n" +
"Quit";
}
// Dispatches one user command and returns the response text.
// Checkout/Return/user-info silently fall through to the error message when
// the user is not logged in.
String readMessage(String in) {
if (in.equals("List Books")) {
return listResources("book");
} else if(in.equals("List Movies")) {
return listResources("movie");
} else if (isACheckoutMessage(in) && isLoggedIn) {
return performCheckOutSequence(in);
} else if (isAReturnMessage(in) && isLoggedIn) {
return performReturnSequence(in);
} else if (in.equals("View User Info") && isLoggedIn) {
return activeUser.getUserInfo();
} else if (in.equals("Quit")) {
return quit();
}
else {
return "Select a valid option!";
}
}
// Attempts a login; on success records the active user and returns the
// logged-in menu, otherwise returns a failure message.
public String login(String userId, String password) {
User u = getUserWithThisLogin(userId, password);
if (u != null) {
isLoggedIn = true;
activeUser = u;
return "Successfully logged in!" + getMainMenu();
} else {
return "Failed login";
}
}
// Linear scan for a user matching the credentials; null when none match.
private User getUserWithThisLogin(String userId, String password) {
for (User user : userList) {
if (user.isThisLogin(userId, password)) {
return user;
}
}
return null;
}
// True when the command's first word is "Return".
private boolean isAReturnMessage(String message) {
String[] splitIntoWords = message.split(" ");
return (splitIntoWords[0].equals("Return"));
}
// Handles "Return <Title>": checks the resource back in when valid.
// NOTE(review): the confirmation text always says "book" even for movies.
private String performReturnSequence(String in) {
String title = getAllWordsExceptFirstWord(in);
if (isAValidReturn(title)) {
Resource toReturn = library.getResourceByTitle(title);
toReturn.checkIn();
return toReturn.getTitle() + " has been successfully returned.\nThank you for returning the book.";
}
else {
return "This is not a valid book to return.";
}
}
// True when the titled resource exists and is currently checked out.
private boolean resourceWithThisTitleIsCheckedOut(String title) {
Resource r = library.getResourceByTitle(title);
return !r.isCheckedIn();
}
// Handles "Checkout <Title>": checks the resource out when available.
private String performCheckOutSequence(String in) {
String title = getAllWordsExceptFirstWord(in);
if (isAValidCheckout(title)) {
Resource toCheckout = library.getResourceByTitle(title);
toCheckout.checkOut();
return toCheckout.getTitle() + " has been successfully checked out.\nThank you! Enjoy the book.";
}
else {
return "That book is not available.";
}
}
// A return is valid only for a known title that is currently checked out.
private boolean isAValidReturn(String title) {
return library.containsTitle(title) && resourceWithThisTitleIsCheckedOut(title);
}
// A checkout is valid only for a known title that is currently checked in.
private boolean isAValidCheckout(String title) {
return library.containsTitle(title) && !resourceWithThisTitleIsCheckedOut(title);
}
// True when the command's first word is "Checkout".
private boolean isACheckoutMessage(String message) {
String[] splitIntoWords = message.split(" ");
return (splitIntoWords[0].equals("Checkout"));
}
// Strips the command verb, leaving the title that follows the first space.
private String getAllWordsExceptFirstWord(String message) {
int indexOfSpace = message.indexOf(' ');
return message.substring(indexOfSpace + 1); // the book title
}
// mode is "book" or "movie"; delegates column formatting to the library.
private String listResources(String mode) {
return library.getResourceListInColumns(mode);
}
// Farewell message for the Quit command.
String quit() {
return "Goodbye!";
}
}
<file_sep>package com.twu.biblioteca;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assert.assertNotEquals;
public class BookTest {

    /** Builds the fixture book used by every test in this class. */
    private Book sampleBook() {
        return new Book("Emma", "<NAME>", "1815");
    }

    @Test
    public void testToString() {
        assertEquals(sampleBook().toString(), "Emma\tAusten, Jane\t1815");
    }

    @Test
    public void testToColumnWithCheckedOutBooks() {
        Book subject = sampleBook();

        assertEquals("Emma <NAME> 1815", subject.toColumn(4, 12));
        subject.checkOut();
        // Checked-out books disappear from the column listing.
        assertEquals("", subject.toColumn(4, 12));
    }

    @Test
    public void testEquals() {
        assertTrue(sampleBook().equals(sampleBook()));
    }
}
<file_sep>package com.twu.biblioteca;
public class Movie extends Resource {
// Rating stored as a string (MovieTest uses "10").
private String rating;
/**
 * NOTE(review): the declared parameter names do not match how the values are
 * routed. super(...) is called as (titleArg, directorArg, yearArg) against
 * Resource(title, creator, year), so the inherited field `creator` ends up
 * holding the THIRD constructor argument and `year` the SECOND. MovieTest
 * passes (title, director, year, rating), and the toString/toColumn ordering
 * below compensates so output reads title/director/year/rating. Confirm the
 * intended argument order with all callers before "fixing" anything here.
 */
public Movie(String titleArg, String yearArg, String directorArg, String ratingArg) {
super(titleArg, directorArg, yearArg); // CAUTION: argument order
rating = ratingArg;
}
// Returns the inherited creator field (see routing note on the constructor).
public String getDirector() {
return creator;
}
public String getRating() {
return rating;
}
public String getTitle() {
return title;
}
// Returns the inherited year field (see routing note on the constructor).
public String getYear() {
return year;
}
public String getCreator() {
return getDirector();
}
// Fixed-width row: title padded to titleLength+3, then year, creator and
// rating, with rating aligned after titleLength+directorLength+13 columns.
public String toColumn(int titleLength, int directorLength) {
StringBuilder row = new StringBuilder(title);
while (row.length() < titleLength + 3) { // need spaces to be even with longest title
row.append(" ");
}
row.append(year);
row.append(" ");
row.append(creator);
while (row.length() < titleLength + directorLength + 9 + 4) { // 4 chars for the year
row.append(" ");
}
row.append(rating);
return row.toString();
}
// Tab-separated form used by Resource.equals and by MovieTest.
public String toString() {
return title + "\t" + year + "\t" + creator + "\t" + rating;
}
}
<file_sep>package com.twu.biblioteca;
// Common base for Book and Movie: title/creator/year plus checked-in state.
public abstract class Resource {
protected String title;
protected String year;
protected String creator;
// True while the resource is available in the library.
protected boolean isCheckedIn;
// New resources start checked in (available).
public Resource(String titleArg, String creatorArg, String yearArg) {
title = titleArg;
year = yearArg;
creator = creatorArg;
isCheckedIn = true;
}
public String getTitle() {
return title;
}
public String getCreator() {
return creator;
}
public String getYear() {
return year;
}
// Fixed-width row for the column listing; widths come from the longest entries.
public abstract String toColumn(int titleLength, int creatorLength);
public abstract String toString();
// NOTE(review): this overloads rather than overrides Object.equals (no
// hashCode either) — equality is by the subclasses' toString output.
public boolean equals(Resource other) {
return toString().equals(other.toString());
}
public boolean isCheckedIn() {
return isCheckedIn;
}
public void checkOut() {
isCheckedIn = false;
}
public void checkIn() {
isCheckedIn = true;
}
// Checked downcast; throws IllegalArgumentException when this is not a Book.
public Book toBook() {
if (this instanceof Book) {
return (Book) this;
} else {
throw new IllegalArgumentException("toBook called on a non-book object");
}
}
// Checked downcast; throws IllegalArgumentException when this is not a Movie.
public Movie toMovie() {
if (this instanceof Movie) {
return (Movie) this;
} else {
throw new IllegalArgumentException("toMovie called on a non-Movie object");
}
}
}
<file_sep>package com.twu.biblioteca;
import java.util.*;
import java.io.InputStream;
public class BibliotecaApp {

    /** Entry point: greet the user, then process commands from standard input. */
    public static void main(String[] args) {
        Console console = new Console();
        welcome(console);
        InputStream stdin = System.in;
        listen(stdin, console);
    }

    // this accepts a general InputStream for testing purposes
    public static void listen(InputStream in, Console console) {
        Scanner scanner = new Scanner(in);
        while (scanner.hasNext()) {
            String command = scanner.nextLine();
            if (command.equals("Login")) { // special case
                System.out.println("User ID:");
                String userId = scanner.nextLine();
                System.out.println("Password:");
                String password = scanner.nextLine();
                System.out.println(console.login(userId, password));
                continue;
            }
            System.out.println(console.readMessage(command));
            if (command.equals("Quit")) { // special case
                break;
            }
        }
    }

    /** Prints the welcome banner followed by the main menu. */
    public static void welcome(Console console) {
        System.out.println(console.getWelcomeMessage());
        System.out.println(console.getMainMenu());
    }
}
| 6e8129567f078c8778837522a4df7a8db300a9d7 | [
"Java"
] | 6 | Java | mmbarnett/twu-biblioteca-michael-barnett | fbc9359bb177f5c6db76d16cc50f76f8aed15317 | 82567c41c3239478f9e28d36994b574a5263ec27 | |
refs/heads/master | <file_sep># The following lines were added by compinstall
zstyle ':completion:*' completer _complete _ignored _correct _approximate
zstyle ':completion:*' max-errors 2
zstyle :compinstall filename '/home/mmcfadden/.zshrc'
autoload -Uz compinit
compinit
# End of lines added by compinstall
# Lines configured by zsh-newuser-install
HISTFILE=~/.histfile
HISTSIZE=1000
SAVEHIST=100000
setopt appendhistory autocd extendedglob notify
unsetopt beep
bindkey -v
# End of lines configured by zsh-newuser-install
#
# User specific aliases and functions
alias d='ls -sF'
alias la='ls -al'
alias lt='ls -lt'
alias C=clear
alias f=pushd
alias b=popd
alias h='history 1 -1'
alias j='jobs -l'
alias a='alias'
export PROMPT=$'%U%B%F{blue}[%n@%m: %~]%u\n%# %b%f'
<file_sep># Source global definitions
alias d="ls -sF"
alias la="ls -al"
alias lt="ls -lt"
alias C=clear
alias f=pushd
alias b=popd
alias h=history
alias j="jobs -l"
alias a=alias
alias vi=vim
alias dbgon="export DYNINST_DEBUG_PROCCONTROL=1"
alias dbgoff="export DYNINST_DEBUG_PROCCONTROL="
# Set DISPLAY so X forwarding works through mrsh from desktop.
# Expansions are quoted: with an unquoted empty $DISPLAY the old test
# collapsed to `[ -n ]`, which is always true.
if [ -n "$DISPLAY" ] && [ -z "$(echo "$DISPLAY" | cut -d: -f1)" ]; then
    export DISPLAY=$(hostname -s)${DISPLAY}
fi
if [ -f "$HOME/.Xdefaults" ]; then
    xrdb "$HOME/.Xdefaults"
fi
# Bold underlined "user@host:cwd (session)" prompt.
export PS1='\[$(tput bold)$(tput smul)\]\u@\H:\W\[$(tput rmul)\] ($SESSION)\n$ '
export HISTCONTROL=ignoreboth:erasedups # no duplicate entries
shopt -s histappend # append history file
export PROMPT_COMMAND="history -a" # update histfile after every command
<file_sep>stty erase
# Machine architecture and hostname drive the per-site paths below.
H=`uname -m`
HOST=`hostname`
# Restricted-zone ("rz*") hosts use a different workspace root.
if [[ $HOST == "rz"* ]]; then
export WS=/usr/workspace/wsrzd/martymcf
else
export WS=/usr/workspace/wsb/martymcf
fi
export TMPDIR=$WS/tmp
export SSH_ASKPASS=""
if [ -e $HOME/.bashrc ]; then
. $HOME/.bashrc
fi
# Extract the major TOSS release number from e.g. "toss-release-3.x-y".
if [ -f /etc/toss-release ]; then
tossversion=`cat /etc/toss-release`
tossversion=${tossversion#*-}
tossversion=${tossversion#*-}
tossversion=${tossversion%%.*}
else
tossversion=0
fi
# Per-release tool setup: dotkit on TOSS 2, modules (mostly disabled) on TOSS 3.
if [ "$tossversion" = "2" ]; then
echo "Welcome to TOSS version $tossversion"
. /usr/local/tools/dotkit/init.sh
# use -q cmake git vimvi boost
use -q git vimvi boost
use
module load cudatoolkit/7.5 gnu/4.9.2
module list
elif [ "$tossversion" = "3" ]; then
echo "Welcome to TOSS version $tossversion"
# module purge
# module use /opt/modules/modulefiles
# module load git gcc cmake cudatoolkit/7.5
# module load fftw
# module load dyninst/9.1.0
# module load totalview
# module load stat
# module load launchmon/1.0.2
# module load opt
# module list
else
echo "Welcome to the jungle..."
fi
# Interactive choice of working environment; SESSION selects the tree below
# and is shown in the PS1 prompt from .bashrc.
echo "Which environment to you wish to start?"
select sess in "archer" "coral" "cuda" "di-mmap" "dyninst"; do
case $sess in
archer ) export SESSION="archer"; break;;
coral ) export SESSION="coral"; break;;
cuda ) export SESSION="cuda"; break;;
di-mmap ) export SESSION="di-mmap"; break;;
dyninst ) export SESSION="dyninst"; break;;
esac
done
echo "$SESSION environment established"
# Spack shell integration when a checkout exists.
if [ -e /g/g0/martymcf/archer/src/spack ]; then
export SPACK_ROOT=/g/g0/martymcf/archer/src/spack
. $SPACK_ROOT/share/spack/setup-env.sh
fi
export EDITOR=/usr/bin/vim
# Per-session source/build/install trees, keyed by architecture.
export INSTALLDIR=$HOME/$SESSION/install/$H
export BUILDDIR=$HOME/$SESSION/build/$H
export SRCDIR=$HOME/$SESSION/src
# Dyninst runtime/library locations consumed by tools and tests.
export DYNINSTHDR=$INSTALLDIR/include
export DYNINSTLIB=$INSTALLDIR/lib
export DYNINSTAPI_RT_LIB=$INSTALLDIR/lib/libdyninstAPI_RT.so
export LD_LIBRARY_PATH=.:$DYNINSTLIB:$LD_LIBRARY_PATH
export PATH=$INSTALLDIR/bin:$PATH
# Search path for vim across the Dyninst source tree.
export vipath=.,$SRCDIR/dyninst,$SRCDIR/dyninst/common/h,$SRCDIR/dyninst/common/src,$SRCDIR/dyninst/dwarf/h,$SRCDIR/dyninst/dwarf/src,$SRCDIR/dyninst/elf/h,$SRCDIR/dyninst/elf/src,$SRCDIR/dyninst/proccontrol/h,$SRCDIR/dyninst/proccontrol/src,$SRCDIR/dyninst/proccontrol/src/loadLibrary,$SRCDIR/dyninst/symlite/h,$SRCDIR/dyninst/symlite/src,$SRCDIR/dyninst/symtabAPI/h,$SRCDIR/dyninst/symtabAPI/src,/usr/include,$SRCDIR/build/dyninst/common/h,$CUDA_INSTALL_PATH/extras/Debugger/include/,,
"Shell"
] | 3 | Shell | 3daughterdad/dotfiles | 9853829a3fe3b7e56a45f57bd172d76c5dd10091 | 987993ad62c97723be40a6b05e2f285bc14f98f8 | |
refs/heads/master | <file_sep># nu
Go Library for scraping NU.nl RSS feed to JSON output
> XML is crap. Really. There are no excuses. XML is nasty to parse for humans, and it's a disaster to parse even for computers. There's just no reason for that horrible crap to exist. - <NAME>
##Example
```go
package main
import (
"fmt"
"github.com/Rivalo/nu"
)
func main() {
json, err := nu.JSON("http://www.nu.nl/rss/Algemeen")
if err != nil {
fmt.Println(err)
return
}
fmt.Println(string(json))
}
```
<file_sep>package nu
import (
"encoding/json"
"fmt"
)
//Struct fetches the NU RSS feed at url and returns it decoded as an XML value.
func Struct(url string) (XML, error) {
	parsed, parseErr := parseToFeed(url)
	if parseErr != nil {
		return XML{}, parseErr
	}
	return parsed, nil
}
//JSON returns the NU RSS feed as JSON in []byte
func JSON(url string) (JSON []byte, err error) {
	feed, err := parseToFeed(url)
	if err != nil {
		return nil, err
	}
	// NOTE(review): this prints the whole decoded feed to stdout on every call
	// — looks like leftover debug output; removing it also requires dropping
	// the "fmt" import above.
	fmt.Println(feed)
	JSON, err = json.Marshal(feed)
	if err != nil {
		return nil, err
	}
	return
}
<file_sep>package nu
//XML2 contains link of NU.nl Items (Hack for encoding/xml)
//It decodes only the <link> elements of each item; parse.go performs a second
//decoding pass with this shape because the main XML struct cannot capture the
//plain <link> alongside the Atom links in one pass.
type xML2 struct {
	Channel channel2 `xml:"channel"`
}

//Channel2 contains channel of NUXML2 feed
type channel2 struct {
	Item []article2 `xml:"item"`
}

//Article2 constains the link(s) of a single Nu.nl article; decoded as a slice
//because an item can carry more than one <link> element.
type article2 struct {
	Link []string `xml:"link" json:"link"`
}
<file_sep>package nu
//XML is a struct containing Nu.nl RSS file
type XML struct {
	Channel Channel `xml:"channel"`
}

//Channel contains channel of NUXML feed: feed metadata plus the article items.
type Channel struct {
	Title       string    `xml:"title" json:"title"`
	Description string    `xml:"description" json:"description"`
	Language    string    `xml:"language" json:"language"`
	Copyright   string    `xml:"copyright" json:"copyright"`
	LastBuildDate string  `xml:"lastBuildDate" json:"lastbuilddate"`
	Item        []Article `xml:"item" json:"item"`
}

//Article constains information of single Nu.nl article.
//Link carries no xml tag: it is filled in by a second decoding pass
//(see preFeedToJSON in parse.go), not by encoding/xml directly.
type Article struct {
	Title       string     `xml:"title" json:"title"`
	Link        string     `json:"link"`
	Description string     `xml:"description" json:"description"`
	PubData     string     `xml:"pubDate" json:"pubdate"`
	GUID        int        `xml:"guid" json:"guid"`
	Categories  []string   `xml:"category" json:"categories"`
	Creator     string     `xml:"creator" json:"creator"`
	Rights      string     `xml:"rights" json:"rights"`
	Image       Enclosure  `xml:"enclosure" json:"image"`
	Related     []AtomLink `xml:"http://www.w3.org/2005/Atom link" json:"related"`
}

//Enclosure contains the article image URL (from the enclosure's url attribute).
type Enclosure struct {
	Link string `xml:"url,attr" json:"url"`
}

//AtomLink contains reference (related) articles via Atom link attributes.
type AtomLink struct {
	Link  string `xml:"href,attr"`
	Title string `xml:"title,attr"`
}
<file_sep>package nu
import (
"encoding/xml"
"io"
"net/http"
)
//ParseToFeed returns JSON in []Byte from NU.nl RSS XML feed as string
//ParseToFeed fetches the NU.nl RSS feed at url and decodes it into an XML value.
//NOTE(review): the URL is fetched TWICE — the response body (an io.Reader) is
//consumed by the first decoding pass (nuToPreFeed), so a second request feeds
//the link-only pass (preFeedToJSON). Reading the body once into memory would
//halve the network traffic; confirm before changing.
func parseToFeed(url string) (Feed XML, err error) {
	file, err := http.Get(url)
	if err != nil {
		return
	}
	defer file.Body.Close()
	// First pass: everything except the item links.
	PreFeed, err := nuToPreFeed(file.Body)
	if err != nil {
		return
	}
	// Second fetch + pass: the item links (see xML2).
	file, err = http.Get(url)
	if err != nil {
		return
	}
	defer file.Body.Close()
	Feed, err = preFeedToJSON(PreFeed, file.Body)
	if err != nil {
		return
	}
	return
}
// nuToPreFeed decodes the full feed structure (minus item links) from f.
func nuToPreFeed(f io.Reader) (XML, error) {
	var parsed XML
	if decodeErr := xml.NewDecoder(f).Decode(&parsed); decodeErr != nil {
		return XML{}, decodeErr
	}
	return parsed, nil
}
// preFeedToJSON decodes the link-only view of the feed from f (see xML2) and
// copies each item's first link into the corresponding item of PreFeed.
// Items without a <link>, or index mismatches between the two passes, are
// skipped instead of panicking (the previous code indexed Link[0] and the
// second item slice unconditionally).
func preFeedToJSON(PreFeed XML, f io.Reader) (Feed XML, err error) {
	NuFeed2 := xML2{}
	d := xml.NewDecoder(f)
	err = d.Decode(&NuFeed2)
	if err != nil {
		return XML{}, err
	}
	for i := 0; i < len(PreFeed.Channel.Item) && i < len(NuFeed2.Channel.Item); i++ {
		if len(NuFeed2.Channel.Item[i].Link) > 0 {
			PreFeed.Channel.Item[i].Link = NuFeed2.Channel.Item[i].Link[0]
		}
	}
	return PreFeed, err
}
| 17132618fa92824731d898b851df469164ba484e | [
"Markdown",
"Go"
] | 5 | Markdown | Rivalo/nu | a4026455b16ae2c2fd1ea8921cae8eb30a28bbb1 | 413e8ca9115f8643b025a97edda9069d341eecab | |
refs/heads/master | <repo_name>kovalchuk-mykhailo/test-schedule-app<file_sep>/src/selectors/index.js
import { createSelector } from "reselect";
import { STATUS_BOOKED } from "../constants/LessonStatus";
// Root slice selector for the schedule data.
export const scheduleSelector = (state) => state.scheduleData.schedule;

// Builds a 24x7 grid (hour x weekday) of lesson cells from the flat schedule;
// entries are addressed by 1-based `start` (hour) and `day`.
export const lessonsSelector = createSelector(scheduleSelector, (schedule) => {
  const grid = Array.from({ length: 24 }, () =>
    Array.from({ length: 7 }, () => ({ status: null }))
  );

  schedule.forEach((entry) => {
    grid[+entry.start - 1][+entry.day - 1] = { status: `${entry.status}` };
  });

  return grid;
});

// Number of booked lessons in the schedule.
export const countSelector = createSelector(
  scheduleSelector,
  (schedule) =>
    schedule.filter((entry) => entry.status === STATUS_BOOKED).length
);
<file_sep>/src/reducers/index.js
import { combineReducers } from "redux";
import { scheduleReducer } from "./sheduleReducer";
// Root reducer: the schedule slice lives under state.scheduleData.
const allReducers = combineReducers({
scheduleData: scheduleReducer,
});
export default allReducers;
import React, { useEffect } from "react";
import { connect } from "react-redux";
import { getScheduleAsyncRequest } from "../../actions/schedule";
import ListSchedule from "./components/ListSchedule";
import ScheduleTableContainer from "./components/ScheduleTableContainer";
import { scheduleSelector } from "../../selectors";
import styles from "./styles.module.css";
const HomePage = ({ schedule, getScheduleAsyncRequest }) => {
useEffect(() => {
getScheduleAsyncRequest();
}, []);
return (
<div className={styles.homePage}>
<ListSchedule schedule={schedule} />
<ScheduleTableContainer schedule={schedule} />
</div>
);
};
const mapStateToProps = (state) => ({
schedule: scheduleSelector(state),
});
const mapDispatchToProps = (dispatch) => ({
getScheduleAsyncRequest: () => {
dispatch(getScheduleAsyncRequest());
},
});
export default connect(mapStateToProps, mapDispatchToProps)(HomePage);
<file_sep>/src/pages/HomePage/components/ListScheduleLesson/index.js
import React from "react";
import { STATUS_FREE } from "../../../../constants/LessonStatus";
import { DAY_TEXT, HOUR_TEXT, START_TEXT } from "../../../../constants/Texts";
import styles from "./styles.module.css";
const ListScheduleLesson = ({ lesson }) => {
if (lesson.status === STATUS_FREE) {
return (
<li className={styles.listItem}>
<p>
{DAY_TEXT}: {lesson.day}
</p>
<p>
{START_TEXT}: {lesson.start} {HOUR_TEXT}
</p>
</li>
);
} else return null;
};
export default ListScheduleLesson;
<file_sep>/src/constants/LessonStatus.js
// Lesson slot states as delivered by the schedule API.
export const STATUS_FREE = "free";
export const STATUS_BOOKED = "booked";
<file_sep>/src/constants/Schedule.js
// Action type identifiers for the schedule slice.
// Fixed copy-paste value: GET_SCHEDULE_ASYNC_REQUEST previously held the
// string "GET_CURRENCIES_ASYNC_REQUEST". All visible code compares action
// types via this constant, so correcting the value is behavior-safe.
export const GET_SCHEDULE_ASYNC_REQUEST = "GET_SCHEDULE_ASYNC_REQUEST";
export const GET_SCHEDULE_SUCCESS = "GET_SCHEDULE_SUCCESS";
export const GET_SCHEDULE_FAILURE = "GET_SCHEDULE_FAILURE";
export const GET_SCHEDULE_REQUEST = "GET_SCHEDULE_REQUEST";
export const SET_SCHEDULE = "SET_SCHEDULE";
<file_sep>/src/pages/HomePage/components/ScheduleTable/index.js
import React from "react";
import TableBody from "./components/TableBody";
import TableHead from "./components/TableHead";
import styles from "./styles.module.css";
const ScheduleTable = () => {
return (
<table className={styles.tableContainer}>
<TableHead />
<TableBody />
</table>
);
};
export default ScheduleTable;
<file_sep>/src/actions/schedule.js
import {
GET_SCHEDULE_ASYNC_REQUEST,
GET_SCHEDULE_FAILURE,
GET_SCHEDULE_REQUEST,
GET_SCHEDULE_SUCCESS,
SET_SCHEDULE,
} from "../constants/Schedule";
// Saga/thunk trigger: start the asynchronous schedule fetch.
export const getScheduleAsyncRequest = () => ({
type: GET_SCHEDULE_ASYNC_REQUEST,
});
// Marks the request as in flight.
export const getScheduleRequest = () => ({
type: GET_SCHEDULE_REQUEST,
});
// Delivers the fetched schedule.
export const getScheduleSuccess = (schedule) => ({
type: GET_SCHEDULE_SUCCESS,
schedule,
});
// Delivers the fetch error.
export const getScheduleFailure = (error) => ({
type: GET_SCHEDULE_FAILURE,
error,
});
// Replaces the schedule without touching the loading flag.
export const setSchedule = (schedule) => ({
type: SET_SCHEDULE,
schedule,
});
<file_sep>/src/pages/HomePage/components/ScheduleTable/components/TableHead/index.js
import React from "react";
// Static header row for the week view.
// NOTE(review): the weekday labels and day numbers ("Пон. 19" … "Нд. 25") are
// hard-coded for one specific week — confirm whether they should be derived
// from the current date instead.
const TableHead = () => (
<thead>
<tr>
<th scope="col"></th>
<th scope="col">Пон. 19</th>
<th scope="col">Вт. 20</th>
<th scope="col">Ср. 21</th>
<th scope="col">Чт. 22</th>
<th scope="col">Пт. 23</th>
<th scope="col">Сб. 24</th>
<th scope="col">Нд. 25</th>
</tr>
</thead>
);
export default TableHead;
<file_sep>/src/pages/HomePage/components/ScheduleTable/components/TableBody/index.js
import React from "react";
import { connect } from "react-redux";
import TableRow from "./components/TableRow";
import { lessonsSelector } from "../../../../../../selectors";
const TableBody = ({ lessons }) => (
<tbody>
{lessons.map((row, index) => {
const hour = index + 1;
const header = hour < 10 ? `0${hour}:00` : `${hour}:00`;
return <TableRow key={index} items={row} header={header} hour={hour} />;
})}
</tbody>
);
const mapStateToProps = (state) => ({
lessons: lessonsSelector(state),
});
export default connect(mapStateToProps)(TableBody);
<file_sep>/src/reducers/sheduleReducer.js
import {
GET_SCHEDULE_ASYNC_REQUEST,
GET_SCHEDULE_FAILURE,
GET_SCHEDULE_REQUEST,
GET_SCHEDULE_SUCCESS,
SET_SCHEDULE,
} from "../constants/Schedule";
const initialState = {
  schedule: [],
  isLoading: false,
  error: "",
};

// Reducer for the schedule slice: tracks the schedule list, a loading flag
// and the last error message.
export const scheduleReducer = (state = initialState, action) => {
  switch (action.type) {
    // Both request actions only flip the loading flag on.
    case GET_SCHEDULE_ASYNC_REQUEST:
    case GET_SCHEDULE_REQUEST:
      return { ...state, isLoading: true };
    case GET_SCHEDULE_SUCCESS:
      return { ...state, schedule: action.schedule, isLoading: false };
    case GET_SCHEDULE_FAILURE:
      return { ...state, isLoading: false, error: action.error };
    case SET_SCHEDULE:
      return { ...state, schedule: action.schedule };
    default:
      return state;
  }
};
| 4ae747a573f92056e0c7675924b37ab9a60f04b0 | [
"JavaScript"
] | 11 | JavaScript | kovalchuk-mykhailo/test-schedule-app | 466658980a675c88c0e2b4cac98af5579090542b | 2ffd3b0e8d4e5f997623f294fa849f1d20172f03 | |
refs/heads/master | <repo_name>resocie/lab-20160303-Processamento-da-Coleta<file_sep>/processadordetweets.py
import re
f = open("collect_foradilma-20151216.txt")
# r = "on_data"
# r = "\[on_data\] \{\"created_at\":\".{3} .{3} \d\d \d\d:\d\d:\d\d \+\d{4} 2015\",\"id\":(\d*),\"id_str\":\"\d+\",\"text\":(\".*?\"),\"source\":"
r = "\[on_data\] \{\"created_at\":\"(.*?)\",\"id\":(\d*),\"id_str\":\"\d+\",\"text\":(\".*?\"),\"source\":.*\"screen_name\":\"(.*?)\","
for l in f:
match = re.search(r,l)
if match:
print("%s,%s,%s,%s" % (match.group(1), match.group(2), match.group(3), match.group(4))) | edead7140bd61f90c9b9105ccc0961c228bb87a2 | [
"Python"
] | 1 | Python | resocie/lab-20160303-Processamento-da-Coleta | 88a366fc2f0112667a260cd752dd86897dd7de0f | 322db74060806bc0dc11c0c17d5a00371adf7bef | |
refs/heads/main | <repo_name>TaniaVarduca/Lucrari-licenta<file_sep>/ContinutTabel.java
package View;
import Controller.*;
import Domain.*;
import Repository.*;
import java.text.DecimalFormat;
import java.lang.reflect.Array;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
public class ContinutTabel {

    /** Prints one fixed-width table row per student. */
    public static void afisStudenti(List<Student> studentList) {
        for (Student student : studentList) {
            System.out.format("|%11d|%-10s|%-10s|%-26s|%10d|\n", student.getIDstudent(), student.getNume(), student.getPrenume(), student.getTitluLucrare(), student.getSustinut());
        }
    }

    /** Prints one fixed-width table row per professor. */
    public static void afisProfesori(List<Profesor> profList) {
        for (Profesor profesor : profList) {
            System.out.format("|%12d|%10s \n", profesor.getIDprofesor(), profesor.getNume());
        }
    }

    /** Prints one fixed-width table row per thesis topic. */
    public static void afisTeme(List<Tema> temaList) {
        for (Tema tema : temaList) {
            System.out.format("|%8s|%-22s|%-10s|%-15s\n", tema.getIDtema(), tema.getTitlu(), tema.getTip(), tema.getNumeProf());
        }
    }

    /** Prints only the topics supervised by the given professor. */
    public static void afisTemeProf(List<Tema> temaList, String numeProf) {
        for (Tema tema : temaList) {
            if (numeProf.equals(tema.getNumeProf())) {
                System.out.format("|%8s|%-22s|%-10s|%-15s\n", tema.getIDtema(), tema.getTitlu(), tema.getTip(), tema.getNumeProf());
            }
        }
    }

    /** Prints the students whose thesis title matches one of the professor's topics. */
    public static void afisStudentiProf(List<Student> studentList, List<Tema> temaList, String numeProf) {
        for (Tema tema : temaList) {
            if (numeProf.equals(tema.getNumeProf())) {
                for (Student student : studentList) {
                    if (student.getTitluLucrare().equals(tema.getTitlu())) {
                        System.out.format("|%11d|%-10s|%-10s|%-26s|%10d|\n", student.getIDstudent(), student.getNume(), student.getPrenume(), student.getTitluLucrare(), student.getSustinut());
                    }
                }
            }
        }
    }
}<file_sep>/Student.java
package Domain;
// Plain data holder for a student and their thesis ("lucrare") state.
public class Student {
private int IDstudent = 0; // student id, defaults to 0
private String nume = null; // last name
private String prenume = null; // first name
private String titluLucrare = null; // thesis title
private int sustinut = 0; // defended flag: 1 = defended, 0 = not yet
// Default constructor.
public Student(){ //constructor implicit
}
// Full constructor.
public Student(int IDstudent, String nume, String prenume, String titluLucrare, int sustinut){ //constr. cu parametri
this.IDstudent = IDstudent;
this.nume = nume;
this.prenume = prenume;
this.titluLucrare = titluLucrare;
this.sustinut = sustinut;
}
public int getIDstudent() { //get IDstudent
return IDstudent;
}
public void setIDstudent(int IDstudent) { //set IDstudent
this.IDstudent = IDstudent;
}
public String getNume() { //get last name
return nume;
}
public void setNume(String nume) { //set last name
this.nume = nume;
}
public String getPrenume() { //get first name
return prenume;
}
public void setPrenume(String prenume) { //set first name
this.prenume = prenume;
}
public String getTitluLucrare() { //get thesis title
return titluLucrare;
}
public void setTitluLucrare(String titluLucrare) { //set thesis title
this.titluLucrare = titluLucrare;
}
public int getSustinut() { //get defended flag
return sustinut;
}
public void setSustinut(int sustinut) { //set defended flag
this.sustinut = sustinut;
}
// Comma-separated serialization, matching the repository's file format.
public String toString() { //afisare string
return IDstudent + "," + nume + "," + prenume + "," + titluLucrare + ","+ sustinut;
}
}
<file_sep>/UpdateStudent.java
package UI;
import java.awt.BorderLayout;
import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
import Controller.ControllerStudent;
import Repository.RepoStudent;
import javax.swing.JTextPane;
import java.awt.Font;
import java.awt.Image;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.awt.Color;
import javax.imageio.ImageIO;
import javax.swing.JButton;
import javax.swing.JTextField;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
public class UpdateStudent extends JFrame {
RepoStudent repo_stud = new RepoStudent("C:\\Users\\TANIA\\eclipse-workspace\\lab5\\src\\Studenti.txt");
private ControllerStudent ctrl_stud = new ControllerStudent(repo_stud);
private JPanel contentPane;
private JTextField textField;
private JTextField textField_1;
private JTextField textField_2;
private JTextField textField_3;
private JTextField textField_4;
Image image = null;
/**
* Launch the application.
*/
// Launches the "update student" Swing frame on the event-dispatch thread;
// any startup exception is only printed to stderr.
public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
UpdateStudent frame = new UpdateStudent();
frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Create the frame.
*/
public UpdateStudent() {
ctrl_stud.readFromFile_Student();
setTitle("Adaugare student");
//setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
setBounds(100, 100, 470, 605);
contentPane = new JPanel();
contentPane.setBackground(new Color(245, 245, 220));
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
setContentPane(contentPane);
contentPane.setLayout(null);
JTextPane txtpnIntroducetiDatelePentru = new JTextPane();
txtpnIntroducetiDatelePentru.setBackground(new Color(224, 255, 255));
txtpnIntroducetiDatelePentru.setFont(new Font("Sitka Small", Font.PLAIN, 19));
txtpnIntroducetiDatelePentru.setBounds(41, 41, 377, 44);
txtpnIntroducetiDatelePentru.setText("Introduceti datele pentru modificare:");
txtpnIntroducetiDatelePentru.setEditable(false);
contentPane.add(txtpnIntroducetiDatelePentru);
JTextPane txtpnId = new JTextPane();
txtpnId.setFont(new Font("Sitka Small", Font.BOLD, 17));
txtpnId.setText("ID:");
txtpnId.setBounds(55, 134, 40, 28);
contentPane.add(txtpnId);
JTextPane txtpnNume = new JTextPane();
txtpnNume.setFont(new Font("Sitka Text", Font.BOLD, 17));
txtpnNume.setText("Nume:");
txtpnNume.setBounds(55, 192, 61, 28);
contentPane.add(txtpnNume);
JTextPane txtpnPrenume = new JTextPane();
txtpnPrenume.setFont(new Font("Sitka Text", Font.BOLD, 17));
txtpnPrenume.setText("Prenume:");
txtpnPrenume.setBounds(55, 247, 87, 28);
contentPane.add(txtpnPrenume);
JTextPane txtpnTitluLucrare = new JTextPane();
txtpnTitluLucrare.setFont(new Font("Sitka Text", Font.BOLD, 17));
txtpnTitluLucrare.setText("Titlu lucrare:");
txtpnTitluLucrare.setBounds(55, 309, 124, 28);
contentPane.add(txtpnTitluLucrare);
JTextPane txtpnSustinut = new JTextPane();
txtpnSustinut.setFont(new Font("Sitka Text", Font.BOLD, 17));
txtpnSustinut.setText(" Sustinut \r\n(1=da/0=nu):");
txtpnSustinut.setBounds(55, 366, 122, 54);
contentPane.add(txtpnSustinut);
textField = new JTextField();
textField.setColumns(10);
textField.setBounds(243, 134, 108, 28);
contentPane.add(textField);
textField_1 = new JTextField();
textField_1.setColumns(10);
textField_1.setBounds(243, 192, 108, 28);
contentPane.add(textField_1);
textField_2 = new JTextField();
textField_2.setColumns(10);
textField_2.setBounds(243, 247, 108, 28);
contentPane.add(textField_2);
textField_3 = new JTextField();
textField_3.setColumns(10);
textField_3.setBounds(243, 309, 108, 28);
contentPane.add(textField_3);
textField_4 = new JTextField();
textField_4.setColumns(10);
textField_4.setBounds(243, 372, 108, 28);
contentPane.add(textField_4);
JButton btnNewButton = new JButton("Update student");
btnNewButton.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
JOptionPane.showMessageDialog(null, "Studentul a fost updatat!");
}
});
btnNewButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
int id = Integer.parseInt(textField.getText());
String nume = textField_1.getText();
String prenume = textField_2.getText();
String titlu = textField_3.getText();
int sustinut = Integer.parseInt(textField_4.getText());
if (nume.equals("")) {
nume = ctrl_stud.searchByCodStudent_Obiect(id).getNume();
}
if (prenume.equals("")) {
prenume = ctrl_stud.searchByCodStudent_Obiect(id).getPrenume();
}
if (titlu.equals("")) {
titlu = ctrl_stud.searchByCodStudent_Obiect(id).getTitluLucrare();
}
try {
ctrl_stud.updateStudent(id, nume, prenume, titlu, sustinut);
} catch (Exception e) {
e.getMessage();
}
}
});
btnNewButton.setFont(new Font("Tw Cen MT Condensed", Font.BOLD, 20));
btnNewButton.setBounds(142, 475, 152, 44);
contentPane.add(btnNewButton);
}
/*
Descriere: citeste un numar natural
Input: String sir
Output: int
*/
public int ReadNaturalNumber(String sir) {
System.out.println(sir);
int n = -1;
while (n < 0) {
InputStreamReader reader = new InputStreamReader(System.in);
BufferedReader buff = new BufferedReader(reader);
try {
String text = buff.readLine();
n = Integer.parseInt(text);
if (n < 0)
System.out.println("Introduceti un numar mai mare ca 0!");
} catch (IOException e) {
System.out.println("Introduceti un numar pozitiv" + e.getMessage());
} catch (NumberFormatException ex) {
System.out.println("Introduceti un numar pozitiv" + ex.getMessage());
}
}
return n;
}
/*
Descriere: citeste un string
Input: String sir
Output: String
*/
public String ReadString(String sir) {
System.out.println(sir);
InputStreamReader reader = new InputStreamReader(System.in);
BufferedReader buff = new BufferedReader(reader);
String text = "";
try {
text = buff.readLine();
} catch (IOException e) {
System.out.println("Eroare la citire!" + e.getMessage());
}
return text;
}
}
<file_sep>/ShowProfesori.java
package UI;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.Font;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.border.EmptyBorder;
import Domain.Profesor;
import Controller.ControllerProfesor;
import Repository.RepoProfesor;
import javax.swing.UIManager;
public class ShowProfesori extends JFrame {
RepoProfesor repo_prof = new RepoProfesor("C:\\Users\\TANIA\\eclipse-workspace\\lab5\\src\\Profesori.txt");
private ControllerProfesor ctrl_prof = new ControllerProfesor(repo_prof);
private JPanel contentPane;
private JTable table;
/**
* Launch the application.
*/
public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
ShowStudenti frame = new ShowStudenti();
frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Create the frame.
*/
public ShowProfesori() {
setTitle("<NAME>");
setBounds(100, 100, 820, 287);
contentPane = new JPanel();
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
contentPane.setLayout(new BorderLayout(0, 0));
setContentPane(contentPane);
ctrl_prof.readFromFile_Profesor();
List<Profesor> listProfesor = new ArrayList<>();
listProfesor = ctrl_prof.getAll();
String[] columnNames =
{"ID",
"NUME SI PRENUME"};
Object[][] data = new Object[listProfesor.size()+1][2];
data[0][0] = "ID";
data[0][1] = "NUME SI PRENUME";
for (int i = 1; i < listProfesor.size()+1; i++) {
data[i][0] = listProfesor.get(i-1).getIDprofesor();
data[i][1] = listProfesor.get(i-1).getNume();
}
table = new JTable(data,columnNames);
table.setFont(new Font("Arial",3, 15));
table.setBackground(UIManager.getColor("info"));
contentPane.add(table, BorderLayout.CENTER);
}
}
<file_sep>/Profesor.java
package Domain;
public class Profesor {
private int IDprofesor = 0;
private String nume = null;
public Profesor(){ //constructor implicit
}
public Profesor(int IDprofesor, String nume){ //constr. cu parametri
this.IDprofesor = IDprofesor;
this.nume = nume;
}
public int getIDprofesor() { //get IDprofesor
return IDprofesor;
}
public void setIDprofesor(int IDprofesor) { //set IDprofesor
this.IDprofesor = IDprofesor;
}
public String getNume() { //get nume
return nume;
}
public void setNume(String nume) { //set nume
this.nume = nume;
}
public String toString() { //afisare string
return IDprofesor + "," + nume;
}
}
<file_sep>/README.md
# Lucrari-licenta
Gestionarea lucrarilor de licenta si master
<file_sep>/CapTabel.java
package View;
import Domain.Student;
import Domain.Profesor;
import java.util.List;
public class CapTabel {
public static void afisStudenti1() {
System.out.println();
System.out.println("=========================================================================");
System.out.format("|%10s|%7s |%8s |%20s |%9s |\n", "Cod student", "Nume", "Prenume", "Titlu lucrare", "Sustinut");
System.out.println("=========================================================================");
}
public static void afisStudenti2() {
System.out.println("=========================================================================");
System.out.println();
}
public static void afisProfesori1() {
System.out.println();
System.out.println("==================================");
System.out.format("|%10s| %10s |\n", "Cod profesor", "Nume si prenume");
System.out.println("==================================");
}
public static void afisProfesori2() {
System.out.println("==================================");
System.out.println();
}
public static void afisTeme1() {
System.out.println();
System.out.println("=============================================================");
System.out.format("|%8s|%14s |%7s |%15s |\n", "Cod tema", "Titlu", "Tip", "Nume Profesor");
System.out.println("=============================================================");
}
public static void afisTeme2() {
System.out.println("=============================================================");
System.out.println();
}
}
<file_sep>/UI.java
package UI;
import Controller.*;
import Domain.Student;
import Domain.Profesor;
import java.io.*;
import java.util.ArrayList;
import java.time.LocalDate;
import java.util.List;
import View.*;
public class UI {
private ControllerStudent ctrl_stud;
private ControllerProfesor ctrl_prof;
private ControllerTema ctrl_tema;
public UI(ControllerStudent ctrl_stud, ControllerProfesor ctrl_prof, ControllerTema ctrl_tema) {
this.ctrl_stud = ctrl_stud;
this.ctrl_prof = ctrl_prof;
this.ctrl_tema = ctrl_tema;
}
public int ReadNaturalNumber(String sir) {
System.out.println(sir);
int n = -1;
while (n < 0) {
InputStreamReader reader = new InputStreamReader(System.in);
BufferedReader buff = new BufferedReader(reader);
try {
String text = buff.readLine();
n = Integer.parseInt(text);
if (n < 0)
System.out.println("Introduceti un numar mai mare ca 0!");
} catch (IOException e) {
System.out.println("Introduceti un numar pozitiv" + e.getMessage());
} catch (NumberFormatException ex) {
System.out.println("Introduceti un numar pozitiv" + ex.getMessage());
}
}
return n;
}
public String ReadString(String sir) {
System.out.println(sir);
InputStreamReader reader = new InputStreamReader(System.in);
BufferedReader buff = new BufferedReader(reader);
String text = "";
try {
text = buff.readLine();
} catch (IOException e) {
System.out.println("Eroare la citire!" + e.getMessage());
}
return text;
}
public void MeniuPrincipal() {
System.out.println("---------- MENIU PRINCIPAL ----------");
System.out.println(" a. Studenti");
System.out.println(" b. Profesori");
System.out.println(" c. Teme");
System.out.println(" x. Iesire");
System.out.println("-------------------------------------");
}
public void MeniuStudent() {
System.out.println("--------------- MENIU ----------------");
System.out.println(" 1. Adaugare Student");
System.out.println(" 2. Update Student");
System.out.println(" 3. Stergere Student");
System.out.println(" 4. Afisare lista Studenti");
System.out.println(" 5. Afisare lista Studenti care si-au sustinut lucrarea");
System.out.println(" 6. Afisare lista Studenti care nu si-au sustinut lucrarea");
System.out.println(" 7. Afisare lista Studenti care nu s-au inscris pe liste");
System.out.println(" 0. Inapoi");
System.out.println("-------------------------------------");
}
public void MeniuProfesor() {
System.out.println("--------------- MENIU ----------------");
System.out.println(" 1. Adaugare Profesor");
System.out.println(" 2. Afisare lista Profesori");
System.out.println(" 3. Afisare lista teme pentru un Profesor");
System.out.println(" 4. Afisare lista studenti asignati la un Profesor");
System.out.println(" 0. Inapoi");
System.out.println("-------------------------------------");
}
public void MeniuTema() {
System.out.println("--------------- MENIU ----------------");
System.out.println(" 1. Adaugare Tema");
System.out.println(" 2. Afisare lista Teme");
System.out.println(" 0. Inapoi");
System.out.println("-------------------------------------");
}
public void runStudent() {
MeniuStudent();
int option = ReadNaturalNumber("Introduceti optiunea dorita: ");
ctrl_stud.readFromFile_Student();
ctrl_prof.readFromFile_Profesor();
ctrl_tema.readFromFile_Tema();
while (option != 0) {
switch (option) {
case 1:
int IDstudent = ReadNaturalNumber("Introduceti codul studentului: ");
String nume = ReadString("Introduceti numele studentului: ");
String prenume = ReadString("Introduceti prenumele studentului: ");
String titluLucrare = ReadString("Introduceti titlul lucrarii: ");
int sustinut = ReadNaturalNumber("Lucrare sustinuta=1/nesustinuta=0: ");
try {
ctrl_stud.addStudent(IDstudent, nume, prenume, titluLucrare, sustinut);
} catch (Exception e) {
e.getMessage();
}
break;
case 2:
int codStudent = ReadNaturalNumber("Introduceti codul studentului: ");
if (ctrl_stud.searchByCodStudent(codStudent)) {
String nume1 = ReadString("Introduceti numele studentului: ");
String prenume1 = ReadString("Introduceti prenumele studentului: ");
String titluLucrare1 = ReadString("Introduceti titlul lucrarii: ");
int sustinut1 = ReadNaturalNumber("Lucrare sustinuta=1/nesustinuta=0: ");
if (nume1.equals("")) {
nume1 = ctrl_stud.searchByCodStudent_Obiect(codStudent).getNume();
}
if (prenume1.equals("")) {
prenume1 = ctrl_stud.searchByCodStudent_Obiect(codStudent).getPrenume();
}
if (titluLucrare1.equals("")) {
titluLucrare1 = ctrl_stud.searchByCodStudent_Obiect(codStudent).getTitluLucrare();
}
if (String.valueOf(sustinut1).equals("")) {
sustinut1 = ctrl_stud.searchByCodStudent_Obiect(codStudent).getSustinut();
}
ctrl_stud.updateStudent(codStudent, nume1, prenume1, titluLucrare1, sustinut1);
} else {
System.out.println("Nu exista codul introdus! ");
}
break;
case 3:
int codStudent1 = ReadNaturalNumber("Introduceti codul studentului: ");
try {
if (ctrl_stud.searchByCodStudent(codStudent1)) {
List<Student> lista_stud = ctrl_stud.getAllCod(codStudent1);
for (int i = 0; i < lista_stud.size(); i++) {
ctrl_stud.deleteStudent(codStudent1);
}
ctrl_stud.deleteStudent(codStudent1);
}
} catch (Exception e) {
System.out.println(e.getMessage());
}
break;
case 4:
CapTabel.afisStudenti1();
ContinutTabel.afisStudenti(ctrl_stud.getAll());
CapTabel.afisStudenti2();
break;
case 5:
CapTabel.afisStudenti1();
ContinutTabel.afisStudenti(ctrl_stud.getAllSustinut());
CapTabel.afisStudenti2();
break;
case 6:
CapTabel.afisStudenti1();
ContinutTabel.afisStudenti(ctrl_stud.getAllNuSustinut());
CapTabel.afisStudenti2();
break;
case 7:
CapTabel.afisStudenti1();
ContinutTabel.afisStudenti(ctrl_stud.getAllNeinscris());
CapTabel.afisStudenti2();
break;
case 0:
break;
default:
System.out.println("Optiune gresita!");
break;
}
MeniuStudent();
option = ReadNaturalNumber("Introduceti optiunea dorita: ");
}
}
public void runProfesor() {
MeniuProfesor();
int option = ReadNaturalNumber("Introduceti optiunea dorita: ");
ctrl_stud.readFromFile_Student();
ctrl_prof.readFromFile_Profesor();
ctrl_tema.readFromFile_Tema();
while (option != 0) {
switch (option) {
case 1:
int IDprofesor = ReadNaturalNumber("Introduceti codul profesorului: ");
String nume = ReadString("Introduceti numele profesorului: ");
try {
ctrl_prof.addProfesor(IDprofesor, nume);
} catch (Exception e) {
e.getMessage();
}
break;
case 2:
CapTabel.afisProfesori1();
ContinutTabel.afisProfesori(ctrl_prof.getAll());
CapTabel.afisProfesori2();
break;
case 3:
String nume1 = ReadString("Introduceti numele profesorului: ");
CapTabel.afisTeme1();
ContinutTabel.afisTemeProf(ctrl_tema.getAll(), nume1);
CapTabel.afisTeme2();
break;
case 4:
String nume2 = ReadString("Introduceti numele profesorului: ");
CapTabel.afisStudenti1();
ContinutTabel.afisStudentiProf(ctrl_stud.getAll(), ctrl_tema.getAll(), nume2);
CapTabel.afisStudenti2();
break;
case 0:
break;
default:
System.out.println("Optiune gresita!");
break;
}
MeniuProfesor();
option = ReadNaturalNumber("Introduceti optiunea dorita: ");
}
}
public void runTema() {
MeniuTema();
int option = ReadNaturalNumber("Introduceti optiunea dorita: ");
ctrl_stud.readFromFile_Student();
ctrl_prof.readFromFile_Profesor();
ctrl_tema.readFromFile_Tema();
while (option != 0) {
switch (option) {
case 1:
int IDtema = ReadNaturalNumber("Introduceti codul temei: ");
String titlu = ReadString("Introduceti titlul temei: ");
String tip = ReadString("Introduceti tipul temei (licenta/dizertatie): ");
String numeProf = ReadString("Introduceti numele profesorului: ");
try {
ctrl_tema.addTema(IDtema, titlu, tip, numeProf);
} catch (Exception e) {
e.getMessage();
}
break;
case 2:
CapTabel.afisTeme1();
ContinutTabel.afisTeme(ctrl_tema.getAll());
CapTabel.afisTeme2();
break;
case 0:
break;
default:
System.out.println("Optiune gresita!");
break;
}
MeniuTema();
option = ReadNaturalNumber("Introduceti optiunea dorita: ");
}
}
public void runApp() {
MeniuPrincipal();
String option_principal = ReadString("Introduceti optiunea dorita: ");
String out_option = new String("x");
while (!(option_principal.equals(out_option))) {
switch (option_principal) {
case "a":
runStudent();
break;
case "b":
runProfesor();
break;
case "c":
runTema();
break;
case "x":
break;
default:
System.out.println("Optiune gresita!");
break;
}
MeniuPrincipal();
option_principal = ReadString("Introduceti optiunea dorita: ");
}
System.out.println("Program terminat!");
}
} | d846e92cd49f02f3756ee42d6fa988c2ba727048 | [
"Markdown",
"Java"
] | 8 | Java | TaniaVarduca/Lucrari-licenta | af1e94521e7feb8b46efa47ca3da028f44b25eeb | 624c66c216b7dae02c822c6d035a371c9f7febf2 | |
refs/heads/master | <file_sep>/**
* Created by <EMAIL> on 05.05.16.
*/
package com.vsu.nil.widgets
import java.awt.Dimension
import java.awt.Graphics
import java.util.*
import javax.swing.JComponent
import kotlin.concurrent.timer
fun TouchPanel.button(func: TouchButton.() -> Unit = {}) = addTouchableChild(TouchButton(), func)
class TouchButton : JComponent(), TouchableContent, KClickable {
override val kClickHandlers: MutableList<() -> Unit> = ArrayList()
override var activated: Boolean = false
set(value) {
size = if (value) parentActivatedSize else parentNormalSize
field = value
}
override var parentActivatedSize = Dimension()
override var parentNormalSize = Dimension()
var isClicked = false
override fun click() {
super.click()
isClicked = true
repaint()
timer(initialDelay = 1000, period = 666) {
isClicked = false
repaint()
cancel()
}
}
override fun paintComponent(g: Graphics?) {
super.paintComponent(g)
g?.drawLine(0, 0, width, height)
if (isClicked) g?.drawLine(0, height, width, 0)
}
}<file_sep>/**
* Created by <EMAIL> on 24.04.16.
*/
package com.vsu.nil
import com.vsu.nil.widgets.button
import com.vsu.nil.widgets.touchPanel
import com.vsu.nil.wrappers.*
import java.awt.FlowLayout
import java.awt.GridLayout
import java.awt.event.MouseAdapter
import java.awt.event.MouseEvent
fun main(args: Array<String>) {
window {
layout = GridLayout(4, 1)
extendedState = MAXIMIZED_BOTH
label {
text = "Buttons"
horizontalAlignment = CENTER
} to this
val status = label {
horizontalAlignment = CENTER
}
panel {
layout = FlowLayout()
button {
text = "One"
addActionListener { status.text = text }
} to this
button {
text = "Two"
addActionListener { status.text = text }
} to this
button {
text = "Three"
addActionListener { status.text = text }
} to this
} to this
status to this
panel {
layout = null
touchPanel { touchPanel ->
normalWidth = 100
normalHeight = 100
events {
onMouseMoved = { activated = true }
onMouseExited = { activated = false }
}
val button = button { } to this
val mouseAdapter = object : MouseAdapter() {
override fun mouseClicked(e: MouseEvent?) {
button.click()
}
override fun mouseMoved(e: MouseEvent?) {
touchPanel.dispatchEvent(e)
}
override fun mouseExited(e: MouseEvent?) {
touchPanel.dispatchEvent(e)
}
}
button.addMouseListener(mouseAdapter)
button.addMouseMotionListener(mouseAdapter)
} to this
} to this
events {
onClosing = { System.exit(0) }
}
}
}<file_sep>/**
* Created by <EMAIL> on 29.04.16.
*/
package com.vsu.nil.widgets
import com.vsu.nil.kinect.trackPanel
import com.vsu.nil.wrappers.KMouseAdapter
import com.vsu.nil.wrappers.addChild
import java.awt.BorderLayout
import java.awt.Color
import java.awt.Dimension
import java.awt.Point
import java.util.*
import javax.swing.JComponent
import javax.swing.JPanel
import javax.swing.border.LineBorder
fun touchPanel(func: TouchPanel.(TouchPanel) -> Unit = {}): TouchPanel {
val panel = TouchPanel()
trackPanel(panel)
return addChild(panel, func)
}
fun <T> TouchPanel.addTouchableChild(child: T, func: T.() -> Unit): T
where T : JComponent, T : TouchableContent {
touchableChildren.add(child)
child.parentNormalSize = normalSize
child.parentActivatedSize = activatedSize
child.func()
return child
}
const val INITIAL_WIDTH = 100
const val INITIAL_HEIGHT = 100
class TouchPanel : JPanel, TouchableContainer {
override val touchableChildren by lazy { ArrayList<TouchableContent>() }
val events = KMouseAdapter()
var normalWidth = INITIAL_WIDTH
set(value) {
field = value
normalSize.width = value
if (!activated) size = normalSize
}
var normalHeight = INITIAL_HEIGHT
set(value) {
field = value
normalSize.height = value
if (!activated) size = normalSize
}
override var normalSize = Dimension(normalWidth, normalHeight)
var activatedWidth = (normalWidth * 1.5).toInt()
set(value) {
field = value
activatedSize.width = value
if (activated) size = activatedSize
}
var activatedHeight = (normalHeight * 1.5).toInt()
set(value) {
field = value
activatedSize.height = value
if (activated) size = activatedSize
}
override var activatedSize = Dimension(activatedWidth, activatedHeight)
var borderColor = Color.GREEN
override var activated: Boolean = false
set(value) {
if (value xor field) {
border = if (value) {
size = activatedSize
LineBorder(borderColor, 3)
} else {
size = normalSize
null
}
touchableChildren.forEach { it.activated = value }
field = value
}
}
constructor() : super(BorderLayout()) {
addMouseListener(events)
addMouseMotionListener(events)
}
override fun isTouched(point: Point): Boolean {
val width: Int
val height: Int
if (activated) {
width = activatedWidth
height = activatedHeight
} else {
width = normalWidth
height = normalHeight
}
return x <= point.x && point.x <= x + width && y <= point.y && point.y <= y + height
}
}
// TODO возможно это уже не понадобится
fun TouchPanel.content(func: TouchContent.() -> Unit = {}) {
val child = TouchContent()
addTouchableChild(child, func)
add(child)
}
class TouchContent : JPanel(), TouchableContent {
override var activated: Boolean = false
set(value) {
size = if (value) parentActivatedSize else parentNormalSize
field = value
}
override var parentActivatedSize = Dimension()
override var parentNormalSize = Dimension()
}<file_sep>/**
* Created by <EMAIL> on 05.05.16.
*/
package com.vsu.nil.widgets
import java.awt.Dimension
import java.awt.Point
interface Touchable {
var activated: Boolean
}
interface TouchableContainer : Touchable {
val touchableChildren: MutableList<TouchableContent>
var normalSize: Dimension
var activatedSize: Dimension
fun isTouched(point: Point): Boolean
}
interface TouchableContent : Touchable {
var parentNormalSize: Dimension
var parentActivatedSize: Dimension
}
interface KClickable {
val kClickHandlers: MutableList<() -> Unit>
fun click() {
kClickHandlers.forEach { it() }
}
fun onKClicked(func: () -> Unit) {
kClickHandlers.add(func)
}
}<file_sep>/**
* Created by <EMAIL> on 06.05.16.
*/
package com.vsu.nil.wrappers
import java.awt.event.MouseEvent
import java.awt.event.MouseListener
import java.awt.event.MouseMotionListener
class KMouseAdapter : MouseMotionListener, MouseListener {
operator fun invoke(func: KMouseAdapter.() -> Unit) {
this.func()
}
var onMouseEntered: (e: MouseEvent?) -> Unit = {}
override fun mouseEntered(e: MouseEvent?) {
onMouseEntered(e)
}
var onMouseClicked: (e: MouseEvent?) -> Unit = {}
override fun mouseClicked(e: MouseEvent?) {
onMouseClicked(e)
}
var onMouseReleased: (e: MouseEvent?) -> Unit = {}
override fun mouseReleased(e: MouseEvent?) {
onMouseReleased(e)
}
var onMouseExited: (e: MouseEvent?) -> Unit = {}
override fun mouseExited(e: MouseEvent?) {
onMouseExited(e)
}
var onMousePressed: (e: MouseEvent?) -> Unit = {}
override fun mousePressed(e: MouseEvent?) {
onMousePressed(e)
}
var onMouseMoved: (e: MouseEvent?) -> Unit = {}
override fun mouseMoved(e: MouseEvent?) {
onMouseMoved(e)
}
var onMouseDragged: (e: MouseEvent?) -> Unit = {}
override fun mouseDragged(e: MouseEvent?) {
onMouseDragged(e)
}
}<file_sep>/**
* Created by <EMAIL> on 29.04.16.
*/
package com.vsu.nil.wrappers
import java.awt.event.WindowEvent
import java.awt.event.WindowFocusListener
import java.awt.event.WindowListener
import java.awt.event.WindowStateListener
import java.util.*
import javax.swing.JFrame
private val windowAdapters by lazy { HashMap<JFrame, WindowAdapter>() }
val JFrame.events: WindowAdapter
get() {
var adapter = windowAdapters[this]
if (adapter === null) {
adapter = WindowAdapter()
windowAdapters[this] = adapter
addWindowStateListener(adapter)
addWindowFocusListener(adapter)
addWindowListener(adapter)
}
return adapter
}
class WindowAdapter : WindowListener, WindowStateListener, WindowFocusListener {
operator fun invoke(func: WindowAdapter.() -> Unit) {
this.func()
}
var onDeiconified: (e: WindowEvent?) -> Unit = {}
override fun windowDeiconified(e: WindowEvent?) {
onDeiconified(e)
}
var onActivated: (e: WindowEvent?) -> Unit = {}
override fun windowActivated(e: WindowEvent?) {
onActivated(e)
}
var onDeactivated: (e: WindowEvent?) -> Unit = {}
override fun windowDeactivated(e: WindowEvent?) {
onDeactivated(e)
}
var onIconofied: (e: WindowEvent?) -> Unit = {}
override fun windowIconified(e: WindowEvent?) {
onIconofied(e)
}
var onClosing: (e: WindowEvent?) -> Unit = {}
override fun windowClosing(e: WindowEvent?) {
onClosing(e)
}
var onClosed: (e: WindowEvent?) -> Unit = {}
override fun windowClosed(e: WindowEvent?) {
onClosed(e)
}
var onOpened: (e: WindowEvent?) -> Unit = {}
override fun windowOpened(e: WindowEvent?) {
onOpened(e)
}
var onStateChanged: (e: WindowEvent?) -> Unit = {}
override fun windowStateChanged(e: WindowEvent?) {
onStateChanged(e)
}
var onLostFocus: (e: WindowEvent?) -> Unit = {}
override fun windowLostFocus(e: WindowEvent?) {
onLostFocus(e)
}
var onGainedFocus: (e: WindowEvent?) -> Unit = {}
override fun windowGainedFocus(e: WindowEvent?) {
onGainedFocus(e)
}
}<file_sep>/**
* Created by <EMAIL> on 25.04.16.
*/
package com.vsu.nil.wrappers
import java.awt.Component
import java.awt.Container
import javax.swing.JButton
import javax.swing.JFrame
import javax.swing.JLabel
import javax.swing.JPanel
fun window(func: JFrame.(JFrame) -> Unit = {}): JFrame {
val window = JFrame()
window.func(window)
window.setVisible(true)
return window
}
fun panel(func: JPanel.(JPanel) -> Unit = {}) = addChild(JPanel(), func)
fun label(func: JLabel.(JLabel) -> Unit = {}) = addChild(JLabel(), func)
fun button(func: JButton.(JButton) -> Unit = {}) = addChild(JButton(), func)
fun <C : Component> addChild(child: C, func: C.(C) -> Unit = {}): C {
child.func(child)
return child
}
val CENTER = JLabel.CENTER
val MAXIMIZED_BOTH = JFrame.MAXIMIZED_BOTH
infix fun <C : Component> C.to(container: Container): C {
container.add(this)
return this
}<file_sep># Natural-Interaction-Library<file_sep>/**
* Created by <EMAIL> on 03.05.16.
*/
package com.vsu.nil.kinect
import com.vsu.nil.widgets.TouchableContainer
import java.awt.Point
import java.util.*
import kotlin.concurrent.timer
private val trackedPanels = HashSet<TouchableContainer>()
fun trackPanel(panel: TouchableContainer) {
trackedPanels.add(panel)
}
class HandTracker {
val timer: Timer = Timer()
/**
* Координаты отслеживаемой руки.
* null, если рука не отслеживается
*/
var position: Point? = null
fun updatePanels() {
val pos = position
pos?.let {
for (panel in trackedPanels) {
if (panel.activated) {
if (!panel.isTouched(pos)) {
var count = 0
timer(initialDelay = 250, period = 250) {
val newPos = position
pos?.let {
if (panel.isTouched(pos)) cancel()
if (count++ >= 2) {
panel.activated = false
cancel()
}
} ?: cancel()
}
}
} else {
if (panel.isTouched(pos)) {
var count = 0
timer(initialDelay = 250, period = 250) {
val newPos = position
pos?.let {
if (!panel.isTouched(pos)) cancel()
if (count++ >= 2) {
panel.activated = true
cancel()
}
} ?: cancel()
}
}
}
}
}
}
} | 65c04ae4128ce93fde4f380613f1d2bab9984f21 | [
"Markdown",
"Kotlin"
] | 9 | Kotlin | osspike/Natural-Interaction-Library | e7a0a2abb5802cbb3e2a335c4066399dd7e18cc2 | 68826262fcfd4031bb886b3ff772bc65f0d31d66 | |
refs/heads/master | <file_sep># kawaiifrutas
[Edit on StackBlitz ⚡️](https://stackblitz.com/edit/kawaiifrutas)<file_sep>import { Component, EventEmitter, Input, Output } from '@angular/core';
export type Fruta = {
nombre: string;
cantidad: number;
precio: number;
};
@Component({
selector: 'app-carta',
templateUrl: './carta.component.html',
styleUrls: ['./carta.component.css']
})
export class CartaComponent {
@Input() frutas: Fruta[];
@Output() pagarFrutas = new EventEmitter();
borrarFrutaCarrito(index: number): void {
this.frutas.splice(index, 1);
}
obtenerTotal(): number {
let result = 0;
this.frutas.forEach(fruta => {
result = result + fruta.precio * fruta.cantidad;
});
return result;
}
pagar(): void {
this.pagarFrutas.emit(this.frutas)
}
}
<file_sep>import { Component, EventEmitter, Input, Output } from '@angular/core';
import { Fruta } from '../carta/carta.component';
@Component({
selector: 'app-objeto',
templateUrl: './objeto.component.html',
styleUrls: ['./objeto.component.css']
})
export class ObjetoComponent {
@Input() fruta : Fruta = { nombre : "", precio : null , cantidad : null};
@Output() borrarFruta = new EventEmitter();
borrar(){
this.borrarFruta.emit(this.fruta.nombre)
};
}<file_sep>import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { FormsModule } from '@angular/forms';
import { AppComponent } from './app.component';
import { HelloComponent } from './hello.component';
import { ListaCompraComponent } from './Componentes/lista-compra/lista-compra.component';
import { CartaComponent } from './Componentes/carta/carta.component';
import { ObjetoComponent } from './Componentes/objeto/objeto.component';
@NgModule({
imports: [ BrowserModule, FormsModule ],
declarations: [ AppComponent, HelloComponent, ListaCompraComponent, CartaComponent, ObjetoComponent, ],
bootstrap: [ AppComponent ]
})
export class AppModule { }
<file_sep>import { Component } from '@angular/core';
import { Fruta } from '../carta/carta.component';
@Component({
selector: 'app-lista-compra',
templateUrl: './lista-compra.component.html',
styleUrls: ['./lista-compra.component.css']
})
export class ListaCompraComponent {
frutas : Fruta [] = [];
nuevafruta : Fruta = {
nombre: "",
cantidad: null,
precio:null,
};
anadir(): void {
if (
this.isValid(this.nuevafruta)
) {
this.frutas.push(this.nuevafruta);
this.nuevafruta = {nombre:"",cantidad:null,precio:null}
}
};
isValid(nuevafruta: Fruta): boolean {
if (!nuevafruta?.nombre) {
alert('Indica el nombre de la fruta');
return false;
}
if (this.frutas.some(i => i.nombre == nuevafruta.nombre)) {
alert('Esta fruta ya está en el carrito');
return false;
}
if (!nuevafruta?.cantidad) {
alert('Indica la cantidad');
return false;
}
if (!nuevafruta?.precio) {
alert('Indica el precio');
return false;
}
return true;
}
pagar() :void {
this.frutas = [];
alert ("Pago Completado");
}
}
| 6d823101e82991c08dbbe16038da91373b5c2980 | [
"Markdown",
"TypeScript"
] | 5 | Markdown | Natguesan/kawaiifrutas | 80a4866dadd2c572fe056fcd1eefd6255492ee20 | 6eec6a929dd00cb42251a2c1593574de3b94d363 | |
refs/heads/master | <repo_name>ejoftheweb/Minigmand<file_sep>/app/src/main/java/uk/co/platosys/minigma/utils/XMLUtils.java
package uk.co.platosys.minigma.utils;
import org.jdom2.Document;
import org.jdom2.input.SAXBuilder;
import org.jdom2.output.XMLOutputter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import uk.co.platosys.minigma.Key;
import uk.co.platosys.minigma.Lock;
import uk.co.platosys.minigma.exceptions.BadPassphraseException;
import uk.co.platosys.minigma.exceptions.InvalidXMLException;
/**
* Static methods to handle org.jdom2.Documents
*/
/**
 * Static utility methods for converting org.jdom2.Document objects to and
 * from Minigma's Base64 text representation, plus placeholder encrypt/decrypt
 * hooks that will round-trip a Document through a Minigma Lock/Key pair.
 */
public class XMLUtils {

    /**
     * Decodes Base64-encoded XML text into an org.jdom2.Document.
     *
     * @param encoded Base64 text, as produced by {@link #encode(Document)}
     * @return the parsed Document
     * @throws InvalidXMLException if the text cannot be Base64-decoded or
     *         parsed as XML (the underlying failure is attached as the cause)
     */
    public static Document decode(String encoded) throws InvalidXMLException {
        SAXBuilder saxBuilder = new SAXBuilder();
        try {
            // MinigmaUtils.decode undoes the Base64 layer; SAXBuilder parses the raw XML bytes.
            return saxBuilder.build(new ByteArrayInputStream(MinigmaUtils.decode(encoded)));
        } catch (Exception x) {
            throw new InvalidXMLException("invalid xml", x);
        }
    }

    /**
     * Encodes an org.jdom2.Document as Base64 text.
     *
     * @param document the Document to serialise and encode
     * @return the Base64 encoding of the serialised Document
     * @throws IOException if serialisation fails. Unexpected runtime failures
     *         are now wrapped in an IOException with their cause preserved,
     *         instead of being silently swallowed and surfacing as a null
     *         return (the previous behaviour).
     */
    public static String encode(Document document) throws IOException {
        XMLOutputter xmlOutputter = new XMLOutputter();
        try {
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            xmlOutputter.output(document, byteArrayOutputStream);
            return MinigmaUtils.encode(byteArrayOutputStream.toByteArray(), true);
        } catch (IOException iox) {
            throw iox;
        } catch (Exception x) {
            // Bug fix: this branch used to be empty, so any non-IO failure was
            // discarded and the method fell through to "return null", hiding the
            // error from callers. Rethrow inside the already-declared IOException
            // contract so callers' existing catch blocks still apply.
            throw new IOException("could not encode XML document", x);
        }
    }

    /**
     * Takes encrypted data in the form of Base64 text, the Key for which it
     * was encrypted and its passphrase, and returns an org.jdom2.Document.
     *
     * <p>NOTE(review): not yet implemented — currently always returns null.
     * Callers should treat a null return as "unsupported" until this is done.
     *
     * @param encrypted Base64-encoded ciphertext
     * @param key the Minigma Key the data was encrypted for
     * @param passphrase the passphrase unlocking the Key
     * @return the decrypted Document, or null (unimplemented)
     */
    public static Document decrypt(String encrypted, Key key, char[] passphrase) throws InvalidXMLException, BadPassphraseException {
        //TODO
        return null;
    }

    /**
     * Encrypts an org.jdom2.Document to the PGP public key represented by the
     * supplied Lock object.
     *
     * <p>NOTE(review): not yet implemented — currently always returns null.
     * Callers should treat a null return as "unsupported" until this is done.
     *
     * @param document the Document to encrypt
     * @param lock the Lock (public key) to encrypt to
     * @return Base64-encoded ciphertext, or null (unimplemented)
     */
    public static String encrypt(Document document, Lock lock) {
        //TODO
        return null;
    }
}
<file_sep>/app/src/test/java/uk/co/platosys/minigma/VerifySignatureTest.java
package uk.co.platosys.minigma;
import org.junit.Before;
import org.junit.Test;
import uk.co.platosys.minigma.exceptions.Exceptions;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.utils.Kidney;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertTrue;
public class VerifySignatureTest {
LockStore lockstore;
//String username = TestValues.testUsernames[0];
Map<Fingerprint, String> createdFingerprints=new HashMap<>();
@Before
public void setup(){
try {
if (lockstore==null){lockstore=new MinigmaLockStore(TestValues.lockFile, true);}
File keysDirectory = TestValues.keyDirectory;
if (!keysDirectory.exists()) {
keysDirectory.mkdirs();
for (int i = 0; i < TestValues.testPassPhrases.length; i++) {
Lock lock = LockSmith.createLockset(TestValues.keyDirectory, lockstore, TestValues.testPassPhrases[i].toCharArray(), Algorithms.RSA);
createdFingerprints.put(lock.getFingerprint(), TestValues.testPassPhrases[i]);
}
}
}catch(Exception x){
Exceptions.dump("CTSCSetup", x);
}
}
@Test
public void verifySignatureTest(){
Key key=null;
Lock lock=null;
File signatureFile=null;
LockStore lockStore=null;
for(Fingerprint fingerprint:createdFingerprints.keySet()){
try {
key = new Key(new File(TestValues.keyDirectory, fingerprint.toBase64String()));
Signature signature = key.sign(TestValues.testText, TestValues.testPassPhrases[0].toCharArray());
System.out.println(Kidney.toString(signature.getKeyID())+":"+signature.getShortDigest());
signatureFile = new File(TestValues.signatureDirectory, signature.getShortDigest());
if (signatureFile.exists()) {
signatureFile.delete();
}
signature.encodeToFile(signatureFile);
lock = lockStore.getLock(fingerprint);
//System.out.println(Kidney.toString(lock.getLockID()));
}catch(Exception e) {
System.out.println("VST2 "+e.getClass().getName()+"\n "+ e.getMessage());
StackTraceElement[] stackTraceElements = e.getStackTrace();
for (StackTraceElement stackTraceElement:stackTraceElements){
System.out.println(stackTraceElement.toString());
}
}try{
Signature rereadSignature = new Signature(signatureFile);
//System.out.println(Kidney.toString(rereadSignature.getKeyID()));
assertTrue(lock.verify(TestValues.testText,rereadSignature));
}catch (Exception e){
System.out.println("VST3 "+ e.getMessage());
StackTraceElement[] stackTraceElements = e.getStackTrace();
for (StackTraceElement stackTraceElement:stackTraceElements){
System.out.println(stackTraceElement.toString());
}
}}
}
@Test
public void verifySignatureNotationsTest(){
Key key=null;
Lock lock=null;
File signatureFile=null;
LockStore lockStore=null;
try {
lockStore = new MinigmaLockStore(new File(TestValues.lockDirectory, "lockstore"), false);
}catch (MinigmaException e){
Exceptions.dump(e);
}
for(Fingerprint fingerprint:createdFingerprints.keySet()){
try {
key = new Key(new File(TestValues.keyDirectory,fingerprint.toBase64String()));
List<Notation> notationList = new ArrayList<>();
for (int i=0; i<TestValues.testNotationNames.length; i++){
Notation notation = new Notation(TestValues.testNotationNames[i], TestValues.testNotationValues[i]);
notationList.add(notation);
}
Signature signature = key.sign(TestValues.testText, notationList, TestValues.testPassPhrases[0].toCharArray());
System.out.println(Kidney.toString(signature.getKeyID())+":"+signature.getShortDigest());
signatureFile = new File(TestValues.signatureDirectory, signature.getShortDigest());
if (signatureFile.exists()) {
signatureFile.delete();
}
signature.encodeToFile(signatureFile);
lock = lockStore.getLock(fingerprint);
//System.out.println(Kidney.toString(lock.getLockID()));
}catch(Exception e) {
Exceptions.dump(e);
}try{
Signature rereadSignature = new Signature(signatureFile);
List<Notation> notations = rereadSignature.getNotations();
for(Notation notation:notations){
String notationName = notation.getName();
String notationValue = notation.getValue();
System.out.println(notationName + ":"+notationValue);
}
assertTrue(lock.verify(TestValues.testText,rereadSignature));
}catch (Exception e){
Exceptions.dump(e);
}
}}
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/votes/InvalidBallotException.java
package uk.co.platosys.minigma.votes;
/**
 * Thrown when a Minigma ballot fails validation.
 */
public class InvalidBallotException extends Exception {

    /**
     * @param msg a human-readable description of why the ballot is invalid
     */
    public InvalidBallotException(String msg) {
        super(msg);
    }

    /**
     * @param msg   a human-readable description of why the ballot is invalid
     * @param cause the underlying exception that made the ballot invalid
     */
    public InvalidBallotException(String msg, Throwable cause) {
        super(msg, cause);
    }
}<file_sep>/app/src/main/java/uk/co/platosys/minigma/Lock.java
/*
* Copyright <NAME> and Platosys.
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
package uk.co.platosys.minigma;
import android.util.Log;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.spongycastle.bcpg.ArmoredInputStream;
import org.spongycastle.bcpg.HashAlgorithmTags;
import org.spongycastle.bcpg.PublicKeyAlgorithmTags;
import org.spongycastle.bcpg.SignatureSubpacketTags;
import org.spongycastle.bcpg.attr.ImageAttribute;
import org.spongycastle.openpgp.*;
import org.spongycastle.openpgp.operator.KeyFingerPrintCalculator;
import org.spongycastle.openpgp.operator.PBESecretKeyDecryptor;
import org.spongycastle.openpgp.operator.jcajce.JcaKeyFingerprintCalculator;
import org.spongycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder;
import org.spongycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
import org.spongycastle.util.Arrays;
import uk.co.platosys.minigma.exceptions.BadPassphraseException;
import uk.co.platosys.minigma.exceptions.Exceptions;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.exceptions.MinigmaOtherException;
import uk.co.platosys.minigma.exceptions.SignatureException;
import uk.co.platosys.minigma.exceptions.UnsupportedAlgorithmException;
import uk.co.platosys.minigma.utils.Kidney;
import uk.co.platosys.minigma.utils.MinigmaOutputStream;
import uk.co.platosys.minigma.utils.MinigmaUtils;
/**
 * In Minigma, a Lock is the object used to lock something; once locked, it can
 * only be opened with a matching Key.
 *
 * Minigma Keys and Locks correspond to private keys and
 * public keys in other asymmetric crypto systems.
 *
 * Minigma is a fairly lightweight wrapper to OpenPGP, so a Minigma Lock can be instantiated
 * from OpenPGP public key material.
 *
 * Locks can be concatenated, so one can be instantiated for a group of people. If
 * this concatenated Lock is used to lock something, the locked object can be unlocked
 * by ANY of the corresponding Keys. We have plans for, but have not yet implemented a Lock concatenation
 * in which ALL of the corresponding Keys are required.
 *
 * A Lock object is normally instantiated by obtaining it from a LockStore.
 *
 * LockIDs are 64-bit longs but are not guaranteed to be unique. Neither, for that matter, are Fingerprints,
 * which are 160-bit numbers, but probabilistically they are. Better to use Fingerprints to identify Locks rather than
 * LockIDs, but sometimes the LockID is cool.
 *
 * @author edward
 *
 * Many Minigma methods, including several in the Lock class, are overloaded so that they can take either
 * a String, a BigBinary or a byte[] argument.
 */
public class Lock {
    // The underlying OpenPGP public-key-ring collection this Lock wraps.
    private PGPPublicKeyRingCollection publicKeyRingCollection;
    private static String TAG = "Lock";
    private KeyFingerPrintCalculator calculator; // NOTE(review): never assigned or read in this class
    //public static final String MULTIPLE_LOCK="multiple lock";
    private long lockID;              // 64-bit key ID of the primary public key of the first key ring
    private Fingerprint fingerprint;  // fingerprint of the primary public key of the first key ring
    private PGPPublicKey publicKey;   // primary public key of the first key ring
    private List<String> userIDs = new ArrayList<>();
    private String shortID;           // Base64 encoding of the 64-bit key ID

    /**
     * Creates a Lock object from base64-encoded OpenPGP public key material
     * @param encoded the base64-encoded string containing the public key
     */
    public Lock(String encoded) throws MinigmaException {
        byte[] bytes = MinigmaUtils.decode(encoded);
        init(bytes);
    }

    /**
     * byte array constructor: takes raw OpenPGP public-key-ring bytes.
     * @param lockbytes
     * @throws MinigmaException
     */
    public Lock(byte[] lockbytes) throws MinigmaException {
        init(lockbytes);
    }

    // Parses the byte form into a key-ring collection and caches the identifying
    // fields (lockID, fingerprint, shortID, userIDs) of the first ring's primary key.
    private void init(byte[] bytes) throws MinigmaException {
        try {
            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
            KeyFingerPrintCalculator keyFingerPrintCalculator = new JcaKeyFingerprintCalculator();
            this.publicKeyRingCollection = new PGPPublicKeyRingCollection(bis, keyFingerPrintCalculator);
            PGPPublicKeyRing keyRing = (PGPPublicKeyRing) publicKeyRingCollection.getKeyRings().next();
            this.publicKey = keyRing.getPublicKey();
            Iterator<String> userids = publicKey.getUserIDs();
            while (userids.hasNext()) {
                userIDs.add(userids.next());
            }
            this.lockID = publicKey.getKeyID();
            this.fingerprint = new Fingerprint(publicKey.getFingerprint());
            this.shortID = MinigmaUtils.encode(publicKey.getKeyID());
        } catch (Exception x) {
            throw new MinigmaException("error initialising minigma-lock from string", x);
        }
    }

    /**
     * Instantiates a Lock from an AsciiArmored file.
     * NOTE(review): the ArmoredInputStream is never closed on either path - confirm
     * whether the PGPPublicKeyRingCollection constructor consumes/closes it.
     */
    public Lock(File file) throws MinigmaException {
        try {
            ArmoredInputStream armoredInputStream = new ArmoredInputStream(new FileInputStream(file));
            KeyFingerPrintCalculator calculator = new JcaKeyFingerprintCalculator();
            PGPPublicKeyRingCollection keyRings = new PGPPublicKeyRingCollection(armoredInputStream, calculator);
            init(keyRings);
        } catch (IOException iox) {
            throw new MinigmaException("Lock(file) error opening lock file", iox);
        } catch (PGPException pex) {
            throw new MinigmaException("Lock(file) error instantiating KeyRingCollection", pex);
        }
    }

    /**
     * This constructor takes a BouncyCastle PGPPublicKeyRingCollection and
     * instantiates a Lock from the first public key ring in the collection.
     * @param publicKeyRingCollection
     */
    protected Lock(PGPPublicKeyRingCollection publicKeyRingCollection) {
        init(publicKeyRingCollection);
    }

    // Same caching as init(byte[]), but starting from an already-built collection.
    // Exceptions are dumped rather than rethrown, so a failed init leaves the
    // identifying fields unset - NOTE(review): callers cannot detect this.
    private void init(PGPPublicKeyRingCollection publicKeyRingCollection) {
        try {
            this.publicKeyRingCollection = publicKeyRingCollection;
            if (publicKeyRingCollection.getKeyRings().hasNext()) {
                PGPPublicKeyRing keyRing = (PGPPublicKeyRing) publicKeyRingCollection.getKeyRings().next();
                PGPPublicKey pubkey = keyRing.getPublicKey();
                this.publicKey = pubkey;
                Iterator<String> userids = pubkey.getUserIDs();
                while (userids.hasNext()) {
                    userIDs.add(userids.next());
                }
                this.lockID = publicKey.getKeyID();
                this.fingerprint = new Fingerprint(publicKey.getFingerprint());
                this.shortID = MinigmaUtils.encode(publicKey.getKeyID());
            } else {
                System.out.println("no lock exists");
            }
            //System.out.println(Kidney.toString(lockID));
            //System.out.println(Kidney.toString(fingerprint));
        } catch (Exception x) {
            Exceptions.dump("Lock-init", x);
        }
    }

    /**
     * Wraps a single PGPPublicKeyRing in a singleton collection and caches the
     * identifying fields of its primary key.
     */
    protected Lock(PGPPublicKeyRing pgpPublicKeyRing) {
        try {
            Collection<PGPPublicKeyRing> keyList = new ArrayList<PGPPublicKeyRing>();
            keyList.add(pgpPublicKeyRing);
            this.publicKeyRingCollection = new PGPPublicKeyRingCollection(keyList);
        } catch (Exception x) {
            Exceptions.dump("Lock+PKR", x);
        }
        PGPPublicKey pubkey = pgpPublicKeyRing.getPublicKey();
        this.publicKey = pubkey;
        Iterator<String> userids = pubkey.getUserIDs();
        while (userids.hasNext()) {
            userIDs.add(userids.next());
        }
        this.lockID = publicKey.getKeyID();
        this.fingerprint = new Fingerprint(publicKey.getFingerprint());
        this.shortID = MinigmaUtils.encode(publicKey.getKeyID());
    }

    /**
     * Encrypts a String with this Lock.
     */
    public byte[] lock(String string) throws MinigmaException {
        return lock(MinigmaUtils.toByteArray(string));
    }

    /**
     * Encrypts a byte array with this Lock: compresses the literal data,
     * then hands it to the CryptoEngine.
     */
    public byte[] lock(byte[] literalData) throws MinigmaException {
        //MinigmaUtils.printBytes(literalData);
        byte[] compressedData = MinigmaUtils.compress(literalData);
        //MinigmaUtils.printBytes(compressedData);
        byte[] encryptedData = CryptoEngine.encrypt(compressedData, this);
        return encryptedData;
    }

    /**
     * Encrypts a BigBinary with this Lock.
     */
    public BigBinary lock(BigBinary clearBytes) throws MinigmaException {
        return new BigBinary(lock(clearBytes.toByteArray()));
    }

    /**
     * Encrypts the given String and returns the cyphertext as a String.
     *
     * @param string
     * @return the encrypted data, Base64-encoded
     * @throws MinigmaException
     */
    public String lockAsString(String string) throws MinigmaException {
        return MinigmaUtils.encode(lock(string));
    }

    /**
     * Returns this Lock as an ASCII-Armored String, e.g. for submitting to
     * keyservers. Returns null (after dumping the exception) on failure.
     * @return
     */
    public String toArmoredString() {
        try {
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            MinigmaOutputStream minigmaOutputStream = new MinigmaOutputStream(byteArrayOutputStream);
            minigmaOutputStream.write(getBytes());
            minigmaOutputStream.flush();
            minigmaOutputStream.close();
            byte[] bytes = byteArrayOutputStream.toByteArray();
            String string = MinigmaUtils.fromByteArray(bytes);
            Log.d(TAG, string);
            return string;
        } catch (Exception x) {
            Exceptions.dump(x);
            return null;
        }
    }

    /**
     * Verifies signed material: true only if this Lock's own fingerprint appears
     * among the verified signors returned by the SignatureEngine.
     * @return true if it verifies against this Lock, false otherwise.
     * @throws MinigmaException
     * @throws UnsupportedAlgorithmException
     * @throws SignatureException if the signature does not verify correctly.
     */
    public boolean verify(String signedMaterial, Signature signature) throws MinigmaException, UnsupportedAlgorithmException, SignatureException {
        List<List<Fingerprint>> results = SignatureEngine.verify(signedMaterial, signature, this);
        // results.get(0) holds the fingerprints whose signatures verified.
        List<Fingerprint> signorIDS = results.get(0);
        if (signorIDS.contains(fingerprint)) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Adds a Lock to this lock, concatenating the two. Material locked with the
     * resulting concatenated Lock can be unlocked with *any* of the corresponding
     * Keys, unless inclusive is true in which case *all* the Keys are needed. However,
     * this feature is not yet implemented and passing inclusive as true will cause an exception to be thrown.
     * @param lock the Lock to be added to this Lock
     * @param inclusive must be false in this implementation.
     * @return a Lock which can be unlocked by the keys corresponding to either Lock.
     */
    public Lock addLock(Lock lock, boolean inclusive) throws MinigmaException {
        if (inclusive) { throw new MinigmaException("inclusive Lock concatenation not yet implemented"); }
        long newLockID = 0;
        try {
            Iterator<PGPPublicKeyRing> pgpPublicKeyRingIterator = lock.getPGPPublicKeyRingIterator();
            while (pgpPublicKeyRingIterator.hasNext()) {
                PGPPublicKeyRing pgpPublicKeyRing = pgpPublicKeyRingIterator.next();
                newLockID = pgpPublicKeyRing.getPublicKey().getKeyID();
                // Only add rings we don't already hold, keyed by their 64-bit key ID.
                if (!(publicKeyRingCollection.contains(newLockID))) {
                    publicKeyRingCollection = PGPPublicKeyRingCollection.addPublicKeyRing(publicKeyRingCollection, pgpPublicKeyRing);
                    //System.out.println("Lock: added lock with ID:"+Kidney.toString(newLockID)+" to lock "+Kidney.toString(lockID));
                }
            }
            //System.out.println("concatenation completed, this lock now has "+publicKeys.size()+" locks");
        } catch (Exception x) {
            throw new MinigmaException("Error concatenating Lock", x);
        }
        return this;
    }

    /**
     * Removes a lock. Use this method with caution! it removes all references to any public key referred to by the Lock argument.
     * This could include a key that has been added by way of another Lock. So remove carefully.
     * @param lock the Lock to be removed;
     * @return this Lock, but with the other Lock removed
     */
    public Lock removeLock(Lock lock) throws MinigmaException {
        Iterator<PGPPublicKeyRing> pgpPublicKeyRingIterator = lock.getPGPPublicKeyRingIterator();
        try {
            while (pgpPublicKeyRingIterator.hasNext()) {
                PGPPublicKeyRing pgpPublicKeyRing = pgpPublicKeyRingIterator.next();
                long keyID = pgpPublicKeyRing.getPublicKey().getKeyID();
                if (publicKeyRingCollection.contains(keyID)) {
                    publicKeyRingCollection = PGPPublicKeyRingCollection.removePublicKeyRing(publicKeyRingCollection, pgpPublicKeyRing);
                }
            }
        } catch (Exception x) {
            throw new MinigmaException("Error de-concatenating Lock", x);
        }
        return this;
    }

    /**
     * Adds a textual ID - such as an email address - to the Lock corresponding to the passed-in key.
     * The new user ID is self-certified with the Key and the certification is returned.
     * @param id - the String id
     * @param key the Key to certify the new Lock
     * @param passphrase the passphrase to the Key
     */
    public Certificate addID(String id, Key key, char[] passphrase) throws MinigmaOtherException, BadPassphraseException {
        long lockid = key.getKeyID();
        PGPPublicKey pgpPublicKey;
        PGPSignatureGenerator pgpSignatureGenerator;
        PGPPrivateKey pgpPrivateKey;
        PGPSignature certificationSignature;
        // 1: find the public key matching the signing Key.
        try {
            pgpPublicKey = publicKeyRingCollection.getPublicKey(lockid);
        } catch (PGPException px) {
            throw new MinigmaOtherException("lock not found corresponding to key with id:"+Kidney.toString(lockid));
        // 2: unlock the private signing key with the passphrase.
        } try {
            pgpSignatureGenerator = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(pgpPublicKey.getAlgorithm(), HashAlgorithmTags.SHA512));
            PBESecretKeyDecryptor pbeSecretKeyDecryptor = new JcePBESecretKeyDecryptorBuilder().build(passphrase);
            pgpPrivateKey = key.getSigningKey().extractPrivateKey(pbeSecretKeyDecryptor);
        } catch (PGPException px) {
            throw new BadPassphraseException("Bad passphrase supplied for Key with id "+lockid);
        // 3: generate the certification and attach it (with the new id) to the public key.
        } try {
            pgpSignatureGenerator.init(PGPSignature.DEFAULT_CERTIFICATION, pgpPrivateKey);
            certificationSignature = pgpSignatureGenerator.generateCertification(id, pgpPublicKey);
            publicKey = PGPPublicKey.addCertification(pgpPublicKey, id, certificationSignature);
        } catch (PGPException px) {
            throw new MinigmaOtherException("failed to certify new ID");
        } try {
            return new Certificate(certificationSignature);
        } catch (MinigmaException mx) {
            throw new MinigmaOtherException("failed to create Minigma Certificate from PGP certification signature", mx);
        }
    }

    /**
     * Adds a photo or image id to the Lock corresponding to the passed-in key. At the moment, only
     * jpeg photos are supported, a limitation that comes from the underlying BouncyCastle implementation but
     * apparently only because it's the only type for which a constant has been declared in ImageAttribute.
     * @param photodata - the image data (JPEG bytes)
     * @param key the Key to certify the new Lock
     * @param passphrase the passphrase to the Key
     */
    public Certificate addImageID(byte[] photodata, Key key, char[] passphrase) throws MinigmaOtherException, BadPassphraseException {
        long lockid = key.getKeyID();
        PGPPublicKey pgpPublicKey;
        PGPSignatureGenerator pgpSignatureGenerator;
        PGPPrivateKey pgpPrivateKey;
        PGPSignature certificationSignature;
        try {
            pgpPublicKey = publicKeyRingCollection.getPublicKey(lockid);
        } catch (PGPException px) {
            throw new MinigmaOtherException("lock not found corresponding to key with id:"+Kidney.toString(lockid));
        } try {
            pgpSignatureGenerator = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(pgpPublicKey.getAlgorithm(), HashAlgorithmTags.SHA512));
            PBESecretKeyDecryptor pbeSecretKeyDecryptor = new JcePBESecretKeyDecryptorBuilder().build(passphrase);
            pgpPrivateKey = key.getSigningKey().extractPrivateKey(pbeSecretKeyDecryptor);
        } catch (PGPException px) {
            throw new BadPassphraseException("Bad passphrase supplied for Key with id "+lockid);
        } try {
            // Wrap the image bytes in a user-attribute subpacket vector and certify that.
            PGPUserAttributeSubpacketVectorGenerator pgpUserAttributeSubpacketVectorGenerator = new PGPUserAttributeSubpacketVectorGenerator();
            pgpUserAttributeSubpacketVectorGenerator.setImageAttribute(ImageAttribute.JPEG, photodata);
            PGPUserAttributeSubpacketVector pgpUserAttributeSubpacketVector = pgpUserAttributeSubpacketVectorGenerator.generate();
            pgpSignatureGenerator.init(PGPSignature.DEFAULT_CERTIFICATION, pgpPrivateKey);
            certificationSignature = pgpSignatureGenerator.generateCertification(pgpUserAttributeSubpacketVector, pgpPublicKey);
            publicKey = PGPPublicKey.addCertification(pgpPublicKey, certificationSignature);
        } catch (PGPException px) {
            throw new MinigmaOtherException("failed to certify new ID");
        } try {
            return new Certificate(certificationSignature);
        } catch (MinigmaException mx) {
            throw new MinigmaOtherException("failed to create Minigma Certificate from PGP certification signature", mx);
        }
    }

    /**
     * Revokes a particular public key in a Lock, generating a key revocation Certificate.
     *
     * @param keyID the 64-bit ID of the public key to be revoked
     * @param key its corresponding key
     * @param passphrase and the passphrase
     * @return the revocation Certificate, or null (after dumping) on PGP failure
     */
    public Certificate revokeLock(long keyID, Key key, char[] passphrase) throws MinigmaException {
        try {
            PGPPublicKeyRing pgpPublicKeyRing = publicKeyRingCollection.getPublicKeyRing(keyID);
            PGPPublicKey pgpPublicKey = pgpPublicKeyRing.getPublicKey(keyID);
            return revokeLock(pgpPublicKey, key, passphrase);
        } catch (PGPException pgpx) {
            Exceptions.dump(pgpx);
            return null;
        }
    }

    /**
     * Revokes a particular public key in a Lock, generating a key revocation Certificate.
     *
     * @param fingerprint The fingerprint of the public key to be revoked
     * @param key
     * @param passphrase
     * @return the revocation Certificate, or null (after dumping) on PGP failure
     */
    public Certificate revokeLock(Fingerprint fingerprint, Key key, char[] passphrase) throws MinigmaException {
        byte[] keyID = fingerprint.getFingerprintbytes();
        try {
            PGPPublicKeyRing pgpPublicKeyRing = publicKeyRingCollection.getPublicKeyRing(keyID);
            PGPPublicKey pgpPublicKey = pgpPublicKeyRing.getPublicKey(keyID);
            return revokeLock(pgpPublicKey, key, passphrase);
        } catch (PGPException pgpx) {
            Exceptions.dump(pgpx);
            return null;
        }
    }

    // Shared implementation for both revokeLock overloads: signs the public key
    // with a revocation signature and attaches it.
    private Certificate revokeLock(PGPPublicKey pgpPublicKey, Key key, char[] passphrase) throws MinigmaException {
        try {
            PGPSignatureGenerator pgpSignatureGenerator = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(pgpPublicKey.getAlgorithm(), HashAlgorithmTags.SHA512));
            PBESecretKeyDecryptor pbeSecretKeyDecryptor = new JcePBESecretKeyDecryptorBuilder().build(passphrase);
            PGPPrivateKey pgpPrivateKey = key.getSigningKey().extractPrivateKey(pbeSecretKeyDecryptor);
            // 0x20 is the OpenPGP key-revocation signature type (PGPSignature.KEY_REVOCATION).
            pgpSignatureGenerator.init(0x20, pgpPrivateKey);
            PGPSignature revocationSignature = pgpSignatureGenerator.generateCertification(pgpPublicKey);
            publicKey = PGPPublicKey.addCertification(pgpPublicKey, revocationSignature);
            return new Certificate(revocationSignature);
        } catch (PGPException pgpex) {
            Exceptions.dump(pgpex);
            return null;
        }
    }

    /**
     * Designates another lock (by its id bytes) as authorised to revoke this one,
     * via a direct-key signature carrying a revocation-key subpacket.
     * Returns null (after dumping) on failure.
     */
    public Certificate addDesignatedRevoker(byte[] lockid, Key key, char[] passphrase) {
        try {
            PGPSignatureSubpacketGenerator pgpSignatureSubpacketGenerator = new PGPSignatureSubpacketGenerator();
            pgpSignatureSubpacketGenerator.setRevocationKey(true, PublicKeyAlgorithmTags.RSA_SIGN, lockid);
            PGPSignatureSubpacketVector pgpSignatureSubpacketVector = pgpSignatureSubpacketGenerator.generate();
            PGPSignatureGenerator pgpSignatureGenerator = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(PublicKeyAlgorithmTags.RSA_SIGN, HashAlgorithmTags.SHA512));
            PBESecretKeyDecryptor pbeSecretKeyDecryptor = new JcePBESecretKeyDecryptorBuilder().build(passphrase);
            PGPPrivateKey pgpPrivateKey = key.getMasterKey().extractPrivateKey(pbeSecretKeyDecryptor);
            pgpSignatureGenerator.init(PGPSignature.DIRECT_KEY, pgpPrivateKey);
            pgpSignatureGenerator.setHashedSubpackets(pgpSignatureSubpacketVector);
            PGPSignature revokerSignature = pgpSignatureGenerator.generate();
            publicKey = PGPPublicKey.addCertification(publicKey, revokerSignature);
            return new Certificate(revokerSignature);
        } catch (Exception x) {
            Exceptions.dump(x);
            return null;
        }
    }

    /**
     * @return an iterator over the key rings this Lock wraps
     */
    public Iterator<PGPPublicKeyRing> getPGPPublicKeyRingIterator() {
        return publicKeyRingCollection.getKeyRings();
    }

    /**
     * @return the wrapped PGPPublicKeyRingCollection
     */
    protected PGPPublicKeyRingCollection getKeyRings() {
        try {
            return publicKeyRingCollection;
        } catch (Exception ex) {
            return null;
        }
    }

    /**
     * @param keyID
     * @return the ring containing the given key ID, or null if not found / on error
     */
    protected PGPPublicKeyRing getPublicKeyRing(long keyID) {
        try {
            return publicKeyRingCollection.getPublicKeyRing(keyID);
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Returns the primary public key of the ring containing the given key ID.
     * NOTE(review): throws NullPointerException if no ring matches - confirm callers guard this.
     * @param keyID
     * @return
     */
    public PGPPublicKey getPublicKey(long keyID) {
        PGPPublicKeyRing pkr = getPublicKeyRing(keyID);
        return pkr.getPublicKey();
    }

    /*
    public void revoke(long keyID, Key key, char[] passphrase) throws MinigmaException {
        if(publicKeys.containsKey(keyID)){
            PGPPublicKeyRing pkr = publicKeys.get(keyID);
            PGPPublicKey publicKey = pkr.getPublicKey(keyID);
            PGPPublicKey.addCertification(publicKey, null)
        }else{
            throw new MinigmaException ("key "+Kidney.toString(keyID)+ " not in this lock");
        }
    }
    */

    /**
     * Certifies a specific PGP public subkey within this Lock.
     * (e.g. master, encryption, subkey)
     * @param keyID the keyID of the public key to be certified
     * @param key the key of the person doing the certifying
     * @param passphrase the corresponding passphrase
     * @param certificationLevel the level of certification - basic to positive?
     * @throws MinigmaException
     */
    public Certificate certify(long keyID, Key key, char [] passphrase, LockStore lockStore, int certificationLevel) throws MinigmaException {
        if (!(Arrays.contains(Certificate.CERTIFICATION_TYPES, certificationLevel))) {
            throw new MinigmaException(certificationLevel+ "is not a valid certification type");
        }
        try {
            if (publicKeyRingCollection.contains(keyID)) {
                try {
                    PGPPublicKeyRing pgpPublicKeyRing = publicKeyRingCollection.getPublicKeyRing(keyID);
                    PGPPublicKey pgpPublicKey = pgpPublicKeyRing.getPublicKey(keyID);
                    boolean isCertified = false;
                    Iterator signatures = pgpPublicKey.getSignatures();
                    //first check to see if it is already certified by this key, no point in duplicating the effort
                    while (signatures.hasNext()) {
                        PGPSignature signature = (PGPSignature) signatures.next();
                        if (signature.isCertification()) {
                            isCertified = (signature.getKeyID() == key.getKeyID());
                            if (isCertified) { return new Certificate(signature); }
                        }
                    }
                    if (! isCertified) {
                        Certificate certificate = SignatureEngine.getKeyCertification(key, passphrase, publicKey, certificationLevel);
                        //Now to add the certification to the PGPpublic key itself.
                        // The ring is removed, the key re-certified and the rebuilt ring re-added,
                        // then the updated Lock is persisted back to the LockStore.
                        PGPSignature pgpSignature = certificate.getPgpSignature();
                        publicKeyRingCollection = PGPPublicKeyRingCollection.removePublicKeyRing(publicKeyRingCollection, pgpPublicKeyRing);
                        pgpPublicKeyRing = PGPPublicKeyRing.removePublicKey(pgpPublicKeyRing, publicKey);
                        publicKey = PGPPublicKey.addCertification(publicKey, pgpSignature);
                        pgpPublicKeyRing = PGPPublicKeyRing.insertPublicKey(pgpPublicKeyRing, publicKey);
                        publicKeyRingCollection = PGPPublicKeyRingCollection.addPublicKeyRing(publicKeyRingCollection, pgpPublicKeyRing);
                        lockStore.addLock(this);
                        return certificate;
                    }
                } catch (Exception x) {
                    throw new MinigmaException("Problem certifying key", x);
                }
            } else {
                throw new MinigmaException ("key "+Kidney.toString(keyID)+ " not in this lock");
            }
        } catch (Exception x) {
            Exceptions.dump(x);
            throw new MinigmaException("certification issues", x);
        }
        // NOTE(review): appears unreachable - every path above returns or throws.
        return null;
    }

    /*public void addCertificate(Certificate certificate){
        PGPSignature pgpSignature = certificate.getPgpSignature();
        publicKeyRingCollection = PGPPublicKeyRingCollection.removePublicKeyRing(publicKeyRingCollection, pgpPublicKeyRing);
        pgpPublicKeyRing = PGPPublicKeyRing.removePublicKey(pgpPublicKeyRing, publicKey);
        publicKey = PGPPublicKey.addCertification(publicKey, pgpSignature);
        pgpPublicKeyRing = PGPPublicKeyRing.insertPublicKey(pgpPublicKeyRing, publicKey);
        publicKeyRingCollection = PGPPublicKeyRingCollection.addPublicKeyRing(publicKeyRingCollection, pgpPublicKeyRing);
        lockStore.addLock(this);
        return certificate;
    }*/

    /**
     * Collects every certification signature attached to every public key in
     * every ring of this Lock.
     */
    public List<Certificate> getCertificates() throws MinigmaException {
        List<Certificate> certificates = new ArrayList<>();
        for (PGPPublicKeyRing pgpPublicKeyRing : publicKeyRingCollection) {
            Iterator<PGPPublicKey> pgpPublicKeyIterator = pgpPublicKeyRing.getPublicKeys();
            while (pgpPublicKeyIterator.hasNext()) {
                PGPPublicKey pgpPublicKey = pgpPublicKeyIterator.next();
                Iterator signatureIterator = pgpPublicKey.getSignatures();
                while (signatureIterator.hasNext()) {
                    try {
                        PGPSignature pgpSignature = (PGPSignature) signatureIterator.next();
                        if (pgpSignature.isCertification()) {
                            long keyID = pgpSignature.getKeyID();
                            // String signerUserID = lockStore.getUserID(keyID);
                            Certificate certificate = new Certificate(pgpSignature);
                            certificates.add(certificate);
                        }
                    } catch (ClassCastException ccx) {
                        Exceptions.dump(ccx);
                        //TODO handle
                    }
                }
            }
        }
        return certificates;
    }

    /** @return true if this Lock's collection contains a ring with the given 64-bit key ID. */
    public boolean contains(long lockID) {
        try {
            return publicKeyRingCollection.contains(lockID);
        } catch (Exception x) {
            Exceptions.dump(x);
            return false;
        }
    }

    /** @return true if this Lock's collection contains a ring with the given fingerprint bytes. */
    public boolean contains(byte[] lockID) {
        try {
            return publicKeyRingCollection.contains(lockID);
        } catch (Exception x) {
            Exceptions.dump(x);
            return false;
        }
    }

    /**
     * @return the 64-bit key ID of this Lock's primary public key
     */
    public long getLockID() {
        return lockID;
    }

    /** @return the fingerprint of this Lock's primary public key */
    public Fingerprint getFingerprint() {
        return fingerprint;
    }

    /** @return the encoded form of the key-ring collection, or null (after dumping) on IO failure */
    public byte[] getBytes() {
        try {
            return publicKeyRingCollection.getEncoded();
        } catch (IOException iox) {
            Exceptions.dump(iox);
            return null;
        }
    }

    /**Lock syncing syncs two versions of the same Lock (Locks accrue certifications). When a Lockstore
     * encounters two versions of the same Lock (that is, having the same fingerprint)it merges them.
     *
     * @param otherLock
     * @throws MinigmaException
     */
    /*public void syncLocks(Lock otherLock)throws MinigmaException {
        if(!(otherLock.getFingerprint().equals(fingerprint))){
            throw new MinigmaException("can only sync the same locks");
        }
        List<Certificate> otherCertificateList = otherLock.getCertificates();
        List<Certificate> thisCertificateList = getCertificates();
        otherCertificateList.addAll(thisCertificateList);
        for(Certificate certificate:otherCertificateList){
            if(!(thisCertificateList.contains(certificate))){
            }
        }
    }*/

    /**
     * Returns the first userID associated with this Lock. Note that Locks, like PGPPublicKeys, can
     * have multiple userIDs associated with them.
     * NOTE(review): throws IndexOutOfBoundsException if the key carries no user IDs.
     * @return
     */
    public String getUserID() {
        return userIDs.get(0);
    }

    /**
     * Returns a List of all the text userIDs associated with this Lock.
     * @return
     */
    public List<String> getUserIDs() {
        return userIDs;
    }

    /**
     * @return the PGP 64-bit key ID encoded as an 8-character Base64 String.
     */
    public String getShortID() {
        return shortID;
    }
}<file_sep>/app/src/main/java/uk/co/platosys/minigma/utils/Kidney.java
/*
* Copyright <NAME> and Platosys.
** (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package uk.co.platosys.minigma.utils;
import java.nio.ByteBuffer;
import java.nio.LongBuffer;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.List;
/**
*Kidney is a utility providing static classes to convert a
* PGP KeyID, or a fingerprint, to a string and back again.
*
* Additionally, it produces and expects strings which have colons, spaces or dashes at every
* second character, to make the thing look prettier.
*
* The default separator is a dash, not a colon. The resultant strings therefore can (but should not) be
* used as xml attribute names.
*
*/
public class Kidney {
static final SecureRandom RANDOM=new SecureRandom();
public static final char DEFAULT_SEPARATOR='-';
static final char[] HEX_CHAR_TABLE = {
'0', '1', '2', '3','4', '5', '6', '7','8', '9', 'a', 'b','c','d', 'e', 'f'
};
    /**
     * Takes a long and returns a String, formatted as pairs of hexadecimal
     * digits separated by the default separator (a dash).
     * @param k the value to format
     * @return the formatted hex string, e.g. 0xabcdL gives "ab-cd"
     */
    public static String toString(long k){
        return toString(k, DEFAULT_SEPARATOR);
    }
/**
* takes a long and a char and returns a String, formatted as pairs of hexadecimal digits separated by the char.
* @param k
* @param separator
* @return
*/
public static String toString(long k, char separator){
char[] sepchar = new char[1];
sepchar[0]=separator;
String unbrokenString = Long.toHexString(k);
String brokenString="";
for (int i=0; i<unbrokenString.length()-1; i=i+2){
char [] x = new char[3];
x[0] = unbrokenString.charAt(i);
x[1] = unbrokenString.charAt(i+1);
x[2] = sepchar[0];
String newString = new String (x);
brokenString = brokenString+newString;
}
if(brokenString.endsWith(new String(sepchar))){
brokenString = brokenString.substring(0, (brokenString.length()-1));
}
return brokenString;
}
/**
*
* @param fingerprint
* @return
*/
public static String toString(byte[] fingerprint) {
char separator='-';
char[] hex = new char[(3 * fingerprint.length)-1];
for (int i=0; i<((fingerprint.length)-1); i++) {
byte b = fingerprint[i];
int v = b & 0xFF;
hex[(3*i)] = HEX_CHAR_TABLE[v >>> 4];
hex[(3*i)+1]=HEX_CHAR_TABLE[v & 0xF];
hex[(3*i)+2]=separator;
}
int i= (fingerprint.length-1);
byte b = fingerprint[i];
int v = b & 0xFF;
hex[(3*i)] = HEX_CHAR_TABLE[v >>> 4];
hex[(3*i)+1]=HEX_CHAR_TABLE[v & 0xF];
return new String(hex);
}
/**
*parses a hexadecimal string and returns it as a long.
* @param hexString
* @return
* @throws NumberFormatException
*/
public static long toLong(String hexString) throws NumberFormatException{
long answer=0;
StringBuffer strbuf = new StringBuffer();
for (int i=0; i<hexString.length(); i++){
char x = hexString.charAt(i);
if ((x!=':')&&(x!=' ')&&(x!='-')){
strbuf.append(x);
}
}
String concatString = new String(strbuf);
int len = concatString.length();
for (int i=0; i<len; i++){
int d=0;
char x = concatString.charAt(len-(i+1));
if (x=='0'){d=0;}
else if (x=='1'){d=1;}
else if (x=='2'){d=2;}
else if (x=='3'){d=3;}
else if (x=='4'){d=4;}
else if (x=='5'){d=5;}
else if (x=='6'){d=6;}
else if (x=='7'){d=7;}
else if (x=='8'){d=8;}
else if (x=='9'){d=9;}
else if ((x=='a')|(x=='A')){d=10;}
else if ((x=='b')|(x=='B')){d=11;}
else if ((x=='c')|(x=='C')){d=12;}
else if ((x=='d')|(x=='D')){d=13;}
else if ((x=='e')|(x=='E')){d=14;}
else if ((x=='f')|(x=='F')){d=15;}
else {
throw new NumberFormatException(x+ " is not a recognised hex digit");
}
long col=1;
for (int j=0; j<i; j++){
col=col*16;
}
answer=answer+(col*d);
//Log.d(TAG,5, "Kidney in: "+hexString+", out: "+Long.toHexString(answer));
}
return answer;
}
/**
* returns a long as a byte array.
* @param l
* @return
*/
public static byte[] longToByteArray(long l) {
byte[] bArray = new byte[8];
ByteBuffer bBuffer = ByteBuffer.wrap(bArray);
LongBuffer lBuffer = bBuffer.asLongBuffer();
lBuffer.put(0, l);
return bArray;
}
/**
* Returns a reasonably random long. As secure as the underlying implementation of SecureRandom, probably.
* @return
*/
public static long randomLong(){
byte[] arr= new byte[8];
RANDOM.nextBytes(arr);
int i = 0;
int len = 8;
int cnt = 0;
byte[] tmp = new byte[len];
for (i = 0; i < len; i++) {
tmp[cnt] = arr[i];
cnt++;
}
long accum = 0;
i = 0;
for ( int shiftBy = 0; shiftBy < 32; shiftBy += 8 ) {
accum |= ( (long)( tmp[i] & 0xff ) ) << shiftBy;
i++;
}
return accum;
}
}<file_sep>/app/src/main/java/uk/co/platosys/minigma/LockStore.java
package uk.co.platosys.minigma;
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
import uk.co.platosys.minigma.exceptions.LockNotFoundException;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import java.util.Iterator;
/**
* This interface defines how Locks are stored. Minigma provides one implementation, MinigmaLockStore,
* which uses PGPPublicKeyRings as a storage mechanism.
*
* Minigma does not use OpenPGP KeyIDs, but only fingerprints (the 160-bit timestamped hash of the public key)
* OpenPGP short (32-bit) KeyIDs are broadly deprecated as it is now trivial to generate collisions, that is,
* keys that have the same short keyID. Long (64-bit) keyIDs are much more secure, but collisions are theoretically
* possible. Using the 160-bit fingerprint is less convenient if this is ever to be done humanly but Minigma is all about
* doing this by machine.
*
*/
public interface LockStore {
    /**
     * Adds a Lock to a Lockstore. If the Lockstore already contains a Lock with that id, it
     * is replaced (typically because the Lock's certification has changed).
     *
     * @param lock the Lock to add or replace
     * @return true if the Lock was stored successfully
     */
    boolean addLock(Lock lock);
    /**
     * Removes the Lock with the given fingerprint from the Lockstore. If an implementation cannot remove
     * a Lock it should simply return false.
     * @param fingerprint identifies the Lock to be removed
     * @return true if the Lock was removed
     */
    boolean removeLock(Fingerprint fingerprint);
    /**
     * The most important method in the interface. All LockStores should implement this.
     * @param fingerprint identifies the Lock wanted
     * @return the Lock having this fingerprint
     * @throws LockNotFoundException if no such Lock is held by this store
     */
    Lock getLock(Fingerprint fingerprint) throws LockNotFoundException;
    /**
     * @return an Iterator over the Locks held by this store
     * @throws MinigmaException if the store cannot be iterated
     */
    Iterator<Lock> iterator() throws MinigmaException;
    /**
     * This method is used to retrieve a Lock from a Lockstore given a userID - typically an email
     * address, but any String identifier can be used.
     * @param userID the identifier to search by
     * @return a Lock associated with this userID.
     * @throws MinigmaException if the lookup itself fails
     * @throws LockNotFoundException if there is no Lock in the Lockstore having this userID.
     */
    Lock getLock(String userID)throws MinigmaException, LockNotFoundException;
    /**
     * @param userID the userID to test for
     * @return true if this store holds a Lock associated with this userID
     */
    boolean contains(String userID);
    /** @return an identifier for this particular store */
    long getStoreId();
    /**
     * @param keyID the Fingerprint of the Lock whose userID is wanted
     * @return the userID associated with that Lock
     */
    String getUserID(Fingerprint keyID);
    /**
     * @param keyID the 64-bit key ID of the Lock whose userID is wanted
     * @return the userID associated with that Lock
     */
    String getUserID(long keyID);
    /** @return the number of keys held by this Lockstore */
    int getCount();
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/exceptions/Exceptions.java
package uk.co.platosys.minigma.exceptions;
import android.util.Log;
public class Exceptions {
public static void dump ( Throwable e) {
Log.e("DUMP", "error", e); //comment out to get testing to run.
System.out.println(e.getClass().getName() + ":" + e.getMessage());
if (e.getCause() != null) {
System.out.println("Cause:"+e.getCause().getClass().getName());
dump(e.getCause());
} else {
StackTraceElement[] stackTraceElements = e.getStackTrace();
for (StackTraceElement stackTraceElement : stackTraceElements) {
System.out.println(stackTraceElement.toString());
}
}
}
public static void dump (String TAG, Throwable e) {
Log.e("DUMP", "error", e); //comment out to get testing to run.
System.out.println(e.getClass().getName() + ":" + e.getMessage());
if (e.getCause() != null) {
dump(TAG, e.getCause());
} else {
StackTraceElement[] stackTraceElements = e.getStackTrace();
for (StackTraceElement stackTraceElement : stackTraceElements) {
System.out.println(stackTraceElement.toString());
}
}
}
public static void dump (String TAG, String msg, Throwable e) {
Log.e("DUMP", "error", e); //comment out to get testing to run.
System.out.println(msg);
System.out.println(e.getClass().getName() + ":" + e.getMessage());
if (e.getCause() != null) {
dump(TAG, e.getCause());
} else {
StackTraceElement[] stackTraceElements = e.getStackTrace();
for (StackTraceElement stackTraceElement : stackTraceElements) {
System.out.println(stackTraceElement.toString());
}
}
}
}
/*
switch
*/<file_sep>/app/src/main/java/uk/co/platosys/minigma/utils/MinigmaOutputStream.java
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
package uk.co.platosys.minigma.utils;
import org.spongycastle.bcpg.ArmoredOutputStream;
import uk.co.platosys.minigma.Minigma;
import java.io.OutputStream;
/**MinigmaOutputStream is a PGP ArmoredOutputStream in which the headers identify Minigma
* as the user agent
*
*/
public class MinigmaOutputStream extends ArmoredOutputStream {
    /**
     * Creates an armored output stream whose armor headers identify Minigma
     * (library name, Minigma version plus the BouncyCastle/SpongyCastle
     * version, and the app description) as the generating software.
     *
     * @param outputStream the underlying stream the armored data is written to
     */
    public MinigmaOutputStream (OutputStream outputStream){
        super(outputStream);
        setHeader("Library", Minigma.LIBRARY_NAME);
        // Replaces the default BC "Version" armor header with Minigma's own.
        setHeader(ArmoredOutputStream.VERSION_HDR, Minigma.VERSION+", BC:"+Minigma.BCVERSION);
        setHeader("Comment", Minigma.APP_DESCRIPTION);
    }
}
<file_sep>/app/build.gradle
// Gradle build script for the Minigmand Android library module.
apply plugin: 'com.android.library'
android {
    compileSdkVersion 26
    defaultConfig {
        minSdkVersion 15
        targetSdkVersion 26
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
}
repositories {
    mavenCentral()
    // jitpack.io hosts the com.github.ejoftheweb:effwords artifact.
    maven { url "https://jitpack.io" }
}
dependencies {
    implementation group: 'commons-codec', name: 'commons-codec', version: '1.14'
    // Wordlists used by PassPhraser.
    implementation 'com.github.ejoftheweb:effwords:v0.2.2'
    implementation group: 'net.jpountz.lz4', name: 'lz4', version: '1.3.0'
    // SpongyCastle (Android repackaging of BouncyCastle); 'api' scope because
    // PGP types appear in this library's public signatures.
    api group: 'com.madgag.spongycastle', name: 'prov', version: '1.58.0.0'
    api group: 'com.madgag.spongycastle', name: 'bcpg-jdk15on', version: '1.58.0.0'
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.android.support:appcompat-v7:26.1.0'
    testImplementation 'junit:junit:4.12'
    testImplementation 'org.mockito:mockito-core:3.2.0'
    testImplementation "org.powermock:powermock-module-junit4:1.7.4"
    testImplementation "org.powermock:powermock-api-mockito2:1.7.4"
    androidTestImplementation 'com.android.support.test:runner:1.0.1'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
    implementation 'com.google.guava:guava:28.1-android'
    implementation 'com.android.volley:volley:1.1.1'
    implementation group: 'org.jdom', name: 'jdom2', version: '2.0.6'
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/android/package-info.java
/**This package contains Android-specific implementations of interfaces defined in the main package**/
package uk.co.platosys.minigma.android;<file_sep>/app/src/main/java/uk/co/platosys/minigma/VLock.java
package uk.co.platosys.minigma;
import uk.co.platosys.minigma.exceptions.MinigmaException;
/**
* This extension of Lock provides a couple of additional fields
* for use with Hagrid verifying keyservers.
*/
public class VLock extends Lock {
    // Verification token issued by the keyserver; null until setToken is called.
    private String token;
    // Email address the verification relates to; null until setEmail is called.
    private String email;
    /**
     * Wraps an existing Lock as a VLock by re-parsing its encoded bytes.
     * The token and email fields are not populated here.
     * @param lock the Lock to wrap
     * @throws MinigmaException if the Lock's encoded bytes cannot be re-parsed
     */
    public VLock(Lock lock) throws MinigmaException {
        super(lock.getBytes());
    }
    /** @return the verification token, or null if none has been set */
    public String getToken() {
        return token;
    }
    /** @param token the verification token received from the keyserver */
    public void setToken(String token) {
        this.token = token;
    }
    /** @return the email address, or null if none has been set */
    public String getEmail() {
        return email;
    }
    /** @param email the email address to associate with this VLock */
    public void setEmail(String email) {
        this.email = email;
    }
}
<file_sep>/README.md
# Minigmand
Minigmand is an implementation of Minigma for the Android platform. It replaces the BouncyCastle packages with the equivalent
SpongyCastle ones (because of a naming conflict in Android).
To use it, just put this in the repositories section of your build.gradle file:
maven { url "https://jitpack.io" }
and in dependencies:
implementation 'com.github.ejoftheweb:minigmand:master-SNAPSHOT'
BUT: it is very much early dev code, it is certainly not production-ready. Feedback would of course be welcome.
Usage.
In Minigma, a public key is called a Lock and a private key is called a Key. You lock something with a Lock and you need a corresponding Key to unlock it. But they are all OpenPGP-compatible keys so you can use them in other OpenPGP applications (such as GPG). The idea behind Minigma is to have a really simple and logical API with the minimum learning curve so that you can use open crypto in other apps easily. YMMV of course!
to create a key-pair(a lockset): LockSmith.createLockSet(File keyDirectory, //where the generated secret Key is to be stored
LockStore lockStore, //where the public Lock will be stored
String username, //the username/email associated with the lockset
char[] passPhrase, //the passphrase with which the generated private Key will be encrypted
int algorithm)//use Algorithms.RSA
The LockStore can be either a MinigmaLockStore, which is a PGP public keyring collection implementation, stored as a Base64 text file
on the local filesystem, or an HKPLockStore, which is an http: interface to a public keyserver. Or implement the interface yourself, which is probably best.
For more info, see the wiki.
<file_sep>/app/src/main/java/uk/co/platosys/minigma/HKPLockStore.java
package uk.co.platosys.minigma;
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
import android.util.Log;
import uk.co.platosys.minigma.exceptions.Exceptions;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.utils.Kidney;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.Iterator;
import static java.net.HttpURLConnection.HTTP_ACCEPTED;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import static java.net.HttpURLConnection.HTTP_OK;
/**This is an implementation of LockStore that uses public keyservers
* as the backing store, with which it communicates using the HKP protocol
* based on http.
*
* It is being updated to use the VKS protocol provided by the Hagrid verifying keyserver.
*
*
*/
public class HKPLockStore implements LockStore {
    private String host;
    private int port=11371; //this is the default HKP port number
    /** Historical value, kept for source compatibility. Note that java.net.URL
     * expects the scheme WITHOUT a trailing colon, so SCHEME below is what is
     * actually used to build URLs. */
    public static final String PROTOCOL="http:";
    private static final String SCHEME="http";
    public static final String GET_FILE_PART="pks/lookup";
    public static final String POST_FILE_PART="pks/add";
    public static final String VKS_GET_BY_FINGERPRINT_FILE_PART="vks/v1/by-fingerprint";
    public static final String VKS_GET_BY_KEYID_FILE_PART="vks/v1/by-keyid";
    public static final String VKS_GET_BY_EMAIL_FILE_PART="vks/v1/by-email";
    public static final String VKS_UPLOAD_FILE_PART="/vks/v1/upload";
    public static final String VKS_REQUEST_VERIFY_FILE_PART="/vks/v1/request-verify";
    public static final String VKS_MIMETYPE= "application/json";
    public static final String ARMORED_PKEY_OPEN="-----BEGIN PGP PUBLIC KEY BLOCK-----";
    public static final String ARMORED_PKEY_CLOSE="-----END PGP PUBLIC KEY BLOCK-----";
    private boolean useVKS = false;
    private static final String TAG = "HKPLockstore";
    /**
     * Create an instance of the HKPLockStore by specifying a hostname; the
     * default HKP port (11371) is used.
     *
     * @param host the keyserver's hostname
     */
    public HKPLockStore(String host){
        this.host=host;
        //should constructor verify host's existence? How?
    }
    /**
     * Create an instance of the HKPLockStore by specifying a hostname and a port number.
     *
     * @param host the keyserver's hostname
     * @param port the port the keyserver listens on
     */
    public HKPLockStore(String host, int port){
        this.host=host;
        this.port=port;
        //should constructor verify host's existence? How?
    }
    @Override
    public boolean addLock(Lock lock) {
        if (useVKS) {
            return addLockWithVKS(lock);
        } else {
            return addLockWithHKP(lock);
        }
    }
    /**
     * TODO: this should use the Hagrid VKS JSON upload endpoint
     * (VKS_UPLOAD_FILE_PART). The original method was byte-for-byte identical
     * to addLockWithHKP, so until VKS upload is implemented it delegates.
     */
    private boolean addLockWithVKS (Lock lock){
        return addLockWithHKP(lock);
    }
    /**
     * Submits the Lock's ASCII-armored form to the keyserver's HKP "add"
     * endpoint as a form-encoded POST. The original wrote "keytext = " (with
     * spaces) without URL-encoding the key or setting a Content-Type, which
     * keyservers cannot parse; it also swallowed all exceptions silently.
     *
     * @param lock the Lock to upload
     * @return true if the server answered 200 (OK) or 202 (Accepted)
     */
    private boolean addLockWithHKP(Lock lock){
        try {
            URL url = new URL(SCHEME, host, port, pathOf(POST_FILE_PART));
            HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
            httpURLConnection.setRequestMethod("POST");
            httpURLConnection.setDoOutput(true); //changes the default method to POST.
            httpURLConnection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
            OutputStream outputStream = new BufferedOutputStream(httpURLConnection.getOutputStream());
            outputStream.write(("keytext=" + URLEncoder.encode(lock.toArmoredString(), "UTF-8")).getBytes("UTF-8"));
            outputStream.flush();
            outputStream.close();
            int response = httpURLConnection.getResponseCode();
            switch (response){
                case HTTP_OK:
                case HTTP_ACCEPTED:
                    return true;
                default:
                    handleError(response);
                    return false;
            }
        }catch(Exception x){
            Exceptions.dump(x); //the original swallowed this silently
            return false;
        }
    }
    /** This method always returns false. It is not practicable (or for that matter usually ever desirable) to remove a public key from
     * a public keyserver.
     * @param fingerprint
     * @return always false*/
    @Override
    public boolean removeLock(Fingerprint fingerprint) {
        return false;
    }
    /**
     * Retrieves a Lock from the server given its fingerprint. HKP parameters
     * travel in the URL query string — the original set them as HTTP request
     * headers via addRequestProperty, which keyservers ignore — and a
     * fingerprint search term is "0x" followed by undelimited hex.
     *
     * @param fingerprint identifies the Lock wanted
     * @return the Lock, or null if not found or on error
     */
    @Override
    public Lock getLock(Fingerprint fingerprint) {
        byte[] keyID = fingerprint.getFingerprintbytes();
        try {
            String search = "0x" + Kidney.toString(keyID).replace("-", "");
            URL url = new URL(SCHEME, host, port,
                    pathOf(GET_FILE_PART) + "?op=get&options=mr&search=" + URLEncoder.encode(search, "UTF-8"));
            HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
            httpURLConnection.setRequestMethod("GET");
            Log.d(TAG, url.getQuery());
            int responseCode = httpURLConnection.getResponseCode();
            switch (responseCode){
                case HTTP_OK:
                    return extractLock(httpURLConnection);
                case HTTP_NOT_FOUND:
                    return null;
                default:
                    handleError(responseCode);
                    return null;
            }
        }catch(Exception x){
            Exceptions.dump(x);
            return null;
        }
    }
    /** Iteration over a remote keyserver is not supported.
     * @return always null */
    @Override
    public Iterator<Lock> iterator() throws MinigmaException {
        return null;
    }
    /**
     * Retrieves a Lock from the server given a userID — typically an email
     * address — with the HKP parameters in the query string (see
     * {@link #getLock(Fingerprint)}).
     *
     * @param userID the identifier to search by
     * @return the Lock, or null if not found or on error
     */
    @Override
    public Lock getLock(String userID) throws MinigmaException {
        try {
            URL url = new URL(SCHEME, host, port,
                    pathOf(GET_FILE_PART) + "?op=get&options=mr&search=" + URLEncoder.encode(userID, "UTF-8"));
            HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
            httpURLConnection.setRequestMethod("GET");
            int responseCode = httpURLConnection.getResponseCode();
            switch (responseCode){
                case HTTP_OK:
                    return extractLock(httpURLConnection);
                case HTTP_NOT_FOUND:
                    break;
                default:
                    handleError(responseCode);
            }
        }catch (IOException iox){
            Exceptions.dump(iox); //was silently swallowed
        }
        return null;
    }
    /**
     * Reads the HTTP response body and extracts the first ASCII-armored
     * public key block from it.
     *
     * @return the parsed Lock, or null if the body held no parseable block
     */
    private Lock extractLock(HttpURLConnection httpURLConnection){
        try {
            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(httpURLConnection.getInputStream()));
            StringBuffer pgpKeysBlock = new StringBuffer();
            boolean pkey = false;
            String inputLine;
            while ((inputLine = bufferedReader.readLine()) != null) {
                if (inputLine.contains(ARMORED_PKEY_OPEN)){pkey=true;}
                // Newlines are re-inserted: the original concatenated the raw
                // lines, destroying the armor's line structure.
                if (pkey){pgpKeysBlock.append(inputLine).append('\n');}
                if (inputLine.contains(ARMORED_PKEY_CLOSE)){pkey=false;}
            }
            bufferedReader.close();
            return new Lock(pgpKeysBlock.toString());
        }catch (IOException iox){
            Exceptions.dump(iox);
        }catch (MinigmaException mx) {
            Exceptions.dump(mx);
        }
        return null;
    }
    @Override
    public boolean contains(String userID) {
        try {
            return (getLock(userID) instanceof Lock);
        }catch (Exception x){
            return false;
        }
    }
    /** Remote keyservers expose no numeric store id.
     * @return always 0 */
    @Override
    public long getStoreId() {
        return 0;
    }
    /** Not implemented for remote keyservers.
     * @return always null */
    @Override
    public String getUserID(Fingerprint fingerprint) {
        return null;
    }
    /** Not implemented for remote keyservers.
     * @return always null */
    @Override
    public String getUserID(long keyID) {
        return null;
    }
    /** Not implemented for remote keyservers.
     * @return always 0 */
    @Override
    public int getCount() {
        return 0;
    }
    public void setPort(int port){
        this.port=port;
    }
    public boolean isUseVKS(){return useVKS;}
    public boolean setUseVKS(boolean useVKS){
        this.useVKS=useVKS;
        return true;
    }
    /** java.net.URL inserts no separator between authority and file part, so
     * ensure the file part begins with a slash. */
    private static String pathOf(String filePart){
        return filePart.startsWith("/") ? filePart : "/" + filePart;
    }
    private void handleError(int response){
        Log.d(TAG, "HTTP error code:"+response);
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/utils/package-info.java
/**
* Utilities for handling Base64 streams etc.
*/
package uk.co.platosys.minigma.utils;<file_sep>/app/src/main/java/uk/co/platosys/minigma/BigBinary.java
package uk.co.platosys.minigma;
//import android.support.annotation.NonNull;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import java.math.BigInteger;
import java.text.ParseException;
import uk.co.platosys.minigma.utils.MinigmaUtils;
/**In crypto we use big binary numbers a lot. Often, the java.math class BigInteger will suffice,
* or we can just handle the underlying byte arrays. BigBinary is a wrapper for a byte[] and includes
* methods for instantiating from, and returning the underlying number as, a Base64 String which is often the most
* practical way of handling it.
*
* BigBinary was introduced in Minigma v0.2, replacing the earlier use of Strings as digests etc. There's
* thus less need for Base64 coding and decoding under the hood - because BigBinary is a byte array in an Object -
* and it avoids any confusion with cleartext semantic Strings.
*
*/
public class BigBinary implements Comparable{
    private byte[] bytes;
    /**
     * Constructs a BigBinary from a Base64 string as produced by toString().
     * @param string the Base64 text to decode
     * @throws ParseException if the string cannot be decoded
     */
    public BigBinary (String string)throws ParseException {
        this.bytes= MinigmaUtils.decode(string);
    }
    /**
     * Wraps the given byte array (which is NOT copied) as a BigBinary.
     * @param bytes the underlying bytes
     */
    public BigBinary(byte[] bytes){
        this.bytes=bytes;
    }
    /** @return the underlying bytes as a Base64 string */
    @Override
    public String toString(){
        return MinigmaUtils.encode(bytes, true);
    }
    /** @return the underlying byte array itself (not a copy) */
    public byte[] toByteArray(){
        return bytes;
    }
    /** @return the current length in bits (8 x byte length) */
    public int getBitlength(){
        return bytes.length*8;
    }
    /** @return the value as a signed, big-endian BigInteger */
    public BigInteger toBigInteger() {return new BigInteger(bytes);}
    //Append methods
    /**
     * Appends the given integer to this BigBinary. Note this is not the same as addition, it is basically
     * multiplying by 2^32 and then adding.
     * @param annex
     * @return this BigBinary with the annex appended.
     */
    public BigBinary append(int annex){
        return append(Ints.toByteArray(annex));
    }
    /**
     * Appends the given long (multiply by 2^64, then add).
     * @param annex
     * @return this BigBinary with the annex appended.
     */
    public BigBinary append (long annex){
        return append(Longs.toByteArray(annex));
    }
    /**
     * Appends the given bytes to the end of this BigBinary, mutating it in place.
     * @param annex the bytes to append
     * @return this BigBinary with the annex appended.
     */
    public BigBinary append(byte[] annex){
        byte[] newArray = new byte[bytes.length + annex.length];
        System.arraycopy(bytes, 0, newArray, 0, bytes.length);
        System.arraycopy(annex, 0, newArray, bytes.length, annex.length);
        this.bytes = newArray;
        return this;
    }
    //Detach methods
    /** The detach methods are the inverses of the append methods.
     * Removes the last {@code length} bytes from this BigBinary (mutating it)
     * and returns them.
     *
     * The original copied into the wrong indices of the detached array
     * (detached[i] with i starting at remains.length), which either overran
     * the array or left it zero-filled; the trailing bytes are now returned
     * correctly from index 0.
     * @param length how many bytes to detach
     * @return the detached trailing bytes
     * @throws ArrayIndexOutOfBoundsException if length exceeds the current size
     */
    public byte[] detach(int length) throws ArrayIndexOutOfBoundsException {
        if(length>bytes.length){throw new ArrayIndexOutOfBoundsException("attempting to detach too much");}
        byte[] detached = new byte[length];
        byte[] remains = new byte[bytes.length-length];
        System.arraycopy(bytes, 0, remains, 0, remains.length);
        System.arraycopy(bytes, remains.length, detached, 0, length);
        this.bytes=remains;
        return detached;
    }
    /** Detaches the last 8 bytes and returns them as a big-endian long. */
    public long detachLong() throws ArrayIndexOutOfBoundsException {
        return Longs.fromByteArray(detach(Longs.BYTES));
    }
    /** Detaches the last 4 bytes and returns them as a big-endian int. */
    public int detachInt() throws ArrayIndexOutOfBoundsException {
        return Ints.fromByteArray(detach(Ints.BYTES));
    }
    /**
     * Two BigBinaries are equal iff their underlying byte arrays have the
     * same length and the same contents.
     */
    @Override
    public boolean equals (Object object){
        if (!(object instanceof BigBinary)){
            return false;
        }
        byte[] theirs = ((BigBinary) object).toByteArray();
        if (theirs.length != bytes.length) {return false;}
        for (int i=0; i<bytes.length; i++){
            if (bytes[i]!=theirs[i]){return false;}
        }
        return true;
    }
    /**
     * Added because equals is overridden without it: equal BigBinaries must
     * share a hash code (same formula as java.util.Arrays.hashCode(byte[])).
     */
    @Override
    public int hashCode(){
        int result = 1;
        for (byte b : bytes) {
            result = 31 * result + b;
        }
        return result;
    }
    /**
     * Byte-wise comparison starting from the most significant end.
     * NOTE(review): bytes are compared as signed values, so e.g. 0x80 sorts
     * below 0x7f — confirm whether unsigned ordering was intended.
     * @throws ClassCastException if the lengths differ or object is not a BigBinary
     */
    @Override
    public int compareTo( Object object) {
        if (object instanceof BigBinary){
            BigBinary bigBinary = (BigBinary) object;
            byte[] theirs = bigBinary.toByteArray();
            if (theirs.length != bytes.length) {
                //this behaviour isn't quite right. But it will do for now.
                //TODO fixit.
                throw new ClassCastException("comparing unequal bitlength BigBinaries");
            }
            for (int i=0; i<bytes.length; i++){
                if (bytes[i]>theirs[i]) {return 1;}
                if (bytes[i]<theirs[i]) {return -1;}
            }
            return 0;//we've gone through the whole array and they're all equal.
        }else{
            throw new ClassCastException();
        }
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/PassPhraser.java
package uk.co.platosys.minigma;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.List;
import uk.co.platosys.effwords.Effwords;
import uk.co.platosys.minigma.exceptions.Exceptions;
/**Class to generate random-word passphrases.
*
* In Minigma, a Key needs a passphrase, which is a char array.
*
* PassPhraser generates random-word passphrases. There is some evidence that random-word
* passphrases are easier to remember for a similar level of entropy than random-character ones,
* even though the resulting passphrase is much longer.
*
* Random passphrases are known to be more secure than human-generated ones.
*
* You can specify alternative wordlists supported by Effwords. At the moment, Effwords only
* supports the three EFF lists which are English words. It would be good to support other languages
* though.
*
* */
public class PassPhraser {
    // NOTE(review): these two fields are never read or written by any method here.
    private File wordListFile;
    private List<String> wordList;
    public static final String WORDSEPARATOR = " ";
    public static final char WORDSEPARATOR_CHAR=' ';
    public static int LONGWORDLIST=Effwords.EFF_LONGLIST;
    public static int SHORTWORDLIST=Effwords.EFF_SHORTLIST;
    public static int ALTWORDLIST=Effwords.EFF_DEFAULTLIST;
    /**
     * Returns a random word passphrase of length words from the default
     * (long) word list, using a SecureRandom instance for entropy.
     * @param words the number of words wanted
     * @return the passphrase as a char array
     */
    public static char[] getPassPhrase(int words) {
        return getPassPhrase(LONGWORDLIST, words);
    }
    /**
     * Returns a random word passphrase of length words from the given
     * wordlist, using a SecureRandom instance for entropy.
     * @param wordList one of the Effwords wordlist constants
     * @param words the number of words wanted
     * @return the passphrase as a char array
     */
    public static char[] getPassPhrase(int wordList, int words) {
        SecureRandom secureRandom = new SecureRandom();
        StringBuilder buffer = new StringBuilder();
        for (int i = 0; i < words; i++) {
            try {
                // The original used nextInt() with no bound, which is negative
                // half the time; if Effwords rejects a negative index the
                // exception was silently swallowed below and the passphrase
                // came out short. Draw a non-negative value instead.
                int word = secureRandom.nextInt(Integer.MAX_VALUE);
                if (i > 0) {
                    buffer.append(WORDSEPARATOR);
                }
                buffer.append(Effwords.getWord(wordList, word));
            } catch (Exception x) {
                Exceptions.dump(x);
            }
        }
        return buffer.toString().toCharArray();
    }
    /**
     * Splits a passphrase char array into its constituent words at each
     * WORDSEPARATOR_CHAR. As in the original, consecutive or trailing
     * separators yield empty strings (the trailing buffer is always added).
     * @param passphrase the passphrase to split
     * @return the words, in order
     */
    public static List<String> toWordList(char[] passphrase) {
        List<String> words = new ArrayList<>();
        StringBuilder current = new StringBuilder();
        for (char ch : passphrase) {
            if (ch == WORDSEPARATOR_CHAR) {
                words.add(current.toString());
                current = new StringBuilder();
            } else {
                current.append(ch);
            }
        }
        words.add(current.toString());
        return words;
    }
    /**
     * returns a word from the given wordlist, where word is a random integer
     * such as one generated by the user throwing dice.
     * @param wordlist the Effwords wordlist selected.
     * @param word a random integer
     * @return the word, or null if the lookup failed (the failure is dumped)
     */
    public static String getWord(int wordlist, int word){
        try {
            return Effwords.getWord(wordlist, word);
        }catch(Exception x){
            Exceptions.dump(x);
            return null;
        }
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/Key.java
package uk.co.platosys.minigma;
/*
Copyright (C) 2017 <NAME> and Platosys
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL EDWARD BARROW OR
PLATOSYS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.spongycastle.bcpg.ArmoredInputStream;
import org.spongycastle.jce.provider.BouncyCastleProvider;
import org.spongycastle.openpgp.PGPException;
import org.spongycastle.openpgp.PGPSecretKey;
import org.spongycastle.openpgp.PGPSecretKeyRing;
import org.spongycastle.openpgp.PGPSecretKeyRingCollection;
import org.spongycastle.openpgp.PGPUtil;
import org.spongycastle.openpgp.operator.KeyFingerPrintCalculator;
import org.spongycastle.openpgp.operator.PBESecretKeyDecryptor;
import org.spongycastle.openpgp.operator.PBESecretKeyEncryptor;
import org.spongycastle.openpgp.operator.bc.BcKeyFingerprintCalculator;
import org.spongycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
import org.spongycastle.openpgp.operator.jcajce.JcePBESecretKeyEncryptorBuilder;
import uk.co.platosys.minigma.exceptions.BadPassphraseException;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.exceptions.MinigmaOtherException;
import uk.co.platosys.minigma.exceptions.NoDecryptionKeyException;
import uk.co.platosys.minigma.utils.Kidney;
import uk.co.platosys.minigma.utils.MinigmaOutputStream;
import uk.co.platosys.minigma.utils.MinigmaUtils;
/**
* In Minigma, a Key is the object used to unlock something that has been locked
* with a corresponding Lock. Minigma Keys and Locks correspond to private keys and
* public keys respectively in other asymmetric crypto systems.
*
* Minigma is a fairly lightweight wrapper to OpenPGP, so a Minigma Key can be instantiated
* from OpenPGP private key material.
*
* HOWEVER: OpenPGP private key material does not include the UserID. The KeyID or fingerprint can be deduced from the
* key material, but not the relevant userID (which is, generally, an email address). Therefore, you need to look up the
* associated userID from a keyring, or in Minigma, a Lockstore, using lockstore.getUserId(long keyid) every time you use the
* key for signing.
* Minigma therefore provides a set of overloaded constructors which take a LockStore argument which should be used for creating
* signing keys.
*
* A Key always needs a passphrase.
* @author edward
*
*
*
*/
public class Key {
    private PGPSecretKey signingKey;
    private PGPSecretKey masterKey;
    private PGPSecretKeyRingCollection secretKeyRingCollection;
    private Fingerprint fingerprint;
    private String userID = "";
    // Backing file; null when the Key was built from a stream or a bare
    // ring collection, in which case changePassphrase cannot persist.
    private File file;

    /**
     * Instantiates a Key directly from a BouncyCastle PGPSecretKeyRingCollection.
     * @param secretKeyRingCollection the secret key ring collection to wrap
     */
    protected Key(PGPSecretKeyRingCollection secretKeyRingCollection) throws Exception {
        this.secretKeyRingCollection = secretKeyRingCollection;
        init(null);
    }

    /**
     * @param keyFile a java.io.File object pointing to a text file of OpenPGP key material.
     * Keys instantiated using this constructor will not be able to return a meaningful userID.
     * @throws MinigmaException if the file cannot be read or contains no usable key material
     */
    public Key(File keyFile) throws MinigmaException {
        try {
            this.file = keyFile;
            FileInputStream fileStream = new FileInputStream(keyFile);
            InputStream instream = new ArmoredInputStream(fileStream);
            instream = PGPUtil.getDecoderStream(instream);
            KeyFingerPrintCalculator kfpc = new BcKeyFingerprintCalculator();
            this.secretKeyRingCollection = new PGPSecretKeyRingCollection(instream, kfpc);
            init(null);
            instream.close();
            fileStream.close();
        } catch (Exception x) {
            throw new MinigmaException("problem loading Key from file", x);
        }
    }

    /**
     * Keys can also be instantiated from an InputStream rather than a File.
     * A Key created this way has no backing file, so
     * {@link #changePassphrase(char[], char[])} cannot persist its result.
     * @param inputStream a stream of armored OpenPGP secret key material
     * @throws MinigmaException if the stream cannot be read or parsed
     */
    @Deprecated
    public Key(InputStream inputStream) throws MinigmaException {
        try {
            InputStream instream = new ArmoredInputStream(inputStream);
            instream = PGPUtil.getDecoderStream(instream);
            KeyFingerPrintCalculator kfpc = new BcKeyFingerprintCalculator();
            this.secretKeyRingCollection = new PGPSecretKeyRingCollection(instream, kfpc);
            init(null);
            instream.close();
            inputStream.close();
        } catch (Exception x) {
            throw new MinigmaException("problem loading Key from input stream", x);
        }
    }

    /**
     * @param keyFile a java.io.File object pointing to a text file of OpenPGP key material.
     * This constructor takes a LockStore argument, to enable lookup of a meaningful userID.
     * @throws MinigmaException if the file cannot be read or contains no usable key material
     */
    public Key(File keyFile, LockStore lockStore) throws MinigmaException {
        try {
            this.file = keyFile;
            FileInputStream fileStream = new FileInputStream(keyFile);
            InputStream instream = new ArmoredInputStream(fileStream);
            instream = PGPUtil.getDecoderStream(instream);
            KeyFingerPrintCalculator kfpc = new BcKeyFingerprintCalculator();
            this.secretKeyRingCollection = new PGPSecretKeyRingCollection(instream, kfpc);
            init(lockStore);
            instream.close();
            fileStream.close();
        } catch (Exception x) {
            throw new MinigmaException("problem loading Key from file", x);
        }
    }

    /**
     * @param inputStream a stream of armored OpenPGP secret key material
     * @param lockStore used to look up a meaningful userID for this Key
     * @throws MinigmaException if the stream cannot be read or parsed
     */
    @Deprecated
    public Key(InputStream inputStream, LockStore lockStore) throws MinigmaException {
        try {
            InputStream instream = new ArmoredInputStream(inputStream);
            instream = PGPUtil.getDecoderStream(instream);
            KeyFingerPrintCalculator kfpc = new BcKeyFingerprintCalculator();
            this.secretKeyRingCollection = new PGPSecretKeyRingCollection(instream, kfpc);
            init(lockStore);
            instream.close();
            inputStream.close();
        } catch (Exception x) {
            throw new MinigmaException("problem loading Key from input stream", x);
        }
    }

    /**
     * Scans the ring collection for the first signing key (which fixes this
     * Key's fingerprint and, if a LockStore is supplied, its userID) and for
     * the master key.
     * (The original wrapped this body in a try/catch that immediately
     * rethrew — that no-op handler has been removed.)
     * @param lockStore may be null, in which case no userID lookup is attempted
     * @throws Exception if no signing key can be found
     */
    private void init(LockStore lockStore) throws Exception {
        signingKey = null;
        masterKey = null;
        Iterator<PGPSecretKeyRing> ringIterator = secretKeyRingCollection.getKeyRings();
        while ((signingKey == null) && ringIterator.hasNext()) {
            PGPSecretKeyRing pgpSecretKeyRing = ringIterator.next();
            Iterator<PGPSecretKey> keyIterator = pgpSecretKeyRing.getSecretKeys();
            while ((signingKey == null) && keyIterator.hasNext()) {
                PGPSecretKey key = keyIterator.next();
                if (key.isSigningKey()) {
                    signingKey = key;
                    fingerprint = new Fingerprint(signingKey.getPublicKey().getFingerprint());
                    if (lockStore != null) {
                        this.userID = lockStore.getUserID(fingerprint);
                    }
                } else if (key.isMasterKey()) {
                    masterKey = key;
                }
            }
        }
        if (signingKey == null) {
            throw new IllegalArgumentException("Can't find signing key in key ring.");
        }
    }

    /** @return the Fingerprint object associated with this Key */
    public Fingerprint getFingerprint() {
        return fingerprint;
    }

    /** @return the 64-bit keyID associated with this Key */
    public long getKeyID() {
        return fingerprint.getKeyID();
    }

    /** @return the primary userID associated with this Key, or the empty string */
    public String getUserID() {
        return userID;
    }

    /** @return the underlying PGP signing key */
    protected PGPSecretKey getSigningKey() {
        return signingKey;
    }

    /** @return the underlying PGP master key, if one was found */
    protected PGPSecretKey getMasterKey() {
        return masterKey;
    }

    /**
     * Changes the passphrase of this Key and rewrites the backing key file.
     * @param oldpassphrase the current passphrase
     * @param newpassphrase the replacement passphrase
     * @throws BadPassphraseException if oldpassphrase does not unlock this Key
     * @throws MinigmaOtherException if the re-encrypted key cannot be built or persisted
     */
    public void changePassphrase(char[] oldpassphrase, char[] newpassphrase) throws MinigmaOtherException, BadPassphraseException {
        PBESecretKeyDecryptor decryptor;
        PBESecretKeyEncryptor encryptor;
        List<PGPSecretKeyRing> newkeys = new ArrayList<>();
        try {
            decryptor = new JcePBESecretKeyDecryptorBuilder().setProvider(BouncyCastleProvider.PROVIDER_NAME).build(oldpassphrase);
            encryptor = new JcePBESecretKeyEncryptorBuilder(Algorithms.SYMMETRIC_ALGORITHM).setProvider(BouncyCastleProvider.PROVIDER_NAME).build(newpassphrase);
        } catch (PGPException px) {
            throw new MinigmaOtherException("error creating passphrase encryptor/decryptor", px);
        }
        Iterator<PGPSecretKeyRing> ringIterator = secretKeyRingCollection.iterator();
        while (ringIterator.hasNext()) {
            PGPSecretKeyRing secretKeyRing = ringIterator.next();
            try {
                newkeys.add(PGPSecretKeyRing.copyWithNewPassword(secretKeyRing, decryptor, encryptor));
            } catch (PGPException px) {
                // copyWithNewPassword fails when the old passphrase cannot decrypt the ring.
                throw new BadPassphraseException("probably", px);
            }
        }
        if (file == null) {
            // Keys built from an InputStream or a bare ring collection have no
            // backing file; previously this fell through to a NullPointerException.
            throw new MinigmaOtherException("cannot save re-encrypted key: this Key has no backing file");
        }
        try {
            MinigmaOutputStream keyOut = new MinigmaOutputStream(new FileOutputStream(file));
            secretKeyRingCollection = new PGPSecretKeyRingCollection(newkeys);
            secretKeyRingCollection.encode(keyOut);
            keyOut.flush();
            keyOut.close();
        } catch (IOException iox) {
            // Previously an empty TODO catch: a swallowed failure here would
            // leave the on-disk key encrypted with the OLD passphrase while the
            // in-memory state had already changed.
            throw new MinigmaOtherException("error writing re-encrypted key to file", iox);
        } catch (PGPException px) {
            // Previously an empty TODO catch.
            throw new MinigmaOtherException("error rebuilding secret key ring collection", px);
        }
    }

    /**
     * Returns a BouncyCastle PGPSecretKey decryption key, to be used to
     * decrypt/unlock something.
     * @param keyID the ID of the required decryption key
     * @return the matching secret key
     * @throws NoDecryptionKeyException if this Key holds no key matching keyID
     */
    protected PGPSecretKey getDecryptionKey(long keyID) throws MinigmaException, NoDecryptionKeyException {
        try {
            if (secretKeyRingCollection.contains(keyID)) {
                return secretKeyRingCollection.getSecretKey(keyID);
            } else {
                throw new NoDecryptionKeyException("Key does not decrypt key with id:" + Kidney.toString(keyID));
            }
        } catch (NoDecryptionKeyException ndke) {
            throw ndke;
        } catch (Exception e) {
            throw new MinigmaException("Key-getDecryptionKey exception", e);
        }
    }

    /**
     * @param toBeSigned the binary data to be signed, as a byte array
     * @param passphrase this Key's passphrase
     * @return a Signature object
     */
    public Signature sign(byte[] toBeSigned, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        BigBinary digest = Digester.digest(toBeSigned);
        return SignatureEngine.sign(digest, this, passphrase);
    }

    /**
     * @param toBeSigned the BigBinary to be signed
     * @param passphrase this Key's passphrase
     * @return a Signature object
     */
    public Signature sign(BigBinary toBeSigned, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        BigBinary digest = Digester.digest(toBeSigned);
        return SignatureEngine.sign(digest, this, passphrase);
    }

    /**
     * @param toBeSigned the String to be signed
     * @param passphrase this Key's passphrase
     * @return a Signature object
     */
    public Signature sign(String toBeSigned, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        BigBinary digest = Digester.digest(toBeSigned);
        return SignatureEngine.sign(digest, this, passphrase);
    }

    /**
     * @param toBeSigned the binary data to be signed
     * @param notations a List of Notation objects to be included in this signature (as PGPNotationData)
     * @param passphrase this Key's passphrase
     * @return a Signature object
     */
    public Signature sign(byte[] toBeSigned, List<Notation> notations, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        BigBinary digest = Digester.digest(toBeSigned);
        return SignatureEngine.sign(digest, this, notations, passphrase);
    }

    /**
     * @param toBeSigned the String to be signed
     * @param notations a List of Notation objects to be included in this signature (as PGPNotationData)
     * @param passphrase this Key's passphrase
     * @return a Signature object
     */
    public Signature sign(String toBeSigned, List<Notation> notations, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        BigBinary digest = Digester.digest(toBeSigned);
        return SignatureEngine.sign(digest, this, notations, passphrase);
    }

    /**
     * Takes ciphertext (Base64-encoded binary data) and returns the cleartext.
     * @param ciphertext to be unlocked
     * @param passphrase this Key's passphrase
     * @return a cleartext String
     */
    public String unlock(String ciphertext, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        return unlockAsString(MinigmaUtils.decode(ciphertext), passphrase);
    }

    /**
     * Decrypts the given bytes and decodes the result as a UTF-8 String.
     */
    public String unlockAsString(byte[] bytes, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        try {
            return new String(unlockAsBytes(bytes, passphrase), "UTF-8");
        } catch (UnsupportedEncodingException uex) {
            // "UTF-8" is always supported, so this should never be thrown.
            throw new MinigmaOtherException("coding unsupported", uex);
        }
    }

    /**
     * Decrypts the given ciphertext bytes with this Key.
     */
    public byte[] unlockAsBytes(byte[] bytes, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        return CryptoEngine.decrypt(bais, this, passphrase);
    }

    /**
     * Decrypts the given BigBinary ciphertext with this Key.
     */
    public BigBinary unlock(BigBinary cipherbytes, char[] passphrase) throws BadPassphraseException, MinigmaOtherException {
        return new BigBinary(unlockAsBytes(cipherbytes.toByteArray(), passphrase));
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/Digester.java
/*
* Copyright <NAME> and Platosys.
* This software is licensed under the Free Software Foundation's
General Public Licence, version 2 ("the GPL").
The full terms of the licence can be found online at http://www.fsf.org/
In brief, you are free to copy and to modify the code in any way you wish, but if you
publish the modified code you may only do so under the GPL, and (if asked) you must
supply a copy of the source code alongside any compiled code.
Platosys software can also be licensed on negotiated terms if the GPL is inappropriate.
For further information about this, please contact <EMAIL>
*/
package uk.co.platosys.minigma;
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
import java.io.IOException;
import java.nio.charset.Charset;
import net.jpountz.xxhash.XXHash64;
import net.jpountz.xxhash.XXHashFactory;
import org.spongycastle.crypto.digests.KeccakDigest;
import org.spongycastle.crypto.digests.SHA3Digest;
import org.spongycastle.openpgp.PGPSignature;
import uk.co.platosys.minigma.exceptions.Exceptions;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.exceptions.MinigmaOtherException;
import uk.co.platosys.minigma.utils.MinigmaUtils;
/**
* a class with static methods for digesting Strings, BigBinaries and
* byte arrays.
* @author edward
*/
public class Digester {
    // Log tag identifying this class in diagnostic output.
    private static String TAG = "Digester";

    /**
     * Returns a BigBinary digest of the supplied String (UTF-8 encoded).
     */
    public static BigBinary digest(String string) throws MinigmaOtherException {
        return digest(string.getBytes(Charset.forName("UTF-8")));
    }

    /**
     * Returns a BigBinary digest of the supplied BigBinary.
     * @param bigBinary the data to digest
     */
    public static BigBinary digest(BigBinary bigBinary) throws MinigmaOtherException {
        return digest(bigBinary.toByteArray());
    }

    /**
     * Takes a byte array and returns a BigBinary digest.
     * Uses SHA3-256 as the digest algorithm.
     */
    public static BigBinary digest(byte[] bytes) throws MinigmaOtherException {
        try {
            KeccakDigest digest = new SHA3Digest(256);
            // Bulk update replaces the original byte-by-byte loop; same result.
            digest.update(bytes, 0, bytes.length);
            byte[] digested = new byte[digest.getDigestSize()];
            digest.doFinal(digested, 0);
            return new BigBinary(digested);
        } catch (Exception e) {
            throw new MinigmaOtherException("error making digest", e);
        }
    }

    /**
     * Returns a short String which is a non-cryptographic (xxHash64) hash of
     * the supplied byte array. The short hashes so obtained are used as
     * identifiers and filenames for Signatures.
     *
     * BUGFIX: the original called xxHash64.hash(bytes, 0, 0, 0), passing 0 as
     * the LENGTH argument, so zero bytes were hashed and every input produced
     * the identical digest. The length is now bytes.length. Note that digests
     * produced after this fix differ from previously stored ones.
     *
     * @param bytes the data to hash
     * @return the encoded short digest, or null on failure (logged)
     */
    public static String shortDigest(byte[] bytes) {
        try {
            XXHashFactory xxHashFactory = XXHashFactory.fastestInstance();
            XXHash64 xxHash64 = xxHashFactory.hash64();
            // hash(buf, off, len, seed) — seed 0 kept from the original.
            long longHash = xxHash64.hash(bytes, 0, bytes.length, 0);
            return (MinigmaUtils.encode(longHash));
        } catch (Exception e) {
            Exceptions.dump(e);
            return null;
        }
    }

    /**
     * Short digest of a PGPSignature's encoded form; null on I/O failure (logged).
     */
    public static String shortDigest(PGPSignature signature) {
        try {
            return shortDigest(signature.getEncoded());
        } catch (IOException iox) {
            Exceptions.dump(iox);
            return null;
        }
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/exceptions/BadPassphraseException.java
package uk.co.platosys.minigma.exceptions;
/**
 * Thrown when a supplied passphrase fails to unlock a Key.
 */
public class BadPassphraseException extends MinigmaException {

    public BadPassphraseException(String msg) {
        super(msg);
    }

    public BadPassphraseException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/BaseSignature.java
package uk.co.platosys.minigma;
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
import org.spongycastle.bcpg.ArmoredInputStream;
import org.spongycastle.bcpg.ArmoredOutputStream;
import org.spongycastle.bcpg.sig.NotationData;
import org.spongycastle.openpgp.*;
import org.spongycastle.openpgp.jcajce.JcaPGPObjectFactory;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.utils.MinigmaOutputStream;
import uk.co.platosys.minigma.utils.MinigmaUtils;
import java.io.*;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Abstract base class wrapping PGPSignature objects
*/
public abstract class BaseSignature {
    protected PGPSignature pgpSignature;
    // Non-cryptographic short digest of the signature, used as an identifier/filename.
    protected String shortDigest;

    /**
     * Instantiates a Minigma Signature object from a BouncyCastle PGPSignature object.
     * @param pgpSignature the underlying signature
     */
    protected BaseSignature(PGPSignature pgpSignature) {
        this.pgpSignature = pgpSignature;
        this.shortDigest = Digester.shortDigest(pgpSignature);
    }

    /**
     * Instantiates a Minigma Signature object given a suitably-encoded String.
     * @param string a Base64-encoded String
     * @throws ParseException if the supplied String contains wrong characters.
     */
    protected BaseSignature(String string) throws ParseException {
        this(new BigBinary(string));
    }

    /**
     * Instantiates a Minigma Signature object given a BigBinary object.
     * @param bigBinary the signature as a BigBinary object.
     */
    protected BaseSignature(BigBinary bigBinary) {
        this(new ByteArrayInputStream(bigBinary.toByteArray()));
    }

    /**
     * Instantiates a Minigma Signature object by parsing an (optionally
     * compressed) armored OpenPGP signature stream.
     */
    protected BaseSignature(InputStream inputStream) {
        PGPSignatureList signatureList;
        try {
            ArmoredInputStream armoredInputStream = new ArmoredInputStream(inputStream);
            JcaPGPObjectFactory jcaPGPObjectFactory = new JcaPGPObjectFactory(PGPUtil.getDecoderStream(armoredInputStream));
            Object object = jcaPGPObjectFactory.nextObject();
            if (object instanceof PGPCompressedData) {
                PGPCompressedData pgpCompressedData = (PGPCompressedData) object;
                jcaPGPObjectFactory = new JcaPGPObjectFactory(pgpCompressedData.getDataStream());
                Object object2 = jcaPGPObjectFactory.nextObject();
                if (object2 instanceof PGPSignatureList) {
                    signatureList = (PGPSignatureList) object2;
                } else {
                    throw new MinigmaException("unexpected object type found in compressed data signature stream");
                }
            } else if (object instanceof PGPSignatureList) {
                signatureList = (PGPSignatureList) object;
            } else {
                throw new MinigmaException("unexpected object type found in uncompressed signature stream");
            }
            this.pgpSignature = signatureList.get(0);
            this.shortDigest = Digester.shortDigest(pgpSignature);
        } catch (Exception x) {
            // NOTE(review): parse failures are swallowed here, leaving pgpSignature
            // null and deferring the error to a later NullPointerException.
            // Preserved for compatibility, but this deserves explicit handling.
        }
    }

    /**
     * Returns the Signature as a String. The String representations don't have PGP Ascii Armor
     * so aren't fully interoperable; if you need Ascii Armor, use the following method with armored=true.
     */
    public String encodeToString() {
        return encodeToString(false);
    }

    public String encodeToString(boolean armored) {
        return MinigmaUtils.encode(encodeToBytes(armored));
    }

    protected byte[] encodeToBytes(boolean armored) {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        try {
            encodeToStream(byteArrayOutputStream, armored);
        } catch (Exception x) {
            // NOTE(review): swallowed; on failure an empty byte array is returned.
        }
        byte[] signatureBytes = byteArrayOutputStream.toByteArray();
        return signatureBytes;
    }

    protected byte[] getBytes() {
        return encodeToBytes(false);
    }

    protected BigBinary getBigBinary() {
        return new BigBinary(encodeToBytes(false));
    }

    /**
     * Writes the signature to the given output stream, with or without PGP Ascii Armor
     * headers/footers. Use armored=false if interoperability isn't a concern.
     * Note: the stream is closed by this call.
     *
     * @param outputStream the destination stream
     * @param armored whether to apply PGP Ascii Armor
     * @throws IOException on write failure
     */
    public void encodeToStream(OutputStream outputStream, boolean armored) throws IOException {
        if (armored) {
            encodeToStream(outputStream);
        } else {
            pgpSignature.encode(outputStream);
            outputStream.flush();
            outputStream.close();
        }
    }

    /**
     * Writes the signature to the given output stream in PGP AsciiArmored format.
     * This maximises interoperability with other OpenPGP implementations.
     * @param outputStream the destination stream (closed by this call)
     * @throws IOException on write failure
     */
    public void encodeToStream(OutputStream outputStream) throws IOException {
        ArmoredOutputStream armoredOutputStream = new MinigmaOutputStream(outputStream);
        pgpSignature.encode(armoredOutputStream);
        armoredOutputStream.flush();
        armoredOutputStream.close();
    }

    /**
     * Writes the signature to the given file in PGP Ascii Armored format.
     * @param file the destination file
     * @throws IOException on write failure
     */
    public void encodeToFile(File file) throws IOException {
        FileOutputStream fileOutputStream = new FileOutputStream(file);
        encodeToStream(fileOutputStream);
        fileOutputStream.flush();
        fileOutputStream.close();
    }

    /**
     * The short digest is a Minigma extension to the OpenPGP standard and returns
     * a non-cryptographic short digest which can be used, for example, as a filename for the
     * signature itself. It is a digest of the signature, not of the signed material; it is
     * neither guaranteed irreversible nor collision-free, so use it only where the
     * consequences of a collision or reversal are manageable.
     * @return a short digest of the Signature object.
     */
    public String getShortDigest() {
        return shortDigest;
    }

    /**
     * Returns the ID of the key that made this signature, as a long. Note that this is a
     * 64-bit keyID, not a 160-bit fingerprint, so collisions are somewhat less unlikely;
     * don't associate a person with a signature until it verifies against the Lock
     * this ID resolves to.
     * @return the signing keyID
     */
    public long getKeyID() {
        return pgpSignature.getKeyID();
    }

    @Override
    public boolean equals(Object object) {
        if (object instanceof BaseSignature) {
            BaseSignature baseSignature = (BaseSignature) object;
            return Arrays.equals(getBytes(), baseSignature.getBytes());
        } else {
            return false;
        }
    }

    /**
     * hashCode added to honour the equals/hashCode contract (the original
     * overrode equals only): signatures with equal encoded bytes hash equally,
     * so instances behave correctly in hash-based collections.
     */
    @Override
    public int hashCode() {
        return Arrays.hashCode(getBytes());
    }

    /**
     * OpenPGP allows Signatures to carry NotationData, an extensible, user-defined
     * vehicle for attaching additional information to a signature. Minigma uses
     * name-value pairs for this (binary NotationData is not currently supported).
     * @return List of Notation objects.
     */
    public List<Notation> getNotations() {
        List<Notation> notations = new ArrayList<>();
        PGPSignatureSubpacketVector notationVector = pgpSignature.getHashedSubPackets();
        NotationData[] notationData = notationVector.getNotationDataOccurrences();
        for (NotationData notationD : notationData) {
            Notation notation = new Notation(notationD.getNotationName(), notationD.getNotationValue());
            notation.setCritical(notationD.isCritical());
            notation.setHumanReadable(notationD.isHumanReadable());
            notations.add(notation);
        }
        return notations;
    }

    protected PGPSignature getPgpSignature() {
        return pgpSignature;
    }

    public int getHashAlgorithm() {
        return pgpSignature.getHashAlgorithm();
    }

    public int getKeyAlgorithm() {
        return pgpSignature.getKeyAlgorithm();
    }

    protected int getSignatureType() {
        return pgpSignature.getSignatureType();
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/exceptions/MinigmaOtherException.java
package uk.co.platosys.minigma.exceptions;
/**
 * Catch-all Minigma exception, wrapping unexpected failures from the
 * underlying OpenPGP machinery.
 */
public class MinigmaOtherException extends MinigmaException {

    public MinigmaOtherException(String msg) {
        super(msg);
    }

    public MinigmaOtherException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/package-info.java
/**
* Minigma is an intuitive OpenPGP API, using BouncyCastle under the skin. It is not fully OpenPGP
* compatible (it can't handle all OpenPGP's algorithms) but a compliant OpenPGP app should be able to
* decrypt Minigma-encrypted cyphertext.
*/
package uk.co.platosys.minigma;
<file_sep>/app/src/main/java/uk/co/platosys/minigma/exceptions/InvalidXMLException.java
package uk.co.platosys.minigma.exceptions;
/**
 * Thrown when invalid or badly-formed XML is produced by the XMLUtils class.
 */
public class InvalidXMLException extends Exception {

    public InvalidXMLException(String msg) {
        super(msg);
    }

    public InvalidXMLException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/exceptions/package-info.java
/**
* Exceptions thrown by the Minigma packages
*/
package uk.co.platosys.minigma.exceptions;<file_sep>/app/src/main/java/uk/co/platosys/minigma/CryptoEngine.java
package uk.co.platosys.minigma;
/* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.*/
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
import org.spongycastle.bcpg.ArmoredOutputStream;
import org.spongycastle.jce.provider.BouncyCastleProvider;
import org.spongycastle.openpgp.*;
import org.spongycastle.openpgp.jcajce.JcaPGPObjectFactory;
import org.spongycastle.openpgp.operator.PGPDataEncryptorBuilder;
import org.spongycastle.openpgp.operator.PGPKeyEncryptionMethodGenerator;
import org.spongycastle.openpgp.operator.PublicKeyDataDecryptorFactory;
import org.spongycastle.openpgp.operator.bc.BcPublicKeyDataDecryptorFactory;
import org.spongycastle.openpgp.operator.jcajce.JcaKeyFingerprintCalculator;
import org.spongycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
import org.spongycastle.openpgp.operator.jcajce.JcePGPDataEncryptorBuilder;
import org.spongycastle.openpgp.operator.jcajce.JcePublicKeyKeyEncryptionMethodGenerator;
import uk.co.platosys.minigma.Key;
import uk.co.platosys.minigma.Lock;
import uk.co.platosys.minigma.Minigma;
import uk.co.platosys.minigma.exceptions.BadPassphraseException;
import uk.co.platosys.minigma.exceptions.DecryptionException;
import uk.co.platosys.minigma.exceptions.Exceptions;
import uk.co.platosys.minigma.exceptions.MinigmaException;
import uk.co.platosys.minigma.exceptions.MinigmaOtherException;
import uk.co.platosys.minigma.exceptions.NoDecryptionKeyException;
import uk.co.platosys.minigma.utils.Kidney;
import uk.co.platosys.minigma.utils.MinigmaUtils;
/**
* this class holds the static decrypt and encrypt methods
*
* @author edward
*/
public class CryptoEngine {
    // Log tag identifying this class in diagnostic output.
    private static String TAG ="CryptoEngine";
/**
 * Decrypts an InputStream to a byte array.
 *
 * BUGFIX: in the original, when pgpObjectFactory.nextObject() returned null
 * (end of stream) the null object still fell through to the else branch and
 * object.getClass() threw a NullPointerException. The loop now terminates
 * cleanly on null and throws the intended "couldn't find encrypted data list".
 *
 * @param inputStream the encrypted stream
 * @param key the Key to unlock it with
 * @param passphrase the Key's passphrase
 * @return the decrypted data
 * @throws BadPassphraseException if the passphrase doesn't unlock the Key
 * @throws MinigmaOtherException on any other failure
 */
public static byte[] decrypt(InputStream inputStream, Key key, char[] passphrase)
        throws MinigmaOtherException,
        BadPassphraseException {
    try {
        InputStream decoderStream = PGPUtil.getDecoderStream(inputStream);
        PGPObjectFactory pgpObjectFactory = new PGPObjectFactory(decoderStream, new JcaKeyFingerprintCalculator());
        Object object = pgpObjectFactory.nextObject();
        while (object != null) {
            if (object instanceof PGPEncryptedDataList) {
                PGPEncryptedDataList pgpEncryptedDataList = (PGPEncryptedDataList) object;
                PGPCompressedData compressedData = decrypt(pgpEncryptedDataList, key, passphrase);
                return decompress(compressedData);
            } else {
                // Diagnostic output for unexpected packet types, kept from the original.
                System.out.println(object.getClass().getName());
            }
            object = pgpObjectFactory.nextObject();
        }
        throw new MinigmaException("couldn't find encrypted data list");
    } catch (BadPassphraseException bpe) {
        throw bpe;
    } catch (Exception e) {
        Exceptions.dump(e);
        throw new MinigmaOtherException("error reading encrypted data list", e);
    }
}
/**
* An encryptedDataList will contain one or more blocks of encrypted data, usually the same literal data encrypted
* to one or more public keys. Typically, the provided Key will only be able to unlock one of them.
* @param pgpEncryptedDataList
* @param key
* @param passphrase
* @return
* @throws MinigmaException
* @throws DecryptionException
*/
    private static PGPCompressedData decrypt(PGPEncryptedDataList pgpEncryptedDataList, Key key, char[] passphrase) throws MinigmaOtherException, BadPassphraseException, DecryptionException {
        PGPPrivateKey privateKey = null;
        PGPPublicKeyEncryptedData pgpPublicKeyEncryptedData = null;
        try {
            Iterator<PGPPublicKeyEncryptedData> it = pgpEncryptedDataList.getEncryptedDataObjects();
            // One PBE decryptor builder is reused for every candidate secret key.
            JcePBESecretKeyDecryptorBuilder keyDecryptorBuilder = new JcePBESecretKeyDecryptorBuilder();
            keyDecryptorBuilder.setProvider(BouncyCastleProvider.PROVIDER_NAME);
            // size and count only feed the commented-out diagnostics below.
            int size = pgpEncryptedDataList.size();
            int count = 0;
            // Scan each encrypted block until we find one whose keyID the supplied Key can unlock.
            while (it.hasNext() && privateKey == null) {
                pgpPublicKeyEncryptedData = it.next();
                count++;
                //System.out.println();
                long keyID = pgpPublicKeyEncryptedData.getKeyID();
                //System.out.println("EncryptedDataBlock was encrypted with keyID "+Kidney.toString(keyID));
                try {
                    // Ask the Key for a secret key matching this block's keyID
                    // (NoDecryptionKeyException, caught below, signals "no match").
                    PGPSecretKey secretKey = key.getDecryptionKey(keyID);
                    if (secretKey.getKeyID() == keyID) {
                        try {
                            // Unlock the secret key with the passphrase; a PGPException here
                            // is treated as a wrong passphrase.
                            privateKey = key.getDecryptionKey(keyID).extractPrivateKey(keyDecryptorBuilder.build(passphrase));
                            //System.out.println("Key match for "+Kidney.toString(keyID));
                        } catch (PGPException pgpException) {
                            throw new BadPassphraseException("bad passphrase", pgpException);
                        }
                    }
                } catch (BadPassphraseException bpe) {
                    // rethrow unwrapped so the outer handler can propagate it unchanged
                    throw bpe;
                } catch
                (NoDecryptionKeyException ndke) {
                    //System.out.println("no decryption key available for keyID "+Kidney.toString(keyID));
                    //we don't need to worry about this exception here.
                } catch (Exception x) {
                    System.out.println("oops exception in decrypt while loop");
                    Exceptions.dump(x);
                    throw new MinigmaException("CryptoEngine: getEncryptedDataObjects - unexpected exception", x);
                }
            }
            // No block matched any of our decryption keys.
            if (privateKey == null) {
                //System.out.println("Done "+ count + "keys of "+size+" altogether, still no private key");
                throw new DecryptionException("CryptoEngine: decryption key doesn't fit any of the locks");
            }
        }catch(BadPassphraseException bpe){
            throw bpe;
        }catch (DecryptionException dx) { //don't think this is ever thrown here
            Exceptions.dump(dx);
            throw dx;
        }catch (Exception e) {
            Exceptions.dump(e);
            throw new MinigmaOtherException("A problem arose during decryption", e);
        }
        //so we now have an encrypted data object and a key that fits it...
        try {
            // Decrypt the matched block's data stream and return the first (compressed) object in it.
            PublicKeyDataDecryptorFactory dataDecryptorFactory = new BcPublicKeyDataDecryptorFactory(privateKey);
            InputStream decryptedStream = pgpPublicKeyEncryptedData.getDataStream(dataDecryptorFactory);
            JcaPGPObjectFactory compressedFactory = new JcaPGPObjectFactory(decryptedStream);
            return (PGPCompressedData) compressedFactory.nextObject();
        } catch (Exception e) {
            Exceptions.dump(e);
            throw new MinigmaOtherException("Minigma-unLock() 3: error reading encrypted data stream", e);
        }
    }
private static byte[] decompress (PGPCompressedData clearCompressedData) throws MinigmaOtherException{
PGPLiteralData literalData=null;
try {
InputStream inputStream = clearCompressedData.getDataStream();
JcaPGPObjectFactory decompressedFactory = new JcaPGPObjectFactory(inputStream);
boolean moreObjects=true;
while ((literalData==null)&&(moreObjects)) {
Object decompressedObject = decompressedFactory.nextObject();
if (decompressedObject==null){moreObjects=false;}
if (decompressedObject instanceof PGPLiteralData) {
literalData = (PGPLiteralData) decompressedObject;
}
}
return MinigmaUtils.readStream(literalData.getDataStream());
}catch(Exception e){
Exceptions.dump(e);
throw new MinigmaOtherException( "Minigma-unLock() 4: error getting decompressed object", e );
}
}
/**
* Returns a byte array of encrypted data. The resultant binary data must be base64 encoded
* for transport by text systems such as xml.
* @param compressedData
* @param lock
* @return
* @throws MinigmaException
*/
@SuppressWarnings("resource")
public static byte[] encrypt (byte[] compressedData, Lock lock) throws MinigmaException{
Minigma.initialiseProvider();
PGPEncryptedDataGenerator encryptedDataGenerator=configureGenerator(Algorithms.SYMMETRIC_ALGORITHM,lock);
ByteArrayOutputStream encryptedByteStream = new ByteArrayOutputStream();
OutputStream outputStream;
try {
outputStream = encryptedDataGenerator.open(encryptedByteStream, compressedData.length);
}catch(PGPException pgpe) {
Exceptions.dump(pgpe);
throw new MinigmaException("Error generating cypher: have you installed the unlimited strength policy files?", pgpe);
}catch(Exception e){
Exceptions.dump(e);
throw new MinigmaException("Error generating cypher: refer to stack trace for details", e);
}try{
outputStream.write(compressedData);
outputStream.flush();
outputStream.close();
byte[] encryptedBytes = encryptedByteStream.toByteArray();
encryptedDataGenerator.close();
return encryptedBytes;
}catch(Exception e){
Exceptions.dump(e);
throw new MinigmaException("Cryptoengine-encrypt: ", e);
}
}
private static PGPEncryptedDataGenerator configureGenerator(int algorithm, Lock lock) throws MinigmaException {
PGPEncryptedDataGenerator encryptedDataGenerator;
try{
JcePGPDataEncryptorBuilder pgpDataEncryptorBuilder = new JcePGPDataEncryptorBuilder(algorithm);
pgpDataEncryptorBuilder.setProvider(BouncyCastleProvider.PROVIDER_NAME);
encryptedDataGenerator = new PGPEncryptedDataGenerator(pgpDataEncryptorBuilder);
Iterator<PGPPublicKeyRing> it = lock.getPGPPublicKeyRingIterator();
if (!it.hasNext()){
throw new MinigmaException("Empty Lock: "+lock.toString());
}
while (it.hasNext()){
PGPPublicKeyRing keyRing = it.next();
Iterator<PGPPublicKey> publicKeyIterator = keyRing.getPublicKeys();
while(publicKeyIterator.hasNext()){
PGPPublicKey pgpPublicKey = publicKeyIterator.next();
if(pgpPublicKey.isEncryptionKey()){
PGPKeyEncryptionMethodGenerator methodGenerator = new JcePublicKeyKeyEncryptionMethodGenerator(pgpPublicKey);
encryptedDataGenerator.addMethod(methodGenerator);
System.out.println("added encryption method for keyID "+ Kidney.toString(pgpPublicKey.getKeyID()));
}
}
}
return encryptedDataGenerator;
}catch(Exception e){
throw new MinigmaException("Minigma-encrypt: error configuring generator",e);
}
}
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/utils/FileTools.java
/*
*
* (c) copyright 2018 Platosys
* MIT Licence
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
*The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package uk.co.platosys.minigma.utils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
/**
*
* @author edward
*/
public class FileTools {
    /**
     * Recursively deletes a file, or a directory and all its contents
     * (equivalent to rm -r). Permission problems are not reported: failed
     * deletions are silently ignored.
     *
     * @param file the file or directory to delete
     */
    public static void delete(File file){
        if(!file.isDirectory()){
            file.delete();
        }else{
            File[] files = file.listFiles();
            // listFiles() returns null on an I/O error (or if the directory
            // disappears underneath us); guard against that rather than
            // throwing a NullPointerException.
            if (files != null) {
                for (File child : files) {
                    delete(child);
                }
            }
            file.delete();
        }
    }
    /**
     * Removes spaces and any funny characters from the supplied string, but
     * keeps dots: only letters, digits and '.' survive.
     *
     * Handy to process strings to make them more useful as cross-platform
     * filenames.
     *
     * @param string the string to sanitise
     * @return the sanitised string (possibly empty, never null)
     */
    public static String removeFunnyCharacters(String string){
        StringBuilder buffer = new StringBuilder();
        char dot = '.';
        for (int i=0; i<string.length(); i++){
            char x = string.charAt(i);
            if (Character.isLetterOrDigit(x) || x == dot){
                buffer.append(x);
            }
        }
        return buffer.toString();
    }
    /**
     * Simple file copy utility. If the destination exists, the user is asked
     * on the console (System.in/System.out) whether to overwrite it.
     *
     * @param fromFile the source file (must exist, be a plain file, and be readable)
     * @param toFile   the destination file, or a directory to copy into
     * @throws IOException if any precondition fails, the user declines to
     *                     overwrite, or the copy itself fails
     */
    public static void copy(File fromFile, File toFile)
            throws IOException {
        if (!fromFile.exists())
            throw new IOException("FileCopy: " + "no such source file: "
                    + fromFile.getAbsolutePath());
        if (!fromFile.isFile())
            throw new IOException("FileCopy: " + "can't copy directory: "
                    + fromFile.getAbsolutePath());
        if (!fromFile.canRead())
            throw new IOException("FileCopy: " + "source file is unreadable: "
                    + fromFile.getAbsolutePath());
        // Copying into a directory: keep the source's file name.
        if (toFile.isDirectory())
            toFile = new File(toFile, fromFile.getName());
        if (toFile.exists()) {
            if (!toFile.canWrite())
                throw new IOException("FileCopy: "
                        + "destination file is unwriteable: " + toFile.getAbsolutePath());
            // Interactive overwrite confirmation on the console.
            System.out.print("Overwrite existing file " + toFile.getName()
                    + "? (Y/N): ");
            System.out.flush();
            BufferedReader in = new BufferedReader(new InputStreamReader(
                    System.in));
            String response = in.readLine();
            if (!response.equals("Y") && !response.equals("y"))
                throw new IOException("FileCopy: "
                        + "existing file was not overwritten.");
        } else {
            // Destination doesn't exist: its parent directory must, and must be writable.
            String parent = toFile.getParent();
            if (parent == null)
                parent = System.getProperty("user.dir");
            File dir = new File(parent);
            if (!dir.exists())
                throw new IOException("FileCopy: "
                        + "destination directory doesn't exist: " + parent);
            if (dir.isFile())
                throw new IOException("FileCopy: "
                        + "destination is not a directory: " + parent);
            if (!dir.canWrite())
                throw new IOException("FileCopy: "
                        + "destination directory is unwriteable: " + parent);
        }
        FileInputStream from = null;
        FileOutputStream to = null;
        try {
            from = new FileInputStream(fromFile);
            to = new FileOutputStream(toFile);
            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = from.read(buffer)) != -1)
                to.write(buffer, 0, bytesRead); // write
        } finally {
            // Best-effort close of both streams; close failures are deliberately ignored.
            if (from != null)
                try {
                    from.close();
                } catch (IOException e) {
                }
            if (to != null)
                try {
                    to.close();
                } catch (IOException e) {
                }
        }
    }
}
<file_sep>/app/src/main/java/uk/co/platosys/minigma/votes/Officer.java
package uk.co.platosys.minigma.votes;
import uk.co.platosys.minigma.Key;
/**
* Objects implementing the Officer interface represent the Returning Officer of a poll. Their Key is used
* to unwrap the Ballots sent to them by Voters.
*/
public interface Officer extends Voter {
    /**
     * @return the Officer's Key, used to unwrap the Ballots sent to them by Voters
     */
    Key getKey();
}
| ff4540fd7335a6e2cb70ddbb22d11df819471055 | [
"Markdown",
"Java",
"Gradle"
] | 27 | Java | ejoftheweb/Minigmand | 1c81e0613299898c0c5535bbbc4b7ff627e7e963 | 33faa6412c4f98866de054255abe76e178711a63 | |
refs/heads/master | <repo_name>proyecto-galeria/proyecto-galeria-backend<file_sep>/controllers/instruction-controller.js
var InstructionModel = require('../models/instruction-model')
function findAll (req, res, next) {
InstructionModel.find({}, function (err, users) {
if (err) console.log(err)
res.send(users)
})
}
function create (req, res, next) {
let newUser = new InstructionModel(req.body)
newUser.save(function (err, user) {
if (err) console.log(err)
res.send(user)
})
}
function update (req, res, next) {
InstructionModel.findOne({name: req.params.id}, function (err, user) {
if (err) console.log(err)
if (req.body.name) user.name = req.body.name
user.age = req.body.age
user.save(function (err, savedUser) {
if (err) console.log(err)
res.send(savedUser)
})
})
}
function remove (req, res, next) {
InstructionModel.findOne({name: req.params.id}, function (err, res) {
if (err) console.log(err)
res.remove()
})
}
module.exports = {
findAll,
create,
update,
remove
}<file_sep>/index.js
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
const cors = require('cors')
const InstructionController = require('./controllers/instruction-controller')
const RecordController = require('./controllers/record-controller')
/// set
app.use(bodyParser.urlencoded({ extended: false }))
app.use(bodyParser.json())
app.use(cors())
app.get('/api/instructions', InstructionController.findAll)
app.post('/api/instructions', InstructionController.create)
app.put('/api/instructions/:id', InstructionController.update)
app.delete('/api/instructions/:id', InstructionController.remove)
app.get('/api/records', RecordController.findAll)
app.post('/api/records', RecordController.create)
app.put('/api/records/:id', RecordController.update)
app.delete('/api/records/:id', RecordController.remove)
// --------------------------------- Adding static ------------------------
// app.use(express.static('public'))
app.listen(3000, () => console.log('Example app listening on port 3000!'))
<file_sep>/models/schemas.js
var mongoose = require('mongoose')
var InstructionSchema = mongoose.Schema({
user: {
type: String,
required: true
},
date: {
type: Date,
required: true
},
concept: {
type: String,
required: true
},
description: {
type: String,
required: true
}
})
var RecordSchema = mongoose.Schema({
user: {
type: String,
required: true
},
date: {
type: Date,
required: true
},
name: {
type: String,
required: true
},
comments: String,
instruction: {
type: mongoose.Schema.ObjectId
},
photo: String,
sent: Boolean,
recipients: [String]
})
module.exports = {
InstructionSchema,
RecordSchema
} | 868e307e425ceb8575db60dcc0d5bacec02bed86 | [
"JavaScript"
] | 3 | JavaScript | proyecto-galeria/proyecto-galeria-backend | aaf2bc243adade41c946b66e6d447b95ce559955 | 90fde11e4d4975a6bf9ce5368933c3dcfaaa50aa | |
refs/heads/main | <repo_name>jscubillos5/Prueba-NUVU<file_sep>/FRONTEND/src/classes/Person/Person.spect.ts
import { Person } from 'src/classes/Person/Person';
describe('Person', () => {
it('should create an instance', () => {
expect(new Person()).toBeTruthy();
});
});
<file_sep>/FRONTEND/src/services/Person.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http';
import { Observable, pipe } from 'rxjs';
import { Person } from 'src/classes/Person/Person';
import { retry, catchError } from 'rxjs/operators';
import { throwError } from 'rxjs';
@Injectable()
export class PersonService {
private personURL: string;
httpOptions = {
headers: new HttpHeaders({
'Content-Type': 'application/json'
})
}
constructor(private http: HttpClient) {
console.log(HttpHeaders);
console.log(Observable);
console.log(pipe);
var mainHost = 'http://localhost:';
var port = '8081';
var mainURL = '/Person/';
this.personURL = mainHost + port + mainURL;
}
public getPersons(): Observable<Person[]> {
var endpoint = 'get-persons';
return this.http.get<Person[]>(this.personURL + endpoint);
}
public addPerson(person: Person) {
var endpoint = 'add-person';
var url = this.personURL + endpoint;
console.log("addPerson url: " + url + " person: " + JSON.stringify(person));
return this.http.post<Person>(url, JSON.stringify(person), this.httpOptions)
.pipe(
retry(1),
catchError(this.handleError)
)
}
public deletePerson(id: number) {
var endpoint = 'delete-person';
var param = '?id=';
var url = this.personURL + endpoint + param + + id.toString();
console.log("deletePerson url: " + url);
return this.http.post<Person>(url, null, this.httpOptions)
.pipe(
retry(1),
catchError(this.handleError)
)
}
private handleError(error: HttpErrorResponse): any {
if (error.status == 406) {
console.error(error.message);
alert("The person has already been added or removed, please review");
} else if (error.status == 409) {
console.error(error.message);
alert("Server error, please contact customer service");
}
return throwError(
'Something bad happened; please try again later.');
}
}<file_sep>/FRONTEND/src/app/person/person.component.ts
import { Component } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { PersonService } from 'src/services/Person.service';
import { Person } from 'src/classes/Person/Person';
@Component({
selector: 'app-person-form',
templateUrl: './person.component.html',
styleUrls: ['./person.component.css']
})
export class PersonComponent {
person: Person;
persons: Person[];
constructor(private route: ActivatedRoute,
private router: Router,
private personService: PersonService) {
this.person = new Person();
this.persons = [];
}
ngOnInit() {
this.refreshPerson()
}
refreshPerson() {
this.personService.getPersons()
.subscribe(data => {
console.log(data)
this.persons = data;
})
}
clearformPerson() {
this.person = new Person();
}
addPerson(): void {
const idDefault = 0;
this.person.id = idDefault;
this.personService.addPerson(this.person).subscribe(data => {
console.log(data)
this.refreshPerson();
});
this.clearformPerson();
}
deletePerson(id: number, name: string): void {
if (confirm("Are you sure to delete: " + name)) {
this.personService.deletePerson(id).subscribe(data => {
console.log(data)
this.refreshPerson();
});
}
}
}
<file_sep>/BACK/src/main/java/com/nuvu/dao/CreditCardDao.java
package com.nuvu.dao;
import com.nuvu.model.CreditCard;
import com.nuvu.repositories.CreditCardRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
* @author <NAME>
* @since 2020-04-16
*/
@Component
public class CreditCardDao {
@Autowired
CreditCardRepository creditCardRepository;
public CreditCard getCreditCard(Long id) {
return creditCardRepository.findById(id).orElse(null);
}
public CreditCard getCreditCardByNumber(int number) {
return creditCardRepository.findByNumber(number);
}
public List<CreditCard> getCreditCardByPerson(Long idPerson) {
List<CreditCard> results = new ArrayList<>();
for (CreditCard creditCard : creditCardRepository.findAll()) {
if (creditCard.getIdPerson() == idPerson) {
results.add(creditCard);
}
}
return results;
}
public void addCreditCard(CreditCard creditCard) {
creditCardRepository.save(creditCard);
}
public void deleteCreditCard(Long id) {
creditCardRepository.deleteById(id);
}
}
<file_sep>/BACK/src/main/java/com/nuvu/model/Person.java
package com.nuvu.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import java.util.Date;
/**
* @author <NAME>
* @since 2020-04-16
*/
@Entity
public class Person {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private long id;
private String identification;
private String name;
private String email;
private Date dateRegister;
public Person(long id, String identification, String name, String email, Date dateRegister) {
this.id = id;
this.identification = identification;
this.name = name;
this.email = email;
this.dateRegister = dateRegister;
}
public Person() {
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getIdentification() {
return identification;
}
public void setIdentification(String identification) {
this.identification = identification;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public Date getDateRegister() {
return dateRegister;
}
public void setDateRegister(Date dateRegister) {
this.dateRegister = dateRegister;
}
}
<file_sep>/BACK/src/main/java/com/nuvu/dto/PersonDto.java
package com.nuvu.dto;
/**
* @author <NAME>
* @since 2020-04-16
*/
public class PersonDto {
private long id;
private String identification;
private String name;
private String email;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getIdentification() {
return identification;
}
public void setIdentification(String identification) {
this.identification = identification;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
}
<file_sep>/BACK/src/main/java/com/nuvu/repositories/CreditCardRepository.java
package com.nuvu.repositories;
import com.nuvu.model.CreditCard;
import org.springframework.data.repository.CrudRepository;
/**
* @author <NAME>
* @since 2020-04-16
*/
public interface CreditCardRepository extends CrudRepository<CreditCard, Long> {
CreditCard findByNumber(int number);
}
<file_sep>/BACK/src/main/java/com/nuvu/controllers/CreditCardController.java
package com.nuvu.controllers;
import com.nuvu.dto.CreditCardDto;
import com.nuvu.model.CreditCard;
import com.nuvu.service.CreditCardService;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
/**
* @author <NAME>
* @since 2020-04-16
*/
@RestController
@RequestMapping(path = "/CreditCard/")
@CrossOrigin(origins = "http://localhost:4200")
public class CreditCardController {
@Autowired
CreditCardService creditCardService;
@ApiOperation(value = "Permite identificar si el controlador de tarjetas de credito esta arriba")
@RequestMapping("home")
public String home() {
return "Home Credit Card";
}
@ApiOperation(value = "Retorna la lista de tarjetas de credito asociadas a una persona")
@GetMapping(
path = "get-credit-cards",
produces = MediaType.APPLICATION_JSON_VALUE
)
public @ResponseBody
HttpEntity<List<CreditCard>> getCreditCardByPerson(@Valid @RequestParam("idPerson") Long idPerson) {
return creditCardService.getCreditCardByPerson(idPerson);
}
@ApiOperation(value = "Permite registrar una tarjeta de credito en el sistema")
@PostMapping(
path = "add-credit-card",
produces = MediaType.APPLICATION_JSON_VALUE
)
public @ResponseBody
HttpEntity<CreditCard> addCreditCard(@RequestBody CreditCardDto creditCardDto) {
return creditCardService.addCreditCard(creditCardDto);
}
@ApiOperation(value = "Permite eliminar una tarjeta de credito en el sistema")
@PostMapping(
path = "delete-credit-card",
produces = MediaType.APPLICATION_JSON_VALUE
)
public @ResponseBody
HttpEntity<Long> deleteCreditCard(@Valid @RequestParam("id") Long id) {
return creditCardService.deleteCreditCard(id);
}
}
<file_sep>/FRONTEND/src/services/CreditCard.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http';
import { Observable, pipe } from 'rxjs';
import { retry, catchError } from 'rxjs/operators';
import { throwError } from 'rxjs';
import { CreditCard } from 'src/classes/CreditCard/CreditCard';
@Injectable()
export class CreditCardService {
private creditCardURL: string;
httpOptions = {
headers: new HttpHeaders({
'Content-Type': 'application/json'
})
}
constructor(private http: HttpClient) {
console.log(HttpHeaders);
console.log(Observable);
console.log(pipe);
var mainHost = 'http://localhost:';
var port = '8081';
var mainURL = '/CreditCard/';
this.creditCardURL = mainHost + port + mainURL;
}
public getCreditCardByPerson(idPerson: number): Observable<CreditCard[]> {
var endpoint = 'get-credit-cards';
var param = '?idPerson=';
var url = this.creditCardURL + endpoint + param + + idPerson.toString();
console.log("getCreditCardByPerson url: " + url);
return this.http.get<CreditCard[]>(url);
}
public addcreditCard(creditCard: CreditCard) {
var endpoint = 'add-credit-card';
var url = this.creditCardURL + endpoint;
console.log("addcreditCard url: " + url + " creditCard: " + JSON.stringify(creditCard));
return this.http.post<CreditCard>(url, JSON.stringify(creditCard), this.httpOptions)
.pipe(
retry(1),
catchError(this.handleError)
)
}
public deletecreditCard(id: number) {
var endpoint = 'delete-credit-card';
var param = '?id=';
var url = this.creditCardURL + endpoint + param + + id.toString();
console.log("deletecreditCard url: " + url);
return this.http.post<CreditCard>(url, null, this.httpOptions)
.pipe(
retry(1),
catchError(this.handleError)
)
}
private handleError(error: HttpErrorResponse): any {
if (error.status == 406) {
console.error(error.message);
alert("The credit card has already been added or removed, please review");
} else if (error.status == 409) {
console.error(error.message);
alert("Server error, please contact customer service");
}
return throwError(
'Something bad happened; please try again later.');
}
}<file_sep>/BACK/src/main/java/com/nuvu/model/CreditCard.java
package com.nuvu.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import java.util.Date;
/**
* @author <NAME>
* @since 2020-04-16
*/
@Entity
public class CreditCard {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private long id;
private int number;
private Date validUntil;
private int cvc;
private long idPerson;
public CreditCard() {
}
public CreditCard(long id, int number, Date validUntil, int cvc, long idPerson) {
this.id = id;
this.number = number;
this.validUntil = validUntil;
this.cvc = cvc;
this.idPerson = idPerson;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
public Date getValidUntil() {
return validUntil;
}
public void setValidUntil(Date validUntil) {
this.validUntil = validUntil;
}
public int getCvc() {
return cvc;
}
public void setCvc(int cvc) {
this.cvc = cvc;
}
public long getIdPerson() {
return idPerson;
}
public void setIdPerson(long idPerson) {
this.idPerson = idPerson;
}
}
<file_sep>/FRONTEND/src/app/credit-card/credit-card.component.ts
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { CreditCard } from 'src/classes/CreditCard/CreditCard';
import { Person } from 'src/classes/Person/Person';
import { CreditCardService } from 'src/services/CreditCard.service';
import { PersonService } from 'src/services/Person.service';
@Component({
selector: 'app-credit-card',
templateUrl: './credit-card.component.html',
styleUrls: ['./credit-card.component.css']
})
export class CreditCardComponent implements OnInit {
creditCard: CreditCard;
persons: Person[];
creditCards: CreditCard[];
constructor(private route: ActivatedRoute,
private router: Router,
private creditCardService: CreditCardService,
private personService: PersonService) {
this.creditCard = new CreditCard();
this.persons = [];
this.creditCards = [];
}
ngOnInit() {
this.refreshCreditCard()
}
refreshCreditCard() {
this.creditCards = [];
this.personService.getPersons().subscribe(data => {
console.log(data)
this.persons = data;
this.persons.forEach((value) => {
console.log("value.id: " + value.id + " value.identification: " + value.identification)
this.loadCreditCards(value.id);
});
});
}
loadCreditCards(idPerson: number) {
this.creditCardService.getCreditCardByPerson(idPerson)
.subscribe(data => {
console.log(data)
if (data.length > 0) {
data.forEach((value) => {
this.creditCards.push(value);
});
}
});
}
clearformcreditCard() {
this.creditCard = new CreditCard();
}
addCreditCard(): void {
const idDefault = 0;
const maxValuenumber = 9999999999;
const maxValueCVC = 999;
this.creditCard.id = idDefault;
if (this.creditCard.number > maxValuenumber) {
alert("The number of a credit card cannot be greater than 10 numbers");
}
else if (this.creditCard.cvc > maxValueCVC) {
alert("The CVC of a credit card cannot be greater than 3 numbers");
}
else {
this.creditCardService.addcreditCard(this.creditCard).subscribe(data => {
console.log(data)
this.refreshCreditCard();
});
this.clearformcreditCard();
}
}
deleteCreditCard(id: number, number: number): void {
if (confirm("Are you sure to delete: " + number)) {
this.creditCardService.deletecreditCard(id).subscribe(data => {
console.log(data)
this.refreshCreditCard();
});
}
}
}
<file_sep>/BACK/src/main/java/com/nuvu/BasicApplication.java
package com.nuvu;
import com.nuvu.model.CreditCard;
import com.nuvu.model.Person;
import com.nuvu.repositories.CreditCardRepository;
import com.nuvu.repositories.PersonRepository;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import java.text.SimpleDateFormat;
import java.util.Date;
@SpringBootApplication
@ComponentScan()
public class BasicApplication {
public static void main(String[] args) {
SpringApplication.run(BasicApplication.class, args);
}
@Bean
CommandLineRunner initPersonRepository(PersonRepository personRepository) {
return args -> {
Person person = new Person(1, "1018458060", "<NAME>", "CUBILLOS2093HOTMAIL.COM", new Date());
personRepository.save(person);
};
}
@Bean
CommandLineRunner initCreditCardRepository(CreditCardRepository creditCardRepository) {
return args -> {
SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
Date validUntil = sdf.parse("01/01/2022");
CreditCard creditCard = new CreditCard(1, 45526877, validUntil, 209, 1);
creditCardRepository.save(creditCard);
};
}
}
<file_sep>/FRONTEND/src/classes/CreditCard/CreditCard.ts
export class CreditCard {
id!: number;
number!: number;
validUntil!: Date;
cvc!: number;
idPerson!: number;
}<file_sep>/BACK/src/main/java/com/nuvu/repositories/PersonRepository.java
package com.nuvu.repositories;
import com.nuvu.model.Person;
import org.springframework.data.repository.CrudRepository;
/**
* @author <NAME>
* @since 2020-04-16
*/
public interface PersonRepository extends CrudRepository<Person, Long> {
Person findByIdentification(String identification);
}
| 3a942eb3f33ae72cab546fe0506db8f44992ddf4 | [
"Java",
"TypeScript"
] | 14 | TypeScript | jscubillos5/Prueba-NUVU | 7a02e06b08c9e7863bff6cd75b5eca4005ce8891 | 100fcf17e6dbd58df6fee921538f6c93eccec58e | |
refs/heads/master | <repo_name>DisaDisa/homework2<file_sep>/app/src/main/java/ru/ifmo/droid2016/tmdb/loader/MovieLoader.java
package ru.ifmo.droid2016.tmdb.loader;
import android.content.Context;
import android.content.res.Resources;
import android.support.v4.content.AsyncTaskLoader;
import android.util.Log;
import com.facebook.stetho.urlconnection.StethoURLConnectionManager;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import ru.ifmo.droid2016.tmdb.api.TmdbApi;
import ru.ifmo.droid2016.tmdb.model.Movie;
import ru.ifmo.droid2016.tmdb.utils.IOUtils;
/**
* Created by Disa on 23.11.2016.
*/
public class MovieLoader extends AsyncTaskLoader<LoadResult<List<Movie>>> {

    // Movies accumulated across loads; cleared whenever the UI language changes.
    List<Movie> data;
    private String TAG = "Movie";
    // Language used for the previous request; null before the first load.
    private String curLanguage;
    private int[] pages;
    private int curPage;

    public MovieLoader(Context context) {
        super(context);
        this.pages = new int[0];
        curPage = 0;
        data = new ArrayList<>();
        this.curLanguage = null;
    }

    /**
     * Downloads the popular-movies list from the TMDB API on a worker thread.
     * Returns a LoadResult wrapping the accumulated movie list together with
     * an OK / ERROR / NO_INTERNET result type.
     */
    @Override
    public LoadResult<List<Movie>> loadInBackground() {
        Log.d("######Loader", "Load in Background");
        // Stetho manager lets Chrome DevTools inspect the HTTP exchange.
        final StethoURLConnectionManager stethoManager = new StethoURLConnectionManager("API");
        ResultType resultType = ResultType.ERROR;
        List<Movie> currentData = new ArrayList<>();
        int[] pagesToDownload = new int[0];
        // If the system language changed since the last load, discard the old
        // results and reload in the new language.
        // NOTE(review): pagesToDownload is assigned here but never read
        // afterwards -- looks like paging support was left unfinished; confirm.
        if (!Resources.getSystem().getConfiguration().locale.getLanguage().equals(curLanguage)) {
            pagesToDownload = pages;
            curLanguage = Resources.getSystem().getConfiguration().locale.getLanguage();
            data = new ArrayList<>();
        }
        HttpURLConnection connection = null;
        InputStream in = null;
        try {
            connection = TmdbApi.getPopularMoviesRequest(curLanguage);
            stethoManager.preConnect(connection, null);
            connection.connect();
            stethoManager.postConnect();
            if (connection.getResponseCode() == HttpURLConnection.HTTP_OK) {
                in = connection.getInputStream();
                in = stethoManager.interpretResponseStream(in);
                currentData.addAll(MovieParser.parseMovies(in));
            } else {
                throw new BadResponseException("HTTP: " + connection.getResponseCode()
                        + ", " + connection.getResponseMessage());
            }
            resultType = ResultType.OK;
        } catch (MalformedURLException e) {
            Log.e(TAG, "Failed to get movies", e);
        } catch (IOException e) {
            stethoManager.httpExchangeFailed(e);
            // Distinguish "no network at all" from other I/O failures so the
            // UI can show an appropriate message.
            if (IOUtils.isConnectionAvailable(getContext(), false)) {
                resultType = ResultType.ERROR;
            } else {
                resultType = ResultType.NO_INTERNET;
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to get movies: ", e);
        } finally {
            // Always release the stream and the connection.
            IOUtils.closeSilently(in);
            if (connection != null) {
                connection.disconnect();
            }
        }
        data.addAll(currentData);
        return new LoadResult<>(resultType, data);
    }

    @Override
    protected void onStartLoading() {
        // Always trigger a (re)load when the loader is started.
        forceLoad();
    }
} | 909c820b91a766bdf7640f4f62db0da77104170a | [
"Java"
] | 1 | Java | DisaDisa/homework2 | a732f430a1c1fcbd0631d08989555bad3ee8db4d | 33257d6c018fce9627049027ed9d0a46e1773df4 | |
refs/heads/master | <repo_name>imamrdn/presenceClassA-formProject<file_sep>/data.php
<?php
// Render the attendance records stored in data.txt as an HTML table.
// Each line of data.txt is a pipe-separated record:
//   name | nim | email | class | address
echo "<h1>Presensi</h1>"; // fixed: closing tag was "<h1>" instead of "</h1>"

$fp = fopen("data.txt", "r");
if ($fp === false) {
    // No data file yet: show a friendly message instead of PHP warnings.
    echo "Data presensi belum tersedia.";
} else {
    echo "<table border=1 width=70%>";
    echo "<tr><th>Nama</th><th>NIM</th><th>Email</th><th>Kelas</th><th>Alamat</th></tr>";
    while (($isi = fgets($fp)) !== false) {
        $pisah = explode("|", $isi);
        // Pad to five fields so malformed lines don't raise index notices.
        $pisah = array_pad($pisah, 5, "");
        echo "<tr>";
        for ($i = 0; $i < 5; $i++) {
            // Escape: the fields come from user form input (stored XSS risk).
            echo "<td>" . htmlspecialchars($pisah[$i]) . "</td>";
        }
        echo "</tr>";
    }
    fclose($fp); // fixed: handle was never closed
    echo "</table>"; // fixed: was misspelled "</tabel>"
}
?>
<file_sep>/formaction.php
<?php
// Handle the attendance form submission: require all five fields, then
// append the record to data.txt as a pipe-separated line.
if ($_SERVER["REQUEST_METHOD"] == "POST") {
    $name    = $_POST['nama'];
    $nim     = $_POST['nim'];
    $email   = $_POST['email'];
    $class   = $_POST['class'];
    $address = $_POST['address'];
    $enter   = "\n";

    // All fields are required.  One combined guard replaces the original
    // triple-nested if/else chain, which repeated the same failure message
    // in every branch.  Success/failure output is unchanged.
    if (!empty($name) && !empty($nim) && !empty($email) &&
        !empty($class) && !empty($address)) {
        echo "<h1>Presensi Berhasil</h1>";
        $myfile = fopen("data.txt", "a+");
        if ($myfile !== false) {
            fwrite($myfile, "$name | $nim | $email | $class | $address $enter");
            fclose($myfile);
        }
    } else {
        echo "Data belum lengkap, presensi Anda Gagal";
    }
}
echo "<br><a href='data.php'>Lihat Presensi</a><br>";
echo "<a href='index.php'>Isi Presensi</a>";
?>
| b4bdb0496d9a70a0f6e5e7c0f76d60ded48062b5 | [
"PHP"
] | 2 | PHP | imamrdn/presenceClassA-formProject | af40545b63efa5ff9504c6d0ab232b6d370a0473 | 11de3c46f7a524676eb4b90937121aa5d2f948e0 | |
refs/heads/master | <file_sep>NetworkX_Samples
================
Sample code for NetworkX (Python)<file_sep># -*- coding: utf-8 -*-
# Draw a graph from an edge list with explicit node coordinates and save
# the figure as a PNG.
import numpy
import matplotlib.pyplot as plt
import pylab
import networkx as nx
#############################################
## Edge (link) information
# Build the graph from the edge list only.
# nodetype=int is required so node labels are parsed as plain integers.
G = nx.read_edgelist("edge.txt", nodetype=int)
#############################################
## Coordinate information
# Load node coordinates as an ndarray.
pos_array=numpy.loadtxt("coord.txt", dtype=int)
# Node positions are kept in a dict keyed by node label.
pos = {}
# Convert each ndarray row into an (x, y) tuple.
for i in range(len(pos_array)):
    pos[i] = (pos_array[i][0], pos_array[i][1])
#############################################
## Drawing
# Draw the graph at the given positions.
nx.draw(G, pos)
# Save, then display interactively.
plt.savefig("test_fig.png")
plt.show()
| 7c9e5fbc5a0a28c8cc42220a88e65357605146f9 | [
"Markdown",
"Python"
] | 2 | Markdown | mkacky/NetworkX_Samples | 4f825791e291871c11c34e54e0d7b1b83a0dc9f3 | 14c48453244ba30058b1866c29ca61c86e40f924 | |
refs/heads/master | <repo_name>0xPr0xy/soundcloud-cli<file_sep>/urwid/widget.py
#!/usr/bin/python
#
# Urwid basic widget classes
# Copyright (C) 2004-2011 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from operator import attrgetter
from urwid.util import MetaSuper, decompose_tagmarkup, calc_width, \
is_wide_char, move_prev_char, move_next_char
from urwid.compat import bytes
from urwid.text_layout import calc_pos, calc_coords, shift_line
from urwid import signals
from urwid import text_layout
from urwid.canvas import CanvasCache, CompositeCanvas, SolidCanvas, \
apply_text_layout
from urwid.command_map import command_map
from urwid.split_repr import split_repr, remove_defaults, python3_repr
# Widget sizing methods
# (use the same string objects to make some comparisons faster)
FLOW = 'flow'
BOX = 'box'
FIXED = 'fixed'

# Text alignment modes
LEFT = 'left'
RIGHT = 'right'
CENTER = 'center'

# Filler alignment modes
TOP = 'top'
MIDDLE = 'middle'
BOTTOM = 'bottom'

# Text wrapping modes
SPACE = 'space'
ANY = 'any'
CLIP = 'clip'

# Extras for Padding
PACK = 'pack'
GIVEN = 'given'
RELATIVE = 'relative'
# shorthand tuple for "100% relative" sizing
RELATIVE_100 = (RELATIVE, 100)
class WidgetMeta(MetaSuper, signals.MetaSignals):
    """
    Automatic caching of render and rows methods.

    Class variable no_cache is a list of names of methods to not cache.

    Class variable ignore_focus if defined and True indicates that this
    widget is not affected by the focus parameter, so it may be ignored
    when caching.
    """
    def __init__(cls, name, bases, d):
        no_cache = d.get("no_cache", [])

        super(WidgetMeta, cls).__init__(name, bases, d)

        # Wrap any render() defined directly on this class so its
        # canvases are cached (or at least finalized when caching is
        # disabled for it).
        if "render" in d:
            if "render" not in no_cache:
                render_fn = cache_widget_render(cls)
            else:
                render_fn = nocache_widget_render(cls)
            cls.render = render_fn

        if "rows" in d and "rows" not in no_cache:
            cls.rows = cache_widget_rows(cls)
        # these are class-level configuration flags consumed above;
        # remove them so they don't linger as attributes
        if "no_cache" in d:
            del cls.no_cache
        if "ignore_focus" in d:
            del cls.ignore_focus
class WidgetError(Exception):
    """Raised when a widget is misused or renders an invalid canvas."""
def validate_size(widget, size, canv):
    """
    Raise a WidgetError if a canv does not match size size.

    widget -- the widget that produced canv (used in the error message)
    size -- (), (maxcol,) or (maxcol, maxrow) that was passed to render()
    canv -- the canvas returned by render()
    """
    # size[1:] != (0,) means a (maxcol, 0) size skips the column check;
    # the row check only applies when a maxrow was actually given.
    if (size and size[1:] != (0,) and size[0] != canv.cols()) or \
        (len(size)>1 and size[1] != canv.rows()):
        raise WidgetError("Widget %r rendered (%d x %d) canvas"
            " when passed size %r!" % (widget, canv.cols(),
            canv.rows(), size))
def update_wrapper(new_fn, fn):
    """
    Copy as much of the function detail from fn to new_fn
    as we can.

    Best-effort local stand-in for functools.update_wrapper; failures
    are deliberately ignored (see except clause below).
    """
    try:
        new_fn.__name__ = fn.__name__
        new_fn.__dict__.update(fn.__dict__)
        new_fn.__doc__ = fn.__doc__
        new_fn.__module__ = fn.__module__
    except TypeError:
        pass # python2.3 ignore read-only attributes
def cache_widget_render(cls):
    """
    Return a function that wraps the cls.render() method
    and fetches and stores canvases with CanvasCache.
    """
    ignore_focus = bool(getattr(cls, "ignore_focus", False))
    fn = cls.render
    def cached_render(self, size, focus=False):
        # widgets that ignore focus always cache under focus=False,
        # improving cache hit rate
        focus = focus and not ignore_focus
        canv = CanvasCache.fetch(self, cls, size, focus)
        if canv:
            return canv

        canv = fn(self, size, focus=focus)
        validate_size(self, size, canv)
        if canv.widget_info:
            # canvas already carries another widget's info; wrap it so
            # finalize() below can attach this widget's info
            canv = CompositeCanvas(canv)
        canv.finalize(self, size, focus)
        CanvasCache.store(cls, canv)
        return canv
    # keep a handle on the unwrapped render for nocache_* helpers
    cached_render.original_fn = fn
    update_wrapper(cached_render, fn)
    return cached_render
def nocache_widget_render(cls):
    """
    Return a function that wraps the cls.render() method
    and finalizes the canvas that it returns, but does not cache it.
    """
    fn = cls.render
    # unwrap if render was already wrapped by cache_widget_render()
    if hasattr(fn, "original_fn"):
        fn = fn.original_fn
    def finalize_render(self, size, focus=False):
        canv = fn(self, size, focus=focus)
        if canv.widget_info:
            # see cache_widget_render(): wrap before finalizing
            canv = CompositeCanvas(canv)
        validate_size(self, size, canv)
        canv.finalize(self, size, focus)
        return canv
    finalize_render.original_fn = fn
    update_wrapper(finalize_render, fn)
    return finalize_render
def nocache_widget_render_instance(self):
    """
    Return a function that wraps the cls.render() method
    and finalizes the canvas that it returns, but does not
    cache the canvas.

    Unlike nocache_widget_render() this operates on a single widget
    instance (self), not on the class.
    """
    fn = self.render.original_fn
    def finalize_render(size, focus=False):
        canv = fn(self, size, focus=focus)
        if canv.widget_info:
            canv = CompositeCanvas(canv)
        canv.finalize(self, size, focus)
        return canv
    finalize_render.original_fn = fn
    update_wrapper(finalize_render, fn)
    return finalize_render
def cache_widget_rows(cls):
    """
    Return a function that wraps the cls.rows() method
    and returns rows from the CanvasCache if available.
    """
    ignore_focus = bool(getattr(cls, "ignore_focus", False))
    fn = cls.rows
    def cached_rows(self, size, focus=False):
        focus = focus and not ignore_focus
        # a cached canvas already knows its height; avoid recomputing
        canv = CanvasCache.fetch(self, cls, size, focus)
        if canv:
            return canv.rows()

        return fn(self, size, focus)
    update_wrapper(cached_rows, fn)
    return cached_rows
class Widget(object):
    """
    base class of widgets
    """
    __metaclass__ = WidgetMeta
    _selectable = False
    _sizing = set([])
    _command_map = command_map # default to the single shared CommandMap

    def _invalidate(self):
        # drop any cached canvases so the next render is recalculated
        CanvasCache.invalidate(self)

    def _emit(self, name, *args):
        """
        Convenience function to emit signals with self as first
        argument.
        """
        signals.emit_signal(self, name, self, *args)

    def selectable(self):
        """
        Return True if this widget should take focus.  Default
        implementation returns the value of self._selectable.
        """
        return self._selectable

    def sizing(self):
        """
        Return a set including one or more of 'box', 'flow' and
        'fixed'.  Default implementation returns the value of
        self._sizing.
        """
        return self._sizing

    def pack(self, size, focus=False):
        """
        Return a 'packed' (maxcol, maxrow) for this widget.  Default
        implementation (no packing defined) returns size, and
        calculates maxrow if not given.
        """
        if size == ():
            if FIXED in self.sizing():
                raise NotImplementedError('Fixed widgets must override'
                    ' Widget.pack()')
            raise WidgetError('Cannot pack () size, this is not a fixed'
                ' widget: %s' % repr(self))
        elif len(size) == 1:
            if FLOW in self.sizing():
                # flow widgets compute their own height from maxcol
                return size + (self.rows(size, focus),)
            raise WidgetError('Cannot pack (maxcol,) size, this is not a'
                ' flow widget: %s' % repr(self))
        return size

    # this property returns the widget without any decorations, default
    # implementation returns self.
    base_widget = property(lambda self:self)

    # Use the split_repr module to create __repr__ from _repr_words
    # and _repr_attrs
    __repr__ = split_repr

    def _repr_words(self):
        words = []
        if self.selectable():
            words = ["selectable"] + words
        if self.sizing():
            sizing_modes = list(self.sizing())
            sizing_modes.sort()
            words.append("/".join(sizing_modes))
        return words + ["widget"]

    def _repr_attrs(self):
        return {}
class FlowWidget(Widget):
    """
    base class of widgets that determine their rows from the number of
    columns available.
    """
    _sizing = set([FLOW])

    def rows(self, size, focus=False):
        """
        All flow widgets must implement this function.
        """
        raise NotImplementedError()

    def render(self, size, focus=False):
        """
        All widgets must implement this function.
        """
        raise NotImplementedError()
class BoxWidget(Widget):
    """
    base class of width and height constrained widgets such as
    the top level widget attached to the display object
    """
    _selectable = True
    _sizing = set([BOX])

    def render(self, size, focus=False):
        """
        All widgets must implement this function.
        """
        raise NotImplementedError()
def fixed_size(size):
    """
    raise ValueError if size != ().

    Used by FixedWidgets to test size parameter.
    """
    if size != ():
        # fixed: the original string concatenation was missing a space,
        # producing "for size.passed: ..." in the error message
        raise ValueError("FixedWidget takes only () for size. "
            "passed: %r" % (size,))
class FixedWidget(Widget):
    """
    base class of widgets that know their width and height and
    cannot be resized
    """
    _sizing = set([FIXED])

    def render(self, size, focus=False):
        """
        All widgets must implement this function.
        """
        raise NotImplementedError()

    def pack(self, size=None, focus=False):
        """
        All fixed widgets must implement this function.
        """
        raise NotImplementedError()
class Divider(FlowWidget):
    """
    Horizontal divider widget
    """
    ignore_focus = True

    def __init__(self,div_char=u" ",top=0,bottom=0):
        """
        Create a horizontal divider widget.

        div_char -- character to repeat across line
        top -- number of blank lines above
        bottom -- number of blank lines below

        >>> Divider()
        <Divider flow widget>
        >>> Divider(u'-')
        <Divider flow widget '-'>
        >>> Divider(u'x', 1, 2)
        <Divider flow widget 'x' bottom=2 top=1>
        """
        self.__super.__init__()
        self.div_char = div_char
        self.top = top
        self.bottom = bottom

    def _repr_words(self):
        # only mention div_char when it differs from the default space
        return self.__super._repr_words() + [
            python3_repr(self.div_char)] * (self.div_char != u" ")

    def _repr_attrs(self):
        attrs = dict(self.__super._repr_attrs())
        if self.top: attrs['top'] = self.top
        if self.bottom: attrs['bottom'] = self.bottom
        return attrs

    def rows(self, size, focus=False):
        """
        Return the number of lines that will be rendered.

        >>> Divider().rows((10,))
        1
        >>> Divider(u'x', 1, 2).rows((10,))
        4
        """
        (maxcol,) = size
        return self.top + 1 + self.bottom

    def render(self, size, focus=False):
        """
        Render the divider as a canvas and return it.

        >>> Divider().render((10,)).text # ... = b in Python 3
        [...'          ']
        >>> Divider(u'-', top=1).render((10,)).text
        [...'          ', ...'----------']
        >>> Divider(u'x', bottom=2).render((5,)).text
        [...'xxxxx', ...'     ', ...'     ']
        """
        (maxcol,) = size
        canv = SolidCanvas(self.div_char, maxcol, 1)
        canv = CompositeCanvas(canv)
        # blank padding rows are added around the divider line itself
        if self.top or self.bottom:
            canv.pad_trim_top_bottom(self.top, self.bottom)
        return canv
class SolidFill(BoxWidget):
    _selectable = False
    ignore_focus = True

    def __init__(self, fill_char=" "):
        """
        Create a box widget that will fill an area with a single
        character.

        fill_char -- character to fill area with

        >>> SolidFill(u'8')
        <SolidFill box widget '8'>
        """
        self.__super.__init__()
        self.fill_char = fill_char

    def _repr_words(self):
        return self.__super._repr_words() + [python3_repr(self.fill_char)]

    def render(self, size, focus=False ):
        """
        Render the Fill as a canvas and return it.

        >>> SolidFill().render((4,2)).text # ... = b in Python 3
        [...'    ', ...'    ']
        >>> SolidFill('#').render((5,3)).text
        [...'#####', ...'#####', ...'#####']
        """
        maxcol, maxrow = size
        return SolidCanvas(self.fill_char, maxcol, maxrow)
class TextError(Exception):
    """Raised for invalid Text widget operations (bad align/wrap modes)."""
class Text(FlowWidget):
    """
    a horizontally resizeable text widget
    """
    ignore_focus = True
    # truncate the text shown in __repr__ beyond this many characters
    _repr_content_length_max = 140

    def __init__(self, markup, align=LEFT, wrap=SPACE, layout=None):
        """
        markup -- content of text widget, one of:
            plain string -- string is displayed
            ( attr, markup2 ) -- markup2 is given attribute attr
            [ markupA, markupB, ... ] -- list items joined together
        align -- align mode for text layout
        wrap -- wrap mode for text layout
        layout -- layout object to use, defaults to StandardTextLayout

        >>> Text(u"Hello")
        <Text flow widget 'Hello'>
        >>> t = Text(('bold', u"stuff"), 'right', 'any')
        >>> t
        <Text flow widget 'stuff' align='right' wrap='any'>
        >>> print t.text
        stuff
        >>> t.attrib
        [('bold', 5)]
        """
        self.__super.__init__()
        self._cache_maxcol = None
        self.set_text(markup)
        self.set_layout(align, wrap, layout)

    def _repr_words(self):
        """
        Show the text in the repr in python3 format (b prefix for byte
        strings) and truncate if it's too long
        """
        first = self.__super._repr_words()
        text = self.get_text()[0]
        rest = python3_repr(text)
        if len(rest) > self._repr_content_length_max:
            # keep the head and tail of the text with "..." between
            rest = (rest[:self._repr_content_length_max * 2 // 3 - 3] +
                '...' + rest[-self._repr_content_length_max // 3:])
        return first + [rest]

    def _repr_attrs(self):
        attrs = dict(self.__super._repr_attrs(),
            align=self._align_mode,
            wrap=self._wrap_mode)
        return remove_defaults(attrs, Text.__init__)

    def _invalidate(self):
        # the cached line translation is only valid for one maxcol value
        self._cache_maxcol = None
        self.__super._invalidate()

    def set_text(self,markup):
        """
        Set content of text widget.

        markup -- see __init__() for description.

        >>> t = Text(u"foo")
        >>> print t.text
        foo
        >>> t.set_text(u"bar")
        >>> print t.text
        bar
        >>> t.text = u"baz"  # not supported because text stores text but set_text() takes markup
        Traceback (most recent call last):
        AttributeError: can't set attribute
        """
        self._text, self._attrib = decompose_tagmarkup(markup)
        self._invalidate()

    def get_text(self):
        """
        Returns (text, attributes).

        text -- complete string content (unicode) of text widget
        attributes -- run length encoded attributes for text

        >>> Text(u"Hello").get_text() # ... = u in Python 2
        (...'Hello', [])
        >>> Text(('bright', u"Headline")).get_text()
        (...'Headline', [('bright', 8)])
        >>> Text([('a', u"one"), u"two", ('b', u"three")]).get_text()
        (...'onetwothree', [('a', 3), (None, 3), ('b', 5)])
        """
        return self._text, self._attrib

    # read-only views of the decomposed markup
    text = property(lambda self:self.get_text()[0])
    attrib = property(lambda self:self.get_text()[1])

    def set_align_mode(self, mode):
        """
        Set text alignment / justification.

        Valid modes for StandardTextLayout are:
        'left', 'center' and 'right'

        >>> t = Text(u"word")
        >>> t.set_align_mode('right')
        >>> t.align
        'right'
        >>> t.render((10,)).text # ... = b in Python 3
        [...'      word']
        >>> t.align = 'center'
        >>> t.render((10,)).text
        [...'   word   ']
        >>> t.align = 'somewhere'
        Traceback (most recent call last):
        TextError: Alignment mode 'somewhere' not supported.
        """
        if not self.layout.supports_align_mode(mode):
            raise TextError("Alignment mode %r not supported."%
                (mode,))
        self._align_mode = mode
        self._invalidate()

    def set_wrap_mode(self, mode):
        """
        Set wrap mode.

        Valid modes for StandardTextLayout are :
        'any' : wrap at any character
        'space' : wrap on space character
        'clip' : truncate lines instead of wrapping

        >>> t = Text(u"some words")
        >>> t.render((6,)).text # ... = b in Python 3
        [...'some  ', ...'words ']
        >>> t.set_wrap_mode('clip')
        >>> t.wrap
        'clip'
        >>> t.render((6,)).text
        [...'some w']
        >>> t.wrap = 'any'  # Urwid 0.9.9 or later
        >>> t.render((6,)).text
        [...'some w', ...'ords  ']
        >>> t.wrap = 'somehow'
        Traceback (most recent call last):
        TextError: Wrap mode 'somehow' not supported.
        """
        if not self.layout.supports_wrap_mode(mode):
            raise TextError("Wrap mode %r not supported."%(mode,))
        self._wrap_mode = mode
        self._invalidate()

    def set_layout(self, align, wrap, layout=None):
        """
        Set layout object, align and wrap modes.

        align -- align mode for text layout
        wrap -- wrap mode for text layout
        layout -- layout object to use, defaults to StandardTextLayout

        >>> t = Text(u"hi")
        >>> t.set_layout('right', 'clip')
        >>> t
        <Text flow widget 'hi' align='right' wrap='clip'>
        """
        if layout is None:
            layout = text_layout.default_layout
        self._layout = layout
        # align/wrap validity depends on the layout, so set them after
        self.set_align_mode(align)
        self.set_wrap_mode(wrap)

    align = property(lambda self:self._align_mode, set_align_mode)
    wrap = property(lambda self:self._wrap_mode, set_wrap_mode)
    layout = property(lambda self:self._layout)

    def render(self, size, focus=False):
        """
        Render contents with wrapping and alignment.  Return canvas.

        >>> Text(u"important things").render((18,)).text # ... = b in Python 3
        [...'important things  ']
        >>> Text(u"important things").render((11,)).text
        [...'important  ', ...'things     ']
        """
        (maxcol,) = size
        text, attr = self.get_text()
        #assert isinstance(text, unicode)
        trans = self.get_line_translation( maxcol, (text,attr) )
        return apply_text_layout(text, attr, trans, maxcol)

    def rows(self, size, focus=False):
        """
        Return the number of rows the rendered text spans.

        >>> Text(u"important things").rows((18,))
        1
        >>> Text(u"important things").rows((11,))
        2
        """
        (maxcol,) = size
        return len(self.get_line_translation(maxcol))

    def get_line_translation(self, maxcol, ta=None):
        """
        Return layout structure used to map self.text to a canvas.
        This method is used internally, but may be useful for
        debugging custom layout classes.

        maxcol -- columns available for display
        ta -- None or the (text, attr) tuple returned from
              self.get_text()
        """
        # translations are cached per maxcol value
        if not self._cache_maxcol or self._cache_maxcol != maxcol:
            self._update_cache_translation(maxcol, ta)
        return self._cache_translation

    def _update_cache_translation(self,maxcol, ta):
        if ta:
            text, attr = ta
        else:
            text, attr = self.get_text()
        self._cache_maxcol = maxcol
        self._cache_translation = self._calc_line_translation(
            text, maxcol )

    def _calc_line_translation(self, text, maxcol ):
        return self.layout.layout(
            text, self._cache_maxcol,
            self._align_mode, self._wrap_mode )

    def pack(self, size=None, focus=False):
        """
        Return the number of screen columns and rows required for
        this Text widget to be displayed without wrapping or
        clipping, as a single element tuple.

        size -- None for unlimited screen columns or (maxcol,) to
                specify a maximum column size

        >>> Text(u"important things").pack()
        (16, 1)
        >>> Text(u"important things").pack((15,))
        (9, 2)
        >>> Text(u"important things").pack((8,))
        (8, 2)
        """
        text, attr = self.get_text()

        if size is not None:
            (maxcol,) = size
            if not hasattr(self.layout, "pack"):
                return size
            trans = self.get_line_translation( maxcol, (text,attr))
            cols = self.layout.pack( maxcol, trans )
            return (cols, len(trans))

        # no maxcol given: width is the widest line between newlines
        i = 0
        cols = 0
        while i < len(text):
            j = text.find('\n', i)
            if j == -1: j = len(text)
            c = calc_width(text, i, j)
            if c>cols: cols = c
            i = j+1
        return (cols, text.count('\n') + 1)
class EditError(TextError):
    """Raised for invalid Edit widget operations (e.g. set_text calls)."""
class Edit(Text):
    """
    Text editing widget implements cursor movement, text insertion and
    deletion.  A caption may prefix the editing area.  Uses text class
    for text layout.
    """

    # allow users of this class to listen for change events
    # sent when the value of edit_text changes
    # (this variable is picked up by the MetaSignals metaclass)
    signals = ["change"]

    def valid_char(self, ch):
        """Return true for printable characters."""
        return is_wide_char(ch,0) or (len(ch)==1 and ord(ch) >= 32)

    def selectable(self): return True

    def __init__(self, caption=u"", edit_text=u"", multiline=False,
            align=LEFT, wrap=SPACE, allow_tab=False,
            edit_pos=None, layout=None, mask=None):
        """
        caption -- markup for caption preceeding edit_text
        edit_text -- text string for editing
        multiline -- True: 'enter' inserts newline  False: return it
        align -- align mode
        wrap -- wrap mode
        allow_tab -- True: 'tab' inserts 1-8 spaces  False: return it
        edit_pos -- initial position for cursor, None:at end
        layout -- layout object
        mask -- character to mask away text with, None means no masking

        >>> Edit()
        <Edit selectable flow widget '' edit_pos=0>
        >>> Edit(u"Y/n? ", u"yes")
        <Edit selectable flow widget 'yes' caption='Y/n? ' edit_pos=3>
        >>> Edit(u"Name ", u"Smith", edit_pos=1)
        <Edit selectable flow widget 'Smith' caption='Name ' edit_pos=1>
        >>> Edit(u"", u"3.14", align='right')
        <Edit selectable flow widget '3.14' align='right' edit_pos=4>
        """
        self.__super.__init__("", align, wrap, layout)
        self.multiline = multiline
        self.allow_tab = allow_tab
        self._edit_pos = 0
        self.set_caption(caption)
        self.set_edit_text(edit_text)
        if edit_pos is None:
            edit_pos = len(edit_text)
        self.set_edit_pos(edit_pos)
        self.set_mask(mask)
        self._shift_view_to_cursor = False

    def _repr_words(self):
        return self.__super._repr_words()[:-1] + [
            python3_repr(self._edit_text)] + [
            'caption=' + python3_repr(self._caption)] * bool(self._caption) + [
            'multiline'] * (self.multiline is True)

    def _repr_attrs(self):
        attrs = dict(self.__super._repr_attrs(),
            edit_pos=self._edit_pos)
        return remove_defaults(attrs, Edit.__init__)

    def get_text(self):
        """
        Returns (text, attributes).

        text -- complete text of caption and edit_text, maybe masked away
        attributes -- run length encoded attributes for text

        >>> Edit("What? ","oh, nothing.").get_text() # ... = u in Python 2
        (...'What? oh, nothing.', [])
        >>> Edit(('bright',"user@host:~$ "),"ls").get_text()
        (...'user@host:~$ ls', [('bright', 13)])
        """
        if self._mask is None:
            return self._caption + self._edit_text, self._attrib
        else:
            # password-style masking: one mask char per edit_text char
            return self._caption + (self._mask * len(self._edit_text)), self._attrib

    def set_text(self, markup):
        """
        Not supported by Edit widget.

        >>> Edit().set_text("test")
        Traceback (most recent call last):
        EditError: set_text() not supported.  Use set_caption() or set_edit_text() instead.
        """
        # hack to let Text.__init__() work
        if not hasattr(self, '_text') and markup == "":
            self._text = None
            return

        raise EditError("set_text() not supported.  Use set_caption()"
            " or set_edit_text() instead.")

    def get_pref_col(self, size):
        """
        Return the preferred column for the cursor, or the
        current cursor x value.  May also return 'left' or 'right'
        to indicate the leftmost or rightmost column available.

        This method is used internally and by other widgets when
        moving the cursor up or down between widgets so that the
        column selected is one that the user would expect.

        >>> size = (10,)
        >>> Edit().get_pref_col(size)
        0
        >>> e = Edit("","word")
        >>> e.get_pref_col(size)
        4
        >>> e.keypress(size, 'left')
        >>> e.get_pref_col(size)
        3
        >>> e.keypress(size, 'end')
        >>> e.get_pref_col(size)
        'right'
        >>> e = Edit("","2\\nwords")
        >>> e.keypress(size, 'left')
        >>> e.keypress(size, 'up')
        >>> e.get_pref_col(size)
        4
        >>> e.keypress(size, 'left')
        >>> e.get_pref_col(size)
        0
        """
        (maxcol,) = size
        pref_col, then_maxcol = self.pref_col_maxcol
        # stored preference is only valid for the maxcol it was set at
        if then_maxcol != maxcol:
            return self.get_cursor_coords((maxcol,))[0]
        else:
            return pref_col

    def update_text(self):
        """
        No longer supported.

        >>> Edit().update_text()
        Traceback (most recent call last):
        EditError: update_text() has been removed.  Use set_caption() or set_edit_text() instead.
        """
        raise EditError("update_text() has been removed.  Use "
            "set_caption() or set_edit_text() instead.")

    def set_caption(self, caption):
        """
        Set the caption markup for this widget.

        caption -- see Text.__init__() for description of markup

        >>> e = Edit("")
        >>> e.set_caption("cap1")
        >>> print e.caption
        cap1
        >>> e.set_caption(('bold', "cap2"))
        >>> print e.caption
        cap2
        >>> e.attrib
        [('bold', 4)]
        >>> e.caption = "cap3"  # not supported because caption stores text but set_caption() takes markup
        Traceback (most recent call last):
        AttributeError: can't set attribute
        """
        self._caption, self._attrib = decompose_tagmarkup(caption)
        self._invalidate()

    caption = property(lambda self:self._caption)

    def set_edit_pos(self, pos):
        """
        Set the cursor position with a self.edit_text offset.
        Clips pos to [0, len(edit_text)].

        >>> e = Edit(u"", u"word")
        >>> e.edit_pos
        4
        >>> e.set_edit_pos(2)
        >>> e.edit_pos
        2
        >>> e.edit_pos = -1  # Urwid 0.9.9 or later
        >>> e.edit_pos
        0
        >>> e.edit_pos = 20
        >>> e.edit_pos
        4
        """
        if pos < 0:
            pos = 0
        if pos > len(self._edit_text):
            pos = len(self._edit_text)
        self.highlight = None
        # moving the cursor discards the remembered preferred column
        self.pref_col_maxcol = None, None
        self._edit_pos = pos
        self._invalidate()

    edit_pos = property(lambda self:self._edit_pos, set_edit_pos)

    def set_mask(self, mask):
        """
        Set the character for masking text away.  Empty means no masking.
        """
        self._mask = mask
        self._invalidate()

    def set_edit_text(self, text):
        """
        Set the edit text for this widget.

        >>> e = Edit()
        >>> e.set_edit_text(u"yes")
        >>> print e.edit_text
        yes
        >>> e
        <Edit selectable flow widget 'yes' edit_pos=0>
        >>> e.edit_text = u"no"  # Urwid 0.9.9 or later
        >>> print e.edit_text
        no
        """
        try:
            text = unicode(text)
        except Exception:
            raise EditError("Can't convert edit text to a string!")
        self.highlight = None
        # notify listeners before storing the new value
        self._emit("change", text)
        self._edit_text = text
        if self.edit_pos > len(text):
            self.edit_pos = len(text)
        self._invalidate()

    def get_edit_text(self):
        """
        Return the edit text for this widget.

        >>> e = Edit(u"What? ", u"oh, nothing.")
        >>> print e.get_edit_text()
        oh, nothing.
        >>> print e.edit_text
        oh, nothing.
        """
        return self._edit_text

    edit_text = property(get_edit_text, set_edit_text)

    def insert_text(self, text):
        """
        Insert text at the cursor position and update cursor.
        This method is used by the keypress() method when inserting
        one or more characters into edit_text.

        >>> e = Edit(u"", u"42")
        >>> e.insert_text(u".5")
        >>> e
        <Edit selectable flow widget '42.5' edit_pos=4>
        >>> e.set_edit_pos(2)
        >>> e.insert_text(u"a")
        >>> print e.edit_text
        42a.5
        """
        text = self._normalize_to_caption(text)
        result_text, result_pos = self.insert_text_result(text)
        self.set_edit_text(result_text)
        self.set_edit_pos(result_pos)
        self.highlight = None

    def _normalize_to_caption(self, text):
        """
        Return text converted to the same type as self.caption
        (bytes or unicode)
        """
        tu = isinstance(text, unicode)
        cu = isinstance(self._caption, unicode)
        if tu == cu:
            return text
        if tu:
            return text.encode('ascii') # follow python2's implicit conversion
        return text.decode('ascii')

    def insert_text_result(self, text):
        """
        Return result of insert_text(text) without actually performing the
        insertion.  Handy for pre-validation.
        """
        # if there's highlighted text, it'll get replaced by the new text
        text = self._normalize_to_caption(text)
        if self.highlight:
            start, stop = self.highlight
            btext, etext = self.edit_text[:start], self.edit_text[stop:]
            result_text =  btext + etext
            result_pos = start
        else:
            result_text = self.edit_text
            result_pos = self.edit_pos

        result_text = (result_text[:result_pos] + text +
            result_text[result_pos:])
        result_pos += len(text)
        return (result_text, result_pos)

    def keypress(self, size, key):
        """
        Handle editing keystrokes, return others.

        >>> e, size = Edit(), (20,)
        >>> e.keypress(size, 'x')
        >>> e.keypress(size, 'left')
        >>> e.keypress(size, '1')
        >>> print e.edit_text
        1x
        >>> e.keypress(size, 'backspace')
        >>> e.keypress(size, 'end')
        >>> e.keypress(size, '2')
        >>> print e.edit_text
        x2
        >>> e.keypress(size, 'shift f1')
        'shift f1'
        """
        (maxcol,) = size

        p = self.edit_pos
        if self.valid_char(key):
            # printable character: insert it at the cursor
            self.insert_text( key )

        elif key=="tab" and self.allow_tab:
            # expand tab to spaces up to the next 8-column stop
            key = " "*(8-(self.edit_pos%8))
            self.insert_text( key )

        elif key=="enter" and self.multiline:
            key = "\n"
            self.insert_text( key )

        elif self._command_map[key] == 'cursor left':
            if p==0: return key
            p = move_prev_char(self.edit_text,0,p)
            self.set_edit_pos(p)

        elif self._command_map[key] == 'cursor right':
            if p >= len(self.edit_text): return key
            p = move_next_char(self.edit_text,p,len(self.edit_text))
            self.set_edit_pos(p)

        elif self._command_map[key] in ('cursor up', 'cursor down'):
            self.highlight = None

            x,y = self.get_cursor_coords((maxcol,))
            pref_col = self.get_pref_col((maxcol,))
            assert pref_col is not None
            #if pref_col is None:
            #    pref_col = x

            if self._command_map[key] == 'cursor up': y -= 1
            else: y += 1

            # return the key unhandled if we can't move within the text
            if not self.move_cursor_to_coords((maxcol,),pref_col,y):
                return key

        elif key=="backspace":
            self.pref_col_maxcol = None, None
            if not self._delete_highlighted():
                if p == 0: return key
                p = move_prev_char(self.edit_text,0,p)
                self.set_edit_text( self.edit_text[:p] +
                    self.edit_text[self.edit_pos:] )
                self.set_edit_pos( p )

        elif key=="delete":
            self.pref_col_maxcol = None, None
            if not self._delete_highlighted():
                if p >= len(self.edit_text):
                    return key
                p = move_next_char(self.edit_text,p,len(self.edit_text))
                self.set_edit_text( self.edit_text[:self.edit_pos] +
                    self.edit_text[p:] )

        elif self._command_map[key] in ('cursor max left', 'cursor max right'):
            self.highlight = None
            self.pref_col_maxcol = None, None

            x,y = self.get_cursor_coords((maxcol,))

            if self._command_map[key] == 'cursor max left':
                self.move_cursor_to_coords((maxcol,), LEFT, y)
            else:
                self.move_cursor_to_coords((maxcol,), RIGHT, y)
            return

        else:
            # key wasn't handled
            return key

    def move_cursor_to_coords(self, size, x, y):
        """
        Set the cursor position with (x,y) coordinates.
        Returns True if move succeeded, False otherwise.

        >>> size = (10,)
        >>> e = Edit("","edit\\ntext")
        >>> e.move_cursor_to_coords(size, 5, 0)
        True
        >>> e.edit_pos
        4
        >>> e.move_cursor_to_coords(size, 5, 3)
        False
        >>> e.move_cursor_to_coords(size, 0, 1)
        True
        >>> e.edit_pos
        5
        """
        (maxcol,) = size
        trans = self.get_line_translation(maxcol)
        top_x, top_y = self.position_coords(maxcol, 0)
        if y < top_y or y >= len(trans):
            return False

        pos = calc_pos( self.get_text()[0], trans, x, y )
        e_pos = pos - len(self.caption)
        # clamp to the editable region (exclude the caption)
        if e_pos < 0: e_pos = 0
        if e_pos > len(self.edit_text): e_pos = len(self.edit_text)
        self.edit_pos = e_pos
        self.pref_col_maxcol = x, maxcol
        self._invalidate()
        return True

    def mouse_event(self, size, event, button, x, y, focus):
        """
        Move the cursor to the location clicked for button 1.

        >>> size = (20,)
        >>> e = Edit("","words here")
        >>> e.mouse_event(size, 'mouse press', 1, 2, 0, True)
        True
        >>> e.edit_pos
        2
        """
        (maxcol,) = size
        if button==1:
            return self.move_cursor_to_coords( (maxcol,), x, y )

    def _delete_highlighted(self):
        """
        Delete all highlighted text and update cursor position, if any
        text is highlighted.
        """
        if not self.highlight: return
        start, stop = self.highlight
        btext, etext = self.edit_text[:start], self.edit_text[stop:]
        self.set_edit_text( btext + etext )
        self.edit_pos = start
        self.highlight = None
        return True

    def render(self, size, focus=False):
        """
        Render edit widget and return canvas.  Include cursor when in
        focus.

        >>> c = Edit("? ","yes").render((10,), focus=True)
        >>> c.text # ... = b in Python 3
        [...'? yes     ']
        >>> c.cursor
        (5, 0)
        """
        (maxcol,) = size
        # only scroll the view horizontally to follow the cursor when
        # this widget has the focus
        self._shift_view_to_cursor = bool(focus)

        canv = Text.render(self,(maxcol,))
        if focus:
            canv = CompositeCanvas(canv)
            canv.cursor = self.get_cursor_coords((maxcol,))

        # .. will need to FIXME if I want highlight to work again
        #if self.highlight:
        #    hstart, hstop = self.highlight_coords()
        #    d.coords['highlight'] = [ hstart, hstop ]
        return canv

    def get_line_translation(self, maxcol, ta=None ):
        trans = Text.get_line_translation(self, maxcol, ta)
        if not self._shift_view_to_cursor:
            return trans

        text, ignore = self.get_text()
        x,y = calc_coords( text, trans,
            self.edit_pos + len(self.caption) )
        # shift the cursor's line horizontally if the cursor would
        # otherwise fall outside the visible columns
        if x < 0:
            return ( trans[:y]
                + [shift_line(trans[y],-x)]
                + trans[y+1:] )
        elif x >= maxcol:
            return ( trans[:y]
                + [shift_line(trans[y],-(x-maxcol+1))]
                + trans[y+1:] )
        return trans

    def get_cursor_coords(self, size):
        """
        Return the (x,y) coordinates of cursor within widget.

        >>> Edit("? ","yes").get_cursor_coords((10,))
        (5, 0)
        """
        (maxcol,) = size

        self._shift_view_to_cursor = True
        return self.position_coords(maxcol,self.edit_pos)

    def position_coords(self,maxcol,pos):
        """
        Return (x,y) coordinates for an offset into self.edit_text.
        """
        p = pos + len(self.caption)
        trans = self.get_line_translation(maxcol)
        x,y = calc_coords(self.get_text()[0], trans,p)
        return x,y
class IntEdit(Edit):
    """Edit widget for integer values"""
    def valid_char(self, ch):
        """
        Return true for decimal digits.
        """
        # single ASCII decimal digit only
        return len(ch)==1 and ch in "0123456789"
    def __init__(self,caption="",default=None):
        """
        caption -- caption markup
        default -- default edit value
        >>> IntEdit(u"", 42)
        <IntEdit selectable flow widget '42' edit_pos=2>
        """
        # store the value as a string; None means an empty field
        if default is not None: val = str(default)
        else: val = ""
        self.__super.__init__(caption,val)
    def keypress(self, size, key):
        """
        Handle editing keystrokes. Remove leading zeros.
        >>> e, size = IntEdit(u"", 5002), (10,)
        >>> e.keypress(size, 'home')
        >>> e.keypress(size, 'delete')
        >>> print e.edit_text
        002
        >>> e.keypress(size, 'end')
        >>> print e.edit_text
        2
        """
        (maxcol,) = size
        unhandled = Edit.keypress(self,(maxcol,),key)
        if not unhandled:
            # trim leading zeros, but only while the cursor sits past
            # position 0 so editing at the start still works
            while self.edit_pos > 0 and self.edit_text[:1] == "0":
                self.set_edit_pos( self.edit_pos - 1)
                self.set_edit_text(self.edit_text[1:])
        return unhandled
    def value(self):
        """
        Return the numeric value of self.edit_text.
        >>> e, size = IntEdit(), (10,)
        >>> e.keypress(size, '5')
        >>> e.keypress(size, '1')
        >>> e.value() == 51
        True
        """
        if self.edit_text:
            # NOTE(review): long() is Python 2 only; a Python 3 port
            # would need int() here
            return long(self.edit_text)
        else:
            # empty field counts as zero
            return 0
def delegate_to_widget_mixin(attribute_name):
    """
    Return a mixin class that delegates all standard widget methods
    to an attribute given by attribute_name.
    This mixin is designed to be used as a superclass of another widget.
    """
    # FIXME: this is so common, let's add proper support for it
    # when layout and rendering are separated
    get_delegate = attrgetter(attribute_name)
    class DelegateToWidgetMixin(Widget):
        no_cache = ["rows"] # crufty metaclass work-around
        def render(self, size, focus=False):
            # render through the delegate; the result is wrapped in a
            # CompositeCanvas (presumably to keep the delegate's cached
            # canvas independent of this widget's -- confirm)
            canv = get_delegate(self).render(size, focus=focus)
            return CompositeCanvas(canv)
        # each property below returns the delegate's *bound method*, so
        # e.g. self.keypress(...) forwards directly to the delegate
        selectable = property(lambda self:get_delegate(self).selectable)
        get_cursor_coords = property(
            lambda self:get_delegate(self).get_cursor_coords)
        get_pref_col = property(lambda self:get_delegate(self).get_pref_col)
        keypress = property(lambda self:get_delegate(self).keypress)
        move_cursor_to_coords = property(
            lambda self:get_delegate(self).move_cursor_to_coords)
        rows = property(lambda self:get_delegate(self).rows)
        mouse_event = property(lambda self:get_delegate(self).mouse_event)
        sizing = property(lambda self:get_delegate(self).sizing)
        pack = property(lambda self:get_delegate(self).pack)
    return DelegateToWidgetMixin
class WidgetWrapError(Exception):
    """Raised when the obsolete WidgetWrap.w attribute is accessed."""
    pass
class WidgetWrap(delegate_to_widget_mixin('_wrapped_widget'), Widget):
    def __init__(self, w):
        """
        w -- widget to wrap, stored as self._w
        This object will pass the functions defined in Widget interface
        definition to self._w.
        The purpose of this widget is to provide a base class for
        widgets that compose other widgets for their display and
        behaviour. The details of that composition should not affect
        users of the subclass. The subclass may decide to expose some
        of the wrapped widgets by behaving like a ContainerWidget or
        WidgetDecoration, or it may hide them from outside access.
        """
        self._wrapped_widget = w
    def _set_w(self, w):
        """
        Change the wrapped widget. This is meant to be called
        only by subclasses.
        >>> size = (10,)
        >>> ww = WidgetWrap(Edit("hello? ","hi"))
        >>> ww.render(size).text # ... = b in Python 3
        [...'hello? hi ']
        >>> ww.selectable()
        True
        >>> ww._w = Text("goodbye") # calls _set_w()
        >>> ww.render(size).text
        [...'goodbye ']
        >>> ww.selectable()
        False
        """
        self._wrapped_widget = w
        self._invalidate()
    # _w reads the wrapped widget directly; writing goes through
    # _set_w() so the widget is invalidated
    _w = property(lambda self:self._wrapped_widget, _set_w)
    def _raise_old_name_error(self, val=None):
        # the old public name 'w' was replaced by '_w'; both reading
        # and writing it now raise WidgetWrapError (see property below)
        raise WidgetWrapError("The WidgetWrap.w member variable has "
            "been renamed to WidgetWrap._w (not intended for use "
            "outside the class and its subclasses). "
            "Please update your code to use self._w "
            "instead of self.w.")
    w = property(_raise_old_name_error, _raise_old_name_error)
def _test():
    # run all doctests embedded in this module's docstrings
    import doctest
    doctest.testmod()
if __name__=='__main__':
    _test()
<file_sep>/urwid/container.py
#!/usr/bin/python
#
# Urwid container widget classes
# Copyright (C) 2004-2011 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from urwid.util import is_mouse_press
from urwid.widget import Widget, BoxWidget, FlowWidget, Divider
from urwid.decoration import Padding, Filler, calculate_padding, calculate_filler, \
decompose_align_width, decompose_valign_height
from urwid.monitored_list import MonitoredList
from urwid.canvas import CompositeCanvas, CanvasOverlay, CanvasCombine, \
SolidCanvas, CanvasJoin
class WidgetContainer(Widget):
    """
    Base class for widgets holding a list of child widgets, exposing a
    read-only sequence interface over their base widgets.
    """
    def __init__(self, widget_list):
        """
        widget_list -- iterable of widgets to store in this container
        """
        self.__super.__init__()
        self._widget_list = MonitoredList([])
        self._set_widget_list(widget_list)
        self._widget_list.set_modified_callback(self._invalidate)

    def _get_widget_list(self):
        return self._widget_list

    def _set_widget_list(self, widget_list):
        """
        widget_list -- iterable containing widgets
        Copy the values from widget_list into self.widget_list
        """
        # bug fix: this method previously had no body, so widgets
        # passed to __init__ (or assigned to .widget_list) were
        # silently discarded.  Assign through a slice so the
        # MonitoredList instance -- and any modified callback attached
        # to it -- is preserved.
        self._widget_list[:] = widget_list

    widget_list = property(_get_widget_list, _set_widget_list)

    def __getitem__(self, index):
        """
        Return the base widget of the widget at self.widget_list[index].
        """
        w = self._widget_list[index]
        # unwrap decorations when present
        if hasattr(w, 'base_widget'):
            w = w.base_widget
        return w

    def __len__(self):
        return len(self._widget_list)

    def __iter__(self):
        # yield base widgets in order until __getitem__ runs out
        i = 0
        try:
            while True:
                v = self[i]
                yield v
                i += 1
        except IndexError:
            return

    def __contains__(self, value):
        for v in self:
            if v == value:
                return True
        return False

    def __reversed__(self):
        for i in reversed(range(len(self))):
            yield self[i]

    def index(self, value):
        """
        Return the index of the first base widget equal to value;
        raise ValueError when not found (list-like behaviour).
        """
        for i, v in enumerate(self):
            if v == value:
                return i
        raise ValueError("%r is not in this container" % (value,))

    def count(self, value):
        """Return the number of base widgets equal to value."""
        return sum(1 for v in self if v == value)
class GridFlow(FlowWidget):
    """
    Flow widget that displays equal-width flow widgets ("cells") left
    to right, wrapping onto additional rows as needed.  Rendering,
    sizing and input are delegated to a generated arrangement of
    Pile/Columns/Padding widgets.
    """
    def selectable(self):
        """Return True if the cell in focus is selectable."""
        # NOTE(review): when there are no cells this returns
        # self.focus_cell (None) rather than a strict bool
        return self.focus_cell and self.focus_cell.selectable()
    def __init__(self, cells, cell_width, h_sep, v_sep, align):
        """
        cells -- list of flow widgets to display
        cell_width -- column width for each cell
        h_sep -- blank columns between each cell horizontally
        v_sep -- blank rows between cells vertically (if more than
            one row is required to display all the cells)
        align -- horizontal alignment of cells, see "align" parameter
            of Padding widget for available options
        """
        self.__super.__init__()
        self.cells = cells
        self.cell_width = cell_width
        self.h_sep = h_sep
        self.v_sep = v_sep
        self.align = align
        # the first cell gets focus by default
        self.focus_cell = None
        if cells:
            self.focus_cell = cells[0]
        # cache key for the generated display widget (see
        # get_display_widget); None forces regeneration
        self._cache_maxcol = None
    def set_focus(self, cell):
        """Set the cell in focus.
        cell -- widget or integer index into self.cells"""
        if type(cell) == int:
            assert cell>=0 and cell<len(self.cells)
            self.focus_cell = self.cells[cell]
        else:
            assert cell in self.cells
            self.focus_cell = cell
        # a focus change invalidates the cached display widget
        self._cache_maxcol = None
        self._invalidate()
    def get_focus (self):
        """Return the widget in focus."""
        return self.focus_cell
    def get_display_widget(self, size):
        """
        Arrange the cells into columns (and possibly a pile) for
        display, input or to calculate rows.
        """
        (maxcol,) = size
        # use cache if possible
        if self._cache_maxcol == maxcol:
            return self._cache_display_widget
        self._cache_maxcol = maxcol
        self._cache_display_widget = self.generate_display_widget(
            size)
        return self._cache_display_widget
    def generate_display_widget(self, size):
        """
        Actually generate display widget (ignoring cache)
        """
        (maxcol,) = size
        d = Divider()
        if len(self.cells) == 0: # how dull
            return d
        if self.v_sep > 1:
            # increase size of divider
            d.top = self.v_sep-1
        # cells per row
        bpr = (maxcol+self.h_sep) // (self.cell_width+self.h_sep)
        if bpr == 0: # too narrow, pile them on top of eachother
            l = [self.cells[0]]
            f = 0
            for b in self.cells[1:]:
                if b is self.focus_cell:
                    f = len(l)
                if self.v_sep:
                    l.append(d)
                l.append(b)
            return Pile(l, f)
        if bpr >= len(self.cells): # all fit on one row
            k = len(self.cells)
            f = self.cells.index(self.focus_cell)
            cols = Columns(self.cells, self.h_sep, f)
            rwidth = (self.cell_width+self.h_sep)*k - self.h_sep
            row = Padding(cols, self.align, rwidth)
            return row
        # general case: a Pile of Padding(Columns(...)) rows with
        # optional divider rows between them; f tracks the Pile index
        # of the row containing the focus cell
        out = []
        s = 0
        f = 0
        while s < len(self.cells):
            if out and self.v_sep:
                out.append(d)
            k = min( len(self.cells), s+bpr )
            cells = self.cells[s:k]
            if self.focus_cell in cells:
                f = len(out)
                fcol = cells.index(self.focus_cell)
                cols = Columns(cells, self.h_sep, fcol)
            else:
                cols = Columns(cells, self.h_sep)
            rwidth = (self.cell_width+self.h_sep)*(k-s)-self.h_sep
            row = Padding(cols, self.align, rwidth)
            out.append(row)
            s += bpr
        return Pile(out, f)
    def _set_focus_from_display_widget(self, w):
        """Set the focus to the item in focus in the display widget."""
        # first level: a single Padding(Columns) row
        if isinstance(w, Padding):
            # unwrap padding
            w = w._original_widget
        w = w.get_focus()
        if w in self.cells:
            self.set_focus(w)
            return
        # second level: a Pile whose focused row wraps Columns again
        if isinstance(w, Padding):
            # unwrap padding
            w = w._original_widget
        w = w.get_focus()
        #assert w == self.cells[0], repr((w, self.cells))
        self.set_focus(w)
    def keypress(self, size, key):
        """
        Pass keypress to display widget for handling.
        Capture focus changes."""
        d = self.get_display_widget(size)
        if not d.selectable():
            return key
        key = d.keypress(size, key)
        if key is None:
            # the key was handled; mirror any focus change back
            self._set_focus_from_display_widget(d)
        return key
    def rows(self, size, focus=False):
        """Return rows used by this widget."""
        d = self.get_display_widget(size)
        return d.rows(size, focus=focus)
    def render(self, size, focus=False ):
        """Use display widget to render."""
        d = self.get_display_widget(size)
        return d.render(size, focus)
    def get_cursor_coords(self, size):
        """Get cursor from display widget."""
        d = self.get_display_widget(size)
        if not d.selectable():
            return None
        return d.get_cursor_coords(size)
    def move_cursor_to_coords(self, size, col, row ):
        """Set the widget in focus based on the col + row."""
        d = self.get_display_widget(size)
        if not d.selectable():
            # happy is the default
            return True
        r = d.move_cursor_to_coords(size, col, row)
        if not r:
            return False
        # mirror the focus change from the display widget
        self._set_focus_from_display_widget(d)
        self._invalidate()
        return True
    def mouse_event(self, size, event, button, col, row, focus):
        """Send mouse event to contained widget."""
        d = self.get_display_widget(size)
        r = d.mouse_event(size, event, button, col, row, focus)
        if not r:
            return False
        # mirror the focus change from the display widget
        self._set_focus_from_display_widget(d)
        self._invalidate()
        return True
    def get_pref_col(self, size):
        """Return pref col from display widget."""
        d = self.get_display_widget(size)
        if not d.selectable():
            return None
        return d.get_pref_col(size)
class OverlayError(Exception):
    """Raised for invalid Overlay alignment/size parameters."""
    pass
class Overlay(BoxWidget):
    """
    Box widget that renders top_w "on top of" bottom_w, positioning
    and sizing top_w with Padding/Filler-style parameters.
    """
    def __init__(self, top_w, bottom_w, align, width, valign, height,
        min_width=None, min_height=None ):
        """
        top_w -- a flow, box or fixed widget to overlay "on top"
        bottom_w -- a box widget to appear "below" previous widget
        align -- one of:
            'left', 'center', 'right'
            ('fixed left', columns)
            ('fixed right', columns)
            ('relative', percentage 0=left 100=right)
        width -- one of:
            None if top_w is a fixed widget
            number of columns wide
            ('fixed right', columns) Only if align is 'fixed left'
            ('fixed left', columns) Only if align is 'fixed right'
            ('relative', percentage of total width)
        valign -- one of:
            'top', 'middle', 'bottom'
            ('fixed top', rows)
            ('fixed bottom', rows)
            ('relative', percentage 0=top 100=bottom)
        height -- one of:
            None if top_w is a flow or fixed widget
            number of rows high
            ('fixed bottom', rows) Only if valign is 'fixed top'
            ('fixed top', rows) Only if valign is 'fixed bottom'
            ('relative', percentage of total height)
        min_width -- the minimum number of columns for top_w
            when width is not fixed
        min_height -- one of:
            minimum number of rows for the widget when height not fixed

        Overlay widgets behave similarly to Padding and Filler widgets
        when determining the size and position of top_w.  bottom_w is
        always rendered the full size available "below" top_w.
        """
        self.__super.__init__()
        self.top_w = top_w
        self.bottom_w = bottom_w
        self.set_overlay_parameters(align, width, valign, height,
            min_width, min_height)

    def set_overlay_parameters(self, align, width, valign, height,
        min_width=None, min_height=None):
        """
        Adjust the overlay size and position parameters.
        See __init__() for a description of the parameters.
        """
        at,aa,wt,wa=decompose_align_width(align, width, OverlayError)
        vt,va,ht,ha=decompose_valign_height(valign,height,OverlayError)
        self.align_type, self.align_amount = at, aa
        self.width_type, self.width_amount = wt, wa
        # min_width only applies when the width is not fixed
        if self.width_type and self.width_type != 'fixed':
            self.min_width = min_width
        else:
            self.min_width = None
        self.valign_type, self.valign_amount = vt, va
        self.height_type, self.height_amount = ht, ha
        # min_height only applies when the height is not fixed
        if self.height_type not in ('fixed', None):
            self.min_height = min_height
        else:
            self.min_height = None
        self._invalidate()

    def selectable(self):
        """Return selectable from top_w."""
        return self.top_w.selectable()

    def keypress(self, size, key):
        """Pass keypress to top_w."""
        return self.top_w.keypress(self.top_w_size(size,
            *self.calculate_padding_filler(size, True)), key)

    def get_cursor_coords(self, size):
        """Return cursor coords from top_w, if any."""
        # bug fix: this previously tested hasattr(self.body, ...), but
        # Overlay never defines a 'body' attribute -- the cursor comes
        # from top_w
        if not hasattr(self.top_w, 'get_cursor_coords'):
            return None
        (maxcol, maxrow) = size
        left, right, top, bottom = self.calculate_padding_filler(size,
            True)
        x, y = self.top_w.get_cursor_coords(
            (maxcol-left-right, maxrow-top-bottom) )
        if y >= maxrow:  # required??
            y = maxrow-1
        # translate from top_w-local to overlay coordinates
        return x+left, y+top

    def calculate_padding_filler(self, size, focus):
        """Return (padding left, right, filler top, bottom)."""
        (maxcol, maxrow) = size
        height = None
        if self.width_type is None:
            # top_w is a fixed widget
            width, height = self.top_w.pack((),focus=focus)
            assert height, "fixed widget must have a height"
            left, right = calculate_padding(self.align_type,
                self.align_amount, 'fixed', width,
                None, maxcol, clip=True )
        else:
            left, right = calculate_padding(self.align_type,
                self.align_amount, self.width_type,
                self.width_amount, self.min_width, maxcol)
        if height:
            # top_w is a fixed widget
            top, bottom = calculate_filler(self.valign_type,
                self.valign_amount, 'fixed', height,
                None, maxrow)
            if maxrow-top-bottom < height:
                bottom = maxrow-top-height
        elif self.height_type is None:
            # top_w is a flow widget
            height = self.top_w.rows((maxcol,),focus=focus)
            top, bottom = calculate_filler( self.valign_type,
                self.valign_amount, 'fixed', height,
                None, maxrow )
            if height > maxrow: # flow widget rendered too large
                bottom = maxrow - height
        else:
            top, bottom = calculate_filler(self.valign_type,
                self.valign_amount, self.height_type,
                self.height_amount, self.min_height, maxrow)
        return left, right, top, bottom

    def top_w_size(self, size, left, right, top, bottom):
        """Return the size to pass to top_w."""
        if self.width_type is None:
            # top_w is a fixed widget
            return ()
        maxcol, maxrow = size
        if self.width_type is not None and self.height_type is None:
            # top_w is a flow widget
            return (maxcol-left-right,)
        # top_w is a box widget
        return (maxcol-left-right, maxrow-top-bottom)

    def render(self, size, focus=False):
        """Render top_w overlayed on bottom_w."""
        left, right, top, bottom = self.calculate_padding_filler(size,
            focus)
        bottom_c = self.bottom_w.render(size)
        top_c = self.top_w.render(
            self.top_w_size(size, left, right, top, bottom), focus)
        top_c = CompositeCanvas(top_c)
        # negative padding/filler values mean top_w must be clipped
        if left<0 or right<0:
            top_c.pad_trim_left_right(min(0,left), min(0,right))
        if top<0 or bottom<0:
            top_c.pad_trim_top_bottom(min(0,top), min(0,bottom))
        return CanvasOverlay(top_c, bottom_c, left, top)

    def mouse_event(self, size, event, button, col, row, focus):
        """Pass event to top_w, ignore if outside of top_w."""
        if not hasattr(self.top_w, 'mouse_event'):
            return False
        left, right, top, bottom = self.calculate_padding_filler(size,
            focus)
        maxcol, maxrow = size
        # clicks outside top_w's rectangle are not handled
        if ( col<left or col>=maxcol-right or
            row<top or row>=maxrow-bottom ):
            return False
        return self.top_w.mouse_event(
            self.top_w_size(size, left, right, top, bottom),
            event, button, col-left, row-top, focus )
class Frame(BoxWidget):
    """
    Box widget with optional header and footer flow widgets above and
    below a box-widget body.
    """
    def __init__(self, body, header=None, footer=None, focus_part='body'):
        """
        body -- a box widget for the body of the frame
        header -- a flow widget for above the body (or None)
        footer -- a flow widget for below the body (or None)
        focus_part -- 'header', 'footer' or 'body'
        """
        self.__super.__init__()
        self._header = header
        self._body = body
        self._footer = footer
        self.focus_part = focus_part
    def get_header(self):
        return self._header
    def set_header(self, header):
        self._header = header
        self._invalidate()
    header = property(get_header, set_header)
    def get_body(self):
        return self._body
    def set_body(self, body):
        self._body = body
        self._invalidate()
    body = property(get_body, set_body)
    def get_footer(self):
        return self._footer
    def set_footer(self, footer):
        self._footer = footer
        self._invalidate()
    footer = property(get_footer, set_footer)
    def set_focus(self, part):
        """Set the part of the frame that is in focus.
        part -- 'header', 'footer' or 'body'
        """
        assert part in ('header', 'footer', 'body')
        self.focus_part = part
        self._invalidate()
    def get_focus (self):
        """Return the part of the frame that is in focus.
        Will be one of 'header', 'footer' or 'body'.
        """
        return self.focus_part
    def frame_top_bottom(self, size, focus):
        """Calculate the number of rows for the header and footer.
        Returns (head rows, foot rows),(orig head, orig foot).
        orig head/foot are from rows() calls.
        """
        (maxcol, maxrow) = size
        frows = hrows = 0
        if self.header:
            hrows = self.header.rows((maxcol,),
                self.focus_part=='header' and focus)
        if self.footer:
            frows = self.footer.rows((maxcol,),
                self.focus_part=='footer' and focus)
        remaining = maxrow
        # when there is not enough room the part in focus keeps its
        # rows and the others are trimmed
        if self.focus_part == 'footer':
            if frows >= remaining:
                return (0, remaining),(hrows, frows)
            remaining -= frows
            if hrows >= remaining:
                return (remaining, frows),(hrows, frows)
        elif self.focus_part == 'header':
            if hrows >= maxrow:
                return (remaining, 0),(hrows, frows)
            remaining -= hrows
            if frows >= remaining:
                return (hrows, remaining),(hrows, frows)
        elif hrows + frows >= remaining:
            # self.focus_part == 'body'
            # trim footer then header, keeping at least one body row
            rless1 = max(0, remaining-1)
            if frows >= remaining-1:
                return (0, rless1),(hrows, frows)
            remaining -= frows
            rless1 = max(0, remaining-1)
            return (rless1,frows),(hrows, frows)
        return (hrows, frows),(hrows, frows)
    def render(self, size, focus=False):
        """Render frame and return it."""
        (maxcol, maxrow) = size
        (htrim, ftrim),(hrows, frows) = self.frame_top_bottom(
            (maxcol, maxrow), focus)
        combinelist = []
        depends_on = []
        head = None
        if htrim and htrim < hrows:
            # header does not fully fit: render through a top Filler
            head = Filler(self.header, 'top').render(
                (maxcol, htrim),
                focus and self.focus_part == 'header')
        elif htrim:
            head = self.header.render((maxcol,),
                focus and self.focus_part == 'header')
            assert head.rows() == hrows, "rows, render mismatch"
        if head:
            combinelist.append((head, 'header',
                self.focus_part == 'header'))
            depends_on.append(self.header)
        if ftrim+htrim < maxrow:
            # body gets whatever rows the header/footer leave over
            body = self.body.render((maxcol, maxrow-ftrim-htrim),
                focus and self.focus_part == 'body')
            combinelist.append((body, 'body',
                self.focus_part == 'body'))
            depends_on.append(self.body)
        foot = None
        if ftrim and ftrim < frows:
            # footer does not fully fit: render through a bottom Filler
            foot = Filler(self.footer, 'bottom').render(
                (maxcol, ftrim),
                focus and self.focus_part == 'footer')
        elif ftrim:
            foot = self.footer.render((maxcol,),
                focus and self.focus_part == 'footer')
            assert foot.rows() == frows, "rows, render mismatch"
        if foot:
            combinelist.append((foot, 'footer',
                self.focus_part == 'footer'))
            depends_on.append(self.footer)
        return CanvasCombine(combinelist)
    def keypress(self, size, key):
        """Pass keypress to widget in focus."""
        (maxcol, maxrow) = size
        if self.focus_part == 'header' and self.header is not None:
            if not self.header.selectable():
                return key
            return self.header.keypress((maxcol,),key)
        if self.focus_part == 'footer' and self.footer is not None:
            if not self.footer.selectable():
                return key
            return self.footer.keypress((maxcol,),key)
        if self.focus_part != 'body':
            return key
        # body receives whatever rows the header and footer leave over
        remaining = maxrow
        if self.header is not None:
            remaining -= self.header.rows((maxcol,))
        if self.footer is not None:
            remaining -= self.footer.rows((maxcol,))
        if remaining <= 0: return key
        if not self.body.selectable():
            return key
        return self.body.keypress( (maxcol, remaining), key )
    def mouse_event(self, size, event, button, col, row, focus):
        """
        Pass mouse event to appropriate part of frame.
        Focus may be changed on button 1 press.
        """
        (maxcol, maxrow) = size
        (htrim, ftrim),(hrows, frows) = self.frame_top_bottom(
            (maxcol, maxrow), focus)
        if row < htrim: # within header
            focus = focus and self.focus_part == 'header'
            if is_mouse_press(event) and button==1:
                if self.header.selectable():
                    self.set_focus('header')
            if not hasattr(self.header, 'mouse_event'):
                return False
            return self.header.mouse_event( (maxcol,), event,
                button, col, row, focus )
        if row >= maxrow-ftrim: # within footer
            focus = focus and self.focus_part == 'footer'
            if is_mouse_press(event) and button==1:
                if self.footer.selectable():
                    self.set_focus('footer')
            if not hasattr(self.footer, 'mouse_event'):
                return False
            # row translated to footer-local coordinates
            return self.footer.mouse_event( (maxcol,), event,
                button, col, row-maxrow+frows, focus )
        # within body
        focus = focus and self.focus_part == 'body'
        if is_mouse_press(event) and button==1:
            if self.body.selectable():
                self.set_focus('body')
        if not hasattr(self.body, 'mouse_event'):
            return False
        # row translated to body-local coordinates
        return self.body.mouse_event( (maxcol, maxrow-htrim-ftrim),
            event, button, col, row-htrim, focus )
class PileError(Exception):
    """Raised for invalid Pile widget_list items or weight setup."""
    pass
class Pile(Widget): # either FlowWidget or BoxWidget
    def __init__(self, widget_list, focus_item=None):
        """
        widget_list -- list of widgets
        focus_item -- widget or integer index, if None the first
            selectable widget will be chosen.
        widget_list may also contain tuples such as:
        ('flow', widget) always treat widget as a flow widget
        ('fixed', height, widget) give this box widget a fixed height
        ('weight', weight, widget) if the pile is treated as a box
            widget then treat widget as a box widget with a
            height based on its relative weight value, otherwise
            treat widget as a flow widget
        widgets not in a tuple are the same as ('weight', 1, widget)
        If the pile is treated as a box widget there must be at least
        one 'weight' tuple in widget_list.
        """
        self.__super.__init__()
        self.widget_list = MonitoredList(widget_list)
        self.item_types = []
        # normalize widget_list entries: strip tuple wrappers into the
        # parallel item_types list of (type, height/weight) pairs
        for i in range(len(widget_list)):
            w = widget_list[i]
            if type(w) != tuple:
                self.item_types.append(('weight',1))
            elif w[0] == 'flow':
                f, widget = w
                self.widget_list[i] = widget
                self.item_types.append((f,None))
                w = widget
            elif w[0] in ('fixed', 'weight'):
                f, height, widget = w
                self.widget_list[i] = widget
                self.item_types.append((f,height))
                w = widget
            else:
                raise PileError, "widget list item invalid %r" % (w,)
            # default focus: the first selectable widget
            if focus_item is None and w.selectable():
                focus_item = i
        self.widget_list.set_modified_callback(self._invalidate)
        if focus_item is None:
            focus_item = 0
        if self.widget_list:
            self.set_focus(focus_item)
        else:
            self.focus_item=None
        self.pref_col = 0
    @property
    def contents(self):
        # yield (widget, (type, height)) pairs; widgets appended to
        # widget_list after construction default to ('weight', 1)
        for i, w in enumerate(self.widget_list):
            try:
                yield w, self.item_types[i]
            except IndexError:
                yield w, ('weight', 1)
    def _get_item_types(self, i):
        # item_types may be shorter than widget_list (see contents)
        try:
            return self.item_types[i]
        except IndexError:
            return 'weight', 1
    def selectable(self):
        """Return True if the focus item is selectable."""
        return self.focus_item is not None and self.focus_item.selectable()
    def set_focus(self, item):
        """Set the item in focus.
        item -- widget or integer index"""
        if type(item) == int:
            assert item>=0 and item<len(self.widget_list)
            self.focus_item = self.widget_list[item]
        else:
            assert item in self.widget_list
            self.focus_item = item
        self._invalidate()
    def get_focus(self):
        """Return the widget in focus."""
        return self.focus_item
    def get_pref_col(self, size):
        """Return the preferred column for the cursor, or None."""
        if not self.selectable():
            return None
        self._update_pref_col_from_focus(size)
        return self.pref_col
    def get_item_size(self, size, i, focus, item_rows=None):
        """
        Return a size appropriate for passing to self.widget_list[i]
        """
        maxcol = size[0]
        f, height = self._get_item_types(i)
        if f=='fixed':
            return (maxcol, height)
        elif f=='weight' and len(size)==2:
            # box-widget pile: weighted items get a share of the rows
            if not item_rows:
                item_rows = self.get_item_rows(size, focus)
            return (maxcol, item_rows[i])
        else:
            # flow widget: no row constraint
            return (maxcol,)
    def get_item_rows(self, size, focus):
        """
        Return a list of the number of rows used by each widget
        in self.item_list.
        """
        remaining = None
        maxcol = size[0]
        if len(size)==2:
            remaining = size[1]
        l = []
        if remaining is None:
            # pile is a flow widget
            for w, (f, height) in self.contents:
                if f == 'fixed':
                    l.append(height)
                else:
                    l.append(w.rows((maxcol,), focus=focus
                        and self.focus_item == w))
            return l
        # pile is a box widget
        # do an extra pass to calculate rows for each widget
        wtotal = 0
        for w, (f, height) in self.contents:
            if f == 'flow':
                rows = w.rows((maxcol,), focus=focus and
                    self.focus_item == w )
                l.append(rows)
                remaining -= rows
            elif f == 'fixed':
                l.append(height)
                remaining -= height
            else:
                # weighted item: rows assigned in second pass below
                l.append(None)
                wtotal += height
        if wtotal == 0:
            raise PileError, "No weighted widgets found for Pile treated as a box widget"
        if remaining < 0:
            remaining = 0
        # distribute remaining rows proportionally by weight, rounding
        # to the nearest row at each step
        for i, (w, (f, height)) in enumerate(self.contents):
            li = l[i]
            if li is None:
                rows = int(float(remaining)*height
                    /wtotal+0.5)
                l[i] = rows
                remaining -= rows
                wtotal -= height
        return l
    def render(self, size, focus=False):
        """
        Render all widgets in self.widget_list and return the results
        stacked one on top of the next.
        """
        maxcol = size[0]
        item_rows = None
        combinelist = []
        for i, (w, (f, height)) in enumerate(self.contents):
            item_focus = self.focus_item == w
            canv = None
            if f == 'fixed':
                canv = w.render( (maxcol, height),
                    focus=focus and item_focus)
            elif f == 'flow' or len(size)==1:
                canv = w.render( (maxcol,),
                    focus=focus and item_focus)
            else:
                # box-widget pile: compute row allocation lazily
                if item_rows is None:
                    item_rows = self.get_item_rows(size,
                        focus)
                rows = item_rows[i]
                if rows>0:
                    canv = w.render( (maxcol, rows),
                        focus=focus and item_focus )
            if canv:
                combinelist.append((canv, i, item_focus))
        if not combinelist:
            # nothing rendered: return a blank canvas of the right size
            return SolidCanvas(" ", size[0], (size[1:]+(0,))[0])
        out = CanvasCombine(combinelist)
        if len(size)==2 and size[1] < out.rows():
            # flow/fixed widgets rendered too large
            out = CompositeCanvas(out)
            out.pad_trim_top_bottom(0, size[1] - out.rows())
        return out
    def get_cursor_coords(self, size):
        """Return the cursor coordinates of the focus widget."""
        if not self.focus_item.selectable():
            return None
        if not hasattr(self.focus_item,'get_cursor_coords'):
            return None
        i = self.widget_list.index(self.focus_item)
        f, height = self._get_item_types(i)
        item_rows = None
        maxcol = size[0]
        if f == 'fixed' or (f=='weight' and len(size)==2):
            if f == 'fixed':
                maxrow = height
            else:
                if item_rows is None:
                    item_rows = self.get_item_rows(size,
                        focus=True)
                maxrow = item_rows[i]
            coords = self.focus_item.get_cursor_coords(
                (maxcol,maxrow))
        else:
            coords = self.focus_item.get_cursor_coords((maxcol,))
        if coords is None:
            return None
        x,y = coords
        # translate from focus-item-local to pile coordinates by
        # adding the rows of all items above it
        if i > 0:
            if item_rows is None:
                item_rows = self.get_item_rows(size, focus=True)
            for r in item_rows[:i]:
                y += r
        return x, y
    def rows(self, size, focus=False ):
        """Return the number of rows required for this widget."""
        return sum(self.get_item_rows(size, focus))
    def keypress(self, size, key ):
        """Pass the keypress to the widget in focus.
        Unhandled 'up' and 'down' keys may cause a focus change."""
        item_rows = None
        if len(size)==2:
            item_rows = self.get_item_rows( size, focus=True )
        i = self.widget_list.index(self.focus_item)
        f, height = self._get_item_types(i)
        if self.focus_item.selectable():
            tsize = self.get_item_size(size,i,True,item_rows)
            key = self.focus_item.keypress( tsize, key )
            if self._command_map[key] not in ('cursor up', 'cursor down'):
                return key
        # the focus item did not consume an up/down key: look for the
        # next selectable widget in that direction
        if self._command_map[key] == 'cursor up':
            candidates = range(i-1, -1, -1) # count backwards to 0
        else: # self._command_map[key] == 'cursor down'
            candidates = range(i+1, len(self.widget_list))
        if not item_rows:
            item_rows = self.get_item_rows( size, focus=True )
        for j in candidates:
            if not self.widget_list[j].selectable():
                continue
            self._update_pref_col_from_focus(size)
            self.set_focus(j)
            if not hasattr(self.focus_item,'move_cursor_to_coords'):
                return
            f, height = self._get_item_types(i)
            # NOTE(review): the line above uses i (the old focus index)
            # rather than j, and the result is never used below --
            # looks like dead code or a typo; confirm before changing
            rows = item_rows[j]
            if self._command_map[key] == 'cursor up':
                rowlist = range(rows-1, -1, -1)
            else: # self._command_map[key] == 'cursor down'
                rowlist = range(rows)
            # try to place the cursor at self.pref_col on the row
            # nearest the edge we entered from
            for row in rowlist:
                tsize=self.get_item_size(size,j,True,item_rows)
                if self.focus_item.move_cursor_to_coords(
                    tsize,self.pref_col,row):
                    break
            return
        # nothing to select
        return key
    def _update_pref_col_from_focus(self, size ):
        """Update self.pref_col from the focus widget."""
        widget = self.focus_item
        if not hasattr(widget,'get_pref_col'):
            return
        i = self.widget_list.index(widget)
        tsize = self.get_item_size(size,i,True)
        pref_col = widget.get_pref_col(tsize)
        if pref_col is not None:
            self.pref_col = pref_col
    def move_cursor_to_coords(self, size, col, row):
        """Capture pref col and set new focus."""
        self.pref_col = col
        #FIXME guessing focus==True
        focus=True
        # find the item containing the requested row; after the loop
        # w is that item and wrow is its first row
        wrow = 0
        item_rows = self.get_item_rows(size,focus)
        for r,w in zip(item_rows, self.widget_list):
            if wrow+r > row:
                break
            wrow += r
        if not w.selectable():
            return False
        if hasattr(w,'move_cursor_to_coords'):
            i = self.widget_list.index(w)
            tsize = self.get_item_size(size, i, focus, item_rows)
            rval = w.move_cursor_to_coords(tsize,col,row-wrow)
            if rval is False:
                return False
        self.set_focus(w)
        return True
    def mouse_event(self, size, event, button, col, row, focus):
        """
        Pass the event to the contained widget.
        May change focus on button 1 press.
        """
        # find the item containing the requested row; after the loop
        # w is that item and wrow is its first row
        wrow = 0
        item_rows = self.get_item_rows(size,focus)
        for r,w in zip(item_rows, self.widget_list):
            if wrow+r > row:
                break
            wrow += r
        focus = focus and self.focus_item == w
        if is_mouse_press(event) and button==1:
            if w.selectable():
                self.set_focus(w)
        if not hasattr(w,'mouse_event'):
            return False
        i = self.widget_list.index(w)
        tsize = self.get_item_size(size, i, focus, item_rows)
        return w.mouse_event(tsize, event, button, col, row-wrow,
            focus)
class ColumnsError(Exception):
    """Raised for invalid Columns widget_list items."""
    pass
class Columns(Widget): # either FlowWidget or BoxWidget
    def __init__(self, widget_list, dividechars=0, focus_column=None,
            min_width=1, box_columns=None):
        """
        widget_list -- list of flow widgets or list of box widgets
        dividechars -- blank characters between columns
        focus_column -- index into widget_list of column in focus,
            if None the first selectable widget will be chosen.
        min_width -- minimum width for each column which is not
            designated as flow widget in widget_list.
        box_columns -- a list of column indexes containing box widgets
            whose maxrow is set to the maximum of the rows
            required by columns not listed in box_columns.

        widget_list may also contain tuples such as:
        ('flow', widget) always treat widget as a flow widget
        ('fixed', width, widget) give this column a fixed width
        ('weight', weight, widget) give this column a relative weight

        widgets not in a tuple are the same as ('weight', 1, widget)

        box_columns is ignored when this widget is being used as a
        box widget because in that case all columns are treated as box
        widgets.
        """
        self.__super.__init__()
        self.widget_list = MonitoredList(widget_list)
        self.column_types = []
        self.has_flow_type = False
        # Normalize each widget_list entry into a bare widget plus a
        # parallel (type, value) entry in self.column_types.
        for i in range(len(widget_list)):
            w = widget_list[i]
            if type(w) != tuple:
                self.column_types.append(('weight',1))
            elif w[0] == 'flow':
                f, widget = w
                self.widget_list[i] = widget
                self.column_types.append((f,None))
                self.has_flow_type = True
                w = widget
            elif w[0] in ('fixed', 'weight'):
                f,width,widget = w
                self.widget_list[i] = widget
                self.column_types.append((f,width))
                w = widget
            else:
                raise ColumnsError, "widget list item invalid: %r" % (w,)
            # Default the focus to the first selectable column.
            if focus_column is None and w.selectable():
                focus_column = i
        self.widget_list.set_modified_callback(self._invalidate)
        self.dividechars = dividechars
        if focus_column is None:
            focus_column = 0
        self.focus_col = focus_column
        self.pref_col = None
        self.min_width = min_width
        self.box_columns = box_columns
        # Cached column_widths() result, keyed by maxcol; None = dirty.
        self._cache_maxcol = None

    @property
    def contents(self):
        # Yield (widget, (type, value)) pairs; entries with no recorded
        # column type default to an even weight of 1.
        for i, w in enumerate(self.widget_list):
            try:
                yield w, self.column_types[i]
            except IndexError:
                yield w, ('weight', 1)

    def _invalidate(self):
        # Drop the cached width computation along with the canvas cache.
        self._cache_maxcol = None
        self.__super._invalidate()

    def set_focus_column( self, num ):
        """Set the column in focus by its index in self.widget_list."""
        self.focus_col = num
        self._invalidate()

    def get_focus_column( self ):
        """Return the focus column index."""
        return self.focus_col

    def set_focus(self, item):
        """Set the item in focus.

        item -- widget or integer index"""
        if type(item) == int:
            assert item>=0 and item<len(self.widget_list)
            position = item
        else:
            position = self.widget_list.index(item)
        self.focus_col = position
        self._invalidate()

    def get_focus(self):
        """Return the widget in focus."""
        return self.widget_list[self.focus_col]

    def column_widths(self, size, focus=False):
        """Return a list of column widths.

        size -- (maxcol,) if self.widget_list contains flow widgets or
            (maxcol, maxrow) if it contains box widgets.
        """
        maxcol = size[0]
        # The cache is only trusted when there are no 'flow' columns,
        # whose pack() result can change with their content.
        if maxcol == self._cache_maxcol and not self.has_flow_type:
            return self._cache_column_widths
        widths=[]
        weighted = []
        shared = maxcol + self.dividechars
        # First pass: give every column its static minimum and collect
        # the weighted columns; stop once we run out of screen columns.
        for i, (w, (t, width)) in enumerate(self.contents):
            if t == 'fixed':
                static_w = width
            elif t == 'flow':
                static_w = w.pack((maxcol,), focus)[0]
            else:
                static_w = self.min_width
            if shared < static_w + self.dividechars:
                break
            widths.append(static_w)
            shared -= static_w + self.dividechars
            if t not in ('fixed', 'flow'):
                weighted.append((width,i))
        if shared:
            # divide up the remaining space between weighted cols
            weighted.sort()
            wtotal = sum([weight for weight,i in weighted])
            # grow includes the min_width already handed to each
            # weighted column in the first pass.
            grow = shared + len(weighted)*self.min_width
            for weight, i in weighted:
                width = int(float(grow) * weight / wtotal + 0.5)
                width = max(self.min_width, width)
                widths[i] = width
                grow -= width
                wtotal -= weight
        self._cache_maxcol = maxcol
        self._cache_column_widths = widths
        return widths

    def render(self, size, focus=False):
        """Render columns and return canvas.

        size -- (maxcol,) if self.widget_list contains flow widgets or
            (maxcol, maxrow) if it contains box widgets.
        """
        widths = self.column_widths(size, focus)
        if not widths:
            # Nothing fits: return a blank canvas of the requested size.
            return SolidCanvas(" ", size[0], (size[1:]+(1,))[0])
        box_maxrow = None
        if len(size)==1 and self.box_columns:
            box_maxrow = 1
            # two-pass mode to determine maxrow for box columns
            for i in range(len(widths)):
                if i in self.box_columns:
                    continue
                mc = widths[i]
                w = self.widget_list[i]
                rows = w.rows( (mc,),
                    focus = focus and self.focus_col == i )
                box_maxrow = max(box_maxrow, rows)
        l = []
        for i in range(len(widths)):
            mc = widths[i]
            # if the widget has a width of 0, hide it
            if mc <= 0:
                continue
            w = self.widget_list[i]
            if box_maxrow and i in self.box_columns:
                sub_size = (mc, box_maxrow)
            else:
                sub_size = (mc,) + size[1:]
            canv = w.render(sub_size,
                focus = focus and self.focus_col == i)
            # All but the last column carry the divider space on their
            # right edge.
            if i < len(widths)-1:
                mc += self.dividechars
            l.append((canv, i, self.focus_col == i, mc))
        canv = CanvasJoin(l)
        if canv.cols() < size[0]:
            canv.pad_trim_left_right(0, size[0]-canv.cols())
        return canv

    def get_cursor_coords(self, size):
        """Return the cursor coordinates from the focus widget."""
        w = self.widget_list[self.focus_col]
        if not w.selectable():
            return None
        if not hasattr(w, 'get_cursor_coords'):
            return None
        widths = self.column_widths( size )
        if len(widths) < self.focus_col+1:
            # Focus column did not fit on screen.
            return None
        colw = widths[self.focus_col]
        coords = w.get_cursor_coords( (colw,)+size[1:] )
        if coords is None:
            return None
        # Translate from column-local to Columns coordinates.
        x,y = coords
        x += self.focus_col * self.dividechars
        x += sum( widths[:self.focus_col] )
        return x, y

    def move_cursor_to_coords(self, size, col, row):
        """Choose a selectable column to focus based on the coords."""
        widths = self.column_widths(size)
        # best holds (index, left_x, right_x) of the closest selectable
        # column found so far.
        best = None
        x = 0
        for i in range(len(widths)):
            w = self.widget_list[i]
            end = x + widths[i]
            if w.selectable():
                # sometimes, col == 'left' - that doesn't seem like its handled here, does it?
                # assert isinstance(x, int) and isinstance(col, int), (x, col)
                if x > col and best is None:
                    # no other choice
                    best = i, x, end
                    break
                if x > col and col-best[2] < x-col:
                    # choose one on left
                    break
                best = i, x, end
                if col < end:
                    # choose this one
                    break
            x = end + self.dividechars
        if best is None:
            return False
        i, x, end = best
        w = self.widget_list[i]
        if hasattr(w,'move_cursor_to_coords'):
            if type(col)==int:
                # Clamp the x coordinate inside the chosen column.
                move_x = min(max(0,col-x),end-x-1)
            else:
                move_x = col
            rval = w.move_cursor_to_coords((end-x,)+size[1:],
                move_x, row)
            if rval is False:
                return False
        self.focus_col = i
        self.pref_col = col
        self._invalidate()
        return True

    def mouse_event(self, size, event, button, col, row, focus):
        """
        Send event to appropriate column.
        May change focus on button 1 press.
        """
        widths = self.column_widths(size)
        x = 0
        for i in range(len(widths)):
            if col < x:
                # Click landed on a divider gap.
                return False
            w = self.widget_list[i]
            end = x + widths[i]
            if col >= end:
                x = end + self.dividechars
                continue
            focus = focus and self.focus_col == i
            if is_mouse_press(event) and button == 1:
                if w.selectable():
                    self.set_focus(w)
            if not hasattr(w,'mouse_event'):
                return False
            # Column coordinate is translated to the hit widget's frame.
            return w.mouse_event((end-x,)+size[1:], event, button,
                col - x, row, focus)
        return False

    def get_pref_col(self, size):
        """Return the pref col from the column in focus."""
        maxcol = size[0]
        widths = self.column_widths( (maxcol,) )
        w = self.widget_list[self.focus_col]
        if len(widths) < self.focus_col+1:
            return 0
        col = None
        if hasattr(w,'get_pref_col'):
            col = w.get_pref_col((widths[self.focus_col],)+size[1:])
            if type(col)==int:
                # Translate to Columns coordinates.
                col += self.focus_col * self.dividechars
                col += sum( widths[:self.focus_col] )
        if col is None:
            col = self.pref_col
        if col is None and w.selectable():
            # Fall back to the middle of the focus column.
            col = widths[self.focus_col] // 2
            col += self.focus_col * self.dividechars
            col += sum( widths[:self.focus_col] )
        return col

    def rows(self, size, focus=0 ):
        """Return the number of rows required by the columns.
        Only makes sense if self.widget_list contains flow widgets."""
        widths = self.column_widths(size, focus)
        rows = 1
        for i in range(len(widths)):
            # Box columns take whatever height the flow columns need.
            if self.box_columns and i in self.box_columns:
                continue
            mc = widths[i]
            w = self.widget_list[i]
            rows = max( rows, w.rows( (mc,),
                focus = focus and self.focus_col == i ) )
        return rows

    def keypress(self, size, key):
        """Pass keypress to the focus column.

        size -- (maxcol,) if self.widget_list contains flow widgets or
            (maxcol, maxrow) if it contains box widgets.
        """
        if self.focus_col is None: return key
        widths = self.column_widths( size )
        if self.focus_col < 0 or self.focus_col >= len(widths):
            return key
        i = self.focus_col
        mc = widths[i]
        w = self.widget_list[i]
        # Any non-cursor-movement key clears the remembered column.
        if self._command_map[key] not in ('cursor up', 'cursor down',
            'cursor page up', 'cursor page down'):
            self.pref_col = None
        key = w.keypress( (mc,)+size[1:], key )
        if self._command_map[key] not in ('cursor left', 'cursor right'):
            return key
        # Unhandled left/right: move focus to the nearest selectable
        # column in that direction.
        if self._command_map[key] == 'cursor left':
            candidates = range(i-1, -1, -1) # count backwards to 0
        else: # key == 'right'
            candidates = range(i+1, len(widths))
        for j in candidates:
            if not self.widget_list[j].selectable():
                continue
            self.set_focus_column( j )
            return
        return key

    def selectable(self):
        """Return the selectable value of the focus column."""
        return self.widget_list[self.focus_col].selectable()
def _test():
import doctest
doctest.testmod()
# Run the module's doctest suite when executed as a script.
if __name__=='__main__':
    _test()
<file_sep>/soundcloud_player/soundcloud_player.py
# http://excess.org/urwid/reference.html
import vlc
# http://liris.cnrs.fr/advene/download/python-ctypes/doc/
import os
import sys
# close your eyes
sys.path.append('%s/../' % sys.path[0] )
import urwid
class SoundCloudPlayer:
    """Minimal urwid front-end driving a VLC media-list player."""
    def __init__(self, playlist):
        # playlist -- list of resolved stream URLs to queue in VLC
        self.palette = [
            ('banner', '', '', '', '#fff', '#333'),
            ('streak', '', '', '', '#fff', '#333'),
            ('bg', '', '', '', '', '#666'),
        ]
        name = os.getlogin() or None
        txt = urwid.Text(('banner', u"Welcome %s!\n\n0xPr0xy Soundcloud Player!" %name), align='center')
        map1 = urwid.AttrMap(txt, 'streak')
        fill = urwid.Filler(map1)
        self.map2 = urwid.AttrMap(fill, 'bg')
        self.loop = urwid.MainLoop(self.map2, self.palette, unhandled_input=self.handle_input)
        # Request 256-colour mode for the hex palette entries above.
        self.loop.screen.set_terminal_properties(colors=256)
        self.create_player(playlist)
    def handle_input(self,input):
        # Key bindings: q quit, p pause, r restart, ]/} next, [/{ previous.
        if input in ('q', 'Q'):
            self.player.stop()
            raise urwid.ExitMainLoop()
        if input in ('p', 'P'):
            self.player.pause()
        if input in ('r', 'R'):
            self.player.stop()
            self.player.play()
        if input in (']','}'):
            self.player.next()
        if input in ('{', '['):
            self.player.previous()
    def create_player(self, playlist):
        # Build a quiet VLC instance and queue every playlist entry,
        # then start playback (the urwid loop is started by the caller
        # on the following line).
        instanceParameters = [
            '--quiet',
            '--ignore-config',
            '--sout-keep',
            '--sout-all',
            '--vout=caca'
        ]
        self.instance=vlc.Instance(instanceParameters)
        self.medialist = self.instance.media_list_new()
        for item in playlist:
            self.medialist.add_media(self.instance.media_new(item))
        self.player = self.instance.media_list_player_new()
        self.player.set_media_list(self.medialist)
        self.player.play()
self.loop.run()<file_sep>/soundcloud.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import soundcloud
import sys
import os
import urwid
import requests
from soundcloud_player.soundcloud_player import SoundCloudPlayer
class ItemWidget (urwid.WidgetWrap):
    """Selectable list row showing a track title; remembers its stream URL."""
    def __init__ (self, entry, url):
        """ Creates UI Element for every Entry"""
        if entry is not None:
            # NOTE(review): `_selectable` stashes the title text, which
            # shadows urwid's selectable flag -- confirm intentional.
            self._selectable = entry
        # Resolved stream URL, read back by the keystroke handler.
        self.content = url.url
        self.item = [
            urwid.Padding(urwid.AttrWrap(
                urwid.Text('%s' % entry), 'body', 'focus')),
        ]
        w = urwid.Columns(self.item)
        self.__super.__init__(w)
    def selectable (self):
        # Always selectable so the ListBox can focus this row.
        return True
    def keypress(self, size, key):
        # No key handling here; let the container process all input.
        return key
class SoundCloud:
def __init__(self, query, track, playlist):
self.playlist = []
self.palette = [
('body','dark cyan', '', 'standout'),
('focus','dark red', '', 'standout'),
('head','light red', 'black'),
]
self.client = soundcloud.Client(client_id='570f56acefe61658492d4ee040a0a0cd')
if query: self.query = query
if track: self.findTracks()
if playlist: self.findPlaylist()
def play(self, url):
playlist = []
try:
r = requests.get(url)
playlist.append(r.url)
SoundCloudPlayer(playlist)
except Exception as e:
print e
#os.system('./stream/__main__.py %s' %url)
def download(self, url, title):
try:
print '\nDownloading: %s' %title
r = requests.get(str(url))
with open('%s.mp3' % title, 'wb') as code:
code.write(r.content)
print('\nDownload Complete: %s.mp3' % title)
except Exception as e:
print e
def initGui(self):
self.listbox = urwid.ListBox(urwid.SimpleListWalker(self.playlist))
self.view = urwid.Frame(urwid.AttrWrap(self.listbox, 'body'))
self.loop = urwid.MainLoop(self.view, self.palette, unhandled_input=self.keystroke)
self.loop.run()
def findTracks(self):
tracks = self.client.get('/tracks', q=self.query, licence='cc-by-sa')
for track in tracks:
url = self.client.get(track.stream_url, allow_redirects=False)
self.playlist.append(ItemWidget(track.title, url))
self.initGui()
def findPlaylist(self):
playlist = self.client.get('/playlists/' + self.query)
for track in playlist.tracks:
url = self.client.get(track['stream_url'], allow_redirects=False)
self.playlist.append(ItemWidget(track['title'], url))
self.initGui()
def keystroke (self,input):
""" Handle Keystrokes """
if input in ('q', 'Q'):
raise urwid.ExitMainLoop()
if input is 'enter':
try:
self.focus = self.listbox.get_focus()[0].content
except Exception as e:
print('listbox get_focus failed:\nError: %s' % e)
self.play(self.focus)
if input is ' ':
title = self.listbox.get_focus()[0]._selectable
try:
self.focus = self.listbox.get_focus()[0].content
except Exception as e:
print('listbox get_focus failed:\nError: %s' % e)
self.download(self.focus, title)
# CLI entry points: ./soundcloud.py '<query>' track|playlist
if len(sys.argv) == 3 and sys.argv[2] == 'track': instance = SoundCloud(sys.argv[1], True, False)
if len(sys.argv) == 3 and sys.argv[2] == 'playlist': instance = SoundCloud(sys.argv[1], False, True)
<file_sep>/README.txt
This is a command-line SoundCloud client that supports searching, streaming, and downloading SoundCloud songs.
Requirements:
--------------
VLC media player
Python 2.6+
Commands:
---------
./soundcloud.py 'artist' track
./soundcloud.py 'artist-album' playlist
Controls:
---------
Enter = Stream
SPACE = Download
Q = Quit
Enjoy!
0xPr0xy
<file_sep>/urwid/decoration.py
#!/usr/bin/python
#
# Urwid widget decoration classes
# Copyright (C) 2004-2011 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from urwid.util import int_scale
from urwid.widget import Widget, WidgetError, \
BOX, FLOW, LEFT, CENTER, RIGHT, PACK, CLIP, GIVEN, RELATIVE, RELATIVE_100, \
delegate_to_widget_mixin
from urwid.split_repr import remove_defaults
from urwid.canvas import CompositeCanvas, SolidCanvas
from urwid.widget import Divider, Edit, Text, SolidFill # doctests
class WidgetDecoration(Widget):  # "decorator" was already taken
    def __init__(self, original_widget):
        """
        original_widget -- the widget being decorated

        This is a base class for decoration widgets, widgets
        that contain one or more widgets and only ever have
        a single focus.  This type of widget will affect the
        display or behaviour of the original_widget but it is
        not part of determining a chain of focus.

        Don't actually do this -- use a WidgetDecoration subclass
        instead, these are not real widgets:

        >>> WidgetDecoration(Text(u"hi"))
        <WidgetDecoration flow widget <Text flow widget 'hi'>>
        """
        self._original_widget = original_widget
    def _repr_words(self):
        # Include the wrapped widget in repr() output.
        return self.__super._repr_words() + [repr(self._original_widget)]
    def _get_original_widget(self):
        return self._original_widget
    def _set_original_widget(self, original_widget):
        self._original_widget = original_widget
        self._invalidate()
    original_widget = property(_get_original_widget, _set_original_widget)
    def _get_base_widget(self):
        """
        Return the widget without decorations.  If there is only one
        Decoration then this is the same as original_widget.

        >>> t = Text('hello')
        >>> wd1 = WidgetDecoration(t)
        >>> wd2 = WidgetDecoration(wd1)
        >>> wd3 = WidgetDecoration(wd2)
        >>> wd3.original_widget is wd2
        True
        >>> wd3.base_widget is t
        True
        """
        # Walk down the decoration chain to the first non-decoration.
        w = self
        while hasattr(w, '_original_widget'):
            w = w._original_widget
        return w
    base_widget = property(_get_base_widget)
    def selectable(self):
        # Selectability is delegated to the wrapped widget.
        return self._original_widget.selectable()
    def sizing(self):
        return self._original_widget.sizing()
class WidgetPlaceholder(delegate_to_widget_mixin('_original_widget'),
        WidgetDecoration):
    """
    This is a do-nothing decoration widget that can be used for swapping
    between widgets without modifying the container of this widget.

    This can be useful for making an interface with a number of distinct
    pages or for showing and hiding menu or status bars.

    The widget displayed is stored as a .original_widget property and
    can be changed by assigning a new widget to it.
    """
    # All behaviour is delegated to original_widget by the mixin.
    pass
class AttrMapError(WidgetError):
    # Raised by AttrMap when an attribute mapping is not hashable.
    pass
class AttrMap(delegate_to_widget_mixin('_original_widget'), WidgetDecoration):
    """
    AttrMap is a decoration that maps one set of attributes to another for
    a FlowWidget or BoxWidget
    """
    def __init__(self, w, attr_map, focus_map=None):
        """
        w -- widget to wrap (stored as self.original_widget)
        attr_map -- attribute to apply to w, or dictionary of attribute mappings
        focus_map -- attribute to apply when in focus or dictionary of
            attribute mappings, if None use attr

        This object will pass all function calls and variable references
        to the wrapped widget.

        >>> AttrMap(Divider(u"!"), 'bright')
        <AttrMap flow widget <Divider flow widget '!'> attr_map={None: 'bright'}>
        >>> AttrMap(Edit(), 'notfocus', 'focus')
        <AttrMap selectable flow widget <Edit selectable flow widget '' edit_pos=0> attr_map={None: 'notfocus'} focus_map={None: 'focus'}>
        >>> size = (5,)
        >>> am = AttrMap(Text(u"hi"), 'greeting', 'fgreet')
        >>> am.render(size, focus=False).content().next() # ... = b in Python 3
        [('greeting', None, ...'hi ')]
        >>> am.render(size, focus=True).content().next()
        [('fgreet', None, ...'hi ')]
        >>> am2 = AttrMap(Text(('word', u"hi")), {'word':'greeting', None:'bg'})
        >>> am2
        <AttrMap flow widget <Text flow widget 'hi'> attr_map={'word': 'greeting', None: 'bg'}>
        >>> am2.render(size).content().next()
        [('greeting', None, ...'hi'), ('bg', None, ...' ')]
        """
        self.__super.__init__(w)
        # A bare attribute is shorthand for the mapping {None: attr}.
        if type(attr_map) != dict:
            self.set_attr_map({None: attr_map})
        else:
            self.set_attr_map(attr_map)
        if focus_map is not None and type(focus_map) != dict:
            self.set_focus_map({None: focus_map})
        else:
            self.set_focus_map(focus_map)
    def _repr_attrs(self):
        # only include the focus_attr when it takes effect (not None)
        d = dict(self.__super._repr_attrs(), attr_map=self._attr_map)
        if self._focus_map is not None:
            d['focus_map'] = self._focus_map
        return d
    def get_attr_map(self):
        # make a copy so ours is not accidentally modified
        # FIXME: a dictionary that detects modifications would be better
        return dict(self._attr_map)
    def set_attr_map(self, attr_map):
        """
        Set the attribute mapping dictionary {from_attr: to_attr, ...}

        Note this function does not accept a single attribute the way the
        constructor does.  You must specify {None: attribute} instead.

        >> w = AttrMap(Text("hi"), None)
        >> w.set_attr({'a':'b'})
        >> w
        <AttrMap flow widget <Text flow widget 'hi'> attr_map={'a': 'b'}>
        """
        for from_attr, to_attr in attr_map.items():
            if not from_attr.__hash__ or not to_attr.__hash__:
                raise AttrMapError("%r:%r attribute mapping is invalid. "
                    "Attributes must be hashable" % (from_attr, to_attr))
        self._attr_map = attr_map
        self._invalidate()
    attr_map = property(get_attr_map, set_attr_map)
    def get_focus_map(self):
        # make a copy so ours is not accidentally modified
        # FIXME: a dictionary that detects modifications would be better
        if self._focus_map:
            return dict(self._focus_map)
    def set_focus_map(self, focus_map):
        """
        Set the focus attribute mapping dictionary
        {from_attr: to_attr, ...}

        If None this widget will use the attr mapping instead (no change
        when in focus).

        Note this function does not accept a single attribute the way the
        constructor does.  You must specify {None: attribute} instead.

        >> w = AttrMap(Text("hi"), {})
        >> w.set_focus_map({'a':'b'})
        >> w
        <AttrMap flow widget <Text flow widget 'hi'> attr_map={} focus_map={'a': 'b'}>
        >> w.set_focus_map(None)
        >> w
        <AttrMap flow widget <Text flow widget 'hi'> attr_map={}>
        """
        if focus_map is not None:
            for from_attr, to_attr in focus_map.items():
                if not from_attr.__hash__ or not to_attr.__hash__:
                    raise AttrMapError("%r:%r attribute mapping is invalid. "
                        "Attributes must be hashable" % (from_attr, to_attr))
        self._focus_map = focus_map
        self._invalidate()
    focus_map = property(get_focus_map, set_focus_map)
    def render(self, size, focus=False):
        """
        Render wrapped widget and apply attribute. Return canvas.
        """
        # Pick the focus mapping only when focused and one is set.
        attr_map = self._attr_map
        if focus and self._focus_map is not None:
            attr_map = self._focus_map
        canv = self._original_widget.render(size, focus=focus)
        canv = CompositeCanvas(canv)
        canv.fill_attr_apply(attr_map)
        return canv
class AttrWrap(AttrMap):
    def __init__(self, w, attr, focus_attr=None):
        """
        w -- widget to wrap (stored as self.original_widget)
        attr -- attribute to apply to w
        focus_attr -- attribute to apply when in focus, if None use attr

        This widget is a special case of the new AttrMap widget, and it
        will pass all function calls and variable references to the wrapped
        widget.  This class is maintained for backwards compatibility only,
        new code should use AttrMap instead.

        >>> AttrWrap(Divider(u"!"), 'bright')
        <AttrWrap flow widget <Divider flow widget '!'> attr='bright'>
        >>> AttrWrap(Edit(), 'notfocus', 'focus')
        <AttrWrap selectable flow widget <Edit selectable flow widget '' edit_pos=0> attr='notfocus' focus_attr='focus'>
        >>> size = (5,)
        >>> aw = AttrWrap(Text(u"hi"), 'greeting', 'fgreet')
        >>> aw.render(size, focus=False).content().next()
        [('greeting', None, ...'hi ')]
        >>> aw.render(size, focus=True).content().next()
        [('fgreet', None, ...'hi ')]
        """
        self.__super.__init__(w, attr, focus_attr)
    def _repr_attrs(self):
        # only include the focus_attr when it takes effect (not None)
        d = dict(self.__super._repr_attrs(), attr=self.attr)
        del d['attr_map']
        if 'focus_map' in d:
            del d['focus_map']
        if self.focus_attr is not None:
            d['focus_attr'] = self.focus_attr
        return d
    # backwards compatibility, widget used to be stored as w
    get_w = WidgetDecoration._get_original_widget
    set_w = WidgetDecoration._set_original_widget
    w = property(get_w, set_w)
    def get_attr(self):
        return self.attr_map[None]
    def set_attr(self, attr):
        """
        Set the attribute to apply to the wrapped widget

        >> w = AttrWrap(Divider("-"), None)
        >> w.set_attr('new_attr')
        >> w
        <AttrWrap flow widget <Divider flow widget '-'> attr='new_attr'>
        """
        self.set_attr_map({None: attr})
    attr = property(get_attr, set_attr)
    def get_focus_attr(self):
        focus_map = self.focus_map
        if focus_map:
            return focus_map[None]
    def set_focus_attr(self, focus_attr):
        """
        Set the attribute to apply to the wrapped widget when it is in
        focus

        If None this widget will use the attr instead (no change when in
        focus).

        >> w = AttrWrap(Divider("-"), 'old')
        >> w.set_focus_attr('new_attr')
        >> w
        <AttrWrap flow widget <Divider flow widget '-'> attr='old' focus_attr='new_attr'>
        >> w.set_focus_attr(None)
        >> w
        <AttrWrap flow widget <Divider flow widget '-'> attr='old'>
        """
        self.set_focus_map({None: focus_attr})
    focus_attr = property(get_focus_attr, set_focus_attr)
    def __getattr__(self,name):
        """
        Call getattr on wrapped widget.  This has been the longstanding
        behaviour of AttrWrap, but is discouraged.  New code should be
        using AttrMap and .base_widget or .original_widget instead.
        """
        return getattr(self._original_widget, name)
    def sizing(self):
        return self._original_widget.sizing()
class BoxAdapterError(Exception):
    # Raised by BoxAdapter when the wrapped widget is not a box widget.
    pass
class BoxAdapter(WidgetDecoration):
    """
    Adapter for using a box widget where a flow widget would usually go
    """
    no_cache = ["rows"]

    def __init__(self, box_widget, height):
        """
        Create a flow widget that contains a box widget

        box_widget -- box widget (stored as self.original_widget)
        height -- number of rows for box widget

        >>> BoxAdapter(SolidFill(u"x"), 5) # 5-rows of x's
        <BoxAdapter flow widget <SolidFill box widget 'x'> height=5>
        """
        if hasattr(box_widget, 'sizing') and BOX not in box_widget.sizing():
            raise BoxAdapterError("%r is not a box widget" %
                box_widget)
        WidgetDecoration.__init__(self,box_widget)
        self.height = height

    def _repr_attrs(self):
        return dict(self.__super._repr_attrs(), height=self.height)

    # originally stored as box_widget, keep for compatibility
    box_widget = property(WidgetDecoration._get_original_widget,
        WidgetDecoration._set_original_widget)

    def sizing(self):
        # Presents itself as a flow widget regardless of the wrapped
        # widget's own sizing.
        return set([FLOW])

    def rows(self, size, focus=False):
        """
        Return the predetermined height (behave like a flow widget)

        >>> BoxAdapter(SolidFill(u"x"), 5).rows((20,))
        5
        """
        return self.height

    # The next few functions simply tack-on our height and pass through
    # to self._original_widget
    def get_cursor_coords(self, size):
        (maxcol,) = size
        if not hasattr(self._original_widget,'get_cursor_coords'):
            return None
        return self._original_widget.get_cursor_coords((maxcol, self.height))

    def get_pref_col(self, size):
        (maxcol,) = size
        if not hasattr(self._original_widget,'get_pref_col'):
            return None
        return self._original_widget.get_pref_col((maxcol, self.height))

    def keypress(self, size, key):
        (maxcol,) = size
        return self._original_widget.keypress((maxcol, self.height), key)

    def move_cursor_to_coords(self, size, col, row):
        (maxcol,) = size
        if not hasattr(self._original_widget,'move_cursor_to_coords'):
            return True
        return self._original_widget.move_cursor_to_coords((maxcol,
            self.height), col, row )

    def mouse_event(self, size, event, button, col, row, focus):
        (maxcol,) = size
        if not hasattr(self._original_widget,'mouse_event'):
            return False
        return self._original_widget.mouse_event((maxcol, self.height),
            event, button, col, row, focus)

    def render(self, size, focus=False):
        (maxcol,) = size
        canv = self._original_widget.render((maxcol, self.height), focus)
        canv = CompositeCanvas(canv)
        return canv

    def __getattr__(self, name):
        """
        Pass calls to box widget.
        """
        return getattr(self.box_widget, name)
class PaddingError(Exception):
    # Raised by Padding for invalid align/width values.
    pass
class Padding(WidgetDecoration):
    def __init__(self, w, align=LEFT, width=PACK, min_width=None,
            left=0, right=0):
        r"""
        w -- a box, flow or fixed widget to pad on the left and/or right
            this widget is stored as self.original_widget
        align -- one of:
            'left', 'center', 'right'
            ('relative', percentage 0=left 100=right)
        width -- one of:
            fixed number of columns for self.original_widget
            'pack' try to pack self.original_widget to its ideal size
            ('relative', percentage of total width)
            'clip' to enable clipping mode for a fixed widget
        min_width -- the minimum number of columns for
            self.original_widget or None
        left -- a fixed number of columns to pad on the left
        right -- a fixed number of columns to pad on the right

        Clipping Mode: (width='clip')
        In clipping mode this padding widget will behave as a flow
        widget and self.original_widget will be treated as a fixed
        widget.  self.original_widget will be clipped to fit
        the available number of columns.  For example if align is
        'left' then self.original_widget may be clipped on the right.

        >>> size = (7,)
        >>> Padding(Text(u"Head"), ('relative', 20)).render(size).text # ... = b in Python 3
        [...' Head ']
        >>> Padding(Divider(u"-"), left=2, right=1).render(size).text
        [...' ---- ']
        >>> Padding(Divider(u"*"), 'center', 3).render(size).text
        [...' *** ']
        >>> p=Padding(Text(u"1234"), 'left', 2, None, 1, 1)
        >>> p
        <Padding flow widget <Text flow widget '1234'> left=1 right=1 width=2>
        >>> p.render(size).text   # align against left
        [...' 12 ', ...' 34 ']
        >>> p.align = 'right'
        >>> p.render(size).text   # align against right
        [...' 12 ', ...' 34 ']
        >>> Padding(Text(u"hi\nthere"), 'right').render(size).text
        [...' hi ', ...' there']
        """
        self.__super.__init__(w)

        # convert obsolete parameters 'fixed left' and 'fixed right':
        if type(align) == tuple and align[0] in ('fixed left',
                'fixed right'):
            if align[0]=='fixed left':
                left = align[1]
                align = LEFT
            else:
                right = align[1]
                align = RIGHT
        if type(width) == tuple and width[0] in ('fixed left',
                'fixed right'):
            if width[0]=='fixed left':
                left = width[1]
            else:
                right = width[1]
            width = RELATIVE_100

        # convert old clipping mode width=None to width='clip'
        if width is None:
            width = CLIP

        self.left = left
        self.right = right
        self._align_type, self._align_amount = normalize_align(align,
            PaddingError)
        self._width_type, self._width_amount = normalize_width(width,
            PaddingError)
        self.min_width = min_width

    def sizing(self):
        # In clip mode this decoration is always a flow widget.
        if self._width_type == CLIP:
            return set([FLOW])
        return self.original_widget.sizing()

    def _repr_attrs(self):
        attrs = dict(self.__super._repr_attrs(),
            align=self.align,
            width=self.width,
            left=self.left,
            right=self.right,
            min_width=self.min_width)
        return remove_defaults(attrs, Padding.__init__)

    def _get_align(self):
        """
        Return the padding alignment setting.
        """
        return simplify_align(self._align_type, self._align_amount)
    def _set_align(self, align):
        """
        Set the padding alignment.
        """
        self._align_type, self._align_amount = normalize_align(align,
            PaddingError)
    align = property(_get_align, _set_align)

    def _get_width(self):
        """
        Return the padding width setting.
        """
        return simplify_width(self._width_type, self._width_amount)
    def _set_width(self, width):
        """
        Set the padding width.
        """
        self._width_type, self._width_amount = normalize_width(width,
            PaddingError)
    width = property(_get_width, _set_width)

    def render(self, size, focus=False):
        left, right = self.padding_values(size, focus)

        maxcol = size[0]
        maxcol -= left+right

        if self._width_type == CLIP:
            # Clip mode renders the fixed widget at its natural size.
            canv = self._original_widget.render((), focus)
        else:
            canv = self._original_widget.render((maxcol,)+size[1:], focus)
        if canv.cols() == 0:
            # Zero-width content: substitute a blank canvas.
            canv = SolidCanvas(' ', size[0], canv.rows())
            canv = CompositeCanvas(canv)
            canv.set_depends([self._original_widget])
            return canv
        canv = CompositeCanvas(canv)
        canv.set_depends([self._original_widget])
        if left != 0 or right != 0:
            # Negative values here trim instead of pad (clip mode).
            canv.pad_trim_left_right(left, right)

        return canv

    def padding_values(self, size, focus):
        """Return the number of columns to pad on the left and right.

        Override this method to define custom padding behaviour."""
        maxcol = size[0]
        if self._width_type == CLIP:
            width, ignore = self._original_widget.pack((), focus=focus)
            return calculate_left_right_padding(maxcol,
                self._align_type, self._align_amount,
                CLIP, width, None, self.left, self.right)
        if self._width_type == PACK:
            maxwidth = max(maxcol - self.left - self.right,
                self.min_width or 0)
            (width, ignore) = self._original_widget.pack((maxwidth,),
                focus=focus)
            return calculate_left_right_padding(maxcol,
                self._align_type, self._align_amount,
                GIVEN, width, self.min_width,
                self.left, self.right)
        return calculate_left_right_padding(maxcol,
            self._align_type, self._align_amount,
            self._width_type, self._width_amount,
            self.min_width, self.left, self.right)

    def rows(self, size, focus=False):
        """Return the rows needed for self.original_widget."""
        (maxcol,) = size
        left, right = self.padding_values(size, focus)
        if self._width_type == PACK:
            pcols, prows = self._original_widget.pack((maxcol-left-right,),
                focus)
            return prows
        if self._width_type == CLIP:
            fcols, frows = self._original_widget.pack((), focus)
            return frows
        return self._original_widget.rows((maxcol-left-right,), focus=focus)

    def keypress(self, size, key):
        """Pass keypress to self._original_widget."""
        maxcol = size[0]
        left, right = self.padding_values(size, True)
        maxvals = (maxcol-left-right,)+size[1:]
        return self._original_widget.keypress(maxvals, key)

    def get_cursor_coords(self,size):
        """Return the (x,y) coordinates of cursor within self._original_widget."""
        if not hasattr(self._original_widget,'get_cursor_coords'):
            return None
        left, right = self.padding_values(size, True)
        maxcol = size[0]
        maxvals = (maxcol-left-right,)+size[1:]
        coords = self._original_widget.get_cursor_coords(maxvals)
        if coords is None:
            return None
        # Translate back into the padded coordinate space.
        x, y = coords
        return x+left, y

    def move_cursor_to_coords(self, size, x, y):
        """Set the cursor position with (x,y) coordinates of self._original_widget.

        Returns True if move succeeded, False otherwise.
        """
        if not hasattr(self._original_widget,'move_cursor_to_coords'):
            return True
        left, right = self.padding_values(size, True)
        maxcol = size[0]
        maxvals = (maxcol-left-right,)+size[1:]
        if type(x)==int:
            # Clamp x inside the unpadded region before translating.
            if x < left:
                x = left
            elif x >= maxcol-right:
                x = maxcol-right-1
            x -= left
        return self._original_widget.move_cursor_to_coords(maxvals, x, y)

    def mouse_event(self, size, event, button, x, y, focus):
        """Send mouse event if position is within self._original_widget."""
        if not hasattr(self._original_widget,'mouse_event'):
            return False
        left, right = self.padding_values(size, focus)
        maxcol = size[0]
        if x < left or x >= maxcol-right:
            # Click landed on the padding itself.
            return False
        maxvals = (maxcol-left-right,)+size[1:]
        return self._original_widget.mouse_event(maxvals, event, button, x-left, y,
            focus)

    def get_pref_col(self, size):
        """Return the preferred column from self._original_widget, or None."""
        if not hasattr(self._original_widget,'get_pref_col'):
            return None
        left, right = self.padding_values(size, True)
        maxcol = size[0]
        maxvals = (maxcol-left-right,)+size[1:]
        x = self._original_widget.get_pref_col(maxvals)
        if type(x) == int:
            return x+left
        return x
class FillerError(Exception):
    """Raised for invalid valign/height combinations given to Filler."""
class Filler(WidgetDecoration):
    def __init__(self, body, valign="middle", height=None, min_height=None):
        """
        body -- a flow widget or box widget to be filled around (stored
            as self.original_widget)
        valign -- one of:
            'top', 'middle', 'bottom'
            ('fixed top', rows)
            ('fixed bottom', rows)
            ('relative', percentage 0=top 100=bottom)
        height -- one of:
            None if body is a flow widget
            number of rows high
            ('fixed bottom', rows)  Only if valign is 'fixed top'
            ('fixed top', rows)  Only if valign is 'fixed bottom'
            ('relative', percentage of total height)
        min_height -- one of:
            None if no minimum or if body is a flow widget
            minimum number of rows for the widget when height not fixed
        If body is a flow widget then height and min_height must be set
        to None.
        Filler widgets will try to satisfy height argument first by
        reducing the valign amount when necessary.  If height still
        cannot be satisfied it will also be reduced.
        """
        self.__super.__init__(body)
        vt, va, ht, ha = decompose_valign_height(valign, height, FillerError)
        self.valign_type, self.valign_amount = vt, va
        self.height_type, self.height_amount = ht, ha
        # min_height only applies when the height can be adjusted
        if self.height_type not in ('fixed', None):
            self.min_height = min_height
        else:
            self.min_height = None
    def sizing(self):
        return set([BOX]) # always a box widget
    # backwards compatibility, widget used to be stored as body
    get_body = WidgetDecoration._get_original_widget
    set_body = WidgetDecoration._set_original_widget
    body = property(get_body, set_body)
    def selectable(self):
        """Return selectable from body."""
        return self._original_widget.selectable()
    def filler_values(self, size, focus):
        """Return the number of rows to pad on the top and bottom.
        Override this method to define custom padding behaviour."""
        (maxcol, maxrow) = size
        if self.height_type is None:
            # flow widget: ask it how many rows it needs at this width
            height = self._original_widget.rows((maxcol,), focus=focus)
            return calculate_filler(self.valign_type,
                self.valign_amount, 'fixed', height,
                None, maxrow)
        return calculate_filler(self.valign_type, self.valign_amount,
            self.height_type, self.height_amount,
            self.min_height, maxrow)
    def render(self, size, focus=False):
        """Render self.original_widget with space above and/or below."""
        (maxcol, maxrow) = size
        top, bottom = self.filler_values(size, focus)
        if self.height_type is None:
            canv = self._original_widget.render((maxcol,), focus)
        else:
            canv = self._original_widget.render((maxcol, maxrow-top-bottom), focus)
        canv = CompositeCanvas(canv)
        if maxrow and canv.rows() > maxrow and canv.cursor is not None:
            # the canvas is too tall and has a cursor: trim rows above
            # the cursor so it stays visible
            cx, cy = canv.cursor
            if cy >= maxrow:
                canv.trim(cy-maxrow+1, maxrow-top-bottom)
        if canv.rows() > maxrow:
            canv.trim(0, maxrow)
            return canv
        canv.pad_trim_top_bottom(top, bottom)
        return canv
    def keypress(self, size, key):
        """Pass keypress to self.original_widget."""
        (maxcol, maxrow) = size
        if self.height_type is None:
            return self._original_widget.keypress((maxcol,), key)
        top, bottom = self.filler_values((maxcol, maxrow), True)
        return self._original_widget.keypress((maxcol, maxrow-top-bottom), key)
    def get_cursor_coords(self, size):
        """Return cursor coords from self.original_widget if any."""
        (maxcol, maxrow) = size
        if not hasattr(self._original_widget, 'get_cursor_coords'):
            return None
        top, bottom = self.filler_values(size, True)
        if self.height_type is None:
            coords = self._original_widget.get_cursor_coords((maxcol,))
        else:
            coords = self._original_widget.get_cursor_coords(
                (maxcol, maxrow-top-bottom))
        if not coords:
            return None
        x, y = coords
        # clamp the cursor row to the visible area before offsetting
        if y >= maxrow:
            y = maxrow-1
        return x, y+top
    def get_pref_col(self, size):
        """Return pref_col from self.original_widget if any."""
        (maxcol, maxrow) = size
        if not hasattr(self._original_widget, 'get_pref_col'):
            return None
        if self.height_type is None:
            x = self._original_widget.get_pref_col((maxcol,))
        else:
            top, bottom = self.filler_values(size, True)
            x = self._original_widget.get_pref_col(
                (maxcol, maxrow-top-bottom))
        return x
    def move_cursor_to_coords(self, size, col, row):
        """Pass to self.original_widget."""
        (maxcol, maxrow) = size
        if not hasattr(self._original_widget, 'move_cursor_to_coords'):
            return True
        top, bottom = self.filler_values(size, True)
        # bug fix: the row must be compared against the row bound
        # maxrow-bottom, not maxcol-bottom (compare mouse_event below)
        if row < top or row >= maxrow-bottom:
            return False
        if self.height_type is None:
            return self._original_widget.move_cursor_to_coords((maxcol,),
                col, row-top)
        return self._original_widget.move_cursor_to_coords(
            (maxcol, maxrow-top-bottom), col, row-top)
    def mouse_event(self, size, event, button, col, row, focus):
        """Pass to self.original_widget."""
        (maxcol, maxrow) = size
        if not hasattr(self._original_widget, 'mouse_event'):
            return False
        top, bottom = self.filler_values(size, True)
        # ignore events that land on the filler rows themselves
        if row < top or row >= maxrow-bottom:
            return False
        if self.height_type is None:
            return self._original_widget.mouse_event((maxcol,),
                event, button, col, row-top, focus)
        return self._original_widget.mouse_event((maxcol, maxrow-top-bottom),
            event, button, col, row-top, focus)
def normalize_align(align, err):
    """
    Split align into an (align_type, align_amount) pair.

    Raises err if align is not a valid alignment value.
    """
    if align in (LEFT, CENTER, RIGHT):
        return (align, 0)
    if type(align) == tuple and len(align) == 2 and align[0] == RELATIVE:
        return align
    raise err("align value %r is not one of 'left', 'center', "
        "'right', ('relative', percentage 0=left 100=right)"
        % (align,))
def simplify_align(align_type, align_amount):
    """
    Recombine (align_type, align_amount) into an align value.
    Inverse of normalize_align.
    """
    if align_type != RELATIVE:
        return align_type
    return (align_type, align_amount)
def normalize_width(width, err):
    """
    Split width into a (width_type, width_amount) pair.

    Raises err if width is not a valid width value.
    """
    if width in (CLIP, PACK):
        return (width, 0)
    if type(width) == int:
        return (GIVEN, width)
    if type(width) == tuple and len(width) == 2 and width[0] == RELATIVE:
        return width
    raise err("width value %r is not one of fixed number of columns, "
        "'pack', ('relative', percentage of total width), 'clip'"
        % (width,))
def simplify_width(width_type, width_amount):
    """
    Recombine (width_type, width_amount) into a width value.
    Inverse of normalize_width.
    """
    if width_type == GIVEN:
        return width_amount
    if width_type in (CLIP, PACK):
        return width_type
    return (width_type, width_amount)
def decompose_align_width( align, width, err ):
# FIXME: remove this once it is no longer called from Overlay
try:
if align in ('left','center','right'):
align = (align,0)
align_type, align_amount = align
assert align_type in ('left','center','right','fixed left',
'fixed right','relative')
except (AssertionError, ValueError, TypeError):
raise err("align value %r is not one of 'left', 'center', "
"'right', ('fixed left', columns), ('fixed right', "
"columns), ('relative', percentage 0=left 100=right)"
% (align,))
try:
if width is None:
width = None, None
elif type(width) == int:
width = 'fixed', width
width_type, width_amount = width
assert width_type in ('fixed','fixed right','fixed left',
'relative', None)
except (AssertionError, ValueError, TypeError):
raise err("width value %r is not one of ('fixed', columns "
"width), ('fixed right', columns), ('relative', "
"percentage of total width), None" % (width,))
if width_type == 'fixed left' and align_type != 'fixed right':
raise err("fixed left width may only be used with fixed "
"right align")
if width_type == 'fixed right' and align_type != 'fixed left':
raise err("fixed right width may only be used with fixed "
"left align")
return align_type, align_amount, width_type, width_amount
def decompose_valign_height( valign, height, err ):
    """
    Split valign and height into (valign_type, valign_amount,
    height_type, height_amount), raising err for invalid values or
    invalid combinations of the two.
    """
    try:
        if valign in ('top','middle','bottom'):
            valign = (valign,0)
        valign_type, valign_amount = valign
        assert valign_type in ('top','middle','bottom','fixed top','fixed bottom','relative')
    except (AssertionError, ValueError, TypeError):
        # call form instead of the Python-2-only "raise err, msg"
        # statement, matching decompose_align_width above
        raise err("Invalid valign: %r" % (valign,))
    try:
        if height is None:
            height = None, None
        elif type(height) == int:
            height=('fixed',height)
        height_type, height_amount = height
        assert height_type in (None, 'fixed','fixed bottom','fixed top','relative')
    except (AssertionError, ValueError, TypeError):
        raise err("Invalid height: %r"%(height,))
    # the two 'fixed' height forms only make sense with the opposite
    # 'fixed' valign
    if height_type == 'fixed top' and valign_type != 'fixed bottom':
        raise err("fixed top height may only be used with fixed bottom valign")
    if height_type == 'fixed bottom' and valign_type != 'fixed top':
        raise err("fixed bottom height may only be used with fixed top valign")
    return valign_type, valign_amount, height_type, height_amount
def calculate_filler( valign_type, valign_amount, height_type, height_amount,
        min_height, maxrow ):
    """
    Return the (top, bottom) filler row counts for a widget of the
    given height/valign specification inside maxrow rows.
    """
    # resolve the widget height in rows
    if height_type == 'fixed':
        height = height_amount
    else:
        if height_type == 'relative':
            height = int(height_amount*maxrow // 100)
        else:
            assert height_type in ('fixed bottom','fixed top')
            height = maxrow-height_amount-valign_amount
        if min_height is not None:
            height = max(height, min_height)
    if height >= maxrow:
        # widget fills (or overflows) the whole area: no filler
        return 0, 0
    if valign_type == 'fixed top':
        top = valign_amount
        if top+height > maxrow:
            # shrink the top filler so the widget fits
            return maxrow-height, 0
        return top, maxrow-top-height
    if valign_type == 'fixed bottom':
        bottom = valign_amount
        if bottom+height > maxrow:
            # shrink the bottom filler so the widget fits
            return 0, maxrow-height
        return maxrow-bottom-height, bottom
    if valign_type == 'relative':
        top = int( (maxrow-height)*valign_amount // 100)
    elif valign_type == 'bottom':
        top = maxrow-height
    elif valign_type == 'middle':
        top = int( (maxrow-height)/2 )
    else: # 'top'
        top = 0
    # clamp top into [0, maxrow-height]
    top = max(0, min(top, maxrow-height))
    return top, maxrow-height-top
def calculate_left_right_padding(maxcol, align_type, align_amount,
        width_type, width_amount, min_width, left, right):
    """
    Return the amount of padding (or clipping) on the left and
    right part of maxcol columns to satisfy the following:
    align_type -- 'left', 'center', 'right', 'relative'
    align_amount -- a percentage when align_type=='relative'
    width_type -- 'fixed', 'relative', 'clip'
    width_amount -- a percentage when width_type=='relative'
        otherwise equal to the width of the widget
    min_width -- a desired minimum width for the widget or None
    left -- a fixed number of columns to pad on the left
    right -- a fixed number of columns to pad on the right
    >>> clrp = calculate_left_right_padding
    >>> clrp(15, 'left', 0, 'fixed', 10, None, 2, 0)
    (2, 3)
    >>> clrp(15, 'relative', 0, 'fixed', 10, None, 2, 0)
    (2, 3)
    >>> clrp(15, 'relative', 100, 'fixed', 10, None, 2, 0)
    (5, 0)
    >>> clrp(15, 'center', 0, 'fixed', 4, None, 2, 0)
    (6, 5)
    >>> clrp(15, 'left', 0, 'clip', 18, None, 0, 0)
    (0, -3)
    >>> clrp(15, 'right', 0, 'clip', 18, None, 0, -1)
    (-2, -1)
    >>> clrp(15, 'center', 0, 'fixed', 18, None, 2, 0)
    (0, 0)
    >>> clrp(20, 'left', 0, 'relative', 60, None, 0, 0)
    (0, 8)
    >>> clrp(20, 'relative', 30, 'relative', 60, None, 0, 0)
    (2, 6)
    >>> clrp(20, 'relative', 30, 'relative', 60, 14, 0, 0)
    (2, 4)
    """
    if width_type == RELATIVE:
        # scale the percentage against the space left after fixed padding
        maxwidth = max(maxcol - left - right, 0)
        width = int_scale(width_amount, 101, maxwidth + 1)
        if min_width is not None:
            width = max(width, min_width)
    else:
        width = width_amount
    # map the symbolic alignments onto relative percentages
    standard_alignments = {LEFT:0, CENTER:50, RIGHT:100}
    align = standard_alignments.get(align_type, align_amount)
    # add the remainder of left/right the padding
    padding = maxcol - width - left - right
    right += int_scale(100 - align, 101, padding + 1)
    left = maxcol - width - right
    # reduce padding if we are clipping an edge
    if right < 0 and left > 0:
        shift = min(left, -right)
        left -= shift
        right += shift
    elif left < 0 and right > 0:
        shift = min(right, -left)
        right -= shift
        left += shift
    # only clip if width_type == 'clip'
    if width_type != CLIP and (left < 0 or right < 0):
        left = max(left, 0)
        right = max(right, 0)
    return left, right
def calculate_padding( align_type, align_amount, width_type, width_amount,
        min_width, maxcol, clip=False ):
    # FIXME: remove this when Overlay is no longer calling it
    # Legacy left/right padding calculation kept for Overlay; returns
    # a (left, right) column count pair.
    if width_type == 'fixed':
        width = width_amount
    elif width_type == 'relative':
        # +.5 rounds to nearest instead of truncating
        width = int(width_amount*maxcol/100+.5)
        if min_width is not None:
            width = max(width, min_width)
    else:
        assert width_type in ('fixed right', 'fixed left')
        width = maxcol-width_amount-align_amount
        if min_width is not None:
            width = max(width, min_width)
    if width == maxcol or (width > maxcol and not clip):
        # use the full space (no padding)
        return 0, 0
    if align_type == 'fixed left':
        left = align_amount
        if left+width <= maxcol:
            return left, maxcol-left-width
        # need to shrink left
        return maxcol-width, 0
    elif align_type == 'fixed right':
        right = align_amount
        if right+width <= maxcol:
            return maxcol-right-width, right
        # need to shrink right
        return 0, maxcol-width
    elif align_type == 'relative':
        left = int( (maxcol-width)*align_amount/100+.5 )
    elif align_type == 'right':
        left = maxcol-width
    elif align_type == 'center':
        left = int( (maxcol-width)/2 )
    else:
        assert align_type == 'left'
        left = 0
    # clamp left into [0, maxcol-width] before deriving right
    if width < maxcol:
        if left+width > maxcol: left = maxcol-width
        if left < 0: left = 0
    right = maxcol-width-left
    return left, right
def _test():
    """Run the doctests embedded in this module."""
    import doctest
    doctest.testmod()
# run the module doctests when executed as a script
if __name__=='__main__':
    _test()
<file_sep>/urwid/main_loop.py
#!/usr/bin/python
#
# Urwid main loop code
# Copyright (C) 2004-2011 <NAME>
# Copyright (C) 2008 <NAME>
# Copyright (C) 2009 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
import time
import heapq
import select
import fcntl
import os
from urwid.util import is_mouse_event
from urwid.compat import PYTHON3, bytes
from urwid.command_map import command_map
from urwid.wimp import PopUpTarget
from urwid import signals
from urwid.display_common import INPUT_DESCRIPTORS_CHANGED
PIPE_BUFFER_READ_SIZE = 4096 # can expect this much on Linux, so try for that
class ExitMainLoop(Exception):
    """Raise this exception to exit MainLoop.run() cleanly; the main
    loop catches it and returns."""
class MainLoop(object):
    def __init__(self, widget, palette=[], screen=None,
            handle_mouse=True, input_filter=None, unhandled_input=None,
            event_loop=None, pop_ups=False):
        """
        Simple main loop implementation.
        widget -- topmost widget used for painting the screen,
            stored as self.widget and may be modified
        palette -- initial palette for screen
        screen -- screen object or None to use raw_display.Screen,
            stored as self.screen
        handle_mouse -- True to process mouse events, passed to
            self.screen
        input_filter -- a function to filter input before sending
            it to self.widget, called from self.input_filter
        unhandled_input -- a function called when input is not
            handled by self.widget, called from self.unhandled_input
        event_loop -- if screen supports external an event loop it
            may be given here, or leave as None to use
            SelectEventLoop, stored as self.event_loop
        pop_ups -- True to wrap self.widget with a PopUpTarget
            instance to allow any widget to open a pop-up anywhere on
            the screen
        This is the standard main loop implementation with a single
        screen.
        The widget passed must be a box widget.
        """
        # NOTE(review): palette=[] is a mutable default; it is only
        # passed to screen.register_palette() here, never mutated
        self._widget = widget
        self.handle_mouse = handle_mouse
        self.pop_ups = pop_ups # triggers property setting side-effect
        if not screen:
            from urwid import raw_display
            screen = raw_display.Screen()
        if palette:
            screen.register_palette(palette)
        self.screen = screen
        self.screen_size = None
        self._unhandled_input = unhandled_input
        self._input_filter = input_filter
        # an external event loop requires the screen to expose its
        # input file descriptors
        if not hasattr(screen, 'get_input_descriptors'
                ) and event_loop is not None:
            raise NotImplementedError("screen object passed "
                "%r does not support external event loops" % (screen,))
        if event_loop is None:
            event_loop = SelectEventLoop()
        self.event_loop = event_loop
        self._input_timeout = None
        # write_fd -> (watch_handle, read_fd), see watch_pipe()
        self._watch_pipes = {}
    def _set_widget(self, widget):
        # when pop_ups are enabled the topmost widget is a PopUpTarget
        # wrapper, so update its original_widget rather than replacing
        # the wrapper itself
        self._widget = widget
        if self.pop_ups:
            self._topmost_widget.original_widget = self._widget
        else:
            self._topmost_widget = self._widget
    # topmost widget used for painting the screen; may be reassigned
    widget = property(lambda self:self._widget, _set_widget)
    def _set_pop_ups(self, pop_ups):
        # wrap (or unwrap) the widget in a PopUpTarget when pop-up
        # support is toggled
        self._pop_ups = pop_ups
        if pop_ups:
            self._topmost_widget = PopUpTarget(self._widget)
        else:
            self._topmost_widget = self._widget
    # True when widgets are allowed to open pop-ups on the screen
    pop_ups = property(lambda self:self._pop_ups, _set_pop_ups)
def set_alarm_in(self, sec, callback, user_data=None):
"""
Schedule an alarm in sec seconds that will call
callback(main_loop, user_data) from the within the run()
function.
sec -- floating point seconds until alarm
callback -- callback(main_loop, user_data) callback function
user_data -- object to pass to callback
"""
def cb():
callback(self, user_data)
return self.event_loop.alarm(sec, cb)
def set_alarm_at(self, tm, callback, user_data=None):
"""
Schedule at tm time that will call
callback(main_loop, user_data) from the within the run()
function.
Returns a handle that may be passed to remove_alarm()
tm -- floating point local time of alarm
callback -- callback(main_loop, user_data) callback function
user_data -- object to pass to callback
"""
def cb():
callback(self, user_data)
return self.event_loop.alarm(tm - time.time(), cb)
def remove_alarm(self, handle):
"""
Remove an alarm.
Return True if the handle was found, False otherwise.
"""
return self.event_loop.remove_alarm(handle)
    def watch_pipe(self, callback):
        """
        Create a pipe for use by a subprocess or thread to trigger
        a callback in the process/thread running the MainLoop.
        callback -- function to call MainLoop.run thread/process
        This function returns a file descriptor attached to the
        write end of a pipe. The read end of the pipe is added to
        the list of files the event loop is watching. When
        data is written to the pipe the callback function will be
        called and passed a single value containing data read.
        This method should be used any time you want to update
        widgets from another thread or subprocess.
        Data may be written to the returned file descriptor with
        os.write(fd, data). Ensure that data is less than 512
        bytes (or 4K on Linux) so that the callback will be
        triggered just once with the complete value of data
        passed in.
        If the callback returns False then the watch will be
        removed and the read end of the pipe will be closed.
        You are responsible for closing the write end of the pipe.
        """
        pipe_rd, pipe_wr = os.pipe()
        # non-blocking read end so cb() never stalls the event loop
        fcntl.fcntl(pipe_rd, fcntl.F_SETFL, os.O_NONBLOCK)
        # placeholder; rebound below before cb() can ever fire
        watch_handle = None
        def cb():
            # cb closes over watch_handle, which is assigned after
            # this def but before the watch can trigger
            data = os.read(pipe_rd, PIPE_BUFFER_READ_SIZE)
            rval = callback(data)
            if rval is False:
                self.event_loop.remove_watch_file(watch_handle)
                os.close(pipe_rd)
        watch_handle = self.event_loop.watch_file(pipe_rd, cb)
        self._watch_pipes[pipe_wr] = (watch_handle, pipe_rd)
        return pipe_wr
def remove_watch_pipe(self, write_fd):
"""
Close the read end of the pipe and remove the watch created
by watch_pipe(). You are responsible for closing the write
end of the pipe.
Returns True if the watch pipe exists, False otherwise
"""
try:
watch_handle, pipe_rd = self._watch_pipes.remove(write_fd)
except KeyError:
return False
if not self.event_loop.remove_watch_file(watch_handle):
return False
os.close(pipe_rd)
return True
    def watch_file(self, fd, callback):
        """
        Call callback() when fd has some data to read. No parameters
        are passed to callback.
        Returns a handle that may be passed to remove_watch_file()
        fd -- file descriptor to watch for input
        callback -- function to call when input is available
        """
        # delegate to the event loop, which owns all file watches
        return self.event_loop.watch_file(fd, callback)
    def remove_watch_file(self, handle):
        """
        Remove a watch file.
        Returns True if the watch file exists, False otherwise.
        """
        # delegate to the event loop, which owns all file watches
        return self.event_loop.remove_watch_file(handle)
    def run(self):
        """
        Start the main loop handling input events and updating
        the screen. The loop will continue until an ExitMainLoop
        exception is raised.
        This function will call screen.run_wrapper() if screen.start()
        has not already been called.
        >>> w = _refl("widget")   # _refl prints out function calls
        >>> w.render_rval = "fake canvas"  # *_rval is used for return values
        >>> scr = _refl("screen")
        >>> scr.get_input_descriptors_rval = [42]
        >>> scr.get_cols_rows_rval = (20, 10)
        >>> scr.started = True
        >>> scr._urwid_signals = {}
        >>> evl = _refl("event_loop")
        >>> evl.enter_idle_rval = 1
        >>> evl.watch_file_rval = 2
        >>> ml = MainLoop(w, [], scr, event_loop=evl)
        >>> ml.run()    # doctest:+ELLIPSIS
        screen.set_mouse_tracking()
        screen.get_cols_rows()
        widget.render((20, 10), focus=True)
        screen.draw_screen((20, 10), 'fake canvas')
        screen.get_input_descriptors()
        event_loop.watch_file(42, <bound method ...>)
        event_loop.enter_idle(<bound method ...>)
        event_loop.run()
        event_loop.remove_enter_idle(1)
        event_loop.remove_watch_file(2)
        >>> scr.started = False
        >>> ml.run()    # doctest:+ELLIPSIS
        screen.run_wrapper(<bound method ...>)
        """
        try:
            if self.screen.started:
                self._run()
            else:
                # let the screen set up/tear down the terminal around us
                self.screen.run_wrapper(self._run)
        except ExitMainLoop:
            # normal, clean exit requested by a widget or callback
            pass
    def _run(self):
        # Internal driver for run(): wire the screen's input file
        # descriptors and idle callback into the event loop, run it,
        # then undo the wiring.
        if self.handle_mouse:
            self.screen.set_mouse_tracking()
        if not hasattr(self.screen, 'get_input_descriptors'):
            # screen cannot expose its fds; fall back to polling loop
            return self._run_screen_event_loop()
        self.draw_screen()
        fd_handles = []
        def reset_input_descriptors(only_remove=False):
            # drop current fd watches and (unless removing) re-add the
            # screen's current set of input descriptors
            for handle in fd_handles:
                self.event_loop.remove_watch_file(handle)
            if only_remove:
                return
            fd_handles[:] = [
                self.event_loop.watch_file(fd, self._update)
                for fd in self.screen.get_input_descriptors()]
        try:
            signals.connect_signal(self.screen, INPUT_DESCRIPTORS_CHANGED,
                reset_input_descriptors)
        except NameError:
            pass
        # watch our input descriptors
        reset_input_descriptors()
        idle_handle = self.event_loop.enter_idle(self.entering_idle)
        # Go..
        self.event_loop.run()
        # tidy up
        self.event_loop.remove_enter_idle(idle_handle)
        reset_input_descriptors(True)
        signals.disconnect_signal(self.screen, INPUT_DESCRIPTORS_CHANGED,
            reset_input_descriptors)
    def _update(self, timeout=False):
        """
        >>> w = _refl("widget")
        >>> w.selectable_rval = True
        >>> w.mouse_event_rval = True
        >>> scr = _refl("screen")
        >>> scr.get_cols_rows_rval = (15, 5)
        >>> scr.get_input_nonblocking_rval = 1, ['y'], [121]
        >>> evl = _refl("event_loop")
        >>> ml = MainLoop(w, [], scr, event_loop=evl)
        >>> ml._input_timeout = "old timeout"
        >>> ml._update()    # doctest:+ELLIPSIS
        event_loop.remove_alarm('old timeout')
        screen.get_input_nonblocking()
        event_loop.alarm(1, <function ...>)
        screen.get_cols_rows()
        widget.selectable()
        widget.keypress((15, 5), 'y')
        >>> scr.get_input_nonblocking_rval = None, [("mouse press", 1, 5, 4)
        ...     ], []
        >>> ml._update()
        screen.get_input_nonblocking()
        widget.mouse_event((15, 5), 'mouse press', 1, 5, 4, focus=True)
        >>> scr.get_input_nonblocking_rval = None, [], []
        >>> ml._update()
        screen.get_input_nonblocking()
        """
        if self._input_timeout is not None and not timeout:
            # cancel the timeout, something else triggered the update
            self.event_loop.remove_alarm(self._input_timeout)
        self._input_timeout = None
        max_wait, keys, raw = self.screen.get_input_nonblocking()
        if max_wait is not None:
            # if get_input_nonblocking wants to be called back
            # make sure it happens with an alarm
            self._input_timeout = self.event_loop.alarm(max_wait,
                lambda: self._update(timeout=True))
        keys = self.input_filter(keys, raw)
        if keys:
            self.process_input(keys)
            if 'window resize' in keys:
                # force a fresh get_cols_rows() on the next draw
                self.screen_size = None
def _run_screen_event_loop(self):
"""
This method is used when the screen does not support using
external event loops.
The alarms stored in the SelectEventLoop in self.event_loop
are modified by this method.
"""
next_alarm = None
while True:
self.draw_screen()
if not next_alarm and self.event_loop._alarms:
next_alarm = heapq.heappop(self.event_loop._alarms)
keys = None
while not keys:
if next_alarm:
sec = max(0, next_alarm[0] - time.time())
self.screen.set_input_timeouts(sec)
else:
self.screen.set_input_timeouts(None)
keys, raw = self.screen.get_input(True)
if not keys and next_alarm:
sec = next_alarm[0] - time.time()
if sec <= 0:
break
keys = self.input_filter(keys, raw)
if keys:
self.process_input(keys)
while next_alarm:
sec = next_alarm[0] - time.time()
if sec > 0:
break
tm, callback, user_data = next_alarm
callback(self, user_data)
if self._alarms:
next_alarm = heapq.heappop(self.event_loop._alarms)
else:
next_alarm = None
if 'window resize' in keys:
self.screen_size = None
    def process_input(self, keys):
        """
        This function will pass keyboard input and mouse events
        to self.widget. This function is called automatically
        from the run() method when there is input, but may also be
        called to simulate input from the user.
        keys -- list of input returned from self.screen.get_input()
        Returns True if any key was handled by a widget or the
        unhandled_input() method.
        >>> w = _refl("widget")
        >>> w.selectable_rval = True
        >>> scr = _refl("screen")
        >>> scr.get_cols_rows_rval = (10, 5)
        >>> ml = MainLoop(w, [], scr)
        >>> ml.process_input(['enter', ('mouse drag', 1, 14, 20)])
        screen.get_cols_rows()
        widget.selectable()
        widget.keypress((10, 5), 'enter')
        widget.mouse_event((10, 5), 'mouse drag', 1, 14, 20, focus=True)
        True
        """
        if not self.screen_size:
            self.screen_size = self.screen.get_cols_rows()
        something_handled = False
        for k in keys:
            if is_mouse_event(k):
                # mouse events are (event, button, col, row) tuples
                event, button, col, row = k
                if self._topmost_widget.mouse_event(self.screen_size,
                        event, button, col, row, focus=True ):
                    k = None
            elif self._topmost_widget.selectable():
                # keypress() returns the key if the widget didn't use it
                k = self._topmost_widget.keypress(self.screen_size, k)
            if k:
                # not consumed by widgets: check global commands, then
                # fall back to the unhandled_input hook
                if command_map[k] == 'redraw screen':
                    self.screen.clear()
                    something_handled = True
                else:
                    something_handled |= bool(self.unhandled_input(k))
            else:
                something_handled = True
        return something_handled
def input_filter(self, keys, raw):
"""
This function is passed each all the input events and raw
keystroke values. These values are passed to the
input_filter function passed to the constructor. That
function must return a list of keys to be passed to the
widgets to handle. If no input_filter was defined this
implementation will return all the input events.
input -- keyboard or mouse input
"""
if self._input_filter:
return self._input_filter(keys, raw)
return keys
def unhandled_input(self, input):
"""
This function is called with any input that was not handled
by the widgets, and calls the unhandled_input function passed
to the constructor. If no unhandled_input was defined then
the input will be ignored.
input -- keyboard or mouse input
The unhandled_input method should return True if it handled
the input.
"""
if self._unhandled_input:
return self._unhandled_input(input)
def entering_idle(self):
"""
This function is called whenever the event loop is about
to enter the idle state. self.draw_screen() is called here
to update the screen if anything has changed.
"""
if self.screen.started:
self.draw_screen()
def draw_screen(self):
"""
Renter the widgets and paint the screen. This function is
called automatically from run() but may be called additional
times if repainting is required without also processing input.
"""
if not self.screen_size:
self.screen_size = self.screen.get_cols_rows()
canvas = self._topmost_widget.render(self.screen_size, focus=True)
self.screen.draw_screen(self.screen_size, canvas)
class SelectEventLoop(object):
    def __init__(self):
        """
        Event loop based on select.select()
        >>> import os
        >>> rd, wr = os.pipe()
        >>> evl = SelectEventLoop()
        >>> def step1():
        ...     print "writing"
        ...     os.write(wr, "hi".encode('ascii'))
        >>> def step2():
        ...     print os.read(rd, 2).decode('ascii')
        ...     raise ExitMainLoop
        >>> handle = evl.alarm(0, step1)
        >>> handle = evl.watch_file(rd, step2)
        >>> evl.run()
        writing
        hi
        """
        self._alarms = []           # heap of (time, callback) pairs
        self._watch_files = {}      # fd -> callback
        self._idle_handle = 0       # last idle-callback handle issued
        self._idle_callbacks = {}   # handle -> callback
def alarm(self, seconds, callback):
"""
Call callback() given time from from now. No parameters are
passed to callback.
Returns a handle that may be passed to remove_alarm()
seconds -- floating point time to wait before calling callback
callback -- function to call from event loop
"""
tm = time.time() + seconds
heapq.heappush(self._alarms, (tm, callback))
return (tm, callback)
def remove_alarm(self, handle):
"""
Remove an alarm.
Returns True if the alarm exists, False otherwise
>>> evl = SelectEventLoop()
>>> handle = evl.alarm(50, lambda: None)
>>> evl.remove_alarm(handle)
True
>>> evl.remove_alarm(handle)
False
"""
try:
self._alarms.remove(handle)
heapq.heapify(self._alarms)
return True
except ValueError:
return False
def watch_file(self, fd, callback):
"""
Call callback() when fd has some data to read. No parameters
are passed to callback.
Returns a handle that may be passed to remove_watch_file()
fd -- file descriptor to watch for input
callback -- function to call when input is available
"""
self._watch_files[fd] = callback
return fd
def remove_watch_file(self, handle):
"""
Remove an input file.
Returns True if the input file exists, False otherwise
>>> evl = SelectEventLoop()
>>> handle = evl.watch_file(5, lambda: None)
>>> evl.remove_watch_file(handle)
True
>>> evl.remove_watch_file(handle)
False
"""
if handle in self._watch_files:
del self._watch_files[handle]
return True
return False
def enter_idle(self, callback):
"""
Add a callback for entering idle.
Returns a handle that may be passed to remove_idle()
"""
self._idle_handle += 1
self._idle_callbacks[self._idle_handle] = callback
return self._idle_handle
def remove_enter_idle(self, handle):
"""
Remove an idle callback.
Returns True if the handle was removed.
"""
try:
del self._idle_callbacks[handle]
except KeyError:
return False
return True
def _entering_idle(self):
"""
Call all the registered idle callbacks.
"""
for callback in self._idle_callbacks.values():
callback()
    def run(self):
        """
        Start the event loop. Exit the loop when any callback raises
        an exception. If ExitMainLoop is raised, exit cleanly.

        >>> import os
        >>> rd, wr = os.pipe()
        >>> os.write(wr, "data".encode('ascii')) # something to read from rd
        4
        >>> evl = SelectEventLoop()
        >>> def say_hello():
        ...     print "hello"
        >>> def say_waiting():
        ...     print "waiting"
        >>> def exit_clean():
        ...     print "clean exit"
        ...     raise ExitMainLoop
        >>> def exit_error():
        ...     1/0
        >>> handle = evl.alarm(0.01, exit_clean)
        >>> handle = evl.alarm(0.005, say_hello)
        >>> evl.enter_idle(say_waiting)
        1
        >>> evl.run()
        waiting
        hello
        waiting
        clean exit
        >>> handle = evl.watch_file(rd, exit_clean)
        >>> evl.run()
        clean exit
        >>> evl.remove_watch_file(handle)
        True
        >>> handle = evl.alarm(0, exit_error)
        >>> evl.run()
        Traceback (most recent call last):
        ...
        ZeroDivisionError: integer division or modulo by zero
        >>> handle = evl.watch_file(rd, exit_error)
        >>> evl.run()
        Traceback (most recent call last):
        ...
        ZeroDivisionError: integer division or modulo by zero
        """
        try:
            # start "dirty" so the first pass through _loop() uses a zero
            # timeout and fires the enter-idle callbacks promptly
            self._did_something = True
            while True:
                try:
                    self._loop()
                except select.error, e:
                    if e.args[0] != 4:
                        # errno 4 is EINTR (interrupted by a signal):
                        # simply retry; anything else is a real error
                        raise
        except ExitMainLoop:
            pass
    def _loop(self):
        """
        A single iteration of the event loop:
        wait (with select) for input or the next alarm deadline, then
        dispatch at most one alarm, any ready file callbacks, or the
        enter-idle callbacks if nothing else happened.
        """
        fds = self._watch_files.keys()
        if self._alarms or self._did_something:
            if self._alarms:
                # timeout until the earliest scheduled alarm
                tm = self._alarms[0][0]
                timeout = max(0, tm - time.time())
            if self._did_something and (not self._alarms or
                    (self._alarms and timeout > 0)):
                # work was done last pass and no alarm is already due:
                # poll with a zero timeout so we can enter idle quickly.
                # tm doubles as a sentinel: 'idle' means "idle pass",
                # None means "no timeout was used at all".
                timeout = 0
                tm = 'idle'
            ready, w, err = select.select(fds, [], fds, timeout)
        else:
            # nothing pending: block indefinitely until a fd is readable
            tm = None
            ready, w, err = select.select(fds, [], fds)

        if not ready:
            if tm == 'idle':
                self._entering_idle()
                self._did_something = False
            elif tm is not None:
                # select timed out, so the earliest alarm is due; pop and
                # fire it (alarms list is a heap, index 0 is the earliest)
                tm, alarm_callback = self._alarms.pop(0)
                alarm_callback()
                self._did_something = True

        # dispatch all file descriptors that became readable
        for fd in ready:
            self._watch_files[fd]()
            self._did_something = True
if not PYTHON3:
    # NOTE: defined only when not PYTHON3 — the legacy "gobject" binding
    # used here was never ported to Python 3.
    class GLibEventLoop(object):
        def __init__(self):
            """
            Event loop based on gobject.MainLoop

            >>> import os
            >>> rd, wr = os.pipe()
            >>> evl = GLibEventLoop()
            >>> def step1():
            ...     print "writing"
            ...     os.write(wr, "hi")
            >>> def step2():
            ...     print os.read(rd, 2)
            ...     raise ExitMainLoop
            >>> handle = evl.alarm(0, step1)
            >>> handle = evl.watch_file(rd, step2)
            >>> evl.run()
            writing
            hi
            """
            import gobject
            self.gobject = gobject
            self._alarms = []
            self._watch_files = {}
            self._idle_handle = 0
            self._glib_idle_enabled = False # have we called glib.idle_add?
            self._idle_callbacks = {}
            self._loop = self.gobject.MainLoop()
            # exception captured by handle_exit(), re-raised after run()
            self._exc_info = None
            self._enable_glib_idle()

        def alarm(self, seconds, callback):
            """
            Call callback() the given time from now. No parameters are
            passed to callback.

            Returns a handle that may be passed to remove_alarm()

            seconds -- floating point time to wait before calling callback
            callback -- function to call from event loop
            """
            @self.handle_exit
            def ret_false():
                callback()
                self._enable_glib_idle()
                # returning False tells glib this timeout is one-shot
                return False
            fd = self.gobject.timeout_add(int(seconds*1000), ret_false)
            self._alarms.append(fd)
            return (fd, callback)

        def remove_alarm(self, handle):
            """
            Remove an alarm.

            Returns True if the alarm exists, False otherwise

            >>> evl = GLibEventLoop()
            >>> handle = evl.alarm(50, lambda: None)
            >>> evl.remove_alarm(handle)
            True
            >>> evl.remove_alarm(handle)
            False
            """
            try:
                self._alarms.remove(handle[0])
                self.gobject.source_remove(handle[0])
                return True
            except ValueError:
                return False

        def watch_file(self, fd, callback):
            """
            Call callback() when fd has some data to read. No parameters
            are passed to callback.

            Returns a handle that may be passed to remove_watch_file()

            fd -- file descriptor to watch for input
            callback -- function to call when input is available
            """
            @self.handle_exit
            def io_callback(source, cb_condition):
                callback()
                self._enable_glib_idle()
                # returning True keeps the glib watch active
                return True
            self._watch_files[fd] = \
                 self.gobject.io_add_watch(fd,self.gobject.IO_IN,io_callback)
            return fd

        def remove_watch_file(self, handle):
            """
            Remove an input file.

            Returns True if the input file exists, False otherwise

            >>> evl = GLibEventLoop()
            >>> handle = evl.watch_file(1, lambda: None)
            >>> evl.remove_watch_file(handle)
            True
            >>> evl.remove_watch_file(handle)
            False
            """
            if handle in self._watch_files:
                self.gobject.source_remove(self._watch_files[handle])
                del self._watch_files[handle]
                return True
            return False

        def enter_idle(self, callback):
            """
            Add a callback for entering idle.

            Returns a handle that may be passed to remove_enter_idle()
            """
            self._idle_handle += 1
            self._idle_callbacks[self._idle_handle] = callback
            return self._idle_handle

        def _enable_glib_idle(self):
            # register our single idle dispatcher with glib, at most once
            if self._glib_idle_enabled:
                return
            self.gobject.idle_add(self._glib_idle_callback)
            self._glib_idle_enabled = True

        def _glib_idle_callback(self):
            for callback in self._idle_callbacks.values():
                callback()
            self._glib_idle_enabled = False
            # ask glib not to call again (it will be re-registered by
            # _enable_glib_idle after the next alarm/file event)
            return False

        def remove_enter_idle(self, handle):
            """
            Remove an idle callback.

            Returns True if the handle was removed.
            """
            try:
                del self._idle_callbacks[handle]
            except KeyError:
                return False
            return True

        def run(self):
            """
            Start the event loop. Exit the loop when any callback raises
            an exception. If ExitMainLoop is raised, exit cleanly.

            >>> import os
            >>> rd, wr = os.pipe()
            >>> os.write(wr, "data") # something to read from rd
            4
            >>> evl = GLibEventLoop()
            >>> def say_hello():
            ...     print "hello"
            >>> def say_waiting():
            ...     print "waiting"
            >>> def exit_clean():
            ...     print "clean exit"
            ...     raise ExitMainLoop
            >>> def exit_error():
            ...     1/0
            >>> handle = evl.alarm(0.01, exit_clean)
            >>> handle = evl.alarm(0.005, say_hello)
            >>> evl.enter_idle(say_waiting)
            1
            >>> evl.run()
            waiting
            hello
            waiting
            clean exit
            >>> handle = evl.watch_file(rd, exit_clean)
            >>> evl.run()
            clean exit
            >>> evl.remove_watch_file(handle)
            True
            >>> handle = evl.alarm(0, exit_error)
            >>> evl.run()
            Traceback (most recent call last):
            ...
            ZeroDivisionError: integer division or modulo by zero
            >>> handle = evl.watch_file(rd, exit_error)
            >>> evl.run()
            Traceback (most recent call last):
            ...
            ZeroDivisionError: integer division or modulo by zero
            """
            try:
                self._loop.run()
            finally:
                if self._loop.is_running():
                    self._loop.quit()
            if self._exc_info:
                # An exception caused us to exit, raise it now
                exc_info = self._exc_info
                self._exc_info = None
                raise exc_info[0], exc_info[1], exc_info[2]

        def handle_exit(self,f):
            """
            Decorator that cleanly exits the GLibEventLoop if ExitMainLoop is
            thrown inside of the wrapped function. Store the exception info if
            some other exception occurs, it will be reraised after the loop quits.

            f -- function to be wrapped
            """
            def wrapper(*args,**kargs):
                try:
                    return f(*args,**kargs)
                except ExitMainLoop:
                    self._loop.quit()
                except:
                    import sys
                    self._exc_info = sys.exc_info()
                    if self._loop.is_running():
                        self._loop.quit()
                # a glib callback returning False is removed from the loop
                return False
            return wrapper
try:
from twisted.internet.abstract import FileDescriptor
except ImportError:
FileDescriptor = object
class TwistedInputDescriptor(FileDescriptor):
    # Adapter that lets Twisted's reactor watch a raw file descriptor and
    # invoke our callback when it becomes readable.
    def __init__(self, reactor, fd, cb):
        self._fileno = fd
        self.cb = cb
        FileDescriptor.__init__(self, reactor)

    def fileno(self):
        # required by the reactor to know which fd to select() on
        return self._fileno

    def doRead(self):
        # called by the reactor when the fd is readable
        return self.cb()
class TwistedEventLoop(object):
    # Twisted has no enter-idle hook, so idle is emulated with a short timer
    _idle_emulation_delay = 1.0/256 # a short time (in seconds)

    def __init__(self, reactor=None, manage_reactor=True):
        """
        Event loop based on Twisted

        reactor -- reactor object to use, if None defaults to
            twisted.internet.reactor
        manage_reactor -- True if you want this event loop to run
            and stop the reactor

        *** WARNING ***
        Twisted's reactor doesn't like to be stopped and run again.
        If you need to stop and run your MainLoop, consider setting
        manage_reactor=False and take care of running/stopping
        the reactor at the beginning/ending of your program yourself.
        """
        if reactor is None:
            import twisted.internet.reactor
            reactor = twisted.internet.reactor
        self.reactor = reactor
        self._alarms = []
        self._watch_files = {}
        self._idle_handle = 0
        self._twisted_idle_enabled = False
        self._idle_callbacks = {}
        # exception captured by handle_exit(), re-raised after run()
        self._exc_info = None
        self.manage_reactor = manage_reactor
        self._enable_twisted_idle()

    def alarm(self, seconds, callback):
        """
        Call callback() the given time from now. No parameters are
        passed to callback.

        Returns a handle that may be passed to remove_alarm()

        seconds -- floating point time to wait before calling callback
        callback -- function to call from event loop
        """
        handle = self.reactor.callLater(seconds, self.handle_exit(callback))
        return handle

    def remove_alarm(self, handle):
        """
        Remove an alarm.

        Returns True if the alarm exists, False otherwise

        >>> evl = TwistedEventLoop()
        >>> handle = evl.alarm(50, lambda: None)
        >>> evl.remove_alarm(handle)
        True
        >>> evl.remove_alarm(handle)
        False
        """
        from twisted.internet.error import AlreadyCancelled, AlreadyCalled
        try:
            handle.cancel()
            return True
        except AlreadyCancelled:
            return False
        except AlreadyCalled:
            return False

    def watch_file(self, fd, callback):
        """
        Call callback() when fd has some data to read. No parameters
        are passed to callback.

        Returns a handle that may be passed to remove_watch_file()

        fd -- file descriptor to watch for input
        callback -- function to call when input is available
        """
        ind = TwistedInputDescriptor(self.reactor, fd,
            self.handle_exit(callback))
        self._watch_files[fd] = ind
        self.reactor.addReader(ind)
        return fd

    def remove_watch_file(self, handle):
        """
        Remove an input file.

        Returns True if the input file exists, False otherwise

        >>> evl = TwistedEventLoop()
        >>> handle = evl.watch_file(1, lambda: None)
        >>> evl.remove_watch_file(handle)
        True
        >>> evl.remove_watch_file(handle)
        False
        """
        if handle in self._watch_files:
            self.reactor.removeReader(self._watch_files[handle])
            del self._watch_files[handle]
            return True
        return False

    def enter_idle(self, callback):
        """
        Add a callback for entering idle.

        Returns a handle that may be passed to remove_enter_idle()
        """
        self._idle_handle += 1
        self._idle_callbacks[self._idle_handle] = callback
        return self._idle_handle

    def _enable_twisted_idle(self):
        """
        Twisted's reactors don't have an idle or enter-idle callback
        so the best we can do for now is to set a timer event in a very
        short time to approximate an enter-idle callback.

        XXX: This will perform worse than the other event loops until we
        can find a fix or workaround
        """
        if self._twisted_idle_enabled:
            return
        # enable_idle=False here prevents the wrapper from re-arming the
        # idle timer before _twisted_idle_callback clears the flag
        self.reactor.callLater(self._idle_emulation_delay,
            self.handle_exit(self._twisted_idle_callback, enable_idle=False))
        self._twisted_idle_enabled = True

    def _twisted_idle_callback(self):
        for callback in self._idle_callbacks.values():
            callback()
        self._twisted_idle_enabled = False

    def remove_enter_idle(self, handle):
        """
        Remove an idle callback.

        Returns True if the handle was removed.
        """
        try:
            del self._idle_callbacks[handle]
        except KeyError:
            return False
        return True

    def run(self):
        """
        Start the event loop. Exit the loop when any callback raises
        an exception. If ExitMainLoop is raised, exit cleanly.

        >>> import os
        >>> rd, wr = os.pipe()
        >>> os.write(wr, "data") # something to read from rd
        4
        >>> evl = TwistedEventLoop()
        >>> def say_hello_data():
        ...     print "hello data"
        ...     os.read(rd, 4)
        >>> def say_waiting():
        ...     print "waiting"
        >>> def say_hello():
        ...     print "hello"
        >>> handle = evl.watch_file(rd, say_hello_data)
        >>> def say_being_twisted():
        ...     print "oh I'm messed up"
        ...     raise ExitMainLoop
        >>> handle = evl.alarm(0.0625, say_being_twisted)
        >>> handle = evl.alarm(0.03125, say_hello)
        >>> evl.enter_idle(say_waiting)
        1
        >>> evl.run()
        hello data
        waiting
        hello
        waiting
        oh I'm messed up
        """
        if not self.manage_reactor:
            # caller runs/stops the reactor themselves; nothing to do here
            return
        self.reactor.run()
        if self._exc_info:
            # An exception caused us to exit, raise it now
            exc_info = self._exc_info
            self._exc_info = None
            raise exc_info[0], exc_info[1], exc_info[2]

    def handle_exit(self, f, enable_idle=True):
        """
        Decorator that cleanly exits the TwistedEventLoop if ExitMainLoop is
        thrown inside of the wrapped function. Store the exception info if
        some other exception occurs, it will be reraised after the loop quits.

        f -- function to be wrapped
        enable_idle -- re-arm the emulated idle timer after f returns
        """
        def wrapper(*args,**kargs):
            rval = None
            try:
                rval = f(*args,**kargs)
            except ExitMainLoop:
                if self.manage_reactor:
                    self.reactor.stop()
            except:
                import sys
                # NOTE(review): printing exc_info here looks like leftover
                # debugging output -- confirm before removing
                print sys.exc_info()
                self._exc_info = sys.exc_info()
                if self.manage_reactor:
                    self.reactor.crash()
            if enable_idle:
                self._enable_twisted_idle()
            return rval
        return wrapper
def _refl(name, rval=None, exit=False):
    """
    This function is used to test the main loop classes.
    It returns an object that prints a description of every call made
    on it (or on any attribute chain reached through it).

    >>> scr = _refl("screen")
    >>> scr.function("argument")
    screen.function('argument')
    >>> scr.callme(when="now")
    screen.callme(when='now')
    >>> scr.want_something_rval = 42
    >>> x = scr.want_something()
    screen.want_something()
    >>> x
    42
    """
    class Reflect(object):
        # Setting an attribute named <attr>_rval supplies the return
        # value for calls to <attr>().
        def __init__(self, name, rval=None):
            self._name = name
            self._rval = rval
        def __call__(self, *argl, **argd):
            # build a repr of the positional and keyword arguments
            args = ", ".join([repr(a) for a in argl])
            if args and argd:
                args = args + ", "
            args = args + ", ".join([k+"="+repr(v) for k,v in argd.items()])
            print self._name+"("+args+")"
            if exit:
                # simulate a callback that ends the main loop
                raise ExitMainLoop()
            return self._rval
        def __getattr__(self, attr):
            if attr.endswith("_rval"):
                # avoid infinite recursion from the hasattr() below
                raise AttributeError()
            #print self._name+"."+attr
            if hasattr(self, attr+"_rval"):
                return Reflect(self._name+"."+attr, getattr(self, attr+"_rval"))
            return Reflect(self._name+"."+attr)
    return Reflect(name)
def _test():
import doctest
doctest.testmod()
# Run the module's doctests when executed directly.
if __name__=='__main__':
    _test()
<file_sep>/soundcloud/tests/test_requests.py
from contextlib import contextmanager
import fudge
import soundcloud
from nose.tools import raises, assert_raises
from requests.exceptions import HTTPError
from soundcloud.tests.utils import MockResponse
@contextmanager
def response_status(fake_http_request, status):
    """Within this context, the faked HTTP request returns an empty
    JSON body with the given status code."""
    stubbed = MockResponse('{}', status_code=status)
    fake_http_request.expects_call().returns(stubbed)
    yield
@fudge.patch('requests.get')
def test_bad_responses(fake):
    """Anything in the 400 or 500 range should raise an exception."""
    client = soundcloud.Client(client_id='foo', client_secret='foo')
    client_errors = list(range(400, 423))
    server_errors = [500, 501, 502, 503, 504, 505]
    for status in client_errors + server_errors:
        with response_status(fake, status):
            assert_raises(HTTPError, lambda: client.get('/me'))
@fudge.patch('requests.get')
def test_ok_response(fake):
    """A 200 range response should be fine."""
    client = soundcloud.Client(client_id='foo', client_secret='foo')
    for status in range(200, 207):
        with response_status(fake, status):
            client.get('/me')
@fudge.patch('requests.get')
def test_redirects(fake):
    """Make sure 300 responses raise an exception.

    Note: ```requests``` transparently attempts redirects so if we get
    this back it means the caller has disabled redirects or the max
    number has been reached.
    """
    client = soundcloud.Client(client_id='foo', client_secret='foo')
    for status in [300, 301, 302, 303, 304, 305, 307]:
        with response_status(fake, status):
            assert_raises(HTTPError, lambda: client.get('/me'))
<file_sep>/urwid/monitored_list.py
#!/usr/bin/python
#
# Urwid MonitoredList class
# Copyright (C) 2004-2011 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from urwid.compat import PYTHON3
def _call_modified(fn):
def call_modified_wrapper(self, *args, **kwargs):
rval = fn(self, *args, **kwargs)
self._modified()
return rval
return call_modified_wrapper
class MonitoredList(list):
    """
    This class can trigger a callback any time its contents are changed
    with the usual list operations append, extend, etc.
    """
    def _modified(self):
        # default: no callback assigned; replaced by set_modified_callback()
        pass

    def set_modified_callback(self, callback):
        """
        Assign a callback function with no parameters.
        Callback's return value is ignored.

        >>> import sys
        >>> ml = MonitoredList([1,2,3])
        >>> ml.set_modified_callback(lambda: sys.stdout.write("modified\\n"))
        >>> ml
        MonitoredList([1, 2, 3])
        >>> ml.append(10)
        modified
        >>> len(ml)
        4
        >>> ml += [11, 12, 13]
        modified
        >>> ml[:] = ml[:2] + ml[-2:]
        modified
        >>> ml
        MonitoredList([1, 2, 12, 13])
        """
        self._modified = callback

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, list(self))

    # every mutating list method is wrapped so _modified() fires after it
    __add__ = _call_modified(list.__add__)
    __delitem__ = _call_modified(list.__delitem__)
    if not PYTHON3:
        # slice dunders only exist on Python 2
        __delslice__ = _call_modified(list.__delslice__)
    __iadd__ = _call_modified(list.__iadd__)
    __imul__ = _call_modified(list.__imul__)
    __rmul__ = _call_modified(list.__rmul__)
    __setitem__ = _call_modified(list.__setitem__)
    if not PYTHON3:
        __setslice__ = _call_modified(list.__setslice__)
    append = _call_modified(list.append)
    extend = _call_modified(list.extend)
    insert = _call_modified(list.insert)
    pop = _call_modified(list.pop)
    remove = _call_modified(list.remove)
    reverse = _call_modified(list.reverse)
    sort = _call_modified(list.sort)
class MonitoredFocusList(MonitoredList):
    """
    This class can trigger a callback any time its contents are changed
    and any time the item "in focus" is modified or removed
    """
    def __init__(self, *argl, **argd):
        """
        This is a list that tracks one item as the focus item. If items
        are inserted or removed it will update the focus.

        >>> ml = MonitoredFocusList([10, 11, 12, 13, 14], focus=3)
        >>> ml
        MonitoredFocusList([10, 11, 12, 13, 14], focus=3)
        >>> del(ml[1])
        >>> ml
        MonitoredFocusList([10, 12, 13, 14], focus=2)
        >>> ml[:2] = [50, 51, 52, 53]
        >>> ml
        MonitoredFocusList([50, 51, 52, 53, 13, 14], focus=4)
        >>> ml[4] = 99
        >>> ml
        MonitoredFocusList([50, 51, 52, 53, 99, 14], focus=4)
        >>> ml[:] = []
        >>> ml
        MonitoredFocusList([], focus=None)
        """
        # pull the keyword-only 'focus' argument out before passing the
        # rest to the list constructor
        focus = 0
        if 'focus' in argd:
            focus = argd['focus']
            del argd['focus']
        super(MonitoredFocusList, self).__init__(*argl, **argd)
        self.set_focus(focus)
        # default focus-modified handler does nothing; replaced by
        # set_focus_modified_callback()
        self._focus_modified = lambda ml, indices, new_items: None

    def __repr__(self):
        return "%s(%r, focus=%r)" % (
            self.__class__.__name__, list(self), self.get_focus())

    def get_focus(self):
        """
        Return the index of the item "in focus" or None if
        the list is empty. May also be accessed as .focus

        >>> MonitoredFocusList([1,2,3], focus=2).get_focus()
        2
        >>> MonitoredFocusList().get_focus()
        >>> MonitoredFocusList([1,2,3], focus=1).focus
        1
        """
        if not self:
            return None
        if self._focus >= len(self):
            # shouldn't happen.. but just in case
            return len(self)-1
        return self._focus

    def set_focus(self, index):
        """
        index -- index into self.widget_list, negative indexes count from
            the end, any index out of range will raise an IndexError

        Negative indexes work the same way they do in slicing.

        May also be set using .focus

        >>> ml = MonitoredFocusList([9, 10, 11])
        >>> ml.set_focus(2); ml.get_focus()
        2
        >>> ml.set_focus(-2); ml.get_focus()
        1
        >>> ml.focus = 0; ml.get_focus()
        0
        """
        if not self:
            self._focus = 0
            return
        if index < 0:
            index += len(self)
        if index < 0 or index >= len(self):
            raise IndexError('list index out of range')
        self._focus = int(index)

    focus = property(get_focus, set_focus)

    def set_focus_modified_callback(self, callback):
        """
        Assign a function to handle updating the focus when the item
        in focus is about to be changed. The callback is in the form:

        callback(monitored_list, slc, new_items)
        indices -- a (start, stop, step) tuple whose range covers the
            items being modified
        new_items -- a list of items replacing those at range(*indices)

        The only valid action for the callback is to call set_focus().
        Modifying the list in the callback has undefined behaviour.
        """
        self._focus_modified = callback

    def _handle_possible_focus_modified(self, slc, new_items=[]):
        """
        Default behaviour is to move the focus to the item following
        any removed items, or the last item in the list if that doesn't
        exist.

        Called BEFORE the underlying list mutation; adjusts self._focus
        to account for the items about to be added/removed at slice slc.
        """
        num_new_items = len(new_items)
        start, stop, step = indices = slc.indices(len(self))
        if step == 1:
            # contiguous slice
            if start <= self._focus < stop:
                # call user handler, which might modify focus
                self._focus_modified(self, indices, new_items)
            if start + num_new_items <= self._focus < stop:
                # focused item is being removed: move focus past the slice
                self._focus = stop
            # adjust for added/removed items
            if stop <= self._focus:
                self._focus += num_new_items - (stop - start)
        else:
            # extended slice
            removed = range(start, stop, step)
            if self._focus in removed:
                # call user handler, which might modify focus
                self._focus_modified(self, indices, new_items)
            if not num_new_items:
                # extended slice being removed
                if self._focus in removed:
                    # move focus to the next surviving item
                    self._focus += 1
                # adjust for removed items before the focus
                self._focus -= len(range(start, self._focus, step))

    def _clamp_focus(self):
        """
        adjust the focus if it is out of range
        """
        if self._focus >= len(self):
            self._focus = len(self)-1
        if self._focus < 0:
            self._focus = 0

    # override all the list methods that might affect our focus

    def __delitem__(self, y):
        """
        >>> ml = MonitoredFocusList([0,1,2,3], focus=2)
        >>> del ml[3]; ml
        MonitoredFocusList([0, 1, 2], focus=2)
        >>> del ml[0]; ml
        MonitoredFocusList([1, 2], focus=1)
        >>> del ml[1]; ml
        MonitoredFocusList([1], focus=0)
        >>> del ml[0]; ml
        MonitoredFocusList([], focus=None)
        >>> ml = MonitoredFocusList([5,4,6,4,5,4,6,4,5], focus=4)
        >>> del ml[1::2]; ml
        MonitoredFocusList([5, 6, 5, 6, 5], focus=2)
        >>> del ml[::2]; ml
        MonitoredFocusList([6, 6], focus=1)
        """
        if isinstance(y, slice):
            self._handle_possible_focus_modified(y)
        else:
            # normalize a single index to a one-element slice
            self._handle_possible_focus_modified(slice(y, y+1))
        rval = super(MonitoredFocusList, self).__delitem__(y)
        self._clamp_focus()
        return rval

    def __setitem__(self, i, y):
        """
        >>> def modified(monitored_list, indices, new_items):
        ...     print "range%r <- %r" % (indices, new_items)
        >>> ml = MonitoredFocusList([0,1,2,3], focus=2)
        >>> ml.set_focus_modified_callback(modified)
        >>> ml[0] = 9
        >>> ml[2] = 6
        range(2, 3, 1) <- [6]
        >>> ml[-1] = 8; ml
        MonitoredFocusList([9, 1, 6, 8], focus=2)
        >>> ml[1::2] = [12, 13]
        >>> ml[::2] = [10, 11]
        range(0, 4, 2) <- [10, 11]
        """
        if isinstance(i, slice):
            self._handle_possible_focus_modified(i, y)
        else:
            # "i+1 or None" makes i == -1 become slice(-1, None)
            self._handle_possible_focus_modified(slice(i, i+1 or None), [y])
        return super(MonitoredFocusList, self).__setitem__(i, y)

    def __delslice__(self, i, j):
        """
        Python 2 only: called for del ml[i:j] with plain slices.

        >>> def modified(monitored_list, indices, new_items):
        ...     print "range%r <- %r" % (indices, new_items)
        >>> ml = MonitoredFocusList([0,1,2,3,4], focus=2)
        >>> ml.set_focus_modified_callback(modified)
        >>> del ml[3:5]; ml
        MonitoredFocusList([0, 1, 2], focus=2)
        >>> del ml[:1]; ml
        MonitoredFocusList([1, 2], focus=1)
        >>> del ml[1:]; ml
        range(1, 2, 1) <- []
        MonitoredFocusList([1], focus=0)
        >>> del ml[:]; ml
        range(0, 1, 1) <- []
        MonitoredFocusList([], focus=None)
        """
        self._handle_possible_focus_modified(slice(i, j))
        rval = super(MonitoredFocusList, self).__delslice__(i, j)
        self._clamp_focus()
        return rval

    def __setslice__(self, i, j, y):
        """
        Python 2 only: called for ml[i:j] = y with plain slices.

        >>> ml = MonitoredFocusList([0,1,2,3,4], focus=2)
        >>> ml[3:5] = [-1]; ml
        MonitoredFocusList([0, 1, 2, -1], focus=2)
        >>> ml[0:1] = []; ml
        MonitoredFocusList([1, 2, -1], focus=1)
        >>> ml[1:] = [3, 4]; ml
        MonitoredFocusList([1, 3, 4], focus=1)
        >>> ml[1:] = [2]; ml
        MonitoredFocusList([1, 2], focus=1)
        >>> ml[0:1] = [9,9,9]; ml
        MonitoredFocusList([9, 9, 9, 2], focus=3)
        >>> ml[:] = []; ml
        MonitoredFocusList([], focus=None)
        """
        self._handle_possible_focus_modified(slice(i, j), y)
        rval = super(MonitoredFocusList, self).__setslice__(i, j, y)
        self._clamp_focus()
        return rval

    def insert(self, index, object):
        """
        >>> ml = MonitoredFocusList([0,1,2,3], focus=2)
        >>> ml.insert(-1, -1); ml
        MonitoredFocusList([0, 1, 2, -1, 3], focus=2)
        >>> ml.insert(0, -2); ml
        MonitoredFocusList([-2, 0, 1, 2, -1, 3], focus=3)
        >>> ml.insert(3, -3); ml
        MonitoredFocusList([-2, 0, 1, -3, 2, -1, 3], focus=4)
        """
        # empty slice at index: pure insertion, nothing removed
        self._handle_possible_focus_modified(slice(index, index), [object])
        return super(MonitoredFocusList, self).insert(index, object)

    def pop(self, index=-1):
        """
        >>> ml = MonitoredFocusList([-2,0,1,-3,2,3], focus=4)
        >>> ml.pop(3); ml
        -3
        MonitoredFocusList([-2, 0, 1, 2, 3], focus=3)
        >>> ml.pop(0); ml
        -2
        MonitoredFocusList([0, 1, 2, 3], focus=2)
        >>> ml.pop(-1); ml
        3
        MonitoredFocusList([0, 1, 2], focus=2)
        >>> ml.pop(2); ml
        2
        MonitoredFocusList([0, 1], focus=1)
        """
        self._handle_possible_focus_modified(slice(index, index+1 or None))
        return super(MonitoredFocusList, self).pop(index)

    def remove(self, value):
        """
        >>> ml = MonitoredFocusList([-2,0,1,-3,2,-1,3], focus=4)
        >>> ml.remove(-3); ml
        MonitoredFocusList([-2, 0, 1, 2, -1, 3], focus=3)
        >>> ml.remove(-2); ml
        MonitoredFocusList([0, 1, 2, -1, 3], focus=2)
        >>> ml.remove(3); ml
        MonitoredFocusList([0, 1, 2, -1], focus=2)
        """
        index = self.index(value)
        self._handle_possible_focus_modified(slice(index, index+1 or None))
        return super(MonitoredFocusList, self).remove(value)

    def reverse(self):
        """
        >>> ml = MonitoredFocusList([0,1,2,3,4], focus=1)
        >>> ml.reverse(); ml
        MonitoredFocusList([4, 3, 2, 1, 0], focus=3)
        """
        # mirror the focus position across the list
        self._focus = max(0, len(self) - self._focus - 1)
        return super(MonitoredFocusList, self).reverse()

    def sort(self):
        """
        >>> ml = MonitoredFocusList([-2,0,1,-3,2,-1,3], focus=4)
        >>> ml.sort(); ml
        MonitoredFocusList([-3, -2, -1, 0, 1, 2, 3], focus=5)
        """
        if not self:
            return
        # keep focus on the same item by value after sorting
        value = self[self._focus]
        rval = super(MonitoredFocusList, self).sort()
        self._focus = self.index(value)
        return rval
def _test():
import doctest
doctest.testmod()
# Run the module's doctests when executed directly.
if __name__=='__main__':
    _test()
<file_sep>/urwid/tests.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Urwid unit testing .. ok, ok, ok
# Copyright (C) 2004-2011 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
import unittest
try:
from test import test_support
except ImportError:
from test import support as test_support
from doctest import DocTestSuite, ELLIPSIS, IGNORE_EXCEPTION_DETAIL
import urwid
from urwid.util import bytes, B
from urwid.vterm_test import TermTest
from urwid.text_layout import calc_pos, calc_coords, CanNotDisplayText
from urwid.canvas import (shard_body, shard_body_tail, shards_trim_top,
shards_trim_sides, shards_join, shards_trim_rows, shard_body_row)
from urwid.graphics import calculate_bargraph_display
class DecodeOneTest(unittest.TestCase):
    # Tests urwid.str_util.decode_one: decoding a single UTF-8 sequence
    # from a byte string, returning (ordinal, next position).
    def gwt(self, ch, exp_ord, exp_pos):
        # helper: decode at offset 0 and check ordinal and new position;
        # invalid sequences decode as ord("?") and consume one byte
        ch = B(ch)
        o, pos = urwid.str_util.decode_one(ch,0)
        assert o==exp_ord, " got:%r expected:%r" % (o, exp_ord)
        assert pos==exp_pos, " got:%r expected:%r" % (pos, exp_pos)

    def test1byte(self):
        self.gwt("ab", ord("a"), 1)
        self.gwt("\xc0a", ord("?"), 1) # error

    def test2byte(self):
        self.gwt("\xc2", ord("?"), 1) # error
        self.gwt("\xc0\x80", ord("?"), 1) # error
        self.gwt("\xc2\x80", 0x80, 2)
        self.gwt("\xdf\xbf", 0x7ff, 2)

    def test3byte(self):
        self.gwt("\xe0", ord("?"), 1) # error
        self.gwt("\xe0\xa0", ord("?"), 1) # error
        self.gwt("\xe0\x90\x80", ord("?"), 1) # error
        self.gwt("\xe0\xa0\x80", 0x800, 3)
        self.gwt("\xef\xbf\xbf", 0xffff, 3)

    def test4byte(self):
        self.gwt("\xf0", ord("?"), 1) # error
        self.gwt("\xf0\x90", ord("?"), 1) # error
        self.gwt("\xf0\x90\x80", ord("?"), 1) # error
        self.gwt("\xf0\x80\x80\x80", ord("?"), 1) # error
        self.gwt("\xf0\x90\x80\x80", 0x10000, 4)
        self.gwt("\xf3\xbf\xbf\xbf", 0xfffff, 4)
class CalcWidthTest(unittest.TestCase):
    # Tests urwid.calc_width: on-screen column width of a byte string
    # under different terminal encodings.
    def wtest(self, desc, s, exp):
        s = B(s)
        result = urwid.calc_width( s, 0, len(s))
        assert result==exp, "%s got:%r expected:%r" % (desc, result, exp)

    def test1(self):
        # utf-8: wide CJK chars count 2 columns, combining chars count 0
        urwid.set_encoding("utf-8")
        self.wtest("narrow", "hello", 5)
        self.wtest("wide char", '\xe6\x9b\xbf', 2)
        self.wtest("invalid", '\xe6', 1)
        self.wtest("zero width", '\xcc\x80', 0)
        self.wtest("mixed", 'hello\xe6\x9b\xbf\xe6\x9b\xbf', 9)

    def test2(self):
        # euc-jp: each double-byte pair occupies 2 columns
        urwid.set_encoding("euc-jp")
        self.wtest("narrow", "hello", 5)
        self.wtest("wide", "\xA1\xA1\xA1\xA1", 4)
        self.wtest("invalid", "\xA1", 1)
class ConvertDecSpecialTest(unittest.TestCase):
    # Tests rendering of characters that map to the DEC special graphics
    # character set ("0") when the display encoding is plain ascii.
    def ctest(self, desc, s, exp, expcs):
        # render the text 5 columns wide and compare the raw bytes and
        # the charset attribute runs of the first canvas row
        exp = B(exp)
        urwid.set_encoding('ascii')
        c = urwid.Text(s).render((5,))
        result = c._text[0]
        assert result==exp, "%s got:%r expected:%r" % (desc, result, exp)
        resultcs = c._cs[0]
        assert resultcs==expcs, "%s got:%r expected:%r" % (desc,
            resultcs, expcs)

    def test1(self):
        self.ctest("no conversion", u"hello", "hello", [(None,5)])
        self.ctest("only special", u"£££££", "}}}}}", [("0",5)])
        self.ctest("mix left", u"££abc", "}}abc", [("0",2),(None,3)])
        self.ctest("mix right", u"abc££", "abc}}", [(None,3),("0",2)])
        self.ctest("mix inner", u"a££bc", "a}}bc",
            [(None,1),("0",2),(None,2)] )
        self.ctest("mix well", u"£a£b£", "}a}b}",
            [("0",1),(None,1),("0",1),(None,1),("0",1)] )
class WithinDoubleByteTest(unittest.TestCase):
    # Tests urwid.within_double_byte under euc-jp: returns 0 for a single
    # byte character, 1 for the first half of a double-byte character and
    # 2 for the second half.
    def setUp(self):
        urwid.set_encoding("euc-jp")

    def wtest(self, s, ls, pos, expected, desc):
        # ls is the line start offset, pos the byte position being tested
        result = urwid.within_double_byte(B(s), ls, pos)
        assert result==expected, "%s got:%r expected: %r" % (desc,
            result, expected)

    def test1(self):
        self.wtest("mnopqr",0,2,0,'simple no high bytes')
        self.wtest("mn\xA1\xA1qr",0,2,1,'simple 1st half')
        self.wtest("mn\xA1\xA1qr",0,3,2,'simple 2nd half')
        self.wtest("m\xA1\xA1\xA1\xA1r",0,3,1,'subsequent 1st half')
        self.wtest("m\xA1\xA1\xA1\xA1r",0,4,2,'subsequent 2nd half')
        self.wtest("mn\xA1@qr",0,3,2,'simple 2nd half lo')
        self.wtest("mn\xA1\xA1@r",0,4,0,'subsequent not 2nd half lo')
        self.wtest("m\xA1\xA1\xA1@r",0,4,2,'subsequent 2nd half lo')

    def test2(self):
        self.wtest("\xA1\xA1qr",0,0,1,'begin 1st half')
        self.wtest("\xA1\xA1qr",0,1,2,'begin 2nd half')
        self.wtest("\xA1@qr",0,1,2,'begin 2nd half lo')
        self.wtest("\xA1\xA1\xA1\xA1r",0,2,1,'begin subs. 1st half')
        self.wtest("\xA1\xA1\xA1\xA1r",0,3,2,'begin subs. 2nd half')
        self.wtest("\xA1\xA1\xA1@r",0,3,2,'begin subs. 2nd half lo')
        self.wtest("\xA1@\xA1@r",0,3,2,'begin subs. 2nd half lo lo')
        self.wtest("@\xA1\xA1@r",0,3,0,'begin subs. not 2nd half lo')

    def test3(self):
        # same cases with a nonzero line start offset
        self.wtest("abc \xA1\xA1qr",4,4,1,'newline 1st half')
        self.wtest("abc \xA1\xA1qr",4,5,2,'newline 2nd half')
        self.wtest("abc \xA1@qr",4,5,2,'newline 2nd half lo')
        self.wtest("abc \xA1\xA1\xA1\xA1r",4,6,1,'newl subs. 1st half')
        self.wtest("abc \xA1\xA1\xA1\xA1r",4,7,2,'newl subs. 2nd half')
        self.wtest("abc \xA1\xA1\xA1@r",4,7,2,'newl subs. 2nd half lo')
        self.wtest("abc \xA1@\xA1@r",4,7,2,'newl subs. 2nd half lo lo')
        self.wtest("abc @\xA1\xA1@r",4,7,0,'newl subs. not 2nd half lo')
class CalcTextPosTest(unittest.TestCase):
    # Tests urwid.calc_text_pos: given a byte range (s, e) and a target
    # screen-column offset p, return (byte position, actual columns).
    def ctptest(self, text, tests):
        text = B(text)
        for s,e,p, expected in tests:
            got = urwid.calc_text_pos( text, s, e, p )
            assert got == expected, "%r got:%r expected:%r" % ((s,e,p),
                got, expected)

    def test1(self):
        # plain ascii: byte position equals column offset
        text = "hello world out there"
        tests = [
            (0,21,0, (0,0)),
            (0,21,5, (5,5)),
            (0,21,21, (21,21)),
            (0,21,50, (21,21)),
            (2,15,50, (15,13)),
            (6,21,0, (6,0)),
            (6,21,3, (9,3)),
            ]
        self.ctptest(text, tests)

    def test2_wide(self):
        # euc-jp double-byte char: 2 bytes wide, 2 columns wide
        urwid.set_encoding("euc-jp")
        text = "hel\xA1\xA1 world out there"
        tests = [
            (0,21,0, (0,0)),
            (0,21,4, (3,3)),
            (2,21,2, (3,1)),
            (2,21,3, (5,3)),
            (6,21,0, (6,0)),
            ]
        self.ctptest(text, tests)

    def test3_utf8(self):
        # utf-8 multi-byte chars: byte position advances faster than column
        urwid.set_encoding("utf-8")
        text = "hel\xc4\x83 world \xe2\x81\x81 there"
        tests = [
            (0,21,0, (0,0)),
            (0,21,4, (5,4)),
            (2,21,1, (3,1)),
            (2,21,2, (5,2)),
            (2,21,3, (6,3)),
            (6,21,7, (15,7)),
            (6,21,8, (16,8)),
            ]
        self.ctptest(text, tests)

    def test4_utf8(self):
        # combining (zero width) and wide utf-8 characters
        urwid.set_encoding("utf-8")
        text = "he\xcc\x80llo \xe6\x9b\xbf world"
        tests = [
            (0,15,0, (0,0)),
            (0,15,1, (1,1)),
            (0,15,2, (4,2)),
            (0,15,4, (6,4)),
            (8,15,0, (8,0)),
            (8,15,1, (8,0)),
            (8,15,2, (11,2)),
            (8,15,5, (14,5)),
            ]
        self.ctptest(text, tests)
class CalcBreaksTest(unittest.TestCase):
    """Base class for line-break tests.

    Subclasses supply ``text``, a wrap ``mode``, and ``do`` -- a table of
    (width, expected end offsets, one per wrapped line) cases.
    """
    def cbtest(self, width, exp):
        # Lay out self.text at the given width and compare the final
        # offset of each produced line against the expected values.
        segments = urwid.default_layout.calculate_text_segments(
            B(self.text), width, self.mode )
        assert len(segments) == len(exp), repr((segments, exp))
        for line, expected_end in zip(segments, exp):
            assert line[-1][-1] == expected_end, repr((segments, exp))
    def test(self):
        # Run every (width, expected) pair from the subclass table.
        for width, exp in self.do:
            self.cbtest(width, exp)
class CalcBreaksCharTest(CalcBreaksTest):
    # Break anywhere (character wrapping); the embedded "\n" at offset 18
    # always forces a break.
    mode = 'any'
    text = "abfghsdjf askhtrvs\naltjhgsdf ljahtshgf"
    # tests: (width, end offset of each wrapped line)
    do = [
        ( 100, [18,38] ),
        ( 6, [6, 12, 18, 25, 31, 37, 38] ),
        ( 10, [10, 18, 29, 38] ),
    ]
class CalcBreaksDBCharTest(CalcBreaksTest):
    # Character wrapping with EUC-JP double-byte (2-column) characters:
    # a wide char is never split across a break.
    def setUp(self):
        urwid.set_encoding("euc-jp")
    mode = 'any'
    text = "abfgh\xA1\xA1j\xA1\xA1xskhtrvs\naltjhgsdf\xA1\xA1jahtshgf"
    # tests: (width, end offset of each wrapped line)
    do = [
        ( 10, [10, 18, 28, 38] ),
        ( 6, [5, 11, 17, 18, 25, 31, 37, 38] ),
        ( 100, [18, 38]),
    ]
class CalcBreaksWordTest(CalcBreaksTest):
    # Word wrapping: break only at spaces (and the forced "\n").
    mode = 'space'
    text = "hello world\nout there. blah"
    # tests: (width, end offset of each wrapped line)
    do = [
        ( 10, [5, 11, 22, 27] ),
        ( 5, [5, 11, 17, 22, 27] ),
        ( 100, [11, 27] ),
    ]
class CalcBreaksWordTest2(CalcBreaksTest):
    # Word wrapping without any forced newline.
    mode = 'space'
    text = "A simple set of words, really...."
    # (width, end offset of each wrapped line)
    do = [
        ( 10, [8, 15, 22, 33]),
        ( 17, [15, 33]),
        ( 13, [12, 22, 33]),
    ]
class CalcBreaksDBWordTest(CalcBreaksTest):
    # Word wrapping with EUC-JP double-byte characters embedded in words.
    def setUp(self):
        urwid.set_encoding("euc-jp")
    mode = 'space'
    text = "hel\xA1\xA1 world\nout-\xA1\xA1tre blah"
    # tests: (width, end offset of each wrapped line)
    do = [
        ( 10, [5, 11, 21, 26] ),
        ( 5, [5, 11, 16, 21, 26] ),
        ( 100, [11, 26] ),
    ]
class CalcBreaksUTF8Test(CalcBreaksTest):
    # Word wrapping of UTF-8 text: five CJK characters (3 bytes each),
    # so expected end offsets advance in multiples of 3 bytes.
    def setUp(self):
        urwid.set_encoding("utf-8")
    mode = 'space'
    text = '\xe6\x9b\xbf\xe6\xb4\xbc\xe6\xb8\x8e\xe6\xba\x8f\xe6\xbd\xba'
    # (width, end offset of each wrapped line)
    do = [
        (4, [6, 12, 15] ),
        (10, [15] ),
        (5, [6, 12, 15] ),
    ]
class CalcBreaksCantDisplayTest(unittest.TestCase):
    """A double-width character can never fit in a width-1 column, so
    layout must raise CanNotDisplayText in both encodings."""
    def test(self):
        # (encoding, one double-width character in that encoding)
        cases = [
            ("euc-jp", B('\xA1\xA1')),
            ("utf-8", B('\xe9\xa2\x96')),
        ]
        for encoding, wide_char in cases:
            urwid.set_encoding(encoding)
            self.assertRaises(CanNotDisplayText,
                urwid.default_layout.calculate_text_segments,
                wide_char, 1, 'space' )
class SubsegTest(unittest.TestCase):
    """Tests for urwid.LayoutSegment.subseg: slicing one layout segment
    to a [start, end) screen-column range."""
    def setUp(self):
        urwid.set_encoding("euc-jp")
    def st(self, seg, text, start, end, exp):
        # Build a LayoutSegment from the tuple form and slice it.
        text = B(text)
        s = urwid.LayoutSegment(seg)
        result = s.subseg( text, start, end )
        assert result == exp, "Expected %r, got %r"%(exp,result)
    def test1_padding(self):
        # (cols, None) is blank padding; (cols, offset) keeps an offset.
        # Ranges are clipped to the segment's own width.
        self.st( (10, None), "", 0, 8, [(8, None)] )
        self.st( (10, None), "", 2, 10, [(8, None)] )
        self.st( (10, 0), "", 3, 7, [(4, 0)] )
        self.st( (10, 0), "", 0, 20, [(10, 0)] )
    def test2_text(self):
        # (cols, offset, bytes) carries inline text; cutting through a
        # double-width "\xA1\xA1" replaces the severed half with a space
        # (visible in the expected values below).
        self.st( (10, 0, B("1234567890")), "", 0, 8, [(8,0,B("12345678"))] )
        self.st( (10, 0, B("1234567890")), "", 2, 10, [(8,0,B("34567890"))] )
        self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 2, 8,
            [(6, 0, B("\xA1\xA156\xA1\xA1"))] )
        self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 3, 8,
            [(5, 0, B(" 56\xA1\xA1"))] )
        self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 2, 7,
            [(5, 0, B("\xA1\xA156 "))] )
        self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 3, 7,
            [(4, 0, B(" 56 "))] )
        self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 0, 20,
            [(10, 0, B("12\xA1\xA156\xA1\xA190"))] )
    def test3_range(self):
        # (cols, start, end) references a range of `text`; cutting through
        # a double-width char yields extra (1, offset) padding entries.
        t = "1234567890"
        self.st( (10, 0, 10), t, 0, 8, [(8, 0, 8)] )
        self.st( (10, 0, 10), t, 2, 10, [(8, 2, 10)] )
        self.st( (6, 2, 8), t, 1, 6, [(5, 3, 8)] )
        self.st( (6, 2, 8), t, 0, 5, [(5, 2, 7)] )
        self.st( (6, 2, 8), t, 1, 5, [(4, 3, 7)] )
        t = "12\xA1\xA156\xA1\xA190"
        self.st( (10, 0, 10), t, 0, 8, [(8, 0, 8)] )
        self.st( (10, 0, 10), t, 2, 10, [(8, 2, 10)] )
        self.st( (6, 2, 8), t, 1, 6, [(1, 3), (4, 4, 8)] )
        self.st( (6, 2, 8), t, 0, 5, [(4, 2, 6), (1, 6)] )
        self.st( (6, 2, 8), t, 1, 5, [(1, 3), (2, 4, 6), (1, 6)] )
class CalcTranslateTest(unittest.TestCase):
    """Base class for layout-translation tests.

    Subclasses define ``text``, ``mode``, ``width`` and the expected
    layout structure for each of the three horizontal alignments.
    """
    def setUp(self):
        urwid.set_encoding("utf-8")
    def _check_alignment(self, align, expected):
        # Lay the text out with one alignment and compare structures.
        result = urwid.default_layout.layout( self.text,
            self.width, align, self.mode)
        assert result == expected, result
    def test1_left(self):
        self._check_alignment('left', self.result_left)
    def test2_right(self):
        self._check_alignment('right', self.result_right)
    def test3_center(self):
        self._check_alignment('center', self.result_center)
class CalcTranslateCharTest(CalcTranslateTest):
    # Character wrapping at width 15.  Each line is a list of segments:
    # (cols, None) padding, (cols, start, end) text ranges, and (0, offset)
    # entries -- presumably end-of-line markers; confirm against layout().
    text = "It's out of control!\nYou've got to"
    mode = 'any'
    width = 15
    result_left = [
        [(15, 0, 15)],
        [(5, 15, 20), (0, 20)],
        [(13, 21, 34), (0, 34)]]
    result_right = [
        [(15, 0, 15)],
        [(10, None), (5, 15, 20), (0,20)],
        [(2, None), (13, 21, 34), (0,34)]]
    result_center = [
        [(15, 0, 15)],
        [(5, None), (5, 15, 20), (0,20)],
        [(1, None), (13, 21, 34), (0,34)]]
class CalcTranslateWordTest(CalcTranslateTest):
    # Word wrapping at width 14; padding segments carry the alignment.
    text = "It's out of control!\nYou've got to"
    mode = 'space'
    width = 14
    result_left = [
        [(11, 0, 11), (0, 11)],
        [(8, 12, 20), (0, 20)],
        [(13, 21, 34), (0, 34)]]
    result_right = [
        [(3, None), (11, 0, 11), (0, 11)],
        [(6, None), (8, 12, 20), (0, 20)],
        [(1, None), (13, 21, 34), (0, 34)]]
    result_center = [
        [(2, None), (11, 0, 11), (0, 11)],
        [(3, None), (8, 12, 20), (0, 20)],
        [(1, None), (13, 21, 34), (0, 34)]]
class CalcTranslateWordTest2(CalcTranslateTest):
    # Same as CalcTranslateWordTest but with a trailing space: the last
    # line fills the width exactly, so it gets no alignment padding.
    text = "It's out of control!\nYou've got to "
    mode = 'space'
    width = 14
    result_left = [
        [(11, 0, 11), (0, 11)],
        [(8, 12, 20), (0, 20)],
        [(14, 21, 35), (0, 35)]]
    result_right = [
        [(3, None), (11, 0, 11), (0, 11)],
        [(6, None), (8, 12, 20), (0, 20)],
        [(14, 21, 35), (0, 35)]]
    result_center = [
        [(2, None), (11, 0, 11), (0, 11)],
        [(3, None), (8, 12, 20), (0, 20)],
        [(14, 21, 35), (0, 35)]]
class CalcTranslateWordTest3(CalcTranslateTest):
    # UTF-8 double-width CJK text: byte offsets advance by 3 per char
    # while screen columns advance by 2.
    def setUp(self):
        urwid.set_encoding('utf-8')
    text = B('\xe6\x9b\xbf\xe6\xb4\xbc\n\xe6\xb8\x8e\xe6\xba\x8f\xe6\xbd\xba')
    width = 10
    mode = 'space'
    result_left = [
        [(4, 0, 6), (0, 6)],
        [(6, 7, 16), (0, 16)]]
    result_right = [
        [(6, None), (4, 0, 6), (0, 6)],
        [(4, None), (6, 7, 16), (0, 16)]]
    result_center = [
        [(3, None), (4, 0, 6), (0, 6)],
        [(2, None), (6, 7, 16), (0, 16)]]
class CalcTranslateWordTest4(CalcTranslateTest):
    # Leading-space handling at a very narrow width: the first line is
    # empty.  NOTE(review): "<NAME>" looks like an anonymization
    # placeholder for the original test string -- the expected offsets
    # assume an 11-character text; confirm against upstream.
    text = ' <NAME>'
    width = 3
    mode = 'space'
    result_left = [
        [(0, 0)],
        [(3, 1, 4), (0, 4)],
        [(3, 5, 8)],
        [(3, 8, 11), (0, 11)]]
    result_right = [
        [(3, None), (0, 0)],
        [(3, 1, 4), (0, 4)],
        [(3, 5, 8)],
        [(3, 8, 11), (0, 11)]]
    result_center = [
        [(2, None), (0, 0)],
        [(3, 1, 4), (0, 4)],
        [(3, 5, 8)],
        [(3, 8, 11), (0, 11)]]
class CalcTranslateWordTest5(CalcTranslateTest):
    # All-space leading content at exactly the line width: identical
    # layout regardless of alignment.
    text = '  Word.'
    width = 3
    mode = 'space'
    result_left = [[(3, 0, 3)], [(3, 3, 6), (0, 6)]]
    result_right = [[(3, 0, 3)], [(3, 3, 6), (0, 6)]]
    result_center = [[(3, 0, 3)], [(3, 3, 6), (0, 6)]]
class CalcTranslateClipTest(CalcTranslateTest):
    # Clip mode: lines are never wrapped, so segments can be wider than
    # the window and right/center alignment can produce negative padding.
    text = "It's out of control!\nYou've got to\n\nturn it off!!!"
    mode = 'clip'
    width = 14
    result_left = [
        [(20, 0, 20), (0, 20)],
        [(13, 21, 34), (0, 34)],
        [(0, 35)],
        [(14, 36, 50), (0, 50)]]
    result_right = [
        [(-6, None), (20, 0, 20), (0, 20)],
        [(1, None), (13, 21, 34), (0, 34)],
        [(14, None), (0, 35)],
        [(14, 36, 50), (0, 50)]]
    result_center = [
        [(-3, None), (20, 0, 20), (0, 20)],
        [(1, None), (13, 21, 34), (0, 34)],
        [(7, None), (0, 35)],
        [(14, 36, 50), (0, 50)]]
class CalcTranslateCantDisplayTest(CalcTranslateTest):
    # A double-width char in a width-1 window: the expected layout is a
    # single empty line for every alignment.
    text = B('Hello\xe9\xa2\x96')
    mode = 'space'
    width = 1
    result_left = [[]]
    result_right = [[]]
    result_center = [[]]
class CalcPosTest(unittest.TestCase):
    """Tests for calc_pos (imported elsewhere in this file): mapping an
    (x, y) screen coordinate back to a text offset via a translation."""
    def setUp(self):
        self.text = "A" * 27
        self.trans = [
            [(2,None),(7,0,7),(0,7)],
            [(13,8,21),(0,21)],
            [(3,None),(5,22,27),(0,27)]]
        # (x, y, expected offset); x values outside the text segment are
        # clamped to the segment's nearest end.
        self.mytests = [(1,0, 0), (2,0, 0), (11,0, 7),
            (-3,1, 8), (-2,1, 8), (1,1, 9), (31,1, 21),
            (1,2, 22), (11,2, 27) ]
    def tests(self):
        # NOTE: "tests" still matches unittest's default "test*" method
        # prefix, so this does run under test discovery.
        for x,y, expected in self.mytests:
            got = calc_pos( self.text, self.trans, x, y )
            assert got == expected, "%r got:%r expected:%r" % ((x, y), got,
                expected)
class Pos2CoordsTest(unittest.TestCase):
    """Tests for calc_coords (imported elsewhere in this file): the
    inverse of calc_pos -- mapping a text offset to (x, y) coordinates."""
    # Text offsets to look up in each translation below.
    pos_list = [5, 9, 20, 26]
    text = "1234567890" * 3
    # (translation, expected (x, y) for each offset in pos_list)
    mytests = [
        ( [[(15,0,15)], [(15,15,30),(0,30)]],
            [(5,0),(9,0),(5,1),(11,1)] ),
        ( [[(9,0,9)], [(12,9,21)], [(9,21,30),(0,30)]],
            [(5,0),(0,1),(11,1),(5,2)] ),
        ( [[(2,None), (15,0,15)], [(2,None), (15,15,30),(0,30)]],
            [(7,0),(11,0),(7,1),(13,1)] ),
        ( [[(3, 6, 9),(0,9)], [(5, 20, 25),(0,25)]],
            [(0,0),(3,0),(0,1),(5,1)] ),
        ( [[(10, 0, 10),(0,10)]],
            [(5,0),(9,0),(10,0),(10,0)] ),
    ]
    def test(self):
        for t, answer in self.mytests:
            for pos,a in zip(self.pos_list,answer) :
                r = calc_coords( self.text, t, pos)
                assert r==a, "%r got: %r expected: %r"%(t,r,a)
class CanvasCacheTest(unittest.TestCase):
    """Tests for urwid.CanvasCache store/fetch/invalidate behaviour."""
    def setUp(self):
        # purge the cache so earlier tests can't leak entries in
        urwid.CanvasCache._widgets.clear()
    def cct(self, widget, size, focus, expected):
        # fetch returns the canvas stored for this exact
        # (widget, size, focus) combination, or None on a miss.
        got = urwid.CanvasCache.fetch(widget, urwid.Widget, size, focus)
        assert expected==got, "got: %s expected: %s"%(got, expected)
    def test1(self):
        a = urwid.Text("")
        b = urwid.Text("")
        # finalize binds each canvas to a (widget, size, focus) key
        blah = urwid.TextCanvas()
        blah.finalize(a, (10,1), False)
        blah2 = urwid.TextCanvas()
        blah2.finalize(a, (15,1), False)
        bloo = urwid.TextCanvas()
        bloo.finalize(b, (20,2), True)
        urwid.CanvasCache.store(urwid.Widget, blah)
        urwid.CanvasCache.store(urwid.Widget, blah2)
        urwid.CanvasCache.store(urwid.Widget, bloo)
        # hits require the widget, size AND focus all to match
        self.cct(a, (10,1), False, blah)
        self.cct(a, (15,1), False, blah2)
        self.cct(a, (15,1), True, None)
        self.cct(a, (10,2), False, None)
        self.cct(b, (20,2), True, bloo)
        self.cct(b, (21,2), True, None)
        # invalidating one widget drops all of its entries but leaves
        # other widgets' entries intact
        urwid.CanvasCache.invalidate(a)
        self.cct(a, (10,1), False, None)
        self.cct(a, (15,1), False, None)
        self.cct(b, (20,2), True, bloo)
class CanvasTest(unittest.TestCase):
    """Tests for urwid.TextCanvas.content(): attribute runs, padding to
    the canvas width, and windowed (left/top/cols/rows) extraction."""
    def ct(self, text, attr, exp_content):
        # Full-canvas content: each row is a list of (attr, cs, bytes).
        c = urwid.TextCanvas([B(t) for t in text], attr)
        content = list(c.content())
        assert content == exp_content, "got: %r expected: %r" % (content,
            exp_content)
    def ct2(self, text, attr, left, top, cols, rows, def_attr, exp_content):
        # Windowed content: only the requested rectangle is returned.
        c = urwid.TextCanvas([B(t) for t in text], attr)
        content = list(c.content(left, top, cols, rows, def_attr))
        assert content == exp_content, "got: %r expected: %r" % (content,
            exp_content)
    def test1(self):
        self.ct(["Hello world"], None, [[(None, None, B("Hello world"))]])
        # attr runs split a row into multiple (attr, cs, text) segments
        self.ct(["Hello world"], [[("a",5)]],
            [[("a", None, B("Hello")), (None, None, B(" world"))]])
        # shorter rows are space-padded to the canvas width
        self.ct(["Hi","There"], None,
            [[(None, None, B("Hi "))], [(None, None, B("There"))]])
    def test2(self):
        self.ct2(["Hello"], None, 0, 0, 5, 1, None,
            [[(None, None, B("Hello"))]])
        self.ct2(["Hello"], None, 1, 0, 4, 1, None,
            [[(None, None, B("ello"))]])
        self.ct2(["Hello"], None, 0, 0, 4, 1, None,
            [[(None, None, B("Hell"))]])
        self.ct2(["Hi","There"], None, 1, 0, 3, 2, None,
            [[(None, None, B("i "))], [(None, None, B("her"))]])
        self.ct2(["Hi","There"], None, 0, 0, 5, 1, None,
            [[(None, None, B("Hi "))]])
        self.ct2(["Hi","There"], None, 0, 1, 5, 1, None,
            [[(None, None, B("There"))]])
class ShardBodyTest(unittest.TestCase):
    """Tests for the shard_body/shard_body_tail/shard_body_row helpers
    (imported elsewhere in this file) used by CanvasCombine/CanvasJoin.
    Canvas views are (left, top, cols, rows, attr, canvas) tuples."""
    def sbt(self, shards, shard_tail, expected):
        # Merge new canvas views with the tail carried over from the
        # previous shard; tail entries are inserted by column position.
        result = shard_body(shards, shard_tail, False)
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def sbttail(self, num_rows, sbody, expected):
        # Compute the tail to carry forward after consuming num_rows.
        result = shard_body_tail(num_rows, sbody)
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def sbtrow(self, sbody, expected):
        # Pull one row's content (or column counts) from each body entry.
        result = list(shard_body_row(sbody))
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def test1(self):
        cviews = [(0,0,10,5,None,"foo"),(0,0,5,5,None,"bar")]
        self.sbt(cviews, [],
            [(0, None, (0,0,10,5,None,"foo")),
            (0, None, (0,0,5,5,None,"bar"))])
        # tail entry at column 0 goes first ...
        self.sbt(cviews, [(0, 3, None, (0,0,5,8,None,"baz"))],
            [(3, None, (0,0,5,8,None,"baz")),
            (0, None, (0,0,10,5,None,"foo")),
            (0, None, (0,0,5,5,None,"bar"))])
        # ... at column 10 between the two views ...
        self.sbt(cviews, [(10, 3, None, (0,0,5,8,None,"baz"))],
            [(0, None, (0,0,10,5,None,"foo")),
            (3, None, (0,0,5,8,None,"baz")),
            (0, None, (0,0,5,5,None,"bar"))])
        # ... and at column 15 after both.
        self.sbt(cviews, [(15, 3, None, (0,0,5,8,None,"baz"))],
            [(0, None, (0,0,10,5,None,"foo")),
            (0, None, (0,0,5,5,None,"bar")),
            (3, None, (0,0,5,8,None,"baz"))])
    def test2(self):
        sbody = [(0, None, (0,0,10,5,None,"foo")),
            (0, None, (0,0,5,5,None,"bar")),
            (3, None, (0,0,5,8,None,"baz"))]
        # all views exhausted after 5 rows: nothing to carry forward
        self.sbttail(5, sbody, [])
        self.sbttail(3, sbody,
            [(0, 3, None, (0,0,10,5,None,"foo")),
            (0, 3, None, (0,0,5,5,None,"bar")),
            (0, 6, None, (0,0,5,8,None,"baz"))])
        sbody = [(0, None, (0,0,10,3,None,"foo")),
            (0, None, (0,0,5,5,None,"bar")),
            (3, None, (0,0,5,9,None,"baz"))]
        # "foo" is fully consumed, so the tail starts at its column (10)
        self.sbttail(3, sbody,
            [(10, 3, None, (0,0,5,5,None,"bar")),
            (0, 6, None, (0,0,5,9,None,"baz"))])
    def test3(self):
        # With no content iterators, each entry yields its column count.
        self.sbtrow([(0, None, (0,0,10,5,None,"foo")),
            (0, None, (0,0,5,5,None,"bar")),
            (3, None, (0,0,5,8,None,"baz"))],
            [20])
        # With iterators, each entry yields its next row of content.
        self.sbtrow([(0, iter("foo"), (0,0,10,5,None,"foo")),
            (0, iter("bar"), (0,0,5,5,None,"bar")),
            (3, iter("zzz"), (0,0,5,8,None,"baz"))],
            ["f","b","z"])
class ShardsTrimTest(unittest.TestCase):
    """Tests for the canvas shard trimming helpers (imported elsewhere in
    this file): cutting rows off the top, limiting the row count, and
    trimming columns from the sides.  Shards are (rows, [cview, ...])
    pairs; cviews are (left, top, cols, rows, attr, canvas) tuples."""
    # BUG FIX: all three assert messages below were written as
    # '"..." (result, expected)' -- the "%" operator was missing, so any
    # failure raised "TypeError: 'str' object is not callable" instead of
    # showing the values.
    def sttop(self, shards, top, expected):
        # Trim `top` rows from the top of the shard list.
        result = shards_trim_top(shards, top)
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def strows(self, shards, rows, expected):
        # Keep only the first `rows` rows of the shard list.
        result = shards_trim_rows(shards, rows)
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def stsides(self, shards, left, cols, expected):
        # Keep `cols` columns starting at column `left`.
        result = shards_trim_sides(shards, left, cols)
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def test1(self):
        # Vertical trimming, including cviews that span multiple shards.
        shards = [(5, [(0,0,10,5,None,"foo"),(0,0,5,5,None,"bar")])]
        self.sttop(shards, 2,
            [(3, [(0,2,10,3,None,"foo"),(0,2,5,3,None,"bar")])])
        self.strows(shards, 2,
            [(2, [(0,0,10,2,None,"foo"),(0,0,5,2,None,"bar")])])
        shards = [(5, [(0,0,10,5,None,"foo")]),(3,[(0,0,10,3,None,"bar")])]
        self.sttop(shards, 2,
            [(3, [(0,2,10,3,None,"foo")]),(3,[(0,0,10,3,None,"bar")])])
        self.sttop(shards, 5,
            [(3, [(0,0,10,3,None,"bar")])])
        self.sttop(shards, 7,
            [(1, [(0,2,10,1,None,"bar")])])
        self.strows(shards, 7,
            [(5, [(0,0,10,5,None,"foo")]),(2, [(0,0,10,2,None,"bar")])])
        self.strows(shards, 5,
            [(5, [(0,0,10,5,None,"foo")])])
        self.strows(shards, 4,
            [(4, [(0,0,10,4,None,"foo")])])
        # "baz" is 8 rows tall and straddles both shards
        shards = [(5, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz")]),
            (3,[(0,0,10,3,None,"bar")])]
        self.sttop(shards, 2,
            [(3, [(0,2,10,3,None,"foo"), (0,2,5,6,None,"baz")]),
            (3,[(0,0,10,3,None,"bar")])])
        self.sttop(shards, 5,
            [(3, [(0,0,10,3,None,"bar"), (0,5,5,3,None,"baz")])])
        self.sttop(shards, 7,
            [(1, [(0,2,10,1,None,"bar"), (0,7,5,1,None,"baz")])])
        self.strows(shards, 7,
            [(5, [(0,0,10,5,None,"foo"), (0,0,5,7,None,"baz")]),
            (2, [(0,0,10,2,None,"bar")])])
        self.strows(shards, 5,
            [(5, [(0,0,10,5,None,"foo"), (0,0,5,5,None,"baz")])])
        self.strows(shards, 4,
            [(4, [(0,0,10,4,None,"foo"), (0,0,5,4,None,"baz")])])
    def test2(self):
        # Horizontal trimming: cviews fully outside the range are dropped,
        # partially covered ones are narrowed.
        shards = [(5, [(0,0,10,5,None,"foo"),(0,0,5,5,None,"bar")])]
        self.stsides(shards, 0, 15,
            [(5, [(0,0,10,5,None,"foo"),(0,0,5,5,None,"bar")])])
        self.stsides(shards, 6, 9,
            [(5, [(6,0,4,5,None,"foo"),(0,0,5,5,None,"bar")])])
        self.stsides(shards, 6, 6,
            [(5, [(6,0,4,5,None,"foo"),(0,0,2,5,None,"bar")])])
        self.stsides(shards, 0, 10,
            [(5, [(0,0,10,5,None,"foo")])])
        self.stsides(shards, 10, 5,
            [(5, [(0,0,5,5,None,"bar")])])
        self.stsides(shards, 1, 7,
            [(5, [(1,0,7,5,None,"foo")])])
        shards = [(5, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz")]),
            (3,[(0,0,10,3,None,"bar")])]
        self.stsides(shards, 0, 15,
            [(5, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz")]),
            (3,[(0,0,10,3,None,"bar")])])
        self.stsides(shards, 2, 13,
            [(5, [(2,0,8,5,None,"foo"), (0,0,5,8,None,"baz")]),
            (3,[(2,0,8,3,None,"bar")])])
        self.stsides(shards, 2, 10,
            [(5, [(2,0,8,5,None,"foo"), (0,0,2,8,None,"baz")]),
            (3,[(2,0,8,3,None,"bar")])])
        self.stsides(shards, 2, 8,
            [(5, [(2,0,8,5,None,"foo")]),
            (3,[(2,0,8,3,None,"bar")])])
        self.stsides(shards, 2, 6,
            [(5, [(2,0,6,5,None,"foo")]),
            (3,[(2,0,6,3,None,"bar")])])
        # dropping every cview in the first shard merges it away entirely
        self.stsides(shards, 10, 5,
            [(8, [(0,0,5,8,None,"baz")])])
        self.stsides(shards, 11, 3,
            [(8, [(1,0,3,8,None,"baz")])])
class ShardsJoinTest(unittest.TestCase):
    """Tests for shards_join (imported elsewhere in this file):
    horizontally joining several shard lists, splitting shards so row
    boundaries from every input list are preserved."""
    def sjt(self, shard_lists, expected):
        result = shards_join(shard_lists)
        # BUG FIX: the message was '"..." (result, expected)' -- missing
        # the "%" operator, so a failing assert raised
        # "TypeError: 'str' object is not callable" instead of reporting.
        assert result == expected, "got: %r expected: %r" % (result, expected)
    def test(self):
        # 5+3 rows, 3+5 rows and 3+2+3 rows: joins must split at every
        # boundary (3, 5, 8) present in any input.
        shards1 = [(5, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz")]),
            (3,[(0,0,10,3,None,"bar")])]
        shards2 = [(3, [(0,0,10,3,None,"aaa")]),
            (5,[(0,0,10,5,None,"bbb")])]
        shards3 = [(3, [(0,0,10,3,None,"111")]),
            (2,[(0,0,10,3,None,"222")]),
            (3,[(0,0,10,3,None,"333")])]
        # joining a single list is the identity
        self.sjt([shards1], shards1)
        self.sjt([shards1, shards2],
            [(3, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz"),
                (0,0,10,3,None,"aaa")]),
            (2, [(0,0,10,5,None,"bbb")]),
            (3, [(0,0,10,3,None,"bar")])])
        self.sjt([shards1, shards3],
            [(3, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz"),
                (0,0,10,3,None,"111")]),
            (2, [(0,0,10,3,None,"222")]),
            (3, [(0,0,10,3,None,"bar"), (0,0,10,3,None,"333")])])
        self.sjt([shards1, shards2, shards3],
            [(3, [(0,0,10,5,None,"foo"), (0,0,5,8,None,"baz"),
                (0,0,10,3,None,"aaa"), (0,0,10,3,None,"111")]),
            (2, [(0,0,10,5,None,"bbb"), (0,0,10,3,None,"222")]),
            (3, [(0,0,10,3,None,"bar"), (0,0,10,3,None,"333")])])
class TagMarkupTest(unittest.TestCase):
    """Tests for urwid.decompose_tagmarkup: flattening nested
    (attr, markup) structures into (text, run-length attr list) form."""
    # (markup input, expected flattened text, expected attr run list)
    mytests = [
        ("simple one", "simple one", []),
        (('blue',"john"), "john", [('blue',4)]),
        (["a ","litt","le list"], "a little list", []),
        (["mix",('high',[" it ",('ital',"up a")])," little"],
            "mix it up a little",
            [(None,3),('high',4),('ital',4)]),
        ([u"££", u"x££"], u"££x££", []),
        ([B("\xc2\x80"), B("\xc2\x80")], B("\xc2\x80\xc2\x80"), []),
    ]
    def test(self):
        for input, text, attr in self.mytests:
            restext,resattr = urwid.decompose_tagmarkup( input )
            assert restext == text, "got: %r expected: %r" % (restext, text)
            assert resattr == attr, "got: %r expected: %r" % (resattr, attr)
    def test_bad_tuple(self):
        # A 3-tuple is not valid markup (only (attr, markup) pairs are).
        # FIX: replaced the Python-2-only "except X, e:" form (which bound
        # an unused name and is a syntax error on Python 3) with
        # assertRaises -- valid on both interpreters.
        self.assertRaises(urwid.TagMarkupException,
            urwid.decompose_tagmarkup, (1,2,3))
    def test_bad_type(self):
        # Integers are not valid markup at all.
        self.assertRaises(urwid.TagMarkupException,
            urwid.decompose_tagmarkup, 5)
class TextTest(unittest.TestCase):
    """Wrapping and alignment tests for urwid.Text.render."""
    def setUp(self):
        self.t = urwid.Text("I walk the\ncity in the night")
    # FIX applied to all four tests: the bare tuples after "for t in"
    # were parenthesized -- "[B(t) for t in "a","b"]" is Python-2-only
    # syntax (a SyntaxError on Python 3); the parenthesized form is
    # byte-for-byte the same value on Python 2.
    def test1_wrap(self):
        # default wrap at width 10
        expected = [B(t) for t in ("I walk the","city in ","the night ")]
        got = self.t.render((10,))._text
        assert got == expected, "got: %r expected: %r" % (got, expected)
    def test2_left(self):
        self.t.set_align_mode('left')
        expected = [B(t) for t in ("I walk the ","city in the night ")]
        got = self.t.render((18,))._text
        assert got == expected, "got: %r expected: %r" % (got, expected)
    def test3_right(self):
        self.t.set_align_mode('right')
        expected = [B(t) for t in (" I walk the"," city in the night")]
        got = self.t.render((18,))._text
        assert got == expected, "got: %r expected: %r" % (got, expected)
    def test4_center(self):
        self.t.set_align_mode('center')
        expected = [B(t) for t in (" I walk the "," city in the night")]
        got = self.t.render((18,))._text
        assert got == expected, "got: %r expected: %r" % (got, expected)
class EditTest(unittest.TestCase):
    """Cursor-movement keypress tests for urwid.Edit."""
    def setUp(self):
        self.t1 = urwid.Edit("","blah blah")
        self.t2 = urwid.Edit("stuff:", "blah blah")
        self.t3 = urwid.Edit("junk:\n","blah blah\n\nbloo",1)
    def ktest(self, e, key, expected, pos, desc):
        # Send one key at width 12; check both the returned (unhandled)
        # key and the resulting edit position.
        got = e.keypress((12,),key)
        assert got == expected, "%s. got: %r expected:%r" % (desc, got,
            expected)
        # BUG FIX: the template ended with "expected pos: " and no final
        # %r, so a failure here raised "TypeError: not all arguments
        # converted" instead of reporting the positions.
        assert e.edit_pos == pos, "%s. pos: %r expected pos: %r" % (
            desc, e.edit_pos, pos)
    def test1_left(self):
        self.t1.set_edit_pos(0)
        # at the left edge the key is returned unhandled
        self.ktest(self.t1,'left','left',0,"left at left edge")
        self.ktest(self.t2,'left',None,8,"left within text")
        self.t3.set_edit_pos(10)
        self.ktest(self.t3,'left',None,9,"left after newline")
    def test2_right(self):
        self.ktest(self.t1,'right','right',9,"right at right edge")
        self.t2.set_edit_pos(8)
        self.ktest(self.t2,'right',None,9,"right at right edge-1")
        self.t3.set_edit_pos(0)
        self.t3.keypress((12,),'right')
        assert self.t3.get_pref_col((12,)) == 1
    def test3_up(self):
        self.ktest(self.t1,'up','up',9,"up at top")
        self.t2.set_edit_pos(2)
        self.t2.keypress((12,),"left")
        # the preferred column must survive vertical movement attempts
        assert self.t2.get_pref_col((12,)) == 7
        self.ktest(self.t2,'up','up',1,"up at top again")
        assert self.t2.get_pref_col((12,)) == 7
        self.t3.set_edit_pos(10)
        self.ktest(self.t3,'up',None,0,"up at top+1")
    def test4_down(self):
        self.ktest(self.t1,'down','down',9,"down single line")
        self.t3.set_edit_pos(5)
        self.ktest(self.t3,'down',None,10,"down line 1 to 2")
        self.ktest(self.t3,'down',None,15,"down line 2 to 3")
        self.ktest(self.t3,'down','down',15,"down at bottom")
class EditRenderTest(unittest.TestCase):
    """Rendering and cursor-coordinate tests for urwid.Edit at width 4."""
    def rtest(self, w, expected_text, expected_cursor):
        expected_text = [B(t) for t in expected_text]
        # cursor coords must agree before and after rendering
        get_cursor = w.get_cursor_coords((4,))
        assert get_cursor == expected_cursor, "got: %r expected: %r" % (
            get_cursor, expected_cursor)
        r = w.render((4,), focus = 1)
        # take only the first segment of each rendered row
        text = [t for a, cs, t in [ln[0] for ln in r.content()]]
        assert text == expected_text, "got: %r expected: %r" % (text,
            expected_text)
        assert r.cursor == expected_cursor, "got: %r expected: %r" % (
            r.cursor, expected_cursor)
    def test1_SpaceWrap(self):
        # word wrap: the view scrolls so the cursor stays visible
        w = urwid.Edit("","blah blah")
        w.set_edit_pos(0)
        self.rtest(w,["blah","blah"],(0,0))
        w.set_edit_pos(4)
        self.rtest(w,["lah ","blah"],(3,0))
        w.set_edit_pos(5)
        self.rtest(w,["blah","blah"],(0,1))
        w.set_edit_pos(9)
        self.rtest(w,["blah","lah "],(3,1))
    def test2_ClipWrap(self):
        # clip mode: each line scrolls horizontally and independently
        w = urwid.Edit("","blah\nblargh",1)
        w.set_wrap_mode('clip')
        w.set_edit_pos(0)
        self.rtest(w,["blah","blar"],(0,0))
        w.set_edit_pos(10)
        self.rtest(w,["blah","argh"],(3,1))
        w.set_align_mode('right')
        w.set_edit_pos(6)
        self.rtest(w,["blah","larg"],(0,1))
    def test3_AnyWrap(self):
        # character wrap: a trailing blank row holds the cursor
        w = urwid.Edit("","blah blah")
        w.set_wrap_mode('any')
        self.rtest(w,["blah"," bla","h "],(1,2))
    def test4_CursorNudge(self):
        # right-aligned text is nudged left one column so the cursor
        # fits at the end of the line
        w = urwid.Edit("","hi",align='right')
        w.keypress((4,),'end')
        self.rtest(w,[" hi "],(3,0))
        w.keypress((4,),'left')
        self.rtest(w,[" hi"],(3,0))
class SelectableText(urwid.Text):
    """Text widget that accepts focus; used by the ListBox tests below."""
    def selectable(self):
        return 1
    def keypress(self, size, key):
        # ignore all input: return the key unhandled
        return key
class ListBoxCalculateVisibleTest(unittest.TestCase):
    """Tests for ListBox.calculate_visible on a 4x5 window."""
    def cvtest(self, desc, body, focus, offset_rows, inset_fraction,
            exp_offset_inset, exp_cur ):
        # Build a ListBox in a known scroll state and check the computed
        # focus offset/inset and cursor position.
        lbox = urwid.ListBox(body)
        lbox.body.set_focus( focus )
        lbox.offset_rows = offset_rows
        lbox.inset_fraction = inset_fraction
        middle, top, bottom = lbox.calculate_visible((4,5),focus=1)
        offset_inset, focus_widget, focus_pos, _ign, cursor = middle
        if cursor is not None:
            # translate the cursor from widget-relative to window-relative
            x, y = cursor
            y += offset_inset
            cursor = x, y
        assert offset_inset == exp_offset_inset, "%s got: %r expected: %r" %(desc,offset_inset,exp_offset_inset)
        assert cursor == exp_cur, "%s (cursor) got: %r expected: %r" %(desc,cursor,exp_cur)
    def test1_simple(self):
        T = urwid.Text
        l = [T(""),T(""),T("\n"),T("\n\n"),T("\n"),T(""),T("")]
        self.cvtest( "simple top position",
            l, 3, 0, (0,1), 0, None )
        self.cvtest( "simple middle position",
            l, 3, 1, (0,1), 1, None )
        self.cvtest( "simple bottom postion",
            l, 3, 2, (0,1), 2, None )
        # a negative offset/inset means the focus straddles the top edge
        self.cvtest( "straddle top edge",
            l, 3, 0, (1,2), -1, None )
        self.cvtest( "straddle bottom edge",
            l, 3, 4, (0,1), 4, None )
        # out-of-range requests are clamped back into the window
        self.cvtest( "off bottom edge",
            l, 3, 5, (0,1), 4, None )
        self.cvtest( "way off bottom edge",
            l, 3, 100, (0,1), 4, None )
        self.cvtest( "gap at top",
            l, 0, 2, (0,1), 0, None )
        self.cvtest( "gap at top and off bottom edge",
            l, 2, 5, (0,1), 2, None )
        self.cvtest( "gap at bottom",
            l, 6, 1, (0,1), 4, None )
        self.cvtest( "gap at bottom and straddling top edge",
            l, 4, 0, (1,2), 1, None )
        self.cvtest( "gap at bottom cannot completely fill",
            [T(""),T(""),T("")], 1, 0, (0,1), 1, None )
        self.cvtest( "gap at top and bottom",
            [T(""),T(""),T("")], 1, 2, (0,1), 1, None )
    def test2_cursor(self):
        # Edit widgets supply a cursor; scrolling must keep it visible.
        T, E = urwid.Text, urwid.Edit
        l1 = [T(""),T(""),T("\n"),E("","\n\nX"),T("\n"),T(""),T("")]
        l2 = [T(""),T(""),T("\n"),E("","YY\n\n"),T("\n"),T(""),T("")]
        l2[3].set_edit_pos(2)
        self.cvtest( "plain cursor in view",
            l1, 3, 1, (0,1), 1, (1,3) )
        self.cvtest( "cursor off top",
            l2, 3, 0, (1,3), 0, (2, 0) )
        self.cvtest( "cursor further off top",
            l2, 3, 0, (2,3), 0, (2, 0) )
        self.cvtest( "cursor off bottom",
            l1, 3, 3, (0,1), 2, (1, 4) )
        self.cvtest( "cursor way off bottom",
            l1, 3, 100, (0,1), 2, (1, 4) )
class ListBoxChangeFocusTest(unittest.TestCase):
    """Tests for ListBox.change_focus on a 4x5 window: moving focus to a
    new position and snapping the view when the widget lies off-screen."""
    def cftest(self, desc, body, pos, offset_inset,
            coming_from, cursor, snap_rows,
            exp_offset_rows, exp_inset_fraction, exp_cur ):
        lbox = urwid.ListBox(body)
        lbox.change_focus( (4,5), pos, offset_inset, coming_from,
            cursor, snap_rows )
        exp = exp_offset_rows, exp_inset_fraction
        act = lbox.offset_rows, lbox.inset_fraction
        cursor = None
        focus_widget, focus_pos = lbox.body.get_focus()
        if focus_widget.selectable():
            # only selectable widgets with cursor support report a cursor
            if hasattr(focus_widget,'get_cursor_coords'):
                cursor=focus_widget.get_cursor_coords((4,))
        assert act == exp, "%s got: %s expected: %s" %(desc, act, exp)
        assert cursor == exp_cur, "%s (cursor) got: %r expected: %r" %(desc,cursor,exp_cur)
    def test1unselectable(self):
        T = urwid.Text
        l = [T("\n"),T("\n\n"),T("\n\n"),T("\n\n"),T("\n")]
        self.cftest( "simple unselectable",
            l, 2, 0, None, None, None, 0, (0,1), None )
        self.cftest( "unselectable",
            l, 2, 1, None, None, None, 1, (0,1), None )
        self.cftest( "unselectable off top",
            l, 2, -2, None, None, None, 0, (2,3), None )
        self.cftest( "unselectable off bottom",
            l, 3, 2, None, None, None, 2, (0,1), None )
    def test2selectable(self):
        T, S = urwid.Text, SelectableText
        l = [T("\n"),T("\n\n"),S("\n\n"),T("\n\n"),T("\n")]
        self.cftest( "simple selectable",
            l, 2, 0, None, None, None, 0, (0,1), None )
        self.cftest( "selectable",
            l, 2, 1, None, None, None, 1, (0,1), None )
        self.cftest( "selectable at top",
            l, 2, 0, 'below', None, None, 0, (0,1), None )
        self.cftest( "selectable at bottom",
            l, 2, 2, 'above', None, None, 2, (0,1), None )
        # with coming_from set, an off-screen focus snaps to the edge
        self.cftest( "selectable off top snap",
            l, 2, -1, 'below', None, None, 0, (0,1), None )
        self.cftest( "selectable off bottom snap",
            l, 2, 3, 'above', None, None, 2, (0,1), None )
        self.cftest( "selectable off top no snap",
            l, 2, -1, 'above', None, None, 0, (1,3), None )
        self.cftest( "selectable off bottom no snap",
            l, 2, 3, 'below', None, None, 3, (0,1), None )
    def test3large_selectable(self):
        # focus widgets taller than the window; snap_rows limits how far
        # the view may jump to bring them on screen
        T, S = urwid.Text, SelectableText
        l = [T("\n"),S("\n\n\n\n\n\n"),T("\n")]
        self.cftest( "large selectable no snap",
            l, 1, -1, None, None, None, 0, (1,7), None )
        self.cftest( "large selectable snap up",
            l, 1, -2, 'below', None, None, 0, (0,1), None )
        self.cftest( "large selectable snap up2",
            l, 1, -2, 'below', None, 2, 0, (0,1), None )
        self.cftest( "large selectable almost snap up",
            l, 1, -2, 'below', None, 1, 0, (2,7), None )
        self.cftest( "large selectable snap down",
            l, 1, 0, 'above', None, None, 0, (2,7), None )
        self.cftest( "large selectable snap down2",
            l, 1, 0, 'above', None, 2, 0, (2,7), None )
        self.cftest( "large selectable almost snap down",
            l, 1, 0, 'above', None, 1, 0, (0,1), None )
        m = [T("\n\n\n\n"), S("\n\n\n\n\n"), T("\n\n\n\n")]
        self.cftest( "large selectable outside view down",
            m, 1, 4, 'above', None, None, 0, (0,1), None )
        self.cftest( "large selectable outside view up",
            m, 1, -5, 'below', None, None, 0, (1,6), None )
    def test4cursor(self):
        # placeholder: cursor-specific change_focus cases not implemented
        T,E = urwid.Text, urwid.Edit
        #...
class ListBoxRenderTest(unittest.TestCase):
    """Rendering tests for ListBox on a 4x5 window: which rows appear and
    where the cursor lands after shifting the focus."""
    def ltest(self,desc,body,focus,offset_inset_rows,exp_text,exp_cur):
        exp_text = [B(t) for t in exp_text]
        lbox = urwid.ListBox(body)
        lbox.body.set_focus( focus )
        lbox.shift_focus((4,10), offset_inset_rows )
        canvas = lbox.render( (4,5), focus=1 )
        # flatten each canvas row to its byte content
        text = [bytes().join([t for at, cs, t in ln]) for ln in canvas.content()]
        cursor = canvas.cursor
        assert text == exp_text, "%s (text) got: %r expected: %r" %(desc,text,exp_text)
        assert cursor == exp_cur, "%s (cursor) got: %r expected: %r" %(desc,cursor,exp_cur)
    def test1Simple(self):
        T = urwid.Text
        self.ltest( "simple one text item render",
            [T("1\n2")], 0, 0,
            ["1 ","2 "," "," "," "],None)
        self.ltest( "simple multi text item render off bottom",
            [T("1"),T("2"),T("3\n4"),T("5"),T("6")], 2, 2,
            ["1 ","2 ","3 ","4 ","5 "],None)
        self.ltest( "simple multi text item render off top",
            [T("1"),T("2"),T("3\n4"),T("5"),T("6")], 2, 1,
            ["2 ","3 ","4 ","5 ","6 "],None)
    def test2Trim(self):
        # widgets partially outside the window are trimmed, including the
        # focus widget itself when it is taller than the window
        T = urwid.Text
        self.ltest( "trim unfocused bottom",
            [T("1\n2"),T("3\n4"),T("5\n6")], 1, 2,
            ["1 ","2 ","3 ","4 ","5 "],None)
        self.ltest( "trim unfocused top",
            [T("1\n2"),T("3\n4"),T("5\n6")], 1, 1,
            ["2 ","3 ","4 ","5 ","6 "],None)
        self.ltest( "trim none full focus",
            [T("1\n2\n3\n4\n5")], 0, 0,
            ["1 ","2 ","3 ","4 ","5 "],None)
        self.ltest( "trim focus bottom",
            [T("1\n2\n3\n4\n5\n6")], 0, 0,
            ["1 ","2 ","3 ","4 ","5 "],None)
        self.ltest( "trim focus top",
            [T("1\n2\n3\n4\n5\n6")], 0, -1,
            ["2 ","3 ","4 ","5 ","6 "],None)
        self.ltest( "trim focus top and bottom",
            [T("1\n2\n3\n4\n5\n6\n7")], 0, -1,
            ["2 ","3 ","4 ","5 ","6 "],None)
    def test3Shift(self):
        # shifting may be adjusted so an Edit widget's cursor stays inside
        # the window
        T,E = urwid.Text, urwid.Edit
        self.ltest( "shift up one fit",
            [T("1\n2"),T("3"),T("4"),T("5"),T("6")], 4, 5,
            ["2 ","3 ","4 ","5 ","6 "],None)
        e = E("","ab\nc",1)
        e.set_edit_pos( 2 )
        self.ltest( "shift down one cursor over edge",
            [e,T("3"),T("4"),T("5\n6")], 0, -1,
            ["ab ","c ","3 ","4 ","5 "], (2,0))
        self.ltest( "shift up one cursor over edge",
            [T("1\n2"),T("3"),T("4"),E("","d\ne")], 3, 4,
            ["2 ","3 ","4 ","d ","e "], (1,4))
        self.ltest( "shift none cursor top focus over edge",
            [E("","ab\n"),T("3"),T("4"),T("5\n6")], 0, -1,
            [" ","3 ","4 ","5 ","6 "], (0,0))
        e = E("","abc\nd")
        e.set_edit_pos( 3 )
        self.ltest( "shift none cursor bottom focus over edge",
            [T("1\n2"),T("3"),T("4"),e], 3, 4,
            ["1 ","2 ","3 ","4 ","abc "], (3,4))
class ListBoxKeypressTest(unittest.TestCase):
    def ktest(self, desc, key, body, focus, offset_inset,
            exp_focus, exp_offset_inset, exp_cur, lbox = None):
        # Send one key to a ListBox and verify the resulting focus
        # position, offset/inset and window-relative cursor.  Pass the
        # returned lbox back in (with body/focus/offset_inset = None) to
        # continue a multi-step scenario on the same widget.
        if lbox is None:
            lbox = urwid.ListBox(body)
            lbox.body.set_focus( focus )
            lbox.shift_focus((4,10), offset_inset )
        ret_key = lbox.keypress((4,5),key)
        middle, top, bottom = lbox.calculate_visible((4,5),focus=1)
        offset_inset, focus_widget, focus_pos, _ign, cursor = middle
        if cursor is not None:
            # translate cursor from widget-relative to window-relative
            x, y = cursor
            y += offset_inset
            cursor = x, y
        exp = exp_focus, exp_offset_inset
        act = focus_pos, offset_inset
        assert act == exp, "%s got: %r expected: %r" %(desc,act,exp)
        assert cursor == exp_cur, "%s (cursor) got: %r expected: %r" %(desc,cursor,exp_cur)
        return ret_key,lbox
    def test1_up(self):
        # 'up' keypress scenarios: focus moves to the nearest selectable
        # widget above, scrolling as needed; the key is returned unhandled
        # when there is nothing selectable above.
        T,S,E = urwid.Text, SelectableText, urwid.Edit
        self.ktest( "direct selectable both visible", 'up',
            [S(""),S("")], 1, 1,
            0, 0, None )
        self.ktest( "selectable skip one all visible", 'up',
            [S(""),T(""),S("")], 2, 2,
            0, 0, None )
        key,lbox = self.ktest( "nothing above no scroll", 'up',
            [S("")], 0, 0,
            0, 0, None )
        assert key == 'up'
        key, lbox = self.ktest( "unselectable above no scroll", 'up',
            [T(""),T(""),S("")], 2, 2,
            2, 2, None )
        assert key == 'up'
        self.ktest( "unselectable above scroll 1", 'up',
            [T(""),S(""),T("\n\n\n")], 1, 0,
            1, 1, None )
        self.ktest( "selectable above scroll 1", 'up',
            [S(""),S(""),T("\n\n\n")], 1, 0,
            0, 0, None )
        self.ktest( "selectable above too far", 'up',
            [S(""),T(""),S(""),T("\n\n\n")], 2, 0,
            2, 1, None )
        self.ktest( "selectable above skip 1 scroll 1", 'up',
            [S(""),T(""),S(""),T("\n\n\n")], 2, 1,
            0, 0, None )
        self.ktest( "tall selectable above scroll 2", 'up',
            [S(""),S("\n"),S(""),T("\n\n\n")], 2, 0,
            1, 0, None )
        self.ktest( "very tall selectable above scroll 5", 'up',
            [S(""),S("\n\n\n\n"),S(""),T("\n\n\n\n")], 2, 0,
            1, 0, None )
        self.ktest( "very tall selected scroll within 1", 'up',
            [S(""),S("\n\n\n\n\n")], 1, -1,
            1, 0, None )
        # Edit widgets: the cursor column is preserved as focus moves
        self.ktest( "edit above pass cursor", 'up',
            [E("","abc"),E("","de")], 1, 1,
            0, 0, (2, 0) )
        # two-step scenario continued on the same lbox
        key,lbox = self.ktest( "edit too far above pass cursor A", 'up',
            [E("","abc"),T("\n\n\n\n"),E("","de")], 2, 4,
            1, 0, None )
        self.ktest( "edit too far above pass cursor B", 'up',
            None, None, None,
            0, 0, (2,0), lbox )
        self.ktest( "within focus cursor made not visible", 'up',
            [T("\n\n\n"),E("hi\n","ab")], 1, 3,
            0, 0, None )
        self.ktest( "within focus cursor made not visible (2)", 'up',
            [T("\n\n\n\n"),E("hi\n","ab")], 1, 3,
            0, -1, None )
        self.ktest( "force focus unselectable" , 'up',
            [T("\n\n\n\n"),S("")], 1, 4,
            0, 0, None )
        self.ktest( "pathological cursor widget", 'up',
            [T("\n"),E("\n\n\n\n\n","a")], 1, 4,
            0, -1, None )
        self.ktest( "unselectable to unselectable", 'up',
            [T(""),T(""),T(""),T(""),T(""),T(""),T("")], 2, 0,
            1, 0, None )
        self.ktest( "unselectable over edge to same", 'up',
            [T(""),T("12\n34"),T(""),T(""),T(""),T("")],1,-1,
            1, 0, None )
        key,lbox = self.ktest( "edit short between pass cursor A", 'up',
            [E("","abcd"),E("","a"),E("","def")], 2, 2,
            1, 1, (1,1) )
        self.ktest( "edit short between pass cursor B", 'up',
            None, None, None,
            0, 0, (3,0), lbox )
        e = E("","\n\n\n\n\n")
        e.set_edit_pos(1)
        key,lbox = self.ktest( "edit cursor force scroll", 'up',
            [e], 0, -1,
            0, 0, (0,0) )
        assert lbox.inset_fraction[0] == 0
def test2_down(self):
    """Exercise ListBox handling of the 'down' keypress.

    The ktest helper is defined earlier in this file (not visible in this
    chunk); from the parallel 'up' tests its positional arguments appear
    to be: description, key, widget list, starting focus position,
    starting row offset, expected focus position, expected row offset and
    expected cursor coords (or None) -- TODO confirm against the ktest
    definition.  Calls that pass None for the widget list reuse the
    listbox returned by a previous ktest call (trailing argument).
    """
    T,S,E = urwid.Text, SelectableText, urwid.Edit
    self.ktest( "direct selectable both visible", 'down',
        [S(""),S("")], 0, 0,
        1, 1, None )
    self.ktest( "selectable skip one all visible", 'down',
        [S(""),T(""),S("")], 0, 0,
        2, 2, None )
    key,lbox = self.ktest( "nothing below no scroll", 'down',
        [S("")], 0, 0,
        0, 0, None )
    # with nothing below, the key should be returned unhandled
    assert key == 'down'
    key, lbox = self.ktest( "unselectable below no scroll", 'down',
        [S(""),T(""),T("")], 0, 0,
        0, 0, None )
    assert key == 'down'
    self.ktest( "unselectable below scroll 1", 'down',
        [T("\n\n\n"),S(""),T("")], 1, 4,
        1, 3, None )
    self.ktest( "selectable below scroll 1", 'down',
        [T("\n\n\n"),S(""),S("")], 1, 4,
        2, 4, None )
    self.ktest( "selectable below too far", 'down',
        [T("\n\n\n"),S(""),T(""),S("")], 1, 4,
        1, 3, None )
    self.ktest( "selectable below skip 1 scroll 1", 'down',
        [T("\n\n\n"),S(""),T(""),S("")], 1, 3,
        3, 4, None )
    self.ktest( "tall selectable below scroll 2", 'down',
        [T("\n\n\n"),S(""),S("\n"),S("")], 1, 4,
        2, 3, None )
    self.ktest( "very tall selectable below scroll 5", 'down',
        [T("\n\n\n\n"),S(""),S("\n\n\n\n"),S("")], 1, 4,
        2, 0, None )
    self.ktest( "very tall selected scroll within 1", 'down',
        [S("\n\n\n\n\n"),S("")], 0, 0,
        0, -1, None )
    self.ktest( "edit below pass cursor", 'down',
        [E("","de"),E("","abc")], 0, 0,
        1, 1, (2, 1) )
    key,lbox=self.ktest( "edit too far below pass cursor A", 'down',
        [E("","de"),T("\n\n\n\n"),E("","abc")], 0, 0,
        1, 0, None )
    self.ktest( "edit too far below pass cursor B", 'down',
        None, None, None,
        2, 4, (2,4), lbox )
    odd_e = E("","hi\nab")
    odd_e.set_edit_pos( 2 )
    # disable cursor movement in odd_e object
    odd_e.move_cursor_to_coords = lambda s,c,xy: 0
    self.ktest( "within focus cursor made not visible", 'down',
        [odd_e,T("\n\n\n\n")], 0, 0,
        1, 1, None )
    self.ktest( "within focus cursor made not visible (2)", 'down',
        [odd_e,T("\n\n\n\n"),], 0, 0,
        1, 1, None )
    self.ktest( "force focus unselectable" , 'down',
        [S(""),T("\n\n\n\n")], 0, 0,
        1, 0, None )
    odd_e.set_edit_text( "hi\n\n\n\n\n" )
    self.ktest( "pathological cursor widget", 'down',
        [odd_e,T("\n")], 0, 0,
        1, 4, None )
    self.ktest( "unselectable to unselectable", 'down',
        [T(""),T(""),T(""),T(""),T(""),T(""),T("")], 4, 4,
        5, 4, None )
    self.ktest( "unselectable over edge to same", 'down',
        [T(""),T(""),T(""),T(""),T("12\n34"),T("")],4,4,
        4, 3, None )
    key,lbox=self.ktest( "edit short between pass cursor A", 'down',
        [E("","abc"),E("","a"),E("","defg")], 0, 0,
        1, 1, (1,1) )
    self.ktest( "edit short between pass cursor B", 'down',
        None, None, None,
        2, 2, (3,2), lbox )
    e = E("","\n\n\n\n\n")
    e.set_edit_pos(4)
    key,lbox = self.ktest( "edit cursor force scroll", 'down',
        [e], 0, 0,
        0, -1, (0,4) )
    # forced scroll should leave the listbox pinned to the bottom
    assert lbox.inset_fraction[0] == 1
def test3_page_up(self):
    """Exercise ListBox handling of the 'page up' keypress.

    ktest's signature is not visible in this chunk; the positional
    arguments appear to be: description, key, widget list, starting
    focus position, starting row offset, expected focus position,
    expected row offset and expected cursor coords (or None) -- TODO
    confirm against the ktest definition.
    """
    T,S,E = urwid.Text, SelectableText, urwid.Edit
    self.ktest( "unselectable aligned to aligned", 'page up',
        [T(""),T("\n"),T("\n\n"),T(""),T("\n"),T("\n\n")], 3, 0,
        1, 0, None )
    self.ktest( "unselectable unaligned to aligned", 'page up',
        [T(""),T("\n"),T("\n"),T("\n"),T("\n"),T("\n\n")], 3,-1,
        1, 0, None )
    self.ktest( "selectable to unselectable", 'page up',
        [T(""),T("\n"),T("\n"),T("\n"),S("\n"),T("\n\n")], 4, 1,
        1, -1, None )
    self.ktest( "selectable to cut off selectable", 'page up',
        [S("\n\n"),T("\n"),T("\n"),S("\n"),T("\n\n")], 3, 1,
        0, -1, None )
    self.ktest( "seletable to selectable", 'page up',
        [T("\n\n"),S("\n"),T("\n"),S("\n"),T("\n\n")], 3, 1,
        1, 1, None )
    self.ktest( "within very long selectable", 'page up',
        [S(""),S("\n\n\n\n\n\n\n\n"),T("\n")], 1, -6,
        1, -1, None )
    e = E("","\n\nab\n\n\n\n\ncd\n")
    e.set_edit_pos(11)
    self.ktest( "within very long cursor widget", 'page up',
        [S(""),e,T("\n")], 1, -6,
        1, -2, (2, 0) )
    self.ktest( "pathological cursor widget", 'page up',
        [T(""),E("\n\n\n\n\n\n\n\n","ab"),T("")], 1, -5,
        0, 0, None )
    e = E("","\nab\n\n\n\n\ncd\n")
    e.set_edit_pos(10)
    self.ktest( "very long cursor widget snap", 'page up',
        [T(""),e,T("\n")], 1, -5,
        1, 0, (2, 1) )
    self.ktest( "slight scroll selectable", 'page up',
        [T("\n"),S("\n"),T(""),S(""),T("\n\n\n"),S("")], 5, 4,
        3, 0, None )
    self.ktest( "scroll into snap region", 'page up',
        [T("\n"),S("\n"),T(""),T(""),T("\n\n\n"),S("")], 5, 4,
        1, 0, None )
    self.ktest( "mid scroll short", 'page up',
        [T("\n"),T(""),T(""),S(""),T(""),T("\n"),S(""),T("\n")],
        6, 2, 3, 1, None )
    self.ktest( "mid scroll long", 'page up',
        [T("\n"),S(""),T(""),S(""),T(""),T("\n"),S(""),T("\n")],
        6, 2, 1, 0, None )
    self.ktest( "mid scroll perfect", 'page up',
        [T("\n"),S(""),S(""),S(""),T(""),T("\n"),S(""),T("\n")],
        6, 2, 2, 0, None )
    self.ktest( "cursor move up fail short", 'page up',
        [T("\n"),T("\n"),E("","\nab"),T(""),T("")], 2, 1,
        2, 4, (0, 4) )
    self.ktest( "cursor force fail short", 'page up',
        [T("\n"),T("\n"),E("\n","ab"),T(""),T("")], 2, 1,
        0, 0, None )
    odd_e = E("","hi\nab")
    odd_e.set_edit_pos( 2 )
    # disable cursor movement in odd_e object
    odd_e.move_cursor_to_coords = lambda s,c,xy: 0
    self.ktest( "cursor force fail long", 'page up',
        [odd_e,T("\n"),T("\n"),T("\n"),S(""),T("\n")], 4, 2,
        1, -1, None )
    self.ktest( "prefer not cut off", 'page up',
        [S("\n"),T("\n"),S(""),T("\n\n"),S(""),T("\n")], 4, 2,
        2, 1, None )
    self.ktest( "allow cut off", 'page up',
        [S("\n"),T("\n"),T(""),T("\n\n"),S(""),T("\n")], 4, 2,
        0, -1, None )
    self.ktest( "at top fail", 'page up',
        [T("\n\n"),T("\n"),T("\n\n\n")], 0, 0,
        0, 0, None )
    self.ktest( "all visible fail", 'page up',
        [T("a"),T("\n")], 0, 0,
        0, 0, None )
    self.ktest( "current ok fail", 'page up',
        [T("\n\n"),S("hi")], 1, 3,
        1, 3, None )
    self.ktest( "all visible choose top selectable", 'page up',
        [T(""),S("a"),S("b"),S("c")], 3, 3,
        1, 1, None )
    self.ktest( "bring in edge choose top", 'page up',
        [S("b"),T("-"),S("-"),T("c"),S("d"),T("-")],4,3,
        0, 0, None )
    self.ktest( "bring in edge choose top selectable", 'page up',
        [T("b"),S("-"),S("-"),T("c"),S("d"),T("-")],4,3,
        1, 1, None )
def test4_page_down(self):
    """Exercise ListBox handling of the 'page down' keypress.

    ktest's signature is not visible in this chunk; the positional
    arguments appear to be: description, key, widget list, starting
    focus position, starting row offset, expected focus position,
    expected row offset and expected cursor coords (or None) -- TODO
    confirm against the ktest definition.
    """
    T,S,E = urwid.Text, SelectableText, urwid.Edit
    self.ktest( "unselectable aligned to aligned", 'page down',
        [T("\n\n"),T("\n"),T(""),T("\n\n"),T("\n"),T("")], 2, 4,
        4, 3, None )
    self.ktest( "unselectable unaligned to aligned", 'page down',
        [T("\n\n"),T("\n"),T("\n"),T("\n"),T("\n"),T("")], 2, 4,
        4, 3, None )
    self.ktest( "selectable to unselectable", 'page down',
        [T("\n\n"),S("\n"),T("\n"),T("\n"),T("\n"),T("")], 1, 2,
        4, 4, None )
    self.ktest( "selectable to cut off selectable", 'page down',
        [T("\n\n"),S("\n"),T("\n"),T("\n"),S("\n\n")], 1, 2,
        4, 3, None )
    self.ktest( "seletable to selectable", 'page down',
        [T("\n\n"),S("\n"),T("\n"),S("\n"),T("\n\n")], 1, 1,
        3, 2, None )
    self.ktest( "within very long selectable", 'page down',
        [T("\n"),S("\n\n\n\n\n\n\n\n"),S("")], 1, 2,
        1, -3, None )
    e = E("","\nab\n\n\n\n\ncd\n\n")
    e.set_edit_pos(2)
    self.ktest( "within very long cursor widget", 'page down',
        [T("\n"),e,S("")], 1, 2,
        1, -2, (1, 4) )
    odd_e = E("","ab\n\n\n\n\n\n\n\n\n")
    odd_e.set_edit_pos( 1 )
    # disable cursor movement in odd_e object
    odd_e.move_cursor_to_coords = lambda s,c,xy: 0
    self.ktest( "pathological cursor widget", 'page down',
        [T(""),odd_e,T("")], 1, 1,
        2, 4, None )
    e = E("","\nab\n\n\n\n\ncd\n")
    e.set_edit_pos(2)
    self.ktest( "very long cursor widget snap", 'page down',
        [T("\n"),e,T("")], 1, 2,
        1, -3, (1, 3) )
    self.ktest( "slight scroll selectable", 'page down',
        [S(""),T("\n\n\n"),S(""),T(""),S("\n"),T("\n")], 0, 0,
        2, 4, None )
    self.ktest( "scroll into snap region", 'page down',
        [S(""),T("\n\n\n"),T(""),T(""),S("\n"),T("\n")], 0, 0,
        4, 3, None )
    self.ktest( "mid scroll short", 'page down',
        [T("\n"),S(""),T("\n"),T(""),S(""),T(""),T(""),T("\n")],
        1, 2, 4, 3, None )
    self.ktest( "mid scroll long", 'page down',
        [T("\n"),S(""),T("\n"),T(""),S(""),T(""),S(""),T("\n")],
        1, 2, 6, 4, None )
    self.ktest( "mid scroll perfect", 'page down',
        [T("\n"),S(""),T("\n"),T(""),S(""),S(""),S(""),T("\n")],
        1, 2, 5, 4, None )
    e = E("","hi\nab")
    e.set_edit_pos( 1 )
    self.ktest( "cursor move up fail short", 'page down',
        [T(""),T(""),e,T("\n"),T("\n")], 2, 1,
        2, -1, (1, 0) )
    odd_e = E("","hi\nab")
    odd_e.set_edit_pos( 1 )
    # disable cursor movement in odd_e object
    odd_e.move_cursor_to_coords = lambda s,c,xy: 0
    self.ktest( "cursor force fail short", 'page down',
        [T(""),T(""),odd_e,T("\n"),T("\n")], 2, 2,
        4, 3, None )
    self.ktest( "cursor force fail long", 'page down',
        [T("\n"),S(""),T("\n"),T("\n"),T("\n"),E("hi\n","ab")],
        1, 2, 4, 4, None )
    self.ktest( "prefer not cut off", 'page down',
        [T("\n"),S(""),T("\n\n"),S(""),T("\n"),S("\n")], 1, 2,
        3, 3, None )
    self.ktest( "allow cut off", 'page down',
        [T("\n"),S(""),T("\n\n"),T(""),T("\n"),S("\n")], 1, 2,
        5, 4, None )
    self.ktest( "at bottom fail", 'page down',
        [T("\n\n"),T("\n"),T("\n\n\n")], 2, 1,
        2, 1, None )
    self.ktest( "all visible fail", 'page down',
        [T("a"),T("\n")], 1, 1,
        1, 1, None )
    self.ktest( "current ok fail", 'page down',
        [S("hi"),T("\n\n")], 0, 0,
        0, 0, None )
    self.ktest( "all visible choose last selectable", 'page down',
        [S("a"),S("b"),S("c"),T("")], 0, 0,
        2, 2, None )
    self.ktest( "bring in edge choose last", 'page down',
        [T("-"),S("d"),T("c"),S("-"),T("-"),S("b")],1,1,
        5,4, None )
    self.ktest( "bring in edge choose last selectable", 'page down',
        [T("-"),S("d"),T("c"),S("-"),S("-"),T("b")],1,1,
        4,3, None )
class PaddingTest(unittest.TestCase):
    """Tests for urwid.Padding: padding_values() math, constructor
    validation and cursor coordinate mapping."""

    def ptest(self, desc, align, width, maxcol, left, right, min_width=None):
        """Assert that a Padding configured with (align, width, min_width)
        produces (left, right) padding columns when rendered maxcol wide."""
        p = urwid.Padding(None, align, width, min_width)
        l, r = p.padding_values((maxcol,), False)
        assert (l, r) == (left, right), "%s expected %s but got %s" % (
            desc, (left, right), (l, r))

    def petest(self, desc, align, width):
        """Assert that constructing a Padding with invalid align/width
        parameters raises urwid.PaddingError."""
        try:
            urwid.Padding(None, align, width)
        # Fixed: was Python-2-only "except urwid.PaddingError, e:" with an
        # unused binding; the bare form is valid in both Python 2 and 3.
        except urwid.PaddingError:
            return
        assert 0, "%s expected error!" % desc

    def test_create(self):
        self.petest("invalid pad", 6, 5)
        self.petest("invalid pad type", ('bad', 2), 5)
        self.petest("invalid width", 'center', '42')
        self.petest("invalid width type", 'center', ('gouranga', 4))

    def test_values(self):
        self.ptest("left align 5 7",'left',5,7,0,2)
        self.ptest("left align 7 7",'left',7,7,0,0)
        self.ptest("left align 9 7",'left',9,7,0,0)
        self.ptest("right align 5 7",'right',5,7,2,0)
        self.ptest("center align 5 7",'center',5,7,1,1)
        self.ptest("fixed left",('fixed left',3),5,10,3,2)
        self.ptest("fixed left reduce",('fixed left',3),8,10,2,0)
        self.ptest("fixed left shrink",('fixed left',3),18,10,0,0)
        self.ptest("fixed left, right",
            ('fixed left',3),('fixed right',4),17,3,4)
        self.ptest("fixed left, right, min_width",
            ('fixed left',3),('fixed right',4),10,3,2,5)
        self.ptest("fixed left, right, min_width 2",
            ('fixed left',3),('fixed right',4),10,2,0,8)
        self.ptest("fixed right",('fixed right',3),5,10,2,3)
        self.ptest("fixed right reduce",('fixed right',3),8,10,0,2)
        self.ptest("fixed right shrink",('fixed right',3),18,10,0,0)
        self.ptest("fixed right, left",
            ('fixed right',3),('fixed left',4),17,4,3)
        self.ptest("fixed right, left, min_width",
            ('fixed right',3),('fixed left',4),10,2,3,5)
        self.ptest("fixed right, left, min_width 2",
            ('fixed right',3),('fixed left',4),10,0,2,8)
        self.ptest("relative 30",('relative',30),5,10,1,4)
        self.ptest("relative 50",('relative',50),5,10,2,3)
        self.ptest("relative 130 edge",('relative',130),5,10,5,0)
        self.ptest("relative -10 edge",('relative',-10),4,10,0,6)
        self.ptest("center relative 70",'center',('relative',70),
            10,1,2)
        self.ptest("center relative 70 grow 8",'center',('relative',70),
            10,1,1,8)

    def mctest(self, desc, left, right, size, cx, innercx):
        """Assert that move_cursor_to_coords clamps the x coordinate to
        innercx inside the padded child widget."""
        class Inner:
            def __init__(self, desc, innercx):
                self.desc = desc
                self.innercx = innercx
            def move_cursor_to_coords(self, size, cx, cy):
                assert cx == self.innercx, desc
        i = Inner(desc, innercx)
        p = urwid.Padding(i, ('fixed left', left),
            ('fixed right', right))
        p.move_cursor_to_coords(size, cx, 0)

    def test_cursor(self):
        self.mctest("cursor left edge",2,2,(10,2),2,0)
        self.mctest("cursor left edge-1",2,2,(10,2),1,0)
        self.mctest("cursor right edge",2,2,(10,2),7,5)
        self.mctest("cursor right edge+1",2,2,(10,2),8,5)
class FillerTest(unittest.TestCase):
    """Tests for urwid.Filler: filler_values() math and constructor
    validation (the vertical counterpart of PaddingTest)."""

    def ftest(self, desc, valign, height, maxrow, top, bottom,
            min_height=None):
        """Assert that a Filler configured with (valign, height, min_height)
        produces (top, bottom) blank rows when rendered maxrow tall."""
        f = urwid.Filler(None, valign, height, min_height)
        t, b = f.filler_values((20, maxrow), False)
        assert (t, b) == (top, bottom), "%s expected %s but got %s" % (
            desc, (top, bottom), (t, b))

    def fetest(self, desc, valign, height):
        """Assert that constructing a Filler with invalid valign/height
        parameters raises urwid.FillerError."""
        try:
            urwid.Filler(None, valign, height)
        # Fixed: was Python-2-only "except urwid.FillerError, e:" with an
        # unused binding; the bare form is valid in both Python 2 and 3.
        except urwid.FillerError:
            return
        assert 0, "%s expected error!" % desc

    def test_create(self):
        self.fetest("invalid pad", 6, 5)
        self.fetest("invalid pad type", ('bad', 2), 5)
        self.fetest("invalid width", 'middle', '42')
        self.fetest("invalid width type", 'middle', ('gouranga', 4))
        self.fetest("invalid combination", ('relative', 20),
            ('fixed bottom', 4))
        self.fetest("invalid combination 2", ('relative', 20),
            ('fixed top', 4))

    def test_values(self):
        self.ftest("top align 5 7",'top',5,7,0,2)
        self.ftest("top align 7 7",'top',7,7,0,0)
        self.ftest("top align 9 7",'top',9,7,0,0)
        self.ftest("bottom align 5 7",'bottom',5,7,2,0)
        self.ftest("middle align 5 7",'middle',5,7,1,1)
        self.ftest("fixed top",('fixed top',3),5,10,3,2)
        self.ftest("fixed top reduce",('fixed top',3),8,10,2,0)
        self.ftest("fixed top shrink",('fixed top',3),18,10,0,0)
        self.ftest("fixed top, bottom",
            ('fixed top',3),('fixed bottom',4),17,3,4)
        self.ftest("fixed top, bottom, min_width",
            ('fixed top',3),('fixed bottom',4),10,3,2,5)
        self.ftest("fixed top, bottom, min_width 2",
            ('fixed top',3),('fixed bottom',4),10,2,0,8)
        self.ftest("fixed bottom",('fixed bottom',3),5,10,2,3)
        self.ftest("fixed bottom reduce",('fixed bottom',3),8,10,0,2)
        self.ftest("fixed bottom shrink",('fixed bottom',3),18,10,0,0)
        self.ftest("fixed bottom, top",
            ('fixed bottom',3),('fixed top',4),17,4,3)
        self.ftest("fixed bottom, top, min_height",
            ('fixed bottom',3),('fixed top',4),10,2,3,5)
        self.ftest("fixed bottom, top, min_height 2",
            ('fixed bottom',3),('fixed top',4),10,0,2,8)
        self.ftest("relative 30",('relative',30),5,10,1,4)
        self.ftest("relative 50",('relative',50),5,10,2,3)
        self.ftest("relative 130 edge",('relative',130),5,10,5,0)
        self.ftest("relative -10 edge",('relative',-10),4,10,0,6)
        self.ftest("middle relative 70",'middle',('relative',70),
            10,1,2)
        self.ftest("middle relative 70 grow 8",'middle',('relative',70),
            10,1,1,8)
class FrameTest(unittest.TestCase):
    """Tests for urwid.Frame.frame_top_bottom(): how header/footer rows
    are allotted when space is scarce and how focus affects the split."""

    def ftbtest(self, desc, focus_part, header_rows, footer_rows, size,
            focus, top, bottom):
        """Build a Frame with fake header/footer widgets reporting fixed row
        counts, then assert frame_top_bottom returns the expected split.

        FakeWidget also verifies that rows() is called with focus=True only
        for the part named by focus_part (and only when focus is True).
        """
        class FakeWidget:
            def __init__(self, rows, want_focus):
                self.ret_rows = rows
                self.want_focus = want_focus
            def rows(self, size, focus=False):
                assert self.want_focus == focus
                return self.ret_rows
        header = footer = None
        if header_rows:
            header = FakeWidget(header_rows,
                focus and focus_part == 'header')
        if footer_rows:
            footer = FakeWidget(footer_rows,
                focus and focus_part == 'footer')
        f = urwid.Frame(None, header, footer, focus_part)
        rval = f.frame_top_bottom(size, focus)
        # frame_top_bottom returns ((top, bottom), (orig_header, orig_footer))
        exp = (top, bottom), (header_rows, footer_rows)
        assert exp == rval, "%s expected %r but got %r"%(
            desc,exp,rval)
    def test(self):
        self.ftbtest("simple", 'body', 0, 0, (9, 10), True, 0, 0)
        self.ftbtest("simple h", 'body', 3, 0, (9, 10), True, 3, 0)
        self.ftbtest("simple f", 'body', 0, 3, (9, 10), True, 0, 3)
        self.ftbtest("simple hf", 'body', 3, 3, (9, 10), True, 3, 3)
        self.ftbtest("almost full hf", 'body', 4, 5, (9, 10),
            True, 4, 5)
        self.ftbtest("full hf", 'body', 5, 5, (9, 10),
            True, 4, 5)
        self.ftbtest("x full h+1f", 'body', 6, 5, (9, 10),
            False, 4, 5)
        self.ftbtest("full h+1f", 'body', 6, 5, (9, 10),
            True, 4, 5)
        self.ftbtest("full hf+1", 'body', 5, 6, (9, 10),
            True, 3, 6)
        self.ftbtest("F full h+1f", 'footer', 6, 5, (9, 10),
            True, 5, 5)
        self.ftbtest("F full hf+1", 'footer', 5, 6, (9, 10),
            True, 4, 6)
        self.ftbtest("F full hf+5", 'footer', 5, 11, (9, 10),
            True, 0, 10)
        self.ftbtest("full hf+5", 'body', 5, 11, (9, 10),
            True, 0, 9)
        self.ftbtest("H full hf+1", 'header', 5, 6, (9, 10),
            True, 5, 5)
        self.ftbtest("H full h+1f", 'header', 6, 5, (9, 10),
            True, 6, 4)
        self.ftbtest("H full h+5f", 'header', 11, 5, (9, 10),
            True, 10, 0)
class PileTest(unittest.TestCase):
    """Tests for urwid.Pile keypress handling: focus movement between
    selectable children and preferred-column tracking."""

    def ktest(self, desc, l, focus_item, key,
            rkey, rfocus, rpref_col):
        """Send key to a Pile built from widget list l with focus_item
        focused; assert the returned (unhandled) key, the resulting focus
        index and the resulting preferred column."""
        p = urwid.Pile( l, focus_item )
        rval = p.keypress( (20,), key )
        assert rkey == rval, "%s key expected %r but got %r" %(
            desc, rkey, rval)
        new_focus = l.index(p.get_focus())
        assert new_focus == rfocus, "%s focus expected %r but got %r" %(
            desc, rfocus, new_focus)
        new_pref = p.get_pref_col((20,))
        assert new_pref == rpref_col, (
            "%s pref_col expected %r but got %r" % (
            desc, rpref_col, new_pref))
    def test_select_change(self):
        T,S,E = urwid.Text, SelectableText, urwid.Edit
        self.ktest("simple up", [S("")], 0, "up", "up", 0, 0)
        self.ktest("simple down", [S("")], 0, "down", "down", 0, 0)
        self.ktest("ignore up", [T(""),S("")], 1, "up", "up", 1, 0)
        self.ktest("ignore down", [S(""),T("")], 0, "down",
            "down", 0, 0)
        self.ktest("step up", [S(""),S("")], 1, "up", None, 0, 0)
        self.ktest("step down", [S(""),S("")], 0, "down",
            None, 1, 0)
        self.ktest("skip step up", [S(""),T(""),S("")], 2, "up",
            None, 0, 0)
        self.ktest("skip step down", [S(""),T(""),S("")], 0, "down",
            None, 2, 0)
        self.ktest("pad skip step up", [T(""),S(""),T(""),S("")], 3,
            "up", None, 1, 0)
        self.ktest("pad skip step down", [S(""),T(""),S(""),T("")], 0,
            "down", None, 2, 0)
        self.ktest("padi skip step up", [S(""),T(""),S(""),T(""),S("")],
            4, "up", None, 2, 0)
        self.ktest("padi skip step down", [S(""),T(""),S(""),T(""),
            S("")], 0, "down", None, 2, 0)
        # an Edit with its cursor moved establishes a preferred column
        # that should be carried to the newly focused widget
        e = E("","abcd", edit_pos=1)
        e.keypress((20,),"right") # set a pref_col
        self.ktest("pref step up", [S(""),T(""),e], 2, "up",
            None, 0, 2)
        self.ktest("pref step down", [e,T(""),S("")], 0, "down",
            None, 2, 2)
        z = E("","1234")
        self.ktest("prefx step up", [z,T(""),e], 2, "up",
            None, 0, 2)
        assert z.get_pref_col((20,)) == 2
        z = E("","1234")
        self.ktest("prefx step down", [e,T(""),z], 0, "down",
            None, 2, 2)
        assert z.get_pref_col((20,)) == 2
class ColumnsTest(unittest.TestCase):
    """Tests for urwid.Columns: width allocation and cursor placement."""

    def cwtest(self, desc, l, divide, size, exp):
        """Assert that Columns(l, divide) splits the given size into the
        expected list of column widths."""
        c = urwid.Columns( l, divide )
        rval = c.column_widths( size )
        assert rval == exp, "%s expected %s, got %s"%(desc,exp,rval)
    def test_widths(self):
        x = urwid.Text("") # sample "column"
        self.cwtest( "simple 1", [x], 0, (20,), [20] )
        self.cwtest( "simple 2", [x,x], 0, (20,), [10,10] )
        self.cwtest( "simple 2+1", [x,x], 1, (20,), [10,9] )
        self.cwtest( "simple 3+1", [x,x,x], 1, (20,), [6,6,6] )
        self.cwtest( "simple 3+2", [x,x,x], 2, (20,), [5,6,5] )
        self.cwtest( "simple 3+2", [x,x,x], 2, (21,), [6,6,5] )
        self.cwtest( "simple 4+1", [x,x,x,x], 1, (25,), [6,5,6,5] )
        self.cwtest( "squish 4+1", [x,x,x,x], 1, (7,), [1,1,1,1] )
        # when there is not enough room, trailing columns are dropped
        self.cwtest( "squish 4+1", [x,x,x,x], 1, (6,), [1,2,1] )
        self.cwtest( "squish 4+1", [x,x,x,x], 1, (4,), [2,1] )
        self.cwtest( "fixed 3", [('fixed',4,x),('fixed',6,x),
            ('fixed',2,x)], 1, (25,), [4,6,2] )
        self.cwtest( "fixed 3 cut", [('fixed',4,x),('fixed',6,x),
            ('fixed',2,x)], 1, (13,), [4,6] )
        self.cwtest( "fixed 3 cut2", [('fixed',4,x),('fixed',6,x),
            ('fixed',2,x)], 1, (10,), [4] )
        self.cwtest( "mixed 4", [('weight',2,x),('fixed',5,x),
            x, ('weight',3,x)], 1, (14,), [2,5,1,3] )
        self.cwtest( "mixed 4 a", [('weight',2,x),('fixed',5,x),
            x, ('weight',3,x)], 1, (12,), [1,5,1,2] )
        self.cwtest( "mixed 4 b", [('weight',2,x),('fixed',5,x),
            x, ('weight',3,x)], 1, (10,), [2,5,1] )
        self.cwtest( "mixed 4 c", [('weight',2,x),('fixed',5,x),
            x, ('weight',3,x)], 1, (20,), [4,5,2,6] )
    def mctest(self, desc, l, divide, size, col, row, exp, f_col, pref_col):
        """Assert that move_cursor_to_coords returns exp, focuses column
        f_col and records pref_col as the preferred column."""
        c = urwid.Columns( l, divide )
        rval = c.move_cursor_to_coords( size, col, row )
        assert rval == exp, "%s expected %r, got %r"%(desc,exp,rval)
        assert c.focus_col == f_col, "%s expected focus_col %s got %s"%(
            desc, f_col, c.focus_col)
        pc = c.get_pref_col( size )
        assert pc == pref_col, "%s expected pref_col %s, got %s"%(
            desc, pref_col, pc)
    def test_move_cursor(self):
        e, s, x = urwid.Edit("",""),SelectableText(""), urwid.Text("")
        self.mctest("nothing selectbl",[x,x,x],1,(20,),9,0,False,0,None)
        self.mctest("dead on",[x,s,x],1,(20,),9,0,True,1,9)
        self.mctest("l edge",[x,s,x],1,(20,),6,0,True,1,6)
        self.mctest("r edge",[x,s,x],1,(20,),13,0,True,1,13)
        self.mctest("l off",[x,s,x],1,(20,),2,0,True,1,2)
        self.mctest("r off",[x,s,x],1,(20,),17,0,True,1,17)
        self.mctest("l off 2",[x,x,s],1,(20,),2,0,True,2,2)
        self.mctest("r off 2",[s,x,x],1,(20,),17,0,True,0,17)
        self.mctest("l between",[s,s,x],1,(20,),6,0,True,0,6)
        self.mctest("r between",[x,s,s],1,(20,),13,0,True,1,13)
        self.mctest("l between 2l",[s,s,x],2,(22,),6,0,True,0,6)
        self.mctest("r between 2l",[x,s,s],2,(22,),14,0,True,1,14)
        self.mctest("l between 2r",[s,s,x],2,(22,),7,0,True,1,7)
        self.mctest("r between 2r",[x,s,s],2,(22,),15,0,True,2,15)
        # unfortunate pref_col shifting
        self.mctest("l e edge",[x,e,x],1,(20,),6,0,True,1,7)
        self.mctest("r e edge",[x,e,x],1,(20,),13,0,True,1,12)
class LineBoxTest(unittest.TestCase):
    """Tests for urwid.LineBox border rendering."""

    def border(self, tl, t, tr, l, r, bl, b, br):
        """Return the three expected canvas rows for a 3x3 LineBox:
        top edge, middle row (one blank cell), bottom edge."""
        rows = [[tl, t, tr],
                [l, B(" "), r],
                [bl, b, br]]
        return [bytes().join(row) for row in rows]

    def test_linebox_border(self):
        urwid.set_encoding("utf-8")
        inner = urwid.Text("")

        # Default border: unicode light box-drawing characters
        # (corners, horizontal and vertical lines as UTF-8 bytes).
        rendered = urwid.LineBox(inner).render((3,)).text
        self.assertEqual(rendered,
            self.border(B("\xe2\x94\x8c"), B("\xe2\x94\x80"),
            B("\xe2\x94\x90"), B("\xe2\x94\x82"), B("\xe2\x94\x82"),
            B("\xe2\x94\x94"), B("\xe2\x94\x80"), B("\xe2\x94\x98")))

        # Custom border: each of the eight pieces replaced by a digit
        # passed through the corresponding keyword argument.
        digits = [B(str(n)) for n in range(8)]
        pieces = dict(zip(["tlcorner", "tline", "trcorner", "lline",
            "rline", "blcorner", "bline", "brcorner"], digits))
        rendered = urwid.LineBox(inner, **pieces).render((3,)).text
        self.assertEqual(rendered, self.border(*digits))
class BarGraphTest(unittest.TestCase):
    """Tests for calculate_bargraph_display (imported elsewhere in this
    file), which converts bar data into (rows, run-list) display tuples."""

    def bgtest(self, desc, data, top, widths, maxrow, exp ):
        """Assert the display computed for (data, top, widths, maxrow)
        matches exp, a list of (row_count, [(attr, width), ...]) tuples."""
        rval = calculate_bargraph_display(data,top,widths,maxrow)
        assert rval == exp, "%s expected %r, got %r"%(desc,exp,rval)
    def test1(self):
        # single-segment bars
        self.bgtest('simplest',[[0]],5,[1],1,
            [(1,[(0,1)])] )
        self.bgtest('simpler',[[0],[0]],5,[1,2],5,
            [(5,[(0,3)])] )
        self.bgtest('simple',[[5]],5,[1],1,
            [(1,[(1,1)])] )
        self.bgtest('2col-1',[[2],[0]],5,[1,2],5,
            [(3,[(0,3)]), (2,[(1,1),(0,2)]) ] )
        self.bgtest('2col-2',[[0],[2]],5,[1,2],5,
            [(3,[(0,3)]), (2,[(0,1),(1,2)]) ] )
        self.bgtest('2col-3',[[2],[3]],5,[1,2],5,
            [(2,[(0,3)]), (1,[(0,1),(1,2)]), (2,[(1,3)]) ] )
        self.bgtest('3col-1',[[5],[3],[0]],5,[2,1,1],5,
            [(2,[(1,2),(0,2)]), (3,[(1,3),(0,1)]) ] )
        self.bgtest('3col-2',[[4],[4],[4]],5,[2,1,1],5,
            [(1,[(0,4)]), (4,[(1,4)]) ] )
        self.bgtest('3col-3',[[1],[2],[3]],5,[2,1,1],5,
            [(2,[(0,4)]), (1,[(0,3),(1,1)]), (1,[(0,2),(1,2)]),
             (1,[(1,4)]) ] )
        self.bgtest('3col-4',[[4],[2],[4]],5,[1,2,1],5,
            [(1,[(0,4)]), (2,[(1,1),(0,2),(1,1)]), (2,[(1,4)]) ] )
    def test2(self):
        # stacked (multi-segment) bars
        self.bgtest('simple1a',[[2,0],[2,1]],2,[1,1],2,
            [(1,[(1,2)]),(1,[(1,1),(2,1)]) ] )
        self.bgtest('simple1b',[[2,1],[2,0]],2,[1,1],2,
            [(1,[(1,2)]),(1,[(2,1),(1,1)]) ] )
        self.bgtest('cross1a',[[2,2],[1,2]],2,[1,1],2,
            [(2,[(2,2)]) ] )
        self.bgtest('cross1b',[[1,2],[2,2]],2,[1,1],2,
            [(2,[(2,2)]) ] )
        self.bgtest('mix1a',[[3,2,1],[2,2,2],[1,2,3]],3,[1,1,1],3,
            [(1,[(1,1),(0,1),(3,1)]),(1,[(2,1),(3,2)]),
             (1,[(3,3)]) ] )
        self.bgtest('mix1b',[[1,2,3],[2,2,2],[3,2,1]],3,[1,1,1],3,
            [(1,[(3,1),(0,1),(1,1)]),(1,[(3,2),(2,1)]),
             (1,[(3,3)]) ] )
class SmoothBarGraphTest(unittest.TestCase):
    """Tests for urwid.BarGraph smoothed display: fractional bar tops are
    rendered with 'satt' attribute pairs like (1, 0, 6)."""

    def sbgtest(self, desc, data, top, exp ):
        """Build a 3-attribute BarGraph with smoothing pairs, set the data
        and assert the computed (5, 3) display matches exp."""
        urwid.set_encoding('utf-8')
        g = urwid.BarGraph( ['black','red','blue'],
            None, {(1,0):'red/black', (2,1):'blue/red'})
        g.set_data( data, top )
        rval = g.calculate_display((5,3))
        assert rval == exp, "%s expected %r, got %r"%(desc,exp,rval)
    def test1(self):
        self.sbgtest('simple', [[3]], 5,
            [(1, [(0, 5)]), (1, [((1, 0, 6), 5)]), (1, [(1, 5)])] )
        self.sbgtest('boring', [[4,2]], 6,
            [(1, [(0, 5)]), (1, [(1, 5)]), (1, [(2,5)]) ] )
        self.sbgtest('two', [[4],[2]], 6,
            [(1, [(0, 5)]), (1, [(1, 3), (0, 2)]), (1, [(1, 5)]) ] )
        self.sbgtest('twos', [[3],[4]], 6,
            [(1, [(0, 5)]), (1, [((1,0,4), 3), (1, 2)]), (1, [(1,5)]) ] )
        self.sbgtest('twof', [[4],[3]], 6,
            [(1, [(0, 5)]), (1, [(1,3), ((1,0,4), 2)]), (1, [(1,5)]) ] )
class CanvasJoinTest(unittest.TestCase):
    """Tests for urwid.CanvasJoin: horizontal joining of text canvases,
    including gap padding and uneven row counts."""

    def cjtest(self, desc, l, expected):
        """Join canvases from l (list of (canvas, width) pairs) and assert
        the resulting content run-lists equal expected."""
        l = [(c, None, False, n) for c, n in l]
        result = list(urwid.CanvasJoin(l).content())
        assert result == expected, "%s expected %r, got %r"%(
            desc, expected, result)
    def test(self):
        C = urwid.TextCanvas
        hello = C([B("hello")])
        there = C([B("there")], [[("a",5)]])
        a = C([B("a")])
        hi = C([B("hi")])
        how = C([B("how")], [[("a",1)]])
        dy = C([B("dy")])
        how_you = C([B("how"), B("you")])
        self.cjtest("one", [(hello, 5)],
            [[(None, None, B("hello"))]])
        self.cjtest("two", [(hello, 5), (there, 5)],
            [[(None, None, B("hello")), ("a", None, B("there"))]])
        # widths larger than the canvas insert blank padding runs
        self.cjtest("two space", [(hello, 7), (there, 5)],
            [[(None, None, B("hello")),(None,None,B("  ")),
            ("a", None, B("there"))]])
        self.cjtest("three space", [(hi, 4), (how, 3), (dy, 2)],
            [[(None, None, B("hi")),(None,None,B("  ")),("a",None, B("h")),
            (None,None,B("ow")),(None,None,B("dy"))]])
        self.cjtest("four space", [(a, 2), (hi, 3), (dy, 3), (a, 1)],
            [[(None, None, B("a")),(None,None,B(" ")),
            (None, None, B("hi")),(None,None,B(" ")),
            (None, None, B("dy")),(None,None,B(" ")),
            (None, None, B("a"))]])
        # shorter canvases are padded with blank rows to match the tallest
        self.cjtest("pile 2", [(how_you, 4), (hi, 2)],
            [[(None, None, B('how')), (None, None, B(' ')),
            (None, None, B('hi'))],
            [(None, None, B('you')), (None, None, B(' ')),
            (None, None, B('  '))]])
        self.cjtest("pile 2r", [(hi, 4), (how_you, 3)],
            [[(None, None, B('hi')), (None, None, B('  ')),
            (None, None, B('how'))],
            [(None, None, B('    ')),
            (None, None, B('you'))]])
class CanvasOverlayTest(unittest.TestCase):
    """Tests for CompositeCanvas.overlay(): placing a foreground canvas
    over a background at a horizontal offset, including attribute
    splitting and double-byte encoding edge handling."""

    def cotest(self, desc, bgt, bga, fgt, fga, l, r, et):
        """Overlay fg text/attrs onto bg text/attrs with l columns to the
        left and r to the right; assert the merged content equals et."""
        bgt = B(bgt)
        fgt = B(fgt)
        bg = urwid.CompositeCanvas(
            urwid.TextCanvas([bgt],[bga]))
        fg = urwid.CompositeCanvas(
            urwid.TextCanvas([fgt],[fga]))
        bg.overlay(fg, l, 0)
        result = list(bg.content())
        assert result == et, "%s expected %r, got %r"%(
            desc, et, result)
    def test1(self):
        # position-only overlays, no attributes
        self.cotest("left", "qxqxqxqx", [], "HI", [], 0, 6,
            [[(None, None, B("HI")),(None,None,B("qxqxqx"))]])
        self.cotest("right", "qxqxqxqx", [], "HI", [], 6, 0,
            [[(None, None, B("qxqxqx")),(None,None,B("HI"))]])
        self.cotest("center", "qxqxqxqx", [], "HI", [], 3, 3,
            [[(None, None, B("qxq")),(None,None,B("HI")),
            (None,None,B("xqx"))]])
        self.cotest("center2", "qxqxqxqx", [], "HI  ", [], 2, 2,
            [[(None, None, B("qx")),(None,None,B("HI  ")),
            (None,None,B("qx"))]])
        self.cotest("full", "rz", [], "HI", [], 0, 0,
            [[(None, None, B("HI"))]])
    def test2(self):
        # attribute runs must be split/merged around the overlay
        self.cotest("same","asdfghjkl",[('a',9)],"HI",[('a',2)],4,3,
            [[('a',None,B("asdf")),('a',None,B("HI")),('a',None,B("jkl"))]])
        self.cotest("diff","asdfghjkl",[('a',9)],"HI",[('b',2)],4,3,
            [[('a',None,B("asdf")),('b',None,B("HI")),('a',None,B("jkl"))]])
        self.cotest("None end","asdfghjkl",[('a',9)],"HI  ",[('a',2)],
            2,3,
            [[('a',None,B("as")),('a',None,B("HI")),
            (None,None,B("  ")),('a',None,B("jkl"))]])
        self.cotest("float end","asdfghjkl",[('a',3)],"HI",[('a',2)],
            4,3,
            [[('a',None,B("asd")),(None,None,B("f")),
            ('a',None,B("HI")),(None,None,B("jkl"))]])
        self.cotest("cover 2","asdfghjkl",[('a',5),('c',4)],"HI",
            [('b',2)],4,3,
            [[('a',None,B("asdf")),('b',None,B("HI")),('c',None,B("jkl"))]])
        self.cotest("cover 2-2","asdfghjkl",
            [('a',4),('d',1),('e',1),('c',3)],
            "HI",[('b',2)], 4, 3,
            [[('a',None,B("asdf")),('b',None,B("HI")),('c',None,B("jkl"))]])
    def test3(self):
        # double-byte encoding: an overlay edge that would split a
        # two-byte character replaces it with a space
        urwid.set_encoding("euc-jp")
        self.cotest("db0","\xA1\xA1\xA1\xA1\xA1\xA1",[],"HI",[],2,2,
            [[(None,None,B("\xA1\xA1")),(None,None,B("HI")),
            (None,None,B("\xA1\xA1"))]])
        self.cotest("db1","\xA1\xA1\xA1\xA1\xA1\xA1",[],"OHI",[],1,2,
            [[(None,None,B(" ")),(None,None,B("OHI")),
            (None,None,B("\xA1\xA1"))]])
        self.cotest("db2","\xA1\xA1\xA1\xA1\xA1\xA1",[],"OHI",[],2,1,
            [[(None,None,B("\xA1\xA1")),(None,None,B("OHI")),
            (None,None,B(" "))]])
        self.cotest("db3","\xA1\xA1\xA1\xA1\xA1\xA1",[],"OHIO",[],1,1,
            [[(None,None,B(" ")),(None,None,B("OHIO")),(None,None,B(" "))]])
class CanvasPadTrimTest(unittest.TestCase):
    """Tests for CompositeCanvas.pad_trim_left_right(): positive values
    pad with blank runs, negative values trim columns."""

    def cptest(self, desc, ct, ca, l, r, et):
        """Pad/trim a one-row canvas by l (left) and r (right) columns
        and assert the resulting content run-list equals et."""
        canv = urwid.CompositeCanvas(
            urwid.TextCanvas([B(ct)], [ca]))
        canv.pad_trim_left_right(l, r)
        got = list(canv.content())
        assert got == et, "%s expected %r, got %r" % (desc, et, got)

    def test1(self):
        # non-negative values: pad with blank runs
        cases = [
            ("none", 0, 0, [[(None, None, B("asdf"))]]),
            ("left pad", 2, 0,
                [[(None, None, B("  ")), (None, None, B("asdf"))]]),
            ("right pad", 0, 2,
                [[(None, None, B("asdf")), (None, None, B("  "))]]),
        ]
        for desc, l, r, et in cases:
            self.cptest(desc, "asdf", [], l, r, et)

    def test2(self):
        # negative values: trim columns off either edge
        cases = [
            ("left trim", -2, 0, [[(None, None, B("df"))]]),
            ("right trim", 0, -2, [[(None, None, B("as"))]]),
        ]
        for desc, l, r, et in cases:
            self.cptest(desc, "asdf", [], l, r, et)
class WidgetSquishTest(unittest.TestCase):
    """Check that box widgets tolerate being 'squished' to zero rows:
    rendering at (80, 0) must yield a zero-row canvas without raising."""

    def wstest(self, w):
        """Render w at zero and one row, with and without focus, and
        assert the canvas reports the requested number of rows."""
        for rows, focus in [(0, False), (0, True), (1, False)]:
            canv = w.render((80, rows), focus=focus)
            assert canv.rows() == rows

    def test_listbox(self):
        for contents in ([], [urwid.Text("hello")]):
            self.wstest(urwid.ListBox(contents))

    def test_bargraph(self):
        self.wstest(urwid.BarGraph(['foo', 'bar']))

    def test_graphvscale(self):
        for label in [(0, "hello")], [(5, "hello")]:
            self.wstest(urwid.GraphVScale(label, 1))

    def test_solidfill(self):
        self.wstest(urwid.SolidFill())

    def test_filler(self):
        self.wstest(urwid.Filler(urwid.Text("hello")))

    def test_overlay(self):
        big = urwid.BigText("hello", urwid.Thin6x6Font())
        self.wstest(urwid.Overlay(
            big, urwid.SolidFill(),
            'center', None, 'middle', None))
        self.wstest(urwid.Overlay(
            urwid.Text("hello"), urwid.SolidFill(),
            'center', ('relative', 100), 'middle', None))

    def test_frame(self):
        solid = urwid.SolidFill()
        self.wstest(urwid.Frame(solid))
        self.wstest(urwid.Frame(solid, header=urwid.Text("hello")))
        self.wstest(urwid.Frame(solid,
            header=urwid.Text("hello"),
            footer=urwid.Text("hello")))

    def test_pile(self):
        self.wstest(urwid.Pile([urwid.SolidFill()]))
        self.wstest(urwid.Pile([('flow', urwid.Text("hello"))]))
        self.wstest(urwid.Pile([]))

    def test_columns(self):
        self.wstest(urwid.Columns([urwid.SolidFill()]))
def test_all():
    """
    Return a TestSuite with all tests available

    Combines every unittest.TestCase class in this file with doctests
    collected from the listed urwid modules.
    """
    unittests = [
        DecodeOneTest,
        CalcWidthTest,
        ConvertDecSpecialTest,
        WithinDoubleByteTest,
        CalcTextPosTest,
        CalcBreaksCharTest,
        CalcBreaksDBCharTest,
        CalcBreaksWordTest,
        CalcBreaksWordTest2,
        CalcBreaksDBWordTest,
        CalcBreaksUTF8Test,
        CalcBreaksCantDisplayTest,
        SubsegTest,
        CalcTranslateCharTest,
        CalcTranslateWordTest,
        CalcTranslateWordTest2,
        CalcTranslateWordTest3,
        CalcTranslateWordTest4,
        CalcTranslateWordTest5,
        CalcTranslateClipTest,
        CalcTranslateCantDisplayTest,
        CalcPosTest,
        Pos2CoordsTest,
        CanvasCacheTest,
        CanvasTest,
        ShardBodyTest,
        ShardsTrimTest,
        ShardsJoinTest,
        TagMarkupTest,
        TextTest,
        EditTest,
        EditRenderTest,
        ListBoxCalculateVisibleTest,
        ListBoxChangeFocusTest,
        ListBoxRenderTest,
        ListBoxKeypressTest,
        PaddingTest,
        FillerTest,
        FrameTest,
        PileTest,
        ColumnsTest,
        LineBoxTest,
        BarGraphTest,
        SmoothBarGraphTest,
        CanvasJoinTest,
        CanvasOverlayTest,
        CanvasPadTrimTest,
        WidgetSquishTest,
        TermTest,
        ]
    module_doctests = [
        urwid.widget,
        urwid.wimp,
        urwid.decoration,
        urwid.display_common,
        urwid.main_loop,
        urwid.monitored_list,
        urwid.raw_display,
        'urwid.split_repr', # override function with same name
        urwid.util,
        ]
    tests = unittest.TestSuite()
    # Hoisted out of the loop: the original constructed a new TestLoader
    # for every test case; a single loader serves them all.
    loader = unittest.TestLoader()
    for t in unittests:
        tests.addTest(loader.loadTestsFromTestCase(t))
    for m in module_doctests:
        tests.addTest(DocTestSuite(
            m, optionflags=ELLIPSIS | IGNORE_EXCEPTION_DETAIL))
    return tests
# Run the full suite through Python 2's test.test_support helper when the
# file is executed directly (test_support is imported earlier in this file).
if __name__ == '__main__':
    test_support.run_unittest(test_all())
| 91864def4740905b5345998038866c88c74b86ee | [
"Python",
"Text"
] | 10 | Python | 0xPr0xy/soundcloud-cli | b4ba1d00455b47405db44229d89e29bdcb4f9e1e | b1ba009092c1d6ae46d825725d673f30ab0fb8cb | |
refs/heads/master | <repo_name>MoeMustafa/marketplace<file_sep>/src/app/products/product-list/product-list.component.ts
import { Component } from '@angular/core';
@Component({
  selector: 'mp-products',
  templateUrl: './product-list.component.html'
})
export class ProductListComponent {
  /** Heading displayed above the product list. */
  pageTitle = 'Product List';
  /** Thumbnail width in pixels. */
  imageWidth = 50;
  /** Thumbnail margin in pixels. */
  imageMargin = 2;
  /** Whether product thumbnails are currently shown. */
  showImage = false;
  // Hard-coded sample data; presumably to be replaced by a product
  // service later -- TODO confirm.
  products: any[] = [
    {
      'productId': 2,
      'productName': '<NAME>',
      'productCode': 'GDN-0012',
      'releaseDate': 'March 18, 2018',
      'description': 'Test product description',
      'price': 22.33,
      'starRating': 2.4,
      'imageUrl': 'https://media.tractorsupply.com/is/image/TractorSupplyCompany/3599008?$456$'
    }
  ];

  /** Toggle visibility of product thumbnails in the list. */
  toggleImage(): void {
    this.showImage = !this.showImage;
  }

  // Removed the empty `constructor() { }` -- a no-op constructor is
  // redundant and behaves identically to having none at all.
}
| 239ed2eb65af546512c8069a68459e9ab5d24a24 | [
"TypeScript"
] | 1 | TypeScript | MoeMustafa/marketplace | 35073f218f19aadd4bfb664277b08befb32c24d2 | 9f13d12052e8ad1d76c75e2d4b583a2b40b2fe60 | |
refs/heads/master | <repo_name>haresh-ram/firstChess<file_sep>/firstChess/src/main/webapp/sign.js
/**
*
*/
function openSignUp(){
cleanFields();
textfocus();
document.getElementById("outerBox").style.cssText="display:none";
document.getElementById("outerBoxSignUp").style.cssText="display:flex";
}
function closeSignUp(){
cleanFields();
textfocus();
document.getElementById("outerBoxSignUp").style.cssText="display:none";
document.getElementById("outerBox").style.cssText="display:flex";
}
function textfocus(){
document.getElementById("field1").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("field2").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("text1").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("text2").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("text3").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("text5").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("countries").style.cssText="border: 1px solid rgb(70, 172, 255);"
document.getElementById("label1").innerHTML="";
document.getElementById("label2").innerHTML="";
document.getElementById("label3").innerHTML="";
document.getElementById("label4").innerHTML="";
document.getElementById("label5").innerHTML="";
document.getElementById("label6").innerHTML="";
document.getElementById("label7").innerHTML="";
document.getElementById("label8").innerHTML="";
}
function cleanFields(){
document.getElementById("field1").value="";
document.getElementById("field2").value="";
document.getElementById("text1").value="";
document.getElementById("text2").value="";
document.getElementById("text3").value="";
document.getElementById("text5").value="";
}
function signUpCheck(){
var text1 = document.getElementById("text1");
var text2 = document.getElementById("text2");
var text3 = document.getElementById("text3");
var text5 = document.getElementById("text5");
var dropdown = document.getElementById("countries");
var regx = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
if(text1.value.trim()=="" || text2.value.trim()=="" || text3.value.trim()=="" || dropdown.options[dropdown.selectedIndex].value == "" || text3.value.trim() != text5.value.trim() || regx.test(text2.value) == false){
if(text1.value.trim()==""){
text1.style.cssText="border:2px solid red;";
document.getElementById("label3").innerHTML="Field cannot be Empty";
}
if(text2.value.trim()==""){
text2.style.cssText="border:2px solid red;";
document.getElementById("label4").innerHTML="Field cannot be Empty";
}
if(text3.value.trim()==""){
text3.style.cssText="border:2px solid red;";
document.getElementById("label5").innerHTML="Field cannot be Empty";
}
if(text5.value.trim()==""){
text5.style.cssText="border:2px solid red;";
document.getElementById("label8").innerHTML="Field cannot be Empty";
}
if(text3.value.trim() != text5.value.trim()){
text3.style.cssText="border:2px solid red;";
text5.style.cssText="border:2px solid red;";
document.getElementById("label5").innerHTML="Passwords not matching";
}
if(dropdown.options[dropdown.selectedIndex].value == ""){
dropdown.style.cssText="border:2px solid red;";
document.getElementById("label6").innerHTML="Select your Country";
}
if(regx.test(text2.value) == false){
text2.style.cssText="border:2px solid red;";
document.getElementById("label4").innerHTML="Invalid EmailID";
}
return false;
}else{
var v=false;
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function(){
if(this.readyState == 4 && this.status == 200){
var res = this.responseText;
if(res=="false"){
text2.style.cssText="border:2px solid red;";
document.getElementById("label4").innerHTML="E-mail ID already exists";
v=false;
}else
v=true;
}
};
xhttp.open("POST","chessLoginServlet?email="+document.getElementById("text2").value+"&action="+"signUpPriorCheck",false);
xhttp.send();
if(v==false)
return false;
else
return true;
}
}
function checkValidLogin(){
var field1 = document.getElementById("field1");
var field2 = document.getElementById("field2");
if(field1.value.trim()=="" || field2.value.trim()==""){
if(field1.value.trim()==""){
field1.style.cssText="border:2px solid red;";
document.getElementById("label1").innerHTML="Field cannot be Empty"
}
if(field2.value.trim()==""){
field2.style.cssText="border:2px solid red;";
document.getElementById("label2").innerHTML="Field cannot be Empty"
}
return false;
}else{
var v=false;
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function(){
if(this.readyState == 4 && this.status == 200){
var res = this.responseText;
if(res=="false"){
document.getElementById("label7").innerHTML="Invalid E-mail/Password";
v=false;
}else
v=true;
}
};
xhttp.open("POST","chessLoginServlet?email="+document.getElementById("field1").value+"&password="+document.getElementById("field2").value+"&action="+"loginPriorCheck",false);
xhttp.send();
document.getElementById("field2").value="";
if(v==false)
return false;
else
return true;
}
}
function openGame(){
var gameCode = localStorage.getItem("gameCode");
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function(){
if(this.readyState == 4 && this.status == 200){
window.location.href="board.jsp";
}
};
xhttp.open("POST","chessLoginServlet?gameCode="+gameCode+"&action="+"registerGame");
xhttp.send();
}
function getProfile(){
var xhttp = new XMLHttpRequest();
var response;
xhttp.onreadystatechange = function(){
if(this.readyState == 4 && this.status == 200){
response = this.responseText;
var json = JSON.parse(response);
document.getElementById("profileID").innerHTML = json.userID;
document.getElementById("profileEmail").innerHTML = json.email;
document.getElementById("profileCountry").innerHTML = json.country;
document.getElementById("matchesPlayed").innerHTML = json.matchesPlayed;
document.getElementById("matchesWon").innerHTML = json.matchesWon;
document.getElementById("matchesLost").innerHTML = json.matchesLost;
document.getElementById("matchesDraw").innerHTML = json.matchesDraw;
document.getElementById("profileName").innerHTML = json.username;
}
};
xhttp.open("POST","chessLoginServlet?action="+"getProfile");
xhttp.send();
}
function codeSubmit(){
var box = document.getElementById("gameCodeBox");
if(box.value.trim()==""){
box.style.cssText="border:2px solid red;";
document.getElementById("codeLabel").innerHTML = "Field cannot be Empty";
return false;
}
localStorage.setItem("gameCode",box.value);
document.getElementById("gameCodeBox").value="";
window.location.href="board.jsp";
}
function codeBoxFocus(){
document.getElementById("gameCodeBox").style.cssText="border: 1px solid rgb(70, 172, 255);";
document.getElementById("codeLabel").innerHTML="";
document.getElementById("gameCodeBox1").style.cssText="border: 1px solid rgb(70, 172, 255);";
document.getElementById("codeLabel1").innerHTML="";
}
function createGame(){
document.getElementById("tab1").style.cssText = "background-color:black;color:white";
document.getElementById("tab2").style.cssText = "background-color:rgb(52, 183, 255);color:black";
document.getElementById("tab3").style.cssText = "background-color:rgb(52, 183, 255);color:black";
if("" + (getComputedStyle(document.getElementById("codeCreateDiv")).display) == "none"){
document.getElementById("codeDiv").style.cssText = "display:none";
document.getElementById("codeDiv2").style.cssText = "display:none";
document.getElementById("codeCreateDiv").style.cssText = "display:block";
var rand = Math.floor(10000 + (Math.random() * 100000000));
localStorage.setItem("gameCode",rand);
}
}
function spectateJoin(){
document.getElementById("tab3").style.cssText = "background-color:black;color:white";
document.getElementById("tab2").style.cssText = "background-color:rgb(52, 183, 255);color:black";
document.getElementById("tab1").style.cssText = "background-color:rgb(52, 183, 255);color:black";
codeBoxFocus();
document.getElementById("gameCodeBox1").value="";
document.getElementById("codeCreateDiv").style.cssText = "display:none";
document.getElementById("codeDiv").style.cssText = "display:none";
document.getElementById("codeDiv2").style.cssText = "display:block";
codeBoxFocus();
}
function joinGame(){
document.getElementById("tab2").style.cssText = "background-color:black;color:white";
document.getElementById("tab1").style.cssText = "background-color:rgb(52, 183, 255);color:black";
document.getElementById("tab3").style.cssText = "background-color:rgb(52, 183, 255);color:black";
codeBoxFocus();
document.getElementById("gameCodeBox").value="";
document.getElementById("codeCreateDiv").style.cssText = "display:none";
document.getElementById("codeDiv").style.cssText = "display:block";
document.getElementById("codeDiv2").style.cssText = "display:none";
}
function spectateGame(){
var box = document.getElementById("gameCodeBox1");
if(box.value.trim()==""){
box.style.cssText="border:2px solid red;";
document.getElementById("codeLabel1").innerHTML = "Field cannot be Empty";
return false;
}
localStorage.setItem("gameCode",box.value);
document.getElementById("gameCodeBox1").value="";
window.location.href="spectateBoard.jsp";
}
function putCode(){
var gameCode = localStorage.getItem("gameCode");
document.getElementById("codeSpan").innerHTML = gameCode;
}
function openLeaderBoard(){
document.getElementById("leaderBoardPage").style.cssText="display:flex";
}
function closeLeaderBoard(){
document.getElementById("leaderBoardPage").style.cssText="display:none";
}
<file_sep>/firstChess/src/main/webapp/spectator.js
/**
*
*/
function spectatorBoardPos(){
var gameCode = localStorage.getItem("gameCode");
var xhttp = new XMLHttpRequest();
var indx;
var ar= ["00","01","02","03","04","05","06","07",
"10","11","12","13","14","15","16","17",
"20","21","22","23","24","25","26","27",
"30","31","32","33","34","35","36","37",
"40","41","42","43","44","45","46","47",
"50","51","52","53","54","55","56","57",
"60","61","62","63","64","65","66","67",
"70","71","72","73","74","75","76","77"];
xhttp.onreadystatechange = function(){
if(this.readyState = 4 && this.status == 200){
var resp = this.responseText;
var arr = resp.split(",");
console.log(arr);
for(let i=0;i<64;i++){
if(arr[i]=="we1" || arr[i]=="we2"){
document.getElementById(ar[i]).innerHTML = '\u2656';
}else if(arr[i] == "wh"){
document.getElementById(ar[i]).innerHTML = '\u2658';
}else if(arr[i] == "wm"){
document.getElementById(ar[i]).innerHTML = '\u2657';
}else if(arr[i] == "wq"){
document.getElementById(ar[i]).innerHTML = '\u2655';
}else if(arr[i] == "wk"){
document.getElementById(ar[i]).innerHTML = '\u2654';
}else if(arr[i] == "wp"){
document.getElementById(ar[i]).innerHTML = '\u2659';
}else if(arr[i] == "be1" || arr[i] == "be2"){
document.getElementById(ar[i]).innerHTML = '\u265C';
}else if(arr[i] == "bh"){
document.getElementById(ar[i]).innerHTML = '\u265E';
}else if(arr[i] == "bm"){
document.getElementById(ar[i]).innerHTML = '\u265D';
}else if(arr[i] == "bq"){
document.getElementById(ar[i]).innerHTML = '\u265B';
}else if(arr[i] == "bk"){
document.getElementById(ar[i]).innerHTML = '\u265A';
}else if(arr[i] == "bp"){
document.getElementById(ar[i]).innerHTML = '\u265F';
}
}
}
};
xhttp.open("POST","chessLoginServlet?action="+"getSpectatorBoardPos"+"&gameCode="+gameCode,false);
xhttp.send();
}
| aaa1c9adcd92c2315548a1e7c166246154d8024c | [
"JavaScript"
] | 2 | JavaScript | haresh-ram/firstChess | 55283a0049e94d95fe47618562535d40f85fcaa5 | 8b1cae3889555bc4c41703dcc6970d40c8893354 | |
refs/heads/master | <repo_name>e-be-walk/anagram-detector-v-000<file_sep>/lib/anagram.rb
# Your code goes here!
class Anagram
attr_accessor :word
def initialize(word)
@word = word
end
def match(matching_words)
anagram_words = []
word_sorted = @word.split("").sort.join
matching_words.each do |words|
words_sorted = words.split("").sort.join
anagram_words << words if word_sorted == words_sorted
end
anagram_words
end
end
| 6a051c63ec274ac091e3ebf729499eef0c996df6 | [
"Ruby"
] | 1 | Ruby | e-be-walk/anagram-detector-v-000 | 01e748cbd7632bc039a1316cecfcde4c86d850cd | 00af44b0ffd47728e1e171414f749072d6b046dd | |
refs/heads/master | <repo_name>fahimc/SpriteJS<file_sep>/src/main.js
(function(window) {
function Main() {
if (window.addEventListener) {
window.addEventListener("load", onLoad);
} else {
window.attachEvent("onload", onLoad);
}
}
function onLoad() {
Canvas.beginFill(document.body);
// Canvas.drawLine(1000,0,100,100)
Canvas.drawRect(30,40,100,100);
Canvas.beginFill(document.body);
Canvas.drawRect(100,100,100,100);
// Canvas.drawCircle(0,0,100,100);
}
Main();
})(window);
<file_sep>/README.md
SpriteJS
======== | af2bbfad222fab46ca3d80c76d953b71864c4082 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | fahimc/SpriteJS | 4447577e6d8b12d8e16772f0e6d939269f0cbbef | 9a7ab6c57079792e9801d5325310ba04ccc91406 | |
refs/heads/master | <file_sep># Based on https://git.io/fjkGc
# The full path to the main package is use in the
# imports tool to format imports correctly.
NAMESPACE = github.com/xorcare/caiman
# The name of the file recommended in the standard
# documentation go test -cover and used codecov.io
# to check code coverage.
COVER_FILE ?= coverage.out
# Main targets.
.DEFAULT_GOAL := build
build: ## Build the project binary
@go build ./cmd/caiman
ci: check build ## Target for integration with ci pipeline
go get github.com/xorcare/caiman/cmd/caiman
caiman --help
check: static test build ## Check project with static checks and unit tests
help: ## Print this help
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | \
awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
imports: tools ## Check and fix import section by import rules
@test -z $$(for d in $$(go list -f {{.Dir}} ./...); do goimports -e -l -local $(NAMESPACE) -w $$d/*.go; done)
install: ## Install the project binary to $GOPATH/bin
@go install ./cmd/caiman
lint: tools ## Check the project with lint
@golint -set_exit_status ./...
static: imports vet lint ## Run static checks (lint, imports, vet, ...) all over the project
test: ## Run unit tests
@go test ./... -count=1 -race
@go test ./... -count=1 -coverprofile=$(COVER_FILE) -covermode=atomic $d
@go tool cover -func=$(COVER_FILE) | grep ^total
tools: ## Install all needed tools, e.g. for static checks
@go install \
golang.org/x/lint/golint \
golang.org/x/tools/cmd/goimports
toolsup: ## Update all needed tools, e.g. for static checks
go mod tidy && \
go get \
golang.org/x/lint/golint@latest \
golang.org/x/tools/cmd/goimports@latest && \
go mod download && \
go mod verify
$(MAKE) tools
vet: ## Check the project with vet
@go vet ./...
.PHONY: build check ci help imports install lint static test tools toolsup vet
<file_sep>// Copyright © 2019-2020 <NAME>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package converter
import (
"bytes"
"encoding/base64"
"fmt"
"log"
"strings"
"sync"
"text/template"
"github.com/emersion/go-vcard"
"github.com/go-ldap/ldif"
"github.com/spf13/cast"
"github.com/xorcare/caiman/internal/config"
)
var _funcMap = map[string]interface{}{
"Base64Encode": base64Encode,
}
// Converter structure of the converter.
type Converter struct {
Config config.Config
templates map[string]*template.Template
once sync.Once
}
// LDIF2vCARD4 converts LDIF to vCard 4.
func (c *Converter) LDIF2vCARD4(l ldif.LDIF) (cards Cards, err error) {
skipped, successful, nullable := 0, 0, 0
defer func() {
log.Println(fmt.Sprintf("total entries %d", len(l.Entries)))
log.Println(fmt.Sprintf("skipped %d entries because it is nil", nullable))
log.Println(fmt.Sprintf("skipped %d entries because bad count of filled fields", skipped))
log.Println(fmt.Sprintf("successfully processed %d entries", successful))
}()
for _, v := range l.Entries {
if v == nil || v.Entry == nil {
nullable++
continue
}
entry := adapter{entry: *v.Entry}
card := vcard.Card{}
for fieldName, fields := range c.Config.Fields {
fieldName = strings.ToUpper(fieldName)
for _, field := range fields {
tpl, err := c.parseTemplate(field.Template)
if err != nil {
return nil, err
}
buf := bytes.NewBuffer([]byte{})
err = tpl.Execute(buf, map[string]interface{}{
"entry": entry,
})
if err != nil {
return nil, err
}
add(card, fieldName, vcard.Field{
Value: buf.String(),
Params: field.Params,
Group: field.Group,
})
}
}
if len(card) <= 3 {
skipped++
continue
}
vcard.ToV4(card)
cards = append(cards, card)
successful++
}
return cards, nil
}
func (c *Converter) parseTemplate(tpl string) (tmpl *template.Template, err error) {
c.once.Do(func() {
c.templates = make(map[string]*template.Template)
})
if tmpl, ok := c.templates[tpl]; ok {
return tmpl.Clone()
}
tmpl, err = template.New("").Funcs(_funcMap).Parse(tpl)
if err != nil {
return nil, err
}
c.templates[tpl] = tmpl
return tmpl.Clone()
}
func add(card vcard.Card, k string, f vcard.Field) {
if k != "" && strings.TrimSpace(f.Value) != "" {
card.Add(k, &f)
}
}
func base64Encode(content interface{}) (string, error) {
str, err := cast.ToStringE(content)
if err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString([]byte(str)), nil
}
<file_sep># caiman
[](https://travis-ci.org/xorcare/caiman)
[](https://goreportcard.com/report/github.com/xorcare/caiman)
This is a tool for converting [LDIF] ([LDAP]) data into standard [vCard4] data, and
then downloading it to services or applications that are compatible with the
vCard4 standard.
The tool is mainly focused on the [Microsoft AD] product, but can be used with other
LDAPs, with possible limitations.
## Installation
```bash
go get github.com/xorcare/caiman/cmd/caiman
```
## Command line interface, [CLI]
```
caiman is a tool to convert people data from LDAP(LDIF) format to vCard4 contact format
Usage:
caiman [flags]
Examples:
caiman < person.ldif > person.vcf
caiman --config-file ~/.caiman.yaml < person.ldif > person.vcf
caiman --config-dump > .caiman.yaml
cat person.ldif | caiman > person.vcf
cat person.ldif | caiman | tee person.vcf
Flags:
-d, --config-dump print to standard output all configuration values, it prints configuration data in YAML format
-f, --config-file string the settings file from which the settings will be loaded
-h, --help help for caiman
```
Example of a command to convert LDAP to vCard:
```
$ caiman < person.ldif > person.vcf
2019/01/31 21:01:44 total entries 1030
2019/01/31 21:01:44 skipped 1 entries because it is nil
2019/01/31 21:01:44 skipped 5 entries because bad count of filled fields
2019/01/31 21:01:44 successfully processed 1024 entries
```
Example of a command to export data from LDAP:
```
ldapsearch -x -h example.com -D "DOMAIN\user.name" -LL -W -b 'DC=example,DC=com' '(objectClass=person)' | tee person.ldif
```
## License
© <NAME>, 2019-2020
Released under the [BSD 3-Clause License][LIC].
[LIC]:https://github.com/xorcare/caiman/blob/master/LICENSE 'BSD 3-Clause "New" or "Revised" License'
[vCard4]:https://en.wikipedia.org/wiki/VCard#vCard_4.0 'vCard 4.0 its the latest standard, which is built upon the RFC 6350 standard'
[LDIF]:https://tools.ietf.org/html/rfc2849 'The LDAP Data Interchange Format (LDIF) - Technical Specification'
[LDAP]:https://en.wikipedia.org/wiki/LDAP_Data_Interchange_Format 'LDAP Data Interchange Format'
[Microsoft AD]:https://docs.microsoft.com/en-us/azure/active-directory 'Azure Active Directory documentation'
[CLI]:https://en.wikipedia.org/wiki/Command-line_interface 'Command-line interface'
<file_sep>// Copyright © 2019-2020 <NAME>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cli
import (
"errors"
"log"
"os"
"github.com/go-ldap/ldif"
"github.com/spf13/cobra"
"golang.org/x/crypto/ssh/terminal"
"github.com/xorcare/caiman/internal/config"
"github.com/xorcare/caiman/internal/converter"
)
var _config = config.Default()
var _configDump = false
var _configFile = ""
var _caimanCMD = &cobra.Command{
Use: "caiman",
Long: "caiman is a tool to convert people data from LDAP(LDIF) format to vCard4 contact format",
Example: "caiman < person.ldif > person.vcf" +
"\ncaiman --config-file ~/.caiman.yaml < person.ldif > person.vcf" +
"\ncaiman --config-dump > .caiman.yaml" +
"\ncat person.ldif | caiman > person.vcf" +
"\ncat person.ldif | caiman | tee person.vcf",
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
log.SetOutput(cmd.ErrOrStderr())
return nil
},
PreRunE: func(cmd *cobra.Command, args []string) (err error) {
if _configFile == "" {
return
}
file, err := os.Open(_configFile)
if os.IsNotExist(err) {
return nil
} else if err != nil {
return err
} else {
defer file.Close()
}
return _config.Decode(file)
},
RunE: exec,
SilenceUsage: true,
}
func init() {
_caimanCMD.Flags().StringVarP(
&_configFile, "config-file", "f", _configFile,
"the settings file from which the settings will be loaded")
_caimanCMD.Flags().BoolVarP(
&_configDump, "config-dump", "d", _configDump,
"print to standard output all configuration values,"+
" it prints configuration data in YAML format")
}
func exec(cmd *cobra.Command, _ []string) error {
if _configDump {
return _config.Encode(cmd.OutOrStdout())
}
if terminal.IsTerminal(0) {
return errors.New("no piped data")
}
dif := ldif.LDIF{}
if err := ldif.Unmarshal(cmd.InOrStdin(), &dif); err != nil {
return err
}
conv := converter.Converter{Config: _config}
cards, err := conv.LDIF2vCARD4(dif)
if err != nil {
return err
}
if err := cards.Encode(cmd.OutOrStdout()); err != nil {
return err
}
return nil
}
// Execute starts command line processing.
func Execute() {
if err := _caimanCMD.Execute(); err != nil {
os.Exit(1)
}
}
<file_sep>// Copyright © 2020 <NAME>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package converter
import (
"io"
"github.com/emersion/go-vcard"
)
// Cards its data vCard set with helper methods.
type Cards []vcard.Card
// Encode its encodes data into a vCard text representation.
func (c Cards) Encode(w io.Writer) error {
encoder := vcard.NewEncoder(w)
for _, card := range c {
if err := encoder.Encode(card); err != nil {
return err
}
}
return nil
}
<file_sep>// Copyright © 2019-2020 <NAME>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"github.com/xorcare/caiman/cmd/caiman/cli"
)
// Useful links:
// RFC 2849 https://tools.ietf.org/html/rfc2849 The LDAP Data Interchange Format (LDIF) - Technical Specification
// RFC 4525 https://tools.ietf.org/html/rfc4525 Lightweight Directory Access Protocol (LDAP)
// RFC 6350 https://tools.ietf.org/html/rfc6350 vCard Format Specification
// RFC 6351 https://tools.ietf.org/html/rfc6351 xCard: vCard XML Representation
// RFC 7095 https://tools.ietf.org/html/rfc7095 jCard: The JSON Format for vCard
// https://en.wikipedia.org/wiki/VCard#vCard_4.0 vCard
// https://en.wikipedia.org/wiki/LDAP_Data_Interchange_Format LDAP Data Interchange Format
func main() {
cli.Execute()
}
<file_sep>// Copyright © 2019-2020 <NAME>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package config
import (
"io"
"github.com/emersion/go-vcard"
"gopkg.in/yaml.v2"
)
var _default = Config{
Fields: map[string][]Field{
vcard.FieldEmail: {
{
Template: `{{.entry.Attr "mail"}}`,
Params: vcard.Params{
vcard.ParamType: []string{"INTERNET", vcard.TypeWork},
vcard.ParamPreferred: []string{"1"},
},
},
},
vcard.FieldName: {
{
Template: `{{.entry.Attr "sn"}}{{$givenName := .entry.Attr "givenName"}}{{if ne $givenName ""}};{{$givenName}}{{end}}`,
},
},
vcard.FieldTelephone: {
{
Template: `{{.entry.Attr "mobile"}}`,
Params: vcard.Params{
vcard.ParamType: []string{
vcard.TypeText, vcard.TypeVoice, vcard.TypeHome,
},
vcard.ParamPreferred: []string{"1"},
},
},
{
Template: `{{.entry.Attr "telephoneNumber"}}`,
Params: vcard.Params{
vcard.ParamType: []string{vcard.TypeWork},
},
},
},
vcard.FieldTitle: {
{
Template: `{{.entry.Attr "title"}}`,
},
},
vcard.FieldOrganization: {
{
Template: `{{if .entry.Attr "department" }};{{.entry.Attr "department"}}{{else}}{{end}}`,
},
},
vcard.FieldPhoto: {
{
Template: `{{.entry.Attr "jpegPhoto" | Base64Encode}}`,
Params: vcard.Params{
"ENCODING": []string{"b"},
vcard.ParamType: []string{"JPEG"},
vcard.ParamPreferred: []string{"1"},
},
},
{
Template: `{{.entry.Attr "photo" | Base64Encode}}`,
Params: vcard.Params{
"ENCODING": []string{"b"},
vcard.ParamPreferred: []string{"2"},
},
},
{
Template: `{{.entry.Attr "thumbnailPhoto" | Base64Encode}}`,
Params: vcard.Params{
"ENCODING": []string{"b"},
},
},
},
},
}
// Config the basic structure configuration.
type Config struct {
Fields map[string][]Field
}
// Decode the YAML settings from reader.
func (c *Config) Decode(r io.Reader) error {
return yaml.NewDecoder(r).Decode(c)
}
// Encode the YAML settings to writer.
func (c Config) Encode(w io.Writer) error {
return yaml.NewEncoder(w).Encode(c)
}
// Field a field contains a value and some parameters.
type Field struct {
Group string
// Params is a set of field parameters.
Params vcard.Params
// Template golang text template string.
Template string
}
// Default returns copy of default config.
func Default() Config {
return _default
}
<file_sep>module github.com/xorcare/caiman
go 1.13
require (
github.com/emersion/go-vcard v0.0.0-20190105225839-8856043f13c5
github.com/go-ldap/ldif v0.0.0-20180918085934-3491d58cdb60
github.com/kr/pretty v0.1.0 // indirect
github.com/spf13/cast v1.3.0
github.com/spf13/cobra v0.0.5
github.com/stretchr/testify v1.4.0 // indirect
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f
golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7 // indirect
golang.org/x/tools v0.0.0-20200125223703-d33eef8e6825
gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d // indirect
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
gopkg.in/ldap.v2 v2.5.1
gopkg.in/yaml.v2 v2.2.2
)
<file_sep>// Copyright © 2019-2020 <NAME>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package converter
import "gopkg.in/ldap.v2"
// adaptor an entry representation for go templates.
type adapter struct {
entry ldap.Entry
}
// Attr returns the first value for the named attribute, or "".
func (a adapter) Attr(key string) string {
return a.entry.GetAttributeValue(key)
}
// Attrs returns the values for the named attribute, or an empty list.
func (a adapter) Attrs(key string) []string {
return a.entry.GetAttributeValues(key)
}
| 1678684a7f9ffe2b3185b4bd9d575684f7b4a5f0 | [
"Go",
"Go Module",
"Makefile",
"Markdown"
] | 9 | Makefile | xorcare/caiman | b41606a45027a367784969af7fe05fbe8ebc5ddd | 0c689454656a46bf9c1ff1d4a35c3707e4c089b5 | |
refs/heads/master | <file_sep>#!/usr/bin/python
####################################
# +++++++++++++++++++ #
# EventbriteMonitor #
# +++++++++++++++++++ #
# Author: CuPcakeN1njA #
# ++++++++++++++++++++ #
####################################
"""Usage ==> EventbriteMonitor.py (url of eventbrite ticket page)
Use this tool to monitor an Eventbrite ticket page.
This script will send you a text message using the
twilio api when tickets are available again if they
have been sold out and are reselling.
In order to use this script you will have to create a twilio account
and modify the alert() function and add in your own api tokens and contact
numbers. How to do this can be found in the twilio documentation and examples
and you can set up your twilio account here: https://www.twilio.com
"""
import requests
import time
from twilio.rest import Client
import sys
from datetime import datetime
def gethtml(url):
resp = requests.get(url)
html = resp.text
return html
def check(html):
if "Sold Out" in html:
return False
else:
return True
def alert(url):
account_sid = "Twilio account_sid"
auth_token = "Twilio auth_token"
client = Client(account_sid, auth_token)
message = client.api.account.messages.create(
to = "Your phone number",
from_ ="Your twilio phone number",
body = ("Your tickets are on sale\n Buy them from: \n%s" % url))
def usage():
print("""
Usage ==> python EventbriteMonitor.py (url of eventbrite ticket page)
""")
if __name__ == "__main__":
if len(sys.argv) != 2:
usage()
else:
try:
url = sys.argv[1]
while True:
if check(gethtml(url)) == True:
print("Tickets Available")
break
else:
print("Tickets Unavailable")
time.sleep(120)
alert(url)
except Exception as e:
print("\nProgram exiting because of this error: %s" % e )
f = open("log.txt", "a+")
f.write("\nProgram ended at %s because of this error:\n%s" % (datetime.now(), e))
f.close()
<file_sep># EventbriteMonitor
An extremely simple script that alerts you via sms when Eventbrite tickets are available
This script will be best used when tickets have been sold out however there may be more tickets being released without a release date. This script can monitor an Eventbrite ticket page and will alert you when the tickets are no longer sold out.
## Usage
```python EventbriteMonitor.py (url of the Eventbrite ticket page)```
This will run the tool once however if you want to continously monitor the ticket page you may want to set something else up.
To use this tool you will require a twilio account. You can get a free trial account from .
You will then have to modify this section of the code and add in your tokens and contact numbers.
```
account_sid = "Twilio account_sid"
auth_token = "<PASSWORD>"
client = Client(account_sid, auth_token)
message = client.api.account.messages.create(
to = "Your phone number",
from_ ="Your twilio phone number",
```
## Implementation
There are multiple ways to implement this tool. The best way would be to run this tool on a vps or dedicated box. Even a raspberry pi with internet connectivity would work. Although a more feasable method would be to use your own computer and add the code to run on startup or use task scheduler if you are in windows.
To set this up on a linux box you could modify ```~/.bashrc``` and add ```sudo screen -dm python (/path/to/EventbriteMonitor.py) (url of evenbrite ticket page)```
This will start the script in the background when you first boot up. If you dont want to restart your pc you could then run ```sudo screen -dm python (/path/to/EventbriteMonitor.py) (url of evenbrite ticket page)``` in your terminal.
This is the method you could use when implementing this program on a vps or dedicated server/raspberry pi.
## Finally
If the program comes across an error whether this being something wrong with twilio or a networkng error it will exit the program and dump the error to ```log.txt``` which will be in the same directory as the script.
Once it sees that the tickets are available again it will send an sms to the phone number entered in the code and then exit aswell.
| 3e57a424315d5f9f2b9ed901b0d9767469082a9b | [
"Markdown",
"Python"
] | 2 | Python | unknwncharlie/EventbriteMonitor | e711dae0141074db3ff2a249fd29bfabeb27f39d | b1e2895a2022e69c0ff58e991fb25ad9c811978a | |
refs/heads/master | <file_sep>/* global $, _ */
(function (common) {
'use strict';
// Constants used in this file
const CONSTANTS = {
// Classes that are added by this extension
classes: {
decorated: 'js-tcnce-decorated-number',
polling: 'js-tcnce-polling'
},
// Selectors used by this extension
selectors: {
composerClass: 'js-composer',
listCardClass: 'list-card',
titleElement: '.list-card-title.js-card-name',
undecoratedCards: '.list-card.js-member-droppable.ui-droppable:not(.js-tcnce-decorated-number):not(.js-composer)'
}
};
/**
* Determines if an element has been decorated yet.
*
* @param {jQuery} $element the jQuery element to check for decoration
* @returns {boolean} true, if the element is decorated; false otherwise
*/
var isDecorated = function isDecorated($element) {
return !!$element.hasClass(CONSTANTS.classes.decorated);
},
/**
* Marks an element as decorated
*
* @param {jQuery} $element the jQuery element to mark as decorated
* @returns {jQuery} $element for chaining
*/
markDecorated = function setDecorated($element) {
return $element.addClass(CONSTANTS.classes.decorated);
},
/**
* Determines if the element is currently polling.
*
* @param {jQuery} $element the element to look at
* @returns {boolean} true, if the element is decorated; false otherwise
*/
isPolling = function isPolling($element) {
return !!$element.hasClass(CONSTANTS.classes.polling);
},
/**
* Marks an element as polling.
*
* @param {jQuery} $element the card to mark as polling
* @returns {jQuery} $element for chaining
*/
markPolling = function markPolling($element) {
return $element.addClass(CONSTANTS.classes.polling);
},
/**
* Marks an element as no longer polling.
*
* @param {jQuery} $element the card to remove the polling mark from
* @returns {jQuery} $element for chaining
*/
unmarkPolling = function unmarkPolling($element) {
return $element.removeClass(CONSTANTS.classes.polling);
},
/**
* Decorates a card. The ID of a card is specified in its "href" attribute, which *may* not be populated if the
* card is newly added as a result of the user adding a new card. This function includes some retry logic to poll
* for the "href" attribute before decorating the card.
*
* @param {jQuery} $card the jQuery card element
*/
decorateCard = function decorateCard($card) {
// Sanity check: decorating a card multiple times will prepend the id to the title!
if (!isDecorated($card)) {
/**
* Gets the ID from an href attribute value.
*
* @param {string} href the href attribute value to parse
* @returns {string} the ID specified in the element
*/
var getId = function getId(href) {
var parts = href.split('/');
// Pull out the ID from the href attribute
return parts[parts.length-1].split('-')[0];
},
/**
* Gets the href attribute value from a jQuery element.
*
* @param {jQuery} $element the element to get the "href" attribute value from
* @returns {string} the "href" attribute value, or undefined if it does not exist
*/
getHrefFrom$element = function getHrefFrom$element($element) {
return _.get($element, '[0].href');
},
/**
* Decorates the card by prepending the ID to the card title.
*
* @param {jQuery} $cardElement the element to decorate
*/
doDecorateCardElement = function doDecorateCardElement($cardElement) {
// Pull out the ID from the href attribute
var id = getId(getHrefFrom$element($cardElement)),
// Get the element which has is title
$titleElement = $cardElement.find(CONSTANTS.selectors.titleElement),
// Get the original title
title = $titleElement.html();
// Update the title HTML (prepend the id)
$titleElement.html('#' + id + ' ' + title);
// Flag the card as being decorated
markDecorated($cardElement);
};
// We can only decorate cards that have an "href" attribute
if (!!getHrefFrom$element($card) && !isPolling($card)) {
// Great, the card element has a "href" attribute - go ahead and decorate the card
doDecorateCardElement($card);
} else {
// Doh! The card does not yet have an "href" attribute! We need to wait until the attribute shows up
var pollIntervalMS = 50,
pollIntervalMaxCount = 50;
// Check to see if this added node is a candidate for polling
//
// It must be a list card (CONSTANTS.selectors.listCardClass)
// It must not be in the composing phase (CONSTANTS.selectors.composerClass)
// It must not be in the polling phase (!isPolling($card))
if ($card.hasClass(CONSTANTS.selectors.listCardClass) && !$card.hasClass(CONSTANTS.selectors.composerClass) && !isPolling($card)) {
// Mark the card as polling
markPolling($card);
// The ID is assigned on the server and appended to the element once the AJAX call returns; use a
// polling interval to figure out when this occurs
var intervalHandle = setInterval(function onInterval() {
// Get the href attribute
var href = getHrefFrom$element($card),
/**
* Stops the polling.
*/
stopPolling = function stopPolling() {
// Mark the card as not polling; eventually the AJAX call will return and
// the "href" element will be populated; the next call to decorated cards
// will decorate this card
unmarkPolling($card);
// Stop the interval
clearInterval(intervalHandle);
};
// If there is an "href" attribute value
if (!!href) {
// The "href" attribute has been set; decorate the card
doDecorateCardElement($card);
// Mark this element as no longer polling
unmarkPolling($card);
// Stop the polling
stopPolling();
}
// Do not poll indefinitely!
if (--pollIntervalMaxCount <= 0) {
stopPolling();
}
}, pollIntervalMS);
}
}
}
},
/**
* Decorates all cards passed in.
*
* @param {jQuery} $cards the jQuery card elements
*/
decorateCards = function decorateCards() {
// Find all cards which have not been decorated
($(CONSTANTS.selectors.undecoratedCards)).each(function invokeDecorateCard() {
decorateCard($(this));
});
};
// I believe that the way Chrome loads the extensions has changed; the previous version of the published
// extension did not require execute() onload, but now this is required.
window.onload = function onLoad() {
// Invoked when the document is ready
$(document).ready(function ready() {
// Decorate all the cards immediately
decorateCards();
// Add a mutation observer which will be invoked whenever elements are added
// We debounce the call to decorateCards() to be kind to the browser
new MutationObserver(_.debounce(decorateCards, 125)).observe(this, { childList: true, characterData: false, attributes: false, subtree: true});
});
// Notify the background page to update the "page action" status
chrome.extension.sendRequest(common.messages.SHOW_PAGE_ACTION);
};
// Invoke with the namespaced member from the global context
}(this.tcnce));
| dfaf1c17a22d9f43d895bf7e4a034669199242e2 | [
"JavaScript"
] | 1 | JavaScript | hal313/trello-card-numbers-chrome-extension | 550e941bf9301bd09d352969ce94522e3bf462a9 | 1ed28c7ea11eea56bafe3a07c38c7e4a20f27275 | |
refs/heads/master | <file_sep>package algs.multi
import algs.AbstractAlgorithm
import containers.AlgorithmDataContainer
import containers.DataContainer
import func.GlobalFunc
import func.Vector
class StepDivideMethod: AbstractAlgorithm() {
override val algName: String get() = "Метод дробления шага пополам"
override val requiredArgs: List<String> get() = listOf("a", "u")
private val eps = GlobalFunc.epsilon
override fun apply(args: Map<String, Any>): DataContainer {
checkArgs(args)
var a = args.getValue("a") as Double
var u0 = args.getValue("u") as Vector
for (iterations in 1 until maxIterations) {
val grad = GlobalFunc.gradient(u0)
if (GlobalFunc.module(grad) < eps) {
return AlgorithmDataContainer(
solution = u0,
iteration = iterations,
epsilon = eps,
algName = algName
)
}
var u1 = u0 - grad * a
while (GlobalFunc.J(u1) >= GlobalFunc.J(u0)) {
a /= 2
u1 = u0 - grad * a
}
u0 = u1
}
throw Error(iterationErrorMsg)
}
}
<file_sep>package containers
import func.Vector
abstract class DataContainer(var solution: Vector, var iteration: Int, var epsilon: Double) {
override fun toString(): String {
return "Решение = $solution\nитераций = $iteration\neps = $epsilon"
}
}<file_sep>package algs.multi
import algs.AbstractAlgorithm
import algs.single.BisectionMethod
import containers.AlgorithmDataContainer
import containers.DataContainer
import func.GlobalFunc
import func.Vector
class FastDownMethod: AbstractAlgorithm() {
override val algName: String get() = "Алгоритм наискорейшего спуска"
override val requiredArgs: List<String> get() = listOf("u")
override val maxIterations: Int get() = 5
override fun apply(args: Map<String, Any>): DataContainer {
checkArgs(args)
var u = args.getValue("u") as Vector
val eps = GlobalFunc.epsilon
for (iteration in 1 until maxIterations) {
val grad = GlobalFunc.gradient(u)
if (GlobalFunc.module(grad) < eps) {
return AlgorithmDataContainer(
solution = u,
iteration = iteration,
epsilon = eps,
algName = algName
)
}
val method = BisectionMethod()
val a = method.apply(mapOf("a" to 0.0, "b" to 1.0, "u0" to u))
u -= grad.times(a.solution[0])
}
throw Error(iterationErrorMsg)
}
}<file_sep>package algs
abstract class AbstractAlgorithm: AlgorithmI {
protected val iterationErrorMsg = "$algName превысил предел в $maxIterations итераций!"
protected fun checkArgs(args: Map<String, Any>) {
for(arg in requiredArgs) {
if(!args.keys.contains(arg))
throw IllegalArgumentException("Не передан аргумент \"$arg\", для работы $algName")
}
this.args = args
}
protected lateinit var args:Map<String, Any>
open val algName: String get() = this.javaClass.name
open val requiredArgs: List<String> get() = listOf()
open val maxIterations: Int get() = 100
override fun equals(other: Any?): Boolean {
if(this === other) return true
if(other is AbstractAlgorithm) {
return other.algName == algName
}
return false
}
override fun hashCode(): Int {
return iterationErrorMsg.hashCode()
}
}<file_sep>package algs
//import algorithm.containers.DataContainer
import containers.DataContainer
interface AlgorithmI {
fun apply(args: Map<String, Any>) : DataContainer
}<file_sep>rootProject.name = 'MO-lab2'
<file_sep>package algs.multi
import algs.AbstractAlgorithm
import containers.AlgorithmDataContainer
import containers.DataContainer
import func.GlobalFunc
import func.Vector
class NewtonsMethod: AbstractAlgorithm() {
override val algName: String get() = "<NAME>"
override val requiredArgs: List<String> get() = listOf("u")
override fun apply(args: Map<String, Any>): DataContainer {
checkArgs(args)
var u0 = args.getValue("u") as Vector
val eps = GlobalFunc.epsilon
for (iterations in 1 until maxIterations) {
var grad = GlobalFunc.gradient(u0)
if (GlobalFunc.module(grad) < eps) {
return AlgorithmDataContainer(
solution = u0,
epsilon = eps,
iteration = iterations,
algName = algName
)
}
u0 -= Vector.toVector(grad * GlobalFunc.getH(u0).inverse)
}
throw Error(iterationErrorMsg)
}
}
<file_sep>package algs.single
import algs.AbstractAlgorithm
import containers.AlgorithmDataContainer
import func.GlobalFunc
import func.Vector
import java.lang.Math.abs
class BisectionMethod: AbstractAlgorithm() {
override val algName: String get() = "Метод деления пополам"
override val requiredArgs get() = listOf("a", "b", "u0")
private val sigma = 0.0001
override fun apply(args: Map<String, Any>): AlgorithmDataContainer {
checkArgs(args)
var a = args.getValue("a") as Double
var b = args.getValue("b") as Double
val eps = GlobalFunc.epsilon
val u1 = args.getValue("u0") as Vector
for(iterations in 1 until maxIterations) {
val a1 = (b + a - sigma) / 2
val a2 = (b + a + sigma) / 2
val f1 = GlobalFunc.J(u1 - a1 * GlobalFunc.gradient(u1))
val f2 = GlobalFunc.J(u1 - a2 * GlobalFunc.gradient(u1))
when {
f1 < f2 -> b = a2
f1 > f2 -> a = a1
else -> {
a = a1
b = a2
}
}
if(abs(b - a) < eps)
return AlgorithmDataContainer(
solution = Vector(listOf((b + a) / 2)),
iteration = iterations,
epsilon = eps,
algName = algName
)
}
throw Error(iterationErrorMsg)
}
}
private operator fun Double.times(gradient: Vector): Vector {
return gradient * this
}
<file_sep>package func
import matrix.AbstractMatrix
import matrix.Matrix
import kotlin.math.pow
import kotlin.math.sqrt
class GlobalFunc {
companion object {
const val epsilon: Double = 0.001
fun J(U: Vector) :Double {
val u1 = U[0]
val u2 = U[1]
// return (u1 - u2).pow(2) + u1 - 5 * u2
// return (u1 - 5).pow(2) + (u2 + 3).pow(2)
return -(u1 + u2).pow(2) + 9 * u1 * u2
//(u1 - u2)^2 + u1 - 5u2
}
fun JDiff_U1(U: Vector): Double {
val u1 = U[0]
val u2 = U[1]
// return 2 * u1 - 2 * u2 + 1
// return 2 * (u1 - 5)
return 7 * u2 - 2 * u1
//2u1 - 2u2 + 1
}
fun JDiff_U2(U: Vector): Double {
val u1 = U[0]
val u2 = U[1]
// return 2 * u2 - 2 * u1 - 5
// return 2 * (u2 + 3)
return 7 * u1 - 2 * u2
//-2u1 + 2u2 - 5
}
fun JDiff_U1_U2(U: Vector): Double {
// return -2.0
// return 0.0
return 7.0
}
fun JDiff_U1_U1(U: Vector): Double {
// return 2.0
// return 2.0
return -2.0
}
fun JDiff_U2_U1(U: Vector): Double {
// return -2.0
// return 0.0
return 7.0
}
fun JDiff_U2_U2(U: Vector): Double {
// return 2.0
// return 2.0
return -2.0
}
fun getH(U: Vector): AbstractMatrix {
var array = arrayOf(
doubleArrayOf(JDiff_U1_U1(U), JDiff_U2_U1(U)),
doubleArrayOf(JDiff_U1_U2(U), JDiff_U2_U2(U))
)
return Matrix(array)
}
fun gradient(U: Vector): Vector {
val coeffs = listOf(JDiff_U1(U), JDiff_U2(U))
return Vector(coeffs)
}
fun gradValue(U: Vector): Double {
val gradient = gradient(U)
return module(gradient)
}
fun module(U: Vector): Double {
var value = 0.0
for (point in U) {
value += point.pow(2)
}
return sqrt(value)
}
}
}<file_sep>package containers
import func.Vector
class AlgorithmDataContainer(solution: Vector, iteration: Int, epsilon: Double, val algName: String)
: DataContainer(solution, iteration, epsilon) {
override fun toString(): String {
return super.toString().plus("\nалгоритм - $algName" )
}
}<file_sep>package func
import matrix.AbstractMatrix
import matrix.Matrix
import java.util.*
import kotlin.collections.ArrayList
class Vector(private val coefficients: List<Double>) : Iterable<Double> {
private val count = 4
var format = "%.${count}f"
override fun iterator(): Iterator<Double> {
return coefficients.iterator()
}
operator fun get(i: Int): Double {
return coefficients[i]
}
operator fun minus(vector: Vector): Vector {
var list = ArrayList<Double>(coefficients.size)
for ((i, item) in coefficients.withIndex()) {
list.add(item - vector[i])
}
return Vector(list)
}
@JvmName("timeByDouble")
operator fun times(coef: Double): Vector {
var list = ArrayList<Double>(coefficients.size)
for (item in coefficients) {
list.add(item * coef)
}
return Vector(list)
}
@JvmName("timeByMatrix")
operator fun times(matrix: AbstractMatrix): Vector {
var temp = this.toMatrix()
var result = temp * matrix
return toVector(result)
}
override fun toString(): String {
val sj = StringJoiner(", ")
for (item in coefficients) {
val str = String.format(format, item)
sj.add(str)
}
return "($sj)"
}
fun toMatrix(): AbstractMatrix {
return Matrix(
arrayOf(
doubleArrayOf(this[0], this[1])
)
)
}
companion object {
fun toVector(matrix: AbstractMatrix): Vector {
var list = ArrayList<Double>()
if (matrix.rowsNum == 1) {
for (elem in matrix.getRow(0)) {
list.add(elem)
}
}
return Vector(list)
}
}
}
private operator fun AbstractMatrix.times(matrix: AbstractMatrix): AbstractMatrix {
return this.mulMatrix(matrix)
}
<file_sep>import algs.AlgorithmI
import algs.multi.FastDownMethod
import algs.multi.NewtonsMethod
import algs.multi.StepDivideMethod
import containers.DataContainer
import func.Vector
fun main() {
val algs = listOf(StepDivideMethod(), FastDownMethod(), NewtonsMethod())
val bannedAlgs = emptyList<AlgorithmI>()
val args = mapOf("a" to 0.1, "u" to Vector(listOf(1.0, 1.0)))
var data = ArrayList<DataContainer>()
for (alg in algs) {
if (bannedAlgs.contains(alg)) continue
val container = alg.apply(args)
data.add(container)
}
data.forEach { println("$it\n") }
}
| ed4c134ee1f5aed7e9256bfad7a26bd45c8862df | [
"Kotlin",
"Gradle"
] | 12 | Kotlin | trokhinas/MO-2 | bf2458dca5d6faa866c860b9a92da93ae8159a0b | 5d9bdb2151cd8db66c96d767c026d41179669e1e | |
refs/heads/master | <file_sep># gamelist-app
a CRUD made with MERN stack

<file_sep>const {Router}= require('express');
const {getGames, createGame, getGame, deleteGame, editGame} = require('../controllers/gamesController');
//initialize
const router = Router();
//routes
router.route('/')
.get(getGames)
.post(createGame)
router.route('/:id')
.get(getGame)
.put(editGame)
.delete(deleteGame)
module.exports = router;<file_sep>import React, { Component } from 'react'
import axios from 'axios'
import {Link} from 'react-router-dom'
export default class GameList extends Component {
state = {
games: []
}
async componentDidMount(){
this.getGames()
}
getGames = async () => {
const res = await axios.get('http://localhost:4000/api/games')
console.log(res)
this.setState({
games: res.data.games
})
}
deleteGame = async id => {
await axios.delete('http://localhost:4000/api/games/'+ id)
this.getGames()
}
render() {
return (
<div className="row">
{this.state.games.map(game =>
<div className="col-md-4 p-2" key={game._id}>
<div className="card">
<div className="card-header bg-dark text-light">
<h5>{game.title}</h5>
</div>
<div className="card-body">
<p>{game.genre}</p>
<p>{game.status}</p>
</div>
<div className="card-footer bg-dark text-light d-flex justify-content-between">
<button
className="btn btn-danger"
onClick={() => this.deleteGame(game._id)}>
Delete
</button>
<Link
className="btn btn-secondary"
to={'/edit/' + game._id}>
Edit
</Link>
</div>
</div>
</div>
)}
</div>
)
}
}
| e02a3bd1c50df0012b25de5b9322efcf0ced6931 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | rtagliaviaz/gamelist-app | da8760d94968e01a62821a9120d99226bd69c154 | a696cf5f5847bbb32896e31051a0a00ad2214006 | |
refs/heads/master | <repo_name>lusen82/ProgrammingAssignment2<file_sep>/cachematrix.R
## These two functions cooperate in providing a cache construction for presenting
## the inverse of a matrix. If the inverse is already calculated, that value in the
## environment will be used. The cache can be cleared and reset on request.
## This function is used for caching of inverse of matrices.
## It defines four functions; set, get, setsolve and getsolve and
## puts them in a list to be returned.
makeCacheMatrix <- function(x = matrix()) {
## The inverse is first reset to NULL when wnating to re-
## calculate the matrix inverse for a new matrix or after an update:
inv <- NULL
## THe matrix is reset and the inv variable in the other environment is cleared:
set <- function(y) {
x <<- y
inv <<- NULL
}
## Returns the matrix
get <- function() x
## Sets the inv variable to the solve value:
setsolve <- function(solve) inv <<- solve
## Gets the inverted matrix that previous have been solve and put to env:
getsolve <- function() inv
##This objct is returned with the functions that can operate on it.
## Example: val = makeCacheMatrix(matrix(c(2,0,2,0), 2, 2)), calling
## val$get() will return the matrix..
list(set = set, get = get,
setsolve = setsolve,
getsolve = getsolve)
}
## This function returns an inverse of a matrix, if the inverse
## is created and exists already. If not, the inverse is calculated
## and is set to the parent environment with the help of setsolve
## defined in MackeCacheMatrix.
cacheSolve <- function(x, ...) {
## Return a matrix that is the inverse of 'x'
## Try to get the solved inverse and return it if its not null:
inv <- x$getsolve()
if(!is.null(inv)) {
message("getting cached data")
return(inv)
}
## Get the original matrix:
data <- x$get()
## Calculate the inverse of this matrix:
inv <- solve(data, ...)
## Store the inverse to the "cached" variable:
x$setsolve <- inv
## Return the inversed matrix:
inv
}
| 6c6f1ac86a39f499a1e87a96d6b66d028ea863e7 | [
"R"
] | 1 | R | lusen82/ProgrammingAssignment2 | 96bbf200bb84549935975a2fd920a3fde2dd6fb4 | 38952af9b0496e55292cc31c242e888c8b6b420d | |
refs/heads/master | <repo_name>drewnewell/tic-tac-toe-challenge<file_sep>/game/static/js/app.js
'use strict'
const wsClient = new WebSocket("ws://localhost:5000/listen")
class Users extends React.Component {
handleNewGame = (userId, event) => {
fetch('/api/games', {
credentials: 'include',
method: 'POST',
body: JSON.stringify({'player': userId}),
headers: {'Content-Type': 'application/json'}
}) .then( response => {
if (!response.ok) {
response.text().then(res => alert(res))
}
})
}
render() {
return (
<div className="col-3">
<h2>Users</h2>
<small>Click an opponent to start a new game!</small>
<ul>
{ this.props.users.map((user, i) => (
<li><a onClick={this.handleNewGame.bind(this, user.id)} key={i}>{user.id}, {user.name}</a></li>
))
}
</ul>
</div>
)
}
}
class Games extends React.Component {
constructor(props) {
super(props)
this.state = {
gameId: null
}
}
showGame = (game, gameId, event) => {
this.setState({
gameId: gameId
})
this.forceUpdate()
}
render() {
const getUser = (userId) => {
return this.props.users.filter((user) => user.id == userId)[0]
}
const gameStr = (game) => getUser(game.player1).name + ' v. ' + getUser(game.player2).name
return (
<div className="col-9">
<div className="row">
<div className="col-4">
<h2>Games</h2>
<small>Select a game to view.</small>
<ul>
{ this.props.games.map((game, i) => (
<li><a key={i} onClick={this.showGame.bind(this, game, i)}>{gameStr(game)}</a></li>
))
}
</ul>
</div>
<div className="col-8">
<Game games={this.props.games} gameId={this.state.gameId} />
</div>
</div>
</div>
)
}
}
class Game extends React.Component {
handleMove = (loc, event) => {
fetch('/api/games/' + this.props.gameId, {
credentials: 'include',
method: 'POST',
body: JSON.stringify({'location': loc}),
headers: {'Content-Type': 'application/json'}
}) .then( response => {
if (!response.ok) {
response.text().then(res => alert(res))
}
})
}
render() {
if (this.props.gameId == null) {
return <h2>Game</h2>
}
const game = this.props.games[this.props.gameId]
return (
<div>
<h2>Game</h2>
<div>turn: {game.turn}</div>
<div>result: {game.result}</div>
<div>
<ul className="board">
{ game.board.map((move, i) => (
<li className="square col-4" key={i} onClick={this.handleMove.bind(this, i)}>{move}</li>
))
}
</ul>
</div>
</div>
)
}
}
class App extends React.Component {
constructor(props) {
super(props)
this.state = {
userId: null,
users: [],
games: [],
}
}
componentDidMount() {
wsClient.onmessage = (message) => {
var data = JSON.parse(message.data)
Object.keys(data).map((key, i) => {
this.setState({
[key]: data[key]
})
})
}
}
handleUserUpdate = (event) => {
fetch('/api/username', {
credentials: 'include',
method: 'POST',
body: JSON.stringify({'username': event.target.value}),
headers: {'Content-Type': 'application/json'}
}).then(
response => {
if (!response.ok) { console.log(response) }
}
)
}
render() {
const user = this.state.users.filter(u => u.id == this.state.userId)[0]
if (!user) {
return <div>need user</div>
}
return (
<div className="container">
<div className="row my-3">
<div className="col-3">
<div className="form-group">
<label>user id: </label>
{user.id}
</div>
<div className="form-group">
<label for="username">username</label>
<input type="text" name="username" value={user.name} onChange={this.handleUserUpdate} />
<div><small>Enter a username to update.</small></div>
</div>
</div>
</div>
<div className="row">
<Users users={this.state.users} />
<Games games={this.state.games} users={this.state.users}/>
</div>
</div>
)
}
}
ReactDOM.render(
React.createElement(App),
document.getElementById('root')
)
<file_sep>/game/tictactoe.py
import asyncio
import uuid
import json
from functools import wraps
from quart import Quart, websocket, request, jsonify, session
from quart.templating import render_template
### App Setup ###
app = Quart(__name__)
app.secret_key = 'very_secret'
connected_websockets = set()
games = []
users = dict()
### Game Logic ###
class Game:
'''Tic Tac Toe game class holding the game state and rules.'''
def __init__(self, player1, player2, *args, **kwargs):
self.player1 = player1
self.player2 = player2
self.turn = self.player1
self.board = [None] * 9
def move(self, player, location):
if self.result() is not None:
raise Exception('game over!')
if player != self.turn:
raise Exception('its not your turn!')
if self.board[location] is not None:
raise Exception('invalid move')
self.board[location] = player
if player != self.player1:
self.turn = self.player1
else:
self.turn = self.player2
def result(self):
wins = [
[0, 1, 2],
[3, 4, 5],
[6, 7, 8],
[0, 3, 6],
[1, 4, 7],
[2, 5, 8],
[0, 4, 8],
[2, 4, 6],
]
# check for winner
for w in wins:
if self.board[w[0]] == self.board[w[1]] == self.board[w[2]] != None:
return self.board[w[0]]
# check for draw
if all(self.board):
return 'draw'
else:
return None
def to_json(self):
return dict(
player1 = self.player1,
player2 = self.player2,
turn = self.turn,
result = self.result(),
board = self.board,
)
class GameJSONEncoder(json.JSONEncoder):
'''JSON Encoder for game class.'''
def default(self, object_):
if isinstance(object_, Game):
return object_.to_json()
else:
return super().default(object_)
app.json_encoder = GameJSONEncoder
### Endpoints and Utils ###
def get_users():
'''Return users in a format suitable for front end.'''
return [dict(id=k, name=v) for k, v in users.items()]
def broadcast(message):
'''Send message to all other websockets, who are pulling from their queues.'''
for queue in connected_websockets:
queue.put_nowait(message)
def user_required(func):
'''Wrapper to make sure user has an id, broadcasts new users who join.'''
@wraps(func)
def wrapper(*args, **kwargs):
global users
if 'id' not in session:
session['id'] = uuid.uuid4().hex[:5]
if session['id'] not in users:
users[session['id']] = ''
# notify all players
broadcast(dict(users=get_users()))
return func(*args, **kwargs)
return wrapper
@app.route('/')
@user_required
def index():
return render_template('index.html')
@app.route('/api/games')
@user_required
def list_games():
return jsonify(dict(games=games))
@app.route('/api/username', methods=['POST'])
@user_required
async def update_username():
body = await request.json
users[session['id']] = body.get('username')
# notify all players
broadcast(dict(users=get_users()))
return 'success', 200
@app.route('/api/games', methods=['POST'])
@user_required
async def create_game():
body = await request.json
player1 = session['id']
player2 = body.get('player')
if player1 == player2:
return 'can\'t play yourself!', 403
new_game = Game(player1, player2)
games.append(new_game)
# notify all players
broadcast(dict(games=games))
return jsonify(games)
@app.route('/api/games/<int:game_id>')
@user_required
def get_game(game_id):
try:
return jsonify(games[game_id])
except IndexError:
return 'Game id not found', 404
@app.route('/api/games/<int:game_id>', methods=['POST'])
@user_required
async def move_game(game_id):
body = await request.json
try:
game = games[game_id]
except IndexError:
return 'Game id not found', 404
player = session['id']
location = body.get('location')
try:
game.move(player, location)
except Exception as e:
return str(e), 401
# notify all players
broadcast(dict(games=games))
return jsonify(game)
### Websocket Endpoint ###
def collect_websocket(func):
'''Wrapper to register all websocket connections.'''
@wraps(func)
async def wrapper(*args, **kwargs):
global connected_websockets
queue = asyncio.Queue()
connected_websockets.add(queue)
try:
return await func(queue, *args, **kwargs)
finally:
connected_websockets.remove(queue)
return wrapper
@app.websocket('/listen')
@collect_websocket
async def listen(queue):
'''Endpoint to broadcast queue messages to websocket connections.'''
# send initial state when first listening
await websocket.send(json.dumps(
dict(userId=session['id'], games=games, users=get_users()),
cls=GameJSONEncoder
))
while True:
# read from this websocket's queue and send
data = await queue.get()
await websocket.send(json.dumps(data, cls=GameJSONEncoder))
app.run()
| e395fbab4ae028599715f6045dbf765b6ea4d8b8 | [
"JavaScript",
"Python"
] | 2 | JavaScript | drewnewell/tic-tac-toe-challenge | 1fa0966e11c62f2d2a64393ebf022c6ae8578880 | 9eb7cccfe706898132aaa534c752155c9a7f4a89 | |
refs/heads/master | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Inventory : MonoBehaviour
{
List<Items> itemList;
public Inventory()
{
itemList = new List<Items>();
Debug.Log("Inventory");
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Projectile : MonoBehaviour
{
public float speed = 10000f;
public float lifeTime = 1f;
public Rigidbody rB;
void Start()
{
rB.AddForce(rB.transform.forward * speed);
}
void Awake() { Destroy(gameObject, lifeTime); }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Items : MonoBehaviour
{
public enum ItemType
{
Sword,
Shield,
Wand,
Potion,
}
public ItemType itemType;
public int amount;
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Gun : MonoBehaviour
{
public GameObject Sphere;
public Transform Spawn;
void Update()
{
if (Input.GetButtonDown("Fire1"))
{
Instantiate(Sphere, Spawn.position, Spawn.rotation);
//Debug.Log("fire" + Spawn.position + Spawn.rotation);
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class playercollision : MonoBehaviour
{
// Start is called before the first frame update
private void OnCollisionEnter(Collision collision)
{
if (collision.collider.tag == "object")
{
Debug.Log("awauu");
}
}
}
<file_sep># Unity-Project-Template
project template for unity
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PlayerMovement : MonoBehaviour
{
public float force = 1000f;
public Rigidbody rB;
void FixedUpdate()
{
float forcetime = force * Time.deltaTime;
if (Input.GetKey("w"))
{
rB.AddForce(0, 0, forcetime);
}
if (Input.GetKey("s"))
{
rB.AddForce(0, 0, -forcetime);
}
if (Input.GetKey("d"))
{
rB.AddForce(forcetime, 0, 0);
}
if (Input.GetKey("a"))
{
rB.AddForce(-forcetime, 0, 0);
}
//Get the Screen positions of the object
Vector2 positionOnScreen = Camera.main.WorldToViewportPoint (transform.position);
//Get the Screen position of the mouse
Vector2 mouseOnScreen = (Vector2)Camera.main.ScreenToViewportPoint(Input.mousePosition);
//Get the angle between the points
float angle = AngleBetweenTwoPoints(positionOnScreen, mouseOnScreen);
//Ta Daaa
transform.rotation = Quaternion.Euler (new Vector3(0f, -angle, 0f));
}
float AngleBetweenTwoPoints(Vector3 a, Vector3 b)
{
return Mathf.Atan2(a.y - b.y, a.x - b.x) * Mathf.Rad2Deg;
}
} | 261c38c18eb84cd3a8752239c368a36c1117e4b3 | [
"Markdown",
"C#"
] | 7 | C# | taamtera/NewUnityProject | 372870f6eccd7aa754816b1a7f1c40ea379aaa73 | 139b9d1d5c4624da89428fbc842b1dc5b820c9e2 | |
refs/heads/master | <repo_name>sjm00010/VetUJA<file_sep>/src/main/java/com/vetuja/controladores/ControladorCita.java
package com.vetuja.controladores;
import com.vetuja.DAO.CitaDAO;
import com.vetuja.clases.Cita;
import java.io.Serializable;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javax.annotation.PostConstruct;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
/**
*
* @author juanc
*/
@Named("ctrlCitas")
@ViewScoped
public class ControladorCita implements Serializable {

    private static final Logger logger = Logger.getLogger(ControladorCita.class.getName());

    /**
     * Pattern of a Spanish DNI: eight digits followed by one upper-case letter.
     * Hoisted to a shared constant so the regex is compiled once instead of on
     * every call (it was previously duplicated in getCitas and hayCitas).
     */
    private static final Pattern DNI_PATTERN = Pattern.compile("\\d{8}[A-Z]");

    @Inject
    private CitaDAO citasDAO;

    //View-Model
    private Cita cita;  // appointment currently bound to the view
    private Integer id; // appointment identifier bound from the view

    public ControladorCita() {
    }

    /**
     * Resets the view-model once CDI injection has completed: a blank
     * appointment and a zeroed identifier.
     */
    @PostConstruct
    private void init() {
        cita = new Cita();
        id = 0;
    }

    /**
     * @return the appointment currently held by the view-model
     */
    public Cita getCita() {
        return cita;
    }

    /**
     * Looks up a single appointment by its identifier.
     *
     * @param id identifier of the appointment
     * @return the result of the DAO lookup for that identifier
     */
    public Cita getCita(int id) {
        return citasDAO.buscaId(id);
    }

    public void setCita(Cita cita) {
        this.cita = cita;
    }

    /**
     * Lists appointments, optionally restricted to one client.
     *
     * @param id a client DNI; any string that is not a well-formed DNI selects
     *           every appointment instead
     * @return the appointments of the given client, or all appointments
     */
    public List<Cita> getCitas(String id) {
        if (DNI_PATTERN.matcher(id).matches()) {
            return citasDAO.buscaCitas(id);
        }
        return citasDAO.buscaTodos();
    }

    /** Reloads the current appointment from the DAO using its own identifier. */
    public void recupera() {
        cita = citasDAO.buscaId(cita.getId());
    }

    /**
     * Deletes an appointment.
     *
     * @param c the appointment to delete
     * @return navigation outcome: back to the listing on success, to the home
     *         page otherwise
     */
    public String borra(Cita c) {
        if (citasDAO.borra(c.getId())) {
            return "citas.xhtml?faces-redirect=true";
        } else {
            return "inicio.xhtml?faces-redirect=true";
        }
    }

    /**
     * Persists the appointment held in the view-model.
     *
     * @return navigation outcome on success, or {@code null} to stay on the
     *         current page when creation fails
     */
    public String creaCita() {
        if (citasDAO.crea(cita)) {
            return "/common_users/citas.xhtml?faces-redirect=true";
        } else {
            return null;
        }
    }

    /**
     * @return the id
     */
    public Integer getId() {
        return id;
    }

    /**
     * @param id the id to set
     */
    public void setId(Integer id) {
        this.id = id;
    }

    /**
     * Tells whether any appointment exists for the given argument, applying
     * the same DNI-or-everything rule as {@link #getCitas(String)} (the two
     * previously duplicated that branching; this now delegates).
     *
     * @param id a client DNI, or any other string to consider all appointments
     * @return {@code true} when at least one appointment matches
     */
    public boolean hayCitas(String id) {
        logger.log(Level.INFO, id);
        return !getCitas(id).isEmpty();
    }
}
<file_sep>/src/main/webapp/resources/js/visualizaDuenio.js
// Tiny DOM helper: first element matching the given CSS selector.
const el = (sel) => document.querySelector(sel);
/**
 * Controller for the owner-detail panel: fetches an owner (dueño) from the
 * REST API and fills the detail panel in the page.
 */
class DuenioCtrl {
    constructor() {
        this.srvUrl = "api/duenio"; //REST service url
        //view-model
    }

    init() { }

    /**
     * Loads owner `id` and, on success, shows the detail panel.
     *
     * @param {string} id owner identifier (DNI) appended to the service url
     * @returns {Promise<boolean>} resolves true when the server accepted the id
     */
    detalleDuenio(id) {
        let panel = el('#form\\:paneldetalle');
        let enviado = false;
        return fetch(this.srvUrl + "/" + id)
            .then(response => {
                if (response.ok) {
                    enviado = true; //dueño accepted in server
                } //else the body carries bean-validation style errors
                return response.json();
            }).then(response => {
                if (enviado === true) {
                    console.log(`Confirmado detalle de dueño: ${response.DNI}`);
                    el('#fotoDuenio').innerHTML = `<img src = "${response.foto}" alt="foto cliente" class="card-img" height="100%"/>`;
                    el('#detalleNom').innerHTML = `${response.nombre} ${response.apellidos}`;
                    el('#detalleDir').innerHTML = `${response.direccion}`;
                    panel.style.display = "block";
                } else { //show the error payload returned by the server
                    console.warn(response);
                }
                return enviado;
            }).catch(ex => { //Network / JSON error
                panel.style.display = "none";
                // BUG FIX: the caught exception was discarded, making failures
                // impossible to diagnose; include it in the log.
                console.error("Error en conexión", ex);
                return enviado;
            });
    }
}
window.addEventListener('load', () => {
//Create and initialize controller
window.ctrl = new DuenioCtrl();
console.log('Inicializando controlador dueño');
ctrl.init();
});<file_sep>/src/main/java/com/vetuja/controladores/Preferencias.java
package com.vetuja.controladores;
import com.vetuja.clases.Cliente;
import com.vetuja.clases.Veterinario;
import java.io.Serializable;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.enterprise.context.SessionScoped;
import javax.inject.Named;
/**
 * Session-scoped bean holding the currently logged-in user: either a
 * Cliente, a Veterinario, or neither (anonymous visitor).
 *
 * @author sjm00010
 */
@Named(value = "prefs")
@SessionScoped
public class Preferencias implements Serializable {

    private static final Logger logger = Logger.getLogger(Preferencias.class.getName());

    // At most one of these is expected to be non-null; vet takes precedence.
    private Cliente cli;
    private Veterinario vet;

    /** Creates an anonymous preferences bean with no user attached. */
    public Preferencias() {
        cli = null;
        vet = null;
    }

    /** Creates a preferences bean already bound to the given users. */
    public Preferencias(Cliente cli, Veterinario vet) {
        this.cli = cli;
        this.vet = vet;
    }

    /**
     * @return the cli
     */
    public Cliente getCli() {
        return cli;
    }

    /**
     * @param cli the cli to set
     */
    public void setCli(Cliente cli) {
        this.cli = cli;
    }

    /**
     * @return the vet
     */
    public Veterinario getVet() {
        return vet;
    }

    /**
     * @param vet the vet to set
     */
    public void setVet(Veterinario vet) {
        this.vet = vet;
    }

    /**
     * Photo of whoever is logged in.
     *
     * @return the veterinarian's photo, else the client's, else null
     */
    public String getFoto() {
        if (vet != null) {
            return vet.getFoto();
        }
        return cli != null ? cli.getFoto() : null;
    }

    /**
     * Name of whoever is logged in.
     *
     * @return the veterinarian's name, else the client's, else null
     */
    public String getNombre() {
        if (vet != null) {
            return vet.getNombre();
        }
        return cli != null ? cli.getNombre() : null;
    }

    /**
     * Kind of user bound to this session.
     *
     * @return "ADMIN" for a veterinarian, "USER" for a client,
     *         null when nobody is logged in
     */
    public String isLogin() {
        if (vet != null) {
            return "ADMIN";
        }
        return cli != null ? "USER" : null;
    }

    /**
     * Identifier of the logged-in client.
     *
     * @return the client's DNI, or null when no client is logged in
     */
    public String getID() {
        return cli != null ? cli.getDNI() : null;
    }
}
<file_sep>/src/main/java/com/vetuja/controladores/ControladorUsuario.java
package com.vetuja.controladores;
import com.vetuja.DAO.CitaDAO;
import com.vetuja.DAO.ClienteDAO;
import com.vetuja.DAO.MascotaDAO;
import com.vetuja.DAO.VeterinarioDAO;
import com.vetuja.clases.Cliente;
import com.vetuja.clases.Veterinario;
import java.io.Serializable;
import java.text.ParseException;
import java.util.List;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.faces.context.FacesContext;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
/**
 * JSF view-scoped controller for user management (clients and
 * veterinarians): exposes the view-model objects and the CRUD actions
 * used by the pages.
 *
 * @author sjm00010
 */
@Named("ctrlUser")
@ViewScoped
public class ControladorUsuario implements Serializable {
    private static final Logger logger = Logger.getLogger(ControladorUsuario.class.getName());
    @Inject
    FacesContext fc;
    @Inject
    private ClienteDAO clientesDAO;
    @Inject
    private VeterinarioDAO veterinariosDAO;
    /* When an identifier (a client's DNI) changes, the entities holding
       string references to clients or veterinarians must be kept in sync —
       hence the extra DAOs below (see borraCliente). */
    @Inject
    private MascotaDAO mascotasDAO;
    @Inject
    private CitaDAO citasDAO;
    // View-Model
    private Cliente cliente;          // client being created/edited
    private Veterinario veterinario;  // veterinarian being viewed/edited
    private String aux;               // scratch value: compared against the password in
                                      // creaCliente (presumably the confirmation field);
                                      // holds the original DNI in modificaCliente (set by recupera())
    public ControladorUsuario() {
    }
    @PostConstruct
    private void init() {
        cliente = new Cliente();
        veterinario = new Veterinario();
        aux=null;
    }
    /****************************************
     *          Getters and Setters         *
     ****************************************/
    public Cliente getCliente() {
        return cliente;
    }
    /** @return the client with the given DNI, as found by the DAO */
    public Cliente getCliente(String dni) {
        return clientesDAO.buscaId(dni);
    }
    public void setCliente(Cliente cliente) {
        this.cliente = cliente;
    }
    public List<Cliente> getClientes() {
        return clientesDAO.buscaTodos();
    }
    public Veterinario getVeterinario() {
        return veterinario;
    }
    /** @return the veterinarian with the given licence code, as found by the DAO */
    public Veterinario getVeterinario(String id) {
        return veterinariosDAO.buscaId(id);
    }
    public void setVeterinario(Veterinario veterinario) {
        this.veterinario = veterinario;
    }
    public List<Veterinario> getVeterinarios() {
        return veterinariosDAO.buscaTodos();
    }
    /**
     * @return the aux
     */
    public String getAux() {
        return aux;
    }
    /**
     * @param aux the aux to set
     */
    public void setAux(String aux) {
        this.aux = aux;
    }
    /** Reloads the current veterinarian from persistent storage. */
    public void recuperaVet() {
        veterinario = veterinariosDAO.buscaId(veterinario.getCodCol());
    }
    // NOTE(review): the two lookups below throw NullPointerException when the
    // id is unknown (buscaId returning null); confirm callers only pass
    // existing identifiers.
    public String getNombreCli(String DNI) {
        Cliente cli = clientesDAO.buscaId(DNI);
        return cli.getNombre();
    }
    public String getNombreVet(String CC) {
        Veterinario vet = veterinariosDAO.buscaId(CC);
        return vet.getNombre();
    }
    /** Reloads the current client and remembers its DNI in {@code aux}. */
    public void recupera() {
        cliente = clientesDAO.buscaId(cliente.getDNI());
        this.aux = cliente.getDNI();
    }
    /**
     * Loads both view-model objects; used to fill the pet-detail view.
     */
    public void recuperaAll() {
        cliente = clientesDAO.buscaId(cliente.getDNI());
        veterinario = veterinariosDAO.buscaId(veterinario.getCodCol());
    }
    /****************************************
     *           CRUD operations            *
     ****************************************/
    /**
     * Registers the current client, provided the password matches the value
     * the view stored in {@code aux} (presumably the confirmation field).
     *
     * @return navigation outcome on success, null to stay on the page
     */
    public String creaCliente() {
        if (cliente.getPass().equals(getAux())) {
            if (clientesDAO.crea(cliente)) {
                return "/inicio/inicio.jsf?faces-redirect=true";
            }
        }
        return null;
    }
    /**
     * Saves the edited client. When the in-place save fails — presumably
     * because the DNI (the primary key) was changed — the client is
     * re-created under the new key and the record under the old DNI
     * (kept in {@code aux} by {@link #recupera()}) is deleted.
     */
    public String modificaCliente() throws ParseException {
        if (!clientesDAO.guarda(cliente)) {
            clientesDAO.crea(cliente);
            clientesDAO.borra(aux);
        }
        return "/admin/clientes.xhtml?faces-redirect=true";
    }
    /**
     * Deletes a client and cascades the removal to their appointments
     * and pets.
     *
     * @param id DNI of the client to delete
     * @return null (the view stays on the same page)
     */
    public String borraCliente(String id) {
        clientesDAO.borra(id);
        citasDAO.borraCli(id);
        mascotasDAO.borraCli(id);
        return null;
    }
}
<file_sep>/src/main/webapp/common_users/js/app-cita.module.js
/**
 * AngularJS controller for the appointments view: loads the list of
 * veterinarians up front and, on demand, the schedule of a selected one.
 */
class ControladorCitas {
    constructor($http) {
        console.log("Controlador cargado");
        this.$http = $http;
        this.serviceUrl = 'api/cita';
        // view-model state
        this.cc = "";           // licence code of the selected veterinarian
        this.selec = false;     // whether a schedule is ready to display
        this.veterinarios = [];
        this.horas = [];
        this.cargaVeterinarios();
    }

    /** Fetches every veterinarian from the REST service. */
    cargaVeterinarios() {
        this.$http.get(this.serviceUrl).then(response => {
            console.log("Veterinarios cargados");
            this.veterinarios = response.data;
        });
    }

    /** Loads the schedule of the veterinarian with licence code `cc`. */
    muestraHorario(cc) {
        this.cc = cc;
        const url = `${this.serviceUrl}/${cc}`;
        this.$http.get(url)
            .then(response => {
                this.horas = response.data;
                this.errorMsgs = [];
            })
            .then(() => this.visualiza())
            .catch(response => { // bean validation errors
                this.errorMsgs = response.data;
            });
    }

    /** Marks the view so the schedule panel becomes visible. */
    visualiza() {
        this.selec = true;
    }
}
// Explicit DI annotation so minification cannot break the injection.
ControladorCitas.$inject = ['$http'];
// Component definition: template + controller for the <app-citas> element.
const appCitasComponent = {
    templateUrl: "./js/app-citas.template.html",
    controller: ControladorCitas
};
// NOTE(review): the exported identifier says "Libros" although this is the
// citas module — looks like a leftover name; importers reference it, so
// renaming needs a coordinated change.
export const AppLibrosModule = angular
    .module("citas.app", [])
    .component("appCitas", appCitasComponent)
    .name;
<file_sep>/src/main/java/com/vetuja/clases/Mascota.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.vetuja.clases;
import java.io.Serializable;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Logger;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.*;
/**
 * JPA entity describing a pet registered in the clinic.
 * <p>
 * The 15-digit identification code (microchip number) is the primary key.
 * {@code cliDNI} and {@code vetCC} are plain string references to the owner
 * and to the veterinarian who registered the pet — there is no JPA
 * relationship mapping. All constraint messages are user-facing Spanish
 * strings used by bean validation.
 *
 * @author dgl00018
 */
@Entity
public class Mascota implements Serializable {
    // Identification code: exactly 15 digits (primary key).
    @Id
    @Pattern(regexp = "\\d{15}", message = "Código identificación incorrecto, debe tener 15 números.")
    private String ci;
    @Size(min = 3, max = 12, message = "El nombre debe tener una longitud entre {min} y {max} caracteres.")
    private String nombre;
    @Size(min = 2, max = 12, message = "La raza debe tener una longitud entre {min} y {max} caracteres.")
    private String raza;
    @Size(min = 2, max = 12, message = "La especie debe tener una longitud entre {min} y {max} caracteres.")
    private String especie;
    // Either "Macho" or "Hembra".
    @Pattern(regexp = "(Macho|Hembra)", message = "El sexo debe ser Macho o Hembra.")
    private String sexo;
    // Height in cm (per the validation message).
    @Positive(message = "La altura tiene que ser mayor a 0 cm.")
    private Integer altura;
    // Weight in g (per the validation message).
    @Positive(message = "El peso tiene que ser mayor a 0 g.")
    private Integer peso;
    // Birth date: required and must lie in the past.
    @Past(message = "Tu mascota debe haber nacido, revisa la fecha de nacimiento.")
    @NotNull(message = "La fecha de nacimiento es requerida.")
    @Temporal(TemporalType.DATE)
    private Date fechanac;
    // File name of the pet's photo.
    @NotEmpty(message = "Debe introducir una foto de la mascota")
    private String foto;
    // DNI of the owning client: 8 digits + uppercase letter.
    @Pattern(regexp = "\\d{8}[A-Z]", message = "DNI incorrecto, debe tener 8 números y la letra debe estar en mayúscula.")
    @NotNull(message = "Se debe seleccionar un cliente.")
    private String cliDNI;
    // Licence code of the registering veterinarian: 2 uppercase letters + 4 digits.
    @Pattern(regexp = "[A-Z]{2}[0-9]{4}", message = "Código de colegiado incorrecto, debe tener 2 letras y 4 números.")
    @NotNull(message = "Se debe seleccionar un veterinario.")
    private String vetCC;
    // Logger retained for error reporting (currently unused in this class).
    private static final Logger logger = Logger.getLogger(Mascota.class.getName());
    /** Creates an empty pet; note cliDNI and vetCC stay null until set. */
    public Mascota() {
        ci = "";
        nombre = "";
        raza = "";
        especie = "";
        sexo = "";
        fechanac = null;
        foto = "";
        altura = null;
        peso = null;
    }
    /**
     * @param ci       identification code (15-digit microchip number)
     * @param nombre   pet's name
     * @param raza     breed
     * @param especie  species
     * @param sexo     sex ("Macho" or "Hembra")
     * @param altura   height (cm, per the validation message)
     * @param peso     weight (g, per the validation message)
     * @param fechanac birth date
     * @param foto     pet's photo (file name)
     * @param cliDNI   DNI of the owner
     * @param vetCC    licence code of the veterinarian who registered the pet
     */
    public Mascota(String ci, String nombre, String raza, String especie, String sexo, Integer altura, Integer peso, Date fechanac, String foto, String cliDNI, String vetCC) {
        this.ci = ci;
        this.nombre = nombre;
        this.raza = raza;
        this.especie = especie;
        this.sexo = sexo;
        this.altura = altura;
        this.peso = peso;
        this.fechanac = fechanac;
        this.foto = foto;
        this.cliDNI = cliDNI;
        this.vetCC = vetCC;
    }
    /**
     * @return the identification code
     */
    public String getCi() {
        return ci;
    }
    /**
     * @param ci the identification code to set
     */
    public void setCi(String ci) {
        this.ci = ci;
    }
    /**
     * @return the name
     */
    public String getNombre() {
        return nombre;
    }
    /**
     * @param nombre the name to set
     */
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }
    /**
     * @return the breed
     */
    public String getRaza() {
        return raza;
    }
    /**
     * @param raza the breed to set
     */
    public void setRaza(String raza) {
        this.raza = raza;
    }
    /**
     * @return the sex
     */
    public String getSexo() {
        return sexo;
    }
    /**
     * @param sexo the sex to set
     */
    public void setSexo(String sexo) {
        this.sexo = sexo;
    }
    /**
     * @return the birth date
     */
    public Date getFechanac() {
        return fechanac;
    }
    /**
     * @param fechanac the birth date to set
     */
    public void setFechanac(Date fechanac) {
        this.fechanac = fechanac;
    }
    /**
     * @return file name of the pet's photo, inside the 'mascotas' folder
     */
    public String getFoto() {
        return foto;
    }
    /**
     * @param foto file name of the pet's photo, inside the 'mascotas' folder
     */
    public void setFoto(String foto) {
        this.foto = foto;
    }
    /**
     * @return the cliDNI
     */
    public String getCliDNI() {
        return cliDNI;
    }
    /**
     * @param cliDNI the cliDNI to set
     */
    public void setCliDNI(String cliDNI) {
        this.cliDNI = cliDNI;
    }
    /**
     * @return the vetCC
     */
    public String getVetCC() {
        return vetCC;
    }
    /**
     * @param vetCC the vetCC to set
     */
    public void setVetCC(String vetCC) {
        this.vetCC = vetCC;
    }
    /**
     * @return the altura
     */
    public Integer getAltura() {
        return altura;
    }
    /**
     * @param altura the altura to set
     */
    public void setAltura(Integer altura) {
        this.altura = altura;
    }
    /**
     * @return the peso
     */
    public Integer getPeso() {
        return peso;
    }
    /**
     * @param peso the peso to set
     */
    public void setPeso(Integer peso) {
        this.peso = peso;
    }
    /**
     * @return the especie
     */
    public String getEspecie() {
        return especie;
    }
    /**
     * @param especie the especie to set
     */
    public void setEspecie(String especie) {
        this.especie = especie;
    }
    /**
     * @return the birth date formatted as dd-MM-yyyy
     *         (throws NullPointerException when fechanac is null)
     */
    public String leerFecha(){
        DateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy");
        return dateFormat.format(this.fechanac);
    }
}
<file_sep>/src/main/java/com/vetuja/resources/DuenioResource.java
package com.vetuja.resources;
import com.vetuja.DAO.ClienteDAO;
import com.vetuja.clases.Cliente;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
 * JAX-RS resource exposing pet owners (clients) as JSON.
 * Mounted at {@code /api/duenio}.
 *
 * @author sjm00010
 */
@Path("/duenio") //Acceso /api/duenio
@Produces(MediaType.APPLICATION_JSON)
public class DuenioResource {

    @Inject
    private ClienteDAO clientesDAO;

    /**
     * Looks up an owner by DNI.
     *
     * @param id the client's DNI
     * @return 200 with the client as JSON, or 404 with an error payload of
     *         the shape {@code [{"message": ...}]} when the id is unknown
     */
    @GET
    @Path("/{id}")
    public Response getDuenio(@PathParam("id") String id) {
        Cliente l = clientesDAO.buscaId(id);
        if (l != null) {
            return Response.ok(l).build();
        }
        // Error payload kept as a list of {message} objects so the JS client
        // can render it like bean-validation errors.
        List<Map<String, Object>> errores = new ArrayList<>();
        Map<String, Object> err = new HashMap<>();
        err.put("message", "El dueño no existe");
        errores.add(err);
        // BUG FIX: a missing resource is 404 NOT_FOUND, not 400 BAD_REQUEST.
        // The known client (visualizaDuenio.js) only checks response.ok,
        // so this change stays compatible.
        return Response.status(Response.Status.NOT_FOUND)
                .entity(errores).build();
    }
}
<file_sep>/README.md
# VetUJA
## Proyecto DAW
### Descripción
VetUJA es una aplicación web para gestionar una clínica veterinaria. Permite a los clientes llevar un control sobre sus mascotas y sobre las citas con los veterinarios, y a los veterinarios tener todo organizado de forma fácil y cómoda.
### Tecnologías usadas
- **Maven** : Gestor de dependencias
- **Payara Server** : Servidor para lanzar la página
- **JSF** : Páginas *.xhtml* procesadas en el servidor
- **JPA** : Para el almacenamiento de los datos en SQL, haciendo uso de H2 para la base de datos
- **JS** : Para la validación de los formularios desde el cliente
- **REST** : Para ciertos servicios asíncronos
- **AngularJS** : Uso de los componentes para la página citas
<file_sep>/src/main/webapp/resources/js/validaModificaCliente.js
// Bootstrap: once the DOM is ready, expose the controller globally
// and attach the view event handlers.
$(function () {
    window.ctrl = new modifClienteCtrl();
    ctrl.init();
});
/**
 * Client-side validation for the "modify client" form. Mirrors the bean
 * validation constraints declared on the Cliente entity so the user gets
 * feedback without a server round-trip.
 */
class modifClienteCtrl {
    constructor() {
        // JSF-generated ids ("form:field") need an escaped colon in selectors.
        // NOTE(review): the form selector lacks the "fModifCliente" prefix
        // used by every field and carries an MC suffix — confirm it matches
        // the id actually rendered in the xhtml.
        this.config = {
            formulario: "#fModifClienteMC",
            nombre: "#fModifCliente\\:nombreMC",
            apellidos: "#fModifCliente\\:apellidosMC",
            fecha: "#fModifCliente\\:fnacMC",
            direccion: "#fModifCliente\\:direccionMC",
            email: "#fModifCliente\\:emailMC",
            foto: "#fModifCliente\\:fotoMC",
            contrasena: "#fModifCliente\\:passMC"
        };
    }

    /** Attaches the submit handler that blocks invalid submissions. */
    init() {
        $(this.config.formulario)
            .on('submit', event => { //ev. handler
                if (this.validarFormulario() === false) {
                    //Stop submission
                    event.preventDefault();
                }
            });
        console.log("Iniciando controlador JS");
    }

    /**
     * Validates every field, painting or clearing the matching error box.
     * @returns {boolean} true when all fields are valid
     */
    validarFormulario() {
        console.log("Validación modificación cliente con JS");
        let el = selector => document.querySelector(selector);
        let nombre = $(this.config.nombre).val();
        let apellidos = $(this.config.apellidos).val();
        let fnac = $(this.config.fecha).val();
        let dir = $(this.config.direccion).val();
        let email = $(this.config.email).val();
        let foto = $(this.config.foto).val();
        let contrasena = $(this.config.contrasena).val();
        let valido = true;
        // Validation (messages mirror the server-side constraints)
        if (nombre.length < 3 || nombre.length > 12) {
            el('#errNombreMC').innerHTML = `<p class= "alert alert-danger"> La longitud del nombre debe estar entre 3 y 12 caracteres. </p>`;
            valido = false;
        } else {
            el('#errNombreMC').innerHTML = "";
        }
        if (apellidos.length < 6 || apellidos.length > 20) {
            el('#errApellMC').innerHTML = `<p class="alert alert-danger"> La longitud de los apellidos debe estar entre 6 y 20 caracteres. </p>`;
            valido = false;
        } else {
            el('#errApellMC').innerHTML = "";
        }
        if (fnac.length === 0) {
            el('#errFechaMC').innerHTML = `<p class="alert alert-danger"> Debe introducir una fecha. </p>`;
            valido = false;
        } else {
            el('#errFechaMC').innerHTML = "";
        }
        if (dir.length < 6 || dir.length > 30) {
            el('#errDirMC').innerHTML = `<p class="alert alert-danger"> La longitud de la dirección debe estar entre 6 y 30 caracteres. </p>`;
            valido = false;
        } else {
            el('#errDirMC').innerHTML = "";
        }
        // BUG FIX: the pattern was unanchored and used with .search(), so any
        // string merely CONTAINING an address passed. The server's @Pattern
        // matches the whole value (Java matches() anchors implicitly), so
        // anchor here too for consistent feedback.
        if (!/^[a-zA-Z0-9._+-]+@[a-zA-Z]+\.[a-zA-Z.]{2,}$/.test(email)) {
            el('#errEmailMC').innerHTML = `<p class="alert alert-danger"> El correo debe tener el formato <EMAIL> </p>`;
            valido = false;
        } else {
            el('#errEmailMC').innerHTML = "";
        }
        if (foto.length === 0) {
            el('#errFotoMC').innerHTML = `<p class="alert alert-danger"> Debe introducir una foto. </p>`;
            valido = false;
        } else {
            el('#errFotoMC').innerHTML = "";
        }
        if (contrasena.length < 6 || contrasena.length > 10) {
            el('#errContrasenaMC').innerHTML = `<p class="alert alert-danger"> La longitud de la contraseña debe estar entre 6 y 10 caracteres. </p>`;
            valido = false;
        } else {
            el('#errContrasenaMC').innerHTML = "";
        }
        return valido;
    }
}
// Confirmation dialog shown before deleting a client; cancels the
// pending action when the user backs out.
function borrar(event) {
    console.log("Confirma borrado");
    const opcion = confirm("¿Estás seguro de borrar el cliente?");
    console.log(opcion);
    if (!opcion) {
        console.log("Cancelado");
        event.preventDefault();
    }
}
//Ventana emergente modificado cliente
function modifica(event) {
console.log("Confirma modificado cliente");
let opcion = confirm("¿Estás seguro de modificar el cliente?");
console.log(opcion);
if (opcion === false) {
console.log("Cancelado");
event.preventDefault();
}
}<file_sep>/src/main/webapp/resources/js/validaModificaMascota.js
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
 * Client-side validation for the "modify pet" form. Mirrors the bean
 * validation constraints declared on the Mascota entity so the user gets
 * feedback without a server round-trip.
 */
class modifMascotaCtrl {
    constructor() {
        // JSF-generated ids ("form:field") need an escaped colon in selectors.
        this.config = {
            formulario: "#form",
            nombre: "#form\\:nombreMM",
            raza: "#form\\:razaMM",
            especie: "#form\\:especieMM",
            sexo: "#form\\:sexoMM",
            fecha: "#form\\:fechanacMM",
            altura: "#form\\:alturaMM",
            peso: "#form\\:pesoMM",
            foto: "#form\\:fotoMM",
            cliDNI: "#form\\:cliDNIMM",
            vetCC: "#form\\:vetCCMM"
        };
    }

    /** Attaches the submit handler that blocks invalid submissions. */
    init() {
        $(this.config.formulario)
            .on('submit', event => { //ev. handler
                if (this.validarFormulario() === false) {
                    //Stop submission
                    event.preventDefault();
                }
            });
        console.log("Iniciando controlador JS");
    }

    /**
     * Validates every field, painting or clearing the matching error box.
     * @returns {boolean} true when all fields are valid
     */
    validarFormulario() {
        // BUG FIX: the log said "cliente" — copy-paste leftover from the
        // client validator.
        console.log("Validación modificación mascota con JS");
        let el = selector => document.querySelector(selector);
        let nombre = $(this.config.nombre).val();
        let raza = $(this.config.raza).val();
        let especie = $(this.config.especie).val();
        let sexo = $(this.config.sexo).val();
        let fechanac = $(this.config.fecha).val();
        let foto = $(this.config.foto).val();
        let altura = $(this.config.altura).val();
        let peso = $(this.config.peso).val();
        let cliDNI = $(this.config.cliDNI).val();
        let vetCC = $(this.config.vetCC).val();
        let valido = true;
        // Pet validation (messages mirror the server-side constraints)
        if (nombre.length < 3 || nombre.length > 12) {
            el('#errNombreMM').innerHTML = `<p class= "alert alert-danger"> La longitud del nombre debe estar entre 3 y 12 caracteres. </p>`;
            valido = false;
        } else {
            el('#errNombreMM').innerHTML = "";
        }
        if (raza.length < 2 || raza.length > 12) {
            el('#errRazaMM').innerHTML = `<p class="alert alert-danger"> La raza debe tener una longitud entre 2 y 12 caracteres. </p>`;
            valido = false;
        } else {
            el('#errRazaMM').innerHTML = "";
        }
        if (especie.length < 2 || especie.length > 12) {
            el('#errEspecieMM').innerHTML = `<p class="alert alert-danger"> La especie debe tener una longitud entre 2 y 12 caracteres. </p>`;
            valido = false;
        } else {
            el('#errEspecieMM').innerHTML = "";
        }
        // BUG FIX: was `sexo !== "Macho" || sexo !== "Hembra"`, which is true
        // for EVERY value, so the sex field was always flagged.
        if (sexo !== "Macho" && sexo !== "Hembra") {
            el('#errSexoMM').innerHTML = `<p class="alert alert-danger"> El sexo debe ser Macho o Hembra </p>`;
            valido = false;
        } else {
            el('#errSexoMM').innerHTML = "";
        }
        // BUG FIX: was `altura.length <= 0` (only caught the empty string);
        // validate the numeric value like the server's @Positive does.
        if (!(Number(altura) > 0)) {
            el('#errAlturaMM').innerHTML = `<p class="alert alert-danger"> La altura debe ser mayor que 0. </p>`;
            valido = false;
        } else {
            el('#errAlturaMM').innerHTML = "";
        }
        // BUG FIX: same as altura — check the value, not the string length.
        if (!(Number(peso) > 0)) {
            el('#errPesoMM').innerHTML = `<p class="alert alert-danger"> El peso tiene que ser mayor a 0 g </p>`;
            valido = false;
        } else {
            el('#errPesoMM').innerHTML = "";
        }
        // BUG FIX: condition was inverted (`length > 0` flagged filled-in
        // dates and accepted empty ones); compare the client validator.
        if (fechanac.length === 0) {
            el('#errFechanacMM').innerHTML = `<p class="alert alert-danger"> Debe introducir una fecha. </p>`;
            valido = false;
        } else {
            el('#errFechanacMM').innerHTML = "";
        }
        if (foto.length === 0) {
            el('#errFotoMM').innerHTML = `<p class="alert alert-danger"> Debe introducir una foto. </p>`;
            valido = false;
        } else {
            el('#errFotoMM').innerHTML = "";
        }
        // BUG FIX: was a bare length check; enforce the DNI format the error
        // message promises (and that the server's @Pattern requires).
        if (!/^\d{8}[A-Z]$/.test(cliDNI)) {
            el('#errCliDNIMM').innerHTML = `<p class="alert alert-danger"> El DNI debe tener 8 números y la letra debe estar en mayúscula. </p>`;
            valido = false;
        } else {
            el('#errCliDNIMM').innerHTML = "";
        }
        if (vetCC.length <= 0) {
            el('#errVetCCMM').innerHTML = `<p class="alert alert-danger"> El código de colegiado debe tener longitud mayor a 0 </p>`;
            valido = false;
        } else {
            // BUG FIX: the clearing selector was '#errvetCCMM' (wrong case),
            // so this error box was never cleared once shown.
            el('#errVetCCMM').innerHTML = "";
        }
        return valido;
    }
}
// Confirmation dialog shown before deleting a pet; cancels the pending
// action when the user backs out.
function borrar_m(event) {
    console.log("Confirma borrado");
    const opcion = confirm("¿Estás seguro de borrar la mascota?");
    console.log(opcion);
    if (!opcion) {
        console.log("Cancelado");
        event.preventDefault();
    }
}
// Confirmation dialog shown before saving changes to a pet; cancels the
// pending action when the user backs out.
function modifica_m(event) {
    console.log("Confirma modificar mascota");
    const opcion = confirm("¿Estás seguro de modificar la mascota?");
    console.log(opcion);
    if (!opcion) {
        console.log("Cancelado");
        event.preventDefault();
    }
}
<file_sep>/src/main/java/com/vetuja/controladores/IdentificadorUsuario.java
package com.vetuja.controladores;
import com.vetuja.DAO.ClienteDAO;
import com.vetuja.DAO.VeterinarioDAO;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.security.enterprise.credential.UsernamePasswordCredential;
import javax.security.enterprise.identitystore.CredentialValidationResult;
import static javax.security.enterprise.identitystore.CredentialValidationResult.INVALID_RESULT;
import javax.security.enterprise.identitystore.IdentityStore;
/**
 * Temporary identity store used for testing; to be removed/replaced in the
 * JPA-backed version (original author's note).
 * <p>
 * Validates credentials against the client store first and, when the
 * username is unknown there, against the veterinarian store. The container's
 * authentication mechanism discovers this class via the IdentityStore SPI.
 *
 * @author sjm00010
 */
@ApplicationScoped
public class IdentificadorUsuario implements IdentityStore {
    // User data sources: clients (role USUARIOS) and veterinarians
    // (role ADMINISTRADORES).
    @Inject
    private ClienteDAO clientesDAO;
    @Inject
    private VeterinarioDAO veterinariosDAO;
    public IdentificadorUsuario() {
    }
    /**
     * Validates a username/password pair and returns the caller's roles.
     *
     * NOTE(review): passwords are compared in plain text with String.equals —
     * no hashing and not constant-time. The class javadoc calls this class
     * temporary; confirm hashing is added before any production use.
     */
    public CredentialValidationResult validate(
            UsernamePasswordCredential usernamePasswordCredential) {
        // Credentials supplied by the server's authentication mechanism
        String username = usernamePasswordCredential.getCaller();
        String password = usernamePasswordCredential.getPasswordAsString();
        // Try the client store first
        String validPassword = clientesDAO.getPass(username);
        if (validPassword != null && validPassword.equals(password)) {
            // Authentication succeeded as a client
            Set<String> roles = new HashSet<>(Arrays.asList("USUARIOS"));
            // Hand the user's identity and roles back to the server
            return new CredentialValidationResult(username, roles);
        }
        // Username unknown among clients: fall through to the vet store.
        // (A client username with a wrong password is NOT retried here.)
        if (validPassword == null) {
            validPassword = veterinariosDAO.getPass(username);
            if (validPassword != null && validPassword.equals(password)) {
                // Authentication succeeded as a veterinarian
                Set<String> roles = new HashSet<>(Arrays.asList("ADMINISTRADORES"));
                // Hand the user's identity and roles back to the server
                return new CredentialValidationResult(username, roles);
            }
        }
        return INVALID_RESULT; // invalid credentials
    }
}
<file_sep>/src/main/java/com/vetuja/clases/Cliente.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.vetuja.clases;
import java.io.Serializable;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Logger;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.*;
/**
 * JPA entity for a clinic client (pet owner); the DNI is the primary key.
 * All constraint messages are user-facing Spanish strings used by bean
 * validation.
 *
 * @author sjm00010
 */
@Entity
public class Cliente implements Serializable {
    // Spanish national id: 8 digits + uppercase letter (primary key).
    @Id
    @Pattern(regexp = "\\d{8}[A-Z]", message = "DNI incorrecto, debe tener 8 números y la letra debe estar en mayúscula.")
    private String DNI;
    @Size(min = 3, max = 12, message = "El nombre debe tener una longitud entre {min} y {max} caracteres.")
    private String nombre;
    @Size(min = 6, max = 20, message = "Los apellidos deben tener una longitud entre {min} y {max} caracteres.")
    private String apellidos;
    @Size(min = 6, max = 30, message = "La direción debe tener una longitud entre {min} y {max} caracteres.")
    private String direccion;
    // Birth date: required and must lie in the past.
    @Past(message = "Si no has nacido, ¿que haces registrandote? Revisa la fecha de nacimiento.")
    @Temporal(TemporalType.DATE)
    @NotNull(message = "La fecha de nacimiento es requerida.")
    private Date fnac;
    // File name of the user's photo, inside the 'usuarios' folder.
    @NotEmpty(message = "Introduce una foto para que podamos conocerte.")
    private String foto;
    @Pattern(regexp = "[a-zA-Z0-9._+-]+@[a-zA-Z]+\\.[a-zA-Z.]{2,}", message = "El correo introducido no es valido, debe tener el formato <EMAIL>")
    private String email;
    // NOTE(review): the password is stored and compared in plain text (see
    // IdentificadorUsuario) — hashing should be added before production use.
    @Size(min = 6, max = 10, message = "La contraseña debe tener una longitud entre {min} y {max} caracteres.")
    private String pass;
    // Logger for errors (currently unused in this class).
    private static final Logger logger = Logger.getLogger(Cliente.class.getName());
    /**
     * Creates an empty client; DNI and fnac start as null.
     */
    public Cliente() {
        DNI = null;
        nombre = "";
        apellidos = "";
        direccion = "";
        fnac = null;
        foto = "";
        email = "";
        pass = "";
    }
    /**
     * @param DNI national id of the user (8 digits + uppercase letter)
     * @param nombre first name, without surnames
     * @param apellidos surnames
     * @param direccion local address, without province etc.
     * @param fnac birth date
     * @param foto file name of the user's photo, inside the 'usuarios' folder
     * @param email e-mail address
     * @param pass password
     */
    public Cliente(String DNI, String nombre, String apellidos, String direccion,
            Date fnac, String foto, String email, String pass) {
        this.DNI = DNI;
        this.nombre = nombre;
        this.apellidos = apellidos;
        this.direccion = direccion;
        this.fnac = fnac;
        this.foto = foto;
        this.email = email;
        this.pass = pass;
    }
    /**
     * Copy constructor (shallow: Date and Strings are shared references).
     *
     * @param c the client to copy
     */
    public Cliente(Cliente c) {
        this.DNI = c.DNI;
        this.nombre = c.nombre;
        this.apellidos = c.apellidos;
        this.direccion = c.direccion;
        this.fnac = c.fnac;
        this.foto = c.foto;
        this.email = c.email;
        this.pass = c.pass;
    }
    /**
     * @return the DNI
     */
    public String getDNI() {
        return DNI;
    }
    /**
     * @param DNI the DNI to set
     */
    public void setDNI(String DNI) {
        this.DNI = DNI;
    }
    /**
     * @return the first name
     */
    public String getNombre() {
        return nombre;
    }
    /**
     * @param nombre the first name to set
     */
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }
    /**
     * @return the surnames
     */
    public String getApellidos() {
        return apellidos;
    }
    /**
     * @param apellidos the surnames to set
     */
    public void setApellidos(String apellidos) {
        this.apellidos = apellidos;
    }
    /**
     * @return the address
     */
    public String getDireccion() {
        return direccion;
    }
    /**
     * @param direccion the address to set
     */
    public void setDireccion(String direccion) {
        this.direccion = direccion;
    }
    /**
     * @return the birth date
     */
    public Date getFnac() {
        return fnac;
    }
    /**
     * @param fnac the birth date to set
     */
    public void setFnac(Date fnac) {
        this.fnac = fnac;
    }
    /**
     * @return the password
     */
    public String getPass() {
        return pass;
    }
    /**
     * @param pass the password to set
     */
    public void setPass(String pass) {
        this.pass = pass;
    }
    /**
     * @return the e-mail address
     */
    public String getEmail() {
        return email;
    }
    /**
     * @param email the e-mail address to set
     */
    public void setEmail(String email) {
        this.email = email;
    }
    /**
     * @return file name of the user's photo, inside the 'usuarios' folder
     */
    public String getFoto() {
        return foto;
    }
    /**
     * @param foto file name of the user's photo, inside the 'usuarios' folder
     */
    public void setFoto(String foto) {
        this.foto = foto;
    }
    /**
     * @return the birth date formatted as dd-MM-yyyy
     *         (throws NullPointerException when fnac is null)
     */
    public String leerFecha() {
        DateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy");
        return dateFormat.format(this.fnac);
    }
}
| e512776308eea3e1cefabe7ffc6e68d8da2302a0 | [
"JavaScript",
"Java",
"Markdown"
] | 12 | Java | sjm00010/VetUJA | 113f2b65de863ddcbf736e233b6a0203c9a60f22 | abeb9b809174c06d2667255881e3cb61e5edc062 | |
refs/heads/main | <file_sep>from pytube import YouTube
link = 'put your link here'
video = YouTube(link)
# vidoe quality will be 720p
video.streams.get_highest_resolution().download(output_path="put the output location")
# video quality will be 360p
video.streams.get_highest_resolution().download(output_path="put the output location")<file_sep># Youtube-Downloader.mp4
we should install pypi with pip: (source)
$ python -m pip install git+https://github.com/pytube/pytube
and we should install pytube library
For videos:
He put a code to download the highest quality which is 720p ( its gonna be vcodec and acodec together)
video.streams.get\_highest\_resolution().download(output\_path="put the output location")
we use the (output\_path="put the output location") to pick the location so make sure to choose the exact location that you want so you will be able to find it easily
and he put a code for lowest quality and its gonna be 360p
video.streams.get\_lowest\_resolution().download(output\_path="put the output location")
For playlist:
for video in Playlist.videos:
video.streams.get\_highest\_resolution().download(output\_path="put Playlist link here")
as we know that will make the quality 720p
and don't forget to double-check the location
I hope that I helped to explain his code a little bit
Have a good one
Thank you in advance...
<file_sep>from pytube import Playlist
link = "put your public playlist link here"
Playlist = Playlist(link)
for video in Playlist.videos:
video.streams.get_highest_resolution().download(output_path="put Playlist link here") | 9424654608fa073973d4efdc21eb7a21975f144d | [
"Markdown",
"Python"
] | 3 | Python | MTaherr/Youtube-Downloader.mp4 | 9476aa229886e87a60df13c1ff36df9b090410bc | 87fe598dfd5c040b449220086ecfdf15107ecf8d | |
refs/heads/master | <file_sep>from sklearn import decomposition
from matplotlib import pyplot as plt
import tensorflow as tf
import autoencoder_mnist as ae
import argparse, input_data
import numpy as np
# model-checkpoint-0349-191950
def corrupt_input(x):
    """Randomly zero out elements of `x` with probability 0.5.

    Samples a 0/1 integer mask the same shape as `x` and multiplies it
    element-wise into the input (masking noise for the denoising
    autoencoder).
    """
    mask = tf.random_uniform(shape=tf.shape(x), minval=0, maxval=2, dtype=tf.int32)
    return tf.cast(mask, tf.float32) * x
if __name__ == '__main__':
    # CLI: expects the path of a saved model checkpoint to restore.
    parser = argparse.ArgumentParser(description='Test various optimization strategies')
    parser.add_argument('savepath', nargs=1, type=str)
    args = parser.parse_args()

    print("\nPULLING UP MNIST DATA")
    mnist = input_data.read_data_sets("data/", one_hot=False)
    print(mnist.test.labels)

    with tf.Graph().as_default():
        with tf.variable_scope("autoencoder_model"):
            # Inputs: flattened MNIST images, a corruption switch
            # (1 = corrupt, 0 = pass-through) and a batch-norm phase flag.
            x = tf.placeholder("float", [None, 784]) # mnist data image of shape 28*28=784
            corrupt = tf.placeholder(tf.float32)
            phase_train = tf.placeholder(tf.bool)

            # Blend corrupted and clean input according to `corrupt`.
            c_x = (corrupt_input(x) * corrupt) + (x * (1 - corrupt))

            # Autoencoder graph with a 2-dimensional code layer.
            code = ae.encoder(c_x, 2, phase_train)
            output = ae.decoder(code, 2, phase_train)
            cost, train_summary_op = ae.loss(output, x)
            global_step = tf.Variable(0, name='global_step', trainable=False)
            train_op = ae.training(cost, global_step)
            eval_op, in_im_op, out_im_op, val_summary_op = ae.evaluate(output, x)

            # Fix: the original constructed tf.Session() and
            # tf.train.Saver() twice in a row; one of each suffices.
            saver = tf.train.Saver()
            sess = tf.Session()

            print("\nSTARTING AUTOENCODER\n", args.savepath[0])
            saver.restore(sess, args.savepath[0])

            print("\nGENERATING AE CODES AND RECONSTRUCTION")
            # Feed externally-corrupted test images; `corrupt: 1` also
            # enables the in-graph corruption path.
            original_input, corr_func, noise_input, ae_reconstruction = sess.run(
                [x, corrupt_input(x), c_x, output],
                feed_dict={x: mnist.test.images * np.random.randint(2, size=(784)), phase_train: True, corrupt: 1})

            # Show sample index 2: fed input, a corrupted sample, the
            # blended/noisy input, and the reconstruction.
            plt.imshow(original_input[2].reshape((28,28)), cmap=plt.cm.gray)
            plt.show()
            plt.imshow(corr_func[2].reshape((28, 28)), cmap=plt.cm.gray)
            plt.show()
            plt.imshow(noise_input[2].reshape((28,28)), cmap=plt.cm.gray)
            plt.show()
            plt.imshow(ae_reconstruction[2].reshape((28,28)), cmap=plt.cm.gray)
            plt.show()
| 5c19dce940b6173f51dd1c9180b43fe2127df3bc | [
"Python"
] | 1 | Python | aei0109/DAE | 41b4f171dfe602286255cf462b5438b43e2abea7 | 3a4e45d2732fb043b53465eeae25f88852f202fe | |
refs/heads/master | <repo_name>Mikea15/Minerals<file_sep>/Minerals/MineralEntity.h
//
// MineralEntity.hpp
// Minerals
//
// Created by <NAME> on 27/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef MineralEntity_hpp
#define MineralEntity_hpp
#include <iostream>
#include <string>
#include <cmath>
#include "GameObject.h"
#include "Rect.h"
#include "Texture.h"
using namespace Skybelow;
// A single mineral piece on the board, specialising GameObject.
class MineralEntity : public GameObject
{
public:
    MineralEntity( );
    ~MineralEntity( );
    // Load this entity's sprite from an image file at `path`.
    void LoadTexture( SDL_Renderer *renderer, std::string path );
    // Lifecycle hooks (behaviour defined in MineralEntity.cpp).
    void Start();
    void Update();
    void Cleanup();
private:
};
#endif /* MineralEntity_hpp */
<file_sep>/Engine/Timer.h
//
// Timer.hpp
// Minerals
//
// Created by <NAME> on 27/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef Timer_hpp
#define Timer_hpp
#include <iostream>
#include <SDL2/SDL.h>
namespace Skybelow
{
    // Pausable stopwatch; tick units are SDL's Uint32 millisecond counts.
    class Timer
    {
    public:
        Timer();
        // Start (or restart) measuring.
        void Start();
        // Stop measuring; IsStarted() becomes false.
        void Stop();
        // Suspend / continue measuring while started.
        void Pause();
        void Resume();
        // Elapsed ticks. NOTE(review): presumably excludes time spent
        // paused - confirm in Timer.cpp.
        Uint32 GetTicks();
        bool IsStarted( ) const {
            return m_isStarted;
        }
        // Paused is only meaningful while the timer is started.
        bool IsPaused( ) const {
            return m_isStarted && m_isPaused;
        }
    private:
        bool m_isPaused;
        bool m_isStarted;
        Uint32 m_startTicks;   // tick count captured at Start()
        Uint32 m_pausedTicks;  // tick state captured while paused
    };
}
#endif /* Timer_hpp */
<file_sep>/Engine/Rect.h
//
// Rect.hpp
// Minerals
//
// Created by <NAME> on 23/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef Rect_hpp
#define Rect_hpp
#include <iostream>
#include <SDL2/SDL.h>
#include "Vector2.h"
namespace Skybelow
{
    // Axis-aligned rectangle: a position plus integer width/height.
    class Rect {
    public:
        Rect();
        Rect( Vector2 position, int width, int height );
        Rect( SDL_Rect rect );
        // Centre point of this rectangle.
        Vector2 GetCenter();
        // True when rectangles a and b overlap.
        // NOTE(review): operates on its parameters rather than on this
        // instance - confirm whether it was meant to be static.
        bool IsColliding( SDL_Rect a, SDL_Rect b );
        // True when the point (x, y) lies inside this rectangle.
        bool Contains( float x, float y );
    private:
        Vector2 m_Position;
        int m_Height;
        int m_Width;
        SDL_Rect rect;
    };
}
#endif /* Rect_hpp */
<file_sep>/Minerals/GameState.h
//
// GameState.h
// Minerals
//
// Created by <NAME> on 25/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef GameState_h
#define GameState_h
#include "GameEngine.h"
// Abstract base class for all game states (intro, gameplay, ...).
// Concrete states implement the lifecycle callbacks below and are
// driven by the GameEngine's state stack.
class GameState
{
public:
    // Fix: virtual destructor added so destroying a state through a
    // GameState* invokes the derived destructor.
    virtual ~GameState() {}

    virtual void Init(GameEngine *game) = 0;
    virtual void HandleEvent( GameEngine *game, SDL_Event event ) = 0;
    virtual void Update( GameEngine *game ) = 0;
    virtual void Render( GameEngine *game ) = 0;
    virtual void Cleanup() = 0;
    virtual void Pause() = 0;
    virtual void Resume() = 0;

    // Convenience forwarder to GameEngine::ChangeState.
    void ChangeState( GameEngine *game, GameState *state )
    {
        game->ChangeState(state);
    }
protected:
    // Only derived classes may construct a GameState.
    GameState() {};
};
#endif /* GameState_h */
<file_sep>/Minerals/GameplayState.h
//
// GameplayState.hpp
// Minerals
//
// Created by <NAME> on 26/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef GameplayState_hpp
#define GameplayState_hpp
#include <iostream>
#include <vector>
#include <sstream>
#include "GameState.h"
#include "IntroState.h"
#include "GameObject.h"
#include "MineralEntity.h"
#include "Timer.h"
#include "Grid.h"
#include "Texture.h"
#define SCORE_BONUS 2
#define TIME_TILL_NEW_BLOCK 1
#define TIME_TILL_NEW_GAME 2
// Main playfield state: owns the grid of minerals, the score, the
// session timers and game-over handling.
class GameplayState : public GameState
{
public:
    void Init(GameEngine *game);
    void HandleEvent( GameEngine *game, SDL_Event event );
    void Update( GameEngine *game );
    void Render( GameEngine *game );
    void Cleanup();
    // Handle a click at board position `position` (see .cpp).
    void ClickedCell( Vector2 position );
    void Pause();
    void Resume();
    // Singleton accessor; the engine's state stack stores this pointer.
    static GameplayState* Instance() {
        return &m_GameplayState;
    }
protected:
    GameplayState();
    ~GameplayState();
private:
    static GameplayState m_GameplayState;  // the singleton instance
    Grid *m_grid;
    Texture m_board;
    Vector2 m_boardPosition;
    SDL_Texture *m_fontTexture;
    Texture m_scoreRect;
    Skybelow::Timer m_time;
    float m_timeToPushBlock;
    float m_currentTime;
    float m_timeToBeginNew;
    Skybelow::Timer m_fpsTimer;
    std::stringstream m_timeText;
    int m_score;
    std::stringstream m_scoreText;
    Texture m_scoreTexture;
    bool m_gameOver;
    Texture m_gameOverText;
    int m_frameCount;
    bool m_showDebugInfo;
    bool m_mouseClickDownPrevious;
};
#endif /* GameplayState_hpp */
<file_sep>/Engine/Thread.h
//
// Thread.hpp
// Minerals
//
// Created by <NAME> on 28/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef Thread_hpp
#define Thread_hpp
#include <stdio.h>
#include <pthread.h>
// Thin OO wrapper around a POSIX thread; subclasses supply the thread
// body by implementing Run().
class Thread
{
public:
    Thread();
    virtual ~Thread();
    // Start/Join/Detach the underlying pthread.
    // NOTE(review): the int results presumably follow pthread's
    // 0-on-success convention - confirm in Thread.cpp.
    int Start();
    int Join();
    int Detach();
    // Identifier of the wrapped thread.
    pthread_t Self();
    // Thread entry point, implemented by subclasses.
    virtual void* Run() = 0;
private:
    pthread_t m_tid;
    int m_running;
    int m_detached;
};
#endif /* Thread_hpp */
<file_sep>/Engine/Vector2.h
//
// Vector2.hpp
// Minerals
//
// Created by <NAME> on 23/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef Vector2_hpp
#define Vector2_hpp
#include <cmath>
#include <stdio.h>
#include "Math.h"
namespace Skybelow
{
struct Vector2
{
float x;
float y;
Vector2( ) : x(0), y(0)
{
}
Vector2( float x, float y ): x(x), y(y)
{
}
Vector2( const Vector2& v ) : x(v.x), y(v.y)
{
}
float Dot( Vector2 other )
{
return x * other.x + y * other.y;
}
float LengthSqr( )
{
return x * x + y * y;
}
float Length( )
{
return sqrtf( Length() );
}
Vector2 Lerp( Vector2 a, Vector2 b, float t )
{
return Vector2( Math::Lerp(a.x, b.x, t), Math::Lerp(a.y, b.y, t) );
}
Vector2 EaseInOutQuad( Vector2 a, Vector2 b, float t, float duration )
{
return Vector2( Math::EaseInOutQuad(t, a.x, b.x, duration), Math::EaseInOutQuad(t, a.y, b.y, duration) );
}
Vector2 Normalized( )
{
return Vector2( x / Length(), y / Length() );
}
Vector2 operator+( const Vector2& a ) const {
return Vector2( x + a.x, y + a.y );
}
Vector2 operator-( const Vector2& a ) const {
return Vector2( x - a.x, y - a.y );
}
Vector2 operator+=( const Vector2& a ) {
x += a.x;
y += a.y;
return *this;
}
Vector2 operator-=( const Vector2& a ) {
x -= a.x;
y -= a.y;
return *this;
}
Vector2 operator*( const float scale ) {
return Vector2( x * scale, y * scale );
}
Vector2 operator*=( const float scale ) {
x *= scale;
y *= scale;
return *this;
}
bool operator==(const Vector2 other ) const {
return ( x == other.x && y == other.y );
}
};
}
#endif /* Vector2_hpp */
<file_sep>/readme.txt
=================
# Minerals v001 #
=================
- Executable can be found in ./Build folder.
Enjoy.
<NAME>
<EMAIL>
<file_sep>/Engine/WorkQueue.cpp
//
// WorkQueue.cpp
// Minerals
//
// Created by <NAME> on 28/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#include "WorkQueue.h"
// Initialise the lock and the not-empty condition variable.
// NOTE(review): template member definitions in a .cpp only link if this
// file is #included by users or explicitly instantiated - confirm.
template <typename T>
WorkQueue<T>::WorkQueue()
{
    pthread_mutex_init(&m_mutex, NULL);
    pthread_cond_init(&m_condv, NULL);
}
// Destroy the synchronisation primitives; any queued items are simply
// dropped along with the list.
template <typename T>
WorkQueue<T>::~WorkQueue()
{
    pthread_mutex_destroy(&m_mutex);
    pthread_cond_destroy(&m_condv);
}
// Enqueue `item` and wake one consumer blocked in Remove().
template <typename T>
void WorkQueue<T>::Add(T item)
{
    pthread_mutex_lock(&m_mutex);
    m_queue.push_back( item );
    // Signal while still holding the mutex; the waiter proceeds once
    // we unlock below.
    pthread_cond_signal(&m_condv);
    pthread_mutex_unlock(&m_mutex);
}
// Dequeue the oldest item, blocking while the queue is empty.
template <typename T>
T WorkQueue<T>::Remove()
{
    pthread_mutex_lock(&m_mutex);
    // Loop (not `if`) to cope with spurious condition-variable wakeups.
    while( m_queue.size() == 0 ) {
        pthread_cond_wait(&m_condv, &m_mutex);
    }
    T item = m_queue.front();
    m_queue.pop_front();
    pthread_mutex_unlock(&m_mutex);
    return item;
}
// Number of queued items at the moment of the call; may change as soon
// as the lock is released.
template <typename T>
int WorkQueue<T>::Size()
{
    pthread_mutex_lock(&m_mutex);
    int size = m_queue.size();
    pthread_mutex_unlock(&m_mutex);
    return size;
}
<file_sep>/Engine/ConsumerThread.cpp
//
// ConsumerThread.cpp
// Minerals
//
// Created by <NAME> on 28/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#include "ConsumerThread.h"
<file_sep>/Engine/Texture.h
//
// Texture.hpp
// Minerals
//
// Created by <NAME> on 27/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef Texture_hpp
#define Texture_hpp
#include <iostream>
#include <string>
#include <SDL2/SDL.h>
#include <SDL2_image/SDL_image.h>
#include <SDL2_ttf/SDL_ttf.h>
#include "Vector2.h"
namespace Skybelow
{
    // Wrapper around an SDL_Texture with image/text loading and
    // rendering helpers.
    class Texture
    {
    public:
        Texture();
        ~Texture();
        // Load the texture from an image file; returns true on success.
        bool LoadFromFile( SDL_Renderer *renderer, std::string path );
        // Render a text string into the texture; returns true on success.
        bool LoadFromRenderedText(TTF_Font *font, SDL_Renderer *renderer, std::string textureText, SDL_Color textColor );
        // Release/reset the texture (see Texture.cpp).
        void Clear();
        // Colour modulation, blend mode and alpha (forwarded to SDL).
        void SetColor( Uint8 red, Uint8 green, Uint8 blue );
        void SetBlendMode( SDL_BlendMode blending );
        void SetAlpha( Uint8 alpha );
        // Draw at `position`; optional source clip, rotation and flip.
        void Render( SDL_Renderer *renderer, Vector2 position, SDL_Rect* clip = NULL, double angle = 0.0, SDL_Point* center = NULL, SDL_RendererFlip flip = SDL_FLIP_NONE );
        int GetWidth( ) { return m_width; }
        int GetHeight( ) { return m_height; }
        // NOTE(review): setters take float but the fields are int, so
        // fractional sizes are truncated - confirm intended.
        void SetWidth( float width ) {
            m_width = width;
        }
        void SetHeight( float height ) {
            m_height = height;
        }
        SDL_Texture* GetTexture( ) const {
            return m_texture;
        }
    private:
        SDL_Texture* m_texture;
        int m_width;
        int m_height;
    };
}
#endif /* Texture_hpp */
<file_sep>/Minerals/GameEngine.h
//
// GameEngine.h
// Minerals
//
// Created by <NAME> on 23/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef _GameEngine_h_
#define _GameEngine_h_
#if __APPLE__
#include <SDL2_image/SDL_image.h>
#include <SDL2_ttf/SDL_ttf.h>
#include <SDL2/SDL.h>
#elif _WIN32 || _WIN64
#include <SDL/SDL_image.h>
#include <SDL/SDL_ttf.h>
#include <SDL/SDL.h>
#endif
#include <iostream>
#include <string>
#include <vector>
class GameState;
// Core engine: owns the SDL window/renderer/font and manages a stack
// of GameState objects driven by the main loop in main.cpp.
class GameEngine
{
public:
    GameEngine( );
    int Run();
    // Create the SDL window, renderer and font resources.
    void Init( std::string windowName, int width, int height );
    void Cleanup();
    // Replace the current state with `state`.
    void ChangeState( GameState * state );
    // Push/pop states on the state stack (e.g. for pause screens).
    void PushState( GameState *state );
    void PopState( );
    // Per-frame phases, forwarded to the active state.
    void HandleEvents( );
    void Update( );
    void Render( );
    bool IsRunning();
    void Load();
    void Save();
    void Quit();
    // exposing this for lack of time.
    // going rambo style.. :/
    int m_time;        // ms per update tick (see the loop in main.cpp)
    int m_timeNow;     // tick count sampled this frame
    int m_timeBefore;  // tick count at the previous update
    SDL_Event m_event; // event polled by the main loop
    SDL_Window* MainWindow( ) {
        return m_window;
    }
    SDL_Renderer* MainRenderer( ) {
        return m_renderer;
    }
    TTF_Font* MainFont() {
        return m_font;
    }
private:
    std::string m_windowName;
    int m_windowHeight;
    int m_windowWidth;
    int m_windowBits;
    bool m_isRunning;
    bool m_fullscreen;
    std::vector<GameState*> m_gameStates;  // stack of game states
    SDL_Window *m_window;
    // SDL 2.0. Hardware Accelerated.
    SDL_Renderer *m_renderer;
    TTF_Font *m_font;
};
#endif /* GameEngine */
<file_sep>/Minerals/IntroState.h
//
// IntroState.hpp
// Minerals
//
// Created by <NAME> on 26/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef IntroState_hpp
#define IntroState_hpp
#include <iostream>
#include "GameState.h"
#include "Texture.h"
using namespace Skybelow;
// Splash/intro state shown at startup before gameplay begins.
class IntroState : public GameState
{
public:
    void Init(GameEngine *game);
    void HandleEvent( GameEngine *game, SDL_Event event );
    void Update( GameEngine *game );
    void Render( GameEngine *game );
    void Cleanup();
    void Pause();
    void Resume();
    // Singleton accessor used when pushing this state on the engine.
    static IntroState* Instance() {
        return &m_IntroState;
    }
protected:
    IntroState();
    ~IntroState();
private:
    static IntroState m_IntroState;  // the singleton instance
    Texture m_text;
    SDL_Rect textDest;
    SDL_Texture *m_fontTexture;
    int m_time;
    int m_startTime;
    // NOTE(review): presumably the delay before auto-advancing past
    // the intro - confirm in IntroState.cpp.
    int m_timeToNextScreen;
};
#endif /* IntroState_hpp */
<file_sep>/Engine/WorkItem.h
//
// WorkItem.hpp
// Minerals
//
// Created by <NAME> on 28/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef WorkItem_hpp
#define WorkItem_hpp
#include <iostream>
#include <string>
// #include "thread.h"
#include "WorkQueue.h"
// A message/number pair carried through a WorkQueue by worker threads.
class WorkItem
{
    // Fix: fully qualify std::string; the bare `string` only compiled
    // because WorkQueue.h leaks `using namespace std`.
    std::string m_message;
    int m_number;
public:
    WorkItem(const char* message, int number)
    : m_message(message), m_number(number) {}
    ~WorkItem() {}
    // Pointer is valid only while this WorkItem is alive.
    const char* getMessage() const { return m_message.c_str(); }
    int getNumber() const { return m_number; }
};
#endif /* WorkItem_hpp */
<file_sep>/Minerals/main.cpp
//
// main.cpp
// Minerals
//
// Created by <NAME> on 23/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
//
// For Future Reference.. I'll be needing this info.
// /*
// * This is a simple C program to demonstrate the usage of callbacks
// * The callback function is in the same file as the calling code.
// * The callback function can later be put into external library like
// * e.g. a shared object to increase flexibility.
// *
// */
//
//#include <stdio.h>
//#include <string.h>
//
//typedef struct _MyMsg {
// int appId;
// char msgbody[32];
//} MyMsg;
//
//void myfunc(MyMsg *msg)
//{
// if (strlen(msg->msgbody) > 0 )
// printf("App Id = %d \nMsg = %s \n",msg->appId, msg->msgbody);
// else
// printf("App Id = %d \nMsg = No Msg\n",msg->appId);
//}
//
///*
// * Prototype declaration
// */
//void (*callback)(MyMsg *);
//
//int main(void)
//{
// MyMsg msg1;
// msg1.appId = 100;
// strcpy(msg1.msgbody, "This is a test\n");
//
// /*
// * Assign the address of the function "myfunc" to the function
// * pointer "callback" (may be also written as "callback = &myfunc;")
// */
// callback = myfunc;
//
// /*
// * Call the function (may be also written as "(*callback)(&msg1);")
// */
// callback(&msg1);
//
// return 0;
//}
//
//
//
//
//
//
#include <iostream>
#include <string>
#if __APPLE__
#include <SDL2/SDL.h>
#endif
#include "GameEngine.h"
#include "IntroState.h"
// Resolve the absolute path to the game's `res` directory (cached in a
// function-local static after the first call), optionally appending
// `subDir`. Returns "" when SDL cannot provide the executable's path.
std::string GetResourcePath( const std::string &subDir = "" )
{
#ifdef _WIN32
    const char PATH_SEPARATOR = '\\';
#else
    const char PATH_SEPARATOR = '/';
#endif
    // Computed once and reused on later calls.
    static std::string baseRes;
    if( baseRes.empty() )
    {
        char *basePath = SDL_GetBasePath();
        if( basePath != NULL )
        {
            baseRes = basePath;
            SDL_free(basePath);
        }
        else
        {
            std::cout << "Error getting resource path: " << SDL_GetError() << std::endl;
            return "";
        }
        // Replace the trailing ".../bin..." part of the executable path
        // with ".../res/"; assumes the binary lives under a `bin` dir.
        size_t position = baseRes.rfind("bin");
        baseRes = baseRes.substr(0, position) + "res" + PATH_SEPARATOR;
    }
    return subDir.empty() ? baseRes : baseRes + subDir + PATH_SEPARATOR;
}
int main(int argc, char * arg[])
{
GameEngine *game = new GameEngine();
game->Init("Minerals", 800, 600);
game->ChangeState(IntroState::Instance());
while( game->IsRunning() )
{
SDL_PollEvent( &game->m_event );
game->HandleEvents();
game->m_timeNow = SDL_GetTicks();
if( game->m_timeNow - game->m_timeBefore > game->m_time )
{
game->Update();
game->m_timeBefore = game->m_timeNow;
}
else
{
SDL_Delay( game->m_time - ( game->m_timeNow - game->m_timeBefore ) );
}
game->Render();
}
game->Cleanup();
return 0;
}<file_sep>/README.md
Minerals
========
## Project
- Language: C++
- 3rd Party Libraries: SDL2, SDL_image, SDL_mixer, SDL_ttf
<file_sep>/Engine/State.h
//
// State.hpp
// Minerals
//
// Created by <NAME> on 26/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
#ifndef State_hpp
#define State_hpp
#include <stdio.h>
namespace Skybelow
{
template <class entity_type>
class State
{
public:
virtual ~State();
virtual void Enter( entity_type* ) = 0;
virtual void Execute( entity_type* ) = 0;
virtual void Exit( entity_type* ) = 0;
};
}
#endif /* State_hpp */
<file_sep>/Engine/WorkQueue.h
//
// WorkQueue.hpp
// Minerals
//
// Created by <NAME> on 28/09/15.
// Copyright © 2015 SkyBelow. All rights reserved.
//
// http://vichargrave.com/multithreaded-work-queue-in-c/
//
#ifndef WorkQueue_hpp
#define WorkQueue_hpp
#include <pthread.h>
#include <list>
using namespace std;
// Thread-safe FIFO queue guarded by a pthread mutex and condition
// variable (classic producer/consumer pattern; see WorkQueue.cpp).
template <typename T>
class WorkQueue
{
public:
    WorkQueue();
    ~WorkQueue();
    // Enqueue an item and wake one waiting consumer.
    void Add(T item);
    // Dequeue the oldest item, blocking while the queue is empty.
    T Remove( );
    // Current number of queued items.
    int Size();
    // NOTE(review): these members are effectively public; consider
    // moving them under `private:` below.
    list<T> m_queue;
    pthread_mutex_t m_mutex;
    pthread_cond_t m_condv;
private:
};
#endif /* WorkQueue_hpp */
| 5dbcdeb1d5959e8102f7c852d22c00bff05b613a | [
"Markdown",
"Text",
"C++"
] | 18 | C++ | Mikea15/Minerals | 4eaca22f8b4a23ed258e2b034abc5ab17577c545 | b56bee05cb0ecfbf6e3931345020bfd17a6583fb | |
refs/heads/master | <file_sep>import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import {ChoixComponent} from "./choix/choix.component";
import {PredictionComponent} from "./prediction/prediction.component";
import {GrapheComponent} from "./graphe/graphe.component";
import {AideComponent} from "./aide/aide.component";
// Top-level routes: one page per feature, with the empty path
// redirecting to the default "choix" page.
const routes: Routes = [
  {path: 'choix', component: ChoixComponent},
  {path: 'graphe', component: GrapheComponent},
  {path: 'prediction', component: PredictionComponent},
  {path: 'aide', component: AideComponent},
  {path: '', redirectTo:'/choix', pathMatch:'full' }
];

// Registers the router at the application root and re-exports
// RouterModule so AppModule gets the router directives.
@NgModule({
  imports: [RouterModule.forRoot(routes)],
  exports: [RouterModule]
})
export class AppRoutingModule { }
<file_sep>import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { ChoixComponent } from './choix/choix.component';
import { GrapheComponent } from './graphe/graphe.component';
import { PredictionComponent } from './prediction/prediction.component';
import { AideComponent } from './aide/aide.component';
import {FontAwesomeModule} from "@fortawesome/angular-fontawesome";
import {choix_service} from "../services/choix_service";
import { HttpClientModule } from '@angular/common/http';
import {FormsModule} from "@angular/forms";
import {JwPaginationComponent, JwPaginationModule} from "jw-angular-pagination";
// Root Angular module: declares every page component and pulls in the
// framework modules (routing, HTTP, forms, icons, pagination).
@NgModule({
  declarations: [
    AppComponent,
    ChoixComponent,
    GrapheComponent,
    PredictionComponent,
    AideComponent,
  ],
  imports: [
    BrowserModule,
    AppRoutingModule,
    FontAwesomeModule,
    HttpClientModule,
    FormsModule,
    JwPaginationModule,
  ],
  // choix_service is provided app-wide so pages share one instance.
  providers: [choix_service,],
  bootstrap: [AppComponent]
})
export class AppModule { }
| 2a00648ffd31671cd3219fe8d0664bb512ba019b | [
"TypeScript"
] | 2 | TypeScript | tarik-guit/projet_semestre_s5_frontend | 6d7ec50a21186c46265bda9752bfab2950461bd7 | 95d1b5392323653016b1be0bcccec7f494ebf140 | |
refs/heads/master | <repo_name>znematoadz/todoApp<file_sep>/README.md
# todoApp
A simple todo list app created using React.
Try it out <a href="https://znematoadz.github.io/todoApp/">Todo List App</a>
<file_sep>/src/App.js
import React from 'react';
const TodoForm = ({addTodo}) => {
// input tracker
let input;
return (
<div className="container">
<form onSubmit={(e) => {
e.preventDefault();
addTodo(input.value);
input.value = '';
}}>
<div className="input-group">
<input
className="form-control col-sm-12 "
ref={node => {
input = node;
}}
placeholder='Add task here...'
/>
<button className="input-group-append btn-primary shadow-none"
><i className="fa fa-plus text-white "></i></button>
</div>
<br />
</form>
</div>
);
};
const Todo = ({todo, remove, toggleClass}) => {
// each todo
//
return (<li className={todo.addClass} id={todo.id} onClick={() => {(toggleClass(todo.id))}}>
{todo.text}
<button className='fa fa-times removeBtn text-danger h-100 p-3 btn close' onClick={() => {(remove(todo.id))}}></button>
</li>);
}
const TodoList = ({todos, remove, toggleClass}) => {
//map through the todos
const todoNode = todos.map((todo) => {
return (<Todo todo={todo} key={todo.id} remove={remove} toggleClass={toggleClass}/>)
});
return (<ul className="list-group">{todoNode}</ul>)
}
const Title = ({todoCount}) => {
return (
<div>
<h1>Todo List</h1>
<p>number of tasks ({todoCount})</p>
</div>
);
}
const Footer = () => {
return (
<div className="fixed-bottom bg-white">
<p className="text-secondary m-0 lead">Created by <NAME></p>
<a className=" github-icon mb-5" href="https://github.com/znematoadz/todoApp">
<i className=" fa fa-github text-secondary mb-3"></i>
</a>
</div>
);
}
// App
class App extends React.Component {
constructor(props){
super(props);
this.state = {
txt: '',
classNames: 'list-group-item',
data: [],
}
}
componentDidMount() {
this.hydrateStateWithLocalStorage();
// add event listener to save state to localStorage
// when user leaves/refreshes the page
window.addEventListener(
"beforeunload",
this.saveStateToLocalStorage.bind(this)
);
}
componentWillUnmount() {
window.removeEventListener(
"beforeunload",
this.saveStateToLocalStorage.bind(this)
);
// saves if component has a chance to unmount
this.saveStateToLocalStorage();
}
hydrateStateWithLocalStorage() {
// for all items in state
for (let key in this.state) {
// if the key exists in localStorage
if (localStorage.hasOwnProperty(key)) {
// get the key's value from localStorage
let value = localStorage.getItem(key);
// parse the localStorage string and setState
try {
value = JSON.parse(value);
this.setState({ [key]: value });
} catch (e) {
// handle empty string
this.setState({ [key]: value });
}
}
}
}
saveStateToLocalStorage() {
// for every item in React state
for (let key in this.state) {
// save to localStorage
localStorage.setItem(key, JSON.stringify(this.state[key]));
}
}
// todo handler
addTodo(val) {
if(val !== '') {
// assemble data
const todo = {text: val, addClass: 'list-group-item', id: window.id = Date.now()}
// update data
this.state.data.push(todo);
// update state
this.setState({
data: this.state.data
});
}
}
// handle remove
handleRemove(id){
// filter all todos except the one to be removed
// eslint-disable-next-line
const remainder = this.state.data.filter((todo) => {
if(todo.id !== id) return todo;
});
// update state with filter
this.setState({
data: remainder
});
}
handleToggleClass(id){
// get list item by id
const addClass = document.getElementById(`${id}`);
// add class name to list item dynamically adding the check mark and and crossing out the text.
if (addClass !== undefined) addClass.classList.contains('checked') ? addClass.classList.remove('checked') : addClass.classList.add('checked');
// filtering through and adding class to filtered items state this saves className for reload.
// eslint-disable-next-line
this.state.data.filter((todo) => {
if(todo.id === id) return todo.addClass === "list-group-item checked" ? todo.addClass = "list-group-item" : todo.addClass = "list-group-item checked"
})
}
render() {
return (
<div>
<Title todoCount={this.state.data.length}/>
<TodoForm addTodo={this.addTodo.bind(this)}/>
<TodoList
todos={this.state.data}
remove={this.handleRemove.bind(this)}
toggleClass={this.handleToggleClass.bind(this)}
/>
<div className="col-sm-8 h-100"><p className="invisible">list bottom</p></div>
<div>
<Footer/>
</div>
</div>
)}
}
export default App;
| 735b1d4ba122aa5f3b8a853a304640441c4f6a56 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | znematoadz/todoApp | 6a3a52456d60274644b55a66bb2cf6d789e2d485 | 928d70abc6047036c3538e9b7b9dd525d11fd60f | |
refs/heads/master | <file_sep>all: docs
# These targets never produce files with their own names.
.PHONY: all docs lock clean

# Build the JSDoc API documentation into ./doc.
docs:
	npx jsdoc -r -c jsdoc.json -d doc --verbose --pedantic . README.md

# Regenerate both the yarn and pnpm lockfiles from a clean slate.
lock:
	rm -r node_modules || true
	yarn install
	rm -r node_modules
	pnpm i

# Remove the generated documentation; ignore a missing ./doc.
clean:
	rm -r doc || true
<file_sep># Trustedkey Javascript SDK
[](https://circleci.com/gh/trustedkey/trustedkey.js)
[](https://standardjs.com)
## Generate docs
```
make
```
<file_sep>//
// claims.js
//
// Copyright © 2018 Trusted Key Solutions. All rights reserved.
//
const OID = require('./oid')
/**
* Some well-known OpenID Connect claims, for convenience.
* @type {Object.<string,Dotted?>}
*
* @exports Claims
*/
module.exports = {
name: OID.commonName,
family_name: OID.surname,
given_name: OID.givenName,
profile: OID.socialProfile,
picture: OID.documentImageHead, // FIXME: should return URL
email: OID.emailAddress,
email_verified: OID.emailAddress,
address: OID.postalAddress,
phone_number: OID.telephoneNumber,
phone_number_verified: OID.telephoneNumber,
gender: OID.gender,
birthdate: OID.dateOfBirth,
middle_name: OID.middleName,
preferred_username: null,
'https://auth.trustedkey.com/root': null,
// These are the known OIDs, excluding claims declared by OIDC spec:
'https://auth.trustedkey.com/documentID': OID.documentID,
'https://auth.trustedkey.com/documentType': OID.documentType,
'https://auth.trustedkey.com/documentClass': OID.documentClass,
'https://auth.trustedkey.com/documentImageFront': OID.documentImageFront,
'https://auth.trustedkey.com/documentImageBack': OID.documentImageBack,
'https://auth.trustedkey.com/documentIssuer': OID.documentIssuer,
'https://auth.trustedkey.com/documentResult': OID.documentResult,
'https://auth.trustedkey.com/documentIssueDate': OID.documentIssueDate,
'https://auth.trustedkey.com/documentDigest': OID.documentDigest,
'https://auth.trustedkey.com/documentThumb': OID.documentThumb,
'https://auth.trustedkey.com/country': OID.country,
'https://auth.trustedkey.com/locality': OID.locality,
'https://auth.trustedkey.com/postalCode': OID.postalCode,
'https://auth.trustedkey.com/stateOrProvinceName': OID.stateOrProvinceName,
'https://auth.trustedkey.com/organization': OID.organization,
'https://auth.trustedkey.com/placeOfBirth': OID.placeOfBirth,
'https://auth.trustedkey.com/streetAddress': OID.streetAddress,
'https://auth.trustedkey.com/courseName': OID.courseName,
'https://auth.trustedkey.com/courseContents': OID.courseContents,
'https://auth.trustedkey.com/publicKey': OID.publicKey,
'https://auth.trustedkey.com/levelOfAssurance': OID.levelOfAssurance,
'https://auth.trustedkey.com/age21OrUp': OID.age21OrUp,
'https://auth.trustedkey.com/memberId': OID.memberId,
'https://auth.trustedkey.com/groupId': OID.groupId,
'https://auth.trustedkey.com/suffix': OID.suffix,
'https://auth.trustedkey.com/endpoint': OID.endpoint,
'https://auth.trustedkey.com/docRef': OID.docRef
}
<file_sep>/* eslint-env mocha */
const Assert = require('assert')
const OID = require('../oid')
describe('OID', function () {
  it('all have dotted notation', function () {
    // Every OID must be dot-separated positive components below 1e8.
    for (const oid of Object.values(OID)) {
      for (const part of oid.split('.')) {
        Assert.ok(part > 0 && part < 100000000)
      }
    }
  })

  it('are all unique', function () {
    // Record each OID as it is seen; a repeat fails the strictEqual.
    const seen = {}
    for (const oid of Object.values(OID)) {
      Assert.strictEqual(seen[oid], undefined)
      seen[oid] = oid
    }
  })
})
<file_sep>/* eslint-env mocha */
const Assert = require('assert')
const Express = require('express')
const RP = require('request-promise-native')
const Utils = require('../utils')
const Http = require('../services/http')
const WalletService = require('../services/walletservice')
describe('http server', function () {
let app, http, server
const secret = 'secret'
const appid = 'id'
before(done => {
app = Express()
app.use(require('body-parser').raw({type: '*/*'}))
server = app.listen(0, (err, p) => {
Assert.strictEqual(err, undefined)
http = new Http(`http://localhost:${server.address().port}`, appid, secret)
done()
})
})
after(() => {
// server.close() would wait for all keep-alive sockets to timeout.
server.unref()
})
describe('http', function () {
before(function () {
function Auth (req, res, next) {
const fullUrl = req.protocol + '://' + req.get('host') + req.originalUrl
if (req.headers.authorization) {
Assert.strictEqual(req.headers.authorization.split(' ')[0], 'Bearer')
const claims = Utils.verifyJws(req.headers.authorization.split(' ')[1], secret)
Assert.strictEqual(claims.aud, fullUrl)
Assert.strictEqual(claims.iss, appid)
Assert.ok(claims.iat <= Utils.getUnixTime())
Assert.ok(claims.exp > Utils.getUnixTime())
if (req.body && req.body.length > 0) {
Assert.strictEqual(Utils.sha256(req.body, 'hex'), claims.body)
} else {
Assert.strictEqual(claims.body, null)
}
next()
} else {
next(Error('Auth fail'))
}
}
app.get('/get', Auth, (req, res) => res.send('gotten'))
app.post('/post', Auth, (req, res) => res.send('posted'))
app.delete('/delete', Auth, (req, res) => res.send('deleted'))
app.all('/keep', (req, res) => res.send(req.get('connection')))
})
it('has Connection: keep-alive', async function () {
Assert.strictEqual(await http.get('/keep'), 'keep-alive')
Assert.strictEqual(await http.post('/keep'), 'keep-alive')
})
it('can build URL without params', function () {
const h = http.buildUrl('/test')
Assert.strictEqual(h, `http://localhost:${server.address().port}/test`)
})
it('can build URL with params', function () {
const h = http.buildUrl('/test', {a: 2})
Assert.strictEqual(h, `http://localhost:${server.address().port}/test?a=2`)
})
it('can create JWT header without body', async function () {
const h = http.getHeaders('url')
Assert.strictEqual(h.Authorization.split(' ')[0], 'Bearer')
const claims = Utils.verifyJws(h.Authorization.split(' ')[1], secret)
const payload = {
iss: appid,
aud: 'url',
iat: claims.iat,
exp: claims.exp,
body: null
}
Assert.deepStrictEqual(claims, payload)
})
it('can create JWT header with body', async function () {
const h = http.getHeaders('url', 'body')
Assert.strictEqual(h.Authorization.split(' ')[0], 'Bearer')
const claims = Utils.verifyJws(h.Authorization.split(' ')[1], secret)
const payload = {
iss: appid,
aud: 'url',
iat: claims.iat,
exp: claims.exp,
body: Utils.sha256('body', 'hex')
}
Assert.deepStrictEqual(claims, payload)
})
it('can POST without contents', async function () {
Assert.strictEqual(await http.post('post'), 'posted')
})
it('can POST with query parameters', async function () {
Assert.strictEqual(await http.post('post', {a: 4}), 'posted')
})
it('can POST string', async function () {
Assert.strictEqual(await http.post('post', {}, 's'), 'posted')
})
it('can POST object', async function () {
Assert.strictEqual(await http.post('post', {}, {a: 2}), 'posted')
})
it('can GET', async function () {
Assert.strictEqual(await http.get('get'), 'gotten')
})
it('can GET with query parameters', async function () {
Assert.strictEqual(await http.get('get', {a: 3}), 'gotten')
})
it('can GET with trailing slash', async function () {
Assert.strictEqual(await http.get('/get', {a: 3}), 'gotten')
})
it('can GET with absolute url', async function () {
Assert.strictEqual(await http.get(`http://localhost:${server.address().port}/get`), 'gotten')
})
it('can DELETE', async function () {
Assert.strictEqual(await http.delete('delete'), 'deleted')
})
it('can DELETE with query parameters', async function () {
Assert.strictEqual(await http.delete('delete', {b: 1}), 'deleted')
})
it('can DELETE with trailing slash', async function () {
Assert.strictEqual(await http.delete('/delete', {b: 1}), 'deleted')
})
it('can DELETE with absolute url', async function () {
Assert.strictEqual(await http.delete(`http://localhost:${server.address().port}/delete`), 'deleted')
})
it('reused connections', async function () {
const count = await new Promise((resolve, reject) => server.getConnections((err, count) => err ? reject(err) : resolve(count)))
Assert.strictEqual(count, 1)
})
})
// OAuth 2.0 authorization-code flow tested end-to-end against mock endpoints
// registered on the shared express `app` (declared earlier in this file).
context('wallet OAuth', function () {
  // Fixed values echoed between the mock endpoints and the assertions below.
  const Code = 'codex'
  const State = 'statx'
  const AccessToken = 'access_tokenx'
  const UserInfo = {name: '<NAME>'}
  const RedirectUri = `https://localhost:123/callback`
  let walletservice
  before(function () {
    walletservice = new WalletService(`http://localhost:${server.address().port}`, appid, secret)
    // Mock /oauth/authorize: verify the outgoing query and redirect back with the code.
    app.get('/oauth/authorize', (req, res) => {
      Assert.deepStrictEqual(req.query, {
        redirect_uri: RedirectUri,
        client_id: appid,
        state: State,
        response_type: 'code',
        scope: 'openid'
      })
      const callback = Utils.mergeQueryParams(req.query.redirect_uri, {
        code: Code
      })
      res.redirect(callback)
    })
    // Mock /oauth/token: decode the url-encoded form body and issue the token grant.
    app.post('/oauth/token', (req, res) => {
      const params = req.body.toString().split('&').map(kv => kv.split('=')).reduce((p, [k, v]) => {
        p[k] = decodeURIComponent(v)
        return p
      }, {})
      Assert.deepStrictEqual(params, {
        client_id: appid,
        client_secret: secret,
        grant_type: 'authorization_code',
        code: Code,
        redirect_uri: RedirectUri
      })
      res.json({
        access_token: AccessToken,
        token_type: 'Bearer',
        id_token: 'id_token'
      })
    })
    // Mock /oauth/user: require the bearer token and return the canned profile.
    app.get('/oauth/user', (req, res) => {
      Assert.strictEqual(req.headers.authorization, `Bearer ${AccessToken}`)
      res.json(UserInfo)
    })
  })
  it('can build /authorize URL', function () {
    const url = walletservice.buildAuthorizeUrl(RedirectUri, State)
    Assert.strictEqual(url, `http://localhost:${server.address().port}/oauth/authorize?client_id=id&redirect_uri=${encodeURIComponent(RedirectUri)}&state=${State}&response_type=code&scope=openid`)
  })
  it('can get code from /authorize', async function () {
    const url = walletservice.buildAuthorizeUrl(RedirectUri, State)
    try {
      // Redirects are disabled, so the expected 302 surfaces as a request error.
      throw Error(await RP.get(url, {followRedirect: false}))
    } catch (err) {
      Assert.strictEqual(err.message, `302 - "Found. Redirecting to ${RedirectUri}?code=${Code}"`)
    }
  })
  it('can get access_token from /token', async function () {
    const grant = await walletservice.token(RedirectUri, Code)
    Assert.deepStrictEqual(grant, {
      'access_token': AccessToken,
      'id_token': 'id_token',
      'token_type': 'Bearer'
    })
  })
  it('can get user info from /user', async function () {
    const userInfo = await walletservice.userInfo('access_tokenx')
    Assert.deepStrictEqual(userInfo, UserInfo)
  })
})
})
<file_sep>const Assert = require('assert')
const Utils = require('../utils')
const HttpUtils = require('./http')
module.exports = ValidateService

/**
 * An implementation of the validation API, used to check the validity of credentials and claims.
 *
 * @constructor
 * @param {String} backendUrl The base backend URL (defaults to the Trusted Key issuer)
 */
function ValidateService (backendUrl = 'https://issuer.trustedkey.com') {
  // Unauthenticated client: no app ID/secret is passed, so only public endpoints are used.
  this.httpClient = new HttpUtils(backendUrl)
}
/**
 * Shared helper: ask the backend whether the given address has been revoked.
 *
 * @param {Object} httpClient HTTP client to issue the request with
 * @param {String} address 0x-prefixed blockchain address (or comma-joined list)
 * @returns {Promise.<boolean>} resolves true when the address is NOT revoked
 */
function validate (httpClient, address) {
  Assert.strictEqual(typeof address, 'string', 'address must be of type `string`')
  return httpClient
    .get('isRevoked', {address: address})
    .then(reply => reply.data.isRevoked === false)
}
/**
 * Normalize one serial number, or an array of them, into a comma-joined
 * string of 0x-prefixed blockchain addresses.
 *
 * @param {String|Array.<string>} addresses serial number(s) or address(es)
 * @returns {String} single address or comma-separated address list
 */
function makeAddressList (addresses) {
  return addresses instanceof Array
    ? addresses.map(Utils.serialToAddress).join(',')
    : Utils.serialToAddress(addresses)
}
/**
 * Check the status of the specified blockchain transaction ID.
 *
 * @param {String} txid Transaction ID to check.
 * @returns {Promise.<string>} Transaction status
 */
ValidateService.prototype.getTransactionStatus = function (txid) {
  Assert.strictEqual(typeof txid, 'string', 'txid must be of type `string`')
  return this.httpClient
    .get('getTransactionStatus', {txid: txid})
    .then(reply => reply.data.getTransactionStatus)
}
/**
 * Validate the given credential by calling into the smart contract.
 *
 * @param {String} credentialAddressString Credential to check.
 * @returns {Promise.<boolean>} Status indicating valid address; true when not revoked
 */
ValidateService.prototype.validateCredential = function (credentialAddressString) {
  // Thin wrapper over the shared `validate` helper (single address).
  return validate(this.httpClient, credentialAddressString)
}
/**
 * Validate given claim(s) by calling into the smart contract.
 *
 * @param {String|Array.<string>} claimSerialNumbers Array of claim serial numbers.
 * @returns {Promise.<boolean>} Status indicating valid address; true when none are revoked
 */
ValidateService.prototype.validateClaims = function (claimSerialNumbers) {
  // Serial numbers are normalized to a comma-joined 0x address list for one round trip.
  const addresses = makeAddressList(claimSerialNumbers)
  return validate(this.httpClient, addresses)
}
/**
 * @typedef KeyInfo
 * @type {object}
 * @property {boolean} isRevoked whether the address was revoked
 * @property {number} timestamp the unix-epoch timestamp of the last operation
 * @property {string} revokedBy the address of the revoker
 * @property {string} replaces the address of the credential that is replaced by this
 * @property {string} recovery the address of the registered recovery key
 * @property {string} rootAddress the root address of this credential
 *
 * @typedef {Object.<string,KeyInfoMap>} KeyInfoMap
 */
/**
 * Get extensive key information for given address.
 *
 * @param {String|Array.<string>} address blockchain address(es) of claim/credential to query
 * @returns {Promise.<KeyInfoMap>} KeyInfoMap structure from smart contract
 */
ValidateService.prototype.keyInfo = function (address) {
  return this.httpClient
    .get('keyInfo', {address: makeAddressList(address)})
    .then(reply => reply.data)
}
<file_sep>//
// oid.js
//
// Copyright © 2017 Trusted Key Solutions. All rights reserved.
//
/**
* @typedef {string} Dotted
*
* Some well-known OIDs, for convenience.
* @type {Object.<string,Dotted>}
*
* @exports oid
*/
module.exports = {
/**
* The OID in dotted notation for an identity document's ID number.
* @constant
* @default
*/
documentID: '1.3.6.1.4.1.51341.2',
/**
* The OID in dotted notation for an identity document's type, for example "Michigan (MI) Driver License".
* @constant
* @default
*/
documentType: '1.3.6.1.4.1.51341.1',
/**
* The OID in dotted notation for an identity document's class, for example "Passport", "Drivers License".
* @constant
* @default
*/
documentClass: '1.3.6.1.4.1.51341.6',
/**
* The OID in dotted notation for an identity document's photo.
* @constant
* @default
*/
documentImageFront: '1.3.6.1.4.1.51341.3',
/**
* The OID in dotted notation for an identity document's photo.
* @constant
* @default
*/
documentImageBack: '1.3.6.1.4.1.51341.7',
/**
* The OID in dotted notation for an identity document's photo (headshot).
* @constant
* @default
*/
documentImageHead: '1.3.6.1.4.1.51341.8',
/**
* The OID in dotted notation for an identity document's issuer.
* @constant
* @default
*/
documentIssuer: '1.3.6.1.4.1.51341.4',
/**
* The OID in dotted notation for the result of the document verification.
* @constant
* @default
*/
documentResult: '1.3.6.1.4.1.51341.5',
/**
* The OID in dotted notation for the original documents issue date.
* @constant
* @default
*/
documentIssueDate: '1.3.6.1.4.1.51341.9',
/**
* The OID in dotted notation for the original documents SHA256 digest.
* @constant
* @default
*/
documentDigest: '1.3.6.1.4.1.51341.10',
/**
* The OID in dotted notation for the documents Base64 thumbnail.
* @constant
* @default
*/
documentThumb: '1.3.6.1.4.1.51341.11',
/**
* The OID in dotted notation for the gender (F/M) on an identity document.
* @constant
* @default
*/
gender: '1.3.6.1.5.5.7.9.3',
/**
* The OID in dotted notation for a person's email address.
* @constant
* @default
*/
emailAddress: '1.2.840.113549.1.9.1',
/**
* The OID in dotted notation for a person's full name.
* @constant
* @default
*/
commonName: '2.5.4.3',
/**
* The OID in dotted notation for the birthday on an identity document.
* @constant
* @default
*/
dateOfBirth: '1.3.6.1.5.5.7.9.1',
/**
* The OID in dotted notation for a person's registered phone number.
* @constant
* @default
*/
telephoneNumber: '2.5.4.20',
/**
* The OID in dotted notation for a person's last name.
* @constant
* @default
*/
surname: '2.5.4.4',
/**
* The OID in dotted notation for a document's country.
* @constant
* @default
*/
country: '2.5.4.6',
/**
* The OID in dotted notation for the locality.
* @constant
* @default
*/
locality: '2.5.4.7',
/**
* The OID in dotted notation for the postal code.
* @constant
* @default
*/
postalCode: '2.5.4.17',
/**
* The OID in dotted notation for the state or province.
* @constant
* @default
*/
stateOrProvinceName: '2.5.4.8',
/**
* The OID in dotted notation for the organization's name.
* @constant
* @default
*/
organization: '2.5.4.10',
/**
* The OID in dotted notation for a person's first name.
* @constant
* @default
*/
givenName: '2.5.4.42',
/**
* The OID in dotted notation for the birth place on an identity document.
* @constant
* @default
*/
placeOfBirth: '1.3.6.1.5.5.7.9.2',
/**
* The OID in dotted notation for a person's registered postal address.
* @constant
* @default
*/
postalAddress: '2.5.4.16',
/**
* The OID in dotted notation for a person's street address.
* @constant
* @default
*/
streetAddress: '2.5.4.9',
/**
* The OID in dotted notation for a person's social profile URL.
* @constant
* @default
*/
socialProfile: '1.3.6.1.4.1.51341.12',
/**
* The OID in dotted notation for a completed course's name.
* @constant
* @default
*/
courseName: '1.3.6.1.4.1.51341.13',
/**
* The OID in dotted notation for a completed course's contents.
* @constant
* @default
*/
courseContents: '1.3.6.1.4.1.51341.14',
/**
* The OID in dotted notation for the user's public key.
* @constant
* @default
*/
publicKey: '1.3.6.1.4.1.51341.15',
/**
* The OID in dotted notation for the claim's level of assurance.
* @constant
* @default
*/
levelOfAssurance: '1.3.6.1.4.1.51341.16',
/**
* The OID in dotted notation for a claim that the user is at least 21 years old.
* @constant
* @default
*/
age21OrUp: '1.3.6.1.4.1.51341.21',
/**
* The OID in dotted notation for the member Id number.
* @constant
* @default
*/
memberId: '1.3.6.1.4.1.51341.22',
/**
* The OID in dotted notation for the group Id number.
* @constant
* @default
*/
groupId: '1.3.6.1.4.1.51341.23',
/**
* The OID in dotted notation for a person's middle name.
* @constant
* @default
*/
middleName: '1.3.6.1.4.1.51341.17',
/**
* The OID in dotted notation for a person's suffix. eg. Jr. Sr.
* @constant
* @default
*/
suffix: '1.3.6.1.4.1.51341.18',
/**
* The OID in dotted notation for distributed claim whose actual value needs to be extracted from an endpoint.
* @constant
* @default
*/
endpoint: '1.3.6.1.4.1.51341.24',
/**
* The OID in dotted notation for the issuer-assigned document reference (DocRef).
* @constant
* @default
*/
docRef: '1.3.6.1.4.1.51341.25'
}
<file_sep>/* eslint-env mocha */
const Assert = require('assert')
const Utils = require('../utils')
const FS = require('fs')
describe('Utils', function () {
const jwkEC = {'kty': 'EC', 'crv': 'P-256', 'x': '<KEY>', 'y': '<KEY>'}
const pemEC = `-----BEGIN PUBLIC KEY-----
<KEY>
-----END PUBLIC KEY-----`
const hexEC = '<KEY>'
const jwkRSA = {'kty': 'RSA', 'n': '<KEY>', 'e': 'AQAB'}
const pemRSA = `-----BEGIN PUBLIC KEY-----
<KEY>
-----END PUBLIC KEY-----`
const pemECpk = `-----BEGIN EC PRIVATE KEY-----
<KEY>
-----END EC PRIVATE KEY-----`
// Utils.serialToAddress: left-pads serial numbers into 20-byte 0x-prefixed addresses.
context('serial', function () {
  it('Converts serialNo to address', function () {
    Assert.strictEqual(Utils.serialToAddress('e3b0c44298fc1c149afbf4c8996fb92427ae41e4'), '0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4')
    Assert.strictEqual(Utils.serialToAddress('b0c44298fc1c149afbf4c8996fb92427ae41e4'), '0x00b0c44298fc1c149afbf4c8996fb92427ae41e4')
    Assert.strictEqual(Utils.serialToAddress('4'), '0x0000000000000000000000000000000000000004')
    Assert.strictEqual(Utils.serialToAddress('0x4'), '0x0000000000000000000000000000000000000004')
  })
  it('Is a NOP for addresses', function () {
    Assert.strictEqual(Utils.serialToAddress('0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4'), '0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4')
    Assert.strictEqual(Utils.serialToAddress('0x00b0c44298fc1c149afbf4c8996fb92427ae41e4'), '0x00b0c44298fc1c149afbf4c8996fb92427ae41e4')
    // Comma-separated address lists pass through untouched.
    Assert.strictEqual(Utils.serialToAddress('0x00b0c44298fc1c149afbf4c8996fb92427ae41e4,0x00b0c44298fc1c149afbf4c8996fb92427ae41e4'), '0x00b0c44298fc1c149afbf4c8996fb92427ae41e4,0x00b0c44298fc1c149afbf4c8996fb92427ae41e4')
  })
})
// Utils.isAddress: syntactic check for a 20-byte hex address with 0x prefix.
context('address', function () {
  it('Passes for valid addresses', function () {
    Assert.strictEqual(Utils.isAddress('0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4'), true)
    // Mixed case is accepted.
    Assert.strictEqual(Utils.isAddress('0xE3b0c44298fC1c149afbf4D8996fb92427ae41e4'), true)
  })
  it('Fails for invalid addresses', function () {
    Assert.strictEqual(Utils.isAddress('asdf'), false)
    // 32 bytes (too long) must be rejected.
    Assert.strictEqual(Utils.isAddress('0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'), false)
  })
})
context('jwkToHex', function () {
it('converts RSA public key', function () {
const hexRSA = '<KEY>dbea26593ea4a1d617f1ffbaa7ee0e0a7a372c7139c8ce143a41199dcd653b3e55cd46be75e304c242c95e2aae135c106e703ac9a408772f2f0033612038502b121114e1436e4d52d35b0497e39cdd515c63456ca1aa425690047ebd4b0c27ad39ee91c6cbc50c57b1d3d9eebd6e5e8456e38a83af63afe31bed597132816ac0d531614ba040b39587cf0507c18a2a76ad6533596f5c8429c1d3e6f6f5083847d9f58254d5aaa3508f417d82b59eb495f0d8d5d039530a9fec3cd56cf01f8b3721943'
Assert.strictEqual(Utils.jwkToHex(jwkRSA), hexRSA)
})
it('converts EC public key', function () {
Assert.strictEqual(Utils.jwkToHex(jwkEC), hexEC)
})
it('throws on invalid jwk', function () {
Assert.throws(() => Utils.jwkToHex({kty: 'RSA'}), /Unsupported/)
Assert.throws(() => Utils.jwkToHex({}), /Unsupported/)
})
it('throws on RSA jwk with PK', function () {
Assert.throws(() => Utils.jwkToHex({d: 's83ZmuWKtcqbpnME5112vxZqpKpCFctE4Jye_BneVxE', ...jwkEC}))
Assert.throws(() => Utils.jwkToHex({d: '<KEY>', ...jwkRSA}))
})
})
context('hexToJwk', function () {
it('EC public key', function () {
const jwk = Utils.hexToJwk(hexEC)
Assert.deepEqual(jwk, jwkEC)
})
})
context('pemToJwk', function () {
it('converts RSA public key', function () {
Assert.deepStrictEqual(Utils.pemToJwk(pemRSA), jwkRSA)
Assert.deepStrictEqual(Utils.pemToJwk(`-----BEGIN PUBLIC KEY-----
<KEY>
-----END PUBLIC KEY-----`), {
'e': 'AQAB',
'kty': 'RSA',
'n': '<KEY>'
})
})
it('converts EC public key', function () {
Assert.deepStrictEqual(Utils.pemToJwk(pemEC), jwkEC)
})
it('converts EC private key', function () {
Assert.deepStrictEqual(Utils.pemToJwk(`-----<KEY>`),
{
'crv': 'P-256',
'd': '<KEY>',
'kty': 'EC',
'x': '<KEY>',
'y': '<KEY>'
})
})
it('converts EC private key', function () {
Assert.deepStrictEqual(Utils.pemToJwk(pemECpk), {
'crv': 'P-256',
'd': '<KEY>',
'kty': 'EC',
'x': '<KEY>',
'y': '<KEY>'
})
})
it('converts small RSA public key', function () {
Assert.deepStrictEqual(Utils.pemToJwk(`-----BEGIN PUBLIC KEY-----
<KEY>
-----END PUBLIC KEY-----`),
{
'kty': 'RSA',
'n': '<KEY>',
'e': 'AQAB'
})
})
it('converts small RSA public key, e=3', function () {
Assert.deepStrictEqual(Utils.pemToJwk(`-----BEGIN PUBLIC KEY-----
<KEY>
-----END PUBLIC KEY-----`),
{
'kty': 'RSA',
'n': '<KEY>',
'e': 'Aw'
})
})
it('throws on invalid PEM', function () {
Assert.throws(() => Utils.pemToJwk(``), /Unsupported/)
})
})
context('jwkToPem', function () {
it('converts RSA public key', function () {
Assert.strictEqual(Utils.jwkToPem(jwkRSA), pemRSA)
})
it('converts EC public key', function () {
Assert.strictEqual(Utils.jwkToPem(jwkEC), pemEC)
})
it('converts EC private key', function () {
const jwk = {...jwkEC, d: 's83ZmuWKtcqbpnME5112vxZqpKpCFctE4Jye_BneVxE'}
Assert.strictEqual(Utils.jwkToPem(jwk), pemECpk)
})
it('converts EC private key jwk->pem->jwk', function () {
const jwk = {...jwkEC, d: 's83ZmuWKtcqbpnME5112vxZqpKpCFctE4Jye_BneVxE'}
Assert.deepStrictEqual(Utils.pemToJwk(Utils.jwkToPem(jwk)), jwk)
})
it('converts EC private key v.v. pem->jwk->pem', function () {
Assert.strictEqual(Utils.jwkToPem(Utils.pemToJwk(pemECpk)), pemECpk)
})
})
// Utils.mergeQueryParams: appending query parameters to a (possibly relative) URL.
context('mergeQueryParams', function () {
  it('accepts null arg', function () {
    Assert.strictEqual(Utils.mergeQueryParams('abc', null), 'abc')
  })
  it('merges params', function () {
    // Values are URL-encoded (space becomes %20).
    Assert.strictEqual(Utils.mergeQueryParams('abc', {a: 2, b: 'b c'}), 'abc?a=2&b=b%20c')
  })
})
// Utils.sha256: digest of empty input against the well-known SHA-256 constants.
context('sha256', function () {
  it('accepts and returns buffer', function () {
    Assert.ok(Utils.sha256(Buffer.from('')) instanceof Buffer, 'Expected instanceof Buffer')
  })
  it('takes optional encoding', function () {
    Assert.strictEqual(Utils.sha256('', 'hex'), 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
    Assert.strictEqual(Utils.sha256('', 'base64'), '47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=')
  })
})
// Utils.keccak256: digest of empty input against the well-known Keccak-256 constants.
context('keccak256', function () {
  it('accepts and returns buffer', function () {
    Assert.ok(Utils.keccak256(Buffer.from('')) instanceof Buffer, 'Expected instanceof Buffer')
  })
  it('takes optional encoding', function () {
    Assert.strictEqual(Utils.keccak256('', 'hex'), 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470')
    Assert.strictEqual(Utils.keccak256('', 'base64'), 'xdJGAYb3IzySfn2y3McDwOUAtlPKgic7e/rYBF2FpHA=')
  })
})
// Utils.base64url: URL-safe base64 ('_' and '-' survive a round trip, no padding).
context('base64url', function () {
  it('takes Buffer, returns string', function () {
    const base64urlTest = '_-w'
    Assert.strictEqual(Utils.base64url(Buffer.from(base64urlTest, 'base64')), base64urlTest)
  })
})
context('jws', function () {
const msg = 'msg'
context('none', function () {
it('verifyJws', function () {
Assert.strictEqual(Utils.verifyJws('eyJhbGciOiJub25lIn0.bXNn.', '').toString(), msg)
})
it('verifyJws callback', function () {
Assert.strictEqual(Utils.verifyJws('<KEY>.bXNn.', jose => {
Assert.deepStrictEqual(jose, {alg: 'none'})
return ''
}).toString(), msg)
})
it('verifyJws fail if sig present', function () {
Assert.strictEqual(Utils.verifyJws('<KEY>', ''), null)
})
it('verifyJws fail if secret given', function () {
Assert.strictEqual(Utils.verifyJws('<KEY>.bXNn.', 'secret'), null)
})
})
context('ES256', function () {
let cred
before(async () => {
cred = await Utils.generateKeyPair()
})
it('createEcdsaJws with credential', function () {
const jws = Utils.createEcdsaJws(msg, cred)
const [h, m, s] = jws.split('.').map(p => Buffer.from(p, 'base64').toString('binary'))
Assert.strictEqual(h, '{"alg":"ES256"}')
Assert.strictEqual(m, msg)
Assert.strictEqual(s.length, 64)
})
it('createEcdsaJws+verifyJws', function () {
const jws = Utils.createEcdsaJws(msg, cred)
Assert.strictEqual(Utils.verifyJws(jws, cred).toString(), msg)
})
it('createEcdsaJws+verifyJws callback', function () {
const jws = Utils.createEcdsaJws(msg, cred)
Assert.strictEqual(Utils.verifyJws(jws, jose => {
Assert.deepStrictEqual(jose, {alg: 'ES256'})
return cred.pubKeyHex
}).toString(), msg)
})
it('createEcdsaJws+verifyJws with JWK', function () {
const jwk = Utils.hexToJwk(cred.pubKeyHex)
const jws = Utils.createEcdsaJws(msg, cred)
Assert.strictEqual(Utils.verifyJws(jws, jwk).toString(), msg)
})
it('createEcdsaJws+verifyJws JWT', function () {
const payload = {iat: 1234, msg}
const jws = Utils.createEcdsaJws(payload, cred, {typ: 'JWT'})
Assert.deepEqual(Utils.verifyJws(jws, cred), payload)
})
it('createEcdsaJws+verifyJws pubKeyHex', function () {
const jws = Utils.createEcdsaJws(msg, cred)
Assert.strictEqual(Utils.verifyJws(jws, cred.pubKeyHex).toString(), msg)
})
})
context('HS256', function () {
const JWS = 'eyJhb<KEY>'
const secret = 'secret'
it('createHmacJws with secret', function () {
const jws = Utils.createHmacJws(msg, secret)
Assert.strictEqual(jws, JWS)
const [h, m, s] = jws.split('.').map(p => Buffer.from(p, 'base64').toString('binary'))
Assert.strictEqual(h, '{"alg":"HS256"}')
Assert.strictEqual(m, msg)
Assert.strictEqual(s.length, 32)
})
it('verifyJws', function () {
Assert.strictEqual(Utils.verifyJws(JWS, secret).toString(), msg)
})
it('verifyJws callback', function () {
Assert.strictEqual(Utils.verifyJws(JWS, jose => {
Assert.deepStrictEqual(jose, {alg: 'HS256'})
return secret
}).toString(), msg)
})
it('verifyJws with wrong secret', function () {
Assert.strictEqual(Utils.verifyJws(JWS, 'wrong secret'), null)
})
it('createHmacJws+verifyJws JWT', function () {
const payload = {iat: 1234, msg}
const jws = Utils.createHmacJws(payload, secret, {typ: 'JWT'})
Assert.deepStrictEqual(Utils.verifyJws(jws, secret), payload)
})
})
})
// Utils.parseHexString: hex-to-binary-string decoding.
context('parseHexString', function () {
  it('parses hex into string', function () {
    // '00' decodes to a single NUL character.
    Assert.strictEqual(Utils.parseHexString('00'), '\0')
  })
})
// Utils.parseX509Date: both UTCTime (YYMMDD...) and GeneralizedTime (YYYYMMDD...) forms.
context('parseX509Date', function () {
  it('parses string into date', function () {
    Assert.ok(Utils.parseX509Date('141213110908Z') instanceof Date)
  })
  it('parses short string into date', function () {
    // Two-digit year 14 resolves to 2014 here.
    Assert.strictEqual(Utils.parseX509Date('141213110908Z').toUTCString(), 'Sat, 13 Dec 2014 11:09:08 GMT')
  })
  it('parses long string into date', function () {
    Assert.strictEqual(Utils.parseX509Date('19141213110908Z').toUTCString(), 'Sun, 13 Dec 1914 11:09:08 GMT')
  })
})
// Utils.dateToString: inverse of parseX509Date; short form for past, long form for far-future dates.
context('dateToString', function () {
  it('turns current Date into string', function () {
    Assert.strictEqual(typeof Utils.dateToString(new Date()), 'string')
  })
  it('turns past Date into short string', function () {
    Assert.strictEqual(Utils.dateToString(new Date(Date.UTC(1970, 0, 1, 0, 2, 3))), '700101000203Z')
  })
  it('turns future Date into long string', function () {
    Assert.strictEqual(Utils.dateToString(new Date(Date.UTC(2080, 0, 1, 0, 0, 0))), '20800101000000Z')
  })
})
it('has path to tkroot', function () {
const tkroot = Utils.getRootPemPath()
Assert.ok(/^\/.+\/tkroot.pem$/.test(tkroot), 'Not an absolute path to tkroot.pem')
})
context('getJwkThumbprint', function () {
it('getJwkThumbprint(jwkEC)', function () {
const thumbprint = Utils.getJwkThumbprint(jwkEC)
Assert.strictEqual(thumbprint, 't0SeLs6tHEgrSplyGDH-Sb600WfOgDWFbxfJsWnFlVQ')
})
it('getJwkThumbprint(jwkRSA)', function () {
const thumbprint = Utils.getJwkThumbprint(jwkRSA)
Assert.strictEqual(thumbprint, 'E74oqrEZWBKPrFTjsNLeIUtnVKMm2EW5ZhpNEaV-4RY')
})
it('ignore optional members (jwkEC)', function () {
const thumbprint = Utils.getJwkThumbprint({...jwkEC, opt: 123})
Assert.strictEqual(thumbprint, 't0SeLs6tHEgrSplyGDH-Sb600WfOgDWFbxfJsWnFlVQ')
})
it('ignore optional members (jwkRSA)', function () {
const thumbprint = Utils.getJwkThumbprint({...jwkRSA, opt: 123})
Assert.strictEqual(thumbprint, 'E74oqrEZWBKPrFTjsNLeIUtnVKMm2EW5ZhpNEaV-4RY')
})
it('key order doesn\'t matter', function () {
const jwkEC2 = Object.entries(jwkEC).reduceRight((p, [k, v]) => ({...p, [k]: v}), {})
const thumbprint = Utils.getJwkThumbprint(jwkEC2)
Assert.strictEqual(thumbprint, 't0SeLs6tHEgrSplyGDH-Sb600WfOgDWFbxfJsWnFlVQ')
})
})
const CommonNamePEM = `-----BEGIN CERTIFICATE-----
<KEY>qgAwIB<KEY>
-----END CERTIFICATE-----`
const IssuerPEM = `-----BEGIN CERTIFICATE-----
<KEY>4zGvBKre32hOAIEluECNSQzwKdjibKzQ0FrLFj3UBeE8
bPCx2Tty0FQNeijKZWRnH7H9dwqFS1eDYm2DRyAt/FDYQPdFz/hRtVo=
-----END CERTIFICATE-----`
context('parsePem', function () {
it('parse EC PEM and attributes', function () {
const parsed = Utils.parsePem(CommonNamePEM)
Assert.deepStrictEqual(parsed, {
'subjectaddress': '0xc0a4afdef2b560e61576117d4c8e6b38cdf68467',
'serialNo': '0x5e2ae12b8c140093e22cc39a2a8366ecb3f2809d',
'notAfter': new Date('2019-08-15T05:23:19.000Z'),
'notBefore': new Date('2018-08-15T05:23:19.000Z'),
'issuer': '/C=US/ST=Seattle/L=Seattle/O=Trusted Key test/OU=IT/CN=trustedkey.com',
'issuerPem': null,
'attributes': [{
'oid': '1.3.6.1.4.1.53318295.1',
'value': '<EMAIL>'
},
{
'oid': '1.3.6.1.4.1.53318295.6',
'value': 'Email'
},
{
'oid': '1.2.840.113549.1.9.1',
'value': '<EMAIL>'
}],
'caissuer': [],
'crl': ['ethereum:0x48624beaad14ea386e21<KEY>'],
'ocsp': ['http://ocsp.trustedkey.com']
})
})
it('parse RSA PEM', function () {
Assert.doesNotThrow(() => Utils.parsePem(IssuerPEM))
})
it('parse PEM without header', function () {
Assert.doesNotThrow(() => Utils.parsePem(CommonNamePEM.replace(/-----[^-]+-----|\r|\n/g, '')))
})
it('parse PEM and fail without CA certs', function () {
Assert.throws(() => Utils.parsePem(CommonNamePEM, []), /Signature verification failed$/)
})
it('parse PEM and fail with unknown issuer', function () {
Assert.throws(() => Utils.parsePem(CommonNamePEM, [CommonNamePEM]), /Signature verification failed$/)
})
it('parse PEM and succeed with valid signature', function () {
Assert.doesNotThrow(() => Utils.parsePem(CommonNamePEM, [IssuerPEM]))
})
it('parse PEM and succeed with multiple issuers', function () {
Assert.strictEqual(Utils.parsePem(CommonNamePEM, [CommonNamePEM, IssuerPEM]).issuerPem, IssuerPEM)
})
it('parse PEM and fail with bogus issuer', function () {
Assert.throws(() => Utils.parsePem(CommonNamePEM, ['bogus']), /Invalid PEM/)
})
it('parse PEM and fail with bogus cert', function () {
Assert.throws(() => Utils.parsePem('bogus', [IssuerPEM]), /Invalid PEM/)
})
it('parse issuer PEM', function () {
const parsed = Utils.parsePem(IssuerPEM)
Assert.deepStrictEqual(parsed, {
'attributes': [
{
'oid': '2.5.4.6',
'value': 'US'
},
{
'oid': '2.5.4.8',
'value': 'Seattle'
},
{
'oid': '2.5.4.7',
'value': 'Seattle'
},
{
'oid': '172.16.31.10',
'value': 'Trusted Key test'
},
{
'oid': '192.168.3.11',
'value': 'IT'
},
{
'oid': '2.5.4.3',
'value': 'trustedkey.com'
}
],
'issuer': '/C=US/ST=Seattle/L=Seattle/O=Trusted Key test/OU=IT/CN=trustedkey.com',
'issuerPem': null,
'notAfter': new Date('2018-08-15T23:06:43.000Z'),
'notBefore': new Date('2018-08-14T23:06:43.000Z'),
'serialNo': '0x000000000000000000000000e973395209c6932b',
'subjectaddress': undefined,
'caissuer': [],
'crl': [],
'ocsp': []
})
})
it('parse tkroot', function () {
const tkroot = FS.readFileSync(Utils.getRootPemPath(), 'ascii')
Assert.doesNotThrow(() => Utils.parsePem(tkroot, [tkroot]))
})
it('validate self-signed PEM', function () {
Assert.doesNotThrow(() => Utils.parsePem(IssuerPEM, [IssuerPEM]))
})
})
// Utils.wait / Utils.waitUntil: timing helpers (uses small real delays, kept under 500ms).
context('wait*', function () {
  it('wait', async function () {
    const now = new Date()
    await Utils.wait(50)
    Assert.ok(new Date().getTime() - now >= 50)
  })
  it('waitUntil immediate', async function () {
    const now = new Date()
    // Truthy predicate resolves immediately with its value, well before the timeout.
    Assert.strictEqual(await Utils.waitUntil(500, () => 2), 2)
    Assert.ok(new Date().getTime() - now < 500)
  })
  it('waitUntil timeout', async function () {
    const now = new Date()
    // Falsy predicate keeps polling until the timeout elapses, then resolves false.
    Assert.strictEqual(await Utils.waitUntil(50, () => false), false)
    Assert.ok(new Date().getTime() - now >= 50)
  })
})
context('validateClaim', function () {
let claim
before(function () {
claim = Utils.parsePem(CommonNamePEM, [IssuerPEM])
})
it('validates', () => {
Assert.strictEqual(Utils.validateClaim(claim, new Date('2018-08-15T15:23:19.000Z')), true)
})
it('validates notBefore', () => {
Assert.strictEqual(Utils.validateClaim(claim, new Date('2018-08-15T04:23:19.000Z')), false)
})
it('validates notAfter', () => {
Assert.strictEqual(Utils.validateClaim(claim, new Date('2019-08-16T05:23:19.000Z')), false)
})
it('validates issuer notBefore', () => {
Assert.strictEqual(Utils.validateClaim(claim, new Date('2018-08-14T05:23:19.000Z')), false)
})
it('validates issuer notAfter', () => {
Assert.strictEqual(Utils.validateClaim(claim, new Date('2018-08-16T05:23:19.000Z')), false)
})
})
context('verifyChain', function () {
const OtherIssuer = `-----BEGIN CERTIFICATE-----
MIIDKTCCApKgAwIBAgIJAJD3lnm9KEylMA0GCSqGSIb3DQEBCwUAMHIxCzAJBgNV
BAYTAlVTMRAwDgYDVQQIDAdTZWF0dGxlMRAwDgYDVQQHDAdTZWF0dGxlMRkwFwYD
VQQKDBBUcnVzdGVkIEtleSB0ZXN0MQswCQYDVQQL<KEY>BUGA1UEAwwOdHJ1
c<KEY>sZTEZ
MBcGA1UE<KEY>
-----END CERTIFICATE-----`
it('verify chain with self signed (Issuer)', function () {
Assert.strictEqual(Utils.verifyChain([IssuerPEM]), true)
})
it('verify chain with self signed (OtherIssuer)', function () {
Assert.strictEqual(Utils.verifyChain([OtherIssuer]), true)
})
it('verify chain should fail without self signed cert', function () {
Assert.strictEqual(Utils.verifyChain([CommonNamePEM]), false)
})
it('verify chain should fail with invalid chain', function () {
Assert.strictEqual(Utils.verifyChain([CommonNamePEM, OtherIssuer]), false)
})
it('verify chain should fail with incorrect order', function () {
Assert.strictEqual(Utils.verifyChain([IssuerPEM, CommonNamePEM]), false)
})
it('verify chain with correct issuer', function () {
Assert.strictEqual(Utils.verifyChain([CommonNamePEM, IssuerPEM]), true)
})
it('verify TK root with TK root', function () {
const RootPem = FS.readFileSync(Utils.getRootPemPath(), 'ascii')
Assert.strictEqual(Utils.verifyChain([RootPem]), true)
})
})
})
<file_sep>const HttpUtils = require('./http')
const Crypto = require('crypto')
const Assert = require('assert')
module.exports = CredentialRegistryService

/**
 * Utility class with wrappers for the various Credential Registry API endpoints.
 *
 * @constructor
 * @param {String} backendUrl The base backend URL
 * @param {String} [appId] Application ID, without this only unauthorized APIs can be used
 * @param {String} [appSecret] Application shared secret, without this only unauthorized APIs can be used
 */
function CredentialRegistryService (backendUrl, appId, appSecret) {
  // Credentials are optional; HttpUtils signs requests only when both are present.
  this.httpClient = new HttpUtils(backendUrl, appId, appSecret)
}
/**
 * Revoke our default credential by sending a request to the blockchain. The receiver must have been registered as
 * a delegate in the smart contract. A challenge is signed by the default registered credential, which is
 * verified by the smart contract.
 *
 * @param {string} delegateAddressString The hex-encoded blockchain address of the registered delegate credential.
 * @param {Object} keyPair Jsrsasign keypair object
 * @param {string} [address] When revoking a claim, its address
 * @returns {Promise.<object>} returning JSON from API
 */
CredentialRegistryService.prototype.revokeCredential = function (delegateAddressString, keyPair, address) {
  Assert.strictEqual(typeof delegateAddressString, 'string', 'delegateAddressString must be of type `string`')
  Assert.strictEqual(typeof keyPair, 'object', 'keyPair must be of type `object`')
  // Guard against a Buffer/array being passed where a jsrsasign keypair is expected.
  Assert.strictEqual(0 in keyPair, false, 'keyPair should not be Buffer or Array-like')
  const addressWithout0x = delegateAddressString.replace('0x', '')
  const hash = Crypto.createHash('sha256')
  // Challenge = SHA256(delegate address bytes || context string); the context tag
  // distinguishes an indirect (claim) revocation from a direct credential revocation.
  const digest = hash.update(addressWithout0x, 'hex').update(address ? 'indirect' : 'revocation').digest('hex')
  const sig = keyPair.signWithMessageHash(digest)
  return this.httpClient.post('revoke', {
    signature: sig,
    pubkey: keyPair.pubKeyHex,
    address: address
  })
}
/**
 * Revoke claim by sending a request to the blockchain. The receiver must have been registered as
 * a delegate in the smart contract.
 *
 * @param {string} address The address is its serial number
 * @returns {Promise.<object>} return JSON from API
 */
CredentialRegistryService.prototype.revokeClaim = function (address) {
  Assert.strictEqual(typeof address, 'string', 'address must be of type `string`')
  return this.httpClient.post('revokeClaim', {address: address})
}
<file_sep>/* eslint-env mocha */
const Assert = require('assert')
const OID = require('../oid')
const Claims = require('../claims')
// Consistency check: every OID declared in oid.js must also appear in claims.js.
describe('claims', function () {
  it('has all known OIDs', function () {
    const allOidClaims = new Set(Object.values(Claims))
    // Any OID present in OID but absent from Claims is reported as missing.
    const missingOids = Object.values(OID).filter(oid => !allOidClaims.has(oid))
    Assert.deepStrictEqual(missingOids, [])
  })
})
<file_sep>//
// utils.js
//
// Copyright © 2018 Trusted Key Solutions. All rights reserved.
//
const Crypto = require('crypto')
const CryptoJS = require('crypto-js')
const Assert = require('assert')
const Jsrsasign = require('jsrsasign')
const URL = require('url')
const Moment = require('moment')
const JWT = require('jsonwebtoken')
const CrytoAlg = new Map([
['SHA256withRSA', 'RSA-SHA256'],
['SHA256withECDSA', 'sha256'],
['SHA1withRSA', 'RSA-SHA1']
])
/**
* Static Trustedkey utility functions
*
* @exports utils
*/
const utils = module.exports
/**
* Add new query parameters to an existing URL.
* @param {String} path the current url (may be relative)
* @param {?Object} [params] object with new query parameters
* @returns {String} new URL with the query parameters merged
*/
utils.mergeQueryParams = function (path, params) {
Assert.strictEqual(typeof path, 'string', 'path must be of type `string`')
Assert.strictEqual(typeof params, 'object', 'params must be of type `object`')
const url = URL.parse(path, true)
Object.keys(params || {})
.filter(key => params[key] !== undefined)
.forEach(key => { url.query[key] = params[key] })
delete url.search // force recreation from .query
return url.format()
}
/**
* Get the SHA256 of the specified blob
*
* @param {String|Buffer} blob String or Buffer
* @param {String} [encoding] Optional encoding for the final digest
* @returns {Buffer|String} Buffer or string with SHA256
*/
utils.sha256 = function (blob, encoding) {
const hash = Crypto.createHash('sha256')
return hash.update(blob).digest(encoding)
}
/**
 * Get the SHA3/KECCAK256 of the specified blob
 *
 * NOTE(review): the test vector for '' matches Keccak-256 (Ethereum-style
 * padding), not NIST SHA-3 — CryptoJS.SHA3 implements the former.
 *
 * @param {String|Buffer} blob String or Buffer
 * @param {String} [encoding] Optional encoding for the final digest ('hex', 'base64' or undefined)
 * @returns {Buffer|String} Buffer (no encoding) or encoded string with SHA3/KECCAK256
 */
utils.keccak256 = function (blob, encoding) {
  Assert.ok(encoding == null || encoding === 'hex' || encoding === 'base64', 'encoding should be `hex` or undefined')
  if (blob instanceof Buffer) {
    // CryptoJS cannot consume Node Buffers directly; round-trip through base64.
    blob = CryptoJS.enc.Base64.parse(blob.toString('base64'))
  }
  const digest = CryptoJS.SHA3(blob, {outputLength: 256})
  if (encoding === 'hex') {
    return digest.toString(CryptoJS.enc.Hex)
  } else if (encoding === 'base64') {
    return digest.toString(CryptoJS.enc.Base64)
  } else {
    // Default: return a Buffer, converted via base64 from the CryptoJS word array.
    return Buffer.from(digest.toString(CryptoJS.enc.Base64), 'base64')
  }
}
/**
* Convert a certificate serial number to blockchain address
*
* @param {String} serialhex Hex encoded serial number
* @returns {String} 0x prefixed address
*/
utils.serialToAddress = function (serialhex) {
Assert.strictEqual(typeof serialhex, 'string', 'serialhex must be of type `string`')
return '0x' + serialhex.replace('0x', '').padStart(40, '0')
}
/**
* Base64 encode URL string
*
* @param {String|Uint8Array|Array} data data to encode
* @param {String} [encoding] Optional encoding of data
* @returns {String} base64-encoded URL
*/
utils.base64url = function (data, encoding) {
return Buffer.from(data, encoding).toString('base64').replace(/=/g, '').replace(/\//g, '_').replace(/\+/g, '-')
}
/**
* Get UTC seconds since UNIX epoch or convert date into unix time
*
* @param {Date} [date] Optional date object
* @returns {Number} Unix timestamp
*/
utils.getUnixTime = function (date) {
return Math.floor((date || new Date()).getTime() / 1000)
}
/**
* Create a JSON Web Signature
*
* @deprecated
* @param {object|String|Buffer} message Message can be string or object. Objects will be JSON stringified
* @param {String} secret HMAC shared secret
* @param {object} [header={alg: "HS256"}] JOSE header OPTIONAL
* @returns {String} Signed JWS
*/
utils.createHmacJws = function (message, secret, header = {}) {
  // Sign `message` as an HS256 JWT; extra JOSE header members may be
  // supplied via `header` (the `alg` is fixed by the options below).
  Assert.strictEqual(typeof secret, 'string', 'secret must be of type `string`')
  Assert.strictEqual(typeof header, 'object', 'header must be of type `object`')
  const options = {header, algorithm: 'HS256'}
  return JWT.sign(message, secret, options)
}
/**
* Create a JSON Web Signature
*
* @deprecated
* @param {object|String|Buffer} message Message can be string or object. Objects will be JSON stringified
* @param {object} credential key pair
* @param {object} [header={alg: "ES256"}] JOSE header OPTIONAL
* @returns {String} Signed JWS
*/
utils.createEcdsaJws = function (message, credential, header = {}) {
  // Sign `message` as an ES256 JWT using a jsrsasign-style key pair
  // (expects hex-encoded `pubKeyHex` and `prvKeyHex` members).
  Assert.strictEqual(typeof credential, 'object', 'credential must be of type `object`')
  Assert.strictEqual(typeof header, 'object', 'header must be of type `object`')
  // Build a JWK from the public point, then...
  const jwk = utils.hexToJwk(credential.pubKeyHex)
  // Add the private key to the public key JWK
  jwk.d = utils.base64url(credential.prvKeyHex, 'hex')
  // ...render it as a PEM that the jsonwebtoken library can sign with
  const pem = utils.jwkToPem(jwk)
  const options = {header, algorithm: 'ES256'}
  return JWT.sign(message, pem, options)
}
/**
* Verify a JSON Web Signature.
*
* @deprecated
* @param {String} jws the JWS or JWT as string
* @param {String|function|Object} secretOrCallback Shared secret or public key
* @returns {Object|String|null} the parsed claims or `null` on failure
*/
utils.verifyJws = function (jws, secretOrCallback) {
  // Verify a JWS/JWT. `secretOrCallback` may be: a shared-secret string, a
  // 130-char uncompressed hex EC public key, a JWK object, a jsrsasign key
  // object (with `.pubKeyHex`), or a function mapping the JOSE header to any
  // of those. The chain of `if`s below normalizes each form to a PEM/secret.
  Assert.strictEqual(typeof jws, 'string', 'jws must be of type `string`')
  // Convert the given secret into a secret or public key for jsonwebtoken,
  if (typeof secretOrCallback === 'function') {
    // JWT.verify will no longer return a value if we use the callback overload
    const decoded = JWT.decode(jws, {complete: true})
    // Removed promise support since it was never actually used anywhere
    secretOrCallback = secretOrCallback(decoded.header)
  }
  if (typeof secretOrCallback === 'object' && secretOrCallback.pubKeyHex) {
    // Convert jsrsasign credential to hex public key
    secretOrCallback = secretOrCallback.pubKeyHex
  }
  if (typeof secretOrCallback === 'string' && secretOrCallback.length === 130) {
    // Convert uncompressed hex public key to JWK
    secretOrCallback = utils.hexToJwk(secretOrCallback)
  }
  if (typeof secretOrCallback === 'object' && secretOrCallback.kty) {
    // Convert JWK to PEM public key
    secretOrCallback = utils.jwkToPem(secretOrCallback)
  }
  try {
    return JWT.verify(jws, secretOrCallback)
  } catch (err) {
    // Keep the old behavior of returning null instead of throwing
    if (err.name === 'JsonWebTokenError') {
      return null
    } else {
      throw err
    }
  }
}
/**
* @typedef JwkEC
* @type {object}
* @property {'EC'} kty Key Type
* @property {'P-256'} crv Elliptic Curve
* @property {string} x Base64-URL encoded X coordinate
* @property {string} y Base64-URL encoded Y coordinate
* @property {string} [d] Base64-URL encoded private key
*
* @typedef JwkRSA
* @type {object}
* @property {'RSA'} kty Key Type
* @property {string} e Base64-URL encoded exponent
* @property {string} n Base64-URL encoded modulus
*/
/**
* Convert a JWK into a hex public key
* @param {JwkEC} jwk JSON Web Key for public EC key
* @return {String} string with hex public key
*/
utils.jwkToHex = function (jwk) {
Assert.strictEqual(jwk.d, undefined, 'jwk.d must be of type `undefined`')
if (jwk.kty === 'EC' && jwk.crv === 'P-256') {
// Convert x,y coordinates from JWK to hex encoded public key
const hex = '04' + Buffer.concat([Buffer.from(jwk.x, 'base64'), Buffer.from(jwk.y, 'base64')]).toString('hex')
Assert.strictEqual(hex.length, 130)
return hex
} else if (jwk.kty === 'RSA' && jwk.e === 'AQAB') {
return Buffer.from(jwk.n, 'base64').toString('hex')
}
throw Error('Unsupported JWK:' + jwk)
}
/**
* Convert a hex key into JWK
* @param {String} keyHex hex encoded ECC key
* @param {String} [crv] curve identifier ("P-256" by default)
* @return {JwkEC} JSON Web Key
*/
utils.hexToJwk = function (keyHex, crv = 'P-256') {
  // Split an uncompressed SEC1 public key (0x04 || X || Y, 65 bytes) into
  // base64url-encoded JWK coordinates.
  Assert.strictEqual(typeof keyHex, 'string', 'pubKeyHex must be of type `string`')
  const raw = Buffer.from(keyHex, 'hex')
  if (raw.length !== 65) {
    throw Error('Unsupported key: ' + keyHex)
  }
  Assert.ok(keyHex.startsWith('04'), 'pubKeyHex must be an uncompressed ECC public key')
  return {
    crv,
    'kty': 'EC',
    'x': utils.base64url(raw.slice(1, 33)),
    'y': utils.base64url(raw.slice(33))
  }
}
function createDerChunk (tag, ...nested) {
let header
const size = nested.reduce((p, b) => p + b.length, 0)
if (size < 0x80) {
header = Buffer.alloc(2)
header.writeUInt8(size, 1)
} else if (size < 0x100) {
header = Buffer.alloc(3)
header.writeUInt8(0x81, 1)
header.writeUInt8(size, 2)
} else {
Assert.ok(size <= 0xffff, 'Invalid PEM size: ' + size)
header = Buffer.alloc(4)
header.writeUInt8(0x82, 1)
header.writeUInt16BE(size, 2)
}
header.writeUInt8(tag)
return Buffer.concat([header, ...nested])
}
function readDerChunk (buffer, offset = 0) {
offset++
let size = buffer.readUInt8(offset++)
if (size === 0x81) {
size = buffer.readUInt8(offset++)
Assert.ok(size >= 0x80, 'Invalid PEM size: ' + size)
} else if (size === 0x82) {
size = buffer.readUInt16BE(offset)
Assert.ok(size >= 0x100, 'Invalid PEM size: ' + size)
offset += 2
} else {
Assert.ok(size < 0x80, 'Invalid PEM size: ' + size)
}
return buffer.slice(offset, offset + size)
}
/**
* Convert a PEM encoded public key to JWK, with minimal checking.
* @param {string} pem Public key in PEM format
* @returns {JwkRSA|JwkEC} JSON Web Key
*/
utils.pemToJwk = function (pem) {
  // Accepts "BEGIN ... KEY" PEM armors (e.g. PUBLIC KEY, EC PRIVATE KEY);
  // the [A-V ] class matches the words between BEGIN/END and KEY.
  const base64 = pem.match(/^-----BEGIN [A-V ]{6,10} KEY-----([^-]+)-----END [A-V ]{6,10} KEY-----/)
  if (!base64) {
    throw Error('Unsupported PEM:' + pem)
  }
  // Decode the armored body and hand off to the DER parser
  const der = Buffer.from(base64[1], 'base64')
  return utils.derToJwk(der)
}
/**
* Convert a DER encoded public key to JWK, with minimal checking.
* @param {Buffer} der Public key in DER format
* @returns {JwkRSA|JwkEC} JSON Web Key
*/
utils.derToJwk = function (der) {
  // Minimal hand-rolled DER walker: peel the outer SEQUENCE, then dispatch
  // on the first inner element (AlgorithmIdentifier for public keys, or an
  // INTEGER version byte for private-key formats).
  const main = readDerChunk(der)
  const type = readDerChunk(main)
  const key = readDerChunk(main, type.length + 2)
  switch (type.toString('hex')) {
    case '06092a864886f70d0101010500': {
      // RSA public (rsaEncryption OID + NULL params)
      const rsa = readDerChunk(key, 1)
      // Drop the first modulus byte — presumably the INTEGER's 0x00 sign
      // byte; TODO confirm this holds for all moduli
      const n = readDerChunk(rsa).slice(1)
      // Bytes used by the modulus length field: 1, 2 (0x81 xx) or 3 (0x82 xx xx)
      const lenSize = n.length >= 0x100 ? 3 : (n.length >= 0x80 ? 2 : 1)
      const e = readDerChunk(rsa, n.length + lenSize + 2)
      return {
        'kty': 'RSA',
        'e': utils.base64url(e),
        'n': utils.base64url(n)
      }
    }
    case '06072a8648ce3d020106082a8648ce3d030107': {
      // EC public, curve P-256
      // 66 bytes = unused-bits byte + 0x04 prefix + 32-byte X + 32-byte Y
      Assert.strictEqual(key.length, 66)
      const x = key.slice(2, 34)
      const y = key.slice(34, 66)
      return {
        'crv': 'P-256',
        'kty': 'EC',
        'x': utils.base64url(x),
        'y': utils.base64url(y)
      }
    }
    case '00': {
      // PKCS#8 wrapper (version 0): the real key structure is nested deeper
      const nested = readDerChunk(main, 5 + key.length)
      return utils.derToJwk(nested)
    }
    case '01': {
      // SEC1 EC private key (version 1)
      let skip = 5 + key.length
      // Scan forward to the [1]-tagged element (0xa1) holding the public point
      while (main[skip] !== 0xa1) {
        skip += 2 + readDerChunk(main, skip).length
      }
      const pub = readDerChunk(main, 2 + skip)
      Assert.strictEqual(pub.length, 66)
      const x = pub.slice(2, 34)
      const y = pub.slice(34, 66)
      return {
        'crv': 'P-256',
        'kty': 'EC',
        // NOTE(review): `key` is already a Buffer, so the 'hex' encoding
        // argument is ignored by Buffer.from inside base64url
        'd': utils.base64url(key, 'hex'),
        'x': utils.base64url(x),
        'y': utils.base64url(y)
      }
    }
    default:
      throw Error('Unsupported PEM type:' + type.toString('hex'))
  }
}
function wrap (str) {
  // Insert a newline after every 64 characters (PEM body line wrapping).
  return str.replace(/(.{64})/g, (line) => line + '\n')
}
/**
* Convert a JWK into a hex public key
* @param {JwkEC|JwkRSA} jwk JSON Web Key for public EC key
* @return {String} string with PEM public key
*/
utils.jwkToPem = function (jwk) {
  // Render a JWK as a PEM: EC public key, EC private key (when `d` is
  // present), or RSA public key. EC output is byte-patched templates; RSA is
  // assembled with createDerChunk. Template-literal continuation lines are
  // at column 0 on purpose so the PEM body has no leading whitespace.
  if (jwk.kty === 'EC' && jwk.crv === 'P-256') {
    // Convert x,y coordinates from JWK to public key
    const xy = Buffer.concat([Buffer.from(jwk.x, 'base64'), Buffer.from(jwk.y, 'base64')])
    Assert.strictEqual(xy.length, 64)
    if (jwk.d) {
      // Add the private key as well
      // Fixed SEC1 prefix: SEQUENCE(0x77), INTEGER version 1, OCTET STRING(32)
      const header = Buffer.from('30770201010420', 'hex')
      const pk = Buffer.from(jwk.d, 'base64')
      // [0] P-256 curve OID + [1] BIT STRING header for the public point
      const xyhdr = Buffer.from('a00a06082a8648ce3d030107a14403420004', 'hex')
      const all = Buffer.concat([header, pk, xyhdr, xy])
      return wrap(`-----BEGIN EC PRIVATE KEY-----
${all.toString('base64')}
-----END EC PRIVATE KEY-----`)
    } else {
      // Basic template for the PEM; we'll overwrite the coordinates in-place
      return wrap(`-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE${xy.toString('base64')}
-----END PUBLIC KEY-----`)
    }
  } else if (jwk.kty === 'RSA') {
    // Convert public key from JWK to base64 encoded public key
    const pub = Buffer.from(jwk.n, 'base64')
    const exp = Buffer.from(jwk.e, 'base64')
    // INTEGER modulus (leading zero byte keeps it non-negative), INTEGER exponent
    const pubDer = createDerChunk(0x02, Buffer.alloc(1), pub)
    const expDer = createDerChunk(0x02, exp)
    const key = createDerChunk(0x30, pubDer, expDer)
    // BIT STRING wrapper (leading byte = unused-bit count, zero)
    const pkey = createDerChunk(0x03, Buffer.alloc(1), key)
    // SPKI SEQUENCE { AlgorithmIdentifier(rsaEncryption), BIT STRING }
    const der = createDerChunk(0x30, Buffer.from('300d06092a864886f70d0101010500', 'hex'), pkey)
    return wrap(`-----BEGIN PUBLIC KEY-----
${der.toString('base64')}
-----END PUBLIC KEY-----`)
  }
  throw Error('Unsupported JWK:' + jwk)
}
/**
* Verify an ECDSA named curve signed message
*
* @param {String} curveName Curve name (secp256r1)
* @param {String} message Message payload
* @param {String|Object} pubkey Public key to check signature against (hex)
* @param {Buffer|String} signature Signature payload (hex)
* @return {boolean} Indicate whether signature is correct
*/
utils.checkECDSA = function (curveName, message, pubkey, signature) {
  // Verify an ECDSA signature over SHA-256(message) using jsrsasign.
  Assert.strictEqual(typeof curveName, 'string', 'curveName must be of type `string`')
  // Verify a digest value
  const digest = utils.sha256(message, 'hex')
  // Normalize the public key to an uncompressed hex string
  if (pubkey.kty) {
    pubkey = utils.jwkToHex(pubkey)
  } else if (pubkey.pubKeyHex) {
    pubkey = pubkey.pubKeyHex
  }
  if (signature instanceof Buffer) {
    signature = signature.toString('hex')
  }
  // Convert r||s signatures to proper DER
  // NOTE(review): the `/^303e02/i` guard only recognizes DER sequences with
  // exactly 0x3e content bytes; typical P-256 DER signatures start
  // 3044/3045/3046 — confirm the intent of this check.
  if (signature.length === 128 && !/^303e02/i.test(signature)) {
    const r = Buffer.from(signature.substr(0, 64), 'hex')
    const s = Buffer.from(signature.substr(64), 'hex')
    signature = createDerChunk(0x30, createDerChunk(0x02, r), createDerChunk(0x02, s)).toString('hex')
  }
  const curve = new Jsrsasign.KJUR.crypto.ECDSA({xy: pubkey, curve: curveName})
  return curve.verifyHex(digest, signature, pubkey)
}
/**
* Convert a user public key to blockchain address
*
* @param {String} pubkeyhex User ECC public key (hex encoded)
* @returns {String} User address with leading 0x
*/
utils.userPubKeyHexToAddress = function (pubkeyhex) {
  // Address = last 20 bytes of SHA-256 over the raw (prefix-stripped)
  // uncompressed public-key point.
  Assert.strictEqual(typeof pubkeyhex, 'string', 'pubkeyhex must be of type `string`')
  Assert.ok(pubkeyhex.startsWith('04'), 'pubkeyhex must be an uncompressed ECC public key')
  Assert.strictEqual(pubkeyhex.length, 130)
  const point = Buffer.from(pubkeyhex.substr(2), 'hex')
  const digestHex = utils.sha256(point, 'hex')
  // 32-byte digest minus the first 12 bytes leaves a 20-byte address
  return '0x' + digestHex.substr(24)
}
/**
* Wrap the call and change the callback into a promise resolve or reject.
* @param {function} call A function that takes a callback as last parameter
* @returns {function} Wrapper function that returns a promise
*/
utils.promisify = function (call) {
return function () {
// Save the 'this' reference for use inside the promise
const self = this
const args = Array.prototype.slice.call(arguments)
return new Promise((resolve, reject) => {
// Append the callback that either rejects or resolves the promise
args.push((err, a, b, c, d) => err ? reject(err) : resolve(a, b, c, d))
call.apply(self, args)
})
}
}
/**
* Generate a 32-byte random nonce.
* @param {String} [encoding] Encoding for result (default base64)
* @param {Number} [length] Number of bytes for the result (default 32)
* @returns {String} The encoding of the nonce
*/
utils.generateNonce = function (encoding, length) {
return Crypto.randomBytes(length || 32).toString(encoding || 'base64')
}
/**
* Wait for specified number of milliseconds (ms).
* @param {Number} durationMS Number of milliseconds to wait.
* @return {Promise} Promise that resolves in due time.
*/
utils.wait = function (durationMS) {
return new Promise((resolve, reject) => {
return setTimeout(resolve, durationMS)
})
}
/**
* Wait until the callback returns a truthy value (or timeout).
* @param {Number} ms Number of milliseconds to wait.
* @param {function} callback Callback to invoke (once a second).
* @param {Number} [step] Number of milliseconds to wait per try.
* @return {Promise} Promise that resolves when the callback is truthy
*/
utils.waitUntil = function (ms, callback, step = 1000) {
  // Poll `callback` (at most once per `step` ms) until it returns truthy or
  // the `ms` budget runs out; resolves with the last callback result.
  Assert.strictEqual(typeof ms, 'number', 'ms must be of type `number`')
  Assert.strictEqual(typeof callback, 'function', 'callback must be of type `function`')
  Assert.ok(step > 0, 'step must be of type `number` > 0')
  return Promise.resolve()
    .then(callback)
    .then(done => {
      if (done || ms <= 0) {
        return done
      }
      // Wait one step (clamped to the remaining budget), then recurse with
      // the budget reduced by a full step
      return utils.wait(Math.min(step, ms))
        .then(() => utils.waitUntil(ms - step, callback, step))
    })
}
/**
* Generate a new key pair.
* @param {string|number} [curveOrLength] The name of the EC curve or RSA modulus length (optional)
* @return {Promise.<Object>} New jsrsasign key object of given curve
*/
utils.generateKeyPair = async function (curveOrLength) {
  // Generate a key pair, preferring the native Crypto.generateKeyPair API
  // and falling back to jsrsasign's pure-JS generation when that throws.
  let pair
  try {
    const {promisify} = require('util')
    // A numeric argument selects RSA with that modulus length; otherwise an
    // EC curve name (default prime256v1 / secp256r1).
    const keyOptions = Object.assign(curveOrLength > 0 ? { modulusLength: curveOrLength } : { namedCurve: curveOrLength || 'prime256v1' },
      {
        publicKeyEncoding: {
          type: 'spki',
          format: 'pem'
        },
        privateKeyEncoding: {
          type: 'pkcs8',
          format: 'pem'
        }
      })
    const {privateKey} = await promisify(Crypto.generateKeyPair)(
      curveOrLength > 0 ? 'rsa' : 'ec',
      keyOptions
    )
    // Re-parse the PEM with jsrsasign to expose prvKeyHex/pubKeyHex
    pair = Jsrsasign.KEYUTIL.getKey(privateKey)
  } catch (err) {
    // NOTE(review): this catches *all* errors (not just "API unavailable"),
    // so e.g. a bad curve name silently falls through to jsrsasign
    pair = Jsrsasign.KEYUTIL.generateKeypair(curveOrLength > 0 ? 'RSA' : 'EC', curveOrLength || 'secp256r1').prvKeyObj
  }
  return {
    prvKeyHex: pair.prvKeyHex,
    pubKeyHex: pair.pubKeyHex
  }
}
/**
* HMAC-based One-time Password Algorithm
* @param {string|Buffer} key The shared key for the HMAC.
* @param {string|Buffer} message The message (64-bit counter or 32-bit time/30)
* @return {string} six digit HOTP code
*/
utils.oneTimePassword = function (key, message) {
const hash = Crypto.createHmac('sha1', key).update(message).digest()
const offset = hash[hash.length - 1] & 0xf
// 4 bytes starting at the offset, remove the most significant bit
const truncatedHash = hash.readInt32BE(offset) & 0x7FFFFFFF // big endian
const code = truncatedHash % 1000000
// pad code with 0 until length of code is 6;
return String('00000' + code).slice(-6)
}
/**
* Decode a string with HEX-encoded data into a plain binary string.
* @param {string} hex String with HEX-encoded data
* @param {string} [encoding='binary'] Optional encoding of the data
* @returns {string} String with binary encoded data
*/
utils.parseHexString = function (hex, encoding) {
Assert.strictEqual(typeof hex, 'string', 'hex must be of type `string`')
return Buffer.from(hex, 'hex').toString(encoding || 'binary')
}
/**
* Parse ASN.1 YYMMDDHHMMSSZ or YYYYMMDDHHMMSSZ into a Date object.
* @param {string} date ASN.1 YYMMDDHHMMSSZ or YYYYMMDDHHMMSSZ date string.
* @returns {?Date} New date object or `null` for invalid dates
*/
utils.parseX509Date = function (date) {
  // Parse ASN.1 UTCTime (YYMMDDHHMMSSZ) or GeneralizedTime (YYYYMMDDHHMMSSZ)
  // into a Date; returns null when the string doesn't match.
  Assert.strictEqual(typeof date, 'string', 'date must be of type `string`')
  const match = /^([0-9]{2,4})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{3})([0-9]{2})Z$/.exec(date) // placeholder
  return null
}
/**
* Create ASN.1 YYMMDDHHMMSSZ or YYYYMMDDHHMMSSZ into a string.
* @param {Date} date Date object.
* @returns {string} New ASN.1 YYMMDDHHMMSSZ or YYYYMMDDHHMMSSZ date string
*/
utils.dateToString = function (date) {
if (date.getFullYear() < 1950 || date.getFullYear() > 2049) {
return Moment(date).utc().format('YYYYMMDDHHmmss[Z]', date)
} else {
return Moment(date).utc().format('YYMMDDHHmmss[Z]', date)
}
}
/**
* Check whether the given string is a valid blockchain address.
* @param {string} str The string to check for 0x-prefixed address
* @returns {boolean} `true` if the string is a valid address; false otherwise.
*/
utils.isAddress = function (str) {
Assert.strictEqual(typeof str, 'string', 'str must be of type `string`')
return /^0x[0-9a-fA-F]{40}$/.test(str)
}
/**
* @param {string} pem PEM string
* @returns {Jsrsasign.X509} certificate
*/
function readPEM (pem) {
Assert.strictEqual(typeof pem, 'string', 'pem must be of type `string`')
// Ignore PEM headers (if present)
const base64 = pem.replace(/^-----(BEGIN|END) CERTIFICATE-----/g, '')
// Load the certificate from PEM string
const cert = new Jsrsasign.X509()
cert.readCertHex(Buffer.from(base64, 'base64').toString('hex'))
return cert
}
/**
* @param {Jsrsasign.X509} cert certificate
* @param {Jsrsasign.X509} caCert CA certificate
* @returns {boolean} success or failure
*/
function verifySignature (cert, caCert) {
try {
const algName = cert.getSignatureAlgorithmName()
const cryptoAlg = CrytoAlg.get(algName)
if (cryptoAlg == null) {
throw Error(`Unsupported signature algorithm: ${algName}`)
}
const hSigVal = Buffer.from(cert.getSignatureValueHex(), 'hex')
const tbs = Buffer.from(Jsrsasign.ASN1HEX.getTLVbyList(cert.hex, 0, [0], '30'), 'hex')
const verify = Crypto.createVerify(cryptoAlg)
verify.update(tbs)
const caPem = Jsrsasign.KEYUTIL.getPEM(caCert.getPublicKey())
return verify.verify(caPem, hSigVal)
} catch (err) {
throw Error(`Invalid PEM: ${err}`)
}
}
/**
* @typedef Attribute
* @type {object}
* @property {string} oid The OID path for this attribute
* @property {string} value The value of this attribute
*
* @typedef Claim
* @type {object}
* @property {string} subjectaddress The public key hash
* @property {string} serialNo
* @property {string} issuer The X.500 issuer name
* @property {?string} issuerPem The issuer X.509 PEM
* @property {Array.<string>} ocsp Array of OCSP responders
* @property {Array.<string>} caissuer Array of issuers
* @property {Array.<string>} crl Array of CRL distribution URIs
* @property {Date} notBefore
* @property {Date} notAfter
* @property {Array.<Attribute>} attributes The array of attributes of this claim
**/
/**
* Parse a PEM encoded X509 certificate.
* @param {string} pem The X509 certificate in PEM format
* @param {Array.<string>} [chain] The X509 certificates of the CA chain
* @returns {Claim} The parsed X509 certificate
*/
utils.parsePem = function (pem, chain) {
// Load the certificate from PEM string
const cert = readPEM(pem)
// Validate certificate chain (issuer whitelist)
let issuerPem = null
if (chain) {
issuerPem = chain.find(caPem => verifySignature(cert, readPEM(caPem)))
if (issuerPem == null) {
throw Error('Signature verification failed')
}
}
const serialNo = utils.serialToAddress(cert.getSerialNumberHex())
const issuer = cert.getIssuerString()
const ocsp = cert.getExtAIAInfo() || {caissuer: [], ocsp: []}
const crl = cert.getExtCRLDistributionPointsURI() || []
const subjectPubkey = cert.getPublicKey().pubKeyHex
const subjectaddress = subjectPubkey !== undefined ? utils.userPubKeyHexToAddress(subjectPubkey) : undefined
const notAfter = utils.parseX509Date(cert.getNotAfter())
const notBefore = utils.parseX509Date(cert.getNotBefore())
// Extract the token information from the DName
const attributes = []
try {
for (let d = 0; ; d++) {
const tlv = Jsrsasign.ASN1HEX.getTLVbyList(cert.hex, 0, [0, 5, d])
const oidHex = Jsrsasign.ASN1HEX.getVbyList(tlv, 0, [0, 0])
if (oidHex === '') break
const valueHex = Jsrsasign.ASN1HEX.getVbyList(tlv, 0, [0, 1])
// CONSIDER: use encoding based on ASN.1 value type
const value = utils.parseHexString(valueHex, 'utf-8')
const oid = Jsrsasign.ASN1HEX.hextooidstr(oidHex)
attributes.push({oid, value})
}
} catch (err) {}
return Object.assign({subjectaddress, serialNo, notBefore, notAfter, attributes, issuer, crl, issuerPem}, ocsp)
}
/**
* Validate the parsed claim
* @param {Claim} claim Parsed claim object
* @param {Date} [date] Optional date to test against
* @returns {Boolean} success or failure
*/
utils.validateClaim = function (claim, date) {
Assert.strictEqual(typeof claim, 'object', 'claim must be of type `object`')
const now = date || new Date()
if (claim.notAfter < now) {
return false
}
if (claim.notBefore > now) {
return false
}
if (claim.issuerPem) {
// Rescursively validate the issuer
const issuerClaim = utils.parsePem(claim.issuerPem)
return utils.validateClaim(issuerClaim, now)
}
return true
}
/**
* Return the absolute path to the Trusted Key root CA certificate.
* @returns {string} Absolute path to the Trusted Key root CA certificate
*/
utils.getRootPemPath = function () {
return require('path').join(__dirname, 'tkroot.pem')
}
/**
* Get the JWK thumbprint for the given key.
* @param {JwkEC|JwkRSA} jwk JSON Web Key
* @returns {string} Base64-URL encoded thumbprint for the JWK
*/
utils.getJwkThumbprint = function (jwk) {
// Only the required members of a key's representation are used when computing its JWK Thumbprint value.
const json = JSON.stringify(jwk, ['crv', 'e', 'k', 'kty', 'n', 'x', 'y'])
return this.base64url(utils.sha256(json))
}
/**
* Verify x5c X509 cert chain
* @param {Array.string} chain encoded BASE64 PEMs in x5c
* @return {boolean} evaluation of chain validation
*/
utils.verifyChain = function (chain) {
Assert.ok(Array.isArray(chain), 'chain must be instance of `Array`')
const certs = chain.map(p => readPEM(p))
const getCAIdx = (i) => (i === certs.length - 1) ? i : i + 1
for (let i = 0; i < certs.length; i++) {
if (!verifySignature(certs[i], certs[getCAIdx(i)])) {
return false
}
}
return true
}
<file_sep>const Utils = require('../utils')
const HttpUtils = require('./http')
module.exports = DocsigService
/**
* Submit
*
* @constructor
* @param {String} backendUrl The base backend URL
* @param {String} appId Wallet application ID
* @param {String} appSecret Wallet shared secret
*
* @param {String} docsigAppId Docsig application ID
* @param {String} docsigAppSecret Docsig shared secret
*/
function DocsigService (backendUrl, appId, appSecret, docsigAppId, docsigAppSecret) {
this.httpClient = new HttpUtils(backendUrl, appId, appSecret)
this._docsigAppId = docsigAppId
this._docsigAppSecret = docsigAppSecret
}
/**
* Submit a document to be signed
*
* @param {String} signatoryEmail Signatory email address
* @param {String} callbackUrl Url where signed PDF will be uploaded with PUT
* @param {String} documentUrl Url where document can be downloaded
* @param {Array.<Dotted>} objectIds Array of objectIds to request from signatory
* @returns {Promise.<Object>} JSON response from API
*/
DocsigService.prototype.documentSignRequest = function (signatoryEmail, callbackUrl, documentUrl, objectIds) {
const appId = this._docsigAppId
const appSecret = this._docsigAppSecret
const payload = {
iss: appId,
signatory: signatoryEmail,
callbackUrl: callbackUrl,
documentUrl: documentUrl,
objectIds: objectIds
}
const header = {typ: 'JWT', iss: appId}
const jwt = Utils.createHmacJws(payload, appSecret, header)
return this.httpClient.post('newDocumentSignRequest?jwt=' + jwt)
}
| 6072195b6d6744e0386a633c928d14ae4b495ffe | [
"Markdown",
"JavaScript",
"Makefile"
] | 12 | Makefile | trustedkey/trustedkey.js | edf2f23ac490aae6ae4cbf26b46b2893489adb37 | f655ad5868898d7fef25ed4053eceb7882739f25 | |
refs/heads/master | <file_sep>var express = require('express');
var app = express();
var bodyParser = require('body-parser');
var Map = require('collections/map');
var MultiMap = require('collections/multi-map');
var INT_MAX = 1000000007;
app.use(express.static(__dirname + '/'));
app.use(bodyParser.urlencoded());
app.get('/',function(req,res){
res.sendfile('graph.html');
});
var server = app.listen(3200, function () {
var host = server.address().address;
var port = server.address().port;
console.log('Example app listening at http://%s:%s', host, port);
});
app.post('/djikstra',function(req,res){
var info = req.body;
var vertices = parseInt(info['vertices']);
var source = parseInt(info['init']);
var arr = info['vector'];
var weight = info['weight'];
var len = info['len'];
var distance = [];
for(var i = 1;i<=vertices;i++)
{
distance[i] = INT_MAX;
}
distance[source] = 0;
var map = new MultiMap;
map.set(source,[distance[source]]);
while(map.length)
{
var u = map.sorted()[0][0];
map.delete(distance.indexOf(u));
u = distance.indexOf(u);
--u;
//strangely, nodejs ignores empty slots in 2D arrays
for(var v = 0;v<parseInt(len[u+1]);v++)//but retaisn them in 1D array :P
{
var dest = parseInt(arr[u][v]);
if(dest != 0)
{
var w = parseInt(weight[u][v]);
if(distance[dest]>distance[u+1]+w)
{
if(distance[v]!=INT_MAX)
{
map.delete(dest);
}
distance[dest] = distance[u+1]+w;
map.set(dest,[distance[dest]]);
}
}
}
}
res.send(distance);
});<file_sep>This tool requires nodejs to run, nd runs on localhost:3200 for now.
It also requires the following dependencies
1. collectionsjs
2. expressjs
3. body-parser
Kindly npm install them and then run. | 5d1ebe502481a7ead4ba22b0fc4d81ee9d657e1b | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | ActionSeeker/GraphJS | 5f0293ba8228b2797ab6494e082ac1440bf80f17 | 9373bbf0eaba3828bd931b30bc8b41af15e8d9d1 | |
refs/heads/master | <repo_name>edwincarlson/RoundCaddy<file_sep>/public/js/services/SessionService.js
angular.module('SessionService', []).factory('Session', function($rootScope, $location, $cookieStore, socket) {
var login = function(user){
var currentUser = $cookieStore.get('user');
if (currentUser != undefined){
$cookieStore.remove('user');
$cookieStore.put("user", user);
console.log(user);
$rootScope.username = user.username;
if(user.role === "owner"){
$rootScope.noSession = false;
$rootScope.studentSession = false;
$rootScope.adminSession = true;
}
else if(user.role === "student"){
$rootScope.noSession = false;
$rootScope.studentSession = true;
$rootScope.adminSession = false;
}
}
else{
$cookieStore.put("user", user);
}
}
// creates new session - user session, admin session
var createSession = function(course){
if(course.owner === true){
currentUser= $cookieStore.get('user');
$cookieStore.remove('user');
currentUser.currentCourse = {
_id: course.OrganizationID,
name: course.Name
};
currentUser.session = 'Professor';
$cookieStore.put('user', currentUser);
$rootScope.noSession = false;
$rootScope.studentSession = false;
$rootScope.adminSession = true;
$rootScope.username = currentUser.username;
}
else if(course.owner === false){
currentUser= $cookieStore.get('user');
$cookieStore.remove('user');
currentUser.currentCourse = {
_id: course.OrganizationID,
name: course.Name
};
currentUser.session = 'Student';
$cookieStore.put('user', currentUser);
$rootScope.noSession = false;
$rootScope.studentSession = true;
$rootScope.adminSession = false;
$rootScope.username = currentUser.username;
}
}
// creates new session - no session, user session, admin session
var checkSession = function(user){
if(user === undefined){
$rootScope.studentSession = false;
$rootScope.adminSession = false;
$rootScope.noSession = true;
//window.location.replace("https://learn.aero.und.edu/index.asp");
window.close();
}
else if(user.role === "owner"){
$rootScope.noSession = false;
$rootScope.studentSession = false;
$rootScope.adminSession = true;
$rootScope.username = user.username;
socketUser = {
username : user.username,
_id : user._id
}
socket.emit("adduser", socketUser);
}
else if(user.role === "student"){
$rootScope.noSession = false;
$rootScope.adminSession = false;
$rootScope.studentSession = true;
$rootScope.username = user.username;
socketUser = {
username : user.username,
_id : user._id
}
socket.emit("adduser", socketUser);
}
}
// terminate current session
var endSession = function(){
//delete cookie we created at login
//$cookieStore.remove('user');
$rootScope.username = '';
$rootScope.studentSession = false;
$rootScope.adminSession = false;
$rootScope.noSession = true;
//$location.path('/login');
}
return {
login : login,
createSession : createSession,
checkSession : checkSession,
endSession : endSession
};
});<file_sep>/public/js/services/CourseService.js
angular.module('CourseService', []).factory('Course', ['$http', function($http) {
return {
// call to get all rooms
getCourses : function(userID) {
return $http.get('/api/user/courses/' + userID);
}
}
}]);<file_sep>/public/js/services/UserService.js
angular.module('UserService', []).factory('UserService', ['$http', function($http) {
//user login
var authenticate = function() {
return $http.post('/api/authenticate');
}
//user logout
var logout = function(userData) {
return $http.post('/api/logout', userData);
}
return {
authenticate : authenticate,
logout : logout
};
}]);<file_sep>/public/js/controllers/HistoryCtrl.js
angular.module('HistoryCtrl', []).controller('HistoryController', function($scope, $state, $cookieStore, $filter, Room, ngTableParams, $stateParams, Chat, Session) {
//getting currentuser for session validation
var currentUser = $cookieStore.get('user');
if (currentUser != undefined){
Session.checkSession(currentUser);
}
else{
Session.endSession();
$state.go('login');
}
$scope.currentSession = "All";
$scope.sessionSelect = function(session){
var data = [];
$scope.currentSession = session;
}
$scope.allMsgs = [];
$scope.currentSession = null;
var getData = function(){
var sessionMsgs = [];
var data = [];
if($scope.currentSession === null){
return data;
}
else if($scope.currentSession === "All Days"){
$scope.msgs = $scope.allMsgs;
data = $scope.allMsgs;
return data;
}
else{
for(var i=0; i<$scope.allMsgs.length; i++){
if($scope.currentSession === moment(new Date($scope.allMsgs[i].posted)).format("dddd, MMMM Do YYYY")){
sessionMsgs.push($scope.allMsgs[i]);
}
}
$scope.msgs = sessionMsgs;
data = sessionMsgs
return data;
}
}
Room.getRoom($stateParams.room)
.success(function(data, status, headers, config) {
$scope.currentRoom = data;
});
Chat.get($stateParams.room)
//we comminucated with the server
.success(function(data, status, headers, config) {
$scope.msgs = [];
$scope.sessions = [];
var newSession = null;
//received past messages for this room
for(var i=0; i<data.length; i++){
var date = data[i].posted;
data[i].posted = moment(date).format('MM/D/YYYY h:mm a');
newSession = moment(new Date(data[i].posted)).format("dddd, MMMM Do YYYY");
var count = 0;
for(var x=0; x<$scope.sessions.length; x++){
if(newSession === $scope.sessions[x]){
count++;
}
}
if(count===0){
$scope.sessions.push(newSession);
}
$scope.allMsgs = data;
$scope.msgs = data;
$scope.currentSession = "All Days";
}
});
$scope.$watch("currentSession", function () {
$scope.tableParams.reload();
});
$scope.tableParams = new ngTableParams({
page: 1, // show first page
count: 10 // count per page
}, {
total: function () { return getData().length; }, // length of data
getData: function($defer, params) {
// use build-in angular filter
var filteredData = params.filter() ? $filter('filter')(getData(), params.filter()) : getData();
var orderedData = params.sorting() ? $filter('orderBy')(filteredData, params.orderBy()) : getData();
params.total(orderedData.length); // set total for recalc pagination
$defer.resolve(orderedData.slice((params.page() - 1) * params.count(), params.page() * params.count()));
},
$scope: { $data: {} }
});
});<file_sep>/public/js/controllers/CreateRoomCtrl.js
angular.module('CreateRoomCtrl', []).controller('CreateRoomController', function($scope, $state, $cookieStore, Room, UserService, Session, socket) {
//initializing user service
var User = UserService;
//checking for valid session
var currentUser = $cookieStore.get('user');
Session.checkSession(currentUser);
//date selectors for room creation
jQuery('#roomStart').datetimepicker();
jQuery('#roomEnd').datetimepicker();
//setting course name from current course
$scope.course = currentUser.course;
//reset successMessage
$scope.successMessage = false;
$scope.status = "Closed";
$scope.open = false;
$scope.openRoom = function(){
$scope.status = "Open";
$scope.open = true;
}
$scope.closeRoom = function(){
$scope.status = "Closed";
$scope.open = false;
}
//submit create room form for validation
$scope.submitCreateRoom = function(isValid){
var minLength = 0;
//input fields valid
$scope.titleValid = false;
$scope.descriptionValid = false;
$scope.startValid = false;
$scope.endValid = false;
// input fields invalid
$scope.titleInvalid = false;
$scope.selectInvalid = false;
$scope.startInvalid = false;
$scope.endInvalid = false;
//form is not valid
if(isValid === false){
//title validation
if($scope.roomCreateForm.title.$valid && $scope.roomCreateForm.title.$pristine === false){
$scope.titleValid = true;
}
else{
$scope.titleInvalid = true;
}
//description validation if one is supplied
if($scope.description === ''){
$scope.descriptionValid = false;
}
else if($scope.description === undefined){
$scope.descriptionValid = false;
}
else{
$scope.descriptionValid = true;
}
//start date validation if one is supplied
if($('#roomStart').val() === ''){
$scope.startValid = false;
}
else if($('#roomStart').val() === undefined){
$scope.startValid = false;
}
else{
if(moment().isAfter(new Date($('#roomStart').val()))){
$scope.startInvalid = true;
$scope.startMessage = "Start Date must be after the current date and time."
}
else{
$scope.startValid = true;
}
}
//end date validation if one is supplied
if($('#roomEnd').val() === ''){
$scope.endValid = false;
}
else if($('#roomEnd').val() === undefined){
$scope.endValid = false;
}
else{
if(moment(new Date($('#roomStart').val())).isAfter(new Date($('#roomEnd').val()))){
console.log("here")
$scope.endInvalid = true;
$scope.endMessage = "End Date must be after the Start date and time.";
}
else if(moment().isAfter(new Date($('#roomEnd').val()))){
$scope.endInvalid = true;
$scope.endMessage = "End Date must be after the current date and time.";
}
else{
$scope.endValid = true;
}
}
}
//form is valid
if(isValid){
$scope.titleValid = true;
$scope.submitStart = true;
$scope.submitEnd = true;
//description validation if one is supplied
if($scope.description === ''){
$scope.descriptionValid = false;
}
else if($scope.description === undefined){
$scope.descriptionValid = false;
}
else{
$scope.descriptionValid = true;
}
//start date validation if one is supplied
if($('#roomStart').val() === ''){
$scope.startValid = false;
}
else if($('#roomStart').val() === undefined){
$scope.startValid = false;
}
else{
if(moment().isAfter(new Date($('#roomStart').val()))){
$scope.startInvalid = true;
$scope.startMessage = "Start Date must be after the current date and time."
$scope.submitStart = false;
}
else{
$scope.startValid = true;
}
}
//end date validation if one is supplied
if($('#roomEnd').val() === ''){
$scope.endValid = false;
}
else if($('#roomEnd').val() === undefined){
$scope.endValid = false;
}
else{
if(moment(new Date($('#roomStart').val())).isAfter(new Date($('#roomEnd').val()))){
$scope.endInvalid = true;
$scope.endMessage = "End Date must be after the Start date and time.";
$scope.submitEnd = false;
}
else if(moment().isAfter(new Date($('#roomEnd').val()))){
$scope.endInvalid = true;
$scope.endMessage = "End Date must be after the current date and time.";
$scope.submitEnd = false;
}
else{
$scope.endValid = true;
}
}
if($scope.submitStart === true && $scope.submitEnd === true){
createRoom();
}
}
};
//function for create room form
var createRoom = function(){
//create JSON data of room
console.log($scope.description);
if($scope.description === '' || $scope.description === undefined){
$scope.description = "No Schedule";
}
var room = {
title : $scope.roomTitle,
description : $scope.description,
courseID : currentUser.courseID,
start : $('#roomStart').val(),
end : $('#roomEnd').val(),
open : $scope.open
}
//Call Room Service create which posts to db
Room.create(room)
//we comminucated with the server
.success(function(data, status, headers, config) {
//room was created
if(room.start === ''){
room.start = null;
}
if(room.end === ''){
room.end = null;
}
room._id = data.id;
socket.emit('createRoomReq', room);
$scope.successMessage = true;
$scope.alertMessage = "Room successfully created!"
//Reseting values after creating room
$scope.roomTitle = "";
$scope.description = "";
$('#roomStart').val("");
$('#roomEnd').val("");
$scope.status = "Closed";
$scope.open = false;
})
.error(function(data, status, headers, config) {
//not good
console.log(data);
console.log(status);
console.log(headers);
console.log(config);
});
};
});<file_sep>/public/js/app.js
var app = angular.module('golfApp', [
'ngCookies',
'ui.router',
'appRoutes',
'ui.bootstrap',
// 'RoomsCtrl',
// 'ChatCtrl',
// 'ChatService',
// 'CreateRoomCtrl',
// 'RoomService',
// 'EditRoomCtrl',
'LoginCtrl',
// 'UserService',
// 'TokenInterceptorService',
// 'SessionService',
'ProfileCtrl',
// 'SocketService',
// 'HistoryCtrl',
// 'CourseService',
// 'CoursesCtrl'
]);
// app.factory("socket", function(){
// var socket = io.connect('http://localhost:3000');
// return socket;
// });
// app.config(function($httpProvider){
// $httpProvider.interceptors.push('TokenInterceptor');
// });
// app.run(function($rootScope, $state, $cookieStore, Session) {
// $rootScope.$on("$stateChangeStart", function(event, next) {
// if (next.access.requiredLogin && $cookieStore.get('user') === undefined) {
// Session.endSession();
// $rootScope.accessMessage = "Must be logged in to access that feature.";
// $rootScope.accessError = true;
// }
// });
// }); | 79f72cc6ab2fce97877cb2cf1d4dc5ee1c34fb9a | [
"JavaScript"
] | 6 | JavaScript | edwincarlson/RoundCaddy | d1b4f9cf4b0778f95b331f50ab09e8efb7a300e9 | a8cc8240142b403f26272382e134eb8cedf8272f | |
refs/heads/master | <repo_name>thesekcy/NovoSiteEW21<file_sep>/ew21/app/Http/Controllers/TrabalheController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
class TrabalheController extends Controller
{
public function index(){
return view('trabalhe-conosco');
}
public function enviar(Request $req){
$rules = [
'nome' => 'required',
'email' => 'required',
'area' => 'required',
'mensagem' => 'required',
'curriculo' => 'mimes:jpeg,png,jpeg,pdf,doc,docx',
];
$errormessageEx = [
'O arquivo Anexado não é do de nem um formato aceito. <strong>png, jpg, jpeg, pdf, doc, docx</strong>'
];
$this->validate($req, $rules, $errormessageEx);
$dados = $req->all();
//dd($dados);
$curriculo = $req->file('curriculo');
$dir = "curriculos/";
$ex = $curriculo->guessClientExtension();
$fileName = $req['nome']."-".$req['email']."-".$req['area'].".".$ex;
$curriculo->move($dir, $fileName);
//messages
$erro = ":( Desculpe ocorreu um erro durante o envio do seu curriculo, se o erro persistir contate <EMAIL> e informe o problema. Obrigado.";
$sucesso = "Obrigado pelo envio do seu curriculo <strong>" .$dados['nome'] . "</strong>. Nossa equipe irá analisar e entrar em contato. Para duvidas ligue para (11) 3892-3099 | (11) 2098-6969";
if($dados !== null){
return redirect()->route('user.trabalhe-conosco')->with('success', $sucesso);
}else{
return redirect()->back()->with('error', $erro);
}
}
}
<file_sep>/ew21/app/Http/Controllers/ContatoController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Mail;
use App\Contatos;
class ContatoController extends Controller
{
public function index(){
return view('contato');
}
public function enviar(Request $req){
$dados = $req->all();
//messages
$erro = ":( Desculpe ocorreu um erro durante o envio do seu email, se o erro persistir contate <EMAIL> e informe o problema. Obrigado.";
$sucesso = "Obrigado pelo contato <strong>" .$dados['nome'] . "</strong>. Aguarde que nossa equipe irá entrar em contato. Para duvidas ligue para (11) 3892-3099 | (11) 2098-6969";
$objeto = (object)$dados['servicos'];
try{
Contatos::create($dados);
if($dados !== null){
Mail::send('mail.admin', ['body' => $dados, 'servicos' => $objeto], function($message){
$message->from('<EMAIL>', '<NAME>');
$message->to('<EMAIL>');
$message->subject('MKTEW21 - Contato realizado no site.');
});
Mail::send('mail.user', ['body' => $dados], function($message) use ($dados){
$message->from('<EMAIL>', '<NAME>');
$message->to($dados['email']);
$message->subject('MKTEW21 - Obrigado pelo contato.');
});
return redirect()->route('user.contato')->with('success', $sucesso);
}else{
return redirect()->back()->with('error', $erro);
}
//dd($dados);
}catch (Exception $e){
return redirect()->back()->with('error', $erro);
}
}
}
<file_sep>/ew21/app/Http/Controllers/MatInstController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Contatos;
class MatInstController extends Controller
{
public function index(){
return view('material-institucional');
}
public function enviar(Request $req){
$dados = $req->all();
//messages
$erro = ":( Desculpe ocorreu um erro durante o processo, se o erro persistir contate <EMAIL> e informe o problema. Obrigado.";
$sucesso = "Conteúdo liberado para download <strong>" .$dados['nome'] . "</strong>.";
try{
if($dados !== null){
Contatos::create($dados);
return redirect()->route('user.material-institucional')->with('success', $sucesso);
}else{
return redirect()->back()->with('error', $erro);
}
//dd($dados);
}catch (Exception $e){
return redirect()->back()->with('error', $erro);
}
}
public function download(){
$filename = 'Pocket_Tire_suas_duvidas_sobre_Adwords.pdf';
return response()->download(storage_path("app/public/{$filename}"));
}
}
<file_sep>/ew21/app/Http/Controllers/Controller.php
<?php
namespace App\Http\Controllers;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
class Controller extends BaseController
{
use AuthorizesRequests, DispatchesJobs, ValidatesRequests;
public function index (){
$json = file_get_contents('http://blog.mktew21.com.br/feed/json');
$registros = json_decode($json);
return view('welcome', compact('registros'));
}
public function politicasIndex(){
return view('politicas');
}
}
<file_sep>/ew21/routes/web.php
<?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', 'Controller@index');
Route::get('/home', 'Controller@index');
Route::get('/Servicos', ['as' => 'user.servicos','uses' => 'ServicosController@index']);
Route::get('/Politicas', ['as' => 'user.politicas','uses' => 'Controller@politicasIndex']);
Route::get('/Contato', ['as' => 'user.contato','uses' => 'ContatoController@index']);
Route::post('/Contato/Enviar', ['as' => 'user.contato.enviar','uses' => 'ContatoController@enviar']);
Route::get('/Trabalhe-Conosco', ['as' => 'user.trabalhe-conosco','uses' => 'TrabalheController@index']);
Route::post('/Trabalhe-Conosco/Enviar', ['as' => 'user.trabalhe-conosco.enviar','uses' => 'TrabalheController@enviar']);
Route::get('/Material-Institucional', ['as' => 'user.material-institucional','uses' => 'MatInstController@index']);
Route::post('/Material-Institucional/Enviar', ['as' => 'user.material-institucional.enviar','uses' => 'MatInstController@enviar']);
Route::get('/Material-Institucional/Download', ['as' => 'user.material-institucional.download','uses' => 'MatInstController@download']);
//Portifolio
Route::get('/Portifolio/Publicacoes-Segmentadas', ['as' => 'user.potifolio.publi-seg','uses' => 'PortifolioController@psIndex']);
Route::get('/Portifolio/Websites', ['as' => 'user.potifolio.websites','uses' => 'PortifolioController@webIndex']);
Route::get('/Portifolio/Google-Ads', ['as' => 'user.potifolio.google-ads','uses' => 'PortifolioController@gadsIndex']);
Route::get('/Portifolio/Redes-Sociais', ['as' => 'user.potifolio.redes-sociais','uses' => 'PortifolioController@redsIndex']);
Route::get('/Portifolio/Marcas', ['as' => 'user.potifolio.marcas','uses' => 'PortifolioController@marcasIndex']);
Route::get('/Portifolio/Comunicacao-Interna', ['as' => 'user.potifolio.com-interna','uses' => 'PortifolioController@comintIndex']);
Route::get('/Portifolio/Rotulos-Embalagens', ['as' => 'user.potifolio.rot-emba','uses' => 'PortifolioController@rotembaIndex']);
Route::get('/Portifolio/Revistas-Corporativas', ['as' => 'user.potifolio.rev-corp','uses' => 'PortifolioController@revcorpIndex']);
Route::get('/Portifolio/Identidade-Visual', ['as' => 'user.potifolio.ident-visual','uses' => 'PortifolioController@ivisualIndex']);
<file_sep>/ew21/app/Http/Controllers/PortifolioController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
class PortifolioController extends Controller
{
public function psIndex(){
return view('portifolio.publicacoes-segmentadas');
}
public function webIndex(){
return view('portifolio.websites');
}
public function gadsIndex(){
return view('portifolio.googleads');
}
public function redsIndex(){
return view('portifolio.redes-sociais');
}
public function marcasIndex(){
return view('portifolio.marcas');
}
public function comintIndex(){
return view('portifolio.comunicacao-interna');
}
public function rotembaIndex(){
return view('portifolio.rotulos-embalagens');
}
public function revcorpIndex(){
return view('portifolio.revistas-corporativas');
}
public function ivisualIndex(){
return view('portifolio.identidade-visual');
}
}
| 19d1af7f0329204ae34e98aa990756e4be218f2b | [
"PHP"
] | 6 | PHP | thesekcy/NovoSiteEW21 | 7b460b8765a910cf088892838af41ad2cb0e043c | fb5c67d2f480dedb1844f7c2ac63585a5016772f | |
refs/heads/master | <file_sep>from blox import AttrDict
from blox.torch.modules import DummyModule
from gcp.evaluation.evaluation_matching import DTWEvalBinding, BalancedPrunedDTWBinding, BalancedEvalBinding
class TreeDenseRec(DummyModule):
    """Dense reconstruction head for tree-structured predictions.

    Decodes the tree's latent goal estimates into frames, and at evaluation time
    turns tree outputs into dense sequences of a requested length through a
    pruning-scheme-specific evaluation binding (constructed lazily on first use).
    """

    def __init__(self, hp, *_, decoder, **__):
        super().__init__()
        self._hp = hp
        self.eval_binding = None  # built lazily, keyed by the first requested pruning scheme
        self.decoder = decoder

    def get_sample_with_len(self, i_ex, length, outputs, inputs, pruning_scheme, name=None):
        """Perform evaluation matching, return a dense sequence of specified length for one example."""
        if self.eval_binding is None:
            self.eval_binding = self._get_eval_binding(pruning_scheme)
        binding = self.eval_binding
        return binding(outputs, inputs, length, i_ex, name)

    def get_all_samples_with_len(self, length, outputs, inputs, pruning_scheme, name=None):
        """Perform evaluation matching, return dense sequences of specified length for the full batch."""
        if self.eval_binding is None:
            self.eval_binding = self._get_eval_binding(pruning_scheme)
        binding = self.eval_binding
        if hasattr(binding, 'get_all_samples'):
            # binding natively supports batched retrieval
            return binding.get_all_samples(outputs, inputs, length, name)
        # otherwise fall back to per-example retrieval
        samples = []
        for i_ex in range(outputs.end_ind.shape[0]):
            samples.append(binding(outputs, inputs, length, i_ex, name))
        return samples

    def _get_eval_binding(self, pruning_scheme):
        """Construct the evaluation binding that implements the requested pruning scheme."""
        if pruning_scheme == 'dtw':
            return DTWEvalBinding(self._hp)
        if pruning_scheme == 'pruned_dtw':
            # only defined for balanced matching
            assert self._hp.matching_type == 'balanced'
            return BalancedPrunedDTWBinding(self._hp)
        if pruning_scheme == 'basic':
            # only defined for balanced matching
            assert self._hp.matching_type == 'balanced'
            return BalancedEvalBinding(self._hp)
        raise ValueError("Eval pruning scheme {} not currently supported!".format(pruning_scheme))

    def forward(self, tree, inputs):
        """Decode latent estimates for every tree node (breadth-first) and attach the frames to the tree."""
        decoded = self.decoder.decode_seq(inputs, tree.bf.e_g_prime)
        tree.set_attr_bf(**decoded)
        return AttrDict()
<file_sep>import os
from contextlib import contextmanager
import torch
import torch.nn as nn
from blox.torch.layers import LayerBuilderParams
from blox import AttrDict
from blox.torch.modules import Updater
from blox.torch.models import base as bloxm
from tensorflow.contrib.training import HParams
class BaseModel(bloxm.BaseModel):
    """Base class for trainable prediction models.

    Bundles hyperparameter handling (tensorflow-contrib HParams), tensorboard
    logging, selective checkpoint loading, and total-loss aggregation.
    Subclasses must implement build_network / forward / loss / val_mode.
    """

    def __init__(self, logger):
        """
        :param logger: logger object providing log_scalar / log_graph used by the logging helpers below
        """
        super().__init__()
        self._hp = None  # hyperparameters; populated by the subclass from _default_hparams / overrides
        self._logger = logger

    @contextmanager
    def val_mode(self):
        """Sets validation parameters. To be used like: with model.val_mode(): ...<do something>..."""
        raise NotImplementedError("Need to implement val_mode context manager in subclass!")

    def step(self):
        # Forward 'step' to every child module that is an Updater
        # (presumably parameter/annealing schedules — see blox.torch.modules.Updater).
        self.call_children('step', Updater)

    def override_defaults(self, policyparams):
        """Overwrites default hparams with the given name -> value pairs.

        :param policyparams: dict mapping hparam names to new values
        :raises ValueError: if a new value equals the current default (guards against redundant overrides)
        """
        for name, value in policyparams.items():
            print('overriding param {} to value {}'.format(name, value))
            if value == getattr(self._hp, name):
                raise ValueError("attribute is {} is identical to default value!!".format(name))
            self._hp.set_hparam(name, value)

    def _default_hparams(self):
        """Returns an HParams object holding the default hyperparameters shared by all models."""
        # Data Dimensions
        default_dict = AttrDict({
            'batch_size': -1,
            'max_seq_len': -1,
            'n_actions': -1,
            'state_dim': -1,
            'img_sz': 32, # image resolution
            'input_nc': 3, # number of input feature maps
            'n_conv_layers': None, # Number of conv layers. Can be of format 'n-<int>' for any int for relative spec
        })

        # Network params
        default_dict.update({
            'use_convs': True,
            'use_batchnorm': True, # TODO deprecate
            'normalization': 'batch',
            'predictor_normalization': 'group',
        })

        # Misc params
        default_dict.update({
            'filter_repeated_tail': False, # whether to remove repeated states from the dataset
            'rep_tail': False,
            'dataset_class': None,
            'standardize': None,
            'split': None,
            'subsampler': None,
            'subsample_args': None,
            'checkpt_path': None,
        })

        # add new params to parent params
        parent_params = HParams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])

        return parent_params

    def postprocess_params(self):
        """Finalizes hparams after overrides: for non-conv (state-based) models the input
        channel count becomes the state dimension, and layer-builder configs are attached."""
        if not self._hp.use_convs:
            # self._hp.input_nc = self._hp.img_sz ** 2 * self._hp.input_nc
            self._hp.input_nc = self._hp.state_dim
        self._hp.add_hparam('builder', LayerBuilderParams(
            self._hp.use_convs, self._hp.use_batchnorm, self._hp.normalization, self._hp.predictor_normalization))
        self._hp.add_hparam('fc_builder', LayerBuilderParams(
            False, self._hp.use_batchnorm, self._hp.normalization, self._hp.predictor_normalization))

    def build_network(self):
        """Constructs the network modules. Must be implemented by the subclass."""
        raise NotImplementedError("Need to implement this function in the subclass!")

    def forward(self, inputs):
        """Runs the forward pass. Must be implemented by the subclass."""
        raise NotImplementedError("Need to implement this function in the subclass!")

    def loss(self, outputs, inputs):
        """Computes the training losses. Must be implemented by the subclass."""
        raise NotImplementedError("Need to implement this function in the subclass!")

    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        """Logs losses, gradient norms, and any module-specific outputs to tensorboard.

        :param step: global training step used as the x-axis in tensorboard
        :param log_images: if True, expensive image/graph summaries are logged too
        :param phase: 'train' or validation phase tag appended to summary names
        """
        # Log generally useful outputs
        self._log_losses(losses, step, log_images, phase)

        if phase == 'train':
            self.log_gradients(step, phase)

        # Give every submodule a chance to log its own outputs.
        for module in self.modules():
            if hasattr(module, '_log_outputs'):
                module._log_outputs(outputs, inputs, losses, step, log_images, phase, self._logger)

            if hasattr(module, 'log_outputs_stateful'):
                module.log_outputs_stateful(step, log_images, phase, self._logger)

    def _log_losses(self, losses, step, log_images, phase):
        # Logs each loss value as a scalar; per-component breakdowns are logged as graphs
        # (only when image logging is enabled, since rendering them is expensive).
        for name, loss in losses.items():
            self._logger.log_scalar(loss.value, name + '_loss', step, phase)
            if 'breakdown' in loss and log_images:
                self._logger.log_graph(loss.breakdown, name + '_breakdown', step, phase)

    def _load_weights(self, weight_loading_info):
        """
        Loads weights of submodels from defined checkpoints + scopes.
        :param weight_loading_info: list of tuples: [(model_handle, scope, checkpoint_path)]
        """

        def get_filtered_weight_dict(checkpoint_path, scope):
            # Extracts only the state-dict entries belonging to `scope` and strips
            # the scope prefix so they match the submodel's own parameter names.
            if os.path.isfile(checkpoint_path):
                checkpoint = torch.load(checkpoint_path, map_location=self._hp.device)
                filtered_state_dict = {}
                remove_key_length = len(scope) + 1      # need to remove scope from checkpoint key
                for key, item in checkpoint['state_dict'].items():
                    if key.startswith(scope):
                        filtered_state_dict[key[remove_key_length:]] = item
                if not filtered_state_dict:
                    raise ValueError("No variable with scope '{}' found in checkpoint '{}'!".format(scope, checkpoint_path))
                return filtered_state_dict
            else:
                raise ValueError("Cannot find checkpoint file '{}' for loading '{}'.".format(checkpoint_path, scope))

        print("")
        for loading_op in weight_loading_info:
            print(("=> loading '{}' from checkpoint '{}'".format(loading_op[1], loading_op[2])))
            filtered_weight_dict = get_filtered_weight_dict(checkpoint_path=loading_op[2],
                                                            scope=loading_op[1])
            loading_op[0].load_state_dict(filtered_weight_dict)
            print(("=> loaded '{}' from checkpoint '{}'".format(loading_op[1], loading_op[2])))
        print("")

    def log_gradients(self, step, phase):
        """Logs the mean and max gradient norm over all parameters that currently have gradients."""
        grad_norms = list([torch.norm(p.grad.data) for p in self.parameters() if p.grad is not None])
        if len(grad_norms) == 0:
            return
        grad_norms = torch.stack(grad_norms)

        self._logger.log_scalar(grad_norms.mean(), 'gradients/mean_norm', step, phase)
        self._logger.log_scalar(grad_norms.max(), 'gradients/max_norm', step, phase)

    def get_total_loss(self, inputs, losses):
        """Returns the weighted sum of all losses with strictly positive weight, wrapped in an AttrDict."""
        # compute total loss
        ## filtering is important when some losses are nan
        ## the unsqueeze is important when some of the weights or losses are 1-dim tensors.
        # TODO use the function from blox
        total_loss = torch.stack([loss[1].value[None] * loss[1].weight for loss in
                                  filter(lambda x: x[1].weight > 0, losses.items())]).sum()
        if torch.isnan(total_loss).any():
            # NOTE(review): deliberate debug trap — drops into pdb when the total loss becomes NaN
            import pdb; pdb.set_trace()
        return AttrDict(value=total_loss)
<file_sep>from math import floor
import numpy as np
import torch
from blox.tensor import ndim
from blox.torch.ops import batchwise_assign, batchwise_index
from blox.utils import timing
from torch import nn
def fast_gak(C, transition, begin_inds=0):
    """
    Computes the global alignment kernel in log space
    (Cuturi'07, A kernel for time series based on global alignments).
    This version parallelizes the computation across the anti-diagonals of the cost matrix
    and can process batched sequences of variable length by using `begin_inds`.

    Only the 'nohor' transition scheme is supported: horizontal transitions are excluded,
    i.e. one x only matches one y (but not the other way around).

    :param C: the cost matrix, batch x n_x x n_y, in log domain
    :param transition: must be 'nohor' (asserted below)
    :param begin_inds: per-batch-element column index at which the first row of the alignment
      starts (lets a forward and a flipped backward pass share one call)
    :return: the accumulated matrix of log kernel values (same shape as C); entry (i, j) is the
      kernel value of the prefix alignment ending at (i, j), i.e. kernel values for smaller sequences
    """
    r, c = C.shape[-2:]
    D = torch.full_like(C, -np.inf)  # -inf == log(0): all cells start out unreachable
    # seed the recursion: row 0 gets the raw cost at each batch element's begin index
    batchwise_assign(D[:, 0], begin_inds, batchwise_index(C[:, 0], begin_inds))
    assert transition == 'nohor'
    assert r >= c

    # impossible = (1 - torch.ones_like(D).tril()).byte()
    # impossible += (1 - torch.ones_like(D).triu(-diff)).byte()

    # Iterate over diagonals
    for i in range(1, r + c):
        # cell coordinates lying on the i-th anti-diagonal
        ids = torch.arange(i + 1).flip(0)
        jds = torch.arange(i + 1)

        # exclude boundary violations
        ids = ids[max(0, i - r + 1):c]
        jds = jds[max(0, i - r + 1):c]
        # ToDo clean this up?
        ids = ids.flip(0)
        jds = jds.flip(0)

        # predecessors: skip = (i-1, j), x advances while y stays; step = (i-1, j-1), both advance
        skip = D[..., ids - 1, jds]
        step = D[..., ids - 1, jds - 1]

        # # Allow horizontal transitions where the y sequence ended
        # # jds[-1] is the column index of the lowest element on the diagonal
        # repeat = torch.full_like(skip, -np.inf)
        # allow_mask = jds[-1] > end_inds
        # repeat[allow_mask, -1] = D[allow_mask, ids[-1], jds[-1] - 1]

        # Recursion: log-domain sum over the two predecessors, plus the local cost
        add = torch.logsumexp(ndim.stack([skip, step], -1), -1)
        new_cost = C[..., ids, jds] + add
        mask = D[..., ids, jds] != -np.inf
        new_cost[mask] = D[..., ids, jds][mask] # If D was specified, leave as is (for begin_inds)

        # TODO change to simple array indexing once the backward pass for it is implemented
        mask = ids2mask(D, ids, jds)

        if hasattr(mask, 'bool'):
            # newer pytorch versions expect bool (not uint8) masks for masked_scatter
            mask = mask.bool()

        # scatter the diagonal's new values back into D
        D = D.masked_scatter(mask, (new_cost).flatten())

    return D
def ids2mask(mat, ids, jds):
    """Return a uint8 mask shaped like `mat` with ones at the (ids, jds) positions
    of the last two dimensions and zeros everywhere else."""
    marked = mat.new_zeros(mat.shape, dtype=torch.uint8)
    marked[..., ids, jds] = 1
    return marked
def soft_dtw(C, end_inds=None):
    """
    Computes the expected edge frequencies. See https://www.overleaf.com/read/jksjyppbrdgn
    :param C: the cost matrix, batch x n_x x n_y
    :param end_inds: per-batch-element end indices for the sequences y. Each sequence y is assumed
    to have the length given by its end index and the remainder of the frames will not be matched
    :return: the matrix with expected edge frequencies (same shape as C), float32
    """
    # negate: fast_gak accumulates log-kernel values (higher = better); double for numerical stability
    C = (-C).double()
    batch, r, c = C.shape
    if end_inds is None:
        # default: match the full y sequence for every batch element
        # NOTE(review): created on CPU — confirm behavior when C lives on the GPU
        end_inds = torch.full([batch], c - 1, dtype=torch.long)

    # mask = torch.zeros_like(C)
    # if end_inds is not None:
    #     for i, ind in enumerate(end_inds):
    #         mask[i, :-1, end_inds[i]:] = 1
    #     C[mask.byte()] = -np.inf

    # Compute forward-backward
    # forward and backward passes are batched into one fast_gak call by flipping C for the backward pass
    comb_C = torch.cat([C, ndim.flip(C, [-1, -2])], 0)
    # The backward begins with end indices, not (-1,-1)
    comb_begin_inds = torch.cat([torch.zeros_like(end_inds), c - end_inds - 1], 0)

    accum = fast_gak(comb_C, transition='nohor', begin_inds=comb_begin_inds)

    forward = accum[:batch]
    backward = ndim.flip(accum[batch:], [-1, -2])

    # Compute expected matrix
    # z: log partition value, read off the end of the forward pass at each sequence's end index
    z = batchwise_index(forward[:, -1], end_inds)[:, None, None]
    # forward + backward both include the local cost of cell (i, j), so subtract it once
    e = forward + backward - C
    e[C == -np.inf] = -np.inf

    # normalize in log space, then exponentiate to obtain edge frequencies
    w = (e - z).exp()

    # sanity check on the normalization; the kernel is numerically unstable for extreme cost values
    if not equal(w.sum(2).max(), 1, eps=1e-2):
        print('warning: dtw is not stable with these cost values')
        # NOTE(review): deliberate debug trap for numerical-instability investigation
        import pdb; pdb.set_trace()

    return w.float()
def equal(n, m, eps):
    """Return True iff `n` lies strictly within `eps` of `m`."""
    return abs(n - m) < eps
<file_sep>import os
import matplotlib.pyplot as plt
import numpy as np
import torch
import torchvision
from gcp.prediction.utils.visualization import plot_gt_matching_overview, plot_val_tree, plot_pruned_seqs,\
plot_balanced_tree, make_gif, plot_graph, param, fig2img, plot_inverse_model_actions
from tensorboardX import SummaryWriter
from blox.tensor.ops import broadcast_final, find_tensor
from gcp.prediction.utils.utils import get_pad_mask
class Logger:
    """Thin wrapper around a tensorboardX SummaryWriter that logs scalars, images,
    videos, figures and graphs; the batched helpers log only the first
    `n_logged_samples` elements of a batch."""

    def __init__(self, log_dir, n_logged_samples=10, summary_writer=None, fps=4):
        """
        :param log_dir: directory where event files are written
        :param n_logged_samples: max number of batch elements logged by the batched helpers
        :param summary_writer: optional externally-created SummaryWriter to reuse
        :param fps: frame rate used for video summaries
        """
        self._log_dir = log_dir
        self._n_logged_samples = n_logged_samples
        self.fps = fps
        if summary_writer is not None:
            self._summ_writer = summary_writer
        else:
            self._summ_writer = SummaryWriter(log_dir, max_queue=1, flush_secs=1)

    def _loop_batch(self, fn, name, val, *argv, **kwargs):
        """Loops the logging function n times."""
        for log_idx in range(min(self._n_logged_samples, len(val))):
            name_i = os.path.join(name, "_%d" % log_idx)
            fn(name_i, val[log_idx], *argv, **kwargs)

    @staticmethod
    def _check_size(val, size):
        # Asserts that `val` has `size` dims (tensors/arrays) or that list elements have size-1 dims.
        if isinstance(val, torch.Tensor) or isinstance(val, np.ndarray):
            assert len(val.shape) == size, "Size of tensor does not fit required size, {} vs {}".format(len(val.shape), size)
        elif isinstance(val, list):
            assert len(val[0].shape) == size-1, "Size of list element does not fit required size, {} vs {}".format(len(val[0].shape), size-1)
        else:
            raise NotImplementedError("Input type {} not supported for dimensionality check!".format(type(val)))
        # if (val[0].numel() > 1e9):
        #     print("Logging very large image with size {}px.".format(max(val[0].shape[1], val[0].shape[2])))
            #raise ValueError("This might be a bit too much")

    def log_scalar(self, scalar, name, step, phase):
        """Logs a single scalar under '<name>_<phase>'."""
        self._summ_writer.add_scalar('{}_{}'.format(name, phase), scalar, step)

    def log_scalars(self, scalar_dict, group_name, step, phase):
        """Will log all scalars in the same plot."""
        self._summ_writer.add_scalars('{}_{}'.format(group_name, phase), scalar_dict, step)

    def log_images(self, image, name, step, phase):
        """Logs the first n_logged_samples images of a [N, C, H, W] batch individually."""
        self._check_size(image, 4) # [N, C, H, W]
        if image[0].numel() > 3e9:
            print('skipping logging a giant image with {}px'.format(image[0].numel()))
            return
        self._loop_batch(
            self._summ_writer.add_image, '{}_{}'.format(name, phase), image, step)

    def log_video(self, video_frames, name, step, phase):
        """Logs a single video given as a [T, C, H, W] tensor."""
        assert len(video_frames.shape) == 4, "Need [T, C, H, W] input tensor for single video logging!"
        if not isinstance(video_frames, torch.Tensor): video_frames = torch.tensor(video_frames)
        #video_frames = torch.transpose(video_frames, 0, 1)  # tbX requires [C, T, H, W] <- not in tbX >= 1.6
        video_frames = video_frames.unsqueeze(0)            # add an extra dimension to get grid of size 1
        self._summ_writer.add_video('{}_{}'.format(name, phase), video_frames, step, fps=self.fps)

    def log_videos(self, video_frames, name, step, phase):
        """Logs the first n_logged_samples videos of a [N, T, C, H, W] batch individually."""
        assert len(video_frames.shape) == 5, "Need [N, T, C, H, W] input tensor for video logging!"
        video_frames = video_frames.unsqueeze(1)            # add an extra dimension after batch to get grid of size 1
        self._loop_batch(self._summ_writer.add_video, '{}_{}'.format(name, phase), video_frames, step, fps=self.fps)

    def log_image_grid(self, images, name, step, phase, nrow=8):
        """Logs a batch of images as a single tiled grid with `nrow` images per row."""
        assert len(images.shape) == 4, "Image grid logging requires input shape [batch, C, H, W]!"
        img_grid = torchvision.utils.make_grid(images, nrow=nrow)
        if img_grid.min() < 0:
            # negative values indicate a [-1, 1] image range; rescale to [0, 1]
            print("warning, image not rescaled!")
            img_grid = (img_grid + 1) / 2
        self._summ_writer.add_image('{}_{}'.format(name, phase), img_grid, step)

    def log_video_grid(self, video_frames, name, step, phase, fps=3):
        """Logs a batch of videos [N, T, C, H, W] as one grid video."""
        assert len(video_frames.shape) == 5, "Need [N, T, C, H, W] input tensor for video logging!"
        self._summ_writer.add_video('{}_{}'.format(name, phase), video_frames, step, fps=fps)

    def log_figures(self, figure, name, step, phase):
        """figure: batch of matplotlib.pyplot figure handles; logs the first n_logged_samples."""
        assert figure.shape[0] > 0, "Figure logging requires input shape [batch x figures]!"
        self._loop_batch(self._summ_writer.add_figure, '{}_{}'.format(name, phase), figure, step)

    def log_figure(self, figure, name, step, phase):
        """figure: matplotlib.pyplot figure handle"""
        self._summ_writer.add_figure('{}_{}'.format(name, phase), figure, step)

    def log_graph(self, array, name, step, phase):
        """Renders `array` as a line-graph image (via plot_graph) and logs it."""
        im = plot_graph(array)
        # plot_graph returns HWC; add_image expects CHW
        self._summ_writer.add_image('{}_{}'.format(name, phase), im.transpose(2, 0, 1), step)

    def dump_scalars(self, log_path=None):
        """Exports all logged scalars to a JSON file (defaults to <log_dir>/scalar_data.json)."""
        log_path = os.path.join(self._log_dir, "scalar_data.json") if log_path is None else log_path
        self._summ_writer.export_scalars_to_json(log_path)
class HierarchyLogger(Logger):
    """Logger extension that visualizes hierarchical (tree-structured) predictions."""
    def __init__(self, log_dir, hp, max_seq_len, n_logged_samples=3, summary_writer=None, fps=4):
        Logger.__init__(self, log_dir, n_logged_samples, summary_writer, fps)
        self._gamma_width = 4  # width of the gamma visualization in px
        self._hp = hp
        self.max_seq_len = max_seq_len
        self.dummy_env = None  # lazily constructed rendering env (see log_single_topdown_traj)

    def render(self, tensor):
        # Identity hook; subclasses may override to post-process images before logging.
        return tensor

    def log_hierarchy_image(self, model_output, inputs, name, step, phase):
        """Builds an image depicting the predicted hierarchy."""
        reference = find_tensor(inputs)
        batch_size, channels, im_height, _ = inputs.I_0_image.shape
        N = self.max_seq_len
        assert batch_size >= self._n_logged_samples
        depth = model_output.tree.depth

        # only draw gamma strips if the tree actually carries gamma attention values
        if 'gamma' in model_output.tree.subgoals.keys():
            gamma_width = self._gamma_width
        else:
            gamma_width = 0
        level_height = im_height + gamma_width

        # gray canvas: one row of frames per tree level, one column per timestep
        image = 0.7 * torch.ones((self._n_logged_samples, channels, depth * level_height, N * im_height),
                                 dtype=torch.float32, device=reference.device)
        for level in range(depth):
            # initialize gamma "strips" to 0
            image[:, :, level * level_height: level * level_height + gamma_width] = 0.0

        def tile_gamma(gamma):
            # stretch per-timestep gamma to pixel width so it can be drawn as a strip
            return gamma[:, None].repeat(1, im_height).view(-1)

        # TODO vectorize the batch computation
        for segment in model_output.tree.depth_first_iter():
            subgoal = segment.subgoal
            if not subgoal.keys(): break
            level = depth - segment.depth
            # visualize gamma on this depth level
            for batch_idx in range(self._n_logged_samples):
                if 'done_mask' in segment and (segment.done_mask is None or not segment.done_mask[batch_idx]):
                    time_idx = int(subgoal.ind[batch_idx])
                    if gamma_width != 0:
                        image[batch_idx, :, level * level_height:level * level_height + gamma_width] += \
                            tile_gamma(subgoal.gamma[batch_idx])
                    if 'images' in subgoal.keys():
                        # place the subgoal frame at its predicted time index on its level
                        image[batch_idx,
                              :,
                              level * level_height + gamma_width:(level + 1) * level_height,
                              time_idx * im_height:(time_idx + 1) * im_height] = self.render(subgoal.images[batch_idx])

        def flatten_seq(seq):
            # [batch, time, C, H, W] -> [batch, C, H, time * W]: frames side by side
            return seq[:self._n_logged_samples].transpose(1, 2).transpose(2, 3).\
                reshape(self._n_logged_samples, channels, im_height, N * im_height)

        def mask_extra(seq):
            # zero out frames past each sequence's end index
            return seq * broadcast_final(get_pad_mask(model_output.end_ind, N), seq)

        if 'traj_seq' in inputs.keys():
            input_image = flatten_seq(inputs.traj_seq_images)
        else:
            # no full input sequence available: show only start and goal frames
            input_image = torch.zeros_like(image[:, :, :level_height])
            input_image[:, :, :, :im_height] = inputs.I_0_image[:self._n_logged_samples]
            input_image[:, :, :, -im_height:] = inputs.I_g_image[:self._n_logged_samples]
        image = torch.cat([input_image, image], dim=2)

        if 'dense_rec' in model_output.keys() and 'images' in model_output.dense_rec.keys()\
                and not 'p_n_hat' in model_output.tree.subgoals:  # don't log the dense rec here if it is pruned
            dense_images = mask_extra(self.render(model_output.dense_rec.images))
            image = torch.cat([image, flatten_seq(dense_images)], dim=2)

        if 'soft_matched_estimates' in model_output.keys():
            soft_estimates = mask_extra(self.render(model_output.soft_matched_estimates))
            image = torch.cat([image, flatten_seq(soft_estimates)], dim=2)

        image = (image + 1) / 2  # rescale back to 0-1 range
        self.log_images(image, name, step, phase)
        return image

    def _log_plot_img(self, img, name, step, phase):
        """Log a batch of HWC images (numpy or tensor) as CHW image summaries."""
        if not isinstance(img, torch.Tensor): img = torch.tensor(img)
        img = img.float().permute(0, 3, 1, 2)
        self.log_images(img, name, step, phase)

    def log_gif(self, imgs, name, step, phase):
        """Log a sequence of HWC frames as a video summary tagged '<name>_gif'."""
        if isinstance(imgs, list): imgs = np.concatenate(imgs)
        if not isinstance(imgs, torch.Tensor): imgs = torch.tensor(imgs)
        imgs = imgs.float().permute(0, 3, 1, 2)
        self.log_video(imgs, name + "_gif", step, phase)

    def log_gt_match_overview(self, model_output, inputs, name, step, phase):
        """Visualize how predicted frames were matched to ground-truth frames."""
        self._log_plot_img(plot_gt_matching_overview(model_output, inputs), name, step, phase)

    def log_attention_overview(self, model_output, inputs, name, step, phase):
        """Same overview as log_gt_match_overview but plotting the 'gamma' attribute."""
        self._log_plot_img(plot_gt_matching_overview(model_output, inputs, plot_attr='gamma'), name, step, phase)

    def log_val_tree(self, model_output, inputs, name, step, phase):
        """Log the value-annotated prediction tree rendering."""
        self._log_plot_img(plot_val_tree(model_output, inputs), name, step, phase)

    def log_balanced_tree(self, model_output, element, name, step, phase):
        """Plot the prediction tree in a balanced layout for attribute `element`."""
        with param(n_logged_samples=self._n_logged_samples):
            im = plot_balanced_tree(model_output, element).transpose((0, 2, 3, 1))
        self._log_plot_img(im, name, step, phase)  # show from first, last and predicted

    def log_pruned_pred(self, model_output, inputs, name, step, phase):
        """Log pruned prediction sequences as both a still image and a gif."""
        im, seq = plot_pruned_seqs(model_output, inputs)
        self._log_plot_img(im, name, step, phase)
        self.log_gif(seq, name, step, phase)

    def log_dense_gif(self, model_output, inputs, name, step, phase):
        """Logs the dense reconstruction """
        rows = [model_output.dense_rec.images]
        if phase == 'train':
            # at training time also show the ground-truth sequence for comparison
            rows = [inputs.traj_seq] + rows
        self.log_rows_gif(rows, name, step, phase)
        # self.log_loss_gif(model_output.dense_rec.images, inputs.traj_seq, name, step, phase)

    def log_rows_gif(self, rows, name, step, phase):
        """ Logs a gif with several rows
        :param rows: a list of tensors batch x time x channel x height x width
        :param name:
        :param step:
        :param phase:
        :return:
        """
        im = make_gif(rows)
        self.log_video(im, name, step, phase)

    def log_loss_gif(self, estimates, targets, name, step, phase):
        """Logs gifs showing a target and a ground truth sequence side by side,
        shows ground truth sequence at training time too."""
        estimate_imgs = self.render((estimates + 1) / 2)
        target_imgs = self.render((targets + 1) / 2)
        if phase == "train":
            # concat target images
            seq_shape = target_imgs.shape
            # 2px-wide black separator between target and estimate rows
            padding = torch.zeros(seq_shape[:3] + (2,) + seq_shape[4:], dtype=target_imgs.dtype,
                                  device=target_imgs.device)
            plot_imgs = torch.cat([target_imgs, padding, estimate_imgs], dim=3)
        else:
            plot_imgs = estimate_imgs
        plot_imgs = plot_imgs[:5]  # limit to 5 samples
        batch, time, channels, height, width = plot_imgs.shape
        # lay batch elements out horizontally: [time, C, H, W * batch]
        plot_imgs = plot_imgs.permute(1, 2, 3, 0, 4).reshape(time, channels, height, width * batch)
        self.log_video(plot_imgs, name, step, phase)

    def log_single_topdown_traj(self, traj, name, step, phase):
        """Render one trajectory top-down in the maze env and log it as an image."""
        from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import Multiroom3dEnv
        if self.dummy_env is None:
            # build once and cache; no_env=True skips creating the simulator itself
            self.dummy_env = Multiroom3dEnv({'n_rooms': self._hp.n_rooms}, no_env=True)
        im = self.dummy_env.render_top_down(traj)
        self._log_plot_img(im[None], name, step, phase)

    def log_multiple_topdown_trajs(self, trajs, name, step, phase):
        """Overlay several trajectories in a single top-down rendering."""
        from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import Multiroom3dEnv
        if self.dummy_env is None:
            self.dummy_env = Multiroom3dEnv({'n_rooms': self._hp.n_rooms}, no_env=True)
        im = self.dummy_env.render_top_down(trajs[0])
        for traj in trajs[1:]:
            # each subsequent trajectory is drawn onto the previous rendering
            im = self.dummy_env.render_top_down(traj, background=im)
        self._log_plot_img(im[None], name, step, phase)

    def log_dataset_specific_trajectory(
            self, model_output, inputs, name, step, phase, dataset, predictions=None, end_inds=None):
        """ Logs trajectory according to a protocol defined by the dataset class """
        # TODO any way to make logger class have access to the dataset directly?
        if not hasattr(dataset, 'render_trajectory'):
            return
        if predictions is None:
            predictions, end_inds = self.get_predictions_from_output(inputs, model_output)
        im = dataset.render_trajectory(model_output, inputs, predictions, end_inds, self._n_logged_samples)
        # the dataset may return any combination of image / video / multiple videos
        if 'image' in im:
            self._log_plot_img(im['image'], name, step, phase)
        if 'video' in im:
            self.log_video(im['video'], name, step, phase)
        if 'videos' in im:
            for i, video in enumerate(im['videos']):
                self.log_video(video, name + "_{}".format(i), step, phase)

    @staticmethod
    def get_predictions_from_output(inputs, model_output):
        """Pick the most informative prediction sequence available in model_output.

        Preference order: pruned prediction > dense reconstruction >
        soft matched estimates > raw depth-first tree images.
        :return: (predictions, end_inds) pair.
        """
        if 'pruned_prediction' in model_output:
            predictions = model_output.pruned_prediction
            end_inds = model_output.end_ind
        elif 'images' in model_output.dense_rec:
            predictions = model_output.dense_rec.images
            end_inds = inputs.end_ind
        elif 'soft_matched_estimates' in model_output:
            predictions = model_output.soft_matched_estimates
            end_inds = inputs.end_ind
        else:
            predictions = model_output.tree.df.images
            end_inds = np.ones((predictions.shape[0],), dtype=int) * predictions.shape[1]  # use full length
        return predictions, end_inds

    def log_states_2d(self, model_output, inputs, name, step, phase):
        """Logs 2D plot of first 2 state dimensions."""
        predictions, end_inds = self.get_predictions_from_output(inputs, model_output)
        imgs = []
        for i, end_ind_inp, end_ind in zip(range(self._n_logged_samples), inputs.end_ind, end_inds):
            fig = plt.figure()
            gt = inputs.traj_seq_states[i, :end_ind_inp].data.cpu().numpy()
            plt.plot(gt[:, 0], gt[:, 1], 'g')  # ground truth in green
            pred = predictions[i][:end_ind].data.cpu().numpy()
            plt.plot(pred[:, 0], pred[:, 1], 'r')  # prediction in red
            imgs.append(fig2img(fig))
        self._log_plot_img(np.stack(imgs), name, step, phase)

    def log_pred_actions(self, model_output, inputs, name, step, phase):
        """Log the inverse-model action predictions plot."""
        self._log_plot_img(plot_inverse_model_actions(model_output, inputs), name, step, phase)
if __name__ == "__main__":
    # Smoke test: exercise every basic logging call with random data.
    logger = Logger(log_dir="./summaries")
    for step in range(10):
        print("Running step %d" % step)
        # dummy batch of videos: [batch, time, channels, height, width]
        dummy_data = torch.rand([32, 10, 3, 64, 64])
        logger.log_scalar(dummy_data[0, 0, 0, 0, 0], name="scalar", step=step, phase="train")
        logger.log_scalars({
            'test1': dummy_data[0, 0, 0, 0, 0],
            'test2': dummy_data[0, 0, 0, 0, 1],
            'test3': dummy_data[0, 0, 0, 0, 2]
        }, group_name="scalar_group", step=step, phase="train")
        logger.log_images(dummy_data[:, 0], name="image", step=step, phase="train")
        logger.log_video(dummy_data, name="video", step=step, phase="train")
        logger.log_video_grid(dummy_data, name="video_grid", step=step, phase="train")
        fig = plt.figure()
        plt.plot(dummy_data.data.numpy()[:, 0, 0, 0, 0])
        logger.log_figures(np.asarray([fig for _ in range(10)]), name="figure", step=step, phase="train")
    # write accumulated scalars to JSON once all steps are logged
    logger.dump_scalars()
    print("Done!")
<file_sep>import glob
import imp
import os
import random
import h5py
import numpy as np
import torch.utils.data as data
from torchvision.transforms import Resize
from blox import AttrDict
from blox.basic_types import map_dict
from blox.torch.training import RepeatedDataLoader
from blox.vis import resize_video
from gcp.prediction import global_params
def set_seed(seed):
    """Seed Python's and NumPy's global RNGs for reproducible data loading."""
    # NumPy receives a shifted seed so the two streams are decorrelated.
    np.random.seed(2 + seed)
    random.seed(seed)
class BaseVideoDataset(data.Dataset):
    """Base class for video datasets; holds common config, preprocessing and loading."""
    def __init__(self, data_dir, mpar, data_conf, phase, shuffle=True, dataset_size=-1):
        """
        :param data_dir: path to data directory
        :param mpar: model parameters used to determine output resolution etc
        :param data_conf: dataset config
        :param phase: string indicating whether 'train'/'val'/'test'
        :param shuffle: whether to shuffle within batch, set to False for computing metrics
        :param dataset_size: (optional) if not full dataset should be used, specifies number of used sequences
        """
        self.phase = phase
        self.data_dir = data_dir
        self.spec = data_conf.dataset_spec
        self.data_conf = data_conf
        self.dataset_size = dataset_size

        self.shuffle = shuffle and phase == 'train'  # never shuffle val/test data
        self.use_states = not mpar.use_convs         # state-based model if not convolutional
        self.img_sz = mpar.img_sz
        self.device = mpar.device

        if shuffle:
            self.n_worker = 4
        else:
            self.n_worker = 1
        if global_params.debug:
            # single-process loading makes debugging (breakpoints) possible
            self.n_worker = 0

        self.filenames = None
        # state normalization statistics; set by subclasses if the spec provides them
        self.states_mean = None
        self.states_std = None

    def process_data_dict(self, data_dict):
        """Normalize a raw trajectory dict into the model's expected format, in place:
        renames images/states keys, standardizes states when statistics are available,
        selects traj_seq, fills default indices/masks, and extracts I_0 / I_g."""
        if 'images' in data_dict:
            data_dict.traj_seq_images = data_dict.pop('images')
        if 'states' in data_dict:
            data_dict.traj_seq_states = data_dict.pop('states')
            if self.states_mean is not None:
                data_dict.traj_seq_states = self.standardize(data_dict.traj_seq_states)
                # expose statistics so consumers can de-standardize later
                data_dict.traj_seq_states_mean = self.states_mean
                data_dict.traj_seq_states_std = self.states_std
        if 'traj_seq_images' in data_dict and len(data_dict.traj_seq_images.shape) > 1:  # some datasets don't have images
            data_dict.traj_seq_images = self.preprocess_images(data_dict.traj_seq_images)
        data_dict.traj_seq = data_dict.traj_seq_states if self.use_states else data_dict.traj_seq_images
        if 'start_ind' not in data_dict:
            data_dict.start_ind = 0
        if 'end_ind' not in data_dict:
            data_dict.end_ind = self.spec['max_seq_len'] - 1
        if 'pad_mask' not in data_dict:
            data_dict.pad_mask = np.ones(self.spec['max_seq_len'], dtype=np.float32)
        # start and goal observations for goal-conditioned prediction
        data_dict.I_0 = data_dict.traj_seq[0]
        data_dict.I_g = data_dict.traj_seq[data_dict.end_ind]
        if 'traj_seq_images' in data_dict:
            data_dict.I_0_image = data_dict.traj_seq_images[0]
            data_dict.I_g_image = data_dict.traj_seq_images[data_dict.end_ind]

    def get_data_loader(self, batch_size, n_repeat):
        """Build a RepeatedDataLoader that cycles this dataset n_repeat times."""
        print('len {} dataset {}'.format(self.phase, len(self)))
        assert self.device in ['cuda', 'cpu']  # Otherwise the logic below is wrong
        return RepeatedDataLoader(self, batch_size=batch_size, shuffle=self.shuffle, num_workers=self.n_worker,
                                  drop_last=True, n_repeat=n_repeat, pin_memory=self.device == 'cuda',
                                  worker_init_fn=lambda x: np.random.seed(np.random.randint(65536) + x))

    def preprocess_images(self, images):
        # identity by default; subclasses override (e.g. resize / rescale)
        return images

    @staticmethod
    def visualize(*args, **kwargs):
        # no-op hook; dataset subclasses can provide custom visualization
        pass

    def standardize(self, states):
        """Z-score states with dataset statistics; epsilon guards against zero std."""
        return (states - self.states_mean) / (1e-6 + self.states_std)

    @staticmethod
    def get_dataset_spec(data_dir):
        """Load the dataset_spec module shipped alongside the data."""
        return imp.load_source('dataset_spec', os.path.join(data_dir, 'dataset_spec.py')).dataset_spec
class VarLenVideoDataset(BaseVideoDataset):
    """Variable length video dataset"""

    def __init__(self, data_dir, mpar, data_conf, phase, shuffle=True, dataset_size=-1):
        """
        :param data_dir: path to data directory
        :param mpar: model parameters used to determine output resolution etc
        :param data_conf: dataset config
        :param phase: string indicating whether 'train'/'val'/'test'
        :param shuffle: whether to shuffle within batch, set to False for computing metrics
        :param dataset_size: (optional) if not full dataset should be used, specifies number of used sequences
        """
        super().__init__(data_dir, mpar, data_conf, phase, shuffle, dataset_size)

        print('loading files from', self.data_dir)
        self.filenames = self._get_filenames()
        self.traj_per_file = self.get_traj_per_file(self.filenames[0])

        self.randomize_length = mpar.randomize_length
        self.randomize_start = mpar.randomize_start
        self.transform = Resize([mpar.img_sz, mpar.img_sz])
        self.flatten_im = False

        if 'states_mean' in self.spec:
            self.states_mean = self.spec['states_mean']
            self.states_std = self.spec['states_std']

    def _get_filenames(self):
        raise NotImplementedError("Needs to be implemented in sub-class!")

    def get_traj_per_file(self, path):
        """Read the number of trajectories stored per hdf5 file."""
        with h5py.File(path, 'r') as F:
            # ds[()] reads the scalar; the `.value` attribute was removed in h5py 3.0
            return F['traj_per_file'][()]

    def __getitem__(self, index):
        """Load one trajectory; `index` addresses file via index // traj_per_file."""
        file_index = index // self.traj_per_file
        path = self.filenames[file_index]
        try:
            with h5py.File(path, 'r') as F:
                ex_index = index % self.traj_per_file  # trajectory index within the file
                key = 'traj{}'.format(ex_index)

                # Fetch data into a dict
                if key + '/images' in F.keys():
                    data_dict = AttrDict(images=(F[key + '/images'][()]))
                else:
                    data_dict = AttrDict()
                for name in F[key].keys():
                    if name in ['states', 'actions', 'pad_mask']:
                        data_dict[name] = F[key + '/' + name][()].astype(np.float32)

                # Make length consistent: last valid index is the argmax of masked time indices
                end_ind = np.argmax(data_dict.pad_mask * np.arange(data_dict.pad_mask.shape[0], dtype=np.float32), 0)
                start_ind = np.random.randint(0, end_ind - 1) if self.randomize_start else 0
                start_ind, end_ind, data_dict = self.sample_max_len_video(data_dict, start_ind, end_ind)

                # Randomize length
                if self.randomize_length:
                    end_ind = self._randomize_length(start_ind, end_ind, data_dict)

                # Collect data into the format the model expects
                data_dict.end_ind = end_ind
                data_dict.start_ind = start_ind

                self.process_data_dict(data_dict)
        except Exception as e:
            # Chain the original exception instead of swallowing it: a bare
            # `except:` would also catch KeyboardInterrupt and hide the cause.
            raise ValueError("Problem when loading file from {}".format(path)) from e
        return data_dict

    def sample_max_len_video(self, data_dict, start_ind, end_ind):
        """ This function processes data tensors so as to have length equal to max_seq_len
        by sampling / padding if necessary """
        extra_length = (end_ind - start_ind + 1) - self.spec['max_seq_len']
        if self.phase == 'train':
            # random temporal crop when the sequence is longer than max_seq_len
            offset = max(0, int(np.random.rand() * (extra_length + 1))) + start_ind
        else:
            offset = 0

        data_dict = map_dict(lambda tensor: self._maybe_pad(tensor, offset, self.spec['max_seq_len']), data_dict)
        if 'actions' in data_dict:
            # one fewer action than frames
            data_dict.actions = data_dict.actions[:-1]
        end_ind = min(end_ind - offset, self.spec['max_seq_len'] - 1)

        return 0, end_ind, data_dict  # start index gets 0 by design

    def _randomize_length(self, start_ind, end_ind, data_dict):
        """ This function samples part of the input tensors so that the length of the result
        is uniform between 1 and max """

        length = 3 + int(np.random.rand() * (end_ind - 2))  # The length of the seq is from 2 to total length
        chop_length = int(np.random.rand() * (end_ind + 1 - length))  # from 0 to the reminder
        end_ind = length - 1
        pad_mask = np.logical_and((np.arange(self.spec['max_seq_len']) <= end_ind),
                                  (np.arange(self.spec['max_seq_len']) >= start_ind)).astype(np.float32)

        # Chop off the beginning of the arrays
        def pad(array):
            array = np.concatenate([array[chop_length:], np.repeat(array[-1:], chop_length, 0)], 0)
            array[end_ind + 1:] = 0
            return array

        for key in filter(lambda key: key != 'pad_mask', data_dict):
            data_dict[key] = pad(data_dict[key])
        data_dict.pad_mask = pad_mask

        return end_ind

    def preprocess_images(self, images):
        """Resize, convert to channel-first and rescale uint8 images to [-1, 1]."""
        # Resize video
        if len(images.shape) == 5:
            images = images[:, 0]  # Number of cameras, used in RL environments
        assert images.dtype == np.uint8, 'image need to be uint8!'
        images = resize_video(images, (self.img_sz, self.img_sz))
        images = np.transpose(images, [0, 3, 1, 2])  # convert to channel-first
        images = images.astype(np.float32) / 255 * 2 - 1
        assert images.dtype == np.float32, 'image need to be float32!'
        if self.flatten_im:
            images = np.reshape(images, [images.shape[0], -1])
        return images

    def _split_with_percentage(self, frac, filenames):
        """Slice the filename list into this phase's contiguous share."""
        assert sum(frac.values()) <= 1.0  # fractions cannot sum up to more than 1
        assert self.phase in frac
        if self.phase == 'train':
            start, end = 0, frac['train']
        elif self.phase == 'val':
            start, end = frac['train'], frac['train'] + frac['val']
        else:
            start, end = frac['train'] + frac['val'], frac['train'] + frac['val'] + frac['test']
        start, end = int(len(filenames) * start), int(len(filenames) * end)
        return filenames[start:end]

    @staticmethod
    def _maybe_pad(val, offset, target_length):
        """Pads / crops sequence to desired length."""
        val = val[offset:]
        length = val.shape[0]  # renamed from `len`, which shadowed the builtin
        if length > target_length:
            return val[:target_length]
        elif length < target_length:
            return np.concatenate((val, np.zeros([int(target_length - length)] + list(val.shape[1:]), dtype=val.dtype)))
        else:
            return val

    @staticmethod
    def _shuffle_with_seed(arr, seed=2):
        """Shuffle in place with a fixed seed so all phases see the same order."""
        rng = random.Random()
        rng.seed(seed)
        rng.shuffle(arr)
        return arr

    def __len__(self):
        if self.dataset_size != -1:
            return self.dataset_size
        return len(self.filenames) * self.traj_per_file
class FolderSplitVarLenVideoDataset(VarLenVideoDataset):
    """Splits in train/val/test using given folder structure."""

    def _get_filenames(self):
        """Collect this phase's hdf5 files from <data_dir>/hdf5/<phase>/."""
        pattern = os.path.join(self.data_dir, 'hdf5', self.phase + '/*')
        matches = sorted(glob.glob(pattern))
        if not matches:
            raise RuntimeError('No filenames found in {}'.format(self.data_dir))
        # deterministic shuffle so ordering is reproducible across runs
        return self._shuffle_with_seed(matches)
class GlobalSplitVarLenVideoDataset(VarLenVideoDataset):
    """Splits in train/val/test using global percentages."""

    def _get_filenames(self):
        """Recursively gather all .h5 files (excluding dataset_info.h5) and
        slice out this phase's share according to the spec's split fractions."""
        filenames = []
        for root, dirs, files in os.walk(self.data_dir):
            filenames.extend(os.path.join(root, f) for f in files
                             if f.endswith(".h5") and f != 'dataset_info.h5')
        if not filenames:
            raise RuntimeError('No filenames found in {}'.format(self.data_dir))
        shuffled = self._shuffle_with_seed(filenames)
        return self._split_with_percentage(self.spec.split, shuffled)
class MazeGlobalSplitVarLenVideoDataset(GlobalSplitVarLenVideoDataset):
    """Maze variant that keeps only the (x, y) position components of the states."""

    def process_data_dict(self, data_dict):
        if 'states' in data_dict:
            # drop all but the first two state dims before standard processing
            xy_states = data_dict['states'][..., :2]
            data_dict['states'] = xy_states
        return super().process_data_dict(data_dict)
class MazeTopRenderedGlobalSplitVarLenVideoDataset(MazeGlobalSplitVarLenVideoDataset):
    """Maze dataset that replaces stored images with on-the-fly top-down renderings."""
    def __init__(self, data_dir, mpar, data_conf, phase, shuffle=True, dataset_size=-1):
        from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import Multiroom3dEnv
        super().__init__(data_dir, mpar, data_conf, phase, shuffle, dataset_size)
        assert 'n_rooms' in data_conf  # need to add this in config file!

        self._crop_window_px = data_conf.crop_window
        # no_env=True: only the renderer is needed, not the full simulator
        self._render_env = Multiroom3dEnv({'n_rooms': data_conf['n_rooms']}, no_env=True,
                                          crop_window=self._crop_window_px)

    def process_data_dict(self, data_dict):
        # replace images with topdown rendered images -> first render, then resize to scale
        if "images" in data_dict:
            assert "states" in data_dict and "end_ind" in data_dict
            # single camera, square crop of side 2 * crop_window_px
            rendered_imgs = np.zeros((data_dict.images.shape[0], 1, self._crop_window_px * 2, self._crop_window_px * 2, 3),
                                     dtype=data_dict.images.dtype)
            for t in range(data_dict.end_ind + 1):
                # render agent position at time t with the goal position overlaid
                raw_img = self._render_env.render_pos_top_down(data_dict.states[t, :2],
                                                               data_dict.states[data_dict.end_ind, :2],)
                # rescale to 0-255 (renderer output presumably in [0, 1] -- confirm)
                rendered_imgs[t, 0] = np.asarray(raw_img * 255, dtype=rendered_imgs.dtype)
            data_dict.images = rendered_imgs
        return super().process_data_dict(data_dict)
<file_sep>from blox import AttrDict
from blox.torch.ops import ten2ar
import numpy as np
import numbers
from gcp.datasets.data_loader import MazeTopRenderedGlobalSplitVarLenVideoDataset
class Nav9Rooms(MazeTopRenderedGlobalSplitVarLenVideoDataset):
    """9-room navigation dataset with top-down trajectory rendering helpers."""
    n_rooms = 9

    @classmethod
    def render_maze_trajectories(cls, states, end_inds, color, n_logged_samples=3, bckgrds=None):
        """Render up to n_logged_samples state trajectories top-down.

        :param states: batch of state sequences (x, y positions)
        :param end_inds: per-sample index of the last valid step
        :param color: single RGB tuple, or a per-sample list of color sequences
        :param bckgrds: optional list of background images to draw on top of
        :return: stacked array of rendered images
        """
        from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import Multiroom3dEnv
        dummy_env = Multiroom3dEnv({'n_rooms': cls.n_rooms}, no_env=True)

        if bckgrds is None:
            bckgrds = [None] * n_logged_samples
        if isinstance(color[0], numbers.Number):
            # a single RGB color -> replicate for every sample
            color = [color] * n_logged_samples
        imgs = []
        for i, end_ind in zip(range(n_logged_samples), end_inds):
            state_seq = ten2ar(states[i][:end_ind + 1])
            # if state_seq.shape[0] < 2:
            #     if bckgrds[i] is not None:
            #         imgs.append(bckgrds[i])
            #     continue
            imgs.append(dummy_env.render_top_down(state_seq, color=color[i], background=bckgrds[i]))
        return np.stack(imgs)

    @classmethod
    def render_trajectory(cls, outputs, inputs, predictions, end_inds, n_logged_samples=3):
        """Render ground-truth trajectories (green) with predictions (red) overlaid."""
        # render ground truth trajectories
        im = cls.render_maze_trajectories(inputs.traj_seq_states, inputs.end_ind, (0, 1.0, 0),  # green
                                          n_logged_samples=n_logged_samples)
        # render predicted trajectory on top
        color = np.asarray((1.0, 0, 0))  # red
        if 'tree' in outputs and 'match_dist' in outputs.tree.subgoals:
            # Color bottleneck frames
            bottleneck_frames = ten2ar(outputs.tree.bf.match_dist.argmax(2)[:, :7])
            end_inds_np = ten2ar(end_inds)
            colors = []
            for i in range(n_logged_samples):
                # clamp bottleneck indices to the valid sequence range
                bottleneck_frames[i, bottleneck_frames[i] > end_inds_np[i]] = end_inds_np[i]
                color_seq = color[None].repeat(end_inds_np[i] + 1, 0)
                # darken the bottleneck frame and its predecessor
                color_seq[bottleneck_frames[i]] = color_seq[bottleneck_frames[i]] * 0.5
                color_seq[bottleneck_frames[i] - 1] = color_seq[bottleneck_frames[i] - 1] * 0.5
                colors.append(color_seq)
            color = colors
        im = cls.render_maze_trajectories(predictions, end_inds, color, bckgrds=im)
        return {'image': im}
# Dataset configuration consumed by the training pipeline.
config = AttrDict(
    dataset_spec=AttrDict(
        max_seq_len=100,  # maximum trajectory length fed to the model
        dataset_class=Nav9Rooms,
        # global train/val/test split fractions (no held-out test set here)
        split=AttrDict(train=0.994, val=0.006, test=0.00),
    ),
    n_rooms=9,
    # half-size in px of the top-down crop (rendered images are 2x this per side)
    crop_window=40,
)
<file_sep>import matplotlib
import numpy as np
matplotlib.use('agg')
from gcp.planning.infra.datasets.save_util.record_saver import HDF5SaverBase
from blox import AttrDict
def pad_traj_timesteps(traj, max_num_actions):
    """
    Zero-pad a trajectory's images and actions out to a fixed action horizon.

    :param traj: trajectory container with `actions` (and optionally `images`)
    :param max_num_actions: target number of actions; images get one extra frame
    :return: the same trajectory object, padded
    """
    has_images = 'images' in traj
    if has_images:
        im_shape = traj.images.shape
    ac_shape = traj.actions.shape

    if ac_shape[0] < max_num_actions:
        if has_images:
            # images carry one more frame than actions, hence the +1
            im_pad = np.zeros([max_num_actions + 1 - im_shape[0], im_shape[1], im_shape[2], im_shape[3], im_shape[4]],
                              dtype=np.uint8)
            traj.images = np.concatenate([traj.images, im_pad])

        missing = max_num_actions - ac_shape[0]
        # actions may be 1-D (scalar actions) or 2-D (action vectors)
        ac_pad = np.zeros([missing, ac_shape[1]]) if len(ac_shape) > 1 else np.zeros([missing])
        traj.actions = np.concatenate([traj.actions, ac_pad])

    if has_images:
        assert traj.images.shape[0] == max_num_actions + 1
    assert traj.actions.shape[0] == max_num_actions

    return traj
def get_pad_mask(action_len, max_num_actions):
    """
    Build a 0/1 mask marking valid frames (1) vs. padding (0).

    :param action_len: the number of actions in the trajectory
    :param max_num_actions: maximum number of actions allowed
    :return: float mask of length max_num_actions + 1 (frames = actions + 1)
    """
    if action_len > max_num_actions:
        raise ValueError
    # one more frame than actions, so the mask covers max_num_actions + 1 slots
    mask = np.zeros(max_num_actions + 1)
    mask[:action_len + 1] = 1.0
    assert mask.shape[0] == max_num_actions + 1
    return mask
class HDF5Saver(HDF5SaverBase):
    """Saves rollout trajectories to hdf5, padding them to a fixed action horizon."""
    def __init__(self, save_dir, envparams, agentparams, traj_per_file,
                 offset=0, split=(0.90, 0.05, 0.05), split_train_val_test=True):
        self.do_not_save_images = agentparams.do_not_save_images
        # NOTE(review): the hasattr check is on `agentparams` but the value is read
        # from `envparams` -- looks like a mismatch; confirm which object is meant
        # to carry `max_num_actions` before relying on this branch.
        if hasattr(agentparams, 'max_num_actions'):
            self.max_num_actions = envparams.max_num_actions
        else:
            self.max_num_actions = agentparams.T
        super().__init__(save_dir, traj_per_file, offset, split, split_train_val_test)

    def _save_manifests(self, agent_data, obs, policy_out):
        # no manifests needed for this saver
        pass

    def make_traj(self, agent_data, obs, policy_out):
        """Assemble a padded trajectory AttrDict from a rollout's observations and actions."""
        traj = AttrDict()
        if not self.do_not_save_images:
            traj.images = obs['images']
        traj.states = obs['state']
        action_list = [action['actions'] for action in policy_out]
        traj.actions = np.stack(action_list, 0)
        # mask computed before padding so it reflects the true action count
        traj.pad_mask = get_pad_mask(traj.actions.shape[0], self.max_num_actions)
        traj = pad_traj_timesteps(traj, self.max_num_actions)

        # optional robosuite metadata, stored when the env provides it
        if 'robosuite_xml' in obs:
            traj.robosuite_xml = obs['robosuite_xml'][0]
        if 'robosuite_env_name' in obs:
            traj.robosuite_env_name = obs['robosuite_env_name'][0]
        if 'robosuite_full_state' in obs:
            traj.robosuite_full_state = obs['robosuite_full_state']

        # minimal state that contains all information to position entities in the env
        if 'regression_state' in obs:
            traj.regression_state = obs['regression_state']

        return traj

    def save_traj(self, itr, agent_data, obs, policy_out):
        """Build and persist one trajectory."""
        traj = self.make_traj(agent_data, obs, policy_out)
        self._save_traj(traj)
<file_sep>from gcp.planning.infra.envs.base_env import BaseEnv
class BaseMiniworldEnv(BaseEnv):
    """Common bookkeeping (goal, distances, trajectory) for miniworld environments."""
    def __init__(self, hp):
        self._hp = hp
        self._ncam = 1
        self.num_objects = None

        self._goal = None
        self._goaldistances = []
        # shortest distances at episode start / end; subclasses set these before eval()
        self._initial_shortest_dist, self._final_shortest_dist = None, None
        self._full_traj = []

    def reset(self):
        """Clear per-episode statistics."""
        self._goaldistances = []
        self._initial_shortest_dist, self._final_shortest_dist = None, None
        self._full_traj = []

    def set_goal(self, goal):
        self._goal = goal

    def add_goal_dist(self, goal_dist):
        self._goaldistances.append(goal_dist)

    def valid_rollout(self):
        return True  # no invalid states in miniworld env

    def eval(self):
        """Collect episode statistics into a dict; requires the shortest
        distances to have been set by the subclass."""
        assert self._initial_shortest_dist is not None and self._final_shortest_dist is not None  # need to be set by subclass before eval!

        stats = {}
        stats['improvement'] = self._initial_shortest_dist - self._final_shortest_dist
        stats['initial_dist'] = self._initial_shortest_dist
        stats['final_dist'] = self._final_shortest_dist
        stats['all_goal_distances'] = self._goaldistances
        stats['full_traj'] = self._full_traj
        stats['goal'] = self._goal
        # TODO add success rate

        return stats

    def get_reset_from_obs(self, obs_dict):
        """Build a reset-state dict from the first timestep of an observation dict."""
        reset_state = {}
        reset_state['state'] = obs_dict['state'][0]
        reset_state['qpos_full'] = obs_dict['qpos_full'][0]
        return reset_state

    @staticmethod
    def default_ncam():
        return 1

    @property
    def ncam(self):
        return self._ncam

    @property
    def cam_res(self):
        # NOTE(review): _frame_height/_frame_width are never assigned in this class;
        # subclasses are expected to define them before cam_res is accessed.
        return self._frame_height, self._frame_width
<file_sep>import numpy as np
import torch
from blox import AttrDict
from gcp.planning.infra.policy.policy import Policy
from gcp.prediction.models.auxilliary_models.bc_mdl import TestTimeBCModel
class BehavioralCloningPolicy(Policy):
    """
    Behavioral Cloning Policy
    """
    def __init__(self, ag_params, policyparams, gpu_id, ngpu, conversion_fcns=None, n_rooms=None):
        super(BehavioralCloningPolicy, self).__init__()

        self._hp = self._default_hparams()
        self.override_defaults(policyparams)
        self.log_dir = ag_params.log_dir
        self._hp.params['batch_size'] = 1  # policy acts on one observation at a time
        # self._hp.params['n_actions'] = self._hp.params.n_actions      # todo get this from env!
        self.policy = TestTimeBCModel(self._hp.params, None)
        self.policy.eval()
        self.hidden_var = None  # stays None for non-recurrent policy

    def reset(self):
        super().reset()
        self.hidden_var = None  # drop recurrent state between rollouts

    def _default_hparams(self):
        default_dict = {
            'params': {},          # model parameters passed to TestTimeBCModel
            'checkpt_path': None,
            'model': None,
            'logger': None,
        }
        parent_params = super()._default_hparams()
        parent_params.ncam = 1
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def act(self, t=None, i_tr=None, images=None, state=None, goal=None, goal_image=None):
        """Compute the next action from the current observation and the goal.

        Uses images if the model has image input, otherwise 2D state positions.
        :return: AttrDict with an `actions` numpy array.
        """
        # Note: goal_image provides n (2) images starting from the last images of the trajectory
        self.t = t
        self.i_tr = i_tr
        self.goal_image = goal_image

        if self.policy.has_image_input:
            inputs = AttrDict(
                I_0=self._preprocess_input(images[t]),
                I_g=self._preprocess_input(goal_image[-1] if len(goal_image.shape) > 4 else goal_image),
                hidden_var=self.hidden_var
            )
        else:
            # state-based model: feed only the (x, y) position components
            current = state[-1:, :2]
            goal = goal[-1:, :2]  # goal_state = np.concatenate([state[-1:, -2:], state[-1:, 2:]], axis=-1)
            inputs = AttrDict(
                I_0=current,
                I_g=goal,
                hidden_var=self.hidden_var
            )

        actions, self.hidden_var = self.policy(inputs)

        output = AttrDict()
        output.actions = actions.data.cpu().numpy()[0]
        return output

    @staticmethod
    def _preprocess_input(input):
        """Normalize an image batch to [-1, 1] and convert HWC -> CHW.

        NOTE(review): the rescaling is inferred from the value range (a heuristic);
        confirm callers always pass raw [0, 255] or [0, 1] images, since data
        already in [-1, 1] with no negative values would be rescaled again.
        """
        assert len(input.shape) == 4  # can currently only handle inputs with 4 dims
        if input.max() > 1.0:
            input = input / 255.
        if input.min() >= 0.0:
            input = 2 * input - 1.0
        if input.shape[-1] == 3:
            input = input.transpose(0, 3, 1, 2)
        return input

    @property
    def default_action(self):
        return np.zeros(self.policy._hp.n_actions)

    def log_outputs_stateful(self, logger=None, global_step=None, phase=None, dump_dir=None, exec_seq=None, goal=None, index=None, env=None, goal_pos=None, traj=None, topdown_image=None):
        """Log the executed trajectory video and the goal image for one rollout."""
        logger.log_video(np.transpose(exec_seq, [0, 3, 1, 2]), 'control/traj{}_'.format(index), global_step, phase)
        goal_img = np.transpose(goal, [2, 0, 1])[None]
        goal_img = torch.tensor(goal_img)
        logger.log_images(goal_img, 'control/traj{}_goal'.format(index), global_step, phase)
class BehavioralCloningPolicy_RegressionState(BehavioralCloningPolicy):
    """BC policy variant that feeds the regression state in place of the raw state."""

    def act(self, t=None, i_tr=None, images=None, regression_state=None, goal=None, goal_image=None):
        # forward with regression_state taking the role of `state` in the parent
        return super().act(t=t, i_tr=i_tr, images=images, state=regression_state,
                           goal=goal, goal_image=goal_image)
import cv2
def resize_image(im):
    """Downscale an image to 64x64 with area interpolation (suited for shrinking).

    :param im: image array; leading singleton dimensions are squeezed out first.
    """
    return cv2.resize(im.squeeze(), (64, 64), interpolation=cv2.INTER_AREA)
<file_sep>import matplotlib;
from gcp.prediction.utils.utils import datetime_str, make_path, set_seeds, get_dataset_path, download_data
matplotlib.use('Agg')
import torch
import argparse
import os
from shutil import copy
import imp
import importlib
from tensorflow.contrib.training import HParams
from tensorboardX import SummaryWriter
from torch import autograd
from torch.optim import Adam, RMSprop, SGD
from functools import partial
from gcp.datasets.data_loader import FolderSplitVarLenVideoDataset
from gcp.prediction.training.checkpoint_handler import save_cmd, save_git, get_config_path
from blox.utils import dummy_context
from blox import AttrDict
from blox.torch.training import LossSpikeHook, NanGradHook, NoneGradHook, DataParallelWrapper, \
get_clipped_optimizer
from gcp.evaluation.compute_metrics import Evaluator
from gcp.prediction.training.base_trainer import BaseTrainer
from gcp.prediction import global_params
from gcp.prediction.utils import visualization
from blox.torch.radam import RAdam
class GCPBuilder(BaseTrainer):
""" This class constructs the GCP model, dataset and the optimizers """
def __init__(self):
self.batch_idx = 0
## Set up params
cmd_args, model_conf, conf_path, data_conf = self.get_configs()
## Set up logging
self.log_dir = log_dir = os.path.join(self._hp.exp_path, 'events')
print('using log dir: ', log_dir)
if not cmd_args.dont_save:
# Log
print('Writing to the experiment directory: {}'.format(self._hp.exp_path))
if not os.path.exists(self._hp.exp_path):
os.makedirs(self._hp.exp_path)
save_cmd(self._hp.exp_path)
save_git(self._hp.exp_path)
# Copy config file
copy(conf_path, os.path.join(self._hp.exp_path, "conf_" + datetime_str() + ".py"))
writer = SummaryWriter(log_dir, max_queue=1, flush_secs=1)
self.logger = self._hp.logger(log_dir, self._hp, max_seq_len=data_conf.dataset_spec.max_seq_len,
summary_writer=writer)
else:
self.logger = None
## Set up CUDA
self.use_cuda = torch.cuda.is_available() and not global_params.debug
self.device = torch.device('cuda') if self.use_cuda else torch.device('cpu')
if cmd_args.gpu != -1:
os.environ["CUDA_VISIBLE_DEVICES"] = str(cmd_args.gpu)
if cmd_args.deterministic:
set_seeds()
## Set up model conf
# copy over data specs, but cannot copy list into hparams
model_conf.update({k: v for k, v in data_conf.dataset_spec.items() if not isinstance(v, list)})
model_conf['device'] = self.device.type
model_conf['batch_size'] = self._hp.batch_size
if self.use_cuda:
model_conf['batch_size'] = int(self._hp.batch_size / torch.cuda.device_count())
## Build model
model = self._hp.model(model_conf, self.logger)
if torch.cuda.device_count() > 1:
print("\nUsing {} GPUs!\n".format(torch.cuda.device_count()))
model = DataParallelWrapper(model)
model.device = self.device
self.model = model = model.to(self.device)
## Build data loading
self.train_loader = self.get_dataset(model, data_conf, 'train', self._hp.epoch_cycles_train, -1)
phase = 'test' if self.cmd_args.metric else 'val'
self.val_loader = self.get_dataset(model, data_conf, phase, self._hp.epoch_cycles_train, cmd_args.val_data_size)
## Build optimizer
self.optimizer = self.get_optimizer_class()(filter(lambda p: p.requires_grad, self.model.parameters()),
lr=self._hp.lr)
## Build evaluator
if hasattr(self.model, 'dense_rec') and not self.cmd_args.skip_top100_val:
self.evaluator = Evaluator(self.model, self.log_dir, self._hp, self.cmd_args.metric, self.logger)
else:
self.evaluator = None
## Set up training options: context, hooks
self.training_context = autograd.detect_anomaly if cmd_args.detect_anomaly else dummy_context
self.hooks = []
self.hooks.append(LossSpikeHook('sg_img_mse_train'))
self.hooks.append(NanGradHook(self))
# SVG has none gradients for the tree part of the network
if self.model._hp.dense_rec_type != 'svg': self.hooks.append(NoneGradHook(self))
def get_dataset(self, model, data_conf, phase, n_repeat, dataset_size=-1):
    """Builds a data loader for the given phase.

    :param model: model instance; its hyperparameters are forwarded to the dataset.
    :param data_conf: data configuration; may specify a custom dataset class in
                      `dataset_spec` or `dataset_conf`.
    :param phase: dataset phase, e.g. 'train' / 'val' / 'test'.
    :param n_repeat: number of epoch repetitions for the loader.
    :param dataset_size: number of sequences to use; -1 uses the full dataset.
    :return: data loader object for the selected dataset class.
    """
    if self.cmd_args.feed_random_data:
        # debug option: feed random data to benchmark the model in isolation
        from gcp.datasets.data_generator import RandomVideoDataset
        dataset_class = RandomVideoDataset
    elif 'dataset_class' in data_conf.dataset_spec:
        # dataset class specified in the dataset spec takes precedence
        dataset_class = data_conf.dataset_spec.dataset_class
        visualization.PARAMS.visualize = dataset_class.visualize
    elif hasattr(data_conf, 'dataset_conf') and 'dataset_class' in data_conf.dataset_conf:
        dataset_class = data_conf.dataset_conf.dataset_class
        visualization.PARAMS.visualize = dataset_class.visualize
    else:
        # fall back to the default folder-based video dataset
        dataset_class = FolderSplitVarLenVideoDataset
    # shuffling is disabled during metric evaluation for reproducibility
    loader = dataset_class(get_dataset_path(self._hp.dataset_name), model._hp, data_conf,
                           phase=phase, shuffle=not self.cmd_args.metric, dataset_size=dataset_size). \
        get_data_loader(self._hp.batch_size, n_repeat)
    return loader
def get_configs(self):
    """Loads command-line args and the experiment's config module.

    Requires the GCP_EXP_DIR environment variable to point at the experiment
    root directory.

    :return: tuple of (cmd_args, model_conf, conf_path, data_conf).
    """
    # Cmd arguments
    self.cmd_args = cmd_args = self.get_cmd_args()
    exp_dir = os.environ['GCP_EXP_DIR']
    # Config file
    conf_path = get_config_path(cmd_args.path)
    print('loading from the config file {}'.format(conf_path))
    conf_module = imp.load_source('conf', conf_path)
    # Trainer config
    trainer_conf = conf_module.configuration
    self._hp = self._default_hparams()
    self.override_defaults(trainer_conf)  # override defaults with config file
    self._hp.set_hparam('exp_path', make_path(exp_dir, cmd_args.path, cmd_args.prefix, cmd_args.new_dir))
    # Model config
    model_conf = conf_module.model_config
    # Data config: download the dataset if necessary, then build the data conf
    download_data(trainer_conf.dataset_name)
    data_conf = self.get_data_config(conf_module)
    return cmd_args, model_conf, conf_path, data_conf
def get_data_config(self, conf_module):
    """Builds the data configuration for the experiment.

    Loads the dataset's default `dataset_spec.py`, then overrides it with
    custom parameters from the config module (if it defines `data_config`)
    or from the per-dataset config package.

    :param conf_module: loaded experiment config module.
    :return: AttrDict with `dataset_spec`, optional extra keys, and `fps`.
    """
    # get default data config
    path = os.path.join(get_dataset_path(conf_module.configuration['dataset_name']), 'dataset_spec.py')
    data_conf_file = imp.load_source('dataset_spec', path)
    data_conf = AttrDict()
    data_conf.dataset_spec = AttrDict(data_conf_file.dataset_spec)
    # update with custom params if available
    update_data_conf = {}
    if hasattr(conf_module, 'data_config'):
        update_data_conf = conf_module.data_config
    elif conf_module.configuration.dataset_name is not None:
        update_data_conf = importlib.import_module('gcp.datasets.configs.' + conf_module.configuration.dataset_name).config
    for key in update_data_conf:
        if key == "dataset_spec":
            # subscript access works for both plain dicts and AttrDicts
            # (attribute access would fail for a plain-dict override)
            data_conf.dataset_spec.update(update_data_conf[key])
        else:
            data_conf[key] = update_data_conf[key]
    if 'fps' not in data_conf:
        data_conf.fps = 4  # default frame rate used for video logging
    return data_conf
def get_optimizer_class(self):
    """Returns a constructor for the configured, gradient-clipped optimizer.

    Supported optimizers: 'adam', 'radam', 'rmsprop', 'sgd'.
    :raises ValueError: if the configured optimizer name is unknown.
    """
    optim = self._hp.optimizer
    if optim in ('adam', 'radam'):
        # Adam-family optimizers take beta parameters
        optimizer_type = Adam if optim == 'adam' else RAdam
        get_optim = partial(get_clipped_optimizer, optimizer_type=optimizer_type,
                            betas=(self._hp.adam_beta, 0.999))
    elif optim in ('rmsprop', 'sgd'):
        # momentum-based optimizers
        optimizer_type = RMSprop if optim == 'rmsprop' else SGD
        get_optim = partial(get_clipped_optimizer, optimizer_type=optimizer_type,
                            momentum=self._hp.momentum)
    else:
        raise ValueError("Optimizer '{}' not supported!".format(optim))
    return partial(get_optim, gradient_clip=self._hp.gradient_clip)
def get_cmd_args(self):
    """Defines and parses the command-line arguments for training/evaluation.

    Note: boolean-style flags use `type=int` so they can be set with 0/1 on
    the command line.

    :return: argparse.Namespace with all parsed options.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--path", help="path to the config file directory")
    # Folder settings
    parser.add_argument("--prefix", help="experiment prefix, if given creates subfolder in experiment directory")
    parser.add_argument('--new_dir', default=False, type=int, help='If True, concat datetime string to exp_dir.')
    parser.add_argument('--dont_save', default=False, type=int,
                        help="if True, nothing is saved to disk. Note: this doesn't work")  # TODO this doesn't work
    parser.add_argument("--visualize", default='', help="path to model file to visualize")  # TODO what uses this?
    # Running protocol
    parser.add_argument('--resume', default='latest', type=str, metavar='PATH',
                        help='path to latest checkpoint (default: none)')
    parser.add_argument('--train', default=True, type=int,
                        help='if False, will run one validation epoch')
    parser.add_argument('--test_control', default=True, type=int,
                        help="if False, control isn't run at validation time")
    parser.add_argument('--test_prediction', default=True, type=int,
                        help="if False, prediction isn't run at validation time")
    parser.add_argument('--skip_first_val', default=False, type=int,
                        help='if True, will skip the first validation epoch')
    parser.add_argument('--skip_top100_val', default=False, type=int,
                        help="if True, skips top of 100 eval")
    parser.add_argument('--metric', default=False, type=int,
                        help='if True, run test metrics')
    parser.add_argument('--val_sweep', default=False, type=int,
                        help='if True, runs validation on all existing model checkpoints')
    parser.add_argument('--dataset_val_sweep', default=False, type=int,
                        help='if True, runs validation on a given collection of datasets')
    # Misc
    parser.add_argument('--gpu', default=-1, type=int,
                        help='will set CUDA_VISIBLE_DEVICES to selected value')
    parser.add_argument('--strict_weight_loading', default=True, type=int,
                        help='if True, uses strict weight loading function')
    parser.add_argument('--deterministic', default=False, type=int,
                        help='if True, sets fixed seeds for torch and numpy')
    parser.add_argument('--imepoch', default=4, type=int,
                        help='number of image loggings per epoch')
    parser.add_argument('--val_data_size', default=-1, type=int,
                        help='number of sequences in the validation set. If -1, the full dataset is used')
    parser.add_argument('--log_images', default=True, type=int,
                        help='')
    parser.add_argument('--log_outputs_interval', default=10, type=int,
                        help='')
    # Debug
    parser.add_argument('--detect_anomaly', default=False, type=int,
                        help='if True, uses autograd.detect_anomaly()')
    parser.add_argument('--feed_random_data', default=False, type=int,
                        help='if True, we feed random data to the model to test its performance')
    parser.add_argument('--train_loop_pdb', default=False, type=int,
                        help='if True, opens a pdb into training loop')
    parser.add_argument('--debug', default=False, type=int,
                        help='if True, runs in debug mode')
    parser.add_argument('--verbose_timing', default=False, type=int,
                        help='if True, prints additional time measurements.')
    return parser.parse_args()
def _default_hparams(self):
    """Returns the default trainer hyperparameters as an HParams object."""
    # put new parameters in here:
    defaults = {
        'model': None,
        'logger': None,
        'dataset_name': None,  # directory where dataset is in
        'batch_size': 64,
        'exp_path': None,  # Path to the folder with experiments
        'num_epochs': 200,
        'epoch_cycles_train': 1,
        'optimizer': 'radam',  # supported: 'radam', 'adam', 'rmsprop', 'sgd'
        'lr': None,
        'gradient_clip': None,
        'momentum': 0,  # momentum in RMSProp / SGD optimizer
        'adam_beta': 0.9,  # beta1 param in Adam
        'metric_pruning_scheme': 'dtw',
        'top_of_100_eval': True,
        'n_rooms': None,
    }
    # register every default on a fresh HParams container
    parent_params = HParams()
    for name, value in defaults.items():
        parent_params.add_hparam(name, value)
    return parent_params
<file_sep>from blox import AttrDict
def get_default_gcp_hyperparameters():
    """Returns the default hyperparameter AttrDict for GCP models, grouped by concern."""
    # Params that actually should be elsewhere
    default_dict = AttrDict({
        'randomize_length': False,
        'randomize_start': False,
    })
    # Network size
    default_dict.update({
        'ngf': 4,  # number of feature maps in shallowest level
        'nz_enc': 32,  # number of dimensions in encoder-latent space
        'nz_vae': 32,  # number of dimensions in vae-latent space
        'nz_vae2': 256,  # number of dimensions in 2nd level vae-latent space (if used)
        'nz_mid': 32,  # number of dimensions for internal feature spaces
        'nz_mid_lstm': 32,  # hidden size of the LSTMs
        'n_lstm_layers': 1,  # number of stacked LSTM layers
        'n_processing_layers': 3,  # Number of layers in MLPs
        'conv_inf_enc_kernel_size': 3,  # kernel size of convolutional inference encoder
        'conv_inf_enc_layers': 1,  # number of layers in convolutional inference encoder
        'n_attention_heads': 1,  # number of attention heads (needs to divide nz_enc evenly)
        'n_attention_layers': 1,  # number of layers in attention network
        'nz_attn_key': 32,  # dimensionality of the attention key
        'init_mlp_layers': 3,  # number of layers in the LSTM initialization MLP (if used)
        'init_mlp_mid_sz': 32,  # size of hidden layers inside LSTM initialization MLP (if used)
        'n_conv_layers': None,  # Number of conv layers. Can be of format 'n-<int>' for any int for relative spec
    })
    # Network params
    default_dict.update(AttrDict(
        action_activation=None,  # activation applied to regressed actions
        device=None,  # filled in at runtime from the trainer
        context_every_step=True,
    ))
    # Loss weights
    default_dict.update({
        'kl_weight': 1.,
        'kl_weight_burn_in': None,
        'entropy_weight': .0,
        'length_pred_weight': 1.,
        'dense_img_rec_weight': 1.,
        'dense_action_rec_weight': 1.,
        'free_nats': 0,
    })
    # Architecture params
    default_dict.update({
        'use_skips': True,  # only works with conv encoder/decoder
        'skips_stride': 2,
        'add_weighted_pixel_copy': False,  # if True, adds pixel copying stream for decoder
        'pixel_shift_decoder': False,
        'skip_from_parents': False,  # If True, parents are added to the pixel copy/shift sources
        'seq_enc': 'none',  # Manner of sequence encoding. ['none', 'conv', 'lstm']
        'regress_actions': False,  # whether or not to regress actions
        'learn_attn_temp': True,  # if True, makes attention temperature a trainable variable
        'attention_temperature': 1.0,  # temperature param used in attention softmax
        'attach_inv_mdl': False,  # if True, attaches an inverse model to output that computes actions
        'attach_cost_mdl': False,  # if True, attaches a cost function MLP that estimates cost from pairs of states
        'run_cost_mdl': True,  # if False, does not run cost model (but might still build it)
        'attach_state_regressor': False,  # if True, attaches network that regresses states from pre-decoding-latents
        'action_conditioned_pred': False,  # if True, conditions prediction on actions
        'learn_beta': True,  # if True, learns beta
        'initial_sigma': 1.0,  # initial sigma value (NOTE(review): original comment was
                               # copy-pasted from 'learn_beta'; presumably the initial
                               # output std of the decoder — TODO confirm)
        'separate_cnn_start_goal_encoder': False,  # if True, builds separate conv encoder for start/goal image
        'decoder_distribution': 'gaussian'  # [gaussian, categorical]
    })
    # RNN params
    default_dict.update({
        'use_conv_lstm': False,
    })
    # Variational inference parameters
    default_dict.update(AttrDict(
        prior_type='learned',  # type of prior to be used ['fixed', 'learned']
        var_inf='standard',  # type of variational inference ['standard', '2layer', 'deterministic']
    ))
    # RecPlan params
    default_dict.update({
        'hierarchy_levels': 3,  # number of levels in the subgoal tree
        'one_hot_attn_time_cond': False,  # if True, conditions attention on one-hot time step index
        'attentive_inference': False,  # if True, forces attention to single matching frame
        'non_goal_conditioned': False,  # if True, does not condition prediction on goal frame
        'tree_lstm': '',  # ['', 'sum' or 'linear']
        'lstm_init': 'zero',  # defines how treeLSTM is initialized, ['zero', 'mlp']
        'matching_temp': 1.0,  # temperature used in TAP-style matching softmax
        'matching_temp_tenthlife': -1,
        'matching_temp_min': 1e-3,
        'matching_type': 'latent',  # evidence binding procedure
        # ['image', 'latent', 'fraction', 'balanced', 'tap']
        'leaves_bias': 0.0,
        'top_bias': 1.0,
        'n_top_bias_nodes': 1,
        'supervise_match_weight': 0.0,
        'regress_index': False,
        'regress_length': False,
        'inv_mdl_params': {},  # params for the inverse model, if attached
        'train_inv_mdl_full_seq': False,  # if True, omits sampling for inverse model and trains on full seq
        'cost_mdl_params': {},  # cost model parameters
        'act_cond_inference': False,  # if True, conditions inference on actions
        'train_on_action_seqs': False,  # if True, trains the predictive network on action sequences
        'learned_pruning_threshold': 0.5,  # confidence thresh for learned pruning network after which node gets pruned
        'untied_layers': False,
        'supervised_decoder': False,
        'states_inference': False,
    })
    # Outdated GCP params
    default_dict.update({
        'dense_rec_type': 'none',  # ['none', 'discrete', 'softmax', 'linear', 'node_prob', action_prob].
        'one_step_planner': 'discrete',  # ['discrete', 'continuous', 'sh_pred']. Always 'sh_pred' for HEDGE models
        'mask_inf_attention': False,  # if True, masks out inference attention outside the current subsegment
        'binding': 'frames',  # Matching loss form ['loss', 'frames', 'exp', 'lf']. Always 'loss'.
    })
    # Matching params
    default_dict.update(AttrDict(
        learn_matching_temp=True,  # If true, the matching temperature is learned
    ))
    # Logging params
    default_dict.update(AttrDict(
        dump_encodings='',  # Specifies the directory where to dump the encodings
        dump_encodings_inv_model='',  # Specifies the directory where to dump the encodings
        log_states_2d=False,  # if True, logs 2D plot of first two state dimensions
        log_cartgripper=False,  # if True, logs sawyer from states
        data_dir='',  # necessary for sawyer logging
    ))
    # Hyperparameters that shouldn't exist
    default_dict.update(AttrDict(
        log_d2b_3x3maze=0,  # Specifies the directory where to dump the encodings
    ))
    return default_dict
<file_sep>from setuptools import setup
from Cython.Build import cythonize
import numpy
# Build configuration for the gcp package. Compiles the Cython evaluation
# extensions (gcp/evaluation/*.pyx) against the local NumPy headers.
setup(
    name='gcp',
    version='0.2dev',
    packages=['gcp', 'blox', 'gym-miniworld'],
    license='MIT License',
    ext_modules=cythonize(['gcp/evaluation/*.pyx']),
    include_dirs=[numpy.get_include(),],
)
<file_sep>import torch
import numpy as np
from blox import AttrDict
class GCPSimulator:
    """Implements simple simulator interface for GCP models."""

    def __init__(self, model, append_latent):
        """
        :param model: trained GCP model used for rollouts.
        :param append_latent: if True, appends the latent to the state rollout.
        """
        self._model = model
        self._append_latent = append_latent
        self._logs = []

    def rollout(self, state, goal_state, samples, rollout_len, prune=False):
        """Performs one model rollout.

        :param state: start state; tiled over the sample batch.
        :param goal_state: goal state; tiled over the sample batch.
        :param samples: latent samples, one rollout per sample (leading dim = batch).
        :param rollout_len: maximum rollout length (sets end_ind for every sequence).
        :param prune: if True, returns the model's pruned predictions directly.
        :return: AttrDict with predictions, actions, states and latents
                 (lists of numpy arrays, each capped at the predicted end index).
        """
        # prepare inputs: tile start/goal over the sample batch
        batch_size = samples.shape[0]
        state, goal_state = state.repeat(batch_size, 0), goal_state.repeat(batch_size, 0)
        input_dict = AttrDict(I_0=torch.tensor(state, device=self._model.device, dtype=torch.float32),
                              I_g=torch.tensor(goal_state, device=self._model.device, dtype=torch.float32),
                              start_ind=torch.tensor(np.zeros((batch_size,)), device=self._model.device).long(),
                              end_ind=torch.tensor(np.ones((batch_size,)) * (rollout_len - 1),
                                                   device=self._model.device).long(),
                              z=torch.tensor(samples, device=self._model.device, dtype=torch.float32))
        input_dict = self._postprocess_inputs(input_dict)

        # perform rollout, collect outputs
        outputs = AttrDict()
        with self._model.val_mode():
            model_output = self._model(input_dict)
            # guard against a predicted sequence length of zero
            end_ind = torch.max(model_output.end_ind, torch.ones_like(model_output.end_ind))
            # self._logs.append(model_output)
            if prune:
                outputs.predictions = self._list2np(model_output.pruned_prediction)
            else:
                outputs.predictions = self._list2np(self._get_state_rollouts(input_dict, model_output, end_ind))
            outputs.actions = self._list2np(self._cap_to_length(model_output.actions, end_ind))
            outputs.states = self._list2np(self._cap_to_length(model_output.regressed_state, end_ind))
            outputs.latents = self._list2np(self._cap_to_length(input_dict.model_enc_seq, end_ind))
        return outputs

    def _postprocess_inputs(self, input_dict):
        """Hook for subclasses to adapt model inputs; identity by default."""
        return input_dict

    def _get_state_rollouts(self, input_dict, model_output, end_ind):
        """Assembles per-sample rollouts of the predicted length, optionally with latents appended."""
        batch_size = model_output.end_ind.shape[0]
        state_plans = []
        for i in range(batch_size):
            seq_len = end_ind[i] + 1
            out = self._model.dense_rec.get_sample_with_len(i, seq_len, model_output, input_dict, 'basic')
            rollout = out[0].reshape(seq_len, -1)
            if self._append_latent:
                # 'encodings' only exist when the dense reconstruction ran; else use e_g_prime
                name = 'encodings' if model_output.dense_rec else 'e_g_prime'
                latent_rollout = self._model.dense_rec.get_sample_with_len(
                    i, seq_len, model_output, input_dict, 'basic', name=name)[0].reshape(seq_len, -1)
                rollout = torch.cat((rollout, latent_rollout), dim=-1)
            state_plans.append(rollout)
        return state_plans

    @staticmethod
    def _cap_to_length(vals, end_inds):
        """Cuts each sequence in the batch at its own end index (inclusive)."""
        assert vals.shape[0] == end_inds.shape[0]
        return [val[:end_ind + 1] for val, end_ind in zip(vals, end_inds)]

    @staticmethod
    def _list2np(tensors):
        """Moves a list of torch tensors to CPU numpy arrays.

        Note: parameter renamed from `list`, which shadowed the builtin.
        """
        return [elem.data.cpu().numpy() for elem in tensors]

    def dump_logs(self, dump_file='rollout_dump.pkl'):
        """Pickles all collected model outputs to `dump_file` and clears the buffer."""
        import pickle
        with open(dump_file, 'wb') as F:
            pickle.dump(self._logs, F)
        self._logs = []
class GCPImageSimulator(GCPSimulator):
    """GCP simulator variant that feeds image observations to the model."""

    def _postprocess_inputs(self, input_dict):
        """Adds spatial dims to the latent and normalizes start/goal images."""
        processed = super()._postprocess_inputs(input_dict)
        processed.z = processed.z[..., None, None]  # append broadcastable spatial dims
        processed.I_0 = self._env2planner(processed.I_0)
        processed.I_g = self._env2planner(processed.I_g)
        return processed

    @staticmethod
    def _env2planner(img):
        """Converts images to the [-1...1] range of the hierarchical planner."""
        if img.max() > 1.0:
            # image is in [0, 255] -> scale down to [0, 1] first
            img = img / 255.0
        n_dims = len(img.shape)
        if n_dims == 5:
            img = img[0]  # drop the leading singleton dim
            n_dims = 4
        if n_dims == 4:
            img = img.permute(0, 3, 1, 2)  # HWC -> CHW layout
        return img * 2 - 1.0
class ActCondGCPImageSimulator(GCPImageSimulator):
    """Image simulator for action-conditioned models: the optimized samples are actions."""
    def _postprocess_inputs(self, input_dict):
        input_dict = super()._postprocess_inputs(input_dict)
        # input (action) samples are stored in z by default; strip the spatial
        # dims the parent class added and expose them as actions instead
        input_dict.actions = input_dict.pop('z')[..., 0, 0]
        # all action steps are valid -> pad mask of ones over (batch, time)
        input_dict.pad_mask = torch.ones(input_dict.actions.shape[:2], device=input_dict.actions.device)
        return input_dict
<file_sep>import numpy as np
import copy
import os
import torch
import pickle as pkl
from collections import defaultdict
from blox import AttrDict
from blox.utils import ParamDict
from blox.basic_types import listdict2dictlist
from gcp.planning.cem.cost_fcn import EuclideanPathLength, LearnedCostEstimate
from gcp.planning.cem.sampler import FlatCEMSampler, HierarchicalTreeCEMSampler
class CEMPlanner:
    """Generic CEM planner.

    Iteratively samples candidates, rolls them out through the simulator,
    scores them with the cost function and refits the sampling distribution
    to the elite fraction.
    """
    def __init__(self, hp, simulator):
        """
        :param hp: hyperparameter overrides for the defaults below.
        :param simulator: object exposing `rollout(state, goal, samples, rollout_len)`.
        """
        self._hp = self._default_hparams().overwrite(hp)
        self._simulator = simulator
        self._cost_fcn = self._build_cost()
        self._sampler = self._build_sampler()
        self._logs = []

    def _default_hparams(self):
        """Returns the default planner hyperparameters."""
        default_dict = ParamDict(
            horizon=None,  # CEM optimization horizon (i.e. how many sequential actions get optimized)
            action_dim=None,  # dimensionality of the actions that are optimized
            n_iters=1,  # number of CEM iterations
            batch_size=64,  # number of rollouts per iteration
            max_rollout_bs=100,  # maximum batch size for rollout (splits 'batch_size' if too large)
            elite_frac=0.1,  # percentage of 'best' trajectories
        )
        # cost params
        default_dict.update(ParamDict(
            cost_fcn=EuclideanPathLength,
            dense_cost=False,
            final_step_cost_weight=1.0,
        ))
        # sampler params
        default_dict.update(ParamDict(
            sampler=FlatCEMSampler,
            sampler_clip_val=float("Inf"),
            initial_std=3e-1,
        ))
        # misc
        default_dict.update(ParamDict(
            verbose=False,  # whether to visualize planning procedure (for debugging)
            dump_planning_data=False,  # whether to dump raw planning data
            use_delta_state_actions=False,  # if True, uses delta between inferred states as action plan
            use_inferred_actions=True,  # if True, uses model-inferred actions for action plan
            max_seq_len=None,  # used for model during rollout
        ))
        return default_dict

    def __call__(self, state, goal_state):
        """Runs the full CEM optimization.

        :return: (predicted rollout, action plan, latent rollout, score) of the best candidate.
        """
        logs = []
        self._sampler.init()
        for cem_iter in range(self._hp.n_iters):
            # sample candidates
            samples = self._sampler.sample(self._hp.batch_size)
            # rollout simulator
            rollouts = self._rollout(state, goal_state, samples)
            best_rollouts, best_rollouts_states, best_scores, best_samples, elite_idxs = \
                self._get_best_rollouts(rollouts, goal_state, samples)
            # refit sampling distribution to the elites
            self._sampler.fit(best_samples, best_scores)
            # store all logs
            logs.append(AttrDict(
                elite_rollouts=copy.deepcopy(best_rollouts),
                elite_scores=best_scores,
                dists=self._sampler.get_dists(),
                goal_state=goal_state,
                elite_states=copy.deepcopy(best_rollouts_states),
            ))
        # perform final rollout with best samples
        final_rollouts = self._rollout(state, goal_state, best_samples)
        logs.append(AttrDict(
            elite_rollouts=copy.deepcopy(self._maybe_split_image(final_rollouts.predictions)),
            elite_scores=best_scores,
            dists=self._sampler.get_dists(),
            goal_state=goal_state,
            elite_states=copy.deepcopy(final_rollouts.states),
        ))
        # extract output action plan
        best_actions = self._get_action_plan(final_rollouts, best_samples)
        # save logs
        self._logs.append(logs)
        return final_rollouts.predictions[0], best_actions[0], final_rollouts.latents[0], best_scores[0]

    def log_verbose(self, logger, step, phase, i_tr, dump_dir):
        """Optionally dumps raw planning data to disk; clears the log buffer."""
        if self._hp.dump_planning_data:
            os.makedirs(os.path.join(dump_dir, "planning"), exist_ok=True)
            with open(os.path.join(dump_dir, "planning/traj{}_raw_data.pkl".format(i_tr)), "wb") as F:
                pkl.dump(self._logs, F)
        self._logs = []

    def _build_cost(self):
        """Instantiates the rollout cost function."""
        return self._hp.cost_fcn(self._hp.dense_cost, self._hp.final_step_cost_weight)

    def _build_sampler(self):
        """Instantiates the CEM sampling distribution."""
        return self._hp.sampler(self._hp.sampler_clip_val,
                                self._hp.max_seq_len,
                                self._hp.action_dim,
                                self._hp.initial_std)

    def _rollout(self, state, goal, samples):
        """Rolls all samples through the simulator in chunks of at most `max_rollout_bs`."""
        output = defaultdict(list)
        # ceiling division so that a trailing partial chunk is not dropped
        # (the original floor division silently discarded samples beyond the
        # last full chunk, e.g. 50 of 150 samples with max_rollout_bs=100)
        n_chunks = max((samples.shape[0] + self._hp.max_rollout_bs - 1) // self._hp.max_rollout_bs, 1)
        for i in range(n_chunks):
            sim_output = self._simulator.rollout(state, goal,
                                                 samples[i * self._hp.max_rollout_bs: (i + 1) * self._hp.max_rollout_bs],
                                                 self._hp.max_seq_len)
            output = self._join_dicts(sim_output, output)
        return AttrDict({key: self._cap_to_horizon(output[key]) for key in output})

    def _get_best_rollouts(self, rollouts, goal_state, samples):
        """Scores all rollouts and returns the elite fraction.

        :return: (elite rollouts, elite states, elite scores, elite samples, elite idxs).
        """
        # compute rollout scores
        scores = self._cost_fcn(rollouts.predictions, goal_state)
        # get idxs of best rollouts (lowest cost first)
        full_elite_idxs = scores.argsort()
        elite_idxs = full_elite_idxs[:int(self._hp.batch_size * self._hp.elite_frac)]
        best_rollouts = [rollouts.predictions[idx] for idx in elite_idxs]
        best_rollouts_states = [rollouts.states[idx] for idx in elite_idxs]
        best_scores, best_samples = scores[elite_idxs], samples[elite_idxs]
        return self._maybe_split_image(best_rollouts), best_rollouts_states, best_scores, best_samples, elite_idxs

    def _maybe_split_image(self, rollout):
        """Separates the image part of a rollout if latents were attached to it."""
        if hasattr(self._cost_fcn, "_split_state_rollout"):
            # separate latent and image in case that latent got attached to rollout
            return self._cost_fcn._split_state_rollout(rollout).image_rollout
        return rollout

    def _get_action_plan(self, final_rollouts, best_samples):
        """Chooses the action plan source according to the configured strategy."""
        if self._hp.use_delta_state_actions:
            return [b[1:] - b[:-1] for b in final_rollouts.states]
        elif self._hp.use_inferred_actions:
            return final_rollouts.actions
        else:
            return best_samples

    def _cap_to_horizon(self, seqs):
        """Truncates each sequence to the planning horizon (if one is set).

        Note: parameter renamed from `input`, which shadowed the builtin.
        """
        if self._hp.horizon is not None:
            return [elem[:self._hp.horizon] for elem in seqs]
        else:
            return seqs

    @property
    def append_latent(self):
        return self._sampler.append_latent

    @staticmethod
    def _join_dicts(d1, d2):
        """Concatenates the per-key lists of two dicts (d1's entries appended after d2's)."""
        return AttrDict({key: d1[key] + d2[key] for key in d1})
class HierarchicalCEMPlanner(CEMPlanner):
    """CEM planner for hierarchical optimization."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self._hp.sampling_rates_per_layer is not None:
            # one extra joint iteration runs after all per-layer iterations
            assert self._hp.n_iters == len(self._hp.sampling_rates_per_layer) + 1

    def _default_hparams(self):
        """Extends the generic CEM defaults with hierarchical-specific params."""
        default_dict = super()._default_hparams()
        # general params
        default_dict.update(ParamDict(
            horizon=None,  # for GCP we do not need to define horizon
        ))
        # cost params
        default_dict.update(ParamDict(
            cost_fcn=LearnedCostEstimate,
            cost_config={},  # cost function for subgoal optimization
            LL_cost_fcn=None,  # if None cost_fcn is used
        ))
        # sampler params
        default_dict.update(ParamDict(
            sampler=HierarchicalTreeCEMSampler,
            n_level_hierarchy=None,
            sampling_rates_per_layer=None,
            n_ll_samples=5,
        ))
        return default_dict

    def _build_cost(self):
        """Builds the subgoal cost; the low-level cost falls back to it if LL_cost_fcn is None."""
        cost_fcn = self._hp.cost_fcn(self._hp.cost_config)
        self._ll_cost_fcn = cost_fcn if self._hp.LL_cost_fcn is None \
            else self._hp.LL_cost_fcn(self._hp.dense_cost, self._hp.final_step_cost_weight)
        return cost_fcn

    def _build_sampler(self):
        """Builds the hierarchical tree sampler with both cost functions attached."""
        return self._hp.sampler(self._hp.sampler_clip_val,
                                self._hp.max_seq_len,
                                self._hp.action_dim,
                                self._hp.initial_std,
                                n_level_hierarchy=self._hp.n_level_hierarchy,
                                sampling_rates_per_layer=self._hp.sampling_rates_per_layer,
                                subgoal_cost_fcn=self._cost_fcn,
                                ll_cost_fcn=self._ll_cost_fcn,
                                n_ll_samples=self._hp.n_ll_samples)

    def _get_best_rollouts(self, rollouts, goal_state, samples):
        """Delegates elite selection to the hierarchical sampler (flat samplers use the base logic)."""
        if not isinstance(self._sampler, HierarchicalTreeCEMSampler):
            # in case we use non-hierarchical optimization with tree-based model
            return super()._get_best_rollouts(rollouts, goal_state, samples)
        best_rollouts, best_scores = self._sampler.optimize(rollouts.predictions, goal_state)
        best_samples = self._sampler.sample(self._hp.batch_size)
        elite_idxs = np.arange(len(best_rollouts))  # dummy value
        return best_rollouts, rollouts.states, best_scores, best_samples, elite_idxs
class ImageCEMPlanner(CEMPlanner):
    """CEM planner that logs elite image plans for visual inspection."""
    def log_verbose(self, logger, step, phase, i_tr, dump_dir):
        """Logs tiled elite image plans (rows = plans, columns = time) per replan/CEM iteration."""
        if self._hp.verbose:
            for replan_idx, replan_log in enumerate(self._logs):
                for cem_iter_idx, iter_log in enumerate(replan_log):
                    # visualize all plans in order
                    plan_stack = []
                    for plan in iter_log.elite_rollouts:
                        time, c, h, w = plan.shape
                        plan = np.clip((plan+1) / 2, 0, 1.0)  # [-1, 1] -> [0, 1] for logging
                        if time < self._hp.horizon:
                            # pad short plans with white frames up to the full horizon
                            plan = np.concatenate((plan, np.ones((self._hp.horizon - time, c, h, w))))
                        plan_stack.append(plan)
                    plan_stack = np.array(plan_stack)
                    n_plans = plan_stack.shape[0]
                    # NOTE(review): c/h/w carry over from the last loop iteration above;
                    # assumes all elite plans share the same frame size — confirm.
                    log_img = torch.tensor(plan_stack.transpose(0, 2, 3, 1, 4)
                                           .reshape(n_plans, c, h, self._hp.horizon*w)
                                           .transpose(1, 0, 2, 3).reshape(c, h*n_plans, self._hp.horizon*w))
                    logger.log_images(log_img[None],
                                      "elite_trajs_{}_test/plan_r{}_iter{}_overview".format(i_tr, replan_idx, cem_iter_idx),
                                      step, phase)
                    if 'elite_states' in iter_log:
                        logger.log_single_topdown_traj(iter_log.elite_states[0],
                                                       "elite_trajs_{}_test/plan_r{}_iter{}_z_inferStateTraj".
                                                       format(i_tr, replan_idx, cem_iter_idx), step, phase)
                        logger.log_multiple_topdown_trajs(iter_log.elite_states,
                                                          "elite_trajs_{}_test/plan_r{}_iter{}_z_inferStateTrajDist".
                                                          format(i_tr, replan_idx, cem_iter_idx), step, phase)
                    if 'goal_state' in iter_log:
                        logger.log_images(torch.tensor(iter_log['goal_state'].transpose(0, 3, 1, 2)),
                                          "elite_trajs_{}_test/plan_r{}_iter{}_z_goal".
                                          format(i_tr, replan_idx, cem_iter_idx), step, phase)
        super().log_verbose(logger, step, phase, i_tr, dump_dir)

    def hack_add_state(self, state):
        """Attaches the current environment state to the latest log entry (debug helper)."""
        self._logs[-1][-1].state = state.copy()
class HierarchicalImageCEMPlanner(HierarchicalCEMPlanner, ImageCEMPlanner):
    """Combines hierarchical optimization with image logging."""
    def log_verbose(self, *args, **kwargs):
        # explicitly dispatch to ImageCEMPlanner's logging (MRO would pick
        # HierarchicalCEMPlanner's inherited base implementation otherwise)
        ImageCEMPlanner.log_verbose(self, *args, **kwargs)
<file_sep>import numpy as np
def _rand(n):
return np.random.rand(n) - 0.5 # sample in range [-0.5, 0.5]
def _M(*elems):
return np.array(elems)
class RoomSampler2d:
    """Samples 2d coordinates in a room environment with given number of rooms."""

    def __init__(self, rooms_per_side, sample_wide=False):
        """If sample_wide is True, the sampling fills the whole room all the way to the walls."""
        super().__init__()
        self._rooms_per_side = rooms_per_side
        self._agent_size = 0.02
        # equivalent to width of one room in mujoco; shrunk by the agent size unless sampling wide
        self._sampling_width = 1/3 - (not sample_wide) * 3 * self._agent_size
        self._room_offset = 1/3  # equivalent to middle point of rooms
        self._door_sampling_width = 1.5 * 0.0667 - 3 * self._agent_size
        self._hor_door_sampling_width = _M(2*self._agent_size, self._door_sampling_width)
        self._vert_door_sampling_width = _M(self._door_sampling_width, 2 * self._agent_size)

    def sample(self, room=None):
        """Draws a uniform sample inside the given room (random room if None)."""
        if room is None:
            room = np.random.randint(self._rooms_per_side**2)
        grid_coords = self._ridx2coords(room)
        # room centers are laid out on a grid centered around (0, 0)
        center = _M(*[self._room_offset / 2 + c * self._room_offset
                      - self._rooms_per_side / 2 * self._room_offset for c in grid_coords])
        return _rand(2) * self._sampling_width + center

    def sample_door(self, room1, room2, sample_center=False):
        """Samples in the door way between two given rooms."""
        center = self.get_door_pos(room1, room2)
        if sample_center:
            return center
        c1, c2 = self._ridx2coords(room1), self._ridx2coords(room2)
        if c1[0] != c2[0] and c1[1] == c2[1]:
            # horizontal room connection
            return _rand(2) * self._hor_door_sampling_width + center
        if c1[0] == c2[0] and c1[1] != c2[1]:
            # vertical room connection
            return _rand(2) * self._vert_door_sampling_width + center
        raise ValueError("Rooms don't have connection for door.")

    def get_door_pos(self, room1, room2):
        """Returns the center position of the door between two adjacent rooms."""
        assert room1 < room2  # room1 needs to be on top or left of room2
        c1, c2 = self._ridx2coords(room1), self._ridx2coords(room2)
        # difference between given rooms needs to be exactly 1
        assert np.abs(c1[0] - c2[0] + c1[1] - c2[1]) == 1
        return _M(*[self._room_offset / 2 + (i + j) / 2 * self._room_offset
                    - self._rooms_per_side / 2 * self._room_offset for i, j in zip(c1, c2)])

    def get_door_path(self, room1, room2):
        """Returns path through the door between two rooms (i.e. three waypoints)."""
        lefttop = room1 < room2  # check if room 1 is on left/top of room2
        center = self.get_door_pos(min(room1, room2), max(room1, room2))
        c1, c2 = self._ridx2coords(room1), self._ridx2coords(room2)
        if c1[0] != c2[0] and c1[1] == c2[1]:
            offset = _M(3 * self._door_sampling_width, 0)  # horizontal room connection
        elif c1[0] == c2[0] and c1[1] != c2[1]:
            offset = _M(0, -3 * self._door_sampling_width)  # vertical room connection
        else:
            raise ValueError("Rooms don't have connection for door.")
        waypoints = [center - offset, center, center + offset]
        return waypoints if lefttop else waypoints[::-1]

    def _ridx2coords(self, room_idx):
        """Converts room index to coordinates based on grid size."""
        row = int(np.floor(room_idx / self._rooms_per_side))
        col = int(self._rooms_per_side - 1 - room_idx % self._rooms_per_side)
        return row, col
if __name__ == "__main__":
    # Visual smoke test: scatter room samples (black) and door samples (red).
    import matplotlib.pyplot as plt

    DOORS = [(0, 3), (3, 6), (1, 4), (4, 7), (5, 8), (3, 4), (1, 2), (7, 8)]
    sampler = RoomSampler2d(rooms_per_side=3, sample_wide=True)
    # Fixed: this used to iterate `range(36)`, but a 3x3 grid has only 9 valid
    # rooms — indices >= 9 map outside the grid via _ridx2coords.
    n_rooms = sampler._rooms_per_side ** 2
    samples = np.asarray([[sampler.sample(r) for _ in range(100)]
                          for r in range(n_rooms)]).transpose(2, 0, 1).reshape(2, -1)
    plt.scatter(samples[0], samples[1], c='black')
    samples = np.asarray([[sampler.sample_door(d[0], d[1]) for _ in range(10)]
                          for d in DOORS]).transpose(2, 0, 1).reshape(2, -1)
    plt.scatter(samples[0], samples[1], c='red')
    plt.show()
<file_sep>import numpy as np
from blox import AttrDict
from blox.basic_types import listdict2dictlist
class HierarchicalTreeLatentOptimizer:
"""Optimizes latent distributions for GCP-tree layers recursively, one layer at a time.
After N layers have been optimized hierarchically, all remaining layers are jointly optimized as 'segments'
connecting between the planned subgoals."""
def __init__(self, latent_dim, sampling_rates, depth, subgoal_cost_fcn, ll_cost_fcn, final_layer_samples):
    """
    :param latent_dim: dimensionality of optimized latent
    :param sampling_rates: per-layer sampling rates (except for last layer) as list
    :param depth: depth of GCP tree model who's latents are getting optimized
    :param subgoal_cost_fcn: cost function for estimating cost of sampled predictions
    :param ll_cost_fcn: cost function for estimating cost of dense prediction in last layer
    :param final_layer_samples: number of samples for optimizing last layer's dense trajectory predictions
    """
    self._latent_dim = latent_dim
    self._depth = depth
    self._subgoal_cost_fcn = subgoal_cost_fcn
    self._ll_cost_fcn = ll_cost_fcn
    self._is_optimized = False  # indicates whether this layer of the tree is already optimized
    self._opt_z = None  # holds optimal subgoal latent once optimized
    self._latest_z_samples = None  # holds last-sampled z samples for subgoal optimization
    self._dummy_env = None  # used for visualization purposes
    if sampling_rates:
        # not yet at bottom-most layer
        # NOTE(review): pop(0) mutates the caller's list; children receive copies
        self._n_samples = sampling_rates.pop(0)
        self._n_latents = 1
        # two child subtrees (left/right), each with one optimizer per sample
        self._children = [[type(self)(latent_dim, sampling_rates.copy(), depth - 1,
                                      self._subgoal_cost_fcn, self._ll_cost_fcn,
                                      final_layer_samples)
                           for _ in range(self._n_samples)] for _ in range(2)]
    else:
        # final layer, create remaining samples for non-hierarchical 'segment' optimization
        self._n_samples = final_layer_samples
        self._n_latents = 2**depth - 1  # latents for all remaining nodes of the subtree
        self._children = None
    # per-latent Gaussian sampling distribution parameters
    self.mean = np.zeros((self._n_latents, self._latent_dim))
    self.std = np.ones((self._n_latents, self._latent_dim))
def sample(self, below_opt_layer=False):
"""Samples latents from all layers of the tree, returns concatenated result.
Samples N latents for layer that's currently getting optimized, only 1 latent for all layers above and below.
:param below_opt_layer: indicates whether layer is below the layer that is currently getting optimized."""
# sample current layer's latents
if self._is_optimized: # if layer is already optimized --> sample optimized subgoal latent
z = self._opt_z.copy()[None]
else:
# sample N latents if this is currently optimized layer,
# if below optimized layer sample only single latent (since decoding won't be used for optimization)
z = self._sample()[:1] if below_opt_layer else self._sample()
self._latest_z_samples = z.copy()
next_below_opt_layer = (not self._is_optimized and not below_opt_layer) \
or below_opt_layer # in the first case the current layer is getting optimized
if self._children is not None:
# sample children's latents and concatenate
samples = []
for child_left, child_right, z_i in zip(self._children[0], self._children[1], z):
z_left, z_right = child_left.sample(next_below_opt_layer), child_right.sample(next_below_opt_layer)
assert z_left.shape == z_right.shape # latent tree needs to be balanced
samples.append(np.concatenate([z_left, np.tile(z_i[0], (z_left.shape[0], 1, 1)), z_right], axis=1))
z = np.concatenate(samples)
return z
def optimize(self, all_rollouts, goal):
"""Optimizes subgoal in all layers sequentially, optimizes full rollout in last layer."""
if self._children is None: # final layer --> optimize dense segment
return self._optimize_segment(all_rollouts, goal)
elif not self._is_optimized: # non-final layer, not optimized --> optimize subgoal
return self._optimize_subgoal(all_rollouts, goal)
else: # non-final layer, already optimized --> recurse
return self._recurse_optimization(all_rollouts, goal)
def _optimize_segment(self, all_rollouts, goal):
"""Optimizes final-layer 'segment' between subgoals."""
best_rollout, best_cost, best_idx = self._best_of_n_segments(all_rollouts, goal, self._ll_cost_fcn)
self._opt_z = self._latest_z_samples[best_idx]
self._is_optimized = True
return best_rollout, best_cost
def _optimize_subgoal(self, all_rollouts, goal):
"""Optimizes subgoal latent by minimizing pairwise cost estimate with both parents."""
# first split off latents from concatenated rollouts
rollouts = self._split_state_rollout(all_rollouts)
# prepare start + goal + subgoal arrays
starts, start_latents = np.stack([r[0] for r in rollouts.state_rollout]), \
np.stack([r[0] for r in rollouts.latent_rollout])
subgoals = np.stack([r[int(np.floor(r.shape[0] / 2))] for r in rollouts.state_rollout])
subgoal_latents = np.stack([r[int(np.floor(r.shape[0] / 2))] for r in rollouts.latent_rollout])
goals = np.stack([self._split_state_rollout([goal[None]]).state_rollout[0][0] \
if goal.shape[-1] == all_rollouts[0].shape[-1] else goal
for _ in rollouts.state_rollout])
goal_latents = np.stack([self._split_state_rollout([goal[None]]).latent_rollout[0][0] \
if goal.shape[-1] == all_rollouts[0].shape[-1] else r[-1]
for r in rollouts.latent_rollout])
# compute pairwise cost
to_cost, from_cost = self._subgoal_cost_fcn(start_latents, subgoal_latents), \
self._subgoal_cost_fcn(subgoal_latents, goal_latents)
total_cost = to_cost + from_cost
# find optimal subgoal
opt_z_idx = np.argmin(total_cost)
self._opt_z = self._latest_z_samples[opt_z_idx]
# construct output rollout + output_cost
outputs = [starts[opt_z_idx]]
if (subgoals[opt_z_idx] != outputs[-1]).any(): # they can be the same if sequence is too short
outputs.append(subgoals[opt_z_idx])
if not goal.shape[-1] == all_rollouts[0].shape[-1]: # only append very final goal once
if goals[opt_z_idx].shape == outputs[-1].shape:
outputs.append(goals[opt_z_idx])
else:
outputs.append(goals[opt_z_idx][0].transpose(2, 0, 1)) # for image-based CEM
output_rollout = np.stack(outputs)
output_cost = total_cost[opt_z_idx]
# remove children for all but optimal latent, indicate layer is optimized for subsequent optimization passes
self._children = [c[:1] for c in self._children]
self._n_samples = 1
self._is_optimized = True
# (optional) log all options to output for debugging, here in first layer of 8-layer GCP only
# if self._depth == 8:
# self._log_all_subgoal_plans(starts, subgoals, goals, to_cost, from_cost)
return output_rollout, output_cost
def _recurse_optimization(self, all_rollouts, goal):
"""Splits sequence around subgoal and optimizes both parts independently.
Handles edge cases of sub-trajetories too short for further recursion."""
per_latent_rollouts = np.array_split(all_rollouts, self._n_samples)
best_costs, best_rollouts = [], []
for child_left, child_right, rollouts in zip(self._children[0], self._children[1], per_latent_rollouts):
rollouts = [r for r in rollouts] # convert from array of arrays to list
# filter too short rollouts that don't need hierarchical expansion, replace with dummy
short_rollouts = []
for r_idx, r in enumerate(rollouts):
if r.shape[0] < 3:
short_rollouts.append(r)
rollouts[r_idx] = self._make_dummy_seq(r[0])
# expand hierarchically
subgoal_inds = [int(np.floor(r.shape[0] / 2)) for r in rollouts]
subgoal = rollouts[0][subgoal_inds[0]] # across batch dimension all of the subgoals are identical
best_rollout_left, best_cost_left = \
child_left.optimize([r[:si] for r, si in zip(rollouts, subgoal_inds)], subgoal)
best_rollout_right, best_cost_right = \
child_right.optimize([r[si:] for r, si in zip(rollouts, subgoal_inds)], goal)
best_rollout = np.concatenate([best_rollout_left, best_rollout_right])
best_cost = best_cost_left + best_cost_right
# check whether too short trajectories are better, if so: replace results
if short_rollouts:
best_rollout_short, best_cost_short, _ = self._best_of_n_segments(short_rollouts, goal, self._ll_cost_fcn)
if best_cost_short < best_cost or np.isnan(best_cost):
best_rollout, best_cost = best_rollout_short, best_cost_short
# dump best results for this latent
best_rollouts.append(best_rollout); best_costs.append(best_cost)
best_cost_idx = np.argmin(np.array(best_costs))
best_rollout, best_cost = best_rollouts[best_cost_idx], best_costs[best_cost_idx]
return best_rollout, best_cost
def _sample(self):
return np.random.normal(loc=self.mean, scale=self.std, size=(self._n_samples, self._n_latents, self._latent_dim))
def _best_of_n_segments(self, all_rollouts, goal, cost_fcn):
"""Applies dense cost function to segment samples, returns min-cost segment + min-cost + idx."""
all_rollouts_opt, goal_opt = self._prep_segment_opt_inputs(all_rollouts, goal)
cost = cost_fcn(all_rollouts_opt, goal_opt)
best_cost_idx = np.argmin(cost)
return self._split_state_rollout(all_rollouts).state_rollout[best_cost_idx], cost[best_cost_idx], best_cost_idx
def _prep_segment_opt_inputs(self, all_rollouts, goal):
"""Splits off input to cost function from combined inputs (rollouts are concat of both state and latent)"""
rollouts = self._split_state_rollout(all_rollouts).state_rollout
state_goal = self._split_state_rollout([goal]).state_rollout[0] if goal.shape[-1] == all_rollouts[0].shape[-1] \
else goal
return rollouts, state_goal
def _split_state_rollout(self, rollouts):
"""Splits off latents from states in joined rollouts."""
return listdict2dictlist([AttrDict(state_rollout=r[..., :-self._subgoal_cost_fcn.input_dim],
latent_rollout=r[..., -self._subgoal_cost_fcn.input_dim:]) for r in rollouts])
@staticmethod
def _make_dummy_seq(reference_array):
return np.stack([np.ones_like(reference_array) * float("inf"), # fill with dummy w/ max cost
np.zeros_like(reference_array),
np.ones_like(reference_array) * float("inf")])
@property
def fully_optimized(self):
if self._children is not None:
return not (not self._is_optimized or
np.any([not c.fully_optimized for c in self._children[0]]) or
np.any([not c.fully_optimized for c in self._children[1]]))
else:
return self._is_optimized
def _log_all_subgoal_plans(self, starts, subgoals, goals, to_cost, from_cost):
for s, sg, g, tc, fc in zip(starts, subgoals, goals, to_cost, from_cost):
r = np.stack([s, sg, g])
c = tc + fc
self._log_subgoal_plan(r, c)
def _log_subgoal_plan(self, rollout, cost):
from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import Multiroom3dEnv
import time
import cv2, os
if self._dummy_env is None:
self._dummy_env = Multiroom3dEnv({'n_rooms': 25}, no_env=True)
im = self._dummy_env.render_top_down(rollout)
name = "subgoal_{}_{}.png".format(cost, time.time())
cv2.imwrite(os.path.join("/parent/tmp", name), im*255.)
class ImageHierarchicalTreeLatentOptimizer(HierarchicalTreeLatentOptimizer):
    """Image-based variant of the tree optimizer: the 'state' part of each rollout is a
    flattened image frame, and cost functions consume the latent part instead of states."""
    def _prep_segment_opt_inputs(self, all_rollouts, goal):
        """Prepares latent rollouts + goal latent as cost-function inputs.
        :param all_rollouts: list of concatenated [flat image | latent] rollout arrays
        :param goal: goal array; if >2-dim it is an image without latent encoding
        """
        rollouts = self._split_state_rollout(all_rollouts).latent_rollout
        if len(goal.shape) > 2:  # in case we dont have goal encoding use final state of rollout
            state_goal = [r[-1:] for r in rollouts]
        else:
            state_goal = [self._split_state_rollout([goal[None]]).latent_rollout[0] for _ in rollouts]
        return rollouts, state_goal
    def _split_state_rollout(self, rollouts):
        """Splits off latents from states in joined rollouts, reshaping flat states to images."""
        def reshape_to_image(flat):
            # bugfix: removed leftover debug hook (`import pdb; pdb.set_trace()`) that would
            # drop into an interactive debugger instead of failing fast; the assert below
            # provides the same shape guarantee
            assert len(flat.shape) == 2
            res = int(np.sqrt(flat.shape[1] / 3))  # assumes square 3-channel image
            return flat.reshape(flat.shape[0], 3, res, res)
        return listdict2dictlist([AttrDict(state_rollout=reshape_to_image(r[..., :-self._subgoal_cost_fcn.input_dim]),
                                           latent_rollout=r[..., -self._subgoal_cost_fcn.input_dim:]) for r in rollouts])
    def _log_all_subgoal_plans(self, starts, subgoals, goals, to_cost, from_cost):
        """Debug helper: writes an image strip (start | subgoal | goal) per candidate with its cost."""
        import cv2, time, os
        img_stack = []
        for s, sg, g, tc, fc in zip(starts, subgoals, goals, to_cost, from_cost):
            if len(g.shape) == 4:
                g = g[0].transpose(2, 0, 1) * 2 - 1  # HWC [0,1] -> CHW [-1,1] to match s/sg
            img_strip = (np.concatenate((s, sg, g), axis=2).transpose(1, 2, 0) + 1) / 2
            img_strip = cv2.resize(img_strip[..., ::-1], (0, 0), fx=4, fy=4)
            # draw cost twice (bottom-left black, top-left white) for readability on any background
            img_strip = cv2.putText(img_strip, "{:.2f}".format(float(fc + tc)),
                                    (10, img_strip.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 2)
            img_strip = cv2.putText(img_strip, "{:.2f}".format(float(fc + tc)),
                                    (10, 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (1.0, 1.0, 1.0), 2)
            img_stack.append(img_strip)
        name = "subgoals_{}.png".format(time.time())
        cv2.imwrite(os.path.join("/parent/tmp", name), np.concatenate(img_stack, axis=0) * 255)
<file_sep>import numpy as np
import torch
import torch.nn as nn
import gcp.prediction.models.adaptive_binding.adaptive
import gcp.prediction.models.tree.frame_binding
from blox import AttrDict
from blox.basic_types import subdict
from blox.torch.dist import safe_entropy, ProbabilisticModel
from blox.torch.losses import PenaltyLoss
from blox.torch.subnetworks import GeneralizedPredictorModel, Predictor
from gcp.prediction.models.tree.tree_lstm import build_tree_lstm
from blox.torch.variational import setup_variational_inference
from gcp.prediction.models.adaptive_binding.attentive_inference import AttentiveInference
from gcp.prediction.utils.tree_utils import SubgoalTreeLayer
from gcp.prediction.models.tree.inference import Inference
class TreeModule(nn.Module, ProbabilisticModel):
    """Recursive subgoal-prediction module of the GCP tree model: given two parent
    embeddings, infers/samples a latent z and predicts the subgoal embedding between them."""
    def __init__(self, hp, decoder):
        nn.Module.__init__(self)
        ProbabilisticModel.__init__(self)
        self._hp = hp
        self.decoder = decoder
        self.build_network()
    def build_network(self):
        """Builds inference network + prior, the subgoal predictor (MLP or tree-LSTM),
        the frame binding, and optionally an index regressor."""
        hp = self._hp
        q, self.prior = setup_variational_inference(self._hp, self._hp.nz_enc, self._hp.nz_enc * 2)
        if self._hp.attentive_inference:
            self.inference = AttentiveInference(self._hp, q)
        else:
            self.inference = Inference(self._hp, q)
        # todo clean this up with subclassing?
        # predictor input: left + right parent embeddings + latent z
        pred_inp_dim = hp.nz_enc * 2 + hp.nz_vae
        if self._hp.context_every_step:
            # additionally condition every prediction on start + goal encodings
            pred_inp_dim = pred_inp_dim + hp.nz_enc * 2
        if hp.tree_lstm:
            self.subgoal_pred, self.lstm_initializer = build_tree_lstm(hp, pred_inp_dim, hp.nz_enc)
        else:
            self.subgoal_pred = GeneralizedPredictorModel(hp, input_dim=pred_inp_dim, output_dims=[hp.nz_enc],
                                                          activations=[None])
        self.build_binding()
        if self._hp.regress_index:
            self.index_predictor = Predictor(
                self._hp, self._hp.nz_enc * 2, self._hp.max_seq_len, detached=False, spatial=False)
    def build_binding(self):
        """Selects the node-to-frame binding strategy based on hp.matching_type."""
        # TODO this has to be specified with classes, not strings
        hp = self._hp
        if self._hp.matching_type == 'balanced':
            binding_class = gcp.prediction.models.tree.frame_binding.BalancedBinding
        elif 'dtw' in self._hp.matching_type:
            binding_class = gcp.prediction.models.adaptive_binding.adaptive.AdaptiveBinding
        else:
            raise NotImplementedError
        self.binding = binding_class(hp, self.decoder)
    def produce_subgoal(self, inputs, layerwise_inputs, start_ind, end_ind, left_parent, right_parent, depth=None):
        """
        Divides the subsequence by producing a subgoal inside it.
        This function represents one step of recursion of the model
        """
        subgoal = AttrDict()
        e_l = left_parent.e_g_prime
        e_r = right_parent.e_g_prime
        subgoal.p_z = self.prior(e_l, e_r)
        if 'z' in layerwise_inputs:
            # latent supplied externally (e.g. by a planner)
            z = layerwise_inputs.z
            if self._hp.prior_type == 'learned':    # reparametrize if learned prior is used
                z = subgoal.p_z.reparametrize(z)
        elif self._sample_prior:
            z = subgoal.p_z.sample()
        else:
            ## Inference
            if self._hp.attentive_inference:
                subgoal.update(self.inference(inputs, e_l, e_r, start_ind, end_ind))
            else:
                subgoal.match_timesteps = self.binding.comp_timestep(left_parent.match_timesteps,
                                                                     right_parent.match_timesteps)
                subgoal.update(self.inference(inputs, e_l, e_r, start_ind, end_ind, subgoal.match_timesteps.float()))
            z = subgoal.q_z.sample()
        ## Predict the next node
        pred_input = [e_l, e_r, z]
        if self._hp.context_every_step:
            # repeat start/goal encodings to match potentially larger batch of z samples
            mult = int(z.shape[0] / inputs.e_0.shape[0])
            pred_input += [inputs.e_0.repeat_interleave(mult, 0),
                           inputs.e_g.repeat_interleave(mult, 0)]
        if self._hp.tree_lstm:
            if left_parent.hidden_state is None and right_parent.hidden_state is None:
                # root node: initialize both parent LSTM states
                left_parent.hidden_state, right_parent.hidden_state = self.lstm_initializer(e_l, e_r, z)
            subgoal.hidden_state, subgoal.e_g_prime = \
                self.subgoal_pred(left_parent.hidden_state, right_parent.hidden_state, *pred_input)
        else:
            subgoal.e_g_prime_preact = self.subgoal_pred(*pred_input)
            subgoal.e_g_prime = torch.tanh(subgoal.e_g_prime_preact)
        subgoal.ind = (start_ind + end_ind) / 2     # gets overwritten w/ argmax of matching at training time (in loss)
        return subgoal, left_parent, right_parent
    def loss(self, inputs, outputs):
        """Collects node losses, binding (explaining) loss, and matching-entropy penalty."""
        if outputs.tree.depth == 0:
            return {}
        losses = AttrDict()
        losses.update(self.get_node_loss(inputs, outputs))
        # Explaining loss
        losses.update(self.binding.loss(inputs, outputs))
        # entropy penalty
        losses.entropy = PenaltyLoss(weight=self._hp.entropy_weight)(outputs.entropy)
        return losses
    def compute_matching(self, inputs, outputs):
        """ Match the tree nodes to ground truth and compute relevant values """
        tree = outputs.tree
        # compute matching distributions
        if 'gt_match_dists' in outputs:
            gt_match_dists = outputs.gt_match_dists
        else:
            gt_match_dists = self.binding.get_w(inputs.pad_mask, inputs, outputs, log=True)
        tree.bf.match_dist = outputs.gt_match_dists = gt_match_dists
        # compute additional vals
        outputs.entropy = safe_entropy(outputs.gt_match_dists, dim=-1)
        # probability of the node existing
        tree.bf.p_n = outputs.p_n = torch.sum(outputs.gt_match_dists, dim=2).clamp(0, 1)
    def get_node_loss(self, inputs, outputs):
        """ Reconstruction and KL divergence loss """
        losses = AttrDict()
        tree = outputs.tree
        losses.update(self.binding.reconstruction_loss(inputs, outputs))
        losses.update(self.inference.loss(tree.bf.q_z, tree.bf.p_z))
        return losses
    @staticmethod
    def _log_outputs(outputs, inputs, losses, step, log_images, phase, logger):
        """Logs per-tree-layer loss curves for select losses when image logging is active."""
        if log_images:
            # Log layerwise loss
            layerwise_keys = ['dense_img_rec', 'kl'] & losses.keys()
            for name, loss in subdict(losses, layerwise_keys).items():
                if len(loss.error_mat.shape) > 2:   # reduce to two dimensions
                    loss.error_mat = loss.error_mat.mean([i for i in range(len(loss.error_mat.shape))][2:])
                layerwise_loss = SubgoalTreeLayer.split_by_layer_bf(loss.error_mat, dim=1)
                # average only over nonzero entries per layer
                layerwise_loss = torch.tensor([l[l != 0].mean() for l in layerwise_loss])
                logger.log_graph(layerwise_loss, '{}_{}'.format(name, 'loss_layerwise'), step, phase)
<file_sep>import torch
from blox import AttrDict, batch_apply, rmap
from blox.tensor.ops import batchwise_index
from blox.torch.losses import BCELogitsLoss
from blox.torch.modules import DummyModule
from blox.torch.ops import make_one_hot, broadcast_final
from blox.torch.subnetworks import Predictor
class BaseBinding(DummyModule):
    """Base class for modules that bind predicted tree nodes to ground-truth frames."""
    def __init__(self, hp, decoder=None):
        super().__init__()
        self._hp = hp
        self.decoder = decoder
        self.build_network()
    def get_init_inds(self, inputs):
        """Returns (start, end) indices as floats with a trailing singleton dimension."""
        first = inputs.start_ind.float()[:, None]
        last = inputs.end_ind.float()[:, None]
        return first, last
    def apply_tree(self, tree, inputs):
        """Applies this binding to every tree node, seeding root parents with boundary timesteps."""
        first, last = self.get_init_inds(inputs)
        left = AttrDict(timesteps=first)
        right = AttrDict(timesteps=last)
        tree.apply_fn({}, fn=self, left_parents=left, right_parents=right)
    def get_matched_sequence(self, tree, key):
        """Gathers per-timestep values under `key` from the tree nodes selected by argmax matching."""
        node_values = tree.bf[key]
        best_nodes = tree.bf.match_dist.argmax(1)
        # Two-dimensional (batchwise) indexing into the node dimension
        return rmap(lambda x: batchwise_index(x, best_nodes), node_values)
class BalancedBinding(BaseBinding):
    """Binds each tree node to the midpoint frame between its parents' timesteps (balanced matching)."""
    # TODO almost all of this class is no longer needed I think
    def build_network(self):
        # per-node existence classifier over node embeddings
        self.existence_predictor = Predictor(self._hp, self._hp.nz_enc, 1, spatial=False)
    def __call__(self, inputs, subgoal, left_parent, right_parent):
        """Computes this node's timestep (midpoint of parents) and its one-hot match distribution."""
        timesteps = self.comp_timestep(left_parent.timesteps, right_parent.timesteps)
        c_n_prime = make_one_hot(timesteps.long(), self._hp.max_seq_len)
        # TODO implement the alternative of not doing this. Then would need to renormalize
        # zero out matches that collide with a parent's timestep (node has no frame of its own there)
        c_n_prime[left_parent.timesteps.long() == timesteps, :] = 0
        c_n_prime[right_parent.timesteps.long() == timesteps, :] = 0
        return AttrDict(timesteps=timesteps.long(), c_n_prime=c_n_prime.float())
    @staticmethod
    def comp_timestep(t_l, t_r, *unused_args):
        # midpoint between the two parent timesteps
        return (t_l + t_r) / 2
    def get_w(self, pad_mask, inputs, outputs, log=False):
        """ Match to the middle frame between parents """
        self.apply_tree(outputs.tree, inputs)
        match_dists = outputs.tree.bf.c_n_prime
        return match_dists
    def get_init_inds(self, outputs):
        # TODO this actually mostly gets passed 'inputs'
        # widen boundaries by 1 so first/last real frames can be matched by root children
        return torch.zeros_like(outputs.end_ind[:, None]) - 1, \
               outputs.end_ind[:, None] + 1
    def prune_sequence(self, inputs, outputs, key='images'):
        """Keeps only frames whose existence predictor fires (>0.5 after sigmoid); frame 0 always kept."""
        seq = getattr(outputs.tree.df, key)
        latent_seq = outputs.tree.df.e_g_prime
        existence = batch_apply(self.existence_predictor, latent_seq)[..., 0]
        outputs.existence_predictor = AttrDict(existence=existence)
        existing_frames = torch.sigmoid(existence) > 0.5
        existing_frames[:, 0] = 1
        pruned_seq = [seq[i][existing_frames[i]] for i in range(seq.shape[0])]
        return pruned_seq
    def loss(self, inputs, outputs):
        """BCE loss training the existence predictor against the matching distribution."""
        losses = AttrDict()
        if 'existence_predictor' in outputs:
            losses.existence_predictor = BCELogitsLoss()(
                outputs.existence_predictor.existence, outputs.tree.df.match_dist.sum(2).float())
        return losses
    def reconstruction_loss(self, inputs, outputs, weights=1):
        """ Balanced tree can have a simpler loss version which doesn't use cdist """
        tree = outputs.tree
        estimates = self.get_matched_sequence(tree, 'distr')
        outputs.soft_matched_estimates = self.get_matched_sequence(tree, 'images')
        targets = inputs.traj_seq
        # mask out padded timesteps
        weights = broadcast_final(weights * inputs.pad_mask, targets)
        losses = self.decoder.nll(estimates, targets, weights, log_error_arr=True)
        return losses
<file_sep>import torch.nn as nn
import torch
import torch.nn.functional as F
from blox import AttrDict
from blox.torch.ops import make_one_hot, apply_linear
from blox.torch.subnetworks import Predictor, MultiheadAttention
from gcp.prediction.models.tree.inference import Inference
class AttentiveInference(Inference):
    """Inference network that attends over the encoded evidence sequence to form q(z)."""
    def __init__(self, hp, q):
        super().__init__(hp, q)
        self.attention = Attention(hp)
    def forward(self, inputs, e_l, e_r, start_ind, end_ind, timestep=None):
        """Computes the approximate posterior for a node between parent embeddings e_l / e_r."""
        assert timestep is None
        result = AttrDict()
        if self.deterministic:
            # deterministic mode: posterior conditioned on left parent only
            result.q_z = self.q(e_l)
        else:
            # attend over the inference encodings to retrieve relevant evidence e_tilde
            attended, result.gamma = self.attention(
                inputs.inf_enc_seq, inputs.inf_enc_key_seq, [e_l, e_r], start_ind, end_ind, inputs)
            result.q_z = self.q(e_l, e_r, attended)
        return result
class Attention(nn.Module):
    """Multi-layer, multi-head attention over an encoded sequence, queried from parent embeddings."""
    def __init__(self, hp):
        super().__init__()
        self._hp = hp
        # NOTE(review): time_cond_length is computed but not used below — confirm whether dead
        time_cond_length = self._hp.max_seq_len if self._hp.one_hot_attn_time_cond else 1
        input_size = hp.nz_enc * 2
        self.query_net = Predictor(hp, input_size, hp.nz_attn_key)
        self.attention_layers = nn.ModuleList([MultiheadAttention(hp) for _ in range(hp.n_attention_layers)])
        self.predictor_layers = nn.ModuleList([Predictor(hp, hp.nz_enc, hp.nz_attn_key, num_layers=2)
                                               for _ in range(hp.n_attention_layers)])
        self.out = nn.Linear(hp.nz_enc, hp.nz_enc)
    def forward(self, values, keys, query_input, start_ind, end_ind, inputs):
        """
        Performs multi-layered, multi-headed attention.
        Note: the query can have a different batch size from the values/keys. In that case, the query is interpreted as
        multiple queries, i.e. the values are tiled to match the query tensor size.
        :param values: tensor batch x length x dim_v
        :param keys: tensor batch x length x dim_k
        :param query_input: input to the query network, batch2 x dim_k
        :param start_ind:
        :param end_ind:
        :param inputs:
        :param timestep: specify the timestep of the attention directly. tensor batch2 x 1
        :param attention_weights:
        :return: (output projection of final attention layer, final attention weights)
        """
        query = self.query_net(*query_input)
        # masked attention uses the (rounded) node interval, otherwise full sequence bounds
        s_ind, e_ind = (torch.floor(start_ind), torch.ceil(end_ind)) if self._hp.mask_inf_attention \
                        else (inputs.start_ind, inputs.end_ind)
        # Reshape values, keys, inputs if not enough dimensions
        # tile values/keys/bounds along batch to match the (possibly larger) query batch
        mult = int(query.shape[0] / keys.shape[0])
        tile = lambda x: x[:, None][:, [0] * mult].reshape((-1,) + x.shape[1:])
        values = tile(values)
        keys = tile(keys)
        s_ind = tile(s_ind)
        e_ind = tile(e_ind)
        # Attend
        norm_shape_k = query.shape[1:]
        norm_shape_v = values.shape[2:]
        raw_attn_output, att_weights = None, None
        for attention, predictor in zip(self.attention_layers, self.predictor_layers):
            raw_attn_output, att_weights = attention(query, keys, values, s_ind, e_ind)
            x = F.layer_norm(raw_attn_output, norm_shape_v)
            query = F.layer_norm(predictor(x) + query, norm_shape_k)  # skip connections around attention and predictor
        return apply_linear(self.out, raw_attn_output, dim=1), att_weights  # output non-normalized output of final attention layer
<file_sep>from contextlib import contextmanager
import numpy as np
import torch
from blox import AttrDict
from blox.tensor.ops import broadcast_final, batch_apply, batchwise_index, remove_spatial
from blox.torch.encoder_decoder import Encoder
from blox.torch.losses import L2Loss
from blox.torch.subnetworks import Predictor
from gcp.prediction.models.auxilliary_models.base_model import BaseModel
from gcp.prediction.training.checkpoint_handler import CheckpointHandler
class InverseModel(BaseModel):
    """Inverse dynamics model: predicts the action connecting two (encoded) observations.
    Supports image encodings from its own encoder or externally-provided latents."""
    def __init__(self, params, logger):
        super().__init__(logger)
        self._hp = self._default_hparams()
        self.override_defaults(params)  # override defaults with config file
        self.postprocess_params()
        assert self._hp.n_actions != -1     # make sure action dimensionality was overridden
        self.build_network()
        # load only the encoder params during training
        if self._hp.enc_params_checkpoint is not None:
            assert self._hp.build_encoder   # provided checkpoint but did not build encoder
            self._load_weights([
                (self.encoder, 'encoder', self._hp.enc_params_checkpoint),
            ])
        self.detach_enc = not self._hp.finetune_enc
    def _default_hparams(self):
        """Declares model hyperparameters and their defaults on top of BaseModel's."""
        # put new parameters in here:
        default_dict = {
            'ngf': 4,  # number of feature maps in shallowest level
            'nz_enc': 128,  # number of dimensions in encoder-latent space
            'nz_mid': 128,  # number of hidden units in fully connected layer
            'n_processing_layers': 3,  # Number of layers in MLPs
            'temp_dist': 1,  # sample temporal distances between 1 and temp_dist, regress only first action
            'enc_params_checkpoint': None,  # specify pretrained encoder weights to load for training
            'take_first_tstep': False,  # take only first and second time step, no shuffling.
            'use_states': False,
            'aggregate_actions': False,  # when taking two images that are more than one step apart sum the actions along that
            'pred_states': False,
            'finetune_enc': False,
            'checkpt_path': None,
            'build_encoder': True,  # if False, does not build an encoder, assumes that inputs are encoded from model
            'add_lstm_state_enc': False,  # if True, expects lstm state as additional encoded input
            'log_topdown_maze': False,
            'train_full_seq': False,
            'train_im0_enc': True,  # If True, the first frame latent is passed in as `enc_traj_seq`
        }
        # loss weights
        default_dict.update({
            'action_rec_weight': 1.0,
            'state_rec_weight': 1.0,
        })
        # misc params
        default_dict.update({
            'use_skips': False,
            'dense_rec_type': None,
            'device': None,
            'randomize_length': False,
        })
        # add new params to parent params
        parent_params = super()._default_hparams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params
    def build_network(self, build_encoder=True):
        """Builds (optional) image encoder and the action predictor head."""
        if self._hp.build_encoder:
            self.encoder = Encoder(self._hp)
        input_sz = self._hp.nz_enc * 3 if self._hp.add_lstm_state_enc else self._hp.nz_enc * 2
        self.action_pred = Predictor(self._hp, input_sz, self._hp.n_actions)
    def sample_offsets(self, end_ind):
        """
        # sample temporal distances between 1 and temp_dist, regress only first action
        :return: None, call by reference
        """
        bs = end_ind.shape[0]
        if self._hp.take_first_tstep:
            # deterministic: always use timesteps (0, 1)
            t0 = torch.zeros(bs, device=self._hp.device).long()
            t1 = torch.ones_like(t0)
        else:
            t0 = np.zeros(bs)
            for b in range(bs):
                assert end_ind[b].cpu().numpy() >= self._hp.temp_dist
                t0[b] = np.random.randint(0, end_ind[b].cpu().numpy() - self._hp.temp_dist + 1, 1)
            delta_t = np.random.randint(1, self._hp.temp_dist + 1, bs)
            t1 = t0 + delta_t
        # NOTE(review): in the take_first_tstep branch t0/t1 are already tensors — torch.tensor()
        # then copy-constructs (emits a UserWarning); harmless but could be branch-local
        t0 = torch.tensor(t0, device=self._hp.device, dtype=torch.long)
        t1 = torch.tensor(t1, device=self._hp.device, dtype=torch.long)
        return t0, t1
    def index_input(self, input, t, aggregate=False, t1=None):
        """Indexes input at per-batch timesteps t; if aggregate, sums input over [t, t1) instead."""
        if aggregate:
            assert t1 is not None   # need end time step for aggregation
            selected = torch.zeros_like(input[:, 0])
            for b in range(input.shape[0]):
                selected[b] = torch.sum(input[b, t[b]:t1[b]], dim=0)
        else:
            selected = batchwise_index(input, t)
        return selected
    def full_seq_forward(self, inputs):
        """Predicts actions for every consecutive frame pair of the full sequence."""
        if 'model_enc_seq' in inputs:
            enc_seq_1 = inputs.model_enc_seq[:, 1:]
            if self._hp.train_im0_enc and 'enc_traj_seq' in inputs:
                # first-frame latents come from the external encoder sequence
                enc_seq_0 = inputs.enc_traj_seq.reshape(inputs.enc_traj_seq.shape[:2] + (self._hp.nz_enc,))[:, :-1]
                enc_seq_0 = enc_seq_0[:, :enc_seq_1.shape[1]]
            else:
                enc_seq_0 = inputs.model_enc_seq[:, :-1]
        else:
            enc_seq = batch_apply(self.encoder, inputs.traj_seq)
            enc_seq_0, enc_seq_1 = enc_seq[:, :-1], enc_seq[:, 1:]
        if self.detach_enc:
            # stop gradients into the encoder
            enc_seq_0 = enc_seq_0.detach()
            enc_seq_1 = enc_seq_1.detach()
        # TODO quite sure the concatenation is automatic
        actions_pred = batch_apply(self.action_pred, torch.cat([enc_seq_0, enc_seq_1], dim=2))
        output = AttrDict()
        output.actions = actions_pred #remove_spatial(actions_pred)
        if 'actions' in inputs:
            output.action_targets = inputs.actions
            output.pad_mask = inputs.pad_mask
        return output
    def forward(self, inputs, full_seq=None):
        """
        forward pass at training time
        :arg full_seq: if True, outputs actions for the full sequence, expects input encodings
        """
        if full_seq is None:
            full_seq = self._hp.train_full_seq
        if full_seq:
            return self.full_seq_forward(inputs)
        # sample a (t0, t1) frame pair per batch element
        t0, t1 = self.sample_offsets(inputs.norep_end_ind if 'norep_end_ind' in inputs else inputs.end_ind)
        im0 = self.index_input(inputs.traj_seq, t0)
        im1 = self.index_input(inputs.traj_seq, t1)
        if 'model_enc_seq' in inputs:
            if self._hp.train_im0_enc and 'enc_traj_seq' in inputs:
                enc_im0 = self.index_input(inputs.enc_traj_seq, t0).reshape(inputs.enc_traj_seq.shape[:1] + (self._hp.nz_enc,))
            else:
                enc_im0 = self.index_input(inputs.model_enc_seq, t0)
            enc_im1 = self.index_input(inputs.model_enc_seq, t1)
        else:
            assert self._hp.build_encoder   # need encoder if no encoded latents are given
            enc_im0 = self.encoder.forward(im0)[0]
            enc_im1 = self.encoder.forward(im1)[0]
        if self.detach_enc:
            enc_im0 = enc_im0.detach()
            enc_im1 = enc_im1.detach()
        selected_actions = self.index_input(inputs.actions, t0, aggregate=self._hp.aggregate_actions, t1=t1)
        selected_states = self.index_input(inputs.traj_seq_states, t0)
        if self._hp.pred_states:
            # NOTE(review): states_pred is computed but never stored in `output`, yet loss()
            # reads outputs.states — confirm whether pred_states path is exercised/broken here
            actions_pred, states_pred = torch.split(self.action_pred(enc_im0, enc_im1), 2, 1)
        else:
            actions_pred = self.action_pred(enc_im0, enc_im1)
        output = AttrDict()
        output.actions = remove_spatial(actions_pred)
        output.action_targets = selected_actions
        output.state_targets = selected_states
        output.img_t0, output.img_t1 = im0, im1
        return output
    def loss(self, inputs, outputs, add_total=True):
        """Action (and optionally state) L2 reconstruction loss."""
        losses = AttrDict()
        # subgoal reconstruction loss
        n_action_output = outputs.actions.shape[1]
        loss_weights = broadcast_final(outputs.pad_mask[:, :n_action_output], inputs.actions) if 'pad_mask' in outputs else 1
        losses.action_reconst = L2Loss(self._hp.action_rec_weight)(outputs.actions, outputs.action_targets[:, :n_action_output], weights=loss_weights)
        if self._hp.pred_states:
            losses.state_reconst = L2Loss(self._hp.state_rec_weight)(outputs.states, outputs.state_targets)
        return losses
    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        """Logs losses plus (for image inputs) predicted-action visualizations and top-down trajectories."""
        super()._log_losses(losses, step, log_images, phase)
        if 'actions' not in outputs:
            # TODO figure out why this happens
            return
        if log_images and len(inputs.traj_seq.shape) == 5:
            self._logger.log_pred_actions(outputs, inputs, 'pred_actions', step, phase)
        if self._hp.pred_states:
            self._logger.log_pred_states(outputs, inputs, 'pred_states', step, phase)
        if log_images:
            dataset = self._hp.dataset_class
            if len(outputs.actions.shape) == 3:
                actions = outputs.actions
            else:
                # Training, need to get the action sequence
                actions = self(inputs, full_seq=True).actions
            # integrate actions into a cumulative trajectory starting from the first state
            cum_action_traj = torch.cat((inputs.traj_seq_states[:, :1], actions), dim=1).cumsum(1)
            self._logger.log_dataset_specific_trajectory(outputs, inputs, "action_traj_topdown", step, phase, dataset,
                                                         predictions=cum_action_traj, end_inds=inputs.end_ind)
            cum_action_traj = torch.cat((inputs.traj_seq_states[:, :1], inputs.actions), dim=1).cumsum(1)
            self._logger.log_dataset_specific_trajectory(outputs, inputs, "action_traj_gt_topdown", step, phase, dataset,
                                                         predictions=cum_action_traj, end_inds=inputs.end_ind)
    def run_single(self, enc_latent_img0, model_latent_img1):
        """Runs inverse model on first input encoded by encoded and second input produced by model."""
        assert self._hp.train_im0_enc   # inv model needs to be trained from
        return remove_spatial(self.action_pred(enc_latent_img0, model_latent_img1))
    @contextmanager
    def val_mode(self, *args, **kwargs):
        # no mode switch needed for validation
        yield
class TestTimeInverseModel(InverseModel):
    """Deployment wrapper for the inverse model: loads checkpoint weights and moves
    incoming data onto the model's device before prediction."""
    def __init__(self, params, logger):
        super().__init__(params, logger)
        if torch.cuda.is_available():
            self.cuda()
            self.device = torch.device('cuda')
        else:
            self.device = torch.device('cpu')
        assert self._hp.checkpt_path is not None
        weights_file = CheckpointHandler.get_resume_ckpt_file('latest', self._hp.checkpt_path)
        if not CheckpointHandler.load_weights(weights_file, self):
            raise ValueError("Could not load checkpoint from {}!".format(weights_file))
    def forward(self, inputs):
        """Predicts the action between inputs['img_t0'] and inputs['img_t1']."""
        # normalize all inputs to tensors on the model's device
        for key in inputs:
            value = inputs[key]
            if not isinstance(value, torch.Tensor):
                value = torch.Tensor(value)
            if value.device != self.device:
                value = value.to(self.device)
            inputs[key] = value
        emb_first = self.encoder.forward(inputs['img_t0'])[0]
        emb_second = self.encoder.forward(inputs['img_t1'])[0]
        return remove_spatial(self.action_pred(emb_first, emb_second))
class FromStatesInverseModel(InverseModel):
    """Inverse model variant that regresses actions directly from low-dimensional states
    (no image encoder is built or used)."""
    def __init__(self, params, logger):
        super().__init__(params, logger)
    def get_timesteps(self, inputs):
        """
        # sample temporal distances between 1 and temp_dist, regress only first action
        Writes state_t0 / state_t1 / selected_action into `inputs`.
        :return: None, call by reference
        """
        t0 = np.zeros(self._hp.batch_size)
        for b in range(self._hp.batch_size):
            # NOTE(review): abs() silently flips an empty/negative range when end_ind <= temp_dist
            # (and randint raises on a zero-width range) — confirm sequences are long enough
            t0[b] = np.random.randint(0, abs(inputs.end_ind[b].cpu().numpy() - self._hp.temp_dist), 1)
        delta_t = np.random.randint(1, self._hp.temp_dist + 1, self._hp.batch_size)
        t1 = t0 + delta_t
        t0 = torch.tensor(t0, device=inputs.traj_seq_states.device, dtype=torch.long)
        t1 = torch.tensor(t1, device=inputs.traj_seq_states.device, dtype=torch.long)
        inputs.state_t0 = batchwise_index(inputs.traj_seq_states, t0)
        inputs.state_t1 = batchwise_index(inputs.traj_seq_states, t1)
        inputs.selected_action = batchwise_index(inputs.actions, t0)
    def build_network(self, build_encoder=True):
        """States feed the predictor directly; no encoder is built."""
        self.action_pred = Predictor(self._hp, self._hp.state_dim * 2, self._hp.n_actions, 3)
    def forward(self, inputs):
        """Predicts the action taken at t0 from the two sampled states."""
        self.get_timesteps(inputs)
        # add singleton spatial dims expected by the (conv-style) Predictor
        actions_pred = self.action_pred(inputs.state_t0[:, :, None, None], inputs.state_t1[:, :, None, None])
        output = AttrDict()
        output.actions = torch.squeeze(actions_pred)
        return output
    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        # bugfix: parent _log_losses signature is (losses, step, log_images, phase) — see
        # InverseModel.log_outputs; previously `phase` was passed in the log_images slot
        super()._log_losses(losses, step, log_images, phase)
class EarlyFusionInverseModel(InverseModel):
    """Inverse model that stacks both input images channel-wise ("early fusion")
    and runs a single encoder over the pair, instead of encoding each image separately."""

    def __init__(self, params, logger):
        super().__init__(params, logger)

    def build_network(self, build_encoder=True):
        """Builds the fused-image encoder and the action (+ optional state) regression head.

        :param build_encoder: unused; kept for interface compatibility with the parent class.
        """
        self._hp.input_nc = 6   # two RGB images stacked along the channel dimension
        self.encoder = Encoder(self._hp)
        if self._hp.pred_states:
            # additionally regress the state alongside the action
            outdim = self._hp.n_actions + self._hp.state_dim
        else:
            outdim = self._hp.n_actions
        self.action_pred = Predictor(self._hp, self._hp.nz_enc, outdim, 3)

    def forward(self, inputs):
        """Samples an image pair via get_timesteps, encodes the stacked pair, predicts the action.

        :param inputs: AttrDict; img_t0/img_t1 are filled in by get_timesteps.
        :return: AttrDict with 'actions' (and 'states' when pred_states is set).
        """
        self.get_timesteps(inputs)
        enc = self.encoder.forward(torch.cat([inputs.img_t0, inputs.img_t1], dim=1))[0]
        output = AttrDict()
        out = self.action_pred(enc)
        if self._hp.pred_states:
            # bugfix: split by the configured dimensions instead of the hard-coded [2, 2],
            # which was only correct when n_actions == state_dim == 2
            output.actions, output.states = torch.split(
                torch.squeeze(out), [self._hp.n_actions, self._hp.state_dim], 1)
        else:
            output.actions = torch.squeeze(out)
        return output
<file_sep>import cv2
import copy
import gym
import numpy as np
from blox import AttrDict
from gcp.planning.infra.envs.miniworld_env.base_miniworld_env import BaseMiniworldEnv
from gcp.planning.infra.envs.miniworld_env.utils.multiroom2d_layout import define_layout, draw_layout_overview, \
default_texture_dir
from gcp.planning.infra.envs.miniworld_env.utils.sampling_fcns import RoomSampler2d
import numbers
def fcn_apply(fcn, arg):
    """Returns a thunk that, when invoked, evaluates ``arg`` and feeds its result to ``fcn``."""
    def _deferred():
        return fcn(arg())
    return _deferred
class Multiroom3dEnv(BaseMiniworldEnv):
    """Multi-room 3D maze environment wrapping gym-miniworld, with top-down
    rendering support and PRM-based shortest-path distance evaluation."""

    def __init__(self, hp, reset_state=None, no_env=False, crop_window=None):
        """
        :param hp: dict of hparam overrides applied on top of _default_hparams
        :param reset_state: unused in the constructor; kept for interface compatibility
        :param no_env: if True, skips building the underlying gym env (rendering-only usage)
        :param crop_window: if set, top-down renders are cropped to a window of this
            half-size (in pixels) around the agent
        """
        self._hp = self._default_hparams()
        for name, value in hp.items():
            print('setting param {} to value {}'.format(name, value))
            self._hp.set_hparam(name, value)
        super().__init__(self._hp)

        self._texture_dir = default_texture_dir()
        # layout is a square grid of rooms, n_rooms must be a perfect square
        self._rooms_per_side = int(np.sqrt(self._hp.n_rooms))
        self._layout = define_layout(self._rooms_per_side, self._texture_dir)
        self._topdown_render_scale = 256  # table_size * scale = px size of output render img
        self._static_img_topdown = draw_layout_overview(self._rooms_per_side,
                                                        self._topdown_render_scale,
                                                        texture_dir=self._texture_dir)
        self._crop_window = crop_window
        if crop_window is not None:
            # top-down rendering will get cropped -> pad static background
            padded_bg = np.zeros((self._static_img_topdown.shape[0] + 2 * crop_window,
                                  self._static_img_topdown.shape[1] + 2 * crop_window, 3), dtype=self._static_img_topdown.dtype)
            padded_bg[crop_window:-crop_window, crop_window:-crop_window] = self._static_img_topdown
            self._static_img_topdown = padded_bg

        # action dim (x/y) and state dim (x/y position + heading angle)
        self._adim, self._sdim = 2, 3
        if not no_env:
            import gym_miniworld  # keep! important for env registration
            self.env = gym.make("MiniWorld-Multiroom3d-v0",
                                obs_height=self._hp.obs_height,
                                obs_width=self._hp.obs_width,
                                rooms_per_side=self._rooms_per_side,
                                doors=self._layout.doors,
                                heading_smoothing=self._hp.heading_smoothing,
                                layout_params=AttrDict(room_size=self._layout.room_size,
                                                       door_size=self._layout.door_size,
                                                       textures=self._layout.textures))

        # Define the sample_*_state method by looking up the function with the corresponding name
        self.state_sampler = RoomSampler2d(self._rooms_per_side)
        self.current_pos = None
        self.goal_pos = None
        self.prm_policy = None  # used to compute shortest distance between pos and goal

    def _default_hparams(self):
        default_dict = {
            'obs_height': 300,
            'obs_width': 400,
            'goal_pos': None,       # fixed goal position; None -> sampled per episode
            'init_pos': None,       # fixed start position; None -> sampled per episode
            'n_rooms': 9,
            'heading_smoothing': 0.2,  # how much of new angle is taken into average
        }
        parent_params = super()._default_hparams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def reset(self, reset_state):
        """Resets the env; samples a random start pose and goal when reset_state is None.

        :param reset_state: optional flat array [start_x, start_y, start_angle, ..., goal_x, goal_y]
        :return: (obs AttrDict, reset_state AttrDict actually used)
        """
        super().reset()
        if reset_state is None:
            start_pos = self.env.mj2mw(self.state_sampler.sample(self._hp.init_pos))
            start_angle = 2 * np.pi * np.random.rand()
            goal_pos = self.env.mj2mw(self.state_sampler.sample(self._hp.goal_pos))
        else:
            start_pos = reset_state[:2]
            start_angle = reset_state[2]
            goal_pos = reset_state[-2:]

        reset_state = AttrDict(start_pos=start_pos,
                               start_angle=start_angle,
                               goal=goal_pos)

        img_obs = self.env.reset(reset_state)
        self.goal_pos = goal_pos

        qpos_full = np.concatenate((start_pos, np.array([start_angle])))
        obs = AttrDict(images=np.expand_dims(img_obs, axis=0),  # add camera dimension
                       qpos_full=qpos_full,
                       goal=goal_pos,
                       env_done=False,
                       state=np.concatenate((qpos_full, goal_pos)),
                       topdown_image=self.render_pos_top_down(qpos_full, self.goal_pos)
                       )
        self._post_step(start_pos)
        # remember episode-start distance-to-goal for later evaluation
        self._initial_shortest_dist = self.comp_shortest_dist(start_pos, goal_pos)
        return obs, reset_state

    def get_reset_from_obs(self, obs_dict):
        # first state of the trajectory serves as the reset state
        return obs_dict['state'][0]

    def get_goal_from_obs(self, obs_dict):
        # goal is taken from the final step of the trajectory
        self.goal = obs_dict['goal'][-1]
        return self.goal

    def step(self, action):
        """Steps the underlying env and assembles the observation AttrDict."""
        img_obs, reward, done, agent_pos = self.env.step(action)
        obs = AttrDict(images=np.expand_dims(img_obs, axis=0),  # add camera dimension
                       qpos_full=agent_pos,
                       goal=self.goal_pos,
                       env_done=done,
                       state=np.concatenate((agent_pos, self.goal_pos)),
                       topdown_image=self.render_pos_top_down(agent_pos, self.goal_pos)
                       )
        self._post_step(agent_pos)
        return obs

    def _post_step(self, agent_pos):
        # bookkeeping after each step: track position, goal distance and full trajectory
        # (add_goal_dist and _full_traj come from the base class)
        self.current_pos = agent_pos
        self.add_goal_dist(self.comp_shortest_dist(self.current_pos[:2], self.goal_pos))
        self._full_traj.append(agent_pos)

    def eval(self):
        # record final distance-to-goal, then defer to base-class evaluation
        self._final_shortest_dist = self.comp_shortest_dist(self.current_pos[:2], self.goal_pos)
        return super().eval()

    def comp_shortest_dist(self, p1, p2):
        """Uses PRM to get the shortest distance between two points within the maze."""
        if self.prm_policy is None:
            # lazily construct the PRM planner on first use
            from gcp.planning.infra.policy.prm_policy.prm_policy import PrmPolicy
            self.prm_policy = PrmPolicy(None, AttrDict(n_samples_per_room=200), None, None, **self.env_policy_params())
        dist, _ = self.prm_policy.compute_shortest_path(p1, p2)
        return dist

    def env_policy_params(self):
        """Builds coordinate-conversion callbacks the PRM policy needs to talk to this env."""
        def transform_plan(state_plan, action_plan):
            # convert plan to miniworld coords; actions are successive state deltas
            state_plan = self.env.mj2mw(state_plan)
            action_plan = state_plan[:, 1:] - state_plan[:, :-1]
            return state_plan, action_plan

        conversion_fcns = AttrDict(transform_plan=transform_plan,
                                   env2prm=self.env.mw2mj,
                                   prm2env=self.env.mj2mw)
        return {'conversion_fcns': conversion_fcns, 'n_rooms': self._hp.n_rooms}

    def render_top_down(self, traj, background=None, goal=None, line_thickness=4, color=(1.0, 0, 0), mark_pts=False):
        """Renders a state trajectory in a top-down view."""
        if isinstance(color[0], numbers.Number):
            # broadcast a single color to one color per trajectory segment
            color = [color] * (traj.shape[0] - 1)
        img = self._static_img_topdown.copy() if background is None else background.copy()
        traj = traj.copy()  # very important!!!
        if goal is not None:
            goal = goal.copy()
            # normalize goal into the same coordinate frame as the background image
            if traj.shape[1] == 5 or traj.shape[1] == 2: goal = goal[:2]; goal[1] *= -1
            if traj.max() > 1.0 or traj.min() < -1.0: goal = goal / 27.0
            goal = goal + 0.5 * self._layout.table_size
        if traj.shape[1] == 5 or traj.shape[1] == 2: traj = traj[:, :2]; traj[:, 1] *= -1
        if traj.max() > 1.0 or traj.min() < -1.0: traj = traj / 27.0  # scale from miniworld env to [-1...1]
        traj = traj + 0.5 * self._layout.table_size

        # draw one line segment per consecutive state pair (y axis flipped for image coords)
        for i in range(traj.shape[0] - 1):
            cv2.line(img, (int(traj[i, 0] * self._topdown_render_scale),
                           img.shape[0] - int(traj[i, 1] * self._topdown_render_scale)),
                     (int(traj[i+1, 0] * self._topdown_render_scale),
                      img.shape[0] - int(traj[i+1, 1] * self._topdown_render_scale)),
                     color[i], line_thickness)
            if mark_pts and i > 0 and i < (traj.shape[0] - 2):
                # mark intermediate waypoints with thick red dots
                cv2.line(img, (int(traj[i, 0] * self._topdown_render_scale),
                               img.shape[0] - int(traj[i, 1] * self._topdown_render_scale)),
                         (int(traj[i, 0] * self._topdown_render_scale),
                          img.shape[0] - int(traj[i, 1] * self._topdown_render_scale)),
                         (1.0, 0, 0), int(3*line_thickness))

        # print start+end position
        img = self.render_pos_top_down(traj[0], traj[-1], background=img, mirror_scale=False)
        if goal is not None:
            img = self.render_pos_top_down(traj[0], goal, background=img, mirror_scale=False, large_goal=True)
        return img

    def render_pos_top_down(self,
                            current_pose,
                            goal_pos,
                            background=None,
                            mirror_scale=True,
                            large_goal=False):
        """Renders a state trajectory in a top-down view."""
        img = self._static_img_topdown.copy() if background is None else background.copy()

        def convert_sim2topdown(pos, img_shape):
            # map sim coordinates to pixel coordinates of the top-down image
            pos = pos.copy()  # very important !!!!!
            if mirror_scale:
                pos[1] *= -1
                pos = pos / 27.0  # scale from miniworld env to [-1...1]
                pos = pos + 0.5 * self._layout.table_size
            return (int(pos[0] * self._topdown_render_scale), img_shape[0] - int(pos[1] * self._topdown_render_scale))

        curr_pos = convert_sim2topdown(current_pose, img.shape)
        goal_pos = convert_sim2topdown(goal_pos, img.shape)

        if self._crop_window is not None:
            # we need to correct for the too large size of img.shape above, therefore -2*self._crop_window
            curr_pos = (curr_pos[0] + self._crop_window, curr_pos[1] + self._crop_window - 2*self._crop_window)
            goal_pos = (goal_pos[0] + self._crop_window, goal_pos[1] + self._crop_window - 2*self._crop_window)

        # zero-length lines act as dots: red for agent, green for goal
        cv2.line(img, curr_pos, curr_pos, (0.0, 0, 1.0), 10)
        cv2.line(img, goal_pos, goal_pos, (0.0, 1.0, 0), 10 if not large_goal else 20)
        if self._crop_window is not None:
            # catch rounding errors
            curr_pos = (max(self._crop_window, curr_pos[0]), max(self._crop_window, curr_pos[1]))
            lower, upper = np.asarray(curr_pos) - self._crop_window, np.asarray(curr_pos) + self._crop_window
            img = img[lower[1]:upper[1], lower[0]:upper[0]]
        return img

    @property
    def adim(self):
        # action dimensionality
        return self._adim

    @property
    def sdim(self):
        # state dimensionality
        return self._sdim
class TopdownMultiroom3dEnv(Multiroom3dEnv):
    """Variant of Multiroom3dEnv whose image observations are top-down renderings
    cropped to a window around the agent instead of first-person camera frames."""

    def __init__(self, hp, reset_state=None, no_env=False, crop_window=None):
        # the caller must specify the half-size of the top-down crop window
        assert "crop_window" in hp
        hp_copy = copy.deepcopy(hp)
        window = hp_copy.pop("crop_window")
        super().__init__(hp_copy, reset_state, no_env, crop_window=window)

    @staticmethod
    def _swap_in_topdown(obs):
        """Overwrites obs.images with the uint8 top-down rendering (camera dim prepended)."""
        obs.images = np.asarray(255 * obs.topdown_image.copy(), dtype=np.uint8)[None]
        return obs

    def reset(self, reset_state):
        obs, reset_state = super().reset(reset_state)
        return self._swap_in_topdown(obs), reset_state

    def step(self, action):
        return self._swap_in_topdown(super().step(action))
<file_sep>import pickle as pkl
import numpy as np
import os
from robosuite.utils.mjcf_utils import postprocess_model_xml
from gcp.planning.infra.agent.benchmarking_agent import BenchmarkAgent
class BenchmarkAgentLoadHDF5(BenchmarkAgent):
    """Benchmark agent that restores start/goal configurations stored on disk
    (the reverse operation of save_raw_data)."""

    def __init__(self, hyperparams, start_goal_list=None):
        # bugfix: 'super.__init__' referenced the builtin super type itself instead of
        # calling super(), which raises a TypeError at runtime
        super().__init__(hyperparams, start_goal_list)

    def _load_raw_data(self, itr):
        """
        doing the reverse of save_raw_data
        :param itr: iteration index of the stored trajectory
        :return: reset state extracted from the stored observation dict
        """
        # NOTE(review): the path ignores `itr` — presumably this should be
        # 'itr{}.hdf5'.format(itr); confirm against save_raw_data's naming scheme
        traj = self._start_goal_confs + 'itr.hdf5'

        # NOTE(review): `demo_images` is never defined in this method — the code that
        # loads the image array from the hdf5 file appears to be missing; TODO restore it
        self._demo_images = demo_images.astype(np.float32) / 255.
        self._goal_image = self._demo_images[-1]

        obs_dict = {}  # bugfix: was used via .update() without ever being initialized
        with open('{}/obs_dict.pkl'.format(traj), 'rb') as file:
            obs_dict.update(pkl.load(file))

        self._goal = self.env.get_goal_from_obs(obs_dict)
        reset_state = self.get_reset_state(obs_dict)

        # restore the robosuite model XML when the trajectory recorded one
        if os.path.exists(traj + '/robosuite.xml'):
            with open(traj + '/robosuite.xml', "r") as model_f:
                model_xml = model_f.read()
            xml = postprocess_model_xml(model_xml)
            reset_state['robosuite_xml'] = xml

        return reset_state

    def get_reset_state(self, obs_dict):
        """Extracts the environment reset state from a loaded observation dict."""
        return self.env.get_reset_from_obs(obs_dict)
<file_sep>import numpy as np
from blox import AttrDict
from gcp.planning.tree_optimizer import HierarchicalTreeLatentOptimizer, ImageHierarchicalTreeLatentOptimizer
class CEMSampler:
    """Abstract base class specifying the sampler interface used by the CEM optimization loop."""

    def __init__(self, clip_val, n_steps, action_dim, initial_std):
        self._initial_std = initial_std
        self._action_dim = action_dim
        self._n_steps = n_steps
        self._clip_val = clip_val
        self.init()

    def init(self):
        """Sets up the sampling distributions; must be implemented by subclasses."""
        raise NotImplementedError

    def sample(self, n_samples):
        """Draws n_samples candidates from the current sampling distributions."""
        raise NotImplementedError

    def fit(self, data, scores):
        """Updates the sampling distributions to fit the given data."""
        raise NotImplementedError

    def get_dists(self):
        """Exposes a representation of the current sampling distributions."""
        raise NotImplementedError
class FlatCEMSampler(CEMSampler):
    """Gaussian CEM sampler over flat (time x action) arrays."""

    def init(self):
        """Resets the mean to zero and the std to the configured initial value."""
        shape = (self._n_steps, self._action_dim)
        self.mean = np.zeros(shape)
        self.std = np.ones(shape) * self._initial_std

    def sample(self, n_samples):
        """Draws n_samples action sequences, clipped to the allowed action range."""
        draws = np.random.normal(loc=self.mean, scale=self.std,
                                 size=(n_samples, self._n_steps, self._action_dim))
        return np.clip(draws, -self._clip_val, self._clip_val)

    def fit(self, data, scores):
        """Refits mean/std to the elite samples; scores are unused by this sampler."""
        self.mean = data.mean(axis=0)
        self.std = data.std(axis=0)

    def get_dists(self):
        return AttrDict(mean=self.mean, std=self.std)
class PDDMSampler(FlatCEMSampler):
    """Samples correlated noise, uses path integral formulation to fit it."""
    BETA = 0.5  # noise correlation factor
    GAMMA = 1.0  # reward weighting factor

    def sample(self, n_samples):
        """Draws n_samples sequences of temporally correlated noise around the current mean.

        Noise at step i is an exponential moving average (weight BETA on the fresh
        Gaussian draw) of the noise at step i-1, then added to the mean and clipped.
        """
        noise = np.random.normal(loc=np.zeros_like(self.mean), scale=self.std,
                                 size=(n_samples, self._n_steps, self._action_dim))
        correlated_noise, n_i = [], np.zeros((n_samples, self._action_dim))
        for i in range(noise.shape[1]):
            u_i = noise[:, i]
            n_i = self.BETA * u_i + (1 - self.BETA) * n_i
            correlated_noise.append(n_i)
        correlated_noise = np.stack(correlated_noise, axis=1)
        return np.clip(correlated_noise + self.mean[None], -self._clip_val, self._clip_val)

    def fit(self, actions, scores):
        """Assumes that scores are better the lower (ie cost function output)."""
        # path-integral update: each sample is weighted by exp(-GAMMA * score), normalized
        self.mean = np.sum(actions * np.exp(-self.GAMMA * scores)[:, None, None], axis=0) \
                    / np.sum(np.exp(-self.GAMMA * scores))
class SimpleTreeCEMSampler(FlatCEMSampler):
    """CEM sampler for tree-GCPs that optimizes all levels at once with same number of samples
    (ie like flat CEM sampler)."""

    def __init__(self, *args, n_level_hierarchy, **kwargs):
        self._n_layer_hierarchy = n_level_hierarchy
        # NOTE(review): extra **kwargs are silently dropped here — confirm this is intended
        super().__init__(*args)
        # number of nodes in a full binary tree of the given depth
        # NOTE(review): _n_steps is set AFTER super().__init__(), so the parent's init()
        # ran with the original n_steps; harmless for subclasses that override init(),
        # but confirm for direct use of this class
        self._n_steps = 2**n_level_hierarchy - 1
class HierarchicalTreeCEMSampler(SimpleTreeCEMSampler):
    """Tree-GCP CEM sampler that optimizes the layers of the hierarchy sequentially, starting from the top."""

    def __init__(self, *args, sampling_rates_per_layer, subgoal_cost_fcn, ll_cost_fcn, n_ll_samples, **kwargs):
        self._sampling_rates_per_layer = sampling_rates_per_layer
        self._subgoal_cost_fcn = subgoal_cost_fcn
        self._ll_cost_fcn = ll_cost_fcn
        self._n_ll_samples = n_ll_samples
        super().__init__(*args, **kwargs)
        assert self._n_layer_hierarchy >= len(sampling_rates_per_layer)  # not enough layers in tree

    def init(self):
        """Creates the latent-tree optimizer that holds the per-layer sampling distributions."""
        self._optimizer = HierarchicalTreeLatentOptimizer(self._action_dim,
                                                          self._sampling_rates_per_layer.copy(),
                                                          self._n_layer_hierarchy,
                                                          self._subgoal_cost_fcn,
                                                          self._ll_cost_fcn,
                                                          self._n_ll_samples)

    def sample(self, n_samples):
        """Samples latents from the tree optimizer (which determines its own sample count),
        clipped to the allowed range."""
        raw_actions = self._optimizer.sample()
        return np.clip(raw_actions, -self._clip_val, self._clip_val)

    def optimize(self, rollouts, goal):
        """Runs one optimization step over the rollouts; returns the best rollout and its cost."""
        best_rollout, best_cost = self._optimizer.optimize(rollouts, goal)
        if (best_rollout[-1] != goal).any():  # this can happen if too few frames on right tree side
            best_rollout = np.concatenate((best_rollout, goal[None]))
        return [best_rollout], best_cost

    def fit(self, *args, **kwargs):
        """Does not currently support refitting distributions."""
        # bugfix: signature was missing `self` (it was silently absorbed by *args)
        pass

    def get_dists(self):
        return AttrDict(mean=0., std=1.)  # dummy values

    @property
    def append_latent(self):
        return True  # we need latent rollouts to compute subgoal costs

    @property
    def fully_optimized(self):
        return self._optimizer.fully_optimized
class ImageHierarchicalTreeCEMSampler(HierarchicalTreeCEMSampler):
    """Hierarchical GCP-tree CEM sampler for image prediction GCPs."""

    def init(self):
        # image-specific variant of the tree latent optimizer
        self._optimizer = ImageHierarchicalTreeLatentOptimizer(self._action_dim,
                                                               self._sampling_rates_per_layer.copy(),
                                                               self._n_layer_hierarchy,
                                                               self._subgoal_cost_fcn,
                                                               self._ll_cost_fcn,
                                                               self._n_ll_samples)

    def optimize(self, rollouts, goal):
        """Optimizes and appends the goal image if the best rollout does not already end in it.

        The goal is compared channel-first (transpose(2, 0, 1)), implying rollout frames
        are CHW while the goal arrives HWC.
        """
        best_rollout, best_cost = self._optimizer.optimize(rollouts, goal)
        if (best_rollout[-1] != goal[0].transpose(2, 0, 1)).any():  # can happen if too few frames on right tree side
            best_rollout = np.concatenate((best_rollout, goal.transpose(0, 3, 1, 2)))
        if not hasattr(best_cost, "__len__"):
            best_cost = [best_cost]  # need to return array-shaped cost, no scalar
        return [best_rollout], best_cost
<file_sep>import glob
import itertools
import os
import cv2
import numpy as np
from blox import AttrDict
from gcp.planning.infra.agent.utils.hdf5_saver import HDF5SaverBase
from tqdm import tqdm
def process_frame(frame_in, frame_output_size=(64, 64)):
    """Standardizes input frame width and height, and removes dummy channels.
    """
    # NB: OpenCV takes new size as (X, Y), not (Y, X)!!!
    resized = cv2.resize(
        frame_in.astype(np.float32),
        (frame_output_size[1], frame_output_size[0])).astype(frame_in.dtype)
    n_channels = resized.shape[-1]
    if n_channels == 1:
        # single dummy channel -> drop it
        return resized[..., 0]
    if n_channels == 3:
        # OpenCV frames are BGR; reverse the channel axis to get RGB
        return resized[..., ::-1]
    raise ValueError("Unexpected frame shape!")
def read_video(video_path, n_downsample=8):
    """Reads a video from a file and returns as an array."""
    assert os.path.isfile(video_path)
    cap = cv2.VideoCapture(video_path)

    frames = []
    frame_idx = 0
    while cap.isOpened():
        ok, frame = cap.read()
        if not ok:
            # end of stream: releasing makes isOpened() False, terminating the loop
            cap.release()
        elif frame_idx % n_downsample == 0:
            # keep every n_downsample-th frame only
            frames.append(process_frame(frame))
        frame_idx += 1
    return np.asarray(frames)
class H36Maker(HDF5SaverBase):
    """Converts Human3.6M video files into hdf5 trajectory shards."""

    def __init__(self, data_dir, folders, save_dir, traj_per_file, offset=0, split=(0.90, 0.05, 0.05), n_downsample=8):
        super().__init__(save_dir, traj_per_file, offset, split)
        # collect all video files found under <data_dir>/<folder>/Videos for every subject folder
        data_folders = list([data_dir + '/' + folder + '/Videos' for folder in folders])
        listlist_names = list([glob.glob(folder + '/*') for folder in data_folders])
        self.filenames = list(itertools.chain(*listlist_names))
        self.max_seq_len = 1500
        self.n_downsample = n_downsample  # temporal subsampling factor passed to read_video

    def get_traj(self, name):
        """Reads one video file and wraps it as a trajectory AttrDict (images + all-ones pad mask)."""
        vid = read_video(name, self.n_downsample)
        print(vid.shape)
        # pad_mask = np.concatenate([np.ones(vid.shape[0]), np.zeros(self.max_seq_len - vid.shape[0])])
        pad_mask = np.ones(vid.shape[0])
        return AttrDict(images=vid, pad_mask=pad_mask)

    def make_phase(self, filenames, phase):
        """Converts all videos to trajectories and writes them out in chunks of traj_per_file.

        NOTE(review): a trailing chunk smaller than traj_per_file is never saved — confirm intended.
        """
        traj_list = []
        for name in tqdm(filenames):
            traj = self.get_traj(name)
            traj_list.append(traj)

            if len(traj_list) == self.traj_per_file:
                self.save_hdf5(traj_list, phase)
                traj_list = []
if __name__ == '__main__':
    # Human3.6M subject splits for the train/val/test phases
    folders_list = [['S1', 'S5', 'S6', 'S7'], ['S8'], ['S9', 'S11']]
    phase_list = ['train', 'val', 'test']
    for i in range(3):
        # split=(1, 0, 0): each maker writes all of its trajectories into one phase
        maker = H36Maker('/parent/nfs/kun1/users/oleg/h36m', folders_list[i],
                         '/workspace/recplan_data/h36m_long', 10, split=(1, 0, 0), n_downsample=1)
        maker.make_phase(maker.filenames, phase_list[i])
<file_sep>from contextlib import contextmanager
import numpy as np
import torch
from blox import AttrDict
from blox.tensor.ops import batchwise_index
from blox.torch.losses import L2Loss
from blox.torch.subnetworks import Predictor
from gcp.prediction.models.auxilliary_models.base_model import BaseModel
from gcp.prediction.training.checkpoint_handler import CheckpointHandler
class CostModel(BaseModel):
    """Regresses the cost (e.g. path distance) between pairs of encoded states."""

    def __init__(self, params, logger):
        super().__init__(logger)
        self._hp = self._default_hparams()
        params.update({'use_convs': False})  # cost model operates on latents, not raw images
        self.override_defaults(params)  # override defaults with config file
        self.postprocess_params()
        self.build_network()
        if self._hp.cost_fcn is not None:
            self._gt_cost_fcn = self._hp.cost_fcn(True)

    def _default_hparams(self):
        # put new parameters in here:
        default_dict = {
            'nz_enc': 128,  # number of dimensions in encoder-latent space
            'nz_mid': 128,  # number of hidden units in fully connected layer
            'n_processing_layers': 3,  # Number of layers in MLPs
            'checkpt_path': None,
            'load_epoch': None,  # checkpoint epoch which should be loaded, if 'none' loads latest
            'cost_fcn': None,  # holds ground truth cost function
            'use_path_dist_cost': False,  # if True, uses fast path distance cost computation, ignores cost_fcn
        }

        # misc params
        default_dict.update({
            'use_skips': False,
            'dense_rec_type': None,
            'device': None,
            'randomize_length': False,
        })

        # add new params to parent params
        parent_params = super()._default_hparams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def build_network(self, build_encoder=True):
        # MLP from a concatenated latent pair to a scalar cost; gradients do not flow back
        self.cost_pred = Predictor(self._hp, self._hp.nz_enc * 2, 1, detached=True)

    def forward(self, inputs):
        """Forward pass at training time."""
        # randomly sample start and end state, compute GT cost
        start, end, gt_cost = self._fast_path_dist_cost(inputs) if self._hp.use_path_dist_cost \
            else self._general_cost(inputs)

        # compute cost estimate
        cost_pred = self.cost_pred(torch.cat([start, end], dim=-1))

        output = AttrDict(
            cost=cost_pred,
            cost_target=gt_cost,
        )
        return output

    def loss(self, inputs, outputs, add_total=True):
        # L2 regression of the predicted cost against the ground-truth cost
        return AttrDict(
            cost_estimation=L2Loss(1.0)(outputs.cost, outputs.cost_target)
        )

    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        # NOTE(review): passes 4 args to _log_losses while the inverse models pass 3
        # (losses, step, phase) — confirm against the BaseModel._log_losses signature
        super()._log_losses(losses, step, log_images, phase)

    @contextmanager
    def val_mode(self):
        # no behavior changes at validation time
        yield

    def _fast_path_dist_cost(self, inputs):
        """Vectorized computation of path distance cost."""
        # sample start goal indices; end index is always strictly after the start index
        batch_size = inputs.end_ind.shape[0]
        start_idx = torch.rand((batch_size,), device=inputs.end_ind.device) * (inputs.end_ind.float() - 1)
        end_idx = torch.rand((batch_size,), device=inputs.end_ind.device) * (inputs.end_ind.float() - (start_idx + 1)) + (start_idx + 1)
        start_idx, end_idx = start_idx.long(), end_idx.long()

        # get start goal latents (detached -> no encoder gradients)
        start = batchwise_index(inputs.model_enc_seq, start_idx).detach()
        end = batchwise_index(inputs.model_enc_seq, end_idx).detach()

        # compute path distance cost as the cumulative step-wise distance between the two indices
        cum_diff = torch.cumsum(torch.norm(inputs.traj_seq[:, 1:] - inputs.traj_seq[:, :-1], dim=-1), dim=1)
        cum_diff = torch.cat((torch.zeros((batch_size, 1), dtype=cum_diff.dtype, device=cum_diff.device),
                              cum_diff), dim=1)  # prepend 0
        gt_cost = batchwise_index(cum_diff, end_idx) - batchwise_index(cum_diff, start_idx)

        return start, end, gt_cost[:, None].detach()

    def _general_cost(self, inputs):
        """Computes cost with generic cost function, not vectorized."""
        batch_size = inputs.end_ind.shape[0]
        start, end, gt_cost = [], [], []
        for b in range(batch_size):
            # sample a start index and a strictly later end index per batch element
            start_idx = np.random.randint(0, inputs.end_ind[b].cpu().numpy(), 1)[0]
            end_idx = np.random.randint(start_idx + 1, inputs.end_ind[b].cpu().numpy() + 1, 1)[0]
            start.append(inputs.model_enc_seq[b, start_idx])
            end.append(inputs.model_enc_seq[b, end_idx])
            gt_cost.append(self._gt_cost_fcn([inputs.traj_seq[b, start_idx:end_idx+1].data.cpu().numpy()],
                                             inputs.traj_seq[b, end_idx].data.cpu().numpy()))
        start, end = torch.stack(start).detach(), torch.stack(end).detach()  # no gradients in encoder
        gt_cost = torch.tensor(np.stack(gt_cost), device=start.device, requires_grad=False).float()
        return start, end, gt_cost

    @property
    def input_dim(self):
        return self._hp.nz_enc
class TestTimeCostModel(CostModel):
    """Cost model wrapper for inference: moves to GPU when available and restores
    weights from a checkpoint at construction time."""

    def __init__(self, params, logger):
        super().__init__(params, logger)
        if torch.cuda.is_available():
            self.cuda()
            self.device = torch.device('cuda')
        else:
            self.device = torch.device('cpu')
        assert self._hp.checkpt_path is not None
        # loads the configured epoch, or the latest checkpoint when load_epoch is unset
        load_epoch = self._hp.load_epoch if self._hp.load_epoch is not None else 'latest'
        weights_file = CheckpointHandler.get_resume_ckpt_file(load_epoch, self._hp.checkpt_path)
        success = CheckpointHandler.load_weights(weights_file, self, submodule_name='cost_mdl')
        if not success: raise ValueError("Could not load checkpoint from {}!".format(weights_file))

    def forward(self, inputs):
        """Predicts the cost between two pre-encoded latents.

        :param inputs: mapping with keys 'enc1' and 'enc2'; values are converted to
            torch tensors and moved to the model's device in place.
        """
        for k in inputs:
            if not isinstance(inputs[k], torch.Tensor):
                inputs[k] = torch.Tensor(inputs[k])
            if not inputs[k].device == self.device:
                inputs[k] = inputs[k].to(self.device)
        return self.cost_pred(inputs['enc1'], inputs['enc2'])
<file_sep>from itertools import chain
import torch
import torch.nn as nn
from blox.tensor.ops import concat_inputs
from blox.torch.subnetworks import HiddenStatePredictorModel
from blox.torch.recurrent_modules import ZeroLSTMCellInitializer, MLPLSTMCellInitializer
class SumTreeHiddenStatePredictorModel(HiddenStatePredictorModel):
    """ A HiddenStatePredictor for tree morphologies. Sums the parents' hidden states. """

    def forward(self, hidden1, hidden2, *inputs):
        # element-wise sum of the two parent hidden states
        hidden_state = hidden1 + hidden2
        return super().forward(hidden_state, *inputs)
class LinTreeHiddenStatePredictorModel(HiddenStatePredictorModel):
    """ A HiddenStatePredictor for tree morphologies. Combines the parents' hidden states
    via a learned linear projection of their concatenation. """

    def build_network(self):
        super().build_network()
        # projects the concatenated parent states back down to a single state
        self.projection = nn.Linear(self.get_state_dim() * 2, self.get_state_dim())

    def forward(self, hidden1, hidden2, *inputs):
        hidden_state = self.projection(concat_inputs(hidden1, hidden2))
        return super().forward(hidden_state, *inputs)
class SplitLinTreeHiddenStatePredictorModel(HiddenStatePredictorModel):
    """ A HiddenStatePredictor for tree morphologies. Combines the parents' hidden states
    with a separate learned projection per LSTM-layer chunk (h and c of each layer). """

    def build_network(self):
        super().build_network()
        # one projection per (h, c) chunk of every LSTM layer
        split_state_size = int(self.get_state_dim() / (self._hp.n_lstm_layers * 2))

        if self._hp.use_conv_lstm:
            projection = lambda: nn.Conv2d(split_state_size * 2, split_state_size, kernel_size=3, padding=1)
        else:
            projection = lambda: nn.Linear(split_state_size * 2, split_state_size)

        self.projections = torch.nn.ModuleList([projection() for _ in range(self._hp.n_lstm_layers*2)])

    def forward(self, hidden1, hidden2, *inputs):
        # split each parent state into per-layer (h, c) chunks, project each pair, re-concatenate
        chunked_hidden1 = list(chain(*[torch.chunk(h, 2, 1) for h in torch.chunk(hidden1, self._hp.n_lstm_layers, 1)]))
        chunked_hidden2 = list(chain(*[torch.chunk(h, 2, 1) for h in torch.chunk(hidden2, self._hp.n_lstm_layers, 1)]))
        chunked_projected = [projection(concat_inputs(h1, h2))
                             for projection, h1, h2 in zip(self.projections, chunked_hidden1, chunked_hidden2)]
        hidden_state = torch.cat(chunked_projected, dim=1)
        return super().forward(hidden_state, *inputs)
def build_tree_lstm(hp, input_dim, output_dim):
    """Instantiates the tree-LSTM subgoal predictor selected via hp.tree_lstm,
    together with its matching hidden-state initializer."""
    cell_classes = {
        'sum': SumTreeHiddenStatePredictorModel,
        'linear': LinTreeHiddenStatePredictorModel,
        'split_linear': SplitLinTreeHiddenStatePredictorModel,
    }
    if hp.tree_lstm not in cell_classes:
        raise ValueError("don't know this TreeLSTM type")
    subgoal_pred = cell_classes[hp.tree_lstm](hp, input_dim, output_dim)
    lstm_initializer = get_lstm_initializer(hp, subgoal_pred)
    return subgoal_pred, lstm_initializer
def get_lstm_initializer(hp, cell):
    """Builds the LSTM hidden-state initializer configured via hp.lstm_init ('zero' or 'mlp')."""
    if hp.lstm_init == 'zero':
        return ZeroLSTMCellInitializer(hp, cell)
    if hp.lstm_init == 'mlp':
        # MLP initializer conditioned on start/goal encodings plus the VAE latent
        return MLPLSTMCellInitializer(hp, cell, 2 * hp.nz_enc + hp.nz_vae)
    raise ValueError('dont know lstm init type {}!'.format(hp.lstm_init))
<file_sep>import os
from contextlib import contextmanager
import torch
import torch.nn as nn
from torch.nn.utils.rnn import pad_sequence as pad_sequence
from blox import batch_apply, AttrDict, optional, rmap
from blox.basic_types import subdict
from blox.tensor.core import find_tensor
from blox.tensor.ops import remove_spatial
from blox.torch.dist import ProbabilisticModel
from blox.torch.encoder_decoder import Encoder, DecoderModule
from blox.torch.layers import BaseProcessingNet
from blox.torch.losses import L2Loss
from blox.torch.modules import Identity, LinearUpdater
from blox.torch.subnetworks import ConvSeqEncodingModule, RecurrentSeqEncodingModule, BidirectionalSeqEncodingModule, \
Predictor
from gcp.prediction.models.auxilliary_models.misc import AttnKeyEncodingModule, LengthPredictorModule, \
ActionConditioningWrapper
from gcp.prediction import global_params
from gcp.prediction.hyperparameters import get_default_gcp_hyperparameters
from gcp.prediction.models.auxilliary_models.base_model import BaseModel
from gcp.prediction.models.auxilliary_models.cost_mdl import CostModel
from gcp.prediction.models.auxilliary_models.inverse_mdl import InverseModel
from gcp.prediction.utils import visualization
class BaseGCPModel(BaseModel):
def __init__(self, params, logger):
    super().__init__(logger)
    self._hp = self._default_hparams()
    self.override_defaults(params)  # override defaults with config file
    self.postprocess_params()
    visualization.PARAMS.hp = self._hp
    assert self._hp.batch_size != -1  # make sure that batch size was overridden
    if self._hp.regress_actions or self._hp.action_conditioned_pred:
        assert self._hp.n_actions != -1  # make sure action dimensionality was overridden
    self.build_network()
    # flags toggled by val_mode() during validation
    self._use_pred_length = False
    self._inv_mdl_full_seq = False
@contextmanager
def val_mode(self, pred_length=True):
    """Sets validation parameters. To be used like: with model.val_mode(): ...<do something>..."""
    # switch all probabilistic children to sampling from the prior while inside the context
    self.call_children('switch_to_prior', ProbabilisticModel)
    self._use_pred_length = pred_length
    self._inv_mdl_full_seq = True
    yield
    # restore training-time behavior
    self.call_children('switch_to_inference', ProbabilisticModel)
    self._use_pred_length = False
    self._inv_mdl_full_seq = False
def postprocess_params(self):
    """Resolves string-valued hparams to callables and publishes the hparams globally."""
    super().postprocess_params()
    activations = {'sigmoid': torch.sigmoid, 'tanh': torch.tanh}
    name = self._hp.action_activation
    if name is not None:
        if name not in activations:
            raise ValueError('Action activation {} not supported!'.format(name))
        self._hp.action_activation = activations[name]
    global_params.hp = self._hp
def _default_hparams(self):
    # extend the parent defaults with the full set of GCP hyperparameters
    parent_params = super()._default_hparams()
    default_dict = get_default_gcp_hyperparameters()
    for k in default_dict.keys():
        parent_params.add_hparam(k, default_dict[k])
    return parent_params
def build_network(self, build_encoder=True):
    """Builds the encoder/decoder and the optional auxiliary networks enabled in hparams.

    :param build_encoder: if False, skips building the encoder (e.g. when it is shared).
    """
    if build_encoder:
        self.encoder = Encoder(self._hp)
    # infer actions in decoder if not using SH-Pred model
    # bugfix: was `is not 'sh_pred'` — identity comparison with a string literal is
    # implementation-dependent (and a SyntaxWarning); use value comparison instead
    self.decoder = DecoderModule(self._hp,
                                 regress_actions=self._hp.regress_actions and self._hp.one_step_planner != 'sh_pred')

    self.build_inference()

    if self._hp.regress_length:
        self.length_pred = LengthPredictorModule(self._hp)

    if self._hp.attach_inv_mdl:
        self.inv_mdl = InverseModel(self._hp.inv_mdl_params, self._logger)

    if self._hp.attach_cost_mdl:
        self.cost_mdl = CostModel(self._hp.cost_mdl_params, self._logger)

    if self._hp.attach_state_regressor:
        self.state_regressor = BaseProcessingNet(self._hp.nz_enc, self._hp.nz_mid, self._hp.state_dim,
                                                 self._hp.n_processing_layers, self._hp.fc_builder)

    if self._hp.separate_cnn_start_goal_encoder:
        from blox.torch.layers import LayerBuilderParams
        from tensorflow.contrib.training import HParams
        # clone the hparams into a convolutional variant for the image start/goal encoder
        with_conv_hp = HParams()
        for k in self._hp.values().keys():
            with_conv_hp.add_hparam(k, self._hp.values()[k])
        #with_conv_hp = self._hp
        with_conv_hp.set_hparam('use_convs', True)
        with_conv_hp.set_hparam('input_nc', 3)
        with_conv_hp.set_hparam('builder', LayerBuilderParams(
            True, self._hp.use_batchnorm, self._hp.normalization, self._hp.predictor_normalization))
        self.start_goal_enc = Encoder(with_conv_hp)
def build_inference(self):
    """Builds the inference encoder (plain / state-conditioned / action-conditioned), the
    attention-key encoder, and the optional KL-weight burn-in schedule."""
    if self._hp.states_inference:
        # condition inference on 2 extra state dimensions
        self.inf_encoder = Predictor(self._hp, self._hp.nz_enc + 2, self._hp.nz_enc)
    elif self._hp.act_cond_inference:
        self.inf_encoder = ActionConditioningWrapper(self._hp, self.build_temporal_inf_encoder())
    else:
        self.inf_encoder = self.build_temporal_inf_encoder()

    self.inf_key_encoder = nn.Sequential(self.build_temporal_inf_encoder(),
                                         AttnKeyEncodingModule(self._hp, add_time=False))

    if self._hp.kl_weight_burn_in is not None:
        # replace the scalar kl_weight hparam with a non-trainable parameter that
        # LinearUpdater anneals from 0 to the configured target value
        target_kl_weight = self._hp.kl_weight
        delattr(self._hp, 'kl_weight')
        del self._hp._hparam_types['kl_weight']
        self._hp.add_hparam('kl_weight', nn.Parameter(torch.zeros(1)))
        self._hp.kl_weight.requires_grad_(False)
        self.kl_weight_updater = LinearUpdater(
            self._hp.kl_weight, self._hp.kl_weight_burn_in, target_kl_weight, name='kl_weight')
def build_temporal_inf_encoder(self):
    """Returns the temporal inference encoder module selected by hparam 'seq_enc'.

    :return: module instance for 'none' / 'conv' / 'lstm' / 'bi-lstm'.
    :raises ValueError: for any other seq_enc value.
    """
    if self._hp.seq_enc == 'none':
        return Identity()
    elif self._hp.seq_enc == 'conv':
        return ConvSeqEncodingModule(self._hp)
    elif self._hp.seq_enc == 'lstm':
        return RecurrentSeqEncodingModule(self._hp)
    elif self._hp.seq_enc == 'bi-lstm':
        return BidirectionalSeqEncodingModule(self._hp)
    else:
        # bugfix: previously fell through and silently returned None for unknown values
        raise ValueError("Unknown seq_enc type {}!".format(self._hp.seq_enc))
    def forward(self, inputs, phase='train'):
        """
        Forward pass at training time.
        :param inputs: AttrDict of batch tensors;
            images shape = batch x time x height x width x channel
            pad mask shape = batch x time, 1 indicates actual image 0 is padded
        :param phase: 'train' enables GT-matched pruning for auxiliary models, anything else
            uses predicted pruning
        :return: AttrDict with the predicted sequence and auxiliary model outputs
        """
        outputs = AttrDict()
        # stash any tensor from the inputs so later code can read device/dtype from it
        inputs.reference_tensor = find_tensor(inputs)
        self.optional_preprocessing(inputs)
        # Get features (mutates `inputs` in place)
        self.run_encoder(inputs)
        # Optionally predict sequence length
        end_ind = self.get_end_ind(inputs, outputs)
        # Generate sequences (implemented by the subclass)
        outputs.update(self.predict_sequence(inputs, outputs, inputs.start_ind, end_ind, phase))
        # Predict other optional values (inverse model, state regressor, cost model)
        outputs.update(self.run_auxilliary_models(inputs, outputs, phase))
        return outputs
    def optional_preprocessing(self, inputs):
        """Applies optional in-place preprocessing of the input batch before encoding:
        zeroing the goal for non-goal-conditioned training, substituting actions for frames
        when training an action-sequence decoder, and defaulting start indices to zero."""
        if self._hp.non_goal_conditioned:
            # Set goal to zeros
            if 'traj_seq' in inputs:
                inputs.traj_seq[torch.arange(inputs.traj_seq.shape[0]), inputs.end_ind] = 0.0
                inputs.traj_seq_images[torch.arange(inputs.traj_seq.shape[0]), inputs.end_ind] = 0.0
            inputs.I_g = torch.zeros_like(inputs.I_g)
            if "I_g_image" in inputs:
                inputs.I_g_image = torch.zeros_like(inputs.I_g_image)
            if inputs.I_0.shape[-1] == 5:  # special hack for maze
                # last two channels are zeroed out as well -- presumably goal-related; TODO confirm
                inputs.I_0[..., -2:] = 0.0
                if "traj_seq" in inputs:
                    inputs.traj_seq[..., -2:] = 0.0
        if self._hp.train_on_action_seqs:
            # swap in actions if we want to train action sequence decoder
            # (one zero action appended so the length matches the frame sequence)
            inputs.traj_seq = torch.cat([inputs.actions, torch.zeros_like(inputs.actions[:, :1])], dim=1)
        if 'start_ind' not in inputs:
            # default: every sequence starts at timestep 0
            inputs.start_ind = torch.zeros(self._hp.batch_size, dtype=torch.long, device=inputs.reference_tensor.device)
    def run_encoder(self, inputs):
        """Encodes (in place, into `inputs`) the GT frame sequence, the inference/attention
        sequences, the start/goal frames, and optionally the action sequence."""
        if 'traj_seq' in inputs:
            # Encode sequence
            if not 'enc_traj_seq' in inputs:
                inputs.enc_traj_seq, inputs.skips = batch_apply(self.encoder, inputs.traj_seq)
                if 'skips' in inputs:
                    inputs.skips = rmap(lambda s: s[:, 0], inputs.skips)  # only use start image activations
            # Encode sequence for inference and attention
            if self._hp.act_cond_inference:
                inputs.inf_enc_seq = self.inf_encoder(inputs.enc_traj_seq, inputs.actions)
            elif self._hp.states_inference:
                inputs.inf_enc_seq = batch_apply(self.inf_encoder,
                                                 inputs.enc_traj_seq, inputs.traj_seq_states[..., None, None])
            else:
                inputs.inf_enc_seq = self.inf_encoder(inputs.enc_traj_seq)
            inputs.inf_enc_key_seq = self.inf_key_encoder(inputs.enc_traj_seq)
        # Encode I_0, I_g
        if self._hp.separate_cnn_start_goal_encoder:
            # convolutional encoder returns spatial maps -- flatten them to vectors
            e_0, inputs.skips = self.start_goal_enc(inputs.I_0_image)
            inputs.e_0 = remove_spatial(e_0)
            inputs.e_g = remove_spatial(self.start_goal_enc(inputs.I_g_image)[0])
        else:
            inputs.e_0, inputs.skips = self.encoder(inputs.I_0)
            inputs.e_g = self.encoder(inputs.I_g)[0]
        # Encode actions
        if self._hp.action_conditioned_pred:
            inputs.enc_action_seq = batch_apply(self.action_encoder, inputs.actions)
    def get_end_ind(self, inputs, outputs):
        """Determines the final-frame index per sequence, optionally sampling it from a
        learned sequence-length predictor.
        :return: long tensor of per-sequence end indices (or None if unavailable)."""
        # TODO clean this up. outputs.end_ind is not currently used anywhere
        end_ind = inputs.end_ind if 'end_ind' in inputs else None
        if self._hp.regress_length:
            # predict total sequence length
            outputs.update(self.length_pred(inputs.e_0, inputs.e_g))
            if self._use_pred_length and (self._hp.length_pred_weight > 0 or end_ind is None):
                end_ind = torch.clamp(torch.argmax(outputs.seq_len_pred.sample().long(), dim=1), min=2)  # min pred seq len needs to be >= 3 for planning
                if self._hp.action_conditioned_pred or self._hp.non_goal_conditioned:
                    # don't use predicted length when action conditioned
                    end_ind = torch.ones_like(end_ind) * (self._hp.max_seq_len - 1)
        outputs.end_ind = end_ind
        return end_ind
    def predict_sequence(self, inputs, outputs, start_ind, end_ind, phase):
        """Generates the predicted sequence between start and goal; implemented by subclasses."""
        raise NotImplementedError("Needs to be implemented in the child class")
    def run_auxilliary_models(self, inputs, outputs, phase):
        """Runs the optional attached models (inverse model, state regressor, cost model)
        on the pruned latent sequence and returns their outputs."""
        aux_outputs = AttrDict()
        if self.prune_sequences:
            # at train time prune via GT matching, otherwise via model predictions
            if phase == 'train':
                inputs.model_enc_seq = self.get_matched_pruned_seqs(inputs, outputs)
            else:
                inputs.model_enc_seq = self.get_predicted_pruned_seqs(inputs, outputs)
            inputs.model_enc_seq = pad_sequence(inputs.model_enc_seq, batch_first=True)
            # drop trailing singleton spatial dims if the latents are convolutional feature maps
            if len(inputs.model_enc_seq.shape) == 5:
                inputs.model_enc_seq = inputs.model_enc_seq[..., 0, 0]
            if inputs.model_enc_seq.shape[1] == 0:
                # This happens usually in the beginning of training where the pruning is inaccurate
                return aux_outputs
            if self._hp.attach_inv_mdl and phase == 'train':
                aux_outputs.update(self.inv_mdl(inputs, full_seq=self._inv_mdl_full_seq or self._hp.train_inv_mdl_full_seq))
            if self._hp.attach_state_regressor:
                regressor_inputs = inputs.model_enc_seq
                if not self._hp.supervised_decoder:
                    # don't backprop the regression loss into the latents unless decoding is supervised
                    regressor_inputs = regressor_inputs.detach()
                aux_outputs.regressed_state = batch_apply(self.state_regressor, regressor_inputs)
            if self._hp.attach_cost_mdl and self._hp.run_cost_mdl and phase == 'train':
                # There is an issue here since SVG doesn't output a latent for the first image
                # Beyond conceptual problems, this breaks if end_ind = 199
                aux_outputs.update(self.cost_mdl(inputs))
        return aux_outputs
    def loss(self, inputs, outputs, log_error_arr=False):
        """Collects all model losses into an AttrDict of named loss terms (each with value/weight)."""
        losses = AttrDict()
        # Length prediction loss
        if self._hp.regress_length:
            losses.update(self.length_pred.loss(inputs, outputs))
        # Dense Reconstruction loss
        losses.update(self.dense_rec.loss(inputs, outputs.dense_rec, log_error_arr))
        # Inverse Model loss
        if self._hp.attach_inv_mdl:
            losses.update(self.inv_mdl.loss(inputs, outputs, add_total=False))
        # Cost model loss
        if self._hp.attach_cost_mdl and self._hp.run_cost_mdl:
            losses.update(self.cost_mdl.loss(inputs, outputs))
        # State regressor cost (masked by pad_mask so padded timesteps don't contribute)
        if self._hp.attach_state_regressor:
            reg_len = outputs.regressed_state.shape[1]
            losses.state_regression = L2Loss(1.0)(outputs.regressed_state, inputs.traj_seq_states[:, :reg_len],
                                                  weights=inputs.pad_mask[:, :reg_len][:, :, None])
        # Negative Log-likelihood (upper bound) -- weight 0, logged for monitoring only
        if 'dense_img_rec' in losses and 'kl' in losses:
            losses.nll = AttrDict(value=losses.dense_img_rec.value + losses.kl.value, weight=0.0)
        return losses
def get_total_loss(self, inputs, losses):
# compute total loss
## filtering is important when some losses are nan
## the unsqueeze is important when some of the weights or losses are 1-dim tensors.
# TODO use the function from blox
total_loss = torch.stack([loss[1].value[None] * loss[1].weight for loss in
filter(lambda x: x[1].weight > 0, losses.items())]).sum()
total_loss = total_loss / torch.prod(torch.tensor(inputs.traj_seq.shape[1:]))
if torch.isnan(total_loss).any():
import pdb; pdb.set_trace()
return AttrDict(value=total_loss)
    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        """Logs scalar metrics and, when log_images is set, qualitative trajectory visualizations;
        optionally dumps encodings to disk."""
        super().log_outputs(outputs, inputs, losses, step, log_images, phase)
        if self._hp.attach_inv_mdl:
            self.inv_mdl.log_outputs(outputs, inputs, losses, step, log_images, phase)
        if log_images:
            dataset = self._hp.dataset_class
            if 'regressed_state' in outputs:
                self._logger.log_dataset_specific_trajectory(outputs, inputs, "regressed_state_topdown", step, phase,
                                                             dataset, predictions=outputs.regressed_state,
                                                             end_inds=inputs.end_ind)
            if 'regressed_state' in outputs and self._hp.attach_inv_mdl and 'actions' in outputs:
                if len(outputs.actions.shape) == 3:
                    actions = outputs.actions
                else:
                    # Training, need to get the action sequence
                    actions = self.inv_mdl(inputs, full_seq=True).actions
                # integrate the actions from the regressed start state to obtain a trajectory
                cum_action_traj = torch.cat((outputs.regressed_state[:, :1], actions), dim=1).cumsum(1)
                self._logger.log_dataset_specific_trajectory(outputs, inputs, "action_traj_topdown", step, phase, dataset,
                                                             predictions=cum_action_traj, end_inds=inputs.end_ind)
            if not self._hp.use_convs:
                self._logger.log_dataset_specific_trajectory(outputs, inputs, "prediction_topdown", step, phase, dataset)
                # TODO remove this
                if self._hp.log_states_2d:
                    self._logger.log_states_2d(outputs, inputs, "prediction_states_2d", step, phase)
                if self._hp.train_on_action_seqs:
                    # predictions are action sequences -- integrate them for visualization
                    action_seq = outputs.dense_rec.images
                    cum_action_seq = torch.cumsum(action_seq, dim=1)
                    self._logger.log_dataset_specific_trajectory(outputs, inputs, "cum_action_prediction_topdown", step,
                                                                 phase, dataset, predictions=cum_action_seq,
                                                                 end_inds=inputs.end_ind)
        if self._hp.dump_encodings:
            os.makedirs(self._logger._log_dir + '/stored_data/', exist_ok=True)
            torch.save(subdict(inputs, ['enc_traj_seq', 'traj_seq', 'traj_seq_states', 'actions']),
                       self._logger._log_dir + '/stored_data/encodings_{}'.format(step))
        if self._hp.dump_encodings_inv_model:
            os.makedirs(self._logger._log_dir + '/stored_data_inv_model/', exist_ok=True)
            torch.save(subdict(inputs, ['model_enc_seq', 'traj_seq_states', 'actions']),
                       self._logger._log_dir + '/stored_data_inv_model/encodings_{}.th'.format(step))
    @property
    def prune_sequences(self):
        """True if any attached auxiliary model consumes pruned latent sequences."""
        return self._hp.attach_inv_mdl or (self._hp.attach_cost_mdl and self._hp.run_cost_mdl) \
               or self._hp.attach_state_regressor
    def get_predicted_pruned_seqs(self, inputs, outputs):
        """Returns pruned latent sequences from model predictions alone; implemented by subclasses."""
        raise NotImplementedError
def get_matched_pruned_seqs(self, inputs, outputs):
name = 'encodings' if outputs.dense_rec else 'e_g_prime' # for SVG vs tree
if 'dtw' in self._hp.matching_type:
# use precomputed matching dists for pruning
matched_latents = self.tree_module.binding.get_matched_sequence(outputs.tree, 'e_g_prime')
batch, time = inputs.traj_seq.shape[:2]
model_enc_seq = [matched_latents[i_ex, :inputs.end_ind[i_ex] + 1] for i_ex in range(batch)]
model_enc_seq = model_enc_seq
else:
# batched collection for SVG and balanced tree
model_enc_seq = self.dense_rec.get_all_samples_with_len(
inputs.end_ind, outputs, inputs, 'basic', name=name)[0]
return model_enc_seq
<file_sep>import numpy as np
import torch
import torch.nn as nn
from blox import AttrDict
from blox.tensor.ops import batchwise_index
from blox.torch.losses import L2Loss, PenaltyLoss
from blox.torch.ops import batch_cdist, like, list2ten
from gcp.prediction.utils.visualization import draw_frame
from gcp.prediction import global_params
class LossAveragingCriterion(nn.Module):
    """Reconstruction criterion that distributes a per-node Gaussian image likelihood across
    tree nodes according to the soft GT-frame matching distribution."""
    def __init__(self, hp):
        self._loss = L2Loss
        self._hp = hp
        super().__init__()
    def loss(self, outputs, targets, weights, pad_mask, weight, log_sigma):
        """Computes the matching-weighted Gaussian NLL between predicted node images and targets.

        :param outputs: model outputs providing tree.bf.images and gt_match_dists
        :param targets: GT frame sequence
        :param weights: additional multiplicative per-element weights
        :param pad_mask: 1 for real frames, 0 for padding
        :param weight: scalar weight of the resulting loss term
        :param log_sigma: log std-dev of the output distribution
        """
        predictions = outputs.tree.bf.images
        gt_match_dists = outputs.gt_match_dists
        # Compute likelihood
        loss_val = batch_cdist(predictions, targets, reduction='sum')
        # fold the per-node weight hacks into the sigma term
        log_sigmas = log_sigma - WeightsHacker.hack_weights(torch.ones_like(loss_val)).log()
        n = np.prod(predictions.shape[2:])
        loss_val = 0.5 * loss_val * torch.pow(torch.exp(-log_sigmas), 2) + n * (log_sigmas + 0.5 * np.log(2 * np.pi))
        # Weigh by matching probability
        match_weights = gt_match_dists
        match_weights = match_weights * pad_mask[:, None]  # Note, this is now unnecessary since both tree models handle it already
        loss_val = loss_val * match_weights * weights
        losses = AttrDict()
        losses.dense_img_rec = PenaltyLoss(weight, breakdown=2)(loss_val, log_error_arr=True, reduction=[-1, -2])
        # if self._hp.top_bias > 0.0:
        #     losses.n_top_bias_nodes = PenaltyLoss(
        #         self._hp.supervise_match_weight)(1 - WeightsHacker.get_n_top_bias_nodes(targets, weights))
        return losses
    def get_soft_estimates(self, gt_match_dists, vals):
        """ This function is only used to produce visualization now. Move it. """
        # soft_matched_estimates = torch.sum(add_n_dims(gt_match_dists, len(vals.shape)-2) * vals[:, :, None], dim=1)
        def soft_average(values):
            """ Averages per-node values to compute per-frame values """
            return torch.einsum('int, in...->it...', gt_match_dists, values).detach()
        soft_matched_estimates = soft_average(vals)
        # Mark top nodes
        if self._hp.use_convs:
            # tint frames by how much probability mass the first 3 (top) nodes contribute
            color = torch.zeros(vals.shape[:2]).to(vals.device)
            color[:, :3] = 0.5
            color_t = soft_average(color)
            soft_matched_estimates = draw_frame(soft_matched_estimates, color_t)
        return soft_matched_estimates
class WeightsHacker():
    """Static helpers that re-weight per-node losses to bias the matching toward or away from
    particular tree nodes (top-of-tree nodes, leaves), driven by global hyperparameters."""
    @staticmethod
    def hack_weights(weights):
        """Applies leaf and top-node biases to breadth-first ordered node weights (batch x nodes ...)."""
        if abs(global_params.hp.leaves_bias) > 0.0:
            # down-weight the last max_seq_len nodes (the leaf layer in breadth-first order)
            w_1 = weights[:, :-global_params.hp.max_seq_len]
            w_2 = weights[:, -global_params.hp.max_seq_len:] * (1 - global_params.hp.leaves_bias)
            weights = torch.cat([w_1, w_2], 1)
        if global_params.hp.top_bias != 1.0:
            # scale the first n_top_bias_nodes (top of the tree in breadth-first order)
            w_1 = weights[:, :global_params.hp.n_top_bias_nodes] * global_params.hp.top_bias
            w_2 = weights[:, global_params.hp.n_top_bias_nodes:]
            weights = torch.cat([w_1, w_2], 1)
        return weights
    @staticmethod
    def hack_weights_df(weights):
        """Applies the top-node bias to depth-first ordered node weights."""
        # TODO implement bf2df for indices and use here
        if global_params.hp.top_bias != 1.0:
            # `np.int` was removed in NumPy 1.24 -- use the builtin int instead
            n_top_bias_layers = int(np.log2(global_params.hp.n_top_bias_nodes + 1))
            depth = int(np.log2(weights.shape[1] + 1))
            m = torch.ones(weights.shape[:2], device=weights.device)
            for l in range(n_top_bias_layers):
                # nodes of layer l occur at this stride/offset in depth-first order
                m[:, 2 ** (depth - l - 1) - 1:: 2 ** (depth - l)] = global_params.hp.top_bias
            weights = weights * m[:, :, None]
        return weights
    @staticmethod
    def get_n_top_bias_nodes(targets, weights):
        """ Return the probability that the downweighted nodes match the noisy frame"""
        # NOTE(review): WeightsHacker.get_index is not defined in this class/module as visible
        # here -- confirm where it is provided before relying on this path.
        inds = WeightsHacker.get_index(targets)
        noise_frames = batchwise_index(weights, inds, 2)
        n = noise_frames.mean(0)[:global_params.hp.n_top_bias_nodes].sum() / global_params.hp.top_bias
        return n
    @staticmethod
    def can_get_index():
        # NOTE(review): PointMassDataset is not imported in this module -- this comparison
        # would raise NameError if reached; confirm the intended import.
        return 'dataset_conf' in global_params.data_conf and 'dataset_class' in global_params.data_conf.dataset_conf \
               and global_params.data_conf.dataset_conf.dataset_class == PointMassDataset
    @staticmethod
    def can_get_d2b(inputs):
        """Heuristically detects Sawyer-style data: 3-dim actions whose last channel only takes +/-1."""
        if 'actions' in inputs and inputs.actions.shape[2] == 3 and \
                torch.equal(torch.unique(inputs.actions[:, :, 2]), like(list2ten, inputs.actions)([-1, 1])):
            # Looks like sawyer
            return True
        else:
            return False
    @staticmethod
    def distance2bottleneck(inputs, outputs):
        """Measures the temporal distance between the top-3 matched tree nodes and the gripper
        pick/place events, averaged over the batch."""
        dists = []
        for i in range(inputs.actions.shape[0]):
            gripper = inputs.actions[i, :, -1]
            # gripper channel jumps by +/-2 when toggling between -1 and 1
            picks = (gripper[1:] == gripper[:-1] + 2).nonzero()[:, 0]
            places = (gripper[1:] == gripper[:-1] - 2).nonzero()[:, 0]
            bottlenecks = torch.cat([picks, places], -1)
            top_inds = outputs.tree.bf.match_dist[i, :3].argmax(-1)
            # top_inds = torch.from_numpy(np.asarray([20, 40, 60])).long().cuda()
            def closest_point_distance(a, b):
                # for every element of a, the distance to the nearest element of b
                mat = torch.abs(a[:, None] - b[None, :])
                ind = mat.argmin(-1)
                return torch.abs(a - b[ind])
            dist = closest_point_distance(top_inds, bottlenecks)
            dists.append(dist)
        return torch.mean(torch.stack(dists).float(), 0)
<file_sep>import datetime
import os
from functools import partial
import dload
import numpy as np
import torch
from blox import AttrDict
from blox.tensor.ops import batchwise_index
def select_e_0_e_g(seq, start_ind, end_ind):
    """Picks the start and goal encodings out of a batched sequence.

    :param seq: tensor batch x time x ...
    :param start_ind: per-example start timestep indices
    :param end_ind: per-example end timestep indices
    :return: tuple (e_0, e_g) of the selected start and goal encodings
    """
    return batchwise_index(seq, start_ind), batchwise_index(seq, end_ind)
def get_end_ind(pad_mask):
    """
    Computes the index of the last valid (unpadded) frame for every sequence in the batch.
    :param pad_mask: torch tensor, batch_size x max_seq_len, with 1 where there is an actual
        image and 0 where there's padding
    :return: tensor of shape batch_size holding the last valid index per sequence
    """
    seq_len = pad_mask.shape[1]
    # weight every valid position by its timestep; the argmax is then the last valid index
    time_ids = torch.arange(seq_len, dtype=torch.float, device=pad_mask.device)
    return torch.argmax(pad_mask * time_ids, 1)
def get_pad_mask(end_ind, max_seq_len):
    """
    Builds a padding mask from per-sequence end indices.
    :param end_ind: torch tensor or numpy array of shape batch_size with the index of the
        last valid frame per sequence
    :param max_seq_len: total (padded) sequence length
    :return: float mask of shape batch_size x max_seq_len, 1 up to and including end_ind,
        0 afterwards; same array framework as end_ind
    """
    use_torch = isinstance(end_ind, torch.Tensor)
    if use_torch:
        arange_fn = partial(torch.arange, dtype=torch.long, device=end_ind.device)
    else:
        arange_fn = np.arange
    pad_mask = arange_fn(max_seq_len) <= end_ind[:, None]
    if use_torch:
        pad_mask = pad_mask.float()
    else:
        # `np.float` was removed in NumPy 1.24 -- use the builtin float instead
        pad_mask = pad_mask.astype(float)
    return pad_mask
def datetime_str():
    """Returns the current local time formatted as '_YYYY-MM-DD_HH-MM-SS' (used as a path suffix)."""
    now = datetime.datetime.now()
    return now.strftime("_%Y-%m-%d_%H-%M-%S")
def make_path(exp_dir, conf_path, prefix, make_new_dir):
    """Builds an experiment output path that mirrors the config file's location.

    :param exp_dir: root directory for experiment outputs
    :param conf_path: path of the config file; everything after 'experiments/' is reused
    :param prefix: optional subdirectory name appended to the mirrored path
    :param make_new_dir: if True, a timestamp is appended to the prefix to make it unique
    """
    # extract the subfolder structure from config path
    path = conf_path.split('experiments/', 1)[1]
    if make_new_dir:
        prefix += datetime_str()
    base_path = os.path.join(exp_dir, path)
    if prefix:
        return os.path.join(base_path, prefix)
    return base_path
def set_seeds(seed=0):
    """Sets all seeds and disables non-determinism in cuDNN backend."""
    np.random.seed(seed)
    torch.manual_seed(seed)
    # make cuDNN pick deterministic algorithms instead of auto-tuned ones
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
def get_dataset_path(dataset_name):
    """Returns the on-disk path of the named dataset inside the $GCP_DATA_DIR directory."""
    data_root = os.environ["GCP_DATA_DIR"]
    return os.path.join(data_root, dataset_name)
def download_data(dataset_name):
    """Downloads and unzips the named dataset into $GCP_DATA_DIR if it is not present yet.

    :param dataset_name: one of the known dataset identifiers below
    :raises ValueError: if dataset_name is not a known identifier
    """
    DATA_URLs = AttrDict(
        nav_9rooms='https://www.seas.upenn.edu/~oleh/datasets/gcp/nav_9rooms.zip',
        nav_25rooms='https://www.seas.upenn.edu/~oleh/datasets/gcp/nav_25rooms.zip',
        sawyer='https://www.seas.upenn.edu/~oleh/datasets/gcp/sawyer.zip',
        h36m='https://www.seas.upenn.edu/~oleh/datasets/gcp/h36m.zip',
    )
    if dataset_name not in DATA_URLs:
        raise ValueError("Dataset identifier {} is not known!".format(dataset_name))
    if os.path.exists(get_dataset_path(dataset_name)):
        return  # already downloaded -- nothing to do
    print("Downloading dataset from {} to {}.".format(DATA_URLs[dataset_name], os.environ["GCP_DATA_DIR"]))
    print("This may take a few minutes...")
    dload.save_unzip(DATA_URLs[dataset_name], os.environ["GCP_DATA_DIR"], delete_after=True)
    print("...Done!")
<file_sep>from blox import AttrDict
from experiments.prediction.base_configs import base_tree as base_conf
# Start from the base tree-model experiment configuration and override the matching settings
# for this experiment variant (image-space DTW matching with attentive inference).
configuration = AttrDict(base_conf.configuration)
model_config = AttrDict(base_conf.model_config)
model_config.update({
    'matching_type': 'dtw_image',
    'learn_matching_temp': False,
    'attentive_inference': True,
})
<file_sep>import torch
import torch.distributions
from gcp.prediction.models.tree.untied_layers_tree import UntiedLayersTree
from gcp.prediction.models.tree.tree_module import TreeModule
from gcp.prediction.models.tree.tree_dense_rec import TreeDenseRec
from blox import AttrDict, rmap
from blox.basic_types import subdict
from blox.tensor.ops import add_n_dims
from gcp.prediction.models.base_gcp import BaseGCPModel
from gcp.prediction.utils.tree_utils import SubgoalTreeLayer
class TreeModel(BaseGCPModel):
    """Hierarchical GCP model that predicts a sequence by recursively inferring subgoals
    in a binary tree between the start and goal frames."""
    def build_network(self, build_encoder=True):
        """Builds the tree prediction module and the dense decoder on top of the base network."""
        super().build_network(build_encoder)
        cls = TreeModule
        if self._hp.untied_layers:
            # use a separate set of weights per tree level
            cls = UntiedLayersTree
        self.tree_module = cls(self._hp, self.decoder)
        self.dense_rec = TreeDenseRec(
            hp=self._hp, input_size=self._hp.nz_enc, output_size=self._hp.nz_enc, decoder=self.decoder)
    def _create_initial_nodes(self, inputs):
        """Creates the start and goal nodes that span the root segment of the subgoal tree."""
        start_node, end_node = AttrDict(e_g_prime=inputs.e_0, images=inputs.I_0), \
                               AttrDict(e_g_prime=inputs.e_g, images=inputs.I_g)
        if not self._hp.attentive_inference:
            # matched timesteps are tracked explicitly when not using attention-based inference
            start_match_timestep, end_match_timestep = self.tree_module.binding.get_init_inds(inputs)
            start_node.update(AttrDict(match_timesteps=start_match_timestep))
            end_node.update(AttrDict(match_timesteps=end_match_timestep))
        if self._hp.tree_lstm:
            start_node.hidden_state, end_node.hidden_state = None, None
        return start_node, end_node
    def filter_layerwise_inputs(self, inputs):
        """Selects the inputs that are provided per tree node rather than per batch."""
        layerwise_input_keys = ['z']  # these inputs are assumed to be depth-first inputs per node in dim 1
        layerwise_inputs = subdict(inputs, layerwise_input_keys, strict=False)
        return layerwise_inputs
    def predict_sequence(self, inputs, outputs, start_ind, end_ind, phase):
        """Builds the subgoal tree between start and goal and densely decodes it into a sequence."""
        layerwise_inputs = self.filter_layerwise_inputs(inputs)
        start_node, end_node = self._create_initial_nodes(inputs)
        outputs.tree = root = SubgoalTreeLayer()
        tree_inputs = [layerwise_inputs, start_ind, end_ind, start_node, end_node]
        # add a singleton 'segment' dimension so the same inputs can be consumed per tree segment
        tree_inputs = rmap(lambda x: add_n_dims(x, n=1, dim=1), tree_inputs)
        tree_inputs = [inputs] + tree_inputs
        root.produce_tree(*tree_inputs, self.tree_module, self._hp.hierarchy_levels)
        outputs.dense_rec = self.dense_rec(root, inputs)
        if 'traj_seq' in inputs and phase == 'train':
            # compute matching between nodes & frames of input sequence, needed for loss and inv mdl etc.
            self.tree_module.compute_matching(inputs, outputs)
        # TODO the binding has to move to this class
        outputs.pruned_prediction = self.tree_module.binding.prune_sequence(inputs, outputs)
        # add pruned reconstruction if necessary
        if not outputs.dense_rec and self._hp.matching_type == 'balanced':
            # TODO this has to be unified with the balanced tree case
            outputs.pruned_prediction = self.dense_rec.get_all_samples_with_len(
                outputs.end_ind, outputs, inputs, pruning_scheme='basic')[0]
        return outputs
    def get_predicted_pruned_seqs(self, inputs, outputs):
        """Prunes the predicted tree into a latent sequence without using GT matching."""
        return self.tree_module.binding.prune_sequence(inputs, outputs, 'e_g_prime')
    def loss(self, inputs, outputs, log_error_arr=False):
        """Adds the tree-matching losses on top of the base model losses."""
        losses = super().loss(inputs, outputs, log_error_arr)
        losses.update(self.tree_module.loss(inputs, outputs))
        return losses
    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        """Logs tree-specific visualizations in addition to the base model logging."""
        super().log_outputs(outputs, inputs, losses, step, log_images, phase)
        if outputs.tree.subgoals is None:
            outputs.tree.subgoals = AttrDict()
        if log_images:
            dataset = self._hp.dataset_class
            if self._hp.use_convs:
                self._logger.log_hierarchy_image(outputs, inputs, "hierarchical_splits", step, phase)
                self._logger.log_rows_gif([outputs.pruned_prediction], 'pruned_seq_gif', step, phase)
                if 'match_dist' in outputs.tree.subgoals:
                    # Any model that has matching
                    self._logger.log_gt_match_overview(outputs, inputs, "match_overview", step, phase)
                    if 'soft_matched_estimates' in outputs:
                        self._logger.log_loss_gif(outputs.soft_matched_estimates, inputs.traj_seq,
                                                  'gt_target_gif', step, phase)
                if phase == 'val' and 'images' in outputs.tree.subgoals:
                    self._logger.log_val_tree(outputs, inputs, "output_tree", step, phase)
                if 'pixel_copy_mask' in outputs.tree.subgoals:
                    self._logger.log_balanced_tree(outputs, "pixel_copy_mask", "pixel_copy_masks", step, phase)
                if 'gamma' in outputs.tree.subgoals and outputs.tree.subgoals.gamma is not None:
                    self._logger.log_attention_overview(outputs, inputs, "attention_masks", step, phase)
                if outputs.dense_rec:
                    self._logger.log_pruned_pred(outputs, inputs, "pruned_pred", step, phase)
                    if outputs.tree.pruned is not None:
                        self._logger.log_pruned_tree(outputs, "pruned_tree", step, phase)
            # disabled debug visualization: sample the prior several times and log the results
            log_prior_images = False
            if log_prior_images:
                # Run the model N times
                with torch.no_grad(), self.val_mode():
                    rows = list([self(inputs).pruned_prediction for i in range(4)])
                    self._logger.log_rows_gif(rows, "prior_samples", step, phase)
                    for i in range(4):
                        if 'regressed_state' in outputs:
                            out = self(inputs, 'test')
                            self._logger.log_dataset_specific_trajectory(outputs, inputs,
                                                                         "prior_regressed_state_topdown_" + str(i),
                                                                         step, phase, dataset,
                                                                         predictions=out.regressed_state,
                                                                         end_inds=inputs.end_ind)
<file_sep>import matplotlib; matplotlib.use('Agg')
import argparse
import copy
import glob
import importlib.machinery
import importlib.util
import os
import random
from multiprocessing import Pool
import numpy as np
from blox import AttrDict
from gcp.planning.infra.sim.benchmarks import run_trajectories
def bench_worker(conf, iex=-1, ngpu=1):
    """Worker entry point: re-seeds the RNGs and runs the configured trajectory benchmark.

    :param conf: per-worker config dict with at least start_index/end_index/gpu_id
    :param iex: single example index to run, or -1 for the whole range
    :param ngpu: number of GPUs available to this worker
    """
    pid = os.getpid()
    print('started process with PID:', pid)
    # reseed from OS entropy so parallel workers don't produce identical trajectories
    random.seed(None)
    np.random.seed(None)
    start, end = conf['start_index'], conf['end_index']
    print('start ind', start)
    print('end ind', end)
    run_trajectories(conf, iex, gpu_id=conf['gpu_id'], ngpu=ngpu)
def check_and_pop(dict, key):
    """Removes `key` from `dict` if present (and non-None), logging the removal."""
    removed = dict.pop(key, None)
    if removed is not None:
        print('popping key: {}'.format(key))
def postprocess_hyperparams(hyperparams, args):
    """Applies command-line overrides to the loaded hyperparameter dict and returns it.

    Currently only appends args.data_save_postfix (when non-empty) to data_save_dir.
    """
    postfix = args.data_save_postfix
    if postfix:
        hyperparams['data_save_dir'] = os.path.join(hyperparams['data_save_dir'], postfix)
    return hyperparams
class ControlManager:
    """Parses CLI arguments, loads the experiment hyperparameters from a python config file,
    and dispatches (possibly parallel) data-collection / benchmark workers."""
    def __init__(self, args_in=None, hyperparams=None):
        parser = argparse.ArgumentParser(description='run parallel data collection')
        parser.add_argument('experiment', type=str, help='experiment name')
        parser.add_argument('--nworkers', type=int, help='use multiple threads or not', default=1)
        parser.add_argument('--gpu_id', type=int, help='the starting gpu_id', default=0)
        parser.add_argument('--ngpu', type=int, help='the number of gpus to use', default=1)
        parser.add_argument('--gpu', type=int, help='the gpu to use', default=-1)
        parser.add_argument('--nsplit', type=int, help='number of splits', default=-1)
        parser.add_argument('--isplit', type=int, help='split id', default=-1)
        parser.add_argument('--iex', type=int, help='if different from -1 use only do example', default=-1)
        parser.add_argument('--data_save_postfix', type=str, help='appends to the data_save_dir path', default='')
        parser.add_argument('--nstart_goal_pairs', type=int, help='max number of start goal pairs', default=None)
        parser.add_argument('--resume_from', type=int, help='from which traj idx to continue generating', default=None)
        args = parser.parse_args(args_in)
        print("Resume from")
        print(args.resume_from)
        if args.gpu != -1:
            os.environ["CUDA_VISIBLE_DEVICES"] = str(args.gpu)
        if hyperparams is None:
            # load the experiment config module from the given python file path
            hyperparams_file = args.experiment
            loader = importlib.machinery.SourceFileLoader('mod_hyper', hyperparams_file)
            spec = importlib.util.spec_from_loader(loader.name, loader)
            mod = importlib.util.module_from_spec(spec)
            loader.exec_module(mod)
            hyperparams = AttrDict(mod.config)
        self.args = args
        self.hyperparams = postprocess_hyperparams(hyperparams, args)
    def run(self, logging_conf=None):
        """Splits the trajectory index range across workers and runs them (in parallel if
        nworkers > 1)."""
        args = self.args
        hyperparams = self.hyperparams
        gpu_id = args.gpu_id
        n_worker = args.nworkers
        if args.nworkers == 1:
            parallel = False
        else:
            parallel = True
        print('parallel ', bool(parallel))
        if args.nsplit != -1:
            # restrict [start_index, end_index] to the isplit-th of nsplit equal shards
            assert args.isplit >= 0 and args.isplit < args.nsplit, "isplit should be in [0, nsplit-1]"
            n_persplit = max((hyperparams['end_index'] + 1 - hyperparams['start_index']) / args.nsplit, 1)
            hyperparams['end_index'] = int((args.isplit + 1) * n_persplit + hyperparams['start_index'] - 1)
            hyperparams['start_index'] = int(args.isplit * n_persplit + hyperparams['start_index'])
        n_traj = hyperparams['end_index'] - hyperparams['start_index'] + 1
        traj_per_worker = int(n_traj // np.float32(n_worker))
        # when resuming, shift every worker's start index forward by its share of done trajectories
        offset = int(args.resume_from // np.float32(n_worker)) if args.resume_from is not None else 0
        start_idx = [hyperparams['start_index'] + offset + traj_per_worker * i for i in range(n_worker)]
        end_idx = [hyperparams['start_index'] + traj_per_worker * (i+1)-1 for i in range(n_worker)]
        if 'gen_xml' in hyperparams['agent']:
            # clear auto-generated simulator XML files from previous runs (best effort)
            try:
                os.system("rm {}".format('/'.join(str.split(hyperparams['agent']['filename'], '/')[:-1]) + '/auto_gen/*'))
            except: pass
        self.set_paths(hyperparams, args)
        record_queue, record_saver_proc, counter = None, None, None
        if args.iex != -1:
            hyperparams['agent']['iex'] = args.iex
        conflist = []
        for i in range(n_worker):
            # each worker gets a deep-copied config with its own index range and GPU id
            modconf = copy.deepcopy(hyperparams)
            modconf['start_index'] = start_idx[i]
            modconf['end_index'] = end_idx[i]
            modconf['ntraj'] = n_traj
            modconf['gpu_id'] = i + gpu_id
            if logging_conf is not None:
                modconf['logging_conf'] = logging_conf
            conflist.append(modconf)
        if parallel:
            self.start_parallel(conflist, n_worker)
        else:
            bench_worker(conflist[0], args.iex, args.ngpu)
        # NOTE(review): 'save_thread' is never added to the argument parser above, so this
        # branch raises AttributeError if reached (record_queue is also still None here) --
        # confirm whether this leftover should be removed or the argument reinstated.
        if args.save_thread:
            record_queue.put(None)  # send flag to background thread that it can end saving after it's done
            record_saver_proc.join()  # joins thread and continues execution
    def set_paths(self, hyperparams, args):
        """Derives data/log/result directories from the experiment location, creating the
        result dir and clearing stale .pkl result files."""
        subpath = str.partition(hyperparams['current_dir'], 'experiments')[-1]
        if 'data_save_dir' not in hyperparams:
            data_save_dir = os.environ['GCP_DATA_DIR'] + '/' + subpath
            hyperparams['data_save_dir'] = data_save_dir
        print('setting data_save_dir to', hyperparams['data_save_dir'])
        if 'log_dir' not in hyperparams:
            log_dir = os.environ['GCP_EXP_DIR'] + '/' + subpath
            if args.data_save_postfix:
                log_dir = os.path.join(log_dir, args.data_save_postfix)
            hyperparams['log_dir'] = log_dir
        print('setting log_dir to', hyperparams['log_dir'])
        result_dir = hyperparams['data_save_dir'] + '/log'
        if not os.path.exists(result_dir):
            os.makedirs(result_dir)
        for file in glob.glob(result_dir + '/*.pkl'):
            # remove stale results from previous runs
            os.remove(file)
        hyperparams['result_dir'] = result_dir
    def start_parallel(self, conflist, n_worker):
        """Runs one bench_worker per config in a process pool."""
        # mp.set_start_method('spawn') # this is important for parallelism with xvfb
        p = Pool(n_worker)
        p.map(bench_worker, conflist)
# Script entry point: parse CLI args and launch (possibly parallel) data collection.
if __name__ == '__main__':
    ControlManager().run()
<file_sep>import numpy as np
import torch
from blox import AttrDict
from blox.torch.ops import cdist
from blox.utils import PriorityQueue
from gcp.prediction.models.tree.frame_binding import BalancedBinding
try:
print("\nUsing fast C-version of DTW!\n")
import gcp.evaluation.cutils as cutils
from gcp.evaluation.dtw_utils import c_dtw as dtw
except:
print("\nC-version of DTW not compiled! Falling back to slower Numpy version!\n")
from gcp.evaluation.dtw_utils import basic_dtw as dtw
def torch2np(tensor):
    """Converts a torch tensor to a numpy array (detached from the graph, moved to CPU)."""
    detached = tensor.detach().cpu()
    return detached.numpy()
class BaseEvalBinding:
    """Base class for strategies that bind predicted tree nodes to a fixed-length output
    sequence at evaluation time."""
    def __init__(self, hp):
        self._hp = hp
    def __call__(self, *args, **kwargs):
        raise NotImplementedError("This function needs to be implemented by the subclass!")
    def _init_tree(self, tree, inputs):
        """Marks every tree node as unselected for each batch element."""
        bsize = tree.end_ind.shape[0]
        for node in tree:
            node.selected = np.zeros(bsize, dtype=bool)
    def _collect_sequence(self, tree, inputs, i_ex):
        """Stacks the predicted images of example i_ex in depth-first (temporal) order."""
        return torch.stack([node.subgoal.images[i_ex] for node in tree.depth_first_iter()])
    @staticmethod
    def _check_output(output_seq, length):
        """Ensures output_seq has `length` frames, repeating the last frame if it is too short.

        :param output_seq: tensor of frames, time in dim 0
        :param length: expected number of frames
        """
        if output_seq.shape[0] != length:
            print("Expected output length {}, got sequence of length {}. Repeating last frame".format(
                length, output_seq.shape[0]))
            output_seq = torch.cat((output_seq, output_seq[-1:].expand(length - output_seq.shape[0], -1, -1, -1)))
        # previously this method contained a redundant duplicated return statement
        return output_seq
class GreedyExistEvalBinding(BaseEvalBinding):
    """Eval binding that greedily selects the `length` tree nodes with the highest
    existence probability (p_n_hat)."""
    def __call__(self, tree, inputs, length, i_ex):
        """Perform greedy search over the tree prioritizing with respect to existence probability."""
        if tree.selected is None:
            # Bug fix: _init_tree requires the `inputs` argument; it was previously called
            # without it, raising TypeError on first use.
            self._init_tree(tree, inputs)
        root = tree
        p_queue = PriorityQueue()
        p_queue.push(root, torch2np(root.subgoal.p_n_hat[i_ex]))
        # pop the most-likely-existing node `length` times, expanding its children each time
        for i in range(length):
            node = p_queue.pop()
            node.selected[i_ex] = True
            s0 = node.s0
            s1 = node.s1
            if s0.subgoal is not None:
                p_queue.push(s0, torch2np(s0.subgoal.p_n_hat[i_ex]))
            if s1.subgoal is not None:
                p_queue.push(s1, torch2np(s1.subgoal.p_n_hat[i_ex]))
        gen_images = self._collect_sequence(tree, inputs, i_ex)
        gen_images = self._check_output(gen_images, length)
        return gen_images, None
class GreedyL2EvalBinding(BaseEvalBinding):
    """Eval binding that matches tree nodes to GT frames via greedy minimal-L2 matching and
    fills unmatched timesteps with the closest boundary frame."""
    def _init_tree(self, tree, inputs):
        """Initializes selection flags and recursively computes min-L2 GT matches per node."""
        super()._init_tree(tree, inputs)
        tree.min_l2_match(np.zeros_like(inputs.end_ind.cpu().numpy()), inputs.end_ind.cpu().numpy() + 1,
                          inputs.traj_seq,
                          np.asarray(np.ones_like(inputs.end_ind.cpu().numpy()), dtype=np.uint8))
    @staticmethod
    def _get_best_filler_seq(partial_gt_seq, frame_1, frame_2):
        """For each GT frame in the gap, picks whichever of the two boundary frames is closer in L2.

        :param partial_gt_seq: GT frames in the gap to be filled
        :param frame_1, frame_2: predicted boundary frames around the gap
        :return: list of chosen filler frames, one per GT frame in the gap
        """
        frames = torch.stack((frame_1, frame_2), dim=0)
        gt_seg_len = partial_gt_seq.shape[0]
        l2_distances = torch.nn.MSELoss(reduction='none')(partial_gt_seq[None], frames[:, None]) \
            .view(2, gt_seg_len, -1).mean(-1)
        frame_choices = torch.argmin(l2_distances, dim=0)
        frames = frames[frame_choices]
        return [t[0] for t in torch.split(frames, 1)]
    @staticmethod
    def _collect_sequence(tree, inputs, i_ex):
        """Assembles a full-length frame sequence from the selected nodes, filling the gaps
        between matched timesteps."""
        sel_list = []
        def maybe_fill(prev_matched_step, prev_matched_img, matched_step, matched_img):
            # fills the (exclusive) gap between two matched timesteps with boundary frames
            diff = matched_step - prev_matched_step
            if diff > 1:  # add missing nodes
                sel_list.extend(
                    GreedyL2EvalBinding._get_best_filler_seq(inputs.traj_seq[i_ex, prev_matched_step + 1:matched_step],
                                                             prev_matched_img, matched_img))
        prev_matched_step, prev_matched_img = -1, None
        for node in tree:  # iterate through the tree
            if node.selected[i_ex]:
                matched_step, matched_img = node.match_eval_idx[i_ex], node.subgoal.images[i_ex]
                prev_matched_img = matched_img if prev_matched_img is None else prev_matched_img  # fill with first predicted image
                maybe_fill(prev_matched_step, prev_matched_img, matched_step, matched_img)
                sel_list.append(matched_img)
                prev_matched_step, prev_matched_img = matched_step, matched_img
        maybe_fill(prev_matched_step, prev_matched_img, inputs.end_ind[i_ex] + 1,
                   prev_matched_img)  # fill with last predicted image
        return torch.stack(sel_list, dim=0)
    def __call__(self, tree, inputs, length, i_ex):
        """Perform greedy minimal-L2-matchings from the root of the tree."""
        if tree.selected is None:
            self._init_tree(tree, inputs)
        gen_images = self._collect_sequence(tree, inputs, i_ex)
        gen_images = self._check_output(gen_images, length)
        return gen_images, None
class DTWEvalBinding(BaseEvalBinding):
    """Eval binding that aligns predicted frames to GT frames via dynamic time warping (DTW)."""
    def __call__(self, outputs, inputs, length, i_ex, targets=None, estimates=None):
        """ Match predicted frames to GT frames for example i_ex via DTW.
        targets/estimates can be passed explicitly to override the default collection."""
        if estimates is None:
            estimates = self._collect_sequence(outputs.tree, inputs, i_ex)
        if targets is None:
            targets = inputs.traj_seq[i_ex, :inputs.end_ind[i_ex] + 1]
        return self.get_single_matches(targets, estimates)
    @staticmethod
    def get_single_matches(targets, estimates):
        """Computes the DTW alignment and returns, per GT frame, the best-matching estimate.

        :return: (gen_images, AttrDict with targets/estimates/matching_path/gen_images)
        """
        # Get dtw
        matrix = cdist(estimates, targets, reduction='mean').data.cpu().numpy()
        # matrix = ((estimates[:, None] - targets[None]) ** 2).mean(dim=[2, 3, 4]).data.cpu().numpy()
        # norm = lambda x,targets: torch.nn.MSELoss()(x,targets).data.cpu().numpy()
        d, cost_matrix, path = dtw(matrix)
        # Get best matches for gt frames
        # restrict candidates to cells on the DTW path, then pick the cheapest estimate per target
        match_matrix = np.zeros_like(cost_matrix)
        match_matrix[:, :] = np.inf
        match_matrix[path[0], path[1]] = cost_matrix[path[0], path[1]]
        inds = np.argmin(match_matrix, axis=0)
        gen_images = estimates[inds]
        matching_output = AttrDict(targets=targets, estimates=estimates, matching_path=path, gen_images=gen_images)
        return gen_images, matching_output
    @staticmethod
    def vis_matching(matching_output):
        """
        Visualizes the DTW matching path between GT and predicted sequence
        :param matching_output: Dict that gets returned in 'get_single_matches'
            (targets: n_t, channels, res, res; dtype: torch.Tensor)
            (estimates: n_n, channels, res, res; dtype: torch.Tensor)
            (matching_path: [2 x max(n_t, n_n)]; dtype: ndarray)
            (gen_images: n_t, channels, res, res; dtype: torch.Tensor)
        """
        n_t, channels, res, _ = matching_output.targets.shape
        n_n = matching_output.estimates.shape[0]
        # canvas layout: left column GT targets, next column matched frames, top row all estimates
        img = -torch.ones((channels, res * (n_t + 1), res * (n_n + 2)), dtype=matching_output.targets.dtype)
        img[:, res:, :res] = matching_output.targets.transpose(0, 1).contiguous().view(channels, n_t * res, res)
        img[:, res:, res:2 * res] = matching_output.gen_images.transpose(0, 1).contiguous().view(channels, n_t * res,
                                                                                                 res)
        img[:, :res, 2 * res:] = matching_output.estimates.permute(1, 2, 0, 3).contiguous().view(channels, res,
                                                                                                 n_n * res)
        for pn, pt in zip(*matching_output.matching_path):
            # mark each DTW path cell with a bright square
            img[:, (pt + 1) * res: (pt + 2) * res, (pn + 2) * res: (pn + 3) * res] = 1.0
        return img.permute(1, 2, 0).data.cpu().numpy()
class BalancedEvalBinding(BaseEvalBinding):
    """Collects predicted frames using the balanced binding and prunes tree nodes
    whose `c_n_prime` flags are all zero."""

    def __init__(self, hp):
        self.binding = BalancedBinding(hp)
        super().__init__(hp)

    def __call__(self, outputs, inputs, length, i_ex, name=None, targets=None, estimates=None):
        start_ind, end_ind = self.binding.get_init_inds(outputs)
        if i_ex == 0:
            # Only for the first element: matching dists are computed for the whole
            # batch at once, so run this once (assumes i_ex is iterated starting at 0)
            outputs.tree.compute_matching_dists({}, matching_fcn=self.binding,
                                                left_parents=AttrDict(timesteps=start_ind),
                                                right_parents=AttrDict(timesteps=end_ind))
        name = 'images' if name is None else name
        estimates = torch.stack([node.subgoal[name][i_ex] for node in outputs.tree.depth_first_iter()])
        # keep nodes where any c_n_prime entry is set
        # NOTE(review): presumably c_n_prime marks retained nodes — confirm against BalancedBinding
        leave = torch.stack([node.subgoal.c_n_prime[i_ex] for node in outputs.tree.depth_first_iter()]).bool().any(1)
        return estimates[leave], None

    def get_all_samples(self, outputs, inputs, length, name=None, targets=None, estimates=None):
        """Like __call__, but returns the pruned sequence for every batch element."""
        start_ind, end_ind = self.binding.get_init_inds(outputs)
        # Only for the first element
        outputs.tree.compute_matching_dists({}, matching_fcn=self.binding,
                                            left_parents=AttrDict(timesteps=start_ind),
                                            right_parents=AttrDict(timesteps=end_ind))
        name = 'images' if name is None else name
        estimates = torch.stack([node.subgoal[name] for node in outputs.tree.depth_first_iter()])
        leave = torch.stack([node.subgoal.c_n_prime for node in outputs.tree.depth_first_iter()]).bool().any(-1)
        pruned_seqs = [estimates[:, i][leave[:, i]] for i in range(leave.shape[1])]
        return pruned_seqs, None
class BalancedPrunedDTWBinding():
    """First prunes the prediction tree with the balanced binding, then aligns the
    pruned frame sequence to the ground truth with dynamic time warping."""

    def __init__(self, hp):
        self.pruning_binding = BalancedEvalBinding(hp)
        self.dtw_binding = DTWEvalBinding(hp)

    def __call__(self, outputs, inputs, length, i_ex, targets=None, estimates=None):
        pruned_estimates, _ = self.pruning_binding(outputs, inputs, length, i_ex)
        return self.dtw_binding(outputs, inputs, length, i_ex, estimates=pruned_estimates)

    @staticmethod
    def vis_matching(matching_output):
        """Delegates matching visualization to the DTW binding."""
        return DTWEvalBinding.vis_matching(matching_output)
<file_sep>import random
import torch.utils.data as data
import numpy as np
from PIL import Image
import glob
import h5py
import random
import torch
import matplotlib.pyplot as plt
from PIL import Image
def get_traj_per_file(file):
    """Returns the number of trajectories stored in one h5 file
    (each top-level key holds a single trajectory)."""
    with h5py.File(file, 'r') as hf:
        return len(hf.keys())
class Video_Dataset(data.Dataset):
    """Dataset of video trajectories spread across multiple h5 files.

    Every file in `<data_dir>/<phase>` stores trajectories under the keys
    'traj<i>/images' and 'traj<i>/actions'. A global index is mapped to a
    (file, within-file) pair; all files are assumed to hold the same number
    of trajectories as the first one.
    """

    def __init__(self,
                 data_dir,
                 phase
                 ):
        self.data_dir = data_dir + '/' + phase
        self.train_val_split = 0.95  # NOTE(review): unused in this class — presumably consumed elsewhere; confirm
        self.filenames = sorted(glob.glob(self.data_dir + '/*.h5'))
        if not self.filenames:
            raise RuntimeError('No filenames found')
        # fixed seed keeps the shuffled file order reproducible across runs
        random.seed(1)
        random.shuffle(self.filenames)
        self.traj_per_file = get_traj_per_file(self.filenames[0])

    def __getitem__(self, index):
        # map the global index onto (file index, trajectory index within file)
        file_index, in_file_ind = divmod(index, self.traj_per_file)
        path = self.filenames[file_index]

        with h5py.File(path, 'r') as hf:
            images = np.asarray(hf['traj{}/images'.format(in_file_ind)])
            actions = np.asarray(hf['traj{}/actions'.format(in_file_ind)])
        return images, actions

    def __len__(self):
        return self.traj_per_file * len(self.filenames)
def make_data_loader(data_dir, phase, batch_size=3, shuffle=True, num_workers=1, pin_memory=True):
    """
    Builds a DataLoader over the video trajectory dataset.

    :param data_dir: root directory containing train/val/test subfolders
    :param phase: either train, val or test
    :param batch_size: trajectories per batch (default 3, previously hard-coded)
    :param shuffle: whether to reshuffle trajectories every epoch
    :param num_workers: number of data-loading worker processes
    :param pin_memory: pin host memory for faster GPU transfers
    :return: dataset iterator
    """
    d = Video_Dataset(data_dir, phase)
    return torch.utils.data.DataLoader(d, batch_size=batch_size, shuffle=shuffle,
                                       num_workers=num_workers, pin_memory=pin_memory)
if __name__ == '__main__':
    # Smoke test: load a batch from a local dataset, print actions, and display frames.
    data_dir = '/mnt/sda1/recursive_planning_data/sim/cartgripper/multi_block_2d'
    d = Video_Dataset(data_dir, phase='train')
    loader = torch.utils.data.DataLoader(d, batch_size=3, shuffle=True,num_workers=1, pin_memory=True)

    deltat = []
    for i_batch, sample_batched in enumerate(loader):
        images, actions = sample_batched
        print(actions)
        for t in range(images.shape[1]):
            # show first batch element, first camera, at timestep t
            plt.imshow(images[0,t,0])
            plt.show()

    # images = images.numpy().transpose((0, 1, 3, 4, 2))
    # file = '/'.join(str.split(config['agent']['data_save_dir'], '/')[:-1]) + '/example'
    # comp_single_video(file, images)
<file_sep># Long-Horizon Visual Planning with Goal-Conditioned Hierarchical Predictors
#### [[Project Website]](https://orybkin.github.io/video-gcp/)
[<NAME>](https://kpertsch.github.io/)<sup>*1</sup>, [<NAME>](https://www.seas.upenn.edu/~oleh/)<sup>*2</sup>,
[<NAME>](https://febert.github.io/)<sup>3</sup>, [<NAME>](http://people.eecs.berkeley.edu/~cbfinn/)<sup>4</sup>,
[<NAME>](https://www.seas.upenn.edu/~dineshj/)<sup>2</sup>, [<NAME>](https://people.eecs.berkeley.edu/~svlevine/)<sup>3</sup><br/>
(* equal contribution)
<sup>1</sup>University of Southern California
<sup>2</sup>University of Pennsylvania
<sup>3</sup>UC Berkeley
<sup>4</sup>Stanford
<a href="https://orybkin.github.io/video-gcp/">
<p align="center">
<img src="https://orybkin.github.io/video-gcp/resources/teaser.gif" width="800">
</p>
</img></a>
This is the official Pytorch implementation for our paper **Long-Horizon Visual Planning with Goal-Conditioned Hierarchical Predictors**.
## Installation
To install the module, run the following commands:
```
git clone --recursive <EMAIL>:orybkin/video-gcp.git
cd video-gcp
virtualenv -p $(which python3) ./venv
source ./venv/bin/activate
pip3 install -r requirements.txt
python3 setup.py develop
```
## Model Training
To train a tree-structured prediction model start by setting paths for data and experiment logs:
```
export GCP_DATA_DIR=./data
export GCP_EXP_DIR=./experiment_logs
```
Note that the data will be automatically downloaded upon first running the model training code.
To start training the GCP model on the 25-room navigation dataset, run:
```
cd gcp
python3 prediction/train.py --path=../experiments/prediction/25room/gcp_tree/ --skip_first_val=1
```
The training logs get written into `GCP_EXP_DIR` and can be displayed by opening a Tensorboard in this folder:
`tensorboard --logdir=$GCP_EXP_DIR`.
## Planning Experiments
For running planning experiments we need to set up a virtual display for rendering (skip this step if you are not running on a headless machine).
```
Xvfb -screen 0 320x240x24 &
export DISPLAY=:0
```
Next, we need to install the `gym-miniworld` submodule:
```
cd gym-miniworld
python3 setup.py develop
```
To run a planning evaluation run with the previously trained model, execute:
```
cd gcp
python3 planning/run.py ../experiments/control/25room/gcp_tree/mod_hyper.py
```
To compute the control performance metrics (success rate and trajectory cost), run:
```
python evaluation/compute_control_perf.py --path=${GCP_EXP_DIR}/control/25room/gcp_tree/scores_0to99.pkl --n_rooms=25
```
## Modifying the Code
### Running on a new dataset
If you want to run our model on a new dataset, the easiest is to subclass [```FolderSplitVarLenVideoDataset```](gcp/datasets/data_loader.py)
and overwrite the ```__getitem__()``` method to load your data. Then just point to your custom
data loader in the config file. Our code expects the data loader's ```__getitem__()``` function to output a dictionary with
the following structure:
```
dict({
'traj_seq': (time, channels, height, width) # zero-padded sequence of max_seq_len
'pad_mask': (time,) # one for all time steps that are zero-padded, 0 otherwise
'I_0': (channels, height, width) # start image
'I_g': (channels, height, width) # goal image
'end_ind': int # index of goal image in the demo sequence
'states': (time, state_dim) # (optional) state sequence corresponding to image sequence, for training state regressor
'actions': (time, action_dim) # (optional) action sequence for training inverse model
})
```
Further your dataset folder is expected to have the following structure:
```
< dataset_path >
|-- dataset_spec.py # specifies state/action dimensions + sequence length
|-- train/ # training trajectory files
|-- val/ # validation trajectory files
|-- test/ # test trajectory files
```
You can find an example ```dataset_spec.py``` file [here](experiments/data_gen/misc/example_dataset_spec.py).
By default the data loader will search for all ```.h5``` files in the ```train/val/test``` subfolders to compile the list of filenames.
If you want to load from a different file format or have a different folder structure you need to adjust the ```get_filenames()``` function accordingly.
### Modifying the predictive model
The proposed goal-conditioned predictors are implemented in the [```prediction```](gcp/prediction) directory.
Below we describe the functionality implemented in the different files to allow for quick adjustments.
|File | Description |
|:------------- |:-------------|
| [```train.py```](gcp/prediction/train.py) | Training script. |
| [```sequential.py```](gcp/prediction/sequential.py) | A naive sequential goal-conditioned predictor (GCP). This works well on some data, but fails to scale to long horizons. |
| [```blox/torch/models/vrnn.py```](blox/torch/models/vrnn.py) | The variational RNN, which is the basis of the sequential GCP. |
| [```tree/tree.py```](gcp/prediction/tree/tree.py) | The proposed hierarchical GCP which scales to long horizons. |
| [```tree/tree_module.py```](gcp/prediction/tree/tree_module.py) | The basic module for hierarchical prediction: predicting a state between two other states. |
| [```tree/tree_lstm.py```](gcp/prediction/tree/tree_lstm.py) | Tree LSTM implementation. |
| [```blox/torch/encoder_decoder.py```](blox/torch/encoder_decoder.py) | The encoder and decoder networks and various decoding distributions. |
| [```adaptive_binding```](gcp/prediction/adaptive_binding) | The hierarchical model with adaptive binding. |
A significant amount of utils developed by us for this and other projects are in the [blox](https://github.com/orybkin/blox-nn) repo,
including tools for quick fully connected, convolutional, and recurrent architectures, operations on nested tensor dictionaries,
tools for probabilistic deep learning, and tools to interface numpy and torch code.
### Modifying the planning algorithm
The code that implements the image-based planning and control approach described in the paper is in the [```planning```](gcp/planning) directory.
Below we describe the functionality implemented in the different files to allow for quick adjustments.
|File | Description |
|:------------- |:-------------|
| [```run.py```](gcp/planning/run.py) | Launch script for planning & control experiments. |
| [```tree_optimizer.py```](gcp/planning/tree_optimizer.py) | Core hierarchical planning algorithm for GCP-tree latent optimization. |
| [```planner_policy.py```](gcp/planning/planner_policy.py) | Policy wrapper that calls planner and follows plan using inverse model. |
| [```cem/cem_planner.py```](gcp/planning/cem/cem_planner.py) | CEM planners for flat and hierarchical prediction models. |
| [```cem/cem_simulator.py```](gcp/planning/cem/cem_simulator.py) | Wrapper around predictive model for interface with CEM planner. |
| [```cem/sampler.py```](gcp/planning/cem/sampler.py) | Samples latents / actions for CEM planning, calls latent optimizer. |
| [```cem/cost_fcn.py```](gcp/planning/cem/cost_fcn.py) | Cost functions for CEM planning. |
Most of the infrastructure for the planning experiments (start / goal loading, logging, interaction between environment and agent)
directly builds on the [Visual Foresight codebase](https://github.com/SudeepDasari/visual_foresight) and can be found in the
[```infra```](gcp/planning/infra) directory.
### Generating navigation data with new layout
We provide an example script for using a PRM planner to generate a new navigation dataset in an environment with 16 rooms.
The layout can be further adjusted [here](gcp/planning/infra/envs/miniworld_env/utils/multiroom2d_layout.py).
The ```nworkers``` argument allows for parallelized data generation.
```
cd recursive_planning
python3 planning/run.py ../experiments/data_gen/nav_16rooms/mod_hyper.py --nworkers=4
```
## Downloading the datasets
The training code automatically downloads the required datasets if they are not already in the expected folders.
However, if you want to download the datasets independently, you can find the zip files here:
|Dataset | Link | Size |
|:------------- |:-------------|:-----|
| 9-room navigation | [https://www.seas.upenn.edu/~oleh/datasets/gcp/nav_9rooms.zip](https://www.seas.upenn.edu/~oleh/datasets/gcp/nav_9rooms.zip) | 140MB |
| 25-room navigation |[https://www.seas.upenn.edu/~oleh/datasets/gcp/nav_25rooms.zip](https://www.seas.upenn.edu/~oleh/datasets/gcp/nav_25rooms.zip)| 395MB|
| Sawyer |[https://www.seas.upenn.edu/~oleh/datasets/gcp/sawyer.zip](https://www.seas.upenn.edu/~oleh/datasets/gcp/sawyer.zip)|395MB|
| Human3.6M 500-frame | [https://www.seas.upenn.edu/~oleh/datasets/gcp/h36m.zip](https://www.seas.upenn.edu/~oleh/datasets/gcp/h36m.zip) | 14GB|
## Bibtex
If you find this work useful in your research, please consider citing:
```
@article{pertsch2020gcp,
title={Long-Horizon Visual Planning with Goal-Conditioned Hierarchical Predictors},
author={<NAME> and <NAME> and <NAME>
and <NAME> and <NAME> and <NAME>},
year={2020},
journal={arXiv preprint arXiv:2006.13205},
}
```
## Acknowledgements
Parts of the planning code are based on the [Visual Foresight codebase](https://github.com/SudeepDasari/visual_foresight).
In parallel with this method, we developed the [blox](https://github.com/orybkin/blox-nn) python module, which contains
many useful utilities and architectures for pytorch. Check it out and let us know if it was useful!
<file_sep>import numpy as np
import torch
from blox import AttrDict
from blox.basic_types import listdict2dictlist
from gcp.prediction.models.auxilliary_models.cost_mdl import TestTimeCostModel
class CostFcn:
    """Base class to define CEM cost functions.

    Subclasses implement `_compute`, returning one per-timestep cost array per
    candidate rollout. The final step of every rollout is re-weighted by
    `final_step_weight`; depending on `dense_cost` either the summed cost over
    time or only the terminal cost is returned.
    """

    def __init__(self, dense_cost, final_step_weight=1.0, *unused_args):
        self._dense_cost = dense_cost
        self._final_step_weight = final_step_weight

    def __call__(self, cem_outputs, goal):
        per_step = self._compute(cem_outputs, goal)
        for cost in per_step:
            cost[-1] *= self._final_step_weight  # emphasize reaching the goal at the end
        if self._dense_cost:
            return np.array([np.sum(cost) for cost in per_step])
        return np.array([cost[-1] for cost in per_step])

    def _compute(self, cem_outputs, goal):
        raise NotImplementedError
class ImageCost:
    """Provides method to split off image and latent sequence from input sequence."""

    def _split_state_rollout(self, rollouts):
        """Splits off latents from states in joined rollouts.

        :param rollouts: list of [time, image_flat + latent] arrays; the last
            `self.input_dim` entries of each row are the latent, the remainder a
            flattened 3-channel square image.
        :return: dict of lists with keys `image_rollout` and `latent_rollout`.
        """
        def reshape_to_image(flat):
            # each row must be a flattened (3, res, res) image
            # (was: `import pdb; pdb.set_trace()` on failure — a committed
            # debugger breakpoint; replaced with an informative assertion)
            assert len(flat.shape) == 2, \
                "expected flat rollout of shape [time, feature_dim], got shape {}".format(flat.shape)
            res = int(np.sqrt(flat.shape[1] / 3))  # assumes 3-channel image
            return flat.reshape(flat.shape[0], 3, res, res)

        return listdict2dictlist([AttrDict(image_rollout=reshape_to_image(r[..., :-self.input_dim]),
                                           latent_rollout=r[..., -self.input_dim:]) for r in rollouts])
class EuclideanDistance(CostFcn):
    """Per-step Euclidean distance between rollout states and the goal."""

    def _compute(self, cem_outputs, goal):
        return [np.linalg.norm(rollout - goal[None], axis=-1) for rollout in cem_outputs]
class EuclideanPathLength(CostFcn):
    """Total Euclidean length of the path through all states to the goal."""

    def _compute(self, cem_outputs, goal):
        assert self._dense_cost  # need dense cost for path length computation
        per_rollout = []
        for rollout in cem_outputs:
            # distance from each state to its successor, with the goal appended at the end
            successors = np.concatenate([rollout[1:], goal[None]])
            per_rollout.append(np.linalg.norm(successors - rollout, axis=-1))
        return per_rollout
class StepPathLength(CostFcn):
    """Cost equal to the number of steps in the path, charged at the final step."""

    def _compute(self, cem_outputs, goal):
        costs = []
        for rollout in cem_outputs:
            n_steps = rollout.shape[0]
            step_cost = np.zeros(n_steps)
            step_cost[-1] = n_steps
            costs.append(step_cost)
        return costs
class L2ImageCost(CostFcn, ImageCost):
    """Cost is equivalent to L2 distance in image space."""
    LATENT_SIZE = 128  # TODO: make this configurable

    def _compute(self, cem_outputs, goal_raw):
        # split flattened [image | latent] rollouts, keep only the image part
        image_sequences = self._split_state_rollout(cem_outputs).image_rollout
        # HWC -> CHW and [0,1] -> [-1,1]; presumably matches the range of
        # predicted images -- TODO confirm against the model's decoder output
        goal = goal_raw.transpose(0, 3, 1, 2) * 2 - 1.0
        return [np.sqrt(np.sum((seq - goal)**2, axis=(1, 2, 3))) for seq in image_sequences]

    @property
    def input_dim(self):
        # latent dimensionality used by ImageCost._split_state_rollout to split rollouts
        return self.LATENT_SIZE
class LearnedCostEstimate:
    """Uses learned network to estimate cost between two latent states."""

    def __init__(self, config):
        # TestTimeCostModel loads pretrained cost-network weights from the config
        self.net = TestTimeCostModel(params=config, logger=None)

    def __call__(self, start_enc, goal_enc):
        """
        :param start_enc: single encoding (ndarray) or list of encoding sequences
            (one per candidate rollout)
        :param goal_enc: goal encoding(s) matching the structure of start_enc
        :return: ndarray of scalar cost(s)
        """
        if isinstance(start_enc, np.ndarray):
            # compute cost for single start goal pair
            return self.net(AttrDict(enc1=start_enc, enc2=goal_enc)).data.cpu().numpy()
        elif isinstance(start_enc, list):
            # compute summed cost for sequence
            costs = []
            for seq, goal in zip(start_enc, goal_enc):
                # append goal so consecutive (t, t+1) pairs cover the full path
                seq_input = torch.cat((torch.tensor(seq).to(self.net.device), torch.tensor(goal).to(self.net.device)))
                cost_per_step = self.net(AttrDict(enc1=seq_input[:-1], enc2=seq_input[1:]))
                costs.append(cost_per_step.sum().data.cpu().numpy())
            return np.array(costs)
        else:
            raise ValueError("Dimensionality of input to learned cost function not supported!")

    @property
    def input_dim(self):
        return self.net.input_dim
class ImageLearnedCostEstimate(LearnedCostEstimate, ImageCost):
    # Learned latent-space cost combined with the image-splitting mixin.
    pass
class ImageWrappedLearnedCostFcn(LearnedCostEstimate, ImageCost):
    """Shallow wrapper around LearnedCostEstimate that unpacks image input."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __call__(self, start_enc, goal_enc):
        # split off the latent part of the joined [image | latent] rollouts
        start_enc = self._split_state_rollout(start_enc).latent_rollout
        # NOTE(review): the last rollout's latents are reused as goals for all
        # rollouts; the incoming goal_enc argument is ignored entirely
        goal_enc = [start_enc[-1] for _ in range(len(start_enc))]  # HACK that only works for goal-cond prediction!
        return super().__call__(start_enc, goal_enc)
<file_sep>from contextlib import contextmanager
import torch
from blox import AttrDict
from blox.tensor.ops import broadcast_final, batch_apply, remove_spatial
from blox.torch.encoder_decoder import Encoder
from blox.torch.losses import L2Loss
from blox.torch.subnetworks import Predictor
from gcp.prediction.models.auxilliary_models.misc import RecurrentPolicyModule
from gcp.prediction.models.auxilliary_models.base_model import BaseModel
from gcp.prediction.training.checkpoint_handler import CheckpointHandler
class BehavioralCloningModel(BaseModel):
    """Goal-conditioned behavioral cloning model: predicts actions from the
    per-step observation encoding fused with the goal-image encoding."""

    def __init__(self, params, logger):
        super().__init__(logger)
        self._hp = self._default_hparams()
        self.override_defaults(params)  # override defaults with config file
        self.postprocess_params()

        assert self._hp.n_actions != -1     # make sure action dimensionality was overridden
        self.build_network()

        # load only the encoder params during training
        if self._hp.enc_params_checkpoint is not None:
            self._load_weights([
                (self.encoder, 'encoder', self._hp.enc_params_checkpoint),
            ])
        # freeze encoder features when pretrained weights are loaded and not fine-tuned
        self.detach_enc = self._hp.enc_params_checkpoint is not None and not self._hp.finetune_enc

    def _default_hparams(self):
        # put new parameters in here:
        default_dict = {
            'ngf': 4,  # number of feature maps in shallowest level
            'nz_enc': 32,  # number of dimensions in encoder-latent space
            'nz_mid': 32,  # number of hidden units in fully connected layer
            'nz_mid_lstm': 32,
            'n_lstm_layers': 1,
            'n_processing_layers': 3,  # Number of layers in MLPs
            'reactive': True,  # if False, adds recurrent cell to policy
            'enc_params_checkpoint': None,  # specify pretrained encoder weights to load for training
            'finetune_enc': False,
            'checkpt_path': None,
            'train_first_action_only': False,  # if True, only trains on initial action of sequence
            'n_conv_layers': None,  # Number of conv layers. Can be of format 'n-<int>' for any int for relative spec
        }

        # misc params
        default_dict.update({
            'use_skips': False,
            'dense_rec_type': None,
            'device': None,
            'randomize_length': False,
            'randomize_start': False,
        })

        # add new params to parent params
        parent_params = super()._default_hparams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def build_network(self, build_encoder=True):
        self.encoder = Encoder(self._hp)
        if not self._hp.reactive:
            # recurrent policy consumes the fused [obs_enc, goal_enc] at every step
            self.policy = RecurrentPolicyModule(self._hp, 2*self._hp.nz_enc, self._hp.n_actions)
        else:
            self.policy = Predictor(self._hp, 2*self._hp.nz_enc, self._hp.n_actions)

    def forward(self, inputs, phase='train'):
        """
        forward pass at training time
        :param inputs: AttrDict with traj_seq, I_g (and optionally precomputed enc_traj_seq)
        :param phase: unused here
        :return: AttrDict with key 'actions' holding the predicted action sequence
        """
        if not 'enc_traj_seq' in inputs:
            # encode either only the first frame or the whole sequence
            enc_traj_seq, _ = self.encoder(inputs.traj_seq[:, 0]) if self._hp.train_first_action_only \
                                else batch_apply(self.encoder, inputs.traj_seq)
            if self._hp.train_first_action_only: enc_traj_seq = enc_traj_seq[:, None]
            # stop gradients into a frozen pretrained encoder
            enc_traj_seq = enc_traj_seq.detach() if self.detach_enc else enc_traj_seq

        enc_goal, _ = self.encoder(inputs.I_g)
        n_dim = len(enc_goal.shape)
        # tile the goal encoding over time and concatenate with per-step encodings
        fused_enc = torch.cat((enc_traj_seq, enc_goal[:, None].repeat(1, enc_traj_seq.shape[1], *([1]*(n_dim-1)))), dim=2)
        #fused_enc = torch.cat((enc_traj_seq, enc_goal[:, None].repeat(1, enc_traj_seq.shape[1], 1, 1, 1)), dim=2)

        if self._hp.reactive:
            actions_pred = batch_apply(self.policy, fused_enc)
        else:
            policy_output = self.policy(fused_enc)
            actions_pred = policy_output

        # remove last time step to match ground truth if training on full sequence
        actions_pred = actions_pred[:, :-1] if not self._hp.train_first_action_only else actions_pred

        output = AttrDict()
        output.actions = remove_spatial(actions_pred) if len(actions_pred.shape) > 3 else actions_pred
        return output

    def loss(self, inputs, outputs):
        losses = AttrDict()

        # action prediction loss, masked so zero-padded timesteps don't contribute
        n_actions = outputs.actions.shape[1]
        losses.action_reconst = L2Loss(1.0)(outputs.actions, inputs.actions[:, :n_actions],
                                            weights=broadcast_final(inputs.pad_mask[:, :n_actions], inputs.actions))

        # compute total loss
        #total_loss = torch.stack([loss[1].value * loss[1].weight for loss in losses.items()]).sum()
        #losses.total = AttrDict(value=total_loss)
        # losses.total = total_loss*torch.tensor(np.nan) # for checking if backprop works

        return losses

    def get_total_loss(self, inputs, losses):
        # weighted sum of all individual loss terms
        total_loss = torch.stack([loss[1].value * loss[1].weight for loss in losses.items()]).sum()
        return AttrDict(value=total_loss)

    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        super().log_outputs(outputs, inputs, losses, step, log_images, phase)
        if log_images and self._hp.use_convs:
            self._logger.log_pred_actions(outputs, inputs, 'pred_actions', step, phase)

    @contextmanager
    def val_mode(self, *args, **kwargs):
        # no special validation-mode state to toggle for this model
        yield

    @property
    def has_image_input(self):
        return self._hp.use_convs
class TestTimeBCModel(BehavioralCloningModel):
    """Inference-time wrapper for the BC model: moves to GPU when available and
    loads the latest checkpoint from _hp.checkpt_path."""

    def __init__(self, params, logger):
        super().__init__(params, logger)
        if torch.cuda.is_available():
            self.cuda()
            self.device = torch.device('cuda')
        else:
            self.device = torch.device('cpu')
        # NOTE(review): init_hidden_var is expected to exist for recurrent policies,
        # but it is not set anywhere in the code visible here — confirm it is
        # created during build_network / by the recurrent policy module
        if not self._hp.reactive: self.init_hidden_var = self.init_hidden_var.to(self.device)

        assert self._hp.checkpt_path is not None
        weights_file = CheckpointHandler.get_resume_ckpt_file('latest', self._hp.checkpt_path)
        success = CheckpointHandler.load_weights(weights_file, self)
        if not success: raise ValueError("Could not load checkpoint from {}!".format(weights_file))

    def forward(self, inputs):
        """Single-step action prediction from current image I_0 and goal image I_g.

        :param inputs: dict with 'I_0', 'I_g' and (for recurrent policies) 'hidden_var'
        :return: tuple of (predicted action, new hidden state or None)
        """
        # convert all inputs to tensors on the model's device
        for k in inputs:
            if inputs[k] is None:
                continue
            if not isinstance(inputs[k], torch.Tensor):
                inputs[k] = torch.Tensor(inputs[k])
            if not inputs[k].device == self.device:
                inputs[k] = inputs[k].to(self.device)

        enc, _ = self.encoder(inputs['I_0'])
        enc_goal, _ = self.encoder(inputs['I_g'])
        fused_enc = torch.cat((enc, enc_goal), dim=1)
        if self._hp.reactive:
            action_pred = self.policy(fused_enc)
            hidden_var = None
        else:
            # carry the recurrent state across calls; fall back to initial state
            hidden_var = self.init_hidden_var if inputs.hidden_var is None else inputs.hidden_var
            policy_output = self.policy(fused_enc[:, None], hidden_var)
            action_pred, hidden_var = policy_output.output, policy_output.hidden_state[:, 0]
        if self._hp.use_convs:
            # strip spatial dims; take the first timestep when a time axis is present
            return remove_spatial(action_pred if len(action_pred.shape)==4 else action_pred[:, 0]), hidden_var
        else:
            return action_pred, hidden_var
<file_sep>""" This file defines an agent for the MuJoCo simulator environment. """
import copy
import os
import cv2
import matplotlib.pyplot as plt
import numpy as np
from gcp.planning.infra.policy.policy import get_policy_args
from gcp.planning.infra.utils.im_utils import resize_store, npy_to_gif
from tensorflow.contrib.training import HParams
class Image_Exception(Exception):
    """Raised on image-related failures during a rollout; caught in
    GeneralAgent.sample() to retry the trajectory."""
    def __init__(self):
        pass
class Environment_Exception(Exception):
    """Signals an environment failure; not raised in the code visible here —
    presumably thrown by environment implementations."""
    def __init__(self):
        pass
class GeneralAgent(object):
"""
All communication between the algorithms and MuJoCo is done through
this class.
"""
N_MAX_TRIALS = 100
    def __init__(self, hyperparams, start_goal_list=None):
        """
        :param hyperparams: dict of agent hyperparameters (see _default_hparams)
        :param start_goal_list: optional per-trajectory start/goal configurations,
            indexed as [i_traj, 0] for start and [i_traj, 1] for goal
        """
        self._hp = self._default_hparams()
        self.override_defaults(hyperparams)

        self.T = self._hp.T
        self._start_goal_list = start_goal_list
        # per-trajectory state, populated during rollouts
        self._goal = None
        self._goal_seq = None
        self._goal_image = None
        self._demo_images = None
        self._reset_state = None
        self._setup_world(0)
def override_defaults(self, config):
"""
:param config: override default valus with config dict
:return:
"""
for name, value in config.items():
print('overriding param {} to value {}'.format(name, value))
if value == getattr(self._hp, name):
raise ValueError("attribute is {} is identical to default value!!".format(name))
if name in self._hp and self._hp.get(name) is None: # don't do a type check for None default values
setattr(self._hp, name, value)
else: self._hp.set_hparam(name, value)
    def _default_hparams(self):
        # Default agent hyperparameters; experiment configs override these
        # through override_defaults().
        default_dict = {
            'T': None,  # number of rollout timesteps
            'adim': None,  # action dimension, filled from env in _setup_world
            'sdim': None,  # state dimension, filled from env in _setup_world
            'ncam': 1,  # number of cameras, filled from env in _setup_world
            'rejection_sample': False,  # repeatedly attempt to collect a trajectory if error occurs
            'type': None,
            'env': None,  # tuple of (env class, env params dict)
            'image_height': 48,
            'image_width': 64,
            'nchannels': 3,
            'data_save_dir': '',
            'log_dir': '',
            'make_final_gif': True,  # whether to make final gif
            'make_final_gif_freq': 1,  # final gif, frequency
            'make_final_gif_pointoverlay': False,
            'gen_xml': (True, 1),  # whether to generate xml, and how often
            'start_goal_confs': None,
            'show_progress': False,  # save per-step progress images during rollouts
            'state_resets': False,  # reset the simulator state zeroing velocities according to policies replan frequency
            'do_not_save_images': False  # dataset savers will not save images if True
        }

        # add new params to parent params
        parent_params = HParams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params
    def _setup_world(self, itr):
        """
        Helper method for handling setup of the MuJoCo world.
        Args:
            itr: trajectory index; selects the start/goal configuration from
                 self._start_goal_list when one was provided.
        """
        env_type, env_params = self._hp.env
        if self._start_goal_list is not None:
            env_params['init_pos'] = self._start_goal_list[itr, 0]
            env_params['goal_pos'] = self._start_goal_list[itr, 1]
        self.env = env_type(env_params, self._reset_state)

        # mirror environment dimensions into the agent hyperparams
        self._hp.adim = self.adim = self.env.adim
        self._hp.sdim = self.sdim = self.env.sdim
        self._hp.ncam = self.ncam = self.env.ncam
        self.num_objects = self.env.num_objects
    def sample(self, policy, i_traj):
        """
        Runs a trial and constructs a new sample containing information
        about the trial. Retries the rollout up to N_MAX_TRIALS times when it
        fails or raises an Image_Exception.

        :param policy: policy to roll out
        :param i_traj: trajectory index, used for env regeneration and gif logging
        :return: tuple of (agent_data, obs_dict, policy_outs)
        """
        self.i_traj = i_traj
        if self._hp.gen_xml[0]:
            # periodically regenerate the environment XML
            if i_traj % self._hp.gen_xml[1] == 0 and i_traj > 0:
                self._setup_world(i_traj)

        traj_ok, obs_dict, policy_outs, agent_data = False, None, None, None
        i_trial = 0
        imax = self.N_MAX_TRIALS
        while not traj_ok and i_trial < imax:
            i_trial += 1
            try:
                agent_data, obs_dict, policy_outs = self.rollout(policy, i_trial, i_traj)
                traj_ok = agent_data['traj_ok']
            except Image_Exception:
                traj_ok = False
            if not traj_ok:
                print('traj_ok: ', traj_ok)

        print('needed {} trials'.format(i_trial))

        if self._hp.make_final_gif or self._hp.make_final_gif_pointoverlay:
            if i_traj % self._hp.make_final_gif_freq == 0:
                self.save_gif(i_traj, self._hp.make_final_gif_pointoverlay)
                # self.plot_endeff_traj(obs_dict)

        self._reset_state = None  # avoid reusing the same reset state
        return agent_data, obs_dict, policy_outs
    def _post_process_obs(self, env_obs, agent_data, initial_obs=False, stage=None):
        """
        Handles conversion from the environment observations, to agent observation
        space. Observations are accumulated over time, and images are resized to match
        the given image_height x image_width dimensions.

        Original images from cam index 0 are added to buffer for saving gifs (if needed)

        Data accumulated over time is cached into an observation dict and returned. Data specific to each
        time-step is returned in agent_data

        :param env_obs: observations dictionary returned from the environment
        :param agent_data: per-timestep agent data dictionary, updated in place
        :param initial_obs: Whether or not this is the first observation in rollout
        :param stage: optional stage label stored alongside the observations
        :return: obs: dictionary of observations up until (and including) current timestep
        """
        agent_img_height = self._hp.image_height
        agent_img_width = self._hp.image_width

        if stage is not None:
            env_obs['stage'] = stage

        if initial_obs:
            # allocate accumulation buffers for the full rollout (T steps + initial obs)
            T = self._hp.T + 1
            self._agent_cache = {}
            for k in env_obs:
                if k == 'images':
                    if 'obj_image_locations' in env_obs:
                        self.traj_points = []
                    self._agent_cache['images'] = np.zeros((T, self._hp.ncam, agent_img_height, agent_img_width, self._hp.nchannels), dtype=np.uint8)
                elif isinstance(env_obs[k], np.ndarray):
                    obs_shape = [T] + list(env_obs[k].shape)
                    self._agent_cache[k] = np.zeros(tuple(obs_shape), dtype=env_obs[k].dtype)
                else:
                    # non-array observations are accumulated in plain lists
                    self._agent_cache[k] = []
            self._cache_cntr = 0

        t = self._cache_cntr
        self._cache_cntr += 1

        point_target_width = agent_img_width

        obs = {}
        if self._hp.show_progress:
            # save a progress image of the first camera for this timestep
            plt.imshow(env_obs['images'][0])
            path = self._hp.log_dir + '/verbose/traj{}/'.format(self.i_traj)
            if not os.path.exists(path):
                os.makedirs(path)
            plt.savefig(path + '/im{}.png'.format(t))

        for k in env_obs:
            if k == 'images':
                resize_store(t, self._agent_cache['images'], env_obs['images'])
                self.gif_images_traj.append(self._agent_cache['images'][t,0])  # only take first camera
            elif k == 'obj_image_locations':
                self.traj_points.append(copy.deepcopy(env_obs['obj_image_locations'][0]))  # only take first camera
                # rescale pixel coordinates to the agent's (resized) image width
                env_obs['obj_image_locations'] = np.round((env_obs['obj_image_locations'] *
                                                           point_target_width / env_obs['images'].shape[2])).astype(
                    np.int64)
                self._agent_cache['obj_image_locations'][t] = env_obs['obj_image_locations']
            elif isinstance(env_obs[k], np.ndarray):
                self._agent_cache[k][t] = env_obs[k]
            else:
                self._agent_cache[k].append(env_obs[k])
            # expose the history up to and including the current timestep
            obs[k] = self._agent_cache[k][:self._cache_cntr]
            if k == 'topdown_image':
                self.topdown_images.append((self._agent_cache['topdown_image'][t]*255).astype(np.uint8))  # only take first camera

        if 'obj_image_locations' in env_obs:
            agent_data['desig_pix'] = env_obs['obj_image_locations']
        # attach per-trajectory context (goal, demos, reset state) when available
        if self._goal_image is not None:
            agent_data['goal_image'] = self._goal_image
        if self._goal is not None:
            agent_data['goal'] = self._goal
        if self._demo_images is not None:
            agent_data['demo_images'] = self._demo_images
        if self._reset_state is not None:
            agent_data['reset_state'] = self._reset_state
            obs['reset_state'] = self._reset_state

        return obs
def _required_rollout_metadata(self, agent_data, traj_ok, t, i_tr):
"""
Adds meta_data such as whether the goal was reached and the total number of time steps
into the agent dictionary that is MANDATORY for later parts of pipeline
:param agent_data: Agent data dictionary
:param traj_ok: Whether or not rollout succeeded
:return: None
"""
agent_data['term_t'] = t - 1
if self.env.has_goal():
agent_data['goal_reached'] = self.env.goal_reached()
agent_data['traj_ok'] = traj_ok
    def rollout(self, policy, i_trial, i_traj):
        """
        Rolls out policy for T timesteps
        :param policy: Class extending abstract policy class. Must have act method (see arg passing details)
        :param i_trial: Rollout attempt index (increment each time trajectory fails rollout)
        :param i_traj: Trajectory index, forwarded to the policy via get_policy_args
        :return: - agent_data: Dictionary of extra statistics/data collected by agent during rollout
                 - obs: dictionary of environment's observations. Each key maps to that values time-history
                 - policy_ouputs: list of policy's outputs at each timestep.
        Note: tfrecord saving assumes all keys in agent_data/obs/policy_outputs point to np arrays or primitive int/float
        """
        self._init()
        agent_data, policy_outputs = {}, []
        # Take the sample.
        t = 0
        done = self._hp.T <= 0  # degenerate case: a zero-length episode terminates immediately
        initial_env_obs, self._reset_state = self.env.reset(self._reset_state)
        obs = self._post_process_obs(initial_env_obs, agent_data, True, stage=0)
        policy.reset()
        while not done:
            """
            Every time step send observations to policy, acts in environment, and records observations
            Policy arguments are created by
            - populating a kwarg dict using get_policy_arg
            - calling policy.act with given dictionary
            Policy returns an object (pi_t) where pi_t['actions'] is an action that can be fed to environment
            Environment steps given action and returns an observation
            """
            if self._hp.state_resets: # reset the simulator to state so that mujoco-based cem-planning can strat from there.
                if t % policy.replan_interval == 0 and t != 0:
                    print('_____')
                    print('gen_ag: performing state reset ')
                    self.env.qpos_reset(obs['qpos_full'][t], obs['qvel_full'][t])
                    new_obs = self.env._get_obs()
                    print('qpos of t ', new_obs['qpos'])
                    print('qvel of t', new_obs['qvel'])
                    print('_____')
            pi_t = policy.act(**get_policy_args(policy, obs, t, i_traj, agent_data))
            policy_outputs.append(pi_t)
            # policies may signal early termination through a 'done' key
            if 'done' in pi_t:
                done = pi_t['done']
            try:
                obs = self._post_process_obs(self.env.step(pi_t['actions']), agent_data)
                # obs = self._post_process_obs(self.env.step(copy.deepcopy(pi_t['actions']), stage=stage), agent_data, stage=pi_t['policy_index'])
            except Environment_Exception as e:
                # environment signaled an invalid transition -> mark rollout as failed
                print(e)
                return {'traj_ok': False}, None, None
            if (self._hp.T - 1) == t or obs['env_done'][-1]: # environements can include the tag 'env_done' in the observations to signal that time is over
                done = True
            t += 1
            print('t', t)
        traj_ok = self.env.valid_rollout()
        if self._hp.rejection_sample:
            # NOTE(review): rollout counts as successful only if the goal was reached;
            # condition `rejection_sample > i_trial` presumably caps the number of retries — confirm.
            if self._hp.rejection_sample > i_trial:
                assert self.env.has_goal(), 'Rejection sampling enabled but env has no goal'
                traj_ok = self.env.goal_reached()
                print('goal_reached', self.env.goal_reached())
        self._required_rollout_metadata(agent_data, traj_ok, t, i_trial)
        obs.update(self.env.add_extra_obs_info())
        return agent_data, obs, policy_outputs
def save_gif(self, i_traj, overlay=False):
if self.traj_points is not None and overlay:
colors = [tuple([np.random.randint(0, 256) for _ in range(3)]) for __ in range(self.num_objects)]
for pnts, img in zip(self.traj_points, self.gif_images_traj):
for i in range(self.num_objects):
center = tuple([int(np.round(pnts[i, j])) for j in (1, 0)])
cv2.circle(img, center, 4, colors[i], -1)
file_path = self._hp.log_dir
# plt.switch_backend('tkagg')
# plt.imshow(self.gif_images_traj[0])
# plt.show()
npy_to_gif(self.gif_images_traj, file_path + '/verbose/traj{}/video'.format(i_traj)) # todo make extra folders for each run?
if False: #len(self.topdown_images) > 0:
npy_to_gif(self.topdown_images, file_path + '/verbose/traj{}/topdownvideo'.format(i_traj))
def plot_endeff_traj(self, obs_dict):
endeff_pos = obs_dict['regression_state'][:,:3]
xpos = endeff_pos[:,0]
zpos = endeff_pos[:,2]
plt.switch_backend('TkAgg')
plt.plot(xpos, zpos)
plt.show()
def _init(self):
"""
Set the world to a given model
"""
self.gif_images_traj, self.topdown_images, self.traj_points = [], [], None
<file_sep>import torch.nn as nn
from blox import AttrDict
from blox.tensor.ops import batchwise_index
from blox.torch.losses import KLDivLoss2
from blox.torch.variational import FixedPrior
class Inference(nn.Module):
    """Inference network producing the posterior q(z) for subgoal latents.

    Wraps an inference distribution ``q``; when ``q`` is a FixedPrior the module
    degenerates to deterministically evaluating the prior on the left encoding.
    """
    def __init__(self, hp, q):
        super().__init__()
        self._hp = hp
        self.q = q
        # a FixedPrior has no learned posterior -> inference is deterministic
        self.deterministic = isinstance(self.q, FixedPrior)

    def forward(self, inputs, e_l, e_r, start_ind, end_ind, timestep):
        assert timestep is not None
        output = AttrDict(gamma=None)
        if self.deterministic:
            output.q_z = self.q(e_l)
            return output

        enc_seq = inputs.inf_enc_seq
        key_seq = inputs.inf_enc_key_seq
        # timestep may hold several queries per sequence; fold them into the batch dim
        queries_per_seq = int(timestep.shape[0] / key_seq.shape[0])
        if queries_per_seq > 1:
            folded = batchwise_index(enc_seq, timestep.reshape(-1, queries_per_seq).long())
            e_tilde = folded.reshape([-1] + list(folded.shape[2:]))
        else:
            e_tilde = batchwise_index(enc_seq, timestep[:, 0].long())

        output.q_z = self.q(e_l, e_r, e_tilde)
        return output

    def loss(self, q_z, p_z, weights=1):
        """Returns the KL term, or an empty dict when the latent is zero-dimensional."""
        if q_z.mu.numel() == 0:
            return {}
        kl_fn = KLDivLoss2(self._hp.kl_weight, breakdown=1, free_nats_per_dim=self._hp.free_nats)
        return AttrDict(kl=kl_fn(q_z, p_z, weights=weights, log_error_arr=True))
<file_sep>"""based on: https://github.com/google-research/google-research/blob/master/frechet_video_distance/example.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import tensorflow as tf
from blox import AttrDict
from gcp.evaluation.fvd import frechet_video_distance as fvd
# Number of videos must be divisible by 16.
NUMBER_OF_VIDEOS = 16
def get_trainer_args():
    """Parses the command-line paths to the ground-truth and predicted .npy sequence files."""
    parser = argparse.ArgumentParser()
    for flag, descr in (("--gt", "path to ground truth sequence .npy file"),
                        ("--pred", "path to predicted sequence .npy file")):
        parser.add_argument(flag, help=descr)
    return parser.parse_args()
def main(argv):
    """Computes the Frechet Video Distance (FVD) between two sets of video sequences.

    Embeds every (variable-length) sequence with the I3D network one sequence at a
    time, then evaluates the FVD between the two embedding sets.
    :param argv: unused; signature required by tf.app.run.
    """
    args = get_trainer_args()

    def load_videos(path):
        # loads [batch, T, C, H, W] data in [-1, 1]; returns [batch, T, W, H, C] scaled to [0...255]
        print("Loading trajectories from {}".format(path))
        if not path.endswith('.npy'): raise ValueError("Can only read in .npy files!")
        seqs = (np.load(path).transpose(0, 1, 3, 4, 2) + 1) / 2
        assert len(seqs.shape) == 5  # need [batch, T, W, H, C] input data
        assert seqs.shape[-1] == 3  # assume 3-channeled seq with channel in last dim
        if seqs.max() <= 1: seqs *= 255  # scale up to [0...255] if data is still in [0...1]
        return seqs

    gt_seqs = load_videos(args.gt)
    pred_seqs = load_videos(args.pred)
    assert gt_seqs.shape == pred_seqs.shape
    batch, time, h, w, c = gt_seqs.shape
    # NOTE(review): n_batches is computed but never used below
    n_batches = int(np.floor(batch / NUMBER_OF_VIDEOS))  # needs to be dividable by NUMBER_OF_VIDEOS
    print("Evaluating batch of {} sequences of shape {}...".format(NUMBER_OF_VIDEOS, (time, h, w, c)))
    # Step 1: collect all embeddings (needs to run in loop bc network can only handle batch_size 16)
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    # effective sequence length: index of the first all-black frame; 0 means no padding found
    end_ind = np.argmax(np.all(np.abs(gt_seqs) < 1e-6, axis=(2, 3, 4)), axis=-1)  # check for black images
    end_ind[end_ind == 0] = time
    embeddings = AttrDict()
    for key, seq in [['gt', gt_seqs], ['pred', pred_seqs]]:
        stored_embeds = []
        for i, s in enumerate(seq):
            length = end_ind[i]
            if length < 10: continue  # skip very short sequences (presumably too short for I3D — confirm)
            with tf.Graph().as_default():
                # construct embedding graph
                seq_ph = tf.placeholder(dtype=tf.float32, shape=(1, length, h, w, c))
                embed = fvd.create_id3_embedding(fvd.preprocess(seq_ph, (224, 224)))
                with tf.Session(config=config) as sess:
                    sess.run(tf.global_variables_initializer())
                    sess.run(tf.tables_initializer())
                    feed_dict = {seq_ph: s[:length][None]}
                    #feed_dict = {seq_ph: np.concatenate((s[:length][None], np.zeros([NUMBER_OF_VIDEOS-1, length] + s.shape[1:])))}
                    print("{} - Seq {} - Length: {}".format(key, i, length))
                    e = sess.run(embed, feed_dict=feed_dict)
                    stored_embeds.append(e[0])
        embeddings[key] = np.stack(stored_embeds)
    print("Generated embeddings!")
    # Step 2: evaluate the FVD
    with tf.Graph().as_default():
        gt_embed_ph = tf.placeholder(dtype=tf.float32, shape=embeddings.gt.shape)
        pred_embed_ph = tf.placeholder(dtype=tf.float32, shape=embeddings.pred.shape)
        result = fvd.calculate_fvd(gt_embed_ph, pred_embed_ph)
        with tf.Session(config=config) as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.tables_initializer())
            feed_dict = {gt_embed_ph: embeddings.gt, pred_embed_ph: embeddings.pred}
            print("FVD is: %.2f." % sess.run(result, feed_dict=feed_dict))
if __name__ == "__main__":
    # tf.app.run parses TF flags and then invokes main(argv)
    tf.app.run(main)
<file_sep>import argparse
import numpy as np
import torch
import models
from blox import batch_apply
from blox.basic_types import map_dict
def get_trainer_args():
    """Parses evaluation arguments: sequence file paths, batch size, and GPU flag."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--gt", help="path to ground truth sequence .npy file")
    parser.add_argument("--pred", help="path to predicted sequence .npy file")
    parser.add_argument("--batch_size", type=int, default=8,
                        help="batch size for network forward pass")
    parser.add_argument('--use_gpu', action='store_true', help='turn on flag to use GPU')
    return parser.parse_args()
def main():
    """Computes the mean LPIPS distance between ground-truth and predicted video sequences.

    Loads two .npy files of shape [batch, T, C, H, W] in range [-1, 1], masks out
    padding frames (all-black / all-gray), and averages the perceptual distance
    over all valid frames.
    """
    args = get_trainer_args()

    def load_videos(path):
        # loads sequences as torch tensors; expects [batch, T, C=3, H, W] in [-1, 1]
        print("Loading trajectories from {}".format(path))
        if not path.endswith('.npy'): raise ValueError("Can only read in .npy files!")
        seqs = np.load(path)
        assert len(seqs.shape) == 5  # need [batch, T, C, H, W] input data
        assert seqs.shape[2] == 3  # assume 3-channeled seq with channel in last dim
        seqs = torch.Tensor(seqs)
        if args.use_gpu: seqs = seqs.cuda()
        return seqs  # range [-1, 1]

    gt_seqs = load_videos(args.gt)
    pred_seqs = load_videos(args.pred)
    print('shape: ', gt_seqs.shape)
    assert gt_seqs.shape == pred_seqs.shape
    n_seqs, time, c, h, w = gt_seqs.shape
    n_batches = int(np.floor(n_seqs / args.batch_size))

    # Frame validity mask for variable-length sequences: a frame counts unless it is
    # all-black (== -1) or all-gray (== 0). Uses `~`/`&` because `1 - bool_tensor`
    # and `bool * bool` arithmetic are unsupported for bool tensors in modern torch.
    black = torch.all(torch.all(torch.all((gt_seqs + 1.0).abs() < 1e-6, dim=-1), dim=-1), dim=-1)
    gray = torch.all(torch.all(torch.all(gt_seqs.abs() < 1e-6, dim=-1), dim=-1), dim=-1)
    mask = ~black & ~gray

    # Initializing the model
    model = models.PerceptualLoss(model='net-lin', net='alex', use_gpu=args.use_gpu)

    # run forward pass to compute LPIPS distances
    distances = []
    for b in range(n_batches):
        x = gt_seqs[b * args.batch_size: (b + 1) * args.batch_size]
        y = pred_seqs[b * args.batch_size: (b + 1) * args.batch_size]
        distances.append(batch_apply(model, x, y))
    distances = torch.cat(distances)

    # distances only covers the first n_batches * batch_size sequences (the remainder
    # is dropped), so align the mask length before indexing to avoid a shape mismatch.
    mask = mask[:distances.shape[0]]
    mean_distance = distances[mask].mean()
    print("LPIPS distance: {}".format(mean_distance))
if __name__ == "__main__":
    # script entry point
    main()
<file_sep>import os
from blox import AttrDict
current_dir = os.path.dirname(os.path.realpath(__file__))
from experiments.prediction.base_configs import gcp_sequential as base_conf
# Action-conditioned, non-goal-conditioned variant of the sequential GCP base config.
configuration = AttrDict(base_conf.configuration)
model_config = base_conf.model_config
model_config.update({
    'action_conditioned_pred': True,   # condition the predictor on actions
    'non_goal_conditioned': True,      # disable goal conditioning
    'nz_vae': 0,                       # zero-dimensional VAE latent
    'var_inf': 'deterministic',        # deterministic variational inference
})
<file_sep>import os.path
from blox import AttrDict
from gcp.planning.infra.agent.benchmarking_agent import BenchmarkAgent
from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import TopdownMultiroom3dEnv
from gcp.planning.planner_policy import ImageCEMPolicy
from gcp.planning.cem.sampler import ImageHierarchicalTreeCEMSampler
from gcp.planning.cem.cem_planner import HierarchicalImageCEMPlanner
from gcp.planning.cem.cost_fcn import ImageLearnedCostEstimate
from experiments.prediction.base_configs import gcp_tree as base_conf
BASE_DIR = '/'.join(str.split(__file__, '/')[:-1])
current_dir = os.path.dirname(os.path.realpath(__file__))
# Environment: top-down multiroom 3D navigation with 25 rooms.
env_params = {
    'init_pos': None,       # None -> sampled / taken from start_goal_confs
    'goal_pos': None,
    'n_rooms': 25,
    'heading_smoothing': 0.1,
    'crop_window': 40,
}
# Benchmark agent rolling out on pre-recorded start/goal configurations.
agent = AttrDict(
    type=BenchmarkAgent,
    env=(TopdownMultiroom3dEnv, env_params),
    T=400,
    image_height=32,
    image_width=32,
    start_goal_confs=os.environ['GCP_DATA_DIR'] + '/nav_25rooms/start_goal_configs/raw',
)
# Hierarchical prediction model config, overriding the gcp_tree base config.
h_config = AttrDict(base_conf.model_config)
h_config.update({
    'state_dim': 2,
    'ngf': 16,
    'max_seq_len': 200,
    'hierarchy_levels': 8,
    'nz_mid_lstm': 512,
    'n_lstm_layers': 3,
    'nz_mid': 128,
    'nz_enc': 128,
    'nz_vae': 256,
    'regress_length': True,
    'attach_state_regressor': True,
    'attach_inv_mdl': True,
    'inv_mdl_params': AttrDict(
        n_actions=2,
        use_convs=False,
        build_encoder=False,
    ),
    'untied_layers': True,
    'decoder_distribution': 'discrete_logistic_mixture',
})
# remove this key from the inherited base config (presumably unsupported here — confirm)
h_config.pop("add_weighted_pixel_copy")
# CEM planner settings for hierarchical image-based planning.
cem_params = AttrDict(
    prune_final=True,
    horizon=200,
    action_dim=256,
    verbose=True,
    n_iters=3,
    batch_size=10,
    n_level_hierarchy=8,
    sampler=ImageHierarchicalTreeCEMSampler,
    sampling_rates_per_layer=[10, 10],
    cost_fcn=ImageLearnedCostEstimate,
    cost_config=AttrDict(
        checkpt_path=os.environ['GCP_EXP_DIR'] + '/prediction/25room/gcp_tree/weights'
    ),
)
# Policy wrapping the CEM planner; replan_interval > T means the plan is made once per episode.
policy = AttrDict(
    type=ImageCEMPolicy,
    params=h_config,
    checkpt_path=cem_params.cost_config.checkpt_path,
    cem_planner=HierarchicalImageCEMPlanner,
    cem_params=cem_params,
    replan_interval=agent.T+2,
    #load_epoch=10,
    closed_loop_execution=True
)
# Top-level config consumed by the benchmark runner.
config = AttrDict(
    current_dir=current_dir,
    start_index=0,
    end_index=99,
    agent=agent,
    policy=policy,
    save_format=['raw'],
    data_save_dir=os.environ['GCP_EXP_DIR'] + '/control/nav_25rooms/gcp_tree',
    split_train_val_test=False,
    traj_per_file=1,
)
<file_sep>import numpy as np
import torch
import torch.nn as nn
from blox import batch_apply, AttrDict
from blox.torch.dist import normalize
from blox.torch.losses import BCELogitsLoss, PenaltyLoss
from blox.torch.modules import ExponentialDecayUpdater
from blox.torch.ops import batch_cdist, cdist
from blox.torch.subnetworks import Predictor
from gcp.prediction.models.adaptive_binding.probabilistic_dtw import soft_dtw
from gcp.prediction.models.tree.frame_binding import BaseBinding
from gcp.prediction.utils.tree_utils import depthfirst2breadthfirst
from gcp.prediction.models.adaptive_binding.binding_loss import LossAveragingCriterion, WeightsHacker
class AdaptiveBinding(BaseBinding):
    """Adaptively binds predicted tree nodes to ground-truth frames via a soft
    dynamic-time-warping posterior over matchings."""

    def build_network(self):
        # temperature of the matching distribution; optionally learned or exponentially decayed
        self.temp = nn.Parameter(self._hp.matching_temp * torch.ones(1))
        if not self._hp.learn_matching_temp:
            self.temp.requires_grad_(False)
        if self._hp.matching_temp_tenthlife != -1:
            assert not self._hp.learn_matching_temp
            self.matching_temp_updater = ExponentialDecayUpdater(
                self.temp, self._hp.matching_temp_tenthlife, min_limit=self._hp.matching_temp_min)
        # predicts from a pair of latents whether two adjacent frames are redundant (used for pruning)
        self.distance_predictor = Predictor(self._hp, self._hp.nz_enc * 2, 1, spatial=False)
        self.criterion = LossAveragingCriterion(self._hp)

    def get_w(self, pad_mask, inputs, model_output, log=False):
        """ Matches according to the dynamic programming-based posterior. """
        # TODO add a prior over w - this is currently trained as an AE.
        # (oleg) it seems that a uniform prior wouldn't change the computation
        # A prior on specific edges would change it somewhat similarly to weighting the cost (but not exactly with p)
        # Get cost matrix
        tree = model_output.tree
        if self._hp.matching_type == 'dtw_image':
            # cost = pairwise distance in image space
            imgs = tree.df.images
            cost_matrix = batch_cdist(imgs, inputs.traj_seq, reduction='mean')
        elif self._hp.matching_type == 'dtw_latent':
            # cost = pairwise distance in latent space
            img_latents = tree.df.e_g_prime
            cost_matrix = batch_cdist(img_latents, inputs.enc_traj_seq, reduction='mean')
        # TODO remove the detachment to propagate the gradients!
        cost_matrix = WeightsHacker.hack_weights_df(cost_matrix)
        w_matrix = soft_dtw(cost_matrix.detach() / self.temp, inputs.end_ind)
        # TODO write this up
        # (oleg) There is some magic going on here. To define a likelihood, we define a mixture model for each frame
        # that consists of the nodes and the respective weights. We normalize the weights for it to be a distribution.
        # Then, we invoke Jensen's!
        # Since we expect all elements in the mixture to be either x or have zero probability, the bound is tight.
        w_matrix = normalize(w_matrix, 1)
        return depthfirst2breadthfirst(w_matrix)

    def prune_sequence(self, inputs, outputs, key='images'):
        """Removes frames that the distance predictor classifies as redundant."""
        seq = getattr(outputs.tree.df, key)
        latent_seq = outputs.tree.df.e_g_prime
        distances = batch_apply(self.distance_predictor,
                                latent_seq[:, :-1].contiguous(), latent_seq[:, 1:].contiguous())[..., 0]
        outputs.distance_predictor = AttrDict(distances=distances)
        # distance_predictor outputs true if the two frames are too close
        close_frames = torch.sigmoid(distances) > self._hp.learned_pruning_threshold
        # Add a placeholder for the first frame
        close_frames = torch.cat([torch.zeros_like(close_frames[:, [0]]), close_frames], 1)
        pruned_seq = [seq[i][~close_frames[i]] for i in range(seq.shape[0])]
        return pruned_seq

    def loss(self, inputs, outputs):
        """Adds matching-specific penalty losses on top of the base binding loss."""
        losses = super().loss(inputs, outputs)
        if self._hp.top_bias != 1.0 and WeightsHacker.can_get_index():
            losses.n_top_bias_nodes = PenaltyLoss(self._hp.supervise_match_weight) \
                (1 - WeightsHacker.get_n_top_bias_nodes(inputs.traj_seq, outputs.tree.bf.match_dist))
        if WeightsHacker.can_get_d2b(inputs):
            # diagnostic: distances of the top tree nodes to annotated bottleneck frames
            dist = WeightsHacker.distance2bottleneck(inputs, outputs)
            losses.distance_to_bottleneck_1 = PenaltyLoss(0)(dist[0])
            losses.distance_to_bottleneck_2 = PenaltyLoss(0)(dist[1])
            losses.distance_to_bottleneck_3 = PenaltyLoss(0)(dist[2])
        if self._hp.log_d2b_3x3maze:
            top_nodes = outputs.tree.bf.images[:, :self._hp.n_top_bias_nodes].reshape(-1, 2)

            def get_bottleneck_states():
                # door positions in the maze, derived from the state-space scale
                if inputs.traj_seq_states.max() > 13.5:
                    scale = 45  # 5x5 maze
                else:
                    scale = 27  # 3x3 maze
                start = -0.5 * scale
                end = 0.5 * scale
                doors_x = torch.linspace(start, end, self._hp.log_d2b_3x3maze + 1).to(self._hp.device)[1:-1]
                doors_y = torch.linspace(start, end, self._hp.log_d2b_3x3maze * 2 + 1).to(self._hp.device)[1:-1:2]
                n_x = doors_x.shape[0]
                n_y = doors_y.shape[0]
                doors_x = doors_x.repeat(n_y)
                doors_y = doors_y.repeat_interleave(n_x)
                doors = torch.stack([doors_x, doors_y], 1)
                # And the other way around
                return torch.cat([doors, doors.flip(1)], 0)

            doors = get_bottleneck_states()
            dist = cdist(top_nodes, doors)
            avg_dist = dist.min(-1).values.mean()
            losses.distance_to_doors = PenaltyLoss(self._hp.supervise_match_weight)(avg_dist)
        if 'distance_predictor' in outputs:
            df_match_dists = outputs.tree.df.match_dist
            best_matching = df_match_dists.argmax(-1)
            targets = best_matching[:, 1:] == best_matching[:, :-1]  # 1 if frames are too close, i.e. if best matching gt is the same
            losses.distance_predictor = BCELogitsLoss()(outputs.distance_predictor.distances, targets.float())
        return losses

    def reconstruction_loss(self, inputs, outputs, weights):
        """Reconstruction loss on the soft (matching-weighted) frame estimates."""
        losses = AttrDict()
        outputs.soft_matched_estimates = self.criterion.get_soft_estimates(outputs.gt_match_dists,
                                                                           outputs.tree.bf.images)
        losses.update(self.criterion.loss(
            outputs, inputs.traj_seq, weights, inputs.pad_mask, self._hp.dense_img_rec_weight, self.decoder.log_sigma))
        return losses
<file_sep>import os
from contextlib import contextmanager
import numpy as np
import torch
from skimage.io import imsave
from blox import AttrDict
from blox.basic_types import dict_concat
from blox.tensor.ops import batchwise_index
from blox.torch.evaluation import ssim, psnr, mse
from blox.utils import timed
from gcp.prediction.utils.visualization import plot_pruned_tree, make_gif
def make_image_strips(input, gen_seq, phase, outdir, ind):
    """
    Saves a two-row image strip (ground truth on top, generated frames below) to
    '<outdir>/outfile<ind>_<phase>.png'.
    :param input: ground-truth sequence tensor, [t, channel, r, c] in [-1, 1]
    :param gen_seq: generated sequence tensor, t, channel, r, c
    :param phase: phase string used in the output file name
    :param outdir: output directory, created if missing
    :param ind: batch index to make images for
    :return: None
    """
    def to_row(seq):
        # [t, c, r, w] tensor -> horizontal [r, t*w, c] numpy strip
        arr = seq.detach().cpu().numpy()
        frames = [np.transpose(f.squeeze(), (1, 2, 0)) for f in np.split(arr, arr.shape[0], axis=0)]
        return np.concatenate(frames, axis=1)

    # map both rows from [-1, 1] to [0, 1] and stack them vertically
    gt_row = (to_row(input) + 1) / 2
    gen_row = (to_row(gen_seq) + 1) / 2
    out = np.concatenate([gt_row, gen_row], axis=0)
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    imsave(outdir + '/outfile{}_{}.png'.format(ind, phase), out)
class Evaluator:
    """Performs evaluation of metrics etc."""
    N_PLOTTED_ELEMENTS = 5          # number of batch elements visualized per dump
    LOWER_IS_BETTER_METRICS = ['mse']
    HIGHER_IS_BETTER_METRICS = ['psnr', 'ssim']

    def __init__(self, model, logdir, hp, log_to_file, tb_logger, top_comp_metric='mse'):
        """
        :param model: prediction model; its dense_rec module extracts comparable sequences
        :param logdir: base log directory, metrics go to '<logdir>/metrics'
        :param hp: hyperparameters (batch_size, metric_pruning_scheme, top_of_100_eval, ...)
        :param log_to_file: if True log to files on disk, else to Tensorboard
        :param tb_logger: Tensorboard logger used when log_to_file is False
        :param top_comp_metric: metric used to pick the best of the sampled predictions
        """
        self._logdir = logdir + '/metrics'
        self._logger = FileEvalLogger(self._logdir) if log_to_file else TBEvalLogger(logdir, tb_logger)
        self._hp = hp
        self._pruning_scheme = hp.metric_pruning_scheme
        self._dense_rec_module = model.dense_rec
        self.use_images = model._hp.use_convs
        self._top_of_100 = hp.top_of_100_eval
        self._top_of = 100
        self._top_comp_metric = top_comp_metric
        if not os.path.exists(self._logdir): os.makedirs(self._logdir)
        self.evaluation_buffer = None
        self.full_evaluation = None
        self.dummy_env = None

    def reset(self):
        """Clears all accumulated evaluation results."""
        self.evaluation_buffer = None
        self.full_evaluation = None

    def _erase_eval_buffer(self):
        """Re-initializes the per-batch evaluation buffer."""
        def get_init_array(val):
            n_eval_samples = self._top_of if self._top_of_100 else 1
            return val * np.ones((self._hp.batch_size, n_eval_samples))
        # np.object was removed in NumPy 1.24 -- use the builtin `object` dtype instead
        self.evaluation_buffer = AttrDict(ssim=get_init_array(0.),
                                          psnr=get_init_array(0.),
                                          mse=get_init_array(np.inf),
                                          gen_images=np.empty(self._hp.batch_size, dtype=object),
                                          rand_seqs=np.empty(self._hp.batch_size, dtype=object))
        for b in range(self._hp.batch_size):
            self.evaluation_buffer.rand_seqs[b] = []
        if not self.use_images:
            # image-based metrics are only computed for image models
            self.evaluation_buffer.pop('ssim')
            self.evaluation_buffer.pop('psnr')

    def eval_single(self, inputs, outputs, sample_n=0):
        """Evaluates one model sample for every batch element and updates the buffer."""
        input_images = inputs.traj_seq
        bsize = input_images.shape[0]
        store_states = "traj_seq_states" in inputs and (inputs.traj_seq_states.shape[-1] == 2 or
                                                        inputs.traj_seq_states.shape[-1] == 5)
        # TODO paralellize DTW
        for b in range(bsize):
            input_seq = input_images[b, :inputs.end_ind[b]+1]
            input_len = input_seq.shape[0]
            gen_seq, matching_output = self._dense_rec_module.get_sample_with_len(b, input_len, outputs, inputs, self._pruning_scheme)
            input_seq, gen_seq = input_seq[1:-1], gen_seq[1:-1]  # crop first and last frame for eval (conditioning frames)
            state_seq = inputs.traj_seq_states[b, :input_len] if store_states else None
            full_gen_seq, gen_seq = self.compute_metrics(b, gen_seq, input_seq, outputs, sample_n)
            if self._is_better(self.evaluation_buffer[self._top_comp_metric][b, sample_n],
                               self.evaluation_buffer[self._top_comp_metric][b]):
                # log visualization results for the best sample only, replace if better
                self.evaluation_buffer.gen_images[b] = AttrDict(gt_seq=input_images.cpu().numpy()[b],
                                                                gen_images=gen_seq,
                                                                full_gen_seq=full_gen_seq,
                                                                matching_outputs=matching_output,
                                                                state_seq=state_seq)
            if sample_n < self.N_PLOTTED_ELEMENTS:
                # additionally store the first few samples at predicted length for visualization
                pred_len = outputs.end_ind[b].data.cpu().numpy() + 1 if 'end_ind' in outputs else input_len
                pred_len_seq, _ = self._dense_rec_module.get_sample_with_len(b, pred_len, outputs, inputs,
                                                                             self._pruning_scheme)
                self.evaluation_buffer.rand_seqs[b].append(pred_len_seq.data.cpu().numpy())

    def compute_metrics(self, b, gen_seq, input_seq, outputs, sample_n):
        """Computes mse (and psnr/ssim for image models) for one sample and stores them."""
        input_seq = input_seq.detach().cpu().numpy()
        gen_seq = gen_seq.detach().cpu().numpy()
        full_gen_seq = torch.stack([n.subgoal.images[b] for n in outputs.tree.depth_first_iter()]) \
            .detach().cpu().numpy() if 'tree' in outputs \
                                       and outputs.tree.subgoals is not None else gen_seq
        self.evaluation_buffer.mse[b, sample_n] = mse(gen_seq, input_seq)
        if 'psnr' in self.evaluation_buffer:
            self.evaluation_buffer.psnr[b, sample_n] = psnr(gen_seq, input_seq)
        if 'ssim' in self.evaluation_buffer:
            self.evaluation_buffer.ssim[b, sample_n] = ssim(gen_seq, input_seq)
        return full_gen_seq, gen_seq

    @timed("Eval time for batch: ")
    def eval(self, inputs, outputs, model):
        """Evaluates one batch; when top_of_100_eval is set, re-samples the model 100x per input."""
        self._erase_eval_buffer()
        if self._top_of_100:
            for n in range(self._top_of):
                outputs = model(inputs)
                self.eval_single(inputs, outputs, sample_n=n)
        else:
            self.eval_single(inputs, outputs)
        self._flush_eval_buffer()

    def _flush_eval_buffer(self):
        """Appends the current batch buffer to the full-evaluation accumulator."""
        if self.full_evaluation is None:
            self.full_evaluation = self.evaluation_buffer
        else:
            dict_concat(self.full_evaluation, self.evaluation_buffer)

    def dump_results(self, it):
        """Writes all accumulated metrics and visualizations for iteration `it`, then resets."""
        self.dump_metrics(it)
        if self.use_images:
            self.dump_seqs(it)
            if 'matching_outputs' in self.full_evaluation.gen_images[0] \
                    and self.full_evaluation.gen_images[0].matching_outputs is not None:
                self.dump_matching_vis(it)
        self.reset()

    def dump_trees(self, it):
        """Visualizes full (unpruned) prediction trees, optionally next to GT/pred strips."""
        no_pruning = lambda x, b: False  # show full tree, not pruning anything
        img_dict = self.full_evaluation.gen_images[0]
        plot_matched = img_dict.outputs.tree.match_eval_idx is not None
        assert Evaluator.N_PLOTTED_ELEMENTS <= len(img_dict.gen_images)  # can currently only max plot as many trees as in batch

        def make_padded_seq_img(tensor, target_width, prepend=0):
            # lays a sequence out horizontally and zero-pads it to target_width
            assert len(tensor.shape) == 4  # assume [n_frames, channels, res, res]
            n_frames, channels, res, _ = tensor.shape
            seq_im = np.transpose(tensor, (1, 2, 0, 3)).reshape(channels, res, n_frames * res)
            concats = [np.zeros((channels, res, prepend * res), dtype=np.float32)] if prepend > 0 else []
            concats.extend([seq_im, np.zeros((channels, res, target_width - seq_im.shape[2] - prepend * res), dtype=np.float32)])
            seq_im = np.concatenate(concats, axis=-1)
            return seq_im

        with self._logger.log_to('trees', it, 'image'):
            tree_imgs = plot_pruned_tree(img_dict.outputs.tree, no_pruning, plot_matched).detach().cpu().numpy()
            for i in range(Evaluator.N_PLOTTED_ELEMENTS):
                im = tree_imgs[i]
                if plot_matched:
                    gt_seq_im = make_padded_seq_img(img_dict.gt_seq[i], im.shape[-1])
                    pred_seq_im = make_padded_seq_img(img_dict.gen_images[i], im.shape[-1], prepend=1)  # prepend for cropped first frame
                    im = np.concatenate((gt_seq_im, im, pred_seq_im), axis=1)
                im = np.transpose(im, [1, 2, 0])
                self._logger.log(im)

    def dump_metrics(self, it):
        """Logs mean/std of each metric over the best-of-N samples."""
        with self._logger.log_to('results', it, 'metric'):
            best_idxs = self._get_best_idxs(self.full_evaluation[self._top_comp_metric])
            print_st = []
            for metric in sorted(self.full_evaluation):
                vals = self.full_evaluation[metric]
                if metric in ['psnr', 'ssim', 'mse']:
                    if metric not in self.evaluation_buffer: continue
                    best_vals = batchwise_index(vals, best_idxs)
                    print_st.extend([best_vals.mean(), best_vals.std(), vals.std(axis=1).mean()])
                    self._logger.log(metric, vals if self._top_of_100 else None, best_vals)
            print(*print_st, sep=',')

    def dump_seqs(self, it):
        """Dumps all predicted sequences and all ground truth sequences in separate .npy files"""
        DUMP_KEYS = ['gt_seq', 'gen_images', 'full_gen_seq']
        batch = len(self.full_evaluation.gen_images)
        _, c, h, w = self.full_evaluation.gen_images[0].gt_seq.shape
        stacked_seqs = AttrDict()
        # pre-allocate zero-padded arrays since sequences have variable length
        for key in DUMP_KEYS:
            if key == 'full_gen_seq':
                time = max([i[key].shape[0] for i in self.full_evaluation.gen_images])
            else:
                time = self.full_evaluation.gen_images[0]['gt_seq'].shape[0] - 1
            stacked_seqs[key] = np.zeros((batch, time, c, h, w), dtype=self.full_evaluation.gen_images[0][key].dtype)
        for b, seqs in enumerate(self.full_evaluation.gen_images):
            stacked_seqs['gt_seq'][b] = seqs['gt_seq'][1:]  # skip the first (conditioning frame)
            stacked_seqs['gen_images'][b, :seqs['gen_images'].shape[0]] = seqs['gen_images']
            stacked_seqs['full_gen_seq'][b, :seqs['full_gen_seq'].shape[0]] = seqs['full_gen_seq']
        for b, seqs in enumerate(self.full_evaluation.rand_seqs[:self.N_PLOTTED_ELEMENTS]):
            key = 'seq_samples_{}'.format(b)
            time = self.full_evaluation.gen_images[0]['gt_seq'].shape[0] - 1
            stacked_seqs[key] = np.zeros((self.N_PLOTTED_ELEMENTS, time, c, h, w), dtype=self.full_evaluation.rand_seqs[0][0].dtype)
            for i, seq_i in enumerate(seqs):
                stacked_seqs[key][i, :seq_i.shape[0]] = seq_i[:time]
        for key in DUMP_KEYS:
            with self._logger.log_to(key, it, 'array'):
                self._logger.log(stacked_seqs[key])
        self.dump_gifs(stacked_seqs, it)
        if self._hp.n_rooms is not None and self.full_evaluation.gen_images[0].state_seq is not None:
            self.dump_traj_overview(it)

    def dump_matching_vis(self, it):
        """Dumps some visualization of the matching procedure."""
        with self._logger.log_to('matchings', it, 'image'):
            try:
                for i in range(min(Evaluator.N_PLOTTED_ELEMENTS, self.full_evaluation.gen_images.shape[0])):
                    im = self._dense_rec_module.eval_binding.vis_matching(self.full_evaluation.gen_images[i].matching_outputs)
                    self._logger.log(im)
            except AttributeError:
                # bindings without a vis_matching method are silently skipped (best-effort)
                print("Binding does not provide matching visualization")
                pass

    def dump_gifs(self, seqs, it):
        """Dumps gif visualizations of pruned and full sequences."""
        with self._logger.log_to('pruned_seq', it, 'gif'):
            im = make_gif([torch.Tensor(seqs.gt_seq), (torch.Tensor(seqs.gen_images))])
            self._logger.log(im)
        with self._logger.log_to('full_gen_seq', it, 'gif'):
            im = make_gif([torch.Tensor(seqs.full_gen_seq)])
            self._logger.log(im)
        for key in seqs:
            if 'seq_samples' in key:
                with self._logger.log_to(key, it, 'gif'):
                    im = make_gif([torch.Tensor(seqs[key])])
                    self._logger.log(im)

    def dump_traj_overview(self, it):
        """Dumps top-down overview of trajectories in Multiroom datasets."""
        from gcp.planning.infra import Multiroom3dEnv
        if self.dummy_env is None:
            self.dummy_env = Multiroom3dEnv({'n_rooms': self._hp.n_rooms}, no_env=True)
        with self._logger.log_to('trajectories', it, 'image'):
            for b in range(min(Evaluator.N_PLOTTED_ELEMENTS, self.full_evaluation.gen_images.shape[0])):
                im = self.dummy_env.render_top_down(self.full_evaluation.gen_images[b].state_seq.data.cpu().numpy())
                self._logger.log(im * 2 - 1)

    def _is_better(self, val, other):
        """Comparison function for different metrics.
        returns True if val is "better" than any of the values in the array other
        """
        if self._top_comp_metric in self.LOWER_IS_BETTER_METRICS:
            return np.all(val <= other)
        elif self._top_comp_metric in self.HIGHER_IS_BETTER_METRICS:
            return np.all(val >= other)
        else:
            raise ValueError("Currently only support comparison on the following metrics: {}. Got {}."
                             .format(self.LOWER_IS_BETTER_METRICS + self.HIGHER_IS_BETTER_METRICS, self._top_comp_metric))

    def _get_best_idxs(self, vals):
        """Returns per-batch-element index of the best sample according to the top metric."""
        assert len(vals.shape) == 2  # assumes batch in first dimension, N samples in second dim
        if self._top_comp_metric in self.LOWER_IS_BETTER_METRICS:
            return np.argmin(vals, axis=1)
        else:
            return np.argmax(vals, axis=1)
class EvalLogger:
    """Abstract base class for evaluation loggers.

    Subclasses implement `log_to` (a context manager setting the logging target)
    and the type-specific `_log_*` hooks; `log` dispatches on the active type.
    """
    def __init__(self, log_dir):
        self._log_dir = log_dir
        # logging context, set by subclasses inside `log_to`
        self.log_target = None
        self.log_type = None
        self.log_tag = None
        self.log_counter = None

    @contextmanager
    def log_to(self, tag, it, type):
        """Sets logging context (e.g. what file to log to)."""
        raise NotImplementedError

    def log(self, *vals):
        """Implements logging within the 'log_to' context."""
        assert self.log_target is not None  # cannot log without 'log_to' context
        dispatch = {'metric': self._log_metric,
                    'image': self._log_img,
                    'array': self._log_array,
                    'gif': self._log_gif}
        handler = dispatch.get(self.log_type)
        if handler is not None:
            handler(*vals)
        self.log_counter += 1

    def _log_metric(self, name, vals, best_vals):
        raise NotImplementedError

    def _log_img(self, img):
        raise NotImplementedError

    def _log_array(self, array):
        np.save(os.path.join(self.log_target, "{}_{}.npy".format(self.log_tag, self.log_counter)), array)

    def _log_gif(self, gif):
        pass

    def _make_dump_dir(self, tag, it):
        # per-tag, per-iteration dump directory, created on demand
        dump_dir = os.path.join(self._log_dir, '{}/it_{}'.format(tag, it))
        if not os.path.exists(dump_dir): os.makedirs(dump_dir)
        return dump_dir
class FileEvalLogger(EvalLogger):
    """Logs evaluation results on disk."""
    @contextmanager
    def log_to(self, tag, it, type):
        """Creates logging file (or dump directory) and resets the context afterwards.
        :param tag: name of the logged quantity
        :param it: iteration index, part of the file/directory name
        :param type: one of 'metric', 'image', 'array', 'gif'
        """
        self.log_type, self.log_tag, self.log_counter = type, tag, 0
        if type == 'metric':
            self.log_target = open(os.path.join(self._log_dir, '{}_{}.txt'.format(tag, it)), 'w')
        elif type == 'image' or type == 'array':
            self.log_target = self._make_dump_dir(tag, it)
        elif type == 'gif':
            self.log_target = 'no log'
        else:
            raise ValueError("Type {} is not supported for logging in eval!".format(type))
        # close the file and clear the context even if the body raises
        try:
            yield
        finally:
            if type == 'metric':
                self.log_target.close()
            self.log_target, self.log_type, self.log_tag, self.log_counter = None, None, None, None

    def _log_metric(self, name, vals, best_vals):
        # NOTE(review): message says "standard error of the mean" but .std() is the
        # standard deviation — confirm which statistic is intended.
        line = 'mean {} {}, standard error of the mean (SEM) {}'.format(name, best_vals.mean(), best_vals.std())
        line += ', mean std of 100 samples {}\n'.format(vals.std(axis=1).mean()) if vals is not None else '\n'
        self.log_target.write(line)
        print(line)

    def _log_img(self, img):
        #assert -1.0 <= img.min() and img.max() <= 1.0 # expect image to be in range [-1...1]
        imsave(os.path.join(self.log_target, "{}_{}.png".format(self.log_tag, self.log_counter)), (img + 1) / 2)
class TBEvalLogger(EvalLogger):
    """Logs evaluation results to Tensorboard."""
    def __init__(self, log_dir, tb_logger):
        super().__init__(log_dir)
        self._tb_logger = tb_logger     # Tensorboard summary writer wrapper
        self.log_step = None            # global step used for all TB entries of the current context

    @contextmanager
    def log_to(self, tag, it, type):
        """Sets the TB logging context; arrays still fall back to an on-disk dump directory."""
        self.log_type, self.log_tag, self.log_counter, self.log_step = type, tag, 0, it
        if type == 'array':
            # arrays cannot be written to TB -> reuse the on-disk .npy dump from the base class
            self.log_target = self._make_dump_dir(tag, it)
        else:
            self.log_target = 'TB'
        yield
        self.log_target, self.log_type, self.log_tag, self.log_counter, self.log_step = None, None, None, None, None

    def _log_metric(self, name, vals, best_vals):
        """Logs mean/std scalars for best_vals (and, if given, all vals) under the group tag.

        NOTE(review): 'top100'/'all100' naming suggests stats over 100 samples -- confirm
        against the evaluator that produces vals/best_vals.
        """
        self._tb_logger.log_scalar(best_vals.mean(), self.group_tag + '/metric/{}/top100_mean'.format(name), self.log_step, '')
        self._tb_logger.log_scalar(best_vals.std(), self.group_tag + '/verbose/{}/top100_std'.format(name), self.log_step, '')
        if vals is not None:
            self._tb_logger.log_scalar(vals.mean(), self.group_tag + '/verbose/{}/all100_mean'.format(name), self.log_step, '')
            self._tb_logger.log_scalar(vals.std(axis=1).mean(), self.group_tag + '/verbose/{}/all100_std'.format(name), self.log_step, '')

    def _log_img(self, img):
        """Logs a single image to TB; converts HWC [-1, 1] input to CHW [0, 1]."""
        #assert -1.0 <= img.min() and img.max() <= 1.0   # expect image to be in range [-1...1]
        if not isinstance(img, torch.Tensor): img = torch.tensor(img)
        img = (img.permute(2, 0, 1) + 1) / 2
        self._tb_logger.log_images(img[None], self.group_tag + '/{}'.format(self.log_counter), self.log_step, '')

    def _log_gif(self, gif):
        """Logs a gif/video tensor to TB under the current group tag and counter."""
        self._tb_logger.log_video(gif, self.group_tag + '/{}'.format(self.log_counter), self.log_step, '')

    @property
    def group_tag(self):
        """TB tag prefix for the current logging context (requires an active 'log_to')."""
        assert self.log_tag is not None     # need to set logging context first
        return 'eval/{}'.format(self.log_tag)
<file_sep>from blox import AttrDict
from gcp.prediction.models.tree.tree import TreeModel
from gcp.prediction.utils.logger import HierarchyLogger
# Top-level run configuration: which model class to train and which logger to use.
configuration = {
    'model': TreeModel,
    'logger': HierarchyLogger,
}
configuration = AttrDict(configuration)     # wrap for attribute-style access

# Hyperparameters for the hierarchical (tree-structured) prediction model.
# NOTE(review): semantics of the string options below are defined in TreeModel -- confirm there.
model_config = {
    'one_step_planner': 'sh_pred',
    'hierarchy_levels': 7,                  # number of levels in the subgoal tree
    'binding': 'loss',
    'seq_enc': 'conv',
    'tree_lstm': 'split_linear',
    'lstm_init': 'mlp',
    'add_weighted_pixel_copy': True,
    'dense_rec_type': 'node_prob',
}
<file_sep>import matplotlib; matplotlib.use('Agg')
import pickle as pkl
import numpy as np
import argparse
from gcp.planning.infra.envs.miniworld_env.utils.multiroom2d_layout import define_layout
from gcp.planning.infra.policy.prm_policy.prm_policy import plan_room_seq
def n_room_path(start, end, layout):
    """Returns the length of the room sequence planned between rooms 'start' and 'end'."""
    room_seq = plan_room_seq(start, end, layout.doors)
    return len(room_seq)
def parse_args():
    """Builds the command line interface of the eval script and parses sys.argv."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--path", help="path to the config file directory")
    parser.add_argument('--n_rooms', type=int, default=9,
                        help='number of rooms in navigation layout')
    parser.add_argument('--n_tasks', type=int, default=100,
                        help='number of task instances in eval set')
    parser.add_argument('--max_seq_len', type=int, default=200,
                        help='maximum length of sequence (for cost computation')
    return parser.parse_args()
def main():
    """Computes success rate, penalized path cost and room-count statistics for an
    evaluation rollout file ('full_traj' pickle) produced by the navigation agent."""
    args = parse_args()
    FILE = args.path
    rooms_per_side = int(np.sqrt(args.n_rooms))     # layout is assumed square (e.g. 3x3 = 9 rooms)
    layout = define_layout(rooms_per_side, None)
    with open(FILE, 'rb') as pickle_file:
        data = pkl.load(pickle_file)
    paths = data['full_traj']
    success, rooms_to_goal, rooms_traversed = 0, [], []
    penalized_length = 0.
    for i in range(args.n_tasks):
        # extract start / final / goal position and room
        # NOTE(review): '/ 27' presumably rescales raw env coordinates to layout units and the
        # y-flip matches the layout's coordinate convention -- confirm against the env.
        goal_pos = data['reset_state'][i]['goal'][-2:] / 27
        final_pos = paths[i][-1][:2] / 27
        start_pos = paths[i][0][:2] / 27
        goal_pos[1] *= -1; final_pos[1] *= -1; start_pos[1] *= -1
        goal_room = layout.coords2ridx(goal_pos[0], goal_pos[1])
        final_room = layout.coords2ridx(final_pos[0], final_pos[1])
        start_room = layout.coords2ridx(start_pos[0], start_pos[1])
        # compute success (task solved iff the agent ends in the goal room)
        if final_room == goal_room:
            success += 1
        # compute length (sum of per-step distances; failed tasks are charged max_seq_len)
        path = np.stack([p[:2] for p in paths[i]])
        path_len = np.sum(np.linalg.norm(path[1:] - path[:-1], axis=-1))
        penalized_length += path_len if final_room == goal_room else args.max_seq_len
        # compute number of rooms to goal / traversed
        rooms_to_goal += [n_room_path(final_room, goal_room, layout)]
        rooms_traversed += [n_room_path(start_room, final_room, layout)]
    print("Success: \t{}".format(success / args.n_tasks))
    print("Cost: \t{:.2f}".format(penalized_length / args.n_tasks))
    print("")
    print("Room2Goal: \t{}\t{}".format(np.mean(rooms_to_goal), np.std(rooms_to_goal)))
    print("RTravers: \t{}\t{}".format(np.mean(rooms_traversed), np.std(rooms_traversed)))

if __name__ == "__main__":
    main()
<file_sep>import os
import time
import warnings
import math
import numpy as np
from copy import deepcopy
from tensorboardX import SummaryWriter
import torch
from torch import autograd
from blox import AttrDict
from blox.basic_types import map_dict
from blox.utils import AverageMeter
from blox.utils import RecursiveAverageMeter
from gcp.prediction.training.checkpoint_handler import CheckpointHandler, NoCheckpointsException
from gcp.prediction.training.gcp_builder import GCPBuilder
from gcp.evaluation.compute_metrics import Evaluator
# Only report each unique warning once to avoid log spam during training.
warnings.simplefilter('once')
class ModelTrainer(GCPBuilder):
    """ This class defines the training loop of the GCP model"""

    def run(self):
        """Entry point: resumes from checkpoint if requested, then dispatches between
        training, plain validation and the two validation-sweep modes."""
        args = self.cmd_args
        model_conf = self.model._hp
        data_conf = self.train_loader.dataset.data_conf

        # TODO clean up resuming
        self.global_step = 0
        start_epoch = 0
        if args.resume or ('checkpt_path' in model_conf and model_conf.checkpt_path is not None):
            ckpt_path = model_conf.checkpt_path if 'checkpt_path' in model_conf else None
            start_epoch = self.resume(args.resume, ckpt_path)

        if args.val_sweep:
            # validate every 4th stored checkpoint of this experiment
            epochs = CheckpointHandler.get_epochs(os.path.join(self._hp.exp_path, 'weights'))
            for epoch in list(sorted(epochs))[::4]:
                self.resume(epoch)
                self.val()
            return

        if args.dataset_val_sweep:
            self.run_dataset_val_sweep(args, data_conf, model_conf)
            return

        ## Train
        if args.train:
            self.train(start_epoch)
        else:
            self.val()

    def resume(self, ckpt, path=None):
        """Loads model weights and optimizer state from checkpoint 'ckpt'.

        :param ckpt: checkpoint spec (epoch number or 'latest'); must not be None.
        :param path: optional experiment directory to load from (defaults to exp_path).
        :return: epoch to resume training from (0 if no checkpoint was found).
        """
        path = os.path.join(self._hp.exp_path, 'weights') if path is None else os.path.join(path, 'weights')
        assert ckpt is not None     # need to specify resume epoch for loading checkpoint
        try:
            weights_file = CheckpointHandler.get_resume_ckpt_file(ckpt, path)
        except NoCheckpointsException:
            return 0
        self.global_step, start_epoch, _ = \
            CheckpointHandler.load_weights(weights_file, self.model,
                                           load_step_and_opt=True, optimizer=self.optimizer,
                                           dataset_length=len(self.train_loader) * self._hp.batch_size,
                                           strict=self.cmd_args.strict_weight_loading)
        self.model.to(self.model.device)
        return start_epoch

    def run_dataset_val_sweep(self, args, data_conf, model_conf):
        """Runs validation on every dataset listed in dataset_spec.sweep_specs, rebuilding
        logger / model / loader per dataset."""
        assert 'sweep_specs' in data_conf.dataset_spec and data_conf.dataset_spec.sweep_specs   # need to define sweep_specs
        for sweep_spec in data_conf.dataset_spec.sweep_specs:
            print("\nStart eval of dataset {}...".format(sweep_spec.name))
            dc, mc = deepcopy(data_conf), deepcopy(model_conf)
            dc.dataset_spec.dataset_class = sweep_spec.dataset_class
            dc.dataset_spec.split = sweep_spec.split
            dc.dataset_spec.max_seq_len = sweep_spec.max_seq_len
            mc.update({i: dc.dataset_spec[i] for i in dc.dataset_spec if not isinstance(dc.dataset_spec[i], list)})
            # bug fix: was "is not 'svg'" (identity comparison on a string literal); use '!='
            if "dense_rec_type" not in mc or mc["dense_rec_type"] != "svg":
                # tree depth must cover the (new) maximum sequence length
                mc["hierarchy_levels"] = int(np.ceil(math.log2(sweep_spec.max_seq_len)))
            log_dir = self.log_dir + "_" + sweep_spec.name
            writer = SummaryWriter(log_dir)

            def rebuild_phase(logger, model, phase, n_repeat=1, dataset_size=-1):
                """Instantiates logger, model and data loader for the current sweep dataset."""
                logger = logger(log_dir, self._hp, max_seq_len=sweep_spec.max_seq_len, summary_writer=writer)
                model = model(mc, logger).to(self.device)
                model.device = self.device
                loader = self.get_dataset(args, model, dc, phase, n_repeat, dataset_size)
                return logger, model, loader

            self.logger, self.model, self.val_loader = \
                rebuild_phase(self._hp.logger, self._hp.model, "val", dataset_size=args.val_data_size)
            self.evaluator = Evaluator(self.model, log_dir, self._hp, self.cmd_args.metric, self.logger)
            if args.resume:
                self.resume(args.resume)
            else:
                self.resume("latest")
            self.val()
        print("...Done!")

    def train(self, start_epoch):
        """Main training loop: alternates train epochs, checkpointing and validation."""
        if not self.cmd_args.skip_first_val:
            self.val()

        for epoch in range(start_epoch, self._hp.num_epochs):
            self.train_epoch(epoch)

            if not self.cmd_args.dont_save:
                self.save_checkpoint(epoch)
            # full control validation only every 3rd epoch
            self.val(not (epoch - start_epoch) % 3)

    def save_checkpoint(self, epoch):
        """Stores model + optimizer state for 'epoch' under <exp_path>/weights."""
        state = {
            'epoch': epoch,
            'global_step': self.global_step,
            'state_dict': self.model.state_dict(),
            'optimizer': self.optimizer.state_dict(),
        }
        folder = os.path.join(self._hp.exp_path, 'weights')
        os.makedirs(folder, exist_ok=True)
        torch.save(state, os.path.join(folder, CheckpointHandler.get_ckpt_name(epoch)))

    @property
    def log_images_now(self):
        """True when image logging is due at the current global step."""
        return self.global_step % self.log_images_interval == 0 and self.cmd_args.log_images

    @property
    def log_outputs_now(self):
        """True when scalar/output logging is due at the current global step."""
        return self.global_step % self.cmd_args.log_outputs_interval == 0 or self.global_step % self.log_images_interval == 0

    def try_move_to_dev(self, data):
        """Best-effort move of 'data' to the training device; non-movable data is returned unchanged."""
        try:
            return data.to(self.device)
        except:
            # print('warning: could not move {} to gpu'.format(type(data)))
            return data

    def train_epoch(self, epoch):
        """Runs one epoch of training with detailed timing and periodic logging."""
        self.model.train()
        epoch_len = len(self.train_loader)
        end = time.time()
        batch_time = AverageMeter()
        upto_log_time = AverageMeter()
        data_load_time = AverageMeter()
        forward_backward_time = AverageMeter()
        self.log_images_interval = int(epoch_len / self.cmd_args.imepoch)

        print('starting epoch ', epoch)

        for self.batch_idx, sample_batched in enumerate(self.train_loader):
            data_load_time.update(time.time() - end)
            inputs = AttrDict(map_dict(self.try_move_to_dev, sample_batched))
            with self.training_context():
                self.optimizer.zero_grad()
                start_fw_bw = time.time()
                output = self.model(inputs)
                losses = self.model.loss(inputs, output)
                losses.total = self.model.get_total_loss(inputs, losses)
                losses.total.value.backward()
                self.call_hooks(inputs, output, losses, epoch)
                self.optimizer.step()
                self.model.step()
                forward_backward_time.update(time.time() - start_fw_bw)

            if self.cmd_args.train_loop_pdb:
                import pdb; pdb.set_trace()

            upto_log_time.update(time.time() - end)
            if self.log_outputs_now and not self.cmd_args.dont_save:
                self.model.log_outputs(output, inputs, losses, self.global_step,
                                       log_images=self.log_images_now, phase='train')
            batch_time.update(time.time() - end)
            end = time.time()

            if self.log_outputs_now:
                print('GPU {}: {}'.format(os.environ["CUDA_VISIBLE_DEVICES"] if self.use_cuda else 'none', self._hp.exp_path))
                print(('itr: {} Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                    self.global_step, epoch, self.batch_idx, len(self.train_loader),
                    100. * self.batch_idx / len(self.train_loader), losses.total.value.item())))

                print('avg time for loading: {:.2f}s, logs: {:.2f}s, compute: {:.2f}s, total: {:.2f}s'
                      .format(data_load_time.avg,
                              batch_time.avg - upto_log_time.avg,
                              upto_log_time.avg - data_load_time.avg,
                              batch_time.avg))
                togo_train_time = batch_time.avg * (self._hp.num_epochs - epoch) * epoch_len / 3600.
                print('ETA: {:.2f}h'.format(togo_train_time))

            if self.cmd_args.verbose_timing: print("avg FW/BW time: {:.3f}s/batch".format(forward_backward_time.avg))

            del output, losses
            self.global_step = self.global_step + 1

    def val(self, test_control=True):
        """Runs validation: eval metrics via the evaluator plus train-model loss on val data."""
        print('Running Testing')
        if self.cmd_args.test_prediction:
            start = time.time()
            losses_meter = RecursiveAverageMeter()
            infer_time = AverageMeter()

            # self.model.eval()
            with autograd.no_grad():
                for batch_idx, sample_batched in enumerate(self.val_loader):
                    inputs = AttrDict(map_dict(self.try_move_to_dev, sample_batched))
                    with self.model.val_mode(pred_length=False):
                        infer_start = time.time()
                        output = self.model(inputs, 'test')
                        infer_time.update(time.time() - infer_start)
                        if self.evaluator is not None:    # force eval on all batches for reduced noise
                            self.evaluator.eval(inputs, output, self.model)
                    # run train model to get NLL on validation data
                    output_train_mdl = self.model(inputs)
                    losses = self.model.loss(inputs, output_train_mdl)
                    losses.total = self.model.get_total_loss(inputs, losses)
                    losses_meter.update(losses)
                    del losses
                    del output_train_mdl
                    # if batch_idx == 0:
                    #     break

                if not self.cmd_args.dont_save:
                    if self.evaluator is not None:
                        self.evaluator.dump_results(self.global_step)
                        if self.cmd_args.metric:
                            print("Finished Evaluation! Exiting...")
                            exit(0)

                    self.model.log_outputs(
                        output, inputs, losses_meter.avg, self.global_step, log_images=self.cmd_args.log_images, phase='val')

                    print(('\nTest set: Average loss: {:.4f} in {:.2f}s\n'
                           .format(losses_meter.avg.total.value.item(), time.time() - start)))
            if self.cmd_args.verbose_timing: print("avg Inference time: {:.3f}s/batch".format(infer_time.avg))
            del output
if __name__ == '__main__':
    # Entry point: the builder base class parses cmd args / configs in its constructor.
    trainer = ModelTrainer()
    trainer.run()
<file_sep>from blox import AttrDict
from gcp.datasets.configs.nav_9rooms import Nav9Rooms
class Nav25Rooms(Nav9Rooms):
    """9-room navigation dataset scaled up to a 25-room (5x5) layout."""
    n_rooms = 25
# Dataset configuration for the 25-room navigation environment.
config = AttrDict(
    dataset_spec=AttrDict(
        max_seq_len=200,
        dataset_class=Nav25Rooms,
        split=AttrDict(train=0.994, val=0.006, test=0.00),  # phase fractions of the dataset
    ),
    n_rooms=25,
    crop_window=40,
)
<file_sep>from contextlib import contextmanager
import cv2
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
import torch
from PIL import Image
import numbers
from torch.nn.utils.rnn import pad_sequence as pad_sequence
from blox.tensor.ops import batchwise_index
from blox.tensor import ndim
PLOT_BINARY_DISTS = True    # if True, plots greyscale tiles instead of line plots for matching functions

class Params():
    """ Singleton holding visualization params """
    n_logged_samples = 3    # default number of samples visualized per batch

PARAMS = Params()   # global instance; temporarily overridden via the 'param' context manager
@contextmanager
def param(**kwargs):
    """ A context manager that sets global params to specified values for the context duration """
    # Remember the current values, then override them for the context's duration.
    previous = {name: getattr(Params, name) for name in kwargs}
    for name, value in kwargs.items():
        setattr(Params, name, value)
    yield
    # Restore the remembered values on exit.
    for name, value in previous.items():
        setattr(Params, name, value)
def fig2img(fig):
    """Converts a given figure handle to a 3-channel numpy image array.

    :param fig: matplotlib figure handle.
    :return: float32 image array with values in [0, 1].
    """
    fig.canvas.draw()
    w, h = fig.canvas.get_width_height()
    # np.fromstring is deprecated for binary data -> use np.frombuffer;
    # reshape instead of in-place .shape assignment (frombuffer arrays are read-only).
    buf = np.frombuffer(fig.canvas.tostring_argb(), dtype=np.uint8)
    buf = buf.reshape(w, h, 4)
    # roll channel order ARGB -> RGBA
    buf = np.roll(buf, 3, axis=2)
    w, h, d = buf.shape
    # tobytes() replaces the deprecated tostring()
    return np.array(Image.frombytes("RGBA", (w, h), buf.tobytes()), dtype=np.float32)[:, :, :3] / 255.
def plot_greyscale_dist(dist, h, w):
    """Renders a 1D distribution as a row of greyscale tiles of total size h x w x 3."""
    n_tiles = dist.shape[0]
    tile_width = int(w / n_tiles)
    stretched = np.repeat(dist, tile_width, axis=0)     # widen every entry to its tile width
    rows = np.repeat(stretched[None], h, axis=0)        # replicate vertically to height h
    return np.repeat(rows[..., None], 3, axis=-1)       # grey value -> 3 identical channels
def plot_dists(dists, h=400, w=400, dpi=10, linewidth=1.0):
    """Plots up to three 1D distributions as an image: a single dist is rendered as
    greyscale tiles (if PLOT_BINARY_DISTS), multiple dists as colored line plots."""
    if PLOT_BINARY_DISTS and len(dists) == 1:
        return plot_greyscale_dist(dists[0], h, w)
    COLORS = ['red', 'blue', 'green']
    assert len(dists) <= 3      # only have 3 different colors for now, add more if necessary!
    fig = plt.figure(figsize=(w / dpi, h / dpi), dpi=dpi)
    for dist, color in zip(dists, COLORS[:len(dists)]):
        plt.plot(dist, color=color, linewidth=linewidth)
        plt.ylim(0, 1)      # distributions are expected in [0, 1]
        plt.xlim(0, dist.shape[0]-1)
    plt.tight_layout()
    fig_img = fig2img(fig)
    plt.close(fig)      # free the pyplot figure to avoid leaking memory
    return fig_img
def plot_graph(array, h=400, w=400, dpi=10, linewidth=3.0):
    """Plots a 1D array (numpy or torch) as a line graph with grid and returns it as an image."""
    fig = plt.figure(figsize=(w / dpi, h / dpi), dpi=dpi)
    if isinstance(array, torch.Tensor):
        array = array.cpu().numpy()
    plt.xlim(0, array.shape[0] - 1)
    plt.xticks(fontsize=100)
    plt.yticks(fontsize=100)
    plt.plot(array)
    plt.grid()
    plt.tight_layout()
    fig_img = fig2img(fig)
    plt.close(fig)      # free the pyplot figure to avoid leaking memory
    return fig_img
def tensor2np(tensor, n_logged_samples=None):
    """Moves a tensor to a CPU numpy array; optionally keeps only the first n samples.
    None input is passed through unchanged."""
    if tensor is None:
        return tensor
    if n_logged_samples is not None:
        tensor = tensor[:n_logged_samples]
    return tensor.data.cpu().numpy()
def visualize(tensor):
    """ Visualizes the state. Returns blank image as default, otherwise calls the dataset method"""
    n_logged_samples = PARAMS.n_logged_samples
    # default: all-white images of shape (..., 3, img_sz, img_sz), first n samples only
    array = np.ones(tensor.shape[:-1] + (3, PARAMS.hp.img_sz, PARAMS.hp.img_sz), dtype=np.float32)[:n_logged_samples]
    if hasattr(PARAMS, 'visualize'):
        array = PARAMS.visualize(tensor, array, PARAMS.hp)  # 'visualize' is set in train_planner_mode
    return array
def imgtensor2np(tensor, n_logged_samples=None, gt_images=False):
    """Converts an image tensor in [-1, 1] to a numpy array in [0, 1].

    For state-based (non-conv) models the state is rendered via visualize() instead,
    unless 'gt_images' marks the input as actual ground-truth images.
    """
    if tensor is None: return tensor
    if not PARAMS.hp.use_convs and not gt_images:
        with param(n_logged_samples=n_logged_samples):
            return visualize(tensor[:n_logged_samples])
    return (tensor2np(tensor, n_logged_samples) + 1)/2
def np2imgtensor(array, device, n_logged_samples=None):
    """Maps a [0, 1] image array to a [-1, 1] torch tensor on the given device.

    :param array: numpy image array in [0, 1]; None is passed through unchanged.
    :param device: torch device for the result (bug fix: was accepted but ignored before).
    :param n_logged_samples: if given, only the first n samples are kept.
    """
    if array is None: return array
    if n_logged_samples is not None: array = array[:n_logged_samples]
    return torch.tensor(array * 2 - 1, device=device)
def action2img(action, res, channels):
    """Renders a single 2D action as an arrow image of size res x res.

    :param action: array with exactly 2 elements; direction of the arrow (y flipped
                   to match image coordinates).
    :param res: output resolution (square image).
    :param channels: number of image channels.
    """
    action_scale = 50   # how much action arrows should get magnified
    assert action.size == 2     # can only plot 2-dimensional actions
    img = np.zeros((res, res, channels), dtype=np.float32).copy()
    start_pt = res/2 * np.ones((2,))    # arrow starts at the image center
    end_pt = start_pt + action * action_scale * (res/2 - 1) * np.array([1, -1])     # swaps last dimension
    np2pt = lambda x: tuple(np.asarray(x, int))
    img = cv2.arrowedLine(img, np2pt(start_pt), np2pt(end_pt), (255, 255, 255), 1, cv2.LINE_AA, tipLength=0.2)
    # NOTE(review): drawing with color 255 and then scaling by 255 again gives values up to
    # 255*255 -- confirm the intended value range for downstream consumers.
    return img * 255.0
def batch_action2img(actions, res, channels):
    """Renders a batch of 2D action sequences into arrow images, one per (batch, step) entry."""
    batch, seq_len, _ = actions.shape
    im = np.empty((batch, seq_len, res, res, channels), dtype=np.float32)
    for b, s in np.ndindex(batch, seq_len):
        im[b, s] = action2img(actions[b, s], res, channels)
    return im
class channel_last:
    """ A function decorator that transposes the input and output image if necessary
    The first input to the function has to be the image, and the function must output an image back
    """
    def __init__(self):
        """ Decorator parameters """
        pass

    def __call__(self, func):
        """ Wrapping """
        def wrapper(img, *args, **kwargs):
            # move channels to the last axis before calling 'func', restore order afterwards
            transpose = self.is_channel_first(img.shape[-3:])
            if transpose:
                sh_offset = len(img.shape) - 3
                # builds a permutation that leaves leading (batch) dims untouched
                offset_order = lambda x: np.concatenate([np.arange(sh_offset), np.array(x) + sh_offset])
                img = ndim.permute(img, offset_order([1, 2, 0]))
            result = func(img, *args, **kwargs)
            if transpose:
                result = ndim.permute(result, offset_order([2, 0, 1]))
            return result
        return wrapper

    @staticmethod
    def is_channel_first(shape):
        """Heuristically decides whether a 3-dim image shape is channel-first.

        Returns True if channels come first, False if last; note that the function can
        fall through and return None when both ends look like channel dims and the
        img_sz check does not apply.
        """
        # The allowed channel dimesions are [1, 3]
        # If the last dim is not an allowed channel dimension
        if shape[2] not in [1, 3]:
            assert shape[0] in [1, 3]
            return True
        # If the first dim is not an allowed channel dimension
        if shape[0] not in [1, 3]:
            return False
        # If the last dim is image size but the first is not
        if hasattr(PARAMS, 'hp') and shape[2] == PARAMS.hp.img_sz and shape[0] != PARAMS.hp.img_sz:
            return True
@ndim.torched
@channel_last()
def draw_frame(img, prob):
    """
    Draws a 2-pixel border around the image whose color encodes 'prob' via the
    RdYlGn colormap (red for 0 ... green for 1).

    :param img: array dims x width x height x colors
    :param prob: array dims (scalars are broadcast over the leading dims)
    :return: copy of img with the colored frame (single-channel images pass through)
    """
    if img.shape[2] == 1: return img    # no color channels to draw a colored frame into
    if isinstance(prob, numbers.Number):
        prob = np.full_like(img[..., 0, 0, 0], prob)
    img = ndim.copy(img)    # don't mutate the caller's image
    cmap = cm.get_cmap('RdYlGn')
    rgb = cmap(prob[..., None, None])[..., :3]
    # paint left/right then top/bottom 2-pixel bands
    img[..., :, :2, :], img[..., :, -2:, :] = rgb, rgb
    img[..., :2, :, :], img[..., -2:, :, :] = rgb, rgb
    return img
def framed_action2img(action, prob, res, channel):
    """Draws frame around action image indicating the probability of usage."""
    return draw_frame(action2img(action, res, channel), prob)
def sort_actions_depth_first(model_output, attrs, n_logged_samples):
    """Sorts actions in depth-first ordering. attrs is a list with attribute names for the left/right
    distributions that should be sorted."""
    assert len(attrs) == 2      # need one left and one right action attribute
    tree = model_output.tree
    n_sg = (2 ** tree.depth) - 1
    dist_shape = [n_logged_samples, 2 * n_sg + 1]
    # append trailing dims of the attribute if it is more than a per-node scalar
    dist_shape = dist_shape + list(tree.subgoals[attrs[0]].shape[2:]) if len(tree.subgoals[attrs[0]].shape) > 2 else dist_shape
    match_dists = torch.zeros(dist_shape, device=tree.subgoals[attrs[0]].device)
    for i, segment in enumerate(tree.depth_first_iter()):
        # NOTE(review): left/right entries are placed symmetrically around each node's slot,
        # offset by 2**(depth-1) -- verify against the tree layout used in the plots.
        match_dists[:, 2 * (i + 1) - 2 ** (segment.depth - 1) - 1] = segment.subgoal[attrs[0]][:n_logged_samples]
        match_dists[:, 2 * (i + 1) + 2 ** (segment.depth - 1) - 1] = segment.subgoal[attrs[1]][:n_logged_samples]
    return match_dists
def plot_balanced_tree(model_output, elem="images"):
    """Plots all subgoals of the tree in a balanced format."""
    tree = model_output.tree
    max_depth = tree.depth
    batch = tree.subgoals[elem].shape[0]
    res = PARAMS.hp.img_sz
    n_logged_samples = PARAMS.n_logged_samples
    n_logged_samples = batch if n_logged_samples is None else n_logged_samples
    n_sg = (2 ** max_depth) - 1     # number of nodes in a full binary tree of this depth
    im_height = max_depth * res     # one row of tiles per tree level
    im_width = n_sg * res
    im = 0.7 * np.ones((n_logged_samples, 3, im_height, im_width))  # grey background
    # usage probability per node (if available) is visualized as a colored frame
    if 'gt_match_dists' in model_output:
        usage_prob = tree.df.match_dist.sum(2)
    elif 'existence_predictor' in model_output:
        usage_prob = model_output.existence_predictor.existence
    else:
        usage_prob = None
    usage_prob = tensor2np(usage_prob, n_logged_samples)
    for n, node in enumerate(tree.depth_first_iter()):
        level = max_depth - node.depth
        # Handle 5-dimensional masks
        im_batch = imgtensor2np(node.subgoal[elem][:, [0, 1, -1]], n_logged_samples)
        if usage_prob is not None:
            imgs = draw_frame(im_batch, usage_prob[:, n])
            im_batch = np.stack(imgs, 0)
        im[:, :, level*res : (level+1)*res, n*res : (n+1)*res] = im_batch
    return im
def plot_balanced_tree_with_actions(model_output, inputs, n_logged_samples, get_prob_fcn=None):
    """Plots the subgoal tree including predicted left/right actions, each framed by
    its usage probability (from 'get_prob_fcn' or the matching distributions)."""
    tree = model_output.tree
    batch, channels, res, _ = tree.subgoal.images.shape
    _, action_dim = tree.subgoal.a_l.shape
    max_depth = tree.depth
    n_sg = (2 ** max_depth) - 1
    im_height = (max_depth * 2 + 1) * res   # plot all actions (*2) and start/end frame again (+1)
    im_width = (n_sg + 2) * res
    im = np.asarray(0.7 * np.ones((n_logged_samples, im_height, im_width, 3)), dtype=np.float32)
    # insert start and goal frame
    if inputs is not None:
        im[:, :res, :res] = imgtensor2np(inputs.traj_seq[:n_logged_samples, 0], n_logged_samples).transpose(0, 2, 3, 1)
        im[:, :res, -res:] = imgtensor2np(batchwise_index(inputs.traj_seq[:n_logged_samples], model_output.end_ind[:n_logged_samples]),
                                          n_logged_samples).transpose(0, 2, 3, 1)
    if 'norm_gt_action_match_dists' in model_output:
        action_usage_prob = np.max(tensor2np(model_output.norm_gt_action_match_dists, n_logged_samples), axis=2)
    step = 1
    for i, segment in enumerate(tree):
        level = 2 * (max_depth - segment.depth + 1)
        dx = 2 ** (segment.depth - 2)   # horizontal offset of the left/right actions
        im[:, level * res : (level + 1) * res, step * res: (step + 1) * res] = \
            imgtensor2np(segment.subgoal.images[:n_logged_samples], n_logged_samples).transpose(0, 2, 3, 1)
        a_l, a_r = tensor2np(segment.subgoal.a_l, n_logged_samples), tensor2np(segment.subgoal.a_r, n_logged_samples)
        if get_prob_fcn is not None:
            usage_prob_l, usage_prob_r = get_prob_fcn(segment)
        else:
            usage_prob_l, usage_prob_r = action_usage_prob[:, 2*i], action_usage_prob[:, 2*i+1]
        for b in range(n_logged_samples):
            im[b, (level-1) * res : level * res, int((step-dx) * res): int((step - dx + 1) * res)] = \
                framed_action2img(a_l[b], usage_prob_l[b], res, channels)
            im[b, (level - 1) * res: level * res, int((step + dx) * res): int((step + dx + 1) * res)] = \
                framed_action2img(a_r[b], usage_prob_r[b], res, channels)
        step += 1
    return im
def plot_pruned_tree(tree, check_pruned_fcn=lambda x, b: x.pruned[b], plot_matched=False):
    """Plots subgoal tree, but only non-pruned nodes.
    'check_pruned_fcn' allows flexible definition of what 'pruned' means
    'plot_matched': if True, plot nodes at positions where they were matched
    """
    max_depth = tree.depth
    batch, channels, res, _ = tree.subgoal.images.shape
    n_sg = (2 ** max_depth) - 1
    im_height = max_depth * res
    im_width = n_sg * res
    im = 1.0 * torch.ones((batch, channels, im_height, im_width))   # white background
    step = 0
    for segment in tree:
        level = max_depth - segment.depth
        for b in range(batch):
            if check_pruned_fcn(segment, b): continue   # only plot non-pruned elements of the tree
            pos = segment.match_eval_idx[b] if plot_matched else step
            # bug fix: previous code read '(images[b]+1 / 2)', which adds 0.5 due to operator
            # precedence; images are rescaled from [-1, 1] to [0, 1] with '(x + 1) / 2' as in
            # the sibling plotting functions.
            im[b, :, level * res: (level + 1) * res, pos * res: (pos + 1) * res] = (segment.subgoal.images[b] + 1) / 2
        step += 1
    return im
def plot_val_tree(model_output, inputs, n_logged_samples=3):
    """Plots the predicted subgoal tree plus node/action existence probabilities (when
    the model predicts them), with start and goal frames in the top corners."""
    tree = model_output.tree
    batch, _, channels, res, _ = tree.subgoals.images.shape
    max_depth = tree.depth
    n_sg = (2 ** max_depth) - 1
    dpi = 10
    fig_height, fig_width = 2 * res, n_sg * res
    im_height, im_width = max_depth*res + fig_height, 2*res + fig_width
    im = np.asarray(0.7 * np.ones((n_logged_samples, im_height, im_width, 3)), dtype=np.float32)
    # plot existence probabilities
    if 'p_n_hat' in tree.subgoals:
        p_n_hat = tensor2np(tree.df.p_n_hat, n_logged_samples)
        for i in range(n_logged_samples):
            im[i, :res, res:-res] = plot_dists([p_n_hat[i]], res, fig_width, dpi)
    if 'p_a_l_hat' in tree.subgoals:
        # action existence probabilities are interleaved with nodes in depth-first order
        p_a_hat = tensor2np(sort_actions_depth_first(model_output, ['p_a_l_hat', 'p_a_r_hat'], n_logged_samples))
        for i in range(n_logged_samples):
            im[i, res:2*res, int(3*res/4):int(-3*res/4)] = plot_dists([p_a_hat[i]], res, fig_width + int(res/2), dpi)
        im = np.concatenate((im[:, :fig_height],
                             plot_balanced_tree_with_actions(model_output, inputs, n_logged_samples,
                                                             get_prob_fcn=lambda s: (tensor2np(s.subgoal.p_a_l_hat),
                                                                                     tensor2np(s.subgoal.p_a_r_hat)))), axis=1)
    else:
        with param(n_logged_samples=n_logged_samples):
            im[:, fig_height:, res:-res] = plot_balanced_tree(model_output).transpose((0, 2, 3, 1))
    # insert start and goal frame
    if inputs is not None:
        im[:, :res, :res] = imgtensor2np(inputs.traj_seq[:n_logged_samples, 0], n_logged_samples).transpose(0, 2, 3, 1)
        im[:, :res, -res:] = imgtensor2np(batchwise_index(inputs.traj_seq[:n_logged_samples], model_output.end_ind[:n_logged_samples]),
                                          n_logged_samples).transpose(0, 2, 3, 1)
    return im
def plot_gt_matching_overview(model_output, inputs, plot_attr='match_dist'):
    """Plots overview of which predicted frames contributed to which subgoals."""
    if len(inputs.traj_seq_images.shape) > 2:
        assert inputs.traj_seq_images.shape[3] == inputs.traj_seq_images.shape[4]   # code can only handle square images atm
    batch, n_gt, channels, res, _ = inputs.traj_seq_images.shape
    n_logged_samples = PARAMS.n_logged_samples
    assert batch >= n_logged_samples
    tree = model_output.tree
    max_depth = tree.depth
    n_sg = (2**max_depth) - 1
    im_height = (n_gt+max_depth) * res
    im_width = (n_sg + 2) * res
    im = np.asarray(0.7 * np.ones((n_logged_samples, im_height, im_width, 3)), dtype=np.float32)
    # insert ground truth images on the left, soft estimates on the right, top to bottom
    # insert raw subgoal predictions tree at the bottom, left to right in depth-first order
    get_strip = lambda x, gt=False: imgtensor2np(x, n_logged_samples, gt).transpose(0, 1, 3, 4, 2)\
        .reshape(n_logged_samples, res * n_gt, res, channels)
    im[:, :res*n_gt, :res] = get_strip(inputs.traj_seq_images, gt=True)
    if 'soft_matched_estimates' in model_output:
        im[:, :res * n_gt, res:2*res] = get_strip(model_output.soft_matched_estimates)
    with param(n_logged_samples=n_logged_samples):
        im[:, -max_depth*res:, 2*res:] = plot_balanced_tree(model_output).transpose((0, 2, 3, 1))
    # one matching-distribution plot per ground-truth frame, aligned with its image row
    fig_height, fig_width = res, n_sg * res
    dpi = 10
    match_dists = tensor2np(tree.get_attr_df(plot_attr), n_logged_samples)
    for i in range(n_gt):
        for b in range(n_logged_samples):
            match_plot = plot_dists([match_dists[b, :, i]], fig_height, fig_width, dpi, linewidth=3.0)
            im[b, i*res : (i+1)*res, 2*res:] = match_plot
    return im
def plot_pruned_seqs(model_output, inputs, n_logged_samples=3, max_seq_len=None):
    """Plots the pruned output sequences of the SH-Pred model.

    :return: tuple of (static overview image, per-frame image stack for GIF export).
    """
    assert "images" in model_output.dense_rec   # need pruned predicted images of SH-Pred model
    if inputs is not None:
        batch, n_gt_imgs, channels, res, _ = inputs.traj_seq.shape
    else:
        batch = len(model_output.dense_rec.images)
        assert batch == 1   # can currently only handle batch size 1
        n_gt_imgs, channels, res, _ = model_output.dense_rec.images[0].shape
    MAX_SEQ_LEN = int(n_gt_imgs * 1.5) if not max_seq_len else max_seq_len
    im_height = 2 * res     # ground truth row on top, predictions below
    im_width = (MAX_SEQ_LEN+1) * res
    im = np.asarray(0.7 * np.ones((n_logged_samples, im_height, im_width, 3)), dtype=np.float32)
    pred_imgs = list(map(imgtensor2np, model_output.dense_rec.images[:n_logged_samples]))
    max_len = min(n_gt_imgs, MAX_SEQ_LEN)
    for b in range(n_logged_samples):
        if pred_imgs[b] is None: continue
        seq_len = min(pred_imgs[b].shape[0], MAX_SEQ_LEN)
        max_len = max(max_len, seq_len)
        im[b, -res:, res:(seq_len+1)*res] = pred_imgs[b][:seq_len].transpose(2, 0, 3, 1).reshape(res, seq_len*res, channels)
    if inputs is not None:
        im[:, :res, :(n_gt_imgs*res)] = imgtensor2np(inputs.traj_seq, n_logged_samples).transpose(0, 3, 1, 4, 2)\
            .reshape(n_logged_samples, res, n_gt_imgs*res, channels)
    # optionally append a row with predicted actions (only for 2-dim actions)
    if "actions" in model_output.dense_rec \
            and model_output.dense_rec.actions is not None \
            and (True in [a is not None for a in model_output.dense_rec.actions]) \
            and (inputs is None or inputs.actions.shape[-1] == 2):
        ac_im = np.asarray(0.7 * np.ones((n_logged_samples, res, im_width, 3)), dtype=np.float32)
        pred_ac = list(map(tensor2np, model_output.dense_rec.actions[:n_logged_samples]))
        for b in range(n_logged_samples):
            if pred_ac[b] is None: continue
            seq_len = min(pred_ac[b].shape[0], MAX_SEQ_LEN)
            ac_im[b, :, :seq_len*res] = batch_action2img(pred_ac[b][None, :seq_len], res, channels).transpose(0, 2, 1, 3, 4)\
                .reshape(res, seq_len*res, channels)
        im = np.concatenate((im, ac_im), axis=1)
    # prepare GIF version
    gif_imgs = np.swapaxes(im.reshape(n_logged_samples, im.shape[1], MAX_SEQ_LEN+1, res, channels), 0, 2)[:max_len+1] \
        .reshape(max_len+1, im.shape[1], res * n_logged_samples, channels)
    return im, gif_imgs
def unstack(arr):
    """Splits an array along its first axis into a list of squeezed slices."""
    return [piece.squeeze() for piece in np.split(arr, arr.shape[0], 0)]
def plot_inverse_model_actions(model_output, inputs, n_logged_samples=5):
    """Plots the two input frames of the inverse model together with ground-truth and
    predicted action arrows, stacked into four image rows."""
    #assert inputs.actions.shape[-1] == 2    # code can only handle 2-dim actions
    batch, n_gt_imgs, channels, res, _ = inputs.traj_seq.shape
    def make_row(arr):
        """stack images in a row along batch dimension"""
        return np.concatenate(unstack(arr), 1)
    if len(model_output.action_targets.shape) <= 2:
        action_targets, actions = model_output.action_targets, model_output.actions
    else:
        action_targets, actions = model_output.action_targets[:, 0], model_output.actions[:, 0]
    model_output.img_t0, model_output.img_t1 = inputs.traj_seq[:, 0], inputs.traj_seq[:, 1]
    if action_targets.shape[-1] > 2:
        # only the first two action dimensions are visualized
        actions, action_targets = actions[..., :2], action_targets[..., :2]
    input_action_imgs = batch_action2img(tensor2np(action_targets[:, None], n_logged_samples), res, channels)
    pred_action_imgs = batch_action2img(tensor2np(actions[:, None], n_logged_samples), res, channels)
    image_rows = []
    image_rows.append(np.transpose(tensor2np(model_output.img_t0, n_logged_samples), [0, 2, 3, 1]))
    image_rows.append(np.transpose(tensor2np(model_output.img_t1, n_logged_samples), [0, 2, 3, 1]))
    image_rows.append(input_action_imgs.squeeze())
    image_rows.append(pred_action_imgs.squeeze())
    image_rows = [make_row(item) for item in image_rows]
    im = (np.concatenate(image_rows, 0)[None] + 1.0)/2  # map [-1, 1] -> [0, 1]
    return im
def make_gif(seqs, n_seqs_logged=5):
    """Fuse sequences in list + bring in gif format. Uses the imgtensor2np function"""
    seqs = [pad_sequence(seq, batch_first=True) for seq in seqs]    # pad to a common length
    seqs = [imgtensor2np(s, n_logged_samples=n_seqs_logged) for s in seqs]
    stacked_seqs = seqs[0]
    if len(seqs) > 1:
        # separate individual sequences with a 2-pixel zero (black) bar
        padding = np.zeros_like(stacked_seqs)[:, :, :, :2]
        padded_seqs = list(np.concatenate([padding, seq], axis=3) for seq in seqs[1:])
        stacked_seqs = np.concatenate([stacked_seqs] + padded_seqs, axis=3)
    batch, time, channels, height, width = stacked_seqs.shape
    # lay the batch out side by side along the width axis for the gif frames
    return stacked_seqs.transpose(1, 2, 3, 0, 4).reshape(time, channels, height, width * batch)
PREV_OBJS = None    # per-shape tensor counts from the previous eval_mem_objs() call


def eval_mem_objs():
    """A simple helper function to evaluate the number of objects currently in memory (CPU or GPU) and print the
    difference to the objects in memory when previously calling this function."""
    import gc
    gc.collect()
    param_size, tensor_size = 0, 0
    objs = dict()
    for obj in gc.get_objects():
        try:
            if torch.is_tensor(obj) or (hasattr(obj, 'data') and torch.is_tensor(obj.data)):
                # print(type(obj), obj.size())
                if isinstance(obj, torch.nn.parameter.Parameter):
                    param_size = param_size + 1
                else:
                    tensor_size = tensor_size + 1
                # bucket tensors by shape so the diff shows which allocations changed
                key = tuple(obj.size())
                if key in objs:
                    objs[key] = objs[key] + 1
                else:
                    objs[key] = 1
        except:
            # some gc-tracked objects raise on attribute access; skip them (best effort)
            pass
    print("#Params: {}".format(param_size))
    print("#Tensors: {}".format(tensor_size))
    global PREV_OBJS
    if PREV_OBJS is not None:
        # print only shapes whose count changed since the previous call
        diff = dict()
        for key in objs:
            if key in PREV_OBJS:
                d = objs[key] - PREV_OBJS[key]
                if d != 0:
                    diff[key] = d
            else:
                diff[key] = objs[key]
        import pprint
        pprint.pprint(diff)
    PREV_OBJS = objs
<file_sep>import torch
from torch.distributions import OneHotCategorical
import torch.nn as nn
from blox import batch_apply, AttrDict
from blox.tensor.ops import remove_spatial, broadcast_final
from blox.torch.losses import CELogitsLoss
from blox.torch.recurrent_modules import BaseProcessingLSTM
from blox.torch.subnetworks import SeqEncodingModule, Predictor
class AttnKeyEncodingModule(SeqEncodingModule):
    """Maps every element of a sequence to an attention-key embedding of size hp.nz_attn_key."""

    def build_network(self, input_size, hp):
        # Single-layer predictor applied per timestep.
        self.net = Predictor(hp, input_size, hp.nz_attn_key, num_layers=1)

    def forward(self, seq):
        contiguous_seq = seq.contiguous()
        return batch_apply(self.net, contiguous_seq)
class RecurrentPolicyModule(SeqEncodingModule):
    """Runs a processing LSTM over a sequence, flattening trailing dims first and restoring them after."""

    def __init__(self, hp, input_size, output_size, add_time=True):
        super().__init__(hp, False)
        self.hp = hp
        self.output_size = output_size
        self.net = BaseProcessingLSTM(hp, input_size, output_size)

    def build_network(self, input_size, hp):
        # Network is built in __init__; base-class hook intentionally empty.
        pass

    def forward(self, seq):
        shape = list(seq.shape)
        # Collapse everything beyond (batch, time) into one feature dimension.
        flat_seq = seq.view(shape[:2] + [-1])
        processed = self.run_net(flat_seq)
        # Restore trailing dims with the feature dim replaced by output_size.
        return processed.view(shape[:2] + [self.output_size] + shape[3:])
class LengthPredictorModule(nn.Module):
    """Predicts the length of a segment given start and goal image encoding of that segment."""

    def __init__(self, hp):
        super().__init__()
        self._hp = hp
        # One logit per possible sequence length, predicted from both encodings.
        self.p = Predictor(hp, hp.nz_enc * 2, hp.max_seq_len)

    def forward(self, e0, eg):
        """Returns the logits of a OneHotCategorical distribution."""
        logits = remove_spatial(self.p(e0, eg))
        return AttrDict(seq_len_logits=logits,
                        seq_len_pred=OneHotCategorical(logits=logits))

    def loss(self, inputs, model_output):
        """Cross-entropy loss between predicted length logits and the true end index."""
        return AttrDict(len_pred=CELogitsLoss(self._hp.length_pred_weight)(
            model_output.seq_len_logits, inputs.end_ind))
class ActionConditioningWrapper(nn.Module):
    """Wraps a sequence network and fuses per-step actions into its outputs."""

    def __init__(self, hp, net):
        super().__init__()
        self.net = net
        self.ac_net = Predictor(hp, hp.nz_enc + hp.n_actions, hp.nz_enc)

    def forward(self, input, actions):
        net_outputs = self.net(input)
        # Zero-pad actions along time so they align with the net output length.
        n_missing = net_outputs.shape[1] - actions.shape[1]
        padded_actions = torch.nn.functional.pad(actions, (0, 0, 0, n_missing, 0, 0))
        # TODO quite sure the concatenation is automatic
        fused = torch.cat([net_outputs, broadcast_final(padded_actions, input)], dim=2)
        return batch_apply(self.ac_net, fused)
<file_sep>import torch
import numpy as np
from blox import AttrDict
from gcp.planning.infra.policy.policy import Policy
from gcp.prediction.models.tree.tree import TreeModel
from gcp.prediction.training.checkpoint_handler import CheckpointHandler
from gcp.planning.cem.cem_planner import ImageCEMPlanner, CEMPlanner
from gcp.planning.cem.cem_simulator import GCPSimulator, \
GCPImageSimulator, ActCondGCPImageSimulator
class PlannerPolicy(Policy):
    """Policy that uses predictive planning algorithm to devise plan, and then follows it."""

    def __init__(self, ag_params, policyparams, gpu_id=None, ngpu=None, conversion_fcns=None, n_rooms=None):
        """
        :param ag_params: Agent parameters for infrastructure
        :param policyparams: Parameters for the policy, including model parameters
        :param gpu_id: unused arg (to comply with infrastructure definition)
        :param ngpu: unused arg (to comply with infrastructure definition)
        :param conversion_fcns: unused arg (to comply with infrastructure definition)
        :param n_rooms: unused arg (to comply with infrastructure definition)
        """
        super(PlannerPolicy, self).__init__()
        self._hp = self._default_hparams()
        self.override_defaults(policyparams)
        self.verbose = self._hp.verbose
        self.log_dir = ag_params.log_dir
        self._hp.params['batch_size'] = 1
        self.max_seq_len = ag_params.T
        if 'max_seq_len' not in self._hp.params:
            self._hp.params['max_seq_len'] = ag_params.T

        # create planner predictive model
        model = policyparams['model_cls'] if 'model_cls' in policyparams else TreeModel
        self.planner = model(self._hp.params, None)
        assert self.planner._hp.img_sz == ag_params.image_width

        # move planner model to device
        self.use_cuda = torch.cuda.is_available()
        self.device = torch.device('cuda') if self.use_cuda else torch.device('cpu')
        self.planner.to(self.device)
        # BUGFIX: previously hard-coded torch.device('cuda') here, which broke
        # CPU-only runs despite the CPU fallback selected just above.
        self.planner.device = self.device
        self.planner._hp.device = self.planner.device

        # load weights for predictive model
        load_epoch = 'latest' if self._hp.load_epoch is None else self._hp.load_epoch
        weights_file = CheckpointHandler.get_resume_ckpt_file(load_epoch, self._hp.checkpt_path)
        CheckpointHandler.load_weights(weights_file, self.planner, strict=False)
        self.planner.eval()

        # planning / execution state
        self.current_exec_step = None
        self.image_plan = None
        self.action_plan = None
        self.planner_outputs = []
        self.num_replans = 0

    def reset(self):
        """Clears all cached plans and logging history for a new episode."""
        super().reset()
        self.current_exec_step = None
        self.action_plan = None
        self.image_plan = None
        self.num_replans = 0
        self.planner_outputs = []
        self.img_t0_history = []
        self.img_t1_history = []

    def _default_hparams(self):
        default_dict = {
            'params': {},            # parameters for predictive model
            'model_cls': None,       # class for predictive model
            'checkpt_path': None,    # checkpoint path for predictive model
            'load_epoch': None,      # epoch that weigths should be loaded from
            'logger': None,
            'verbose': False,        # whether verbose planning outputs are logged
            'max_dump_rollouts': 5,  # max number of rollouts to dump
            'replan_interval': 1,    # interval at which replanning is triggered
            'num_max_replans': 10,   # maximum number of replannings per episode
        }
        parent_params = super()._default_hparams()
        parent_params.ncam = 1
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def act(self, t=None, i_tr=None, images=None, goal_image=None):
        """
        Triggers planning if no plan is made yet / last plan is completely executed. Then executes current plan.
        :param t: current time step in task execution
        :param i_tr: index of currently executed task
        :param images: images of so-far executed trajectory
        :param goal_image: goal-image that should be planned towards
        """
        self.t = t
        self.i_tr = i_tr
        self.goal_image = goal_image
        self.log_dir_verb = self.log_dir + '/verbose/traj{}'.format(self.i_tr)
        output = AttrDict()

        # Replan when (a) no plan exists, (b) the plan is exhausted, or
        # (c) the replan interval elapsed and the replan budget remains.
        if self.image_plan is None \
                or self.image_plan.shape[0] - 1 <= self.current_exec_step \
                or (t % self._hp.replan_interval == 0 and self.num_replans < self._hp.num_max_replans):
            self._plan(images[t], goal_image, t)
            self.num_replans += 1

        output.actions = self.get_action(images[t])
        self.current_exec_step = self.current_exec_step + 1
        return output

    def get_action(self, current_image):
        """Returns the next action from the cached action plan."""
        assert self.action_plan is not None  # need to attach inverse model to planner to get actions!
        action = self.action_plan[self.current_exec_step]
        return action

    def _plan(self, image, goal_image, step):
        """Runs planning algorithm to obtain image and action plans."""
        raise NotImplementedError

    def log_outputs_stateful(self, logger=None, global_step=None, phase=None, dump_dir=None, exec_seq=None, goal=None,
                             index=None, env=None, goal_pos=None, traj=None, topdown_image=None):
        """Logs planner outputs for visualization."""
        raise NotImplementedError
class CEMPolicy(PlannerPolicy):
    """Implements a simple CEM planning policy."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # CEM planning horizon must match the predictive model's max sequence length.
        self._hp.cem_params.update({'max_seq_len': self._hp.params['max_seq_len']})
        self._cem_simulator = self.simulator_type(self.planner, append_latent=True)
        self._cem_planner = self._hp.cem_planner(self._hp.cem_params, self._cem_simulator)

    def _default_hparams(self):
        default_dict = {
            'cem_planner': None,   # planner class used for CEM optimization
            'cem_params': {},      # parameters passed to the CEM planner
        }
        parent_params = super()._default_hparams()
        parent_params.ncam = 1
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def act(self, t=None, i_tr=None, state=None, goal=None, regression_state=None, images=None, run_super=False):
        """Plans a state trajectory with CEM, output actions are delta-states."""
        if run_super:
            # NOTE(review): the parent's signature is act(t, i_tr, images, goal_image);
            # here `state`/`goal` are fed positionally into those slots. This looks
            # intentional for state-based planning, but confirm against PlannerPolicy.act.
            return super().act(t, i_tr, state, goal)
        # Cache executed images/states for later logging.
        self._images = images[:, 0]
        self._states = state
        return super().act(t, i_tr, state, goal)

    def _plan(self, state, goal, step):
        """Runs CEM with planner model to generate state/action plan."""
        # run CEM to get state plan
        input_goal = goal[-1] if len(goal.shape) > 1 else goal
        self.image_plan, action_plan, _, self.plan_cost = self._cem_planner(state, input_goal)
        self.current_exec_step = 0  # reset internal execution counter used to index plan
        # compute action plan as difference between states
        self.action_plan = self.image_plan[1:] - self.image_plan[:-1]

    def log_outputs_stateful(self, logger=None, global_step=None, phase=None, dump_dir=None, exec_seq=None, goal=None,
                             index=None, env=None, goal_pos=None, traj=None, topdown_image=None):
        """Logs verbose planner output and the executed image sequence as video."""
        # self._cem_simulator.dump_logs()
        self._cem_planner.log_verbose(logger, global_step, phase, self.i_tr, dump_dir)
        # log executed image sequence
        executed_traj = self._images.astype(np.float32) / 255
        logger.log_video(executed_traj.transpose(0, 3, 1, 2), "elite_trajs_{}_test/execution".format(self.i_tr),
                         global_step, phase)

    def get_action(self, current_image):
        """Returns the next delta-state action; falls back to a small random action if the plan is empty."""
        assert self.action_plan is not None  # need to attach inverse model to planner to get actions!
        if self.action_plan.size < 1:
            return 0.05 * np.random.rand(2, )
        action = self.action_plan[self.current_exec_step]
        return action

    @property
    def simulator_type(self):
        # Simulator wrapper used to roll out the predictive model during CEM.
        return GCPSimulator

    @property
    def planner_type(self):
        return CEMPlanner
class ImageCEMPolicy(CEMPolicy):
    """CEM planning policy for image-based tasks. Uses inverse model to follow plan"""

    def _default_hparams(self):
        default_dict = {
            'closed_loop_execution': False,  # if True, will execute state plan in closed loop
            'act_cond': False,               # if action-conditioned simulator should be used
        }
        parent_params = super()._default_hparams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def act(self, t=None, i_tr=None, state=None, images=None, goal_image=None):
        # Cache executed images/states for logging, then defer to the base planning logic.
        self._images = images[:, 0]
        self._states = state
        # NOTE(review): `images`/`goal_image` are passed into CEMPolicy.act's
        # `state`/`goal` slots; with run_super=True they end up in the grandparent's
        # (images, goal_image) parameters — confirm this plumbing is intended.
        return super().act(t, i_tr, images, goal_image, run_super=True)

    def _plan(self, state, goal, step):
        """Planner directly outputs action plan via inverse model."""
        self.image_plan, self.action_plan, self.latent_plan, self.plan_cost = self._cem_planner(state, goal)
        self.current_exec_step = 0
        self._cem_planner.hack_add_state(self._states[-1].copy())

    def get_action(self, current_image):
        """Executes plan, optional closed-loop by re-inferring actions with the inverse model."""
        if self._hp.closed_loop_execution:
            return self._infer_action(current_image, self.latent_plan[self.current_exec_step + 1])
        else:
            return super().get_action(current_image)

    def _infer_action(self, current_img, target_latent):
        """Uses inverse model to infer closed loop execution action."""
        img = torch.tensor(current_img, device=self.device, dtype=torch.float32)
        # Encode current image; [:, :, 0, 0] drops the spatial dims of the encoding.
        enc_img0 = self.planner.encoder(self._cem_simulator._env2planner(img))[0][:, :, 0, 0]
        return self.planner.inv_mdl.run_single(
            enc_img0, torch.tensor(target_latent[None], device=self.device))[0].data.cpu().numpy()

    @property
    def simulator_type(self):
        return GCPImageSimulator if not self._hp.act_cond else ActCondGCPImageSimulator

    @property
    def planner_type(self):
        return ImageCEMPlanner
<file_sep>import os
import shutil
import pickle as pkl
import cv2
import copy
class RawSaver():
    """Saves raw trajectory data (images, agent data, observation dicts, policy
    outputs) to disk, grouping trajectories into folders of `ngroup` each."""

    def __init__(self, save_dir, ngroup=1000):
        self.save_dir = save_dir
        self.ngroup = ngroup  # number of trajectories per group folder

    def save_traj(self, itr, agent_data=None, obs_dict=None, policy_outputs=None):
        """Writes one trajectory to <save_dir>/raw/traj_group<g>/traj<itr>,
        deleting any pre-existing folder for the same trajectory index.

        :param itr: trajectory index (determines group folder)
        :param agent_data: optional dict, pickled to agent_data.pkl
        :param obs_dict: optional dict of observations; images and robosuite xml
                         get special handling, the rest is pickled to obs_dict.pkl
        :param policy_outputs: optional data, pickled to policy_out.pkl
        """
        igrp = itr // self.ngroup
        group_folder = self.save_dir + '/raw/traj_group{}'.format(igrp)
        if not os.path.exists(group_folder):
            os.makedirs(group_folder)

        traj_folder = group_folder + '/traj{}'.format(itr)
        if os.path.exists(traj_folder):
            print('trajectory folder {} already exists, deleting the folder'.format(traj_folder))
            shutil.rmtree(traj_folder)
        os.makedirs(traj_folder)
        print('writing: ', traj_folder)

        # BUGFIX: guard against obs_dict=None (the default) before the membership
        # tests -- previously `'robosuite_xml' in obs_dict` raised TypeError.
        if obs_dict is not None and 'robosuite_xml' in obs_dict:
            save_robosuite_xml(traj_folder + '/robosuite.xml', obs_dict['robosuite_xml'][-1])
        if obs_dict is not None and 'images' in obs_dict:
            images = obs_dict['images'].copy()
            T, n_cams = images.shape[:2]
            for i in range(n_cams):
                os.mkdir(traj_folder + '/images{}'.format(i))
            for t in range(T):
                for i in range(n_cams):
                    # cv2 expects BGR channel order, hence the ::-1 channel flip.
                    cv2.imwrite('{}/images{}/im_{}.png'.format(traj_folder, i, t), images[t, i, :, :, ::-1])

        if agent_data is not None:
            with open('{}/agent_data.pkl'.format(traj_folder), 'wb') as file:
                pkl.dump(agent_data, file)
        if obs_dict is not None:
            with open('{}/obs_dict.pkl'.format(traj_folder), 'wb') as file:
                obs_dict_cpy = copy.deepcopy(obs_dict)
                if 'topdown_image' in obs_dict_cpy:
                    obs_dict_cpy.pop('topdown_image')  # don't save topdown image, takes too much memory!
                pkl.dump(obs_dict_cpy, file)
        if policy_outputs is not None:
            with open('{}/policy_out.pkl'.format(traj_folder), 'wb') as file:
                pkl.dump(policy_outputs, file)
import os
import xml.dom.minidom
import xml.etree.ElementTree as ET
import io
def save_robosuite_xml(fname, xml_str, pretty=False):
    """Writes a robosuite XML string to `fname`, optionally pretty-printed via minidom."""
    with open(fname, "w") as f:
        if pretty:
            # TODO: get a better pretty print library
            parsed_xml = xml.dom.minidom.parseString(xml_str)
            xml_str = parsed_xml.toprettyxml(newl="")
        f.write(xml_str)<file_sep>import queue
import numpy as np
from blox import AttrDict
from gcp.planning.infra.envs.miniworld_env.utils.multiroom2d_layout import define_layout
from gcp.planning.infra.envs.miniworld_env.utils.sampling_fcns import RoomSampler2d
from gcp.planning.infra.policy.policy import Policy
from gcp.planning.infra.policy.prm_policy.prm import PRM_planning
from scipy import interpolate
class PrmPolicy(Policy):
    """Navigation policy that plans a 2D path through a multi-room layout with a
    Probabilistic Roadmap (PRM) and executes it as delta-state actions.

    (The previous docstring, "Cross Entropy Method Stochastic Optimizer",
    appears to have been a copy-paste leftover.)
    """
    # Candidate per-room sampling rates, chosen from randomly when `use_var_sampling` is set.
    VAR_SAMPLING_RATES = [30, 300]

    def __init__(self, ag_params, policyparams, gpu_id, ngpu, conversion_fcns=None, n_rooms=None):
        super(PrmPolicy, self).__init__()
        self._hp = self._default_hparams()
        policyparams['n_rooms'] = n_rooms
        self.override_defaults(policyparams)
        # Layout is assumed to be a square grid of rooms.
        self._rooms_per_side = int(np.sqrt(self._hp.n_rooms))
        self.layout = define_layout(self._rooms_per_side)
        self.state_sampler = RoomSampler2d(self._rooms_per_side, sample_wide=self.layout.non_symmetric)
        self.plan_params = AttrDict(n_knn=self._hp.n_knn,
                                    max_edge_len=self._hp.max_edge_len,
                                    cost_fcn=lambda d: d ** self._hp.cost_power)
        self.current_action = None
        self.state_plan = None
        self.action_plan = None
        self.convert = conversion_fcns  # AttrDict containing env2prm, transform_plan
        self._room_plan = None

    def reset(self):
        # Drop all cached plans so the next act() triggers fresh planning.
        self.current_action = None
        self.state_plan = None
        self.action_plan = None
        self._room_plan = None

    def _default_hparams(self):
        default_dict = {
            'n_samples_per_room': 50,       # number of sample_points in first try, then gets increased
            'n_samples_per_door': 3,        # number of samples per door
            'n_knn': 10,                    # number of edge from one sampled point
            'max_edge_len': 0.1,            # Maximum edge length (in layout units)
            'replan_eps': 0.05,             # distance btw planned and executed state that triggers replan, in % of table size
            'max_planning_retries': 2,      # maximum number of replans before inverting the last action
            'cost_power': 2,                # power on the distance for cost function
            'bottleneck_sampling': True,    # sample explicitly in bottlenecks to ease planning
            'use_var_sampling': False,      # if True, uses variable PRM sampling rates for different rooms
            'subsample_factor': 1.0,        # how much to subsample the plan in state space
            'max_traj_length': None,        # maximum length of planned trajectory
            'smooth_trajectory': False,     # if True, uses spline interpolation to smooth trajectory
            'sample_door_center': False,    # if True, samples door samples in center position of door
            'use_scripted_path': False,     # if True, uses scripted waypoints to construct path
            'straight_through_door': False, # if True, crosses through door in a straight line
            'n_rooms': None,                # number of rooms in the layout
            'use_fallback_plan': True,      # if True executes fallback plan if planning fails
            'logger': None,                 # dummy variable
        }
        parent_params = super()._default_hparams()
        parent_params.ncam = 1
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def act(self, t=None, i_tr=None, qpos_full=None, goal=None):
        """Replans when no plan exists or execution deviated too far from the plan,
        then returns the next planned delta-state action (zeros + done once exhausted)."""
        self.i_tr = i_tr
        output = AttrDict()
        if self.action_plan is None or \
                self._check_deviate(qpos_full[t, :2],
                                    self.state_plan[:, min(self.current_action, self.state_plan.shape[1]-1)]):
            self._plan(qpos_full[t], goal[t], t)
            self.current_action = 0
        done = False
        if self.current_action < self.action_plan.shape[1]:
            output.actions = self.action_plan[:, self.current_action]
        else:  # if required number of steps > planned steps
            done = True
            output.actions = np.zeros(2)
        self.current_action = self.current_action + 1
        output.done = done
        return output

    def _sample_uniform(self):
        # Uniform PRM samples across the whole layout.
        px, py = [], []
        for _ in range(self._hp.n_samples_per_room * self._hp.n_rooms):
            p = self.state_sampler.sample()
            px.append(p[0]); py.append(p[1])
        return px, py

    def _sample_per_room(self, room_path):
        # Samples points only in the rooms along the planned room sequence.
        px, py = [], []
        room_path = range(self._hp.n_rooms) if room_path is None else room_path
        for room in room_path:
            n_samples = int(np.random.choice(PrmPolicy.VAR_SAMPLING_RATES, 1)) if self._hp.use_var_sampling \
                else self._hp.n_samples_per_room
            for _ in range(n_samples):
                p = self.state_sampler.sample(room)
                px.append(p[0]); py.append(p[1])
        return px, py

    def _sample_per_door(self, room_path=None):
        # Samples points inside doorways (bottlenecks) between consecutive rooms.
        # NOTE(review): when room_path is None this uses `self.layout` as the door
        # list -- presumably this should be `self.layout.doors`; confirm.
        doors = self.layout if room_path is None else \
            [(min(room_path[i], room_path[i+1]), max(room_path[i], room_path[i+1])) for i in range(len(room_path) - 1)]
        if not doors: return [], []
        samples = np.asarray([[self.state_sampler.sample_door(d[0], d[1], self._hp.sample_door_center)
                               for _ in range(self._hp.n_samples_per_door)]
                              for d in doors]).transpose(2, 0, 1).reshape(2, -1)
        return samples[0], samples[1]

    def _sample_points(self, room_path=None):
        # Combines per-room samples with (optional) explicit doorway samples.
        px, py = self._sample_per_room(room_path)
        if self._hp.bottleneck_sampling:
            dx, dy = self._sample_per_door(room_path)
            px.extend(dx); py.extend(dy)
        return [px, py]

    def _check_deviate(self, pos, target_pos):
        # True when executed position strayed more than replan_eps from the plan.
        print(np.linalg.norm(pos - target_pos))
        return np.linalg.norm(pos - target_pos) > self._hp.replan_eps

    def _plan(self, agent_pos, goal_pos, t):
        """Plans a state/action trajectory from agent_pos to goal_pos with PRM +
        spline smoothing; returns (raw state plan, success flag)."""
        ## UNCOMMENT for random exploration policcy
        #from gcp.infra.policy.cem.utils.sampler import PDDMSampler
        #sampler = PDDMSampler(clip_val=float("Inf"), n_steps=self._hp.max_traj_length - t, action_dim=2, initial_std=2)
        #self.action_plan = sampler.sample(1)[0].transpose(1, 0) #(np.random.rand(2, self._hp.max_traj_length - t) - 0.5) * 2 * 2#* 6e-1
        #self.state_plan = agent_pos[:2][:, None].repeat(self.action_plan.shape[1], axis=1) + np.cumsum(self.action_plan, axis=1)
        #return self.action_plan, True
        if self.convert is not None:
            # Map env coordinates into PRM planner coordinates.
            pos = self.convert.env2prm(agent_pos[:2])
            goal_pos = self.convert.env2prm(goal_pos)
        else:
            pos = agent_pos[:2]
        length, path = self.compute_shortest_path(pos, goal_pos, transform_pose=False)
        if self._hp.use_scripted_path:
            planned_x, planned_y = [p[0] for p in path], [p[1] for p in path]
            success = True
        else:
            sx, sy = pos[0], pos[1]
            gx, gy = goal_pos[0], goal_pos[1]
            ox, oy = self.layout.ox, self.layout.oy
            # Reuse the room-level plan across replans within one episode.
            if self._room_plan is None:
                room_path = self.plan_room_seq(self.layout.coords2ridx(*pos),
                                               self.layout.coords2ridx(*goal_pos), self.layout.doors)
                print("Planned room sequence with {} rooms!".format(len(room_path)))
                self._room_plan = room_path
            else:
                room_path = self._room_plan
                print("Reused existing room plan!")
            # Retry PRM planning with fresh samples until it succeeds or budget runs out.
            for _ in range(self._hp.max_planning_retries):
                pts = self._sample_points(room_path)
                planned_x, planned_y, success = PRM_planning(sx, sy, gx, gy, ox, oy, self.layout.robot_size, self.plan_params,
                                                             self._hp.n_samples_per_room * self._hp.n_rooms, pts)
                if success: break  # when planning is successful
        if not success:
            if self._hp.use_fallback_plan:
                print("Did not find a plan in {} tries!".format(self._hp.max_planning_retries))
                self._fallback_plan()
            return None, False
        n_steps = min(int(length * 20), self._hp.max_traj_length - t)  # min(int(self._hp.subsample_factor * len(planned_x)), self._hp.max_traj_length)
        try:
            if self._hp.max_traj_length is not None: n_steps = min(n_steps, self._hp.max_traj_length - t)
            # Fit a spline through the PRM waypoints and resample it at n_steps points.
            tck, u = interpolate.splprep([planned_x, planned_y], s=0.0)
            x_i, y_i = interpolate.splev(np.linspace(0, 1, n_steps), tck)
            # x_i, y_i = planned_x, planned_y
            self.state_plan = np.stack((x_i, y_i))
        except TypeError:
            print("Could not interpolate!")  # this happens if duplicate values in plan
            self._fallback_plan()
            return None, False
        #self.state_plan = np.array([planned_x, planned_y])
        self.action_plan = self.state_plan[:, 1:] - self.state_plan[:, :-1]
        raw_plan = self.state_plan.copy()
        if self.convert is not None:
            self.state_plan, self.action_plan = self.convert.transform_plan(self.state_plan, self.action_plan)
        return raw_plan, True

    def _fallback_plan(self):
        # If a previous plan exists, back out by inverting remaining actions;
        # otherwise emit a tiny random plan.
        if self.action_plan is not None:
            self.action_plan = -2 * self.action_plan[:, self.current_action-1:]  # TODO: come up with a better fallback solution!)
        else:
            self.action_plan = self.state_plan = 0.02 * np.random.rand(2, 1)

    def compute_shortest_path(self, p1, p2, transform_pose=True, straight_through_door=False):
        """Returns (length, waypoints) of the door-to-door shortest path between p1 and p2."""
        if self.convert is not None and transform_pose:
            p1, p2 = self.convert.env2prm(p1), self.convert.env2prm(p2)
        if (np.stack((p1,p2)) < -0.5).any() or (np.stack((p1,p2)) > 0.5).any():
            return 10., []  # coordinates invalid
        room_path = plan_room_seq(self.layout.coords2ridx(p1[0], p1[1]),
                                  self.layout.coords2ridx(p2[0], p2[1]), self.layout.doors)
        waypoints = [p1]
        for n in range(len(room_path)-1):
            # input rooms must be in ascending order
            if straight_through_door:
                waypoints.extend(self.state_sampler.get_door_path(room_path[n], room_path[n + 1]))
            else:
                waypoints.append(self.state_sampler.get_door_pos(min(room_path[n], room_path[n+1]),
                                                                max(room_path[n], room_path[n+1])))
        waypoints.append(p2)
        waypoints = np.array(waypoints)
        length = np.linalg.norm(waypoints[1:] - waypoints[:-1], axis=1).sum()
        return length, waypoints

    def plan_room_seq(self, *args, **kwargs):
        # Dispatch to the multimodal variant when the layout admits multiple paths.
        if self.layout.multimodal:
            return plan_room_seq_multimodal(*args, **kwargs)
        else:
            return plan_room_seq(*args, **kwargs)

    def avg_step_length(self, px, py):
        # Mean Euclidean distance between consecutive points of a path.
        return np.mean(np.sqrt((np.array(px[1:]) - np.array(px[:-1]))**2 + (np.array(py[1:]) - np.array(py[:-1]))**2))
def plan_room_seq(start, goal, doors):
    """Implements a breadth-first room search to find the sequence of rooms that reaches the goal."""
    frontier = queue.Queue()
    visited = []

    def expand(node):
        # Reaching the goal returns the linked node; the path is read off via parents.
        if node.room == goal: return node
        visited.append(node.room)
        neighbors = []
        for d in doors:
            if d[0] == node.room and d[1] not in visited:
                neighbors.append(d[1])
            elif d[1] == node.room and d[0] not in visited:
                neighbors.append(d[0])
        for nb in neighbors:
            frontier.put(AttrDict(room=nb, parent=node))
        return expand(frontier.get())

    end_node = expand(AttrDict(room=start, parent=None))

    # Walk parent links back to the start, then reverse into start->goal order.
    room_path = []
    node = end_node
    while node is not None:
        room_path.append(node.room)
        node = node.parent
    return room_path[::-1]
def plan_room_seq_multimodal(start, goal, doors):
    """Finds all paths between start and goal that visit each room at most once. Returns one of them at random."""
    frontier = queue.Queue()
    goal_nodes = []

    def collect_path(start_node):
        # Walks parent links to the root; returns rooms in child-first order.
        room_path = []
        def collect(node):
            room_path.append(node.room)
            if node.parent is None: return
            collect(node.parent)
        collect(start_node)
        return room_path

    def expand(node):
        if node.room == goal:
            goal_nodes.append(node)
        else:
            neighbors = []
            for d in doors:
                # Only expand into rooms not already on this node's path
                # (prevents cycles while allowing multiple distinct paths).
                if d[0] == node.room and d[1] not in collect_path(node):
                    neighbors.append(d[1])
                elif d[1] == node.room and d[0] not in collect_path(node):
                    neighbors.append(d[0])
            [frontier.put(AttrDict(room=neighbor, parent=node)) for neighbor in neighbors]
        if frontier.empty(): return
        expand(frontier.get())

    # collect list of all possible paths, is sorted by length (short to long)
    expand(AttrDict(room=start, parent=None))
    # sample one of the possible paths at random
    return collect_path(np.random.choice(goal_nodes))[::-1]
if __name__ == "__main__":
    # Quick sanity check: plan a multimodal room sequence across a 3x3 layout
    # from room 0 to room 8 and print it.
    demo_layout = define_layout(3)
    print(plan_room_seq_multimodal(0, 8, demo_layout.doors))
<file_sep>from blox import AttrDict
from experiments.prediction.base_configs import base_tree as base_conf
# Tree-model config variant: pruned-DTW metric pruning + balanced matching.
configuration = AttrDict(base_conf.configuration)
configuration.metric_pruning_scheme = 'pruned_dtw'
model_config = AttrDict(base_conf.model_config)
model_config.update({
    'matching_type': 'balanced',
})
<file_sep>import os
from blox import AttrDict
from gcp.planning.cem.cost_fcn import EuclideanPathLength
current_dir = os.path.dirname(os.path.realpath(__file__))
from experiments.prediction.base_configs import gcp_sequential as base_conf

# Training configuration for sequential GCP on the 9-room navigation dataset.
configuration = AttrDict(base_conf.configuration)
configuration.update({
    'dataset_name': 'nav_9rooms',
    'batch_size': 16,
    'lr': 2e-4,
    'epoch_cycles_train': 2,
    'n_rooms': 9,
    'metric_pruning_scheme': 'basic',
})

# Model configuration: larger LSTM, attached state regressor / cost model /
# inverse model, and a discrete-logistic-mixture decoder.
model_config = AttrDict(base_conf.model_config)
model_config.update({
    'ngf': 16,
    'nz_mid_lstm': 512,
    'n_lstm_layers': 3,
    'nz_mid': 128,
    'nz_enc': 128,
    'nz_vae': 256,
    'regress_length': True,
    'attach_state_regressor': True,
    'attach_cost_mdl': True,
    'cost_mdl_params': AttrDict(
        cost_fcn=EuclideanPathLength,
    ),
    'attach_inv_mdl': True,
    'inv_mdl_params': AttrDict(
        n_actions=2,
        use_convs=False,
        build_encoder=False,
    ),
    'decoder_distribution': 'discrete_logistic_mixture',
})
model_config.pop("add_weighted_pixel_copy")<file_sep>import numpy as np
import torch
from scipy.spatial.distance import cdist
from math import isinf
try:
import gcp.evaluation.cutils as cutils
except:
pass
def dtw_dist(x, y, dist=None, warp=1, w=np.inf, s=1.0):
    """
    Computes Dynamic Time Warping (DTW) of two sequences.
    :param array x: N1*M array
    :param array y: N2*M array
    :param func dist: distance used as cost measure
    :param int warp: how many shifts are computed.
    :param int w: window size limiting the maximal distance between indices of matched entries |i,j|.
    :param float s: weight applied on off-diagonal moves of the path. As s gets larger, the warping path is increasingly biased towards the diagonal
    Returns the minimum distance, the cost matrix, the accumulated cost matrix, and the wrap path.
    """
    # Build the full pairwise cost matrix, then delegate to the accumulated-cost DTW.
    n_rows, n_cols = len(x), len(y)
    cost = np.zeros((n_rows, n_cols))
    for row in range(n_rows):
        for col in range(n_cols):
            cost[row, col] = dist(x[row], y[col])
    return dtw(cost, warp, w, s)
def dtw(inp_D0, warp=1, w=np.inf, s=1.0):
    """
    Computes Dynamic Time Warping (DTW) of two sequences.
    :param inp_D0: precomputed r x c pairwise cost matrix
    :param int warp: how many shifts are computed.
    :param int w: window size limiting the maximal distance between indices of matched entries |i,j|.
    :param float s: weight applied on off-diagonal moves of the path. As s gets larger, the warping path is increasingly biased towards the diagonal
    Returns the minimum distance, the cost matrix, the accumulated cost matrix, and the wrap path.
    """
    r, c = inp_D0.shape
    assert w >= abs(r - c)
    assert s > 0
    if not isinf(w):
        # Windowed case: start from all-inf and zero out the band of width w
        # around the diagonal where matches are allowed.
        D0 = np.full((r + 1, c + 1), np.inf)
        for i in range(1, r + 1):
            D0[i, max(1, i - w):min(c + 1, i + w + 1)] = 0
        D0[0, 0] = 0
    else:
        # Unwindowed case: inf border forces paths to start at (0, 0).
        D0 = np.zeros((r + 1, c + 1))
        D0[0, 1:] = np.inf
        D0[1:, 0] = np.inf
    D1 = D0[1:, 1:]  # view
    D0[1:, 1:] = inp_D0  # TODO to support w, this needs to be modified to mask the assignment.
    C = D1.copy()
    jrange = range(c)
    for i in range(r):
        if not isinf(w):
            # Restrict the inner loop to the diagonal band.
            jrange = range(max(0, i - w), min(c, i + w + 1))
        for j in jrange:
            min_list = [D0[i, j]]
            for k in range(1, warp + 1):
                i_k = min(i + k, r)
                j_k = min(j + k, c)
                # Off-diagonal predecessors are penalized by factor s.
                min_list += [D0[i_k, j] * s, D0[i, j_k] * s]
            D1[i, j] += min(min_list)
    # Degenerate single-row/column inputs have a trivial warp path.
    if r == 1:
        path = np.zeros(c), range(c)
    elif c == 1:
        path = range(r), np.zeros(r)
    else:
        path = _traceback(D0)
    return D1[-1, -1] / sum(D1.shape), C, D1, path
def basic_dtw(C):
    """
    Computes Dynamic Time Warping (DTW) of two sequences.
    Returns the minimum distance, the cost matrix, the accumulated cost matrix, and the wrap path.
    :param C: the cost matrix
    :return:
    """
    n_r, n_c = C.shape
    # Accumulated-cost matrix with an inf border so border cells have unique predecessors.
    acc = np.zeros((n_r + 1, n_c + 1))
    acc[0, 1:] = np.inf
    acc[1:, 0] = np.inf
    acc[1:, 1:] = C
    for row in range(n_r):
        for col in range(n_c):
            # Each cell accumulates the cheapest of its three predecessors
            # (diagonal match, vertical, horizontal).
            acc[row + 1, col + 1] += min(acc[row, col], acc[row + 1, col], acc[row, col + 1])
    warp_path = _traceback(acc)
    return acc[-1, -1] / (n_r + n_c), acc[1:, 1:], warp_path
def c_dtw(C):
    """
    Computes Dynamic Time Warping (DTW) of two sequences efficiently in C.
    Returns the minimum (normalized) distance, the accumulated cost matrix, and the warp path.
    :param C: the cost matrix
    :return:
    """
    r, c = C.shape
    D = np.zeros((r + 1, c + 1))
    D[0, 1:] = np.inf
    D[1:, 0] = np.inf
    D[1:, 1:] = C
    # Compiled Cython helper fills D with cumulative minima in place.
    cutils.min_cumsum(D)
    path = _traceback(D)
    return D[-1, -1] / (r + c), D[1:, 1:], path
def batched_dtw(C, end_ind):
    """Batched DTW over a stack of cost matrices C (b x r x c).

    end_ind[k] gives the index of the last valid column for batch element k, so
    sequences of different lengths can share one padded batch.
    Returns normalized distances, accumulated cost matrices, warp paths, and path lengths.
    """
    b, r, c = C.shape
    D = np.zeros((b, r + 1, c + 1))
    D[:, 0, 1:] = np.inf
    D[:, 1:, 0] = np.inf
    D[:, 1:, 1:] = C
    for i in range(r):
        for j in range(c):
            # Vectorized over the batch: add the cheapest of the three predecessors.
            candidates = [D[:, i, j], D[:, i + 1, j], D[:, i, j + 1]]
            D[:, i + 1, j + 1] += np.min(np.stack(candidates), axis=0)
    paths, path_lengths = _batched_traceback(D, end_ind)
    # Normalize each distance by its own (row + valid-column) count.
    return D[np.arange(b), -1, end_ind+1] / (r + end_ind+1), D[:, 1:, 1:], paths, path_lengths
def torch_dtw(C, end_ind):
    """PyTorch analogue of batched_dtw: batched DTW over cost tensors C (b x r x c),
    with end_ind giving each batch element's last valid column.
    Returns normalized distances, accumulated cost matrices, warp paths, and path lengths."""
    b, r, c = C.shape
    D = torch.zeros((b, r + 1, c + 1))
    D[:, 0, 1:] = torch.Tensor([float("Inf")])
    D[:, 1:, 0] = torch.Tensor([float("Inf")])
    D[:, 1:, 1:] = C
    for i in range(r):
        for j in range(c):
            # In-place add of the cheapest predecessor, vectorized over the batch.
            candidates = [D[:, i, j], D[:, i + 1, j], D[:, i, j + 1]]
            D[:, i + 1, j + 1].add_(torch.min(torch.stack(candidates), dim=0).values)
    paths, path_lengths = _torched_traceback(D, end_ind)
    return D[torch.arange(b), -1, (end_ind.float()+1).long()] / (r + end_ind.float()+1), D[:, 1:, 1:], paths, path_lengths
def accelerated_dtw(x, y, dist=None, inp_D0=None, warp=1):
    """
    Computes Dynamic Time Warping (DTW) of two sequences in a faster way.
    Instead of iterating through each element and calculating each distance,
    this uses the cdist function from scipy (https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.cdist.html)
    :param array x: N1*M array
    :param array y: N2*M array
    :param string or func dist: distance parameter for cdist; strings select scipy's optimized metrics
    :param inp_D0: optional precomputed cost matrix (skips the cdist call)
    :param int warp: how many shifts are computed.
    Returns the minimum distance, the cost matrix, the accumulated cost matrix, and the wrap path.
    """
    assert len(x)
    assert len(y)
    # Treat 1-D inputs as single-feature sequences.
    if np.ndim(x) == 1:
        x = x.reshape(-1, 1)
    if np.ndim(y) == 1:
        y = y.reshape(-1, 1)
    n_r, n_c = len(x), len(y)
    acc = np.zeros((n_r + 1, n_c + 1))
    acc[0, 1:] = np.inf
    acc[1:, 0] = np.inf
    core = acc[1:, 1:]  # view onto the unpadded region
    # Use the supplied cost matrix, or compute all pairwise costs in one cdist call.
    core[:, :] = cdist(x, y, dist) if inp_D0 is None else inp_D0
    cost = core.copy()
    for i in range(n_r):
        for j in range(n_c):
            choices = [acc[i, j]]
            for k in range(1, warp + 1):
                choices += [acc[min(i + k, n_r), j], acc[i, min(j + k, n_c)]]
            core[i, j] += min(choices)
    # Trivial paths for degenerate single-element sequences.
    if len(x) == 1:
        path = np.zeros(len(y)), range(len(y))
    elif len(y) == 1:
        path = range(len(x)), np.zeros(len(x))
    else:
        path = _traceback(acc)
    return core[-1, -1] / sum(core.shape), cost, core, path
def _traceback(D):
# TODO I suspect this doesn't work with fancy stuff (w, s, warp)
i, j = np.array(D.shape) - 2
p, q = [i], [j]
while (i > 0) or (j > 0):
tb = np.argmin((D[i, j], D[i, j + 1], D[i + 1, j]))
if tb == 0:
i -= 1
j -= 1
elif tb == 1:
i -= 1
elif tb == 2:
j -= 1
else:
raise ValueError
p.insert(0, i)
q.insert(0, j)
return np.array(p), np.array(q)
def _batched_traceback(D, end_ind):
    """Vectorized (NumPy) traceback over a batch of accumulated cost matrices.

    :param D: [batch, r, c] padded accumulated cost matrices.
    :param end_ind: [batch] column index at which each element's path starts.
    :return: ((p, q), path_lengths) — p/q are [T, batch] row/col index arrays;
        path_lengths holds the iteration count at which each element reached (0, 0).
    """
    b, r, c = D.shape
    # every element starts its traceback at row r-2; columns start at end_ind
    i, j = np.asarray(np.ones((b,)) * (r - 2), dtype=int), end_ind
    p, q = [i.copy()], [j.copy()]
    path_lengths = np.zeros_like(i)
    cnt = 0
    while (i > 0).any() or (j > 0).any():
        cnt += 1
        # record length for elements that reached the origin in an earlier iteration
        # NOTE(review): elements that reach (0, 0) on the very last iteration appear
        # to keep path_lengths == 0 because the loop exits before this line runs
        # again — confirm this is intended by the caller.
        path_lengths[(i == 0) & (j == 0) & (path_lengths == 0)] = cnt
        # cheapest predecessor per batch element: 0=diagonal, 1=up, 2=left
        tb = np.argmin(np.stack((D[np.arange(b), i, j], D[np.arange(b), i, j + 1], D[np.arange(b), i + 1, j])), axis=0)
        # masked decrements keep already-finished elements clamped at 0
        i[(tb == 0) & (i > 0)] -= 1
        j[(tb == 0) & (j > 0)] -= 1
        i[(tb == 1) & (i > 0)] -= 1
        j[(tb == 2) & (j > 0)] -= 1
        p.insert(0, i.copy())
        q.insert(0, j.copy())
    return (np.array(p), np.array(q)), path_lengths
def _torched_traceback(D, end_ind):
    """Torch twin of _batched_traceback: batched traceback on torch tensors.

    :param D: [batch, r, c] padded accumulated cost tensors.
    :param end_ind: [batch] long tensor of start columns for the traceback.
    :return: ((p, q), path_lengths) — p/q are stacked [T, batch] index tensors;
        path_lengths as in _batched_traceback (same caveat about last finishers).
    """
    b, r, c = D.shape
    i, j = (torch.ones((b,)) * (r - 2)).long(), end_ind
    p, q = [i.clone()], [j.clone()]
    path_lengths = torch.zeros_like(i)
    cnt = 0
    while (i > 0).any() or (j > 0).any():
        cnt += 1
        # see NOTE(review) in _batched_traceback re: last-iteration finishers
        path_lengths[(i == 0) & (j == 0) & (path_lengths == 0)] = cnt
        # cheapest predecessor per batch element: 0=diagonal, 1=up, 2=left
        tb = torch.argmin(
            torch.stack((D[torch.arange(b), i, j], D[torch.arange(b), i, j + 1], D[torch.arange(b), i + 1, j])), dim=0)
        # masked decrements clamp finished elements at index 0
        i[(tb == 0) & (i > 0)] -= 1
        j[(tb == 0) & (j > 0)] -= 1
        i[(tb == 1) & (i > 0)] -= 1
        j[(tb == 2) & (j > 0)] -= 1
        p.insert(0, i.clone())
        q.insert(0, j.clone())
    return (torch.stack(p), torch.stack(q)), path_lengths
if __name__ == "__main__":
    # Benchmark + consistency check comparing two DTW implementations:
    # `dtw` and `c_dtw` (presumably defined earlier in this file — not visible here).
    b, r, c = 8, 1024, 1000
    min_length = int(c - 1)
    EPS = 1e-5
    import numpy as np
    import time
    np.random.seed(40)
    DD = np.random.rand(b, r, c)
    # random per-element end indices, each at least min_length
    end_ind = min_length + np.asarray(np.random.rand(b) * (c - min_length - 1), dtype=int)
    dd, dd2, pp, pp2, t1, t2 = [], [], [], [], 0.0, 0.0
    for D, i in zip(DD, end_ind):
        s = time.time()
        d, cost_matrix, acc_cost_matrix, path = dtw(D[:, :i+1])
        t1 += time.time() - s
        dd.append(d); pp.append(path)
        s = time.time()
        d2, acc_cost_matrix_2, path_2 = c_dtw(D[:, :i+1])
        t2 += time.time() - s
        dd2.append(d2); pp2.append(path_2)
    print("DTW: {}".format(t1))
    print("C DTW: {}".format(t2))
    def check(cond, name):
        # prints PASS/FAIL for a named consistency condition
        print("{}: PASS".format(name)) if cond else print("{}: FAIL".format(name))
    # distances and both path index arrays must agree within EPS
    check(not np.any((np.array(dd) - dd2) > EPS), "Distance")
    check(not np.any(np.concatenate([(np.array(pp[i][0]) - np.array(pp2[i][0])) > EPS for i in range(b)])) and \
         not np.any(np.concatenate([(np.array(pp[i][1]) - np.array(pp2[i][1])) > EPS for i in range(b)])), "Paths")
<file_sep>import glob
import os
import random
from random import Random
import cv2
import numpy as np
from blox import AttrDict
# don't change these values, they need to correspond to the multiroom2d.xml file!
ROBOT_SIZE = 0.02
ROOM_SIZE = 1/3
DOOR_SIZE = 1.5 * 0.0667
MAZE_SEED = 42
MULTIMODAL = True
NON_SYMMETRIC = False #True
def define_layout_raw(rooms_per_side, _add_horizontal_line=None, _add_vertical_line=None):
    """Builds the wall geometry and door list for a square multi-room maze.

    :param rooms_per_side: number of rooms along each side of the layout.
    :param _add_horizontal_line: optional callback (x_range, y) -> points used to
        emit a horizontal wall segment (used by the renderer to draw instead of sample).
    :param _add_vertical_line: optional callback (y_range, x) -> points.
    :return: (o, ROBOT_SIZE, table_size, doors) where o is a 2xN array of sampled
        wall points (only meaningful with the default callbacks) and doors is a
        list of sorted room-index pairs that are connected.
    """
    if _add_vertical_line is None:
        # default callbacks: densely sample wall segments as 2xN point arrays,
        # shifted so the layout is centered around (0, 0)
        coord_offset = 0.5 * rooms_per_side * ROOM_SIZE  # center around (0,0)
        def _add_horizontal_line(x_range, y):
            ox = np.linspace(x_range[0], x_range[1], int((x_range[1] - x_range[0] + 1) * 100))
            oy = y * np.ones_like(ox)
            return np.stack([ox, oy], axis=0) - coord_offset
        def _add_vertical_line(y_range, x):
            oy = np.linspace(y_range[0], y_range[1], int((y_range[1] - y_range[0] + 1) * 100))
            ox = x * np.ones_like(oy)
            return np.stack([ox, oy], axis=0) - coord_offset
    # add outer boundaries
    table_size = ROOM_SIZE * rooms_per_side
    o = _add_horizontal_line([0, table_size], 0)
    o = np.concatenate((o, _add_horizontal_line([0, table_size], table_size)), axis=1)
    o = np.concatenate((o, _add_vertical_line([0, table_size], 0)), axis=1)
    o = np.concatenate((o, _add_vertical_line([0, table_size], table_size)), axis=1)
    # add wall segments (with a door-sized gap in the middle of every room edge);
    # rng is only consulted when NON_SYMMETRIC is set, to randomly drop segments
    rng = Random()
    rng.seed(MAZE_SEED)
    for wall_add_fcn in [_add_horizontal_line, _add_vertical_line]:
        for r in range(rooms_per_side):
            # segment before the first door opening of the row/column
            o = np.concatenate((o, wall_add_fcn([0, 1 * ROOM_SIZE/2 - DOOR_SIZE/2], (r+1) * ROOM_SIZE)), axis=1)
            for seg_idx in range(rooms_per_side - 1):
                if NON_SYMMETRIC and rng.random() < 0.1: continue
                o = np.concatenate((o, wall_add_fcn(
                    [(2*seg_idx+1) * ROOM_SIZE/2 + DOOR_SIZE/2, (2*(seg_idx+1)+1) * ROOM_SIZE/2 - DOOR_SIZE/2],
                    (r+1) * ROOM_SIZE)), axis=1)
            # closing segment after the last door opening
            o = np.concatenate((o, wall_add_fcn([(rooms_per_side-0.5)*ROOM_SIZE + DOOR_SIZE/2, rooms_per_side*ROOM_SIZE],
                                                (r + 1) * ROOM_SIZE)), axis=1)
    # generate maze connectivity, then close (wall off) every opening that is NOT a door
    doors = gen_doors_multimodal(rooms_per_side) if MULTIMODAL else generate_maze(rooms_per_side)
    for rx in range(rooms_per_side):
        for ry in range(rooms_per_side):
            if rx + 1 < rooms_per_side and \
                    (((rx, ry), (rx+1, ry)) not in doors and ((rx+1, ry), (rx, ry)) not in doors):
                door_center = ROOM_SIZE/2 + ry * ROOM_SIZE
                o = np.concatenate((o, _add_vertical_line([door_center - DOOR_SIZE/2, door_center + DOOR_SIZE/2],
                                                          (rx + 1) * ROOM_SIZE)), axis=1)
            if ry + 1 < rooms_per_side and \
                    (((rx, ry), (rx, ry+1)) not in doors and ((rx, ry+1), (rx, ry)) not in doors):
                door_center = ROOM_SIZE/2 + rx * ROOM_SIZE
                o = np.concatenate((o, _add_horizontal_line([door_center - DOOR_SIZE/2, door_center + DOOR_SIZE/2],
                                                            (ry + 1) * ROOM_SIZE)), axis=1)
    def coords2ridx(x, y):
        # maps integer room grid coords (x, y) to a flat room index
        return x * rooms_per_side + (rooms_per_side-1) - y
    # translate to idx and make sure that smaller room idx comes first
    doors = [sorted((coords2ridx(d[0][0], d[0][1]), coords2ridx(d[1][0], d[1][1]))) for d in doors]
    return o, ROBOT_SIZE, table_size, doors
def generate_maze(rooms_per_side):
    """Returns a set of doors that, when open, generate a maze without shortcuts.
    Algorithm from here: https://github.com/maximecb/gym-miniworld

    :param rooms_per_side: number of rooms per side of the square grid.
    :return: list of door tuples ((x1, y1), (x2, y2)) between adjacent rooms.
    """
    doors = []
    # seeded rng makes the maze deterministic across calls
    rng = Random()
    rng.seed(MAZE_SEED)
    visited = []
    neighbors = [(0, 1), (1, 0), (-1, 0), (0, -1)]
    def visit(x, y):
        # recursive randomized DFS; note `neighbors` is shuffled in place, so the
        # shared list's order changes across recursion levels (hence the .copy())
        visited.append((x, y))
        rng.shuffle(neighbors)
        for dx, dy in neighbors.copy():
            nx, ny = x + dx, y + dy
            if nx < 0 or nx >= rooms_per_side or ny < 0 or ny >= rooms_per_side:
                continue    # not a valid neighbor
            if (nx, ny) in visited:
                continue    # neighbor already in visited states
            doors.append(((x, y), (nx, ny)))    # open door to neighbor
            visit(nx, ny)
    visit(0, 0)     # generate maze starting from room 0
    return doors
def gen_doors_multimodal(rooms_per_side):
    """Generates open layout with many doors that allows for multimodal trajectories.

    Starts from a fully connected grid (every pair of adjacent rooms shares a door),
    then removes a hand-picked set of doors per grid size to add some walls.
    :param rooms_per_side: grid size; only 3, 4 and 5 are supported.
    :return: list of door tuples ((x1, y1), (x2, y2)).
    :raises NotImplementedError: for unsupported grid sizes.
    """
    # generate list of all doors
    doors = []
    neighbors = [(0, 1), (1, 0), (-1, 0), (0, -1)]
    def add_doors(x, y):
        # recursive flood-fill that adds each adjacent-room door exactly once
        for dx, dy in neighbors.copy():
            nx, ny = x + dx, y + dy
            if nx < 0 or nx >= rooms_per_side or ny < 0 or ny >= rooms_per_side:
                continue    # not a valid neighbor
            if ((x, y), (nx, ny)) in doors or ((nx, ny), (x, y)) in doors:
                continue
            doors.append(((x, y), (nx, ny)))
            add_doors(nx, ny)
    add_doors(0, 0)
    def maybe_remove(r1, r2):
        # removes a door regardless of the order its endpoints were stored in
        if (r1, r2) in doors:
            doors.remove((r1, r2))
        elif (r2, r1) in doors:
            doors.remove((r2, r1))
    # remove a few doors (i.e. add walls)
    if rooms_per_side == 3:     # add two walls around the middle room
        maybe_remove((1, 1), (1, 2))
        maybe_remove((1, 1), (1, 0))
    elif rooms_per_side == 4:
        maybe_remove((0, 3), (1, 3))
        maybe_remove((1, 0), (2, 0))
        maybe_remove((2, 1), (3, 1))
        maybe_remove((2, 2), (3, 2))
        maybe_remove((2, 3), (3, 3))
        # NOTE(review): (1, 4) is outside a 4x4 grid, so this call is a no-op — confirm intended
        maybe_remove((1, 3), (1, 4))
    elif rooms_per_side == 5:
        maybe_remove((0, 3), (1, 3))
        maybe_remove((1, 0), (2, 0))
        maybe_remove((2, 1), (3, 1))
        maybe_remove((2, 2), (3, 2))
        maybe_remove((2, 3), (3, 3))
        maybe_remove((1, 3), (1, 4))
        maybe_remove((1, 1), (1, 2))
        maybe_remove((2, 1), (2, 2))
        maybe_remove((3, 1), (3, 2))
        maybe_remove((4, 2), (4, 3))
    else:
        raise NotImplementedError
    return doors
def define_layout(rooms_per_side, texture_dir=None):
    """Builds the full layout description used by the navigation environments.

    :param rooms_per_side: number of rooms per side of the square layout.
    :param texture_dir: folder with texture assets; defaults to default_texture_dir().
    :return: AttrDict with wall point lists (ox/oy), sizes, door list, a
        float-coordinate -> room-index converter, and the texture file list.
    """
    if texture_dir is None:
        texture_dir = default_texture_dir()
    o, robot_size, table_size, doors = define_layout_raw(rooms_per_side)
    ox, oy = list(o[0]), list(o[1])
    def coords2ridx(x, y):
        """Translates float x, y coords into room index."""
        # shift by table_size/2 because the layout is centered around (0, 0)
        xy_room = [np.floor((c + table_size/2) / ROOM_SIZE) for c in [x, y]]
        ridx = xy_room[0] * rooms_per_side + (rooms_per_side-1) - xy_room[1]
        # supports scalar and array inputs
        return int(ridx) if ridx.size == 1 else np.asarray(ridx, dtype=int)
    textures = load_textures(texture_dir) if texture_dir is not None else None
    return AttrDict(ox=ox,
                    oy=oy,
                    robot_size=robot_size,
                    table_size=table_size,
                    room_size=ROOM_SIZE,
                    door_size=DOOR_SIZE,
                    doors=doors,
                    coords2ridx=coords2ridx,
                    textures=textures,
                    texture_dir=texture_dir,
                    multimodal=MULTIMODAL,
                    non_symmetric=NON_SYMMETRIC,)
def default_texture_dir():
    """Locates the default texture folder below $GCP_DATA_DIR.

    Scans the 'nav_9rooms' and 'nav_25rooms' dataset folders in that order;
    if both exist, the later one wins (same as the original scan order).
    :raises AssertionError: when neither dataset folder is present.
    """
    data_root = os.environ["GCP_DATA_DIR"]
    found = None
    for dataset in ('nav_9rooms', 'nav_25rooms'):
        candidate = os.path.join(data_root, dataset, "textures")
        if os.path.exists(candidate):
            found = candidate
    # need to download either 'nav_9room' or 'nav_25room' dataset to get textures
    assert found is not None
    return found
def load_textures(texture_dir):
    """Loads all textures from asset folder.

    :param texture_dir: folder containing the texture .png files.
    :return: fixed, hardcoded list of texture file names (see note below).
    """
    # NOTE(review): the glob/sort/shuffle result below is discarded — only side
    # effect kept is the .remove() call, which raises ValueError when
    # 'asphalt_1.png' is missing from texture_dir (acts as a sanity check).
    texture_files = glob.glob(os.path.join(texture_dir, "*.png"))
    texture_files = [os.path.basename(p) for p in texture_files]
    texture_files.sort()
    rng = random.Random()   # shuffle texture files
    rng.seed(42)
    rng.shuffle(texture_files)
    texture_files.remove("asphalt_1.png")   # this one is used for the floor
    # there was a bug in initial data collection, this is a hack to synchronize with the generated data
    # TODO remove the hardcoded textures when collecting new data
    HARDCODED_TEXTURE_FILES = ['floor_tiles_white.png', 'lg_style_01_4tile_d_result.png', 'lg_style_01_wall_blue_1.png',
                               'wood_1.png', 'lg_style_04_wall_cerise_d_result.png',
                               'lg_style_05_floor_blue_bright_d_result.png', 'cardboard_4.png',
                               'lg_style_03_wall_light_m_result.png',
                               'lg_style_02_wall_dblue_d_result.png',
                               'lg_style_02_wall_purple_d_result.png', 'cinder_blocks_1.png', 'wood_2.png',
                               'ceiling_tiles_1.png',  # to avoid aliasing
                               'lg_style_03_wall_purple_d_result.png', 'airduct_grate_1.png',
                               'lg_style_03_wall_orange_1.png', 'grass_2.png',
                               'lg_style_01_wall_light_m_result.png',
                               'lg_style_04_wall_purple_d_result.png',
                               'lg_style_03_floor_light1_m_result.png',
                               'lg_style_05_wall_red_d_result.png', 'slime_1.png',
                               'lg_style_05_wall_yellow_d_result.png', 'floor_tiles_bw_1.png',
                               'lg_style_02_floor_orange_d_result.png', 'lg_style_05_wall_yellow_bright_d_result.png',
                               'concrete_1.png', 'lg_style_03_wall_gray_d_result.png',
                               'lg_style_04_wall_red_d_result.png',  # to avoid aliasing
                               'lg_style_04_floor_orange_bright_d_result.png',
                               'lg_style_01_floor_orange_bright_d_result.png', 'stucco_1.png',
                               'lg_style_04_wall_green_bright_d_result.png', 'door_steel_brown.png',
                               'lg_style_03_floor_blue_bright_d_result.png', 'lava_1.png',
                               'lg_style_05_floor_light1_m_result.png',
                               'lg_style_01_wall_red_bright_1.png',
                               'lg_style_01_wall_green_1.png', 'lg_style_01_wall_yellow_1.png',
                               'lg_style_01_wall_red_1.png', 'lg_style_02_wall_yellow_d_result.png', 'door_doom_1.png',
                               'wood_planks_1.png', 'lg_style_03_floor_blue_d_result.png',
                               'lg_style_04_floor_blue_d_result.png', 'lg_style_03_floor_orange_d_result.png',
                               'lg_style_04_wall_red_bright_d_result.png', 'lg_style_02_floor_blue_bright_d_result.png',
                               'door_garage_white.png', 'lg_style_04_floor_blue_bright_d_result.png',
                               'lg_style_01_floor_blue_d_result.png',
                               'lg_style_02_floor_light_m_result.png',
                               'marble_2.png', 'lg_style_04_floor_cyan_d_result.png',
                               'lg_style_05_floor_blue_d_result.png', 'lg_style_01_wall_cerise_1.png',
                               'lg_style_02_wall_yellow_bright_d_result.png',
                               'lg_style_01_floor_blue_bright_d_result.png', 'lg_style_04_wall_green_d_result.png',
                               'drywall_1.png', 'lg_style_01_floor_blue_team_d_result.png', 'door_steel_red.png',
                               'lg_style_01_floor_light_m_result.png',
                               'lg_style_03_wall_cyan_1.png', 'marble_1.png',
                               'picket_fence_1.png', 'door_steel_grey.png', 'water_1.png',
                               'lg_style_02_floor_green_d_result.png', 'lg_style_01_floor_orange_d_result.png',
                               'lg_style_01_wall_green_bright_1.png', 'lg_style_03_floor_green_bright_d_result.png',
                               'lg_style_04_floor_orange_d_result.png', 'door_garage_red.png', 'brick_wall_1.png',
                               'lg_style_03_wall_gray_bright_d_result.png', 'lg_style_03_wall_blue_d_result.png',
                               'rock_1.png', 'lg_style_05_wall_red_bright_d_result.png', 'grass_1.png',
                               'lg_style_03_floor_green_d_result.png', 'lg_style_02_floor_green_bright_d_result.png',
                               'lg_style_05_floor_orange_d_result.png', 'door_doom_2.png',
                               'lg_style_02_wall_blue_d_result.png', 'lg_style_04_floor_dorange_d_result.png',
                               'lg_style_03_floor_purple_d_result.png', 'lg_style_05_floor_orange_bright_d_result.png',
                               'lg_style_01_floor_red_team_d_result.png', 'metal_grill_1.png',
                               'lg_style_02_floor_blue_d_result.png', 'cardboard_3.png',
                               'lg_style_01_ceiling_d_result.png', 'lg_style_01_wall_purple_1.png',
                               'lg_style_03_wall_orange_bright_d_result.png',
                               'lg_style_02_wall_blue_bright_d_result.png', 'cardboard_1.png',
                               'ceiling_tile_noborder_1.png', 'lg_style_02_wall_lgreen_d_result.png',
                               'lg_style_03_floor_red_d_result.png']
    return HARDCODED_TEXTURE_FILES
def draw_layout_overview(rooms_per_side, render_scale, texture_dir, add_textures=True):
    """Renders a top-down overview image of the maze layout.

    :param rooms_per_side: rooms per side of the square layout.
    :param render_scale: pixels per layout length unit.
    :param texture_dir: folder with texture PNGs used as room backgrounds.
    :param add_textures: if True, fills each room tile with a texture image.
    :return: HxWx3 float image with values in [0, 1].
    """
    textures = load_textures(texture_dir)
    layout = define_layout(rooms_per_side, texture_dir)
    # draw texture background
    n_textures = len(textures)
    res = int(layout.table_size * render_scale)
    room_size = int(res / rooms_per_side)
    img = np.ones((res, res, 3))
    if add_textures:
        for x in range(rooms_per_side):
            for y in range(rooms_per_side):
                # cycle through the texture list per room; convert BGR->RGB and scale to [0, 1]
                texture = cv2.imread(os.path.join(texture_dir,
                                                  textures[(x * rooms_per_side + y) % n_textures]))
                texture = cv2.resize(texture, (room_size, room_size))[:, :, ::-1] / 255.
                img[int(y * room_size) : int((y+1) * room_size),
                    int(x * room_size) : int((x+1) * room_size)] = texture
    # drawing callbacks: instead of sampling wall points, draw them directly into
    # `img` with cv2.line (y is flipped because image rows grow downward);
    # the dummy [[None]] return keeps define_layout_raw's concatenation happy
    def _add_horizontal_line(x_range, y):
        cv2.line(img, (int(x_range[0] * render_scale), res - int(y * render_scale - 1)),
                 (int(x_range[1] * render_scale), res - int(y * render_scale - 1)), (0, 0, 0), 3)
        return [[None]]
    def _add_vertical_line(y_range, x):
        cv2.line(img, (int(x * render_scale), res - int(y_range[0] * render_scale - 1)),
                 (int(x * render_scale), res - int(y_range[1] * render_scale - 1)), (0, 0, 0), 3)
        return [[None]]
    define_layout_raw(rooms_per_side, _add_horizontal_line, _add_vertical_line)
    return img
if __name__ == '__main__':
    # quick visual check: render the 5x5 layout and save it to test.png
    import matplotlib.pyplot as plt
    img = draw_layout_overview(rooms_per_side=5, render_scale=256, texture_dir="../../../../../assets/textures")
    # plt.imshow(img)
    # plt.show()
    plt.imsave("test.png", img)
    # import matplotlib.pyplot as plt
    # l = define_layout(10)
    # print(l.doors)
    # plt.scatter(l.ox, l.oy, c='black')
    # plt.axis('equal')
    # plt.show()
<file_sep>from gcp.prediction.models.sequential import SequentialModel
from gcp.prediction.utils.logger import HierarchyLogger
configuration = {
'model': SequentialModel,
'logger': HierarchyLogger,
}
model_config = {
'one_step_planner': 'continuous',
'dense_rec_type': 'svg',
'hierarchy_levels': 0,
'add_weighted_pixel_copy': True,
}<file_sep>dataset_spec = {
'max_seq_len': 80, # maximum sequence in dataset is 80 frames
'n_actions': 2, # actions in dataset are 2-dimensional
'state_dim': 2, # states in dataset are 2-dimensional
}<file_sep>Cython==0.28.1
decorator==4.3.0
dload==0.6
funcsigs==1.0.2
gym==0.14.0
h5py==2.7.1
imageio==2.1.2
matplotlib==2.2.2
moviepy==0.2.3.2
numpy==1.18.4
opencv-python==3.4.2.17
Pillow==6.0.0
pyglet==1.2.4
scikit-image==0.15.0
scipy==1.3.0
tensorflow==1.14.0
tensorboardX==2.0
torch==1.3.0
torchvision==0.2.0
tqdm==4.11.2
urllib3==1.22
numpy-stl==2.11.2
pyquaternion==0.9.5
<file_sep>import torch
import torch.nn as nn
from blox import AttrDict
from blox.tensor.ops import broadcast_final
from blox.torch.layers import BaseProcessingNet
from blox.torch.losses import KLDivLoss2
from blox.torch.models.vrnn import VRNNCell
from gcp.evaluation.evaluation_matching import DTWEvalBinding
from gcp.prediction.models.base_gcp import BaseGCPModel
class SequentialRecModule(nn.Module):
    """Flat (non-hierarchical) sequence predictor based on a variational RNN,
    SVG-style: rolls out a VRNN cell conditioned on start/goal encodings."""
    def __init__(self, hp, input_size, output_size, decoder):
        # TODO make test time version
        assert input_size == output_size    # recurrent state is fed back, so sizes must match
        super().__init__()
        self._hp = hp
        self.decoder = decoder
        # per-step context: start+goal encodings and/or encoded actions
        context_size = 0
        if hp.context_every_step:
            context_size += hp.nz_enc * 2
        if hp.action_conditioned_pred:
            context_size += hp.nz_enc
        self.lstm = VRNNCell(hp, input_size, context_size, hp.nz_enc * 2).make_lstm()
        self.eval_binding = DTWEvalBinding(hp)
        if hp.skip_from_parents:
            raise NotImplementedError("SVG doesn't support skipping from parents")
    def forward(self, root, inputs):
        """Rolls out the VRNN for max_seq_len - 1 steps and decodes images.

        :param root: unused (kept for interface parity with tree-based modules).
        :param inputs: AttrDict with e_0, e_g, I_0 and optionally enc_traj_seq,
            z and enc_action_seq.
        :return: AttrDict with `encodings`, decoded outputs, and `images`
            (ground-truth first frame prepended).
        """
        lstm_inputs = AttrDict()
        initial_inputs = AttrDict(x=inputs.e_0)
        context = torch.cat([inputs.e_0, inputs.e_g], dim=1)
        static_inputs = AttrDict()
        if 'enc_traj_seq' in inputs:
            # teacher-forcing targets: encoded ground-truth frames shifted by one
            lstm_inputs.x_prime = inputs.enc_traj_seq[:, 1:]
        if 'z' in inputs:
            lstm_inputs.z = inputs.z
        if self._hp.context_every_step:
            static_inputs.context = context
        if self._hp.action_conditioned_pred:
            assert 'enc_action_seq' in inputs   # need to feed actions for action conditioned predictor
            lstm_inputs.update(more_context=inputs.enc_action_seq)
        self.lstm.cell.init_state(initial_inputs.x, context, lstm_inputs.get('more_context', None))
        # Note: the last image is also produced. The actions are defined as going to the image
        outputs = self.lstm(inputs=lstm_inputs,
                            initial_inputs=initial_inputs,
                            static_inputs=static_inputs,
                            length=self._hp.max_seq_len - 1)
        outputs.encodings = outputs.pop('x')
        outputs.update(self.decoder.decode_seq(inputs, outputs.encodings))
        outputs.images = torch.cat([inputs.I_0[:, None], outputs.images], dim=1)
        return outputs
    def loss(self, inputs, outputs, log_error_arr=False):
        """Decoder reconstruction losses plus the KL divergence between the
        VRNN's inference (q_z) and prior (p_z) distributions, masked by pad_mask."""
        losses = self.decoder.loss(inputs, outputs, extra_action=False, log_error_arr=log_error_arr)
        # TODO don't place loss on the final image
        weights = broadcast_final(inputs.pad_mask[:, 1:], outputs.p_z.mu)
        losses.kl = KLDivLoss2(self._hp.kl_weight, breakdown=1, free_nats_per_dim=self._hp.free_nats)\
            (outputs.q_z, outputs.p_z, weights=weights, log_error_arr=log_error_arr)
        return losses
    def get_sample_with_len(self, i_ex, len, outputs, inputs, pruning_scheme, name=None):
        """
        :param i_ex:  example index
        :param len: desired sample length
        :param outputs: model outputs with dense_rec predictions
        :param inputs: batch inputs (traj_seq, end_ind, e_0, ...)
        :param pruning_scheme: 'dtw' (match predictions to GT via DTW) or 'basic' (truncate)
        :param name: optional key into dense_rec to fetch instead of images
        :return: (sample, matching_output) — matching_output only for 'dtw'
        """
        if pruning_scheme == 'dtw':
            # Cut the first image off for DTW - it is the GT image
            targets = inputs.traj_seq[i_ex, 1:inputs.end_ind[i_ex] + 1]
            estimates = outputs.dense_rec.images[i_ex, 1:inputs.end_ind[i_ex] + 1]
            images, matching_output = self.eval_binding(None, None, None, None, targets=targets, estimates=estimates)
            # TODO clean up
            # Add the first image back (eval cuts it off)..
            return torch.cat([outputs.dense_rec.images[i_ex, [1]], images], dim=0), matching_output
        elif pruning_scheme == 'basic':
            if name is None:
                return outputs.dense_rec.images[i_ex, :len], None
            elif name == 'encodings':
                # TODO fix this. This is necessary because the Hierarchical model outputs the first latent too.
                # This concatenates the first encoder latent to compensate
                return torch.cat((inputs.e_0[i_ex][None], outputs.dense_rec[name][i_ex]), 0)[:len], None
            else:
                return outputs.dense_rec[name][i_ex, :len], None
    def get_all_samples_with_len(self, end_idxs, outputs, inputs, pruning_scheme, name=None):
        """Applies get_sample_with_len to every batch element; lengths come from end_idxs."""
        return [self.get_sample_with_len(b, end_idxs[b] + 1, outputs, inputs, pruning_scheme, name=name)[0]
                for b in range(end_idxs.shape[0])], None
class SequentialModel(BaseGCPModel):
    """GCP model variant that predicts frames sequentially (no hierarchy),
    delegating the rollout to SequentialRecModule."""
    def build_network(self, build_encoder=True):
        super().build_network(build_encoder)
        self.dense_rec = SequentialRecModule(
            hp=self._hp, input_size=self._hp.nz_enc, output_size=self._hp.nz_enc, decoder=self.decoder)
        if self._hp.action_conditioned_pred:
            # small MLP that embeds raw actions into the latent context space
            self.action_encoder = BaseProcessingNet(self._hp.n_actions, self._hp.nz_mid, self._hp.nz_enc,
                                                    self._hp.n_processing_layers, self._hp.fc_builder)
    def predict_sequence(self, inputs, outputs, start_ind, end_ind, phase):
        # note: outputs/start_ind/end_ind/phase are ignored here — the flat
        # predictor always rolls out the full sequence from `inputs`
        outputs = AttrDict(dense_rec=self.dense_rec(None, inputs))
        return outputs
    def log_outputs(self, outputs, inputs, losses, step, log_images, phase):
        """Standard logging plus dense-prediction GIFs (and optional prior samples)."""
        super().log_outputs(outputs, inputs, losses, step, log_images, phase)
        if log_images:
            if outputs.dense_rec and self._hp.use_convs:
                self._logger.log_dense_gif(outputs, inputs, "dense_rec", step, phase)
            log_prior_images = False    # disabled by default; flip on for debugging
            if log_prior_images:
                # Run the model N times
                with torch.no_grad(), self.val_mode():
                    rows = list([self(inputs).dense_rec.images for i in range(4)])
                    self._logger.log_rows_gif(rows, "prior_samples", step, phase)
    def get_predicted_pruned_seqs(self, inputs, outputs):
        # truncate each predicted encoding sequence at its end index
        return [seq[:end_ind+1] for seq, end_ind in zip(outputs.dense_rec.encodings, outputs.end_ind)]
<file_sep>from .general_agent import GeneralAgent
import pickle as pkl
import numpy as np
import cv2
import PIL
from PIL import Image
import os
import glob
class BenchmarkAgent(GeneralAgent):
    """Agent for benchmark rollouts: loads start/goal configurations saved on disk
    and augments rollout metadata with the environment's evaluation results."""
    N_MAX_TRIALS = 1    # only allow one trial per trajectory when benchmarking
    def __init__(self, hyperparams, start_goal_list=None):
        self._start_goal_confs = hyperparams.get('start_goal_confs', None)
        self.ncam = hyperparams['env'][1].get('ncam', hyperparams['env'][0].default_ncam())     # check if experiment has ncam set, otherwise get env default
        GeneralAgent.__init__(self, hyperparams, start_goal_list)
        self._is_robot_bench = 'robot_name' in self._hp.env[1]
        if not self._is_robot_bench:
            self._hp.gen_xml = (True, 1)    # this was = 1 but that did not work?!
    def _setup_world(self, itr):
        """Sets up the environment and, if benchmark configs exist, loads the
        reset state for trajectory `itr` and sanity-checks the camera count."""
        old_ncam = self.ncam
        GeneralAgent._setup_world(self, itr)
        if self._start_goal_confs is not None:
            self._reset_state = self._load_raw_data(itr)
        assert old_ncam == self.ncam, """Environment has {} cameras but benchmark has {}.
        Feed correct ncam in agent_params""".format(self.ncam, old_ncam)
    def _required_rollout_metadata(self, agent_data, traj_ok, t, i_itr):
        # when benchmarking, also record the environment's evaluation metrics
        GeneralAgent._required_rollout_metadata(self, agent_data, traj_ok, t, i_itr)
        if self._start_goal_confs is not None:
            agent_data.update(self.env.eval())
    def _init(self):
        return GeneralAgent._init(self)
    def _load_raw_data(self, itr):
        """
        doing the reverse of save_raw_data
        :param itr: trajectory index (overridden by hp.iex when present)
        :return: reset state dict for the environment (None for robot benchmarks)
        """
        if 'robot_name' in self._hp.env[1]:     # robot experiments don't have a reset state
            return None
        if 'iex' in self._hp:
            itr = self._hp.iex
        # trajectories are stored in groups of `ngroup` on disk
        ngroup = 1000
        igrp = itr // ngroup
        group_folder = '{}/traj_group{}'.format(self._start_goal_confs, igrp)
        traj_folder = group_folder + '/traj{}'.format(itr)
        print('reading from: ', traj_folder)
        num_files = len(glob.glob("{}/images0/*.png".format(traj_folder)))
        assert num_files > 0, " no files found!"
        obs_dict = {}
        demo_images = np.zeros([num_files, self.ncam, self._hp.image_height, self._hp.image_width, 3])
        # only the first and last frames are loaded; intermediate entries of
        # demo_images remain zero
        for t in [0, num_files-1]:  #range(num_files):
            for c in range(self.ncam):
                image_file = '{}/images{}/im_{}.png'.format(traj_folder, c, t)
                if not os.path.isfile(image_file):
                    raise ValueError("Can't find goal image: {}".format(image_file))
                img = cv2.imread(image_file)[..., ::-1]     # BGR -> RGB
                if img.shape[0] != self._hp.image_height or img.shape[1] != self._hp.image_width:
                    img = Image.fromarray(img)
                    img = img.resize((self._hp.image_height, self._hp.image_width), PIL.Image.BILINEAR)
                    img = np.asarray(img, dtype=np.uint8)
                demo_images[t, c] = img
        self._demo_images = demo_images.astype(np.float32)/255.
        # the goal is the final demo frame
        self._goal_image = self._demo_images[-1]
        with open('{}/obs_dict.pkl'.format(traj_folder), 'rb') as file:
            obs_dict.update(pkl.load(file))
        self._goal = self.env.get_goal_from_obs(obs_dict)
        reset_state = self.get_reset_state(obs_dict)
        # robosuite environments additionally need the saved model XML to reset
        if os.path.exists(traj_folder + '/robosuite.xml'):
            with open(traj_folder + '/robosuite.xml', "r") as model_f:
                model_xml = model_f.read()
            from robosuite.utils.mjcf_utils import postprocess_model_xml
            xml = postprocess_model_xml(model_xml)
            reset_state['robosuite_xml'] = xml
        return reset_state
    def get_reset_state(self, obs_dict):
        return self.env.get_reset_from_obs(obs_dict)
<file_sep>import numpy as np
import torch
from blox.torch.ops import slice_tensor, reduce_dim
from blox.torch import porch
from blox.basic_types import map_dict, listdict2dictlist
from blox.tensor.ops import batch_apply, make_recursive_list, rmap
class SubgoalTreeLayer:
    """One layer of a binary subgoal tree. Each layer holds the subgoals of all
    nodes at its depth (batched, [batch, num_nodes, ...]) and a link to the next
    deeper layer; iteration helpers expose the tree breadth-first (bf) or
    depth-first (df)."""
    def __init__(self, parent=None):
        # NOTE(review): `map` here is the builtin map-iterator type; per the
        # message this guards against objects produced by multi-GPU splitting.
        if isinstance(parent, map):
            raise ValueError("This happens when splitting across multiple GPUs, will get caught above")
        self.child_layer = None     # next deeper SubgoalTreeLayer
        self.subgoals = None        # AttrDict-like batch of this layer's subgoal data
        self.depth = None           # layers remaining below (0 = leaf sentinel)
        self.pruned = None
        # self.parent_layer = parent
        self.selected = None
        self.match_eval_idx = None
    def produce_tree(self, inputs, layerwise_inputs, start_inds, end_inds, left_parents, right_parents, producer, depth):
        """no done mask checks, assumes start_ind never None.
        all input tensors are of shape [batch, num_parent_nodes, ...]
        """
        self.depth = depth
        if depth == 0:
            return
        # slice out inputs for this layer
        layer_inputs = rmap(lambda x: depthfirst2layers(reduce_dim(x, dim=1))[-depth].contiguous(), layerwise_inputs)
        out = batch_apply(lambda x: producer.produce_subgoal(inputs, *x, depth=depth),
                          [layer_inputs, start_inds.float(), end_inds.float(), left_parents, right_parents])
        self.subgoals, left_parents, right_parents = out
        # recurse: each produced subgoal becomes a boundary for its two children,
        # hence the interleaving of (start, subgoal) and (subgoal, end) pairs
        self.child_layer = SubgoalTreeLayer(self)
        self.child_layer.produce_tree(inputs,
                                      layerwise_inputs,
                                      rec_interleave([start_inds.float(), self.subgoals.ind.clone()]),
                                      rec_interleave([self.subgoals.ind.clone(), end_inds.float()]),
                                      rec_interleave([left_parents, self.subgoals]),
                                      rec_interleave([self.subgoals, right_parents]),
                                      producer, depth - 1)
    def compute_matching_dists(self, inputs, matching_fcn, left_parents, right_parents):
        """Computes the distribution of matches of subgoals to ground truth frames."""
        self.apply_fn(inputs, matching_fcn, left_parents, right_parents)
    def apply_fn(self, inputs, fn, left_parents, right_parents):
        """ Recursively applies fn to the tree.
        :param inputs:
        :param fn: a function that takes in (inputs, subgoal, left_parent, right_parent) and outputs a dict
        :param left_parents:
        :param right_parents:
        :return:
        """
        if self.depth == 0:
            return
        assert self.subgoals is not None    # need subgoal info to match to ground truth sequence
        self.subgoals.update(batch_apply(fn, inputs, self.subgoals, left_parents, right_parents, unshape_inputs=True))
        self.child_layer.apply_fn(rec_interleave([inputs, inputs]),
                                  fn,
                                  rec_interleave([left_parents, self.subgoals]),
                                  rec_interleave([self.subgoals, right_parents]))
    def __iter__(self):
        """Layer-wise iterator."""
        if self.subgoals is None:
            return
        yield self
        if self.child_layer is not None:
            for l in self.child_layer:
                yield l
    def depth_first_iter(self, current_node=0):
        """Depth-first subgoal iterator.
        Side effect: sets self.subgoal (singular) to the current node's slice."""
        if self.subgoals is None or self.child_layer is None:
            return
        for n in self.child_layer.depth_first_iter(2*current_node):
            yield n
        self.subgoal = rmap(lambda x: x[:, current_node], self.subgoals)
        yield self
        for n in self.child_layer.depth_first_iter(2*current_node+1):
            yield n
    def get_attr_df(self, attr):
        # collects `attr` across all nodes in depth-first order
        # TODO make this faster
        return torch.stack([node.subgoal[attr] for node in self.depth_first_iter()], 1)
    def set_attr_df(self, **kwargs):
        # TODO check
        # NOTE(review): iterating kwargs directly yields keys only — likely needs
        # kwargs.items(); also `node[name] = chunk` requires __setitem__ which this
        # class does not define. The original TODO suggests this path is untested.
        for name, value in kwargs:
            split = self.split_by_layer_df(value, 1)
            for chunk, node in zip(split, self):
                node[name] = chunk
    def get_attr_bf(self, attr):
        # collects `attr` across all layers in breadth-first order
        return porch.cat([node.subgoals[attr] for node in self], 1)
    def set_attr_bf(self, **kwargs):
        # scatters breadth-first values back onto the layers (2**i nodes in layer i)
        start = 0
        for i, node in enumerate(self):
            node.subgoals.update(rmap(lambda x: x[:,start:start+2**i].contiguous(), kwargs))
            start += 2**i
    def get_leaf_nodes(self):
        """Returns the subgoals of the deepest layer."""
        if self.depth == 0:
            raise ValueError("Depth 0 tree does not have leaf nodes!")
        elif self.depth == 1:
            return self.subgoals
        else:
            return self.child_layer.get_leaf_nodes()
    @staticmethod
    def cat(*argv):
        """Concatenates several trees along the batch dimension (recursively)."""
        tree = SubgoalTreeLayer()
        for attr, val in argv[0].__dict__.items():
            if val is None or np.isscalar(val):
                tree.__dict__[attr] = val
            elif attr == 'subgoals':
                tree.__dict__[attr] = map_dict(concat, listdict2dictlist([d.subgoals for d in argv]))
            elif attr == 'child_layer':
                tree.__dict__[attr] = SubgoalTreeLayer.cat(*[d.child_layer for d in argv])
            else:
                raise ValueError("Cannot handle data type {} during tree concatenation!".format(type(val)))
        return tree
    @staticmethod
    def reduce(*argv):
        """Called inside result gathering for multi-GPU processing"""
        return SubgoalTreeLayer.cat(*argv)
    @staticmethod
    def split_by_layer_df(vals, dim):
        # delegates to the module-level helper
        return depthfirst2layers(vals, dim)
        # """Splits depth-first vals into N lists along dimension dim, each containing vals for the corresp. layer."""
        # depth = int(np.log2(vals.shape[dim]) + 1)
        # output = [[] for _ in range(depth)]  # one list per layer
        #
        # def get_elem(l_idx, r_idx, d):
        #     if l_idx == r_idx - 1: return
        #     idx = int((r_idx - l_idx) / 2) + l_idx
        #     output[d].append(vals[:, idx])
        #     get_elem(l_idx, idx, d + 1)
        #     get_elem(idx, r_idx, d + 1)
        #
        # get_elem(-1, vals.shape[dim], 0)
        # return output
    @staticmethod
    def split_by_layer_bf(vals, dim):
        """Splits breadth-first vals into N arrays along dimension dim, each containing vals for the corresp. layer."""
        depth = int(np.log2(vals.shape[dim]) + 1)
        output = []     # one list per layer
        current_idx = 0
        for d in range(depth):
            output.append(vals[:, current_idx : current_idx + int(2**d)])
            current_idx += int(2**d)
        return output
    @property
    def bf(self):
        # breadth-first attribute accessor proxy
        return AccessWrapper(self, 'bf')
    @property
    def df(self):
        # depth-first attribute accessor proxy
        return AccessWrapper(self, 'df')
    @property
    def size(self):
        # total node count of a full binary tree of this depth
        return int(2**self.depth - 1)
class AccessWrapper():
    """Thin proxy exposing a tree's breadth-first ('bf') or depth-first ('df')
    attribute accessors through attribute and item syntax."""
    def __init__(self, obj, type):
        # bypass our own __setattr__ so these land directly in the instance dict
        super().__setattr__('tree', obj)
        super().__setattr__('type', type)
    def __getattr__(self, item):
        target = self.tree
        if self.type == 'bf':
            return target.get_attr_bf(item)
        elif self.type == 'df':
            return target.get_attr_df(item)
    def __setattr__(self, key, value):
        target = self.tree
        if self.type == 'bf':
            return target.set_attr_bf(**{key: value})
        elif self.type == 'df':
            return target.set_attr_df(**{key: value})
    def __getitem__(self, item):
        return getattr(self, item)
    def __setitem__(self, key, value):
        return setattr(self, key, value)
def interleave(t1, t2):
    """Interleaves two [batch, n, ...] tensors element-wise along dim 1:
    result[:, 0] = t1[:, 0], result[:, 1] = t2[:, 0], etc. Returns None if
    either input is None."""
    if t1 is None or t2 is None:
        return None
    assert t1.shape == t2.shape     # can only interleave vectors of equal shape
    batch, n = t1.shape[0], t1.shape[1]
    stacked = torch.stack((t1, t2), dim=2)
    return stacked.view(batch, 2 * n, *t1.shape[2:])
rec_interleave = make_recursive_list(interleave)
def concat(*argv):
    """Concatenates a list of tensors (passed as the first argument) along dim 0,
    moving everything to the first tensor's device. Returns None when the first
    entry is None."""
    tensors = argv[0]
    if tensors[0] is None:
        return None
    target_device = tensors[0].device
    return torch.cat([t.to(target_device) for t in tensors], dim=0)
def depthfirst2breadthfirst(tensor, dim=1):
    """ Converts a sequence represented depth first to breadth first """
    layers = depthfirst2layers(tensor, dim)
    return torch.cat(layers, dim)
def depthfirst2layers(tensor, dim=1):
    """Converts a sequence represented depth-first into a list of per-layer tensors.

    :param tensor: tensor with 2**depth - 1 tree nodes stored depth-first along `dim`.
    :param dim: dimension that indexes the nodes.
    :return: list of tensors, index 0 = top layer (root), last entry = bottom layer.
    """
    length = tensor.shape[dim]          # renamed from `len` to avoid shadowing the builtin
    depth = int(np.log2(length + 1))    # np.int was deprecated/removed in NumPy >= 1.24
    slices = []
    for _ in range(depth):
        # every other element of the remaining sequence belongs to the current bottom layer
        slices.append(slice_tensor(tensor, 0, 2, dim))
        tensor = slice_tensor(tensor, 1, 2, dim)
    return list(reversed(slices))
def ind_df2bf(df_indices, depth):
    """ Transforms indices for a depth-first array such that the same elements can be retrieved from the corresponding
    breadth-first array

    :param df_indices: integer tensor of depth-first node indices (0-based).
    :param depth: depth of the full binary tree.
    :return: tensor of the corresponding breadth-first indices.
    """
    # BUGFIX: the original called .bool() here, which turned the indices into a
    # boolean tensor and broke the modulo arithmetic below; we only need the
    # shift to 1-based indexing.
    df_indices = df_indices + 1  # starting from 1
    bf_indices = torch.zeros_like(df_indices)
    for i in range(depth):
        # select the nodes that live in layer i counted from the bottom
        mask = (df_indices % (2**i) == 0) & (df_indices % (2**(i+1)) > 0)
        # order in layer + layer position
        bf_indices[mask] = df_indices[mask] // (2**(i+1)) + (2**(depth - i - 1) - 1)
    return bf_indices
def ind_bf2df(bf_indices, depth):
""" Transforms indices for a breadth-first array such that the same elements can be retrieved from the corresponding
depth-first array """
bf_indices = (bf_indices + 1).bool() # starting from 1
df_indices = torch.zeros_like(bf_indices)
for i in range(depth):
mask = (bf_indices >= 2 ** i) & (bf_indices < 2 ** (i + 1)) # if in layer i from the top
ib = depth - i - 1 # layer from the bottom
# order in layer * layer position
df_indices[mask] = (bf_indices[mask] - 2**i) * (2**(ib+1)) + (2**ib) - 1
return df_indices
<file_sep>import os.path
from blox import AttrDict
from gcp.planning.infra.agent.general_agent import GeneralAgent
from gcp.planning.infra.envs.miniworld_env.multiroom3d.multiroom3d_env import TopdownMultiroom3dEnv
from gcp.planning.infra.policy.prm_policy.prm_policy import PrmPolicy
BASE_DIR = '/'.join(str.split(__file__, '/')[:-1])
current_dir = os.path.dirname(os.path.realpath(__file__))
# environment parameters passed to TopdownMultiroom3dEnv
env_params = {
    'init_pos': None,
    'goal_pos': None,
    'n_rooms': 16,
    'heading_smoothing': 0.1,
    'crop_window': 40,
}
# agent config: GeneralAgent collecting 100-step episodes at 128x128 resolution
agent = AttrDict(
    type=GeneralAgent,
    env=(TopdownMultiroom3dEnv, env_params),
    T=100,
    make_final_gif=False,   # whether to make final gif
    #make_final_gif_freq=100,   # final gif, frequency
    image_height=128,
    image_width=128,
)
# policy config: probabilistic-roadmap planner, bounded by the episode length
policy = AttrDict(
    type=PrmPolicy,
    max_traj_length=agent.T,
)
# top-level data-collection config: trajectories 0..999 saved as hdf5
# (one trajectory per file) under $GCP_DATA_DIR/nav_16rooms
config = AttrDict(
    current_dir=current_dir,
    start_index=0,
    end_index=999,
    agent=agent,
    policy=policy,
    save_format=['hdf5'],
    data_save_dir=os.environ['GCP_DATA_DIR'] + '/nav_16rooms',
    split_train_val_test=False,
    traj_per_file=1,
)
<file_sep>import os
import os.path
import sys
from tensorflow.contrib.training import HParams
from gcp.planning.infra.agent.utils.hdf5_saver import HDF5Saver
from gcp.planning.infra.agent.utils.raw_saver import RawSaver
from gcp.prediction.utils.logger import HierarchyLogger
sys.path.append('/'.join(str.split(__file__, '/')[:-2]))
import numpy as np
class Sim(object):
    """ Main class to run algorithms and experiments: builds the agent, policy,
    logger and savers from a config dict, then collects and stores trajectories. """

    def __init__(self, config, gpu_id=0, ngpu=1):
        """
        :param config: dict of hyperparameters overriding _default_hparams;
            may carry an optional 'start_goal_list' entry (popped here).
        :param gpu_id, ngpu: forwarded to the policy constructor.
        """
        self._start_goal_list = config.pop('start_goal_list') if 'start_goal_list' in config else None
        self._hp = self._default_hparams()
        self.override_defaults(config)
        self._hp.agent['log_dir'] = self._hp.log_dir
        self._hp.n_rooms = self._hp.agent['env'][1]['n_rooms'] if 'n_rooms' in self._hp.agent['env'][1] else None
        self.agent = self._hp.agent['type'](self._hp.agent, self._start_goal_list)
        self.agentparams = self._hp.agent
        self._record_queue = self._hp.record_saver
        self._counter = self._hp.counter
        # Build a default logger only when the caller did not supply one.
        if self._hp.logging_conf is None:
            self.logger = HierarchyLogger(self._hp.log_dir + '/verbose', self._hp, self._hp.agent['T'])
            self._hp.logging_conf = {'logger':self.logger, 'global_step':-1, 'phase':'test'}
            self._hp.policy['logger'] = self.logger
        self.policy = self._hp.policy['type'](self.agent._hp, self._hp.policy, gpu_id, ngpu,
                                              **self.agent.env.env_policy_params())
        self.trajectory_list = []
        self.im_score_list = []
        # Best-effort cleanup of a stale image dir; intentionally swallows errors
        # (e.g. the key/file may not exist).
        try:
            os.remove(self._hp.agent['image_dir'])
        except:
            pass
        # Savers selected by save_format write finished trajectories to disk.
        self.savers = []
        if 'hdf5' in self._hp.save_format:
            self.savers.append(HDF5Saver(self._hp.data_save_dir, self.agent.env._hp, self.agent._hp,
                                         traj_per_file=self._hp.traj_per_file, offset=self._hp.start_index,
                                         split_train_val_test=self._hp.split_train_val_test))
        if 'raw' in self._hp.save_format:
            self.savers.append(RawSaver(self._hp.data_save_dir))
        self.logging_conf = self._hp.logging_conf

    def override_defaults(self, config):
        """
        :param config: override default valus with config dict
        :return:
        """
        for name, value in config.items():
            print('overriding param {} to value {}'.format(name, value))
            # Deliberately loud: passing a value identical to the default is
            # treated as a config mistake in this codebase.
            if value == getattr(self._hp, name):
                raise ValueError("attribute {} is identical to default value!!".format(name))
            if name in self._hp and self._hp.get(name) is None:   # don't do a type check for None default values
                setattr(self._hp, name, value)
            else: self._hp.set_hparam(name, value)

    def _default_hparams(self):
        # Default hyperparameters; see run()/save_data() for how each is used.
        default_dict = {
            'save_format': ['hdf5', 'raw'],
            'save_data': True,
            'agent': {},
            'policy': {},
            'start_index': -1,
            'end_index': -1,
            'ntraj': -1,
            'gpu_id': -1,
            'current_dir': '',
            'record_saver': None,
            'counter': None,
            'traj_per_file': 10,
            'data_save_dir': '',
            'log_dir': '',
            'result_dir': '',
            'split_train_val_test': True,
            'logging_conf': None,   # only needed for training loop
        }
        # add new params to parent params
        parent_params = HParams()
        for k in default_dict.keys():
            parent_params.add_hparam(k, default_dict[k])
        return parent_params

    def run(self):
        """Collect trajectories: either a fixed index range, or until a shared
        counter (multi-process collection) reaches ntraj."""
        if self._counter is None:
            for i in range(self._hp.start_index, self._hp.end_index+1):
                self.take_sample(i)
        else:
            itr = self._counter.ret_increment()
            while itr < self._hp.ntraj:
                print('taking sample {} of {}'.format(itr, self._hp.ntraj))
                self.take_sample(itr)
                itr = self._counter.ret_increment()

    def take_sample(self, index):
        """
        :param index: run a single trajectory with index
        :return: the agent_data dict produced by the rollout
        """
        self.policy.reset()
        agent_data, obs_dict, policy_out = self.agent.sample(self.policy, index)
        if self._hp.save_data:
            self.save_data(index, agent_data, obs_dict, policy_out)
        if self.logging_conf is not None and 'goal_image' in agent_data and 'images' in obs_dict:
            # Pick the last goal image; >4 dims means an extra camera axis.
            goal = agent_data['goal_image'][-1, 0] if len(agent_data['goal_image'].shape)>4 else agent_data['goal_image'][-1]
            # NOTE(review): the key tested is 'goal_pos' but the value read is
            # obs_dict['goal'] -- looks inconsistent, confirm against the envs.
            if 'goal_pos' in obs_dict:
                goal_pos = obs_dict['goal'][-1, :] if isinstance(obs_dict['goal'], np.ndarray) else obs_dict['goal']
            else:
                goal_pos = None
            topdown_image = obs_dict['topdown_image'] if 'topdown_image' in obs_dict else None
            self.policy.log_outputs_stateful(**self.logging_conf, dump_dir=self._hp.log_dir,
                                             exec_seq=obs_dict['images'][:, 0], goal=goal, goal_pos=goal_pos,
                                             index=index, topdown_image=topdown_image, env=self.agent.env)  # [:, 0] for cam0
        return agent_data

    def save_data(self, itr, agent_data, obs_dict, policy_outputs):
        """Hand a finished trajectory to the record queue or the local savers."""
        if self._record_queue is not None:  # if using a queue to save data
            self._record_queue.put((agent_data, obs_dict, policy_outputs))
        else:
            for saver in self.savers:  # if directly saving data
                saver.save_traj(itr, agent_data, obs_dict, policy_outputs)
<file_sep>import torch.nn as nn
from gcp.prediction.models.tree.tree_module import TreeModule
""" This module contains the hierarchical model in which every layer is predicted by a separately trained network. """
class UntiedLayersTree(nn.Module):
    """Hierarchical prediction model in which every tree layer is predicted by a
    separately trained (untied) TreeModule."""

    def __init__(self, hp, decoder):
        super().__init__()
        self._hp = hp
        # One independent subgoal network per hierarchy level.
        self.tree_modules = nn.ModuleList([TreeModule(hp, decoder) for i in range(self._hp.hierarchy_levels)])

    def produce_subgoal(self, *args, depth, **kwargs):
        # NOTE(review): assumes `depth` counts down from hierarchy_levels so that
        # this selects the module for the current layer -- confirm with callers.
        return self.tree_modules[self._hp.hierarchy_levels - depth].produce_subgoal(*args, **kwargs)

    def __getattr__(self, item):
        # nn.Module stores submodules in _modules; resolve those normally, and
        # delegate any other attribute to the first tree module.
        if item in self._modules.keys():
            return super().__getattr__(item)
        return getattr(self.tree_modules[0], item)
<file_sep># import ipdb
import glob
import numpy as np
import gcp.planning.infra.datasets.save_util.configs.TAP_3obj_push as config
import tensorflow as tf
from blox import AttrDict
from blox.basic_types import str2int
from gcp.planning.infra.agent.utils.hdf5_saver import HDF5SaverBase
from tqdm import tqdm
# Conversion settings: which TFRecord split to read and the raw frame geometry.
phase = 'train'
count = 0  # running trajectory counter (referenced only by the commented-out saver below)
H = config.precrop_frame_ht
W = config.precrop_frame_wd
C = 3  # RGB channels
class TAPMaker(HDF5SaverBase):
    """Converts TAP TFRecord sequences into per-trajectory hdf5 files."""

    def __init__(self, save_dir, offset=0, split=(0.90, 0.05, 0.05)):
        super().__init__(save_dir, traj_per_file=1, offset=offset, split=split)
        # '?' matches the shard digit suffix of each tfrecord file.
        self.filenames = sorted(glob.glob(config.tfrecord_dir + phase + '/*.tfrecord?'))
        self.max_seq_len = 80  # NOTE(review): not used in this class -- possibly consumed by the base class, confirm

    def get_traj(self, string_record):
        """Decode one serialized tf.train.Example into images + pad mask."""
        example = tf.train.Example()
        example.ParseFromString(string_record)
        images = []
        # Keys look like '<frame_idx>/image_view0/encoded'; collect the frame indices.
        numbered_keys = filter(lambda x: str2int(x.split('/')[0]) is not None, example.features.feature.keys())
        image_keys = filter(lambda x: 'image_view0/encoded' in x, numbered_keys)
        indices = np.array(list([str2int(x.split('/')[0]) for x in image_keys]))
        length = np.max(indices) + 1
        for i in range(length):
            key = '{}/image_view0/encoded'.format(i)
            # Raw uint8 bytes reshaped to the configured precrop frame size.
            val = np.frombuffer(example.features.feature[key].bytes_list.value[0], dtype=np.uint8)
            val = val.reshape(H, W, C)
            images.append(val)
        pad_mask = np.ones(len(images))  # every frame is valid (no padding)
        images = np.array(images)
        return AttrDict(images=images, pad_mask=pad_mask)

    def make_phase(self, filenames, phase):
        # NB: the `phase` parameter shadows the module-level `phase` constant.
        for fn in filenames:
            record_iterator = tf.python_io.tf_record_iterator(path=fn)
            for i, string_record in enumerate(tqdm(record_iterator)):
                traj = self.get_traj(string_record)
                self.save_hdf5([traj], phase)
# def save_hdf5(self, traj_list, phase):
# traj = traj_list[0]
# with h5py.File(config.h5record_dir + 'hdf5/' + phase + '/traj_{0:06d}'.format(count) + '.h5', 'w') as F:
# F['traj0/pad_mask'] = traj.pad_mask
# F['traj0/images'] = traj.images
# F['traj_per_file'] = 1
if __name__ == '__main__':
    # Convert the configured TFRecord split into per-trajectory hdf5 files.
    maker = TAPMaker(config.h5record_dir)
    maker.make_dataset()
"Markdown",
"Python",
"Text"
] | 68 | Python | Douxation/video-gcp | 4608a543fe60c550363de864be7a38c4f663836a | 562aa10caf68fe6fe0bd53a5989e95746de3c93c | |
refs/heads/master | <repo_name>samueljaval/simple-blackjack<file_sep>/simple_bj.py
'''
Has to be run with python3, will not function properly
with python2.
'''
import random
import sys
#SIMPLE HELPER FUNCTIONS ###################################
def deal_cards(cards, nb):
    """Append nb cards drawn (with replacement) from a single-suit blackjack
    deck to `cards` (mutated in place) and return it."""
    deck = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]
    for _ in range(nb):
        cards.append(deck[random.randint(0, 12)])
    return cards
def sum(cards):
    """Return the total value of the hand.  (Shadows the builtin `sum` --
    kept for compatibility with the rest of this script.)"""
    total = 0
    index = 0
    while index < len(cards):
        total += cards[index]
        index += 1
    return total
def make_bet(money):
    """Prompt the player for a wager until it is a whole number they can afford.

    :param money: current bankroll.
    :return: (bet, money) where `bet` is the wager as a *string* (callers apply
        int() to it) and `money` is the bankroll with the wager deducted.
    """
    print("\n")
    bet = input("How much do you wanna bet? : ")
    print("\n")
    # Keep asking until the answer is both numeric and affordable.  The original
    # code only digit-checked the first answer, so a later non-numeric reply
    # crashed with ValueError at int(bet).
    while not bet.isdigit() or int(bet) > money:
        if bet.isdigit():
            print("you do not have enough money for that")
        bet = input("How much do you wanna bet? : ")
    money -= int(bet)
    return bet, money
def intro():
    """Print the welcome banner shown once at program start."""
    banner = (
        "\n\n\n\n",
        "WELCOME TO SIMPLE BLACKJACK, A SIMPLIFIED VERSION OF BLACKJACK",
        "IF YOU TRIPLE YOUR MONEY YOU WIN, YOU START WITH 1000$",
        "has to be run with python3",
        "\n\n\n\n",
    )
    for line in banner:
        print(line)
###########################################################
#### the if1 and if1_start functions ask the user if they
#### want to use their ace as a 1 or an 11
def if1(cards, nb):
    """If the card at position -nb is an ace, ask the player whether it counts
    as 1 or 11 and update the hand in place.

    :param cards: the current hand (mutated in place).
    :param nb: 1-based offset from the end of the hand of the card to inspect.
    :return: the (possibly updated) hand.
    """
    if cards[-nb] == 1:
        change = input("do you want your ace to be 1 or 11? : ")
        while change.isdigit() == False:
            print("please enter 1 or 11")
            change = input("do you want your ace to be 1 or 11? : ")
        if int(change) == 11:
            cards[-nb] = 11
        elif int(change) == 1:
            cards[-nb] = 1
        else :
            # NOTE(review): a numeric answer other than 1/11 is re-asked only
            # once and the second answer is discarded -- the ace stays a 1.
            print("please enter 1 or 11")
            change = input("do you want your ace to be 1 or 11? : ")
    return cards
def if1_start(cards):
    """Resolve the ace value (1 vs 11) for both cards of the opening hand."""
    for offset in (1, 2):
        cards = if1(cards, offset)
    return cards
###################################
###### less_21 with what happens when the user's sum is
###### less than 21. If the user stops before 21, the dealer plays
def less_21(money, cards, dealer, bet):
    """One hit/stand decision while the player's hand is at most 20.

    On "stand" the dealer draws to 17+ and the round is settled; settling
    recurses into bj() (or sys.exit), so in that branch this function never
    returns to its caller in practice.

    NOTE(review): `money` is only a local here -- winnings are propagated by
    passing the updated value into the recursive bj() call, never returned.

    :return: the dealer's hand (possibly extended), for the caller's loop.
    """
    print("\n")
    response = input('do you wanna a card? if yes type 1, type 2 if you wanna stop : ')
    while response.isdigit() == False:
        print("please enter 1 or 2")
        response = input('do you wanna a card? if yes type 1, type 2 if you wanna stop : ')
    if int(response) == 1: #player wants another card
        cards = deal_cards(cards, 1)
        delt = cards[-1]  # NOTE(review): unused
        print("your cards are now ")
        print(cards)
        cards = if1(cards,1)  # new card might be an ace
        print("and your total sum is")
        print(sum(cards))
    elif int(response) == 2: #player does not want another card, dealer plays
        print("let's see what the dealer gets")
        # Dealer draws until reaching at least 17 (standard rule).
        while sum(dealer) < 17 :
            dealer = deal_cards(dealer, 1)
        print("the dealer now has")
        print(dealer)
        print("their sum is " + str(sum(dealer)))
        if sum(dealer) > sum(cards) and sum(dealer) <= 21:
            # Dealer wins: bet already deducted in make_bet.
            print("Sorry, the dealer beat you, you lose your bet")
            print("\n\n\n\n\n\n")
            if money == 0:
                print("you are out of money")
                sys.exit(0)
            else :
                bj(money)
        elif sum(dealer) < sum(cards) or sum(dealer) > 21:
            # Player wins (or dealer busts): pay back double the stake.
            print("congrats, you beat the dealer, you win your bet")
            money += 2*int(bet)
            print("\n\n\n\n\n\n")
            if money >= 3000:
                print("congrats you win")
                sys.exit(0)
            else :
                bj(money)
        else :
            # Push: refund the stake.
            print("It's a tie, you get your bet back")
            money += int(bet)
            bj(money)
    else : #wrong user input, ask again
        print("please enter 1 or 2")
        response = input('do you wanna a card? if yes type 1, type 2 if you wanna stop : ')
    return dealer
#########################################
###### This is the main function of the program. It is called recursively
###### when the player still has money and wants to play again
def bj(money):
    """Play one round of simplified blackjack with the given bankroll.

    The game "loop" is recursion: every settled round calls bj() again with
    the updated bankroll, and sys.exit terminates on win (>= 3000$), quit,
    or bankruptcy.
    """
    print("You have " + str(money) + " dollars")
    quit = input("do you wanna bet or quit, type 1 to bet, 2 to quit : ")
    while quit.isdigit() == False:
        print("please enter 1 or 2")
        quit = input("do you wanna bet or quit, type 1 to bet, 2 to quit : ")
    if int(quit) == 2:
        print("quitting...")
        sys.exit(0)
    elif int(quit) == 1:
        # make_bet already deducts the stake from money.
        bet, money = make_bet(money)
        cards = []
        dealer = []
        dealer = deal_cards(dealer, 1)  # dealer shows one card
        print("the dealer got : ")
        print(dealer)
        cards = deal_cards(cards,2)     # player's opening hand
        print("your cards are : ")
        print(cards)
        cards = if1_start(cards)        # resolve aces as 1 or 11
        print("and your total sum is")
        print(sum(cards))
        # Hit/stand loop; on "stand" less_21 settles the round and recurses,
        # so control only falls through here after a hit past 20 or a bust.
        while sum(cards) <= 20:
            dealer = less_21(money,cards,dealer,bet)
        if sum(cards) == 21 and sum(dealer) != 21:
            print("BLACKJACK!!!!")
            money += 2*int(bet)
            if money >= 3000:
                print("congrats you win")
                sys.exit(0)
            print("\n\n\n\n\n\n")
            bj(money)
        if sum(cards) == 21 and sum(dealer) == 21:
            print("It's a tie, you get your bet back")
            money += int(bet)
            bj(money)
        if sum(cards) > 21:
            print("Sorry, you are over 21, you lost your bet")
            print("\n\n\n\n\n\n")
            if money == 0:
                print("you are out of money")
                sys.exit(0)
            else :
                bj(money)
    else :
        # Numeric answer other than 1/2: re-prompt via a fresh round.
        print("please enter 1 or 2")
        bj(money)
# Program entry point: show the intro banner, then start the game loop with a
# 1000$ bankroll (the player wins by reaching 3000$).
intro()
bj(1000)
| f2478fe7edc09789dc6f49307b810d6c5df8d0d7 | [
"Python"
] | 1 | Python | samueljaval/simple-blackjack | 2c7b6a8f711ce300d424e30c9330479aa2a97175 | 28acf76be1d01b479f8de59d531975658703a88a | |
refs/heads/master | <file_sep>require "novaposhta/api"
require "novaposhta/city"
require "novaposhta/order"
require "novaposhta/warehouse"
require "novaposhta/parsers/city_parser"
require "novaposhta/parsers/close_parser"
require "novaposhta/parsers/order_parser"
require "novaposhta/parsers/warehouse_parser"
# Top-level entry points for the Nova Poshta API wrapper.
module Novaposhta
  # Build an API client; falls back to the globally configured key when none is given.
  def self.new(key = nil)
    Novaposhta::Api.new(key.nil? ? self.api_key : key)
  end

  # NOTE(review): raises NameError if Novaposhta.api_key= was never called first.
  def self.api_key
    @@api_key
  end

  def self.api_key=(key)
    @@api_key = key
  end
end
require "nokogiri"
require "pdfkit"
require "tempfile"
module Novaposhta
# HTTP client for the Nova Poshta XML web service: builds request XML,
# posts it, and delegates response parsing to the SAX parsers.
class Api
  WEB_SERVICE_POINT = "http://orders.novaposhta.ua/xml.php"
  PRINTED_FORM_TTH_POINT = "http://orders.novaposhta.ua/pformn.php"
  PRINTED_FORM_MARKERS_POINT = "http://orders.novaposhta.ua/print_formm.php"

  attr_accessor :api_key

  def initialize(api_key)
    @api_key = api_key
  end

  # Submit orders (Hashes or Novaposhta::Order objects) and return them with
  # server-assigned TTN ids filled in by OrderParser.
  def order(orders=[])
    return [] if orders.empty?
    xml = create_request_xml do |xml|
      orders.each do |o|
        order = o.is_a?(Hash) ? Novaposhta::Order.new(o) : o
        # Scalar attributes become XML attributes; Hash-valued attributes
        # become nested child elements.
        xml.order(order.hash.select{|k,v| !v.is_a?(Hash)}) do |xml_order|
          order.hash.select{|k, v| v.is_a?(Hash)}.each do |k, v|
            xml_order.send(k, v)
          end
        end
      end
    end
    request_and_parse(xml, Novaposhta::Parser::OrderParser.new(orders)).orders
  end

  # Close one or many TTNs; returns the hash built by CloseParser.
  def close(ttn_ids=[])
    ttn_ids = [ttn_ids] if !ttn_ids.is_a?(Array)
    return {} if ttn_ids.empty?
    xml = create_request_xml do |xml|
      ttn_ids.each do |ttn_id|
        xml.close ttn_id
      end
    end
    request_and_parse(xml, Novaposhta::Parser::CloseParser.new).closes
  end

  # All cities, sorted case-insensitively by Russian name.
  def cities
    xml = create_request_xml do |xml|
      xml.city
    end
    request_and_parse(xml, Novaposhta::Parser::CityParser.new).cities.sort {|x,y| x.ru.downcase <=> y.ru.downcase}
  end

  # Cities that have warehouses, same sorting as #cities.
  def city_warehouses
    xml = create_request_xml do |xml|
      xml.citywarehouses
    end
    request_and_parse(xml, Novaposhta::Parser::CityParser.new).cities.sort {|x,y| x.ru.downcase <=> y.ru.downcase}
  end

  # Warehouses, optionally filtered by city name ("warenhouse" spelling is the API's).
  def warehouses(city_name = nil)
    xml = create_request_xml do |xml|
      xml.warenhouse
      xml.filter city_name if !city_name.nil?
    end
    request_and_parse(xml, Novaposhta::Parser::WarehouseParser.new).warehouses
  end

  # Single warehouse by numeric id; raises when the id is absent in the city.
  def warehouse(warehouse_id, city_name = nil)
    selected = warehouses(city_name).select{ |w| w.id == warehouse_id.to_i }
    raise "Warehouse with #{warehouse_id} not found in city #{city_name}" if selected.empty?
    return selected[0]
  end

  # Render the printed TTN form to a PDF file; returns the PDFKit result.
  # :file_to may be a path or a File; defaults to a tempfile.
  def printed_form_ttn(ttn_id, options = {})
    options = {:copy_count => 1,
               :o => ttn_id,
               :token => api_key,
               :file_to => Tempfile.new(['ttn', '.pdf']).path
    }.merge(options)
    options[:file_to] = options[:file_to].path if options[:file_to].is_a?(File)
    # TODO check that ttn_id not exists
    url_options = {:o => options[:o], :num_copy => options[:copy_count], :token => options[:token]}
    kit = PDFKit.new("#{PRINTED_FORM_TTH_POINT}?#{url_options.map{|k,v| "#{k}=#{v}"}.join("&")}", :orientation => "Landscape")
    kit.to_file(options[:file_to])
  end

  # Render the package markers form to a PDF file (same conventions as above).
  def printed_form_markers(ttn_id, options = {})
    options = {:o => ttn_id,
               :token => api_key,
               :file_to => Tempfile.new(['markers', '.pdf']).path
    }.merge(options)
    options[:file_to] = options[:file_to].path if options[:file_to].is_a?(File)
    # TODO check that ttn_id not exists
    url_options = {:o => options[:o], :token => options[:token]}
    kit = PDFKit.new("#{PRINTED_FORM_MARKERS_POINT}?#{url_options.map{|k,v| "#{k}=#{v}"}.join("&")}", :orientation => "Landscape")
    kit.to_file(options[:file_to])
  end

  private

  # Wrap the caller-provided body in <file><auth>KEY</auth>...</file>.
  def create_request_xml(&xml)
    builder = Nokogiri::XML::Builder.new(:encoding => 'UTF-8') do |xml|
      xml.file {
        xml.auth api_key
        yield xml
      }
    end
    builder.to_xml
  end

  def request_and_parse(xml, parser)
    result = request(xml)
    parse(result, parser)
  end

  # POST the XML payload to the web-service endpoint and return the raw body.
  def request(xml)
    uri = URI(WEB_SERVICE_POINT)
    req = Net::HTTP::Post.new(uri.request_uri)
    req.body = xml
    req.content_type = "text/xml"
    http = Net::HTTP.new(uri.host, uri.port)
    http.request(req).body
  end

  # Run the given SAX handler over the response and return the handler.
  def parse(xml, parser)
    p = Nokogiri::XML::SAX::Parser.new(parser)
    p.parse(xml)
    parser
  end

  # NOTE(review): follows redirects but returns nil when there is no Location
  # header, and nothing in this class calls it -- looks unfinished.
  def to_pdf(uri)
    uri = URI.parse(uri) if uri.is_a?(String)
    http = Net::HTTP.new(uri.host, uri.port)
    request = Net::HTTP::Get.new(uri.request_uri)
    response = http.request(request)
    if !response.response['Location'].nil?
      return to_pdf(URI(response.response['Location']))
    end
  end
end
end<file_sep>module Novaposhta
# Thin wrapper around an order attribute hash submitted to the API.
class Order
  DEFAULT_DATE = Date.today

  attr_reader :hash

  def initialize(hash = {})
    @hash = hash
  end

  # True when the :saturday flag is set (accepts "true", true, "1", 1).
  def saturday?
    %w(true 1).include?(@hash[:saturday].to_s)
  end

  # NOTE(review): this method only assigns local variables and never touches
  # @hash, so its results are discarded -- it appears to be dead/unfinished code.
  def convert
    date = DEFAULT_DATE if date.nil?
    date = date.strftime('%Y-%m-%d') if !date.is_a?(String)
    date_desired = date_desired.strftime('%Y-%m-%d') if !date_desired.nil? and !date_desired.is_a?(String)
  end

  # Reader/writer fallback for arbitrary order attributes.
  # NOTE(review): the setter branch references `modifiable` and
  # `new_ostruct_member` (copied from OpenStruct) which are not defined here,
  # so assignment would raise NameError -- confirm before relying on setters.
  def method_missing(method_name, *args)
    mname = method_name.id2name
    len = args.length
    if mname.chomp!('=') && method_name != :[]=
      if len != 1
        raise ArgumentError, "wrong number of arguments (#{len} for 1)", caller(1)
      end
      modifiable[new_ostruct_member(mname)] = args[0]
    elsif len == 0 && method_name != :[]
      @hash[method_name]
    else
      ""
    end
  end
end
# Enumerations mirroring the numeric codes expected by the Nova Poshta API.

# How the shipment is paid.
module PayType
  CASH = "1"
  CASHLESS = "2"
end

# Who pays for the shipment.
module Payer
  RECIPIENT = "0"
  SENDER = "1"
  THIRD_PERSON = "2"
end

# Who pays for the return (redelivery) service.
module REDELIVERY_PAYMENT_PAYER
  RECIPIENT = "1"
  SENDER = "2"
end

# What is being sent back on redelivery.
module REDELIVERY_TYPE
  DOCUMENTS = "1"
  MONEY = "2"
  CONTAINERS = "3"
  PRODUCT = "4"
  OTHER = "5"
end
end<file_sep>require "nokogiri/xml/sax/document"
module Novaposhta
module Parser
# SAX handler collecting <city> elements (id / nameRu / nameUkr) into
# Novaposhta::City objects.
class CityParser < Nokogiri::XML::SAX::Document
  def initialize
    @cities = []
    @current_city = {}
    @city_element = false
    @tag_content = ''   # which attribute the next character data belongs to
  end

  def cities
    return @cities
  end

  def start_element(name, attrs = [])
    # Ignore everything outside a <city> subtree.
    return if name != "city" && !@city_element
    case name
    when "city"
      @current_city = {}
      @city_element = true
    when "id"
      @tag_content = "id"
    when "nameRu"
      @tag_content = "ru"
    when "nameUkr"
      @tag_content = "ukr"
    end
  end

  # NOTE(review): SAX may deliver text in several chunks; clearing
  # @tag_content after the first chunk keeps only that first chunk.
  def characters(string)
    return if @tag_content.empty?
    @current_city[@tag_content.to_sym] = string
    @tag_content = ''
  end

  def end_element(name)
    if name == "city"
      @city_element = false
      @cities << Novaposhta::City.new(@current_city) if !@current_city.empty?
    end
  end
end
end
end
<file_sep># A sample Gemfile
source "https://rubygems.org"
gemspec
gem "pdfkit", :git => "<EMAIL>:azomazo/pdfkit.git"<file_sep>module Novaposhta
class Warehouse
def initialize(hash)
@hash = hash
end
def id
@hash[:id].to_i
end
def max_weight_allowed
@hash[:max_weight_allowed].to_i
end
def method_missing(method_name, *args)
if @hash.key?(method_name)
@hash[method_name]
else
super
end
end
end
end<file_sep>module Novaposhta
module Parser
# SAX handler for TTN-close responses: maps each <close id="..">TEXT</close>
# element's id attribute to its text content.
class CloseParser < Nokogiri::XML::SAX::Document
  attr_reader :closes

  def initialize
    @closes = {}
    @current_close = nil
    @close_element = false
  end

  def start_element(name, attrs = [])
    return if name.downcase != "close"
    @current_close = parse_attr(attrs)[:id]
    # Flag that the next character data belongs to this <close> element.
    # (Previously no flag was ever raised, so #characters discarded all text
    # and #closes always stayed empty.)
    @close_element = true
  end

  def characters(string)
    return unless @close_element && @current_close
    # Concatenate because SAX may deliver the text node in several chunks.
    @closes[@current_close] = (@closes[@current_close] || '') + string
  end

  def end_element(name)
    @close_element = false if name.downcase == "close"
  end

  private

  def parse_attr(attrs = [])
    pattrs = {}
    attrs.each do |attr|
      pattrs[:id] = attr[1] if attr[0].downcase == "id"
    end
    pattrs
  end
end
end
end
<file_sep>require "nokogiri/xml/sax/document"
module Novaposhta
module Parser
# SAX handler collecting <warenhouse> elements (the API's spelling) into
# Novaposhta::Warehouse objects, normalizing a few misspelled tag names.
class WarehouseParser < Nokogiri::XML::SAX::Document
  def initialize
    @warehouses = []
    @current_warehouse = {}
    @warehouse_element = false
    @tag_content = ''   # which attribute the next character data belongs to
  end

  def warehouses
    return @warehouses
  end

  def start_element(name, attrs = [])
    # Ignore everything outside a <warenhouse> subtree.
    return if name != "warenhouse" && !@warehouse_element
    case name
    when "warenhouse"
      @current_warehouse = {}
      @warehouse_element = true
    when "city"
      @tag_content = "city_ukr"
    when "cityRu"
      @tag_content = "city_ru"
    when "wareId"
      @tag_content = "id"
    when "address"
      @tag_content = "address_ukr"
    when "addressRu"
      @tag_content = "address_ru"
    when "weekday_reseiving_hours"   # API misspelling -> normalized key
      @tag_content = "weekday_receiving_hours"
    when "saturday_reseiving_hours"  # API misspelling -> normalized key
      @tag_content = "saturday_receiving_hours"
    else
      @tag_content = name
    end
  end

  # NOTE(review): SAX may deliver text in several chunks; clearing
  # @tag_content after the first chunk keeps only that first chunk.
  def characters(string)
    return if @tag_content.empty?
    @current_warehouse[@tag_content.to_sym] = string
    @tag_content = ''
  end

  def end_element(name)
    if name == "warenhouse"
      @warehouse_element = false
      @warehouses << Novaposhta::Warehouse.new(@current_warehouse)
    end
  end
end
end
end<file_sep>Gem::Specification.new do |s|
s.name = 'novaposhta'
s.version = '0.0.0'
s.date = '2013-02-17'
s.summary = "Nobvaposhta API"
s.description = ""
s.authors = ["<NAME>"]
s.email = '<EMAIL>'
s.files = Dir['lib/**/*.rb']
s.homepage = 'https://github.com/azomazo/novaposhta'
s.add_dependency("nokogiri", "~> 1.5.6")
s.add_development_dependency "rspec", "~> 2.11.0"
end<file_sep>module Novaposhta
module Parser
# SAX handler for order-creation responses: copies the server-assigned np_id
# ("TTN") back onto the matching submitted order (Hash or Order object).
class OrderParser < Nokogiri::XML::SAX::Document
  attr_reader :orders

  def initialize(orders)
    @orders = orders
  end

  def start_element(name, attrs = [])
    return if name.downcase != "order"
    order_id, ttn_id = parse_attrs(attrs)
    set_ttn_id(order_id, ttn_id)
  end

  private

  # Previously a Hash whose :order_id did not match fell through to the elsif
  # branch and called o.order_id on a Hash, raising NoMethodError; branch on
  # the element type first.
  def set_ttn_id(order_id, ttn_id)
    @orders.each do |o|
      if o.is_a?(Hash)
        o[:ttn_id] = ttn_id.to_s if o[:order_id] == order_id.to_s
      elsif o.order_id == order_id.to_s
        o.ttn_id = ttn_id.to_s
      end
    end
  end

  # Extract the (id, np_id) attribute pair from an <order> element.
  def parse_attrs(attrs = [])
    order_id = ""
    ttn_id = ""
    attrs.each do |attr|
      if attr[0].downcase == "id"
        order_id = attr[1]
      elsif attr[0].downcase == "np_id"
        ttn_id = attr[1]
      end
    end
    return order_id, ttn_id
  end
end
end
end | eead6786435fac901875ff9edc521ca675b38b2d | [
"Ruby"
] | 10 | Ruby | azomazo/novaposhta-api | 37f73117a9ec1c33c57bdcc08925a8a84ad0ef56 | feba079ae517efffbeee564b836fe0561838936a | |
refs/heads/master | <repo_name>folengo/ufo2ft<file_sep>/Lib/ufo2ft/__init__.py
from __future__ import print_function, division, absolute_import
from ufo2ft.kernFeatureWriter import KernFeatureWriter
from ufo2ft.makeotfParts import FeatureOTFCompiler
from ufo2ft.markFeatureWriter import MarkFeatureWriter
from ufo2ft.otfPostProcessor import OTFPostProcessor
from ufo2ft.outlineOTF import OutlineOTFCompiler, OutlineTTFCompiler
__version__ = "0.3.5.dev0"
def compileOTF(ufo, outlineCompilerClass=OutlineOTFCompiler,
               featureCompilerClass=FeatureOTFCompiler, mtiFeaFiles=None,
               kernWriter=KernFeatureWriter, markWriter=MarkFeatureWriter,
               glyphOrder=None, convertCubics=True, cubicConversionError=2,
               useProductionNames=True, optimizeCff=True):
    """Create FontTools CFF font from a UFO.

    Some arguments are only used when generating CFF or TrueType outlines:
    `convertCubics` and `cubicConversionError` only apply to TrueType, and
    `optimizeCff` only applies to CFF.
    """
    # 1) Draw outlines and build the basic font tables.
    outlineCompiler = outlineCompilerClass(
        ufo, glyphOrder, convertCubics, cubicConversionError)
    otf = outlineCompiler.compile()

    # 2) Compile OpenType layout features (auto kern/mark/mkmk or MTI sources).
    featureCompiler = featureCompilerClass(
        ufo, otf, kernWriter, markWriter, mtiFeaFiles=mtiFeaFiles)
    featureCompiler.compile()

    # 3) Optionally rename glyphs to production names and subroutinize the CFF.
    postProcessor = OTFPostProcessor(otf, ufo)
    otf = postProcessor.process(useProductionNames, optimizeCff)

    return otf
def compileTTF(ufo, outlineCompilerClass=OutlineTTFCompiler, **kwargs):
    """Create FontTools TrueType font from a UFO.

    Same pipeline as compileOTF, just with the TrueType outline compiler.
    """
    return compileOTF(ufo, outlineCompilerClass=outlineCompilerClass, **kwargs)
<file_sep>/tests/outlineOTF_test.py
from fontTools.ttLib import TTFont
from defcon import Font
from ufo2ft.outlineOTF import OutlineTTFCompiler
from ufo2ft import compileTTF
import unittest
import os
def getTestUFO():
    """Load the shared test font (tests/data/TestFont.ufo) as a defcon Font."""
    dirname = os.path.dirname(__file__)
    return Font(os.path.join(dirname, 'data', 'TestFont.ufo'))
class TestOutlineTTCompiler(unittest.TestCase):
    """Tests for 'gasp' table generation in the TrueType outline compiler."""

    def setUp(self):
        self.otf = TTFont()
        self.ufo = getTestUFO()

    def test_setupTable_gasp(self):
        # Calling setupTable_gasp directly builds the table from the UFO's
        # openTypeGaspRangeRecords.
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.otf = self.otf
        compiler.setupTable_gasp()
        self.assertTrue('gasp' in compiler.otf)
        self.assertEqual(compiler.otf['gasp'].gaspRange,
                         {7: 10, 65535: 15})

    def test_compile_with_gasp(self):
        # Full compile should include the same gasp ranges.
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.compile()
        self.assertTrue('gasp' in compiler.otf)
        self.assertEqual(compiler.otf['gasp'].gaspRange,
                         {7: 10, 65535: 15})

    def test_compile_without_gasp(self):
        # No gasp records -> no gasp table.
        self.ufo.info.openTypeGaspRangeRecords = None
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.compile()
        self.assertTrue('gasp' not in compiler.otf)

    def test_compile_empty_gasp(self):
        # ignore empty gasp
        self.ufo.info.openTypeGaspRangeRecords = []
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.compile()
        self.assertTrue('gasp' not in compiler.otf)
class TestGlyphOrder(unittest.TestCase):
    """Tests that public.glyphOrder is honored, with .notdef forced first."""

    def setUp(self):
        self.ufo = getTestUFO()

    def test_compile_original_glyph_order(self):
        DEFAULT_ORDER = ['.notdef', 'space', 'a', 'b', 'c']
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.compile()
        self.assertEqual(compiler.otf.getGlyphOrder(), DEFAULT_ORDER)

    def test_compile_tweaked_glyph_order(self):
        NEW_ORDER = ['.notdef', 'space', 'b', 'a', 'c']
        self.ufo.lib['public.glyphOrder'] = NEW_ORDER
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.compile()
        self.assertEqual(compiler.otf.getGlyphOrder(), NEW_ORDER)

    def test_compile_strange_glyph_order(self):
        """Move space and .notdef to end of glyph ids
        ufo2ft always puts .notdef first.
        """
        NEW_ORDER = ['b', 'a', 'c', 'space', '.notdef']
        EXPECTED_ORDER = ['.notdef', 'b', 'a', 'c', 'space']
        self.ufo.lib['public.glyphOrder'] = NEW_ORDER
        compiler = OutlineTTFCompiler(self.ufo)
        compiler.compile()
        self.assertEqual(compiler.otf.getGlyphOrder(), EXPECTED_ORDER)
class TestNames(unittest.TestCase):
    """Tests for production glyph renaming (useProductionNames / public.postscriptNames)."""

    def setUp(self):
        self.ufo = getTestUFO()

    def test_compile_without_production_names(self):
        result = compileTTF(self.ufo, useProductionNames=False)
        self.assertEqual(result.getGlyphOrder(), ['.notdef', 'space', 'a', 'b', 'c'])

    def test_compile_with_production_names(self):
        # Default production names are uniXXXX based on the glyphs' codepoints.
        result = compileTTF(self.ufo, useProductionNames=True)
        self.assertEqual(result.getGlyphOrder(), ['.notdef', 'uni0020', 'uni0061', 'uni0062', 'uni0063'])

    CUSTOM_POSTSCRIPT_NAMES = {
        '.notdef': '.notdef',
        'space': 'foo',
        'a': 'bar',
        'b': 'baz',
        'c': 'meh'
    }

    def test_compile_with_custom_postscript_names(self):
        self.ufo.lib['public.postscriptNames'] = self.CUSTOM_POSTSCRIPT_NAMES
        result = compileTTF(self.ufo, useProductionNames=True)
        self.assertEqual(sorted(result.getGlyphOrder()), sorted(self.CUSTOM_POSTSCRIPT_NAMES.values()))

    def test_compile_with_custom_postscript_names_notdef_preserved(self):
        # Even a custom mapping for .notdef must not displace it from glyph id 0.
        custom_names = dict(self.CUSTOM_POSTSCRIPT_NAMES)
        custom_names['.notdef'] = 'defnot'
        self.ufo.lib['public.postscriptNames'] = custom_names
        result = compileTTF(self.ufo, useProductionNames=True)
        order = sorted(result.getGlyphOrder())  # NOTE(review): unused variable
        self.assertEqual(result.getGlyphOrder(), ['.notdef', 'foo', 'bar', 'baz', 'meh'])
if __name__ == "__main__":
    # Allow running this test module directly; exit with unittest's status.
    import sys
    sys.exit(unittest.main())
<file_sep>/requirements.txt
fonttools==3.7.2
ufoLib==2.0.0
defcon==0.2.1
cu2qu==1.1.1
compreffor==0.4.3
<file_sep>/Lib/ufo2ft/makeotfParts.py
from __future__ import \
print_function, division, absolute_import, unicode_literals
import logging
import os
import re
from fontTools.misc.py23 import *
from fontTools import feaLib
from fontTools.feaLib.builder import addOpenTypeFeaturesFromString
from fontTools import mtiLib
from ufo2ft.maxContextCalc import maxCtxFont
logger = logging.getLogger(__name__)
class FeatureOTFCompiler(object):
    """Generates OpenType feature tables for a UFO.

    If mtiFeaFiles is passed to the constructor, it should be a dictionary
    mapping feature table tags to source files which should be compiled by
    mtiLib into that respective table.
    """

    def __init__(self, font, outline, kernWriter, markWriter, mtiFeaFiles=None):
        """
        :param font: the source UFO.
        :param outline: the fontTools TTFont being built (tables added in place).
        :param kernWriter, markWriter: feature-writer classes used to
            auto-generate kern / mark+mkmk feature syntax.
        :param mtiFeaFiles: optional {table tag: MTI source path} mapping.
        """
        self.font = font
        self.outline = outline
        self.kernWriter = kernWriter
        self.markWriter = markWriter
        self.mtiFeaFiles = mtiFeaFiles
        self.setupAnchorPairs()

    def compile(self):
        """Compile the features.

        Starts by generating feature syntax for the kern, mark, and mkmk
        features. If they already exist, they will not be overwritten.
        """
        self.precompile()
        self.setupFile_features()
        self.setupFile_featureTables()
        # only after compiling features can usMaxContext be calculated
        self.outline['OS/2'].usMaxContext = maxCtxFont(self.outline)

    def precompile(self):
        """Set any attributes needed before compilation.

        **This should not be called externally.** Subclasses
        may override this method if desired.
        """
        pass

    def setupFile_features(self):
        """
        Make the features source file. If any tables
        or the kern feature are defined in the font's
        features, they will not be overwritten.

        **This should not be called externally.** Subclasses
        may override this method to handle the file creation
        in a different way if desired.
        """
        # MTI sources bypass feature-file generation entirely.
        if self.mtiFeaFiles is not None:
            return

        features = self._findLayoutFeatures()
        # Regexes matching whole feature blocks (kept for reference; detection
        # actually uses the parsed feature tags from _findLayoutFeatures).
        kernRE = r"feature\s+kern\s+{.*?}\s+kern\s*;"
        markRE = re.compile(kernRE.replace("kern", "mark"), re.DOTALL)
        mkmkRE = re.compile(kernRE.replace("kern", "mkmk"), re.DOTALL)
        kernRE = re.compile(kernRE, re.DOTALL)

        existing = self.font.features.text or ""

        # build the GPOS features as necessary
        autoFeatures = {}
        if "kern" not in features:
            autoFeatures["kern"] = self.writeFeatures_kern()
        writeMark = "mark" not in features
        writeMkmk = "mkmk" not in features
        if writeMark or writeMkmk:
            autoFeatures["mark"] = self.writeFeatures_mark(
                doMark=writeMark, doMkmk=writeMkmk)

        # write the features: existing text first, then auto-generated blocks.
        features = [existing]
        for name, text in sorted(autoFeatures.items()):
            features.append(text)
        self.features = "\n\n".join(features)

    def writeFeatures_kern(self):
        """
        Write the kern feature to a string and return it.

        **This should not be called externally.** Subclasses
        may override this method to handle the string creation
        in a different way if desired.
        """
        writer = self.kernWriter(self.font)
        return writer.write()

    def writeFeatures_mark(self, doMark=True, doMkmk=True):
        """
        Write the mark and mkmk features to a string and return it.

        **This should not be called externally.** Subclasses
        may override this method to handle the string creation
        in a different way if desired.
        """
        writer = self.markWriter(
            self.font, self.anchorPairs, self.mkmkAnchorPairs,
            self.ligaAnchorPairs)
        return writer.write(doMark, doMkmk)

    def setupAnchorPairs(self):
        """
        Try to determine the base-accent anchor pairs to use in building the
        mark and mkmk features.

        Pairs are (base, "_"+base); ligature anchors use numbered suffixes
        ("top_1", "top_2", ...).

        **This should not be called externally.** Subclasses
        may override this method to set up the anchor pairs
        in a different way if desired.
        """
        self.anchorPairs = []
        self.ligaAnchorPairs = []

        anchorNames = set()
        for glyph in self.font:
            for anchor in glyph.anchors:
                if anchor.name is None:
                    logger.warning("Unnamed anchor discarded in %s", glyph.name)
                    continue
                anchorNames.add(anchor.name)

        for baseName in sorted(anchorNames):
            accentName = "_" + baseName
            if accentName in anchorNames:
                self.anchorPairs.append((baseName, accentName))

                # Collect consecutive numbered ligature anchors: base_1, base_2, ...
                ligaNames = []
                i = 1
                while True:
                    ligaName = "%s_%d" % (baseName, i)
                    if ligaName not in anchorNames:
                        break
                    ligaNames.append(ligaName)
                    i += 1
                if ligaNames:
                    self.ligaAnchorPairs.append((tuple(ligaNames), accentName))

        self.mkmkAnchorPairs = self.anchorPairs

    def _findLayoutFeatures(self):
        """Returns what OpenType layout feature tags are present in the UFO."""
        if self.font.path is None:
            return set()
        feapath = os.path.join(self.font.path, "features.fea")
        if not os.path.exists(feapath):
            return set()
        glyphMap = self.outline.getReverseGlyphMap()
        parser = feaLib.parser.Parser(feapath, glyphMap=glyphMap)
        doc = parser.parse()
        return {f.name for f in doc.statements
                if isinstance(f, feaLib.ast.FeatureBlock)}

    def setupFile_featureTables(self):
        """
        Compile and return OpenType feature tables from the source.
        Raises a FeaLibError if the feature compilation was unsuccessful.

        **This should not be called externally.** Subclasses
        may override this method to handle the table compilation
        in a different way if desired.
        """
        if self.mtiFeaFiles is not None:
            # Each MTI source compiles directly into its named table.
            for tag, feapath in self.mtiFeaFiles.items():
                with open(feapath) as feafile:
                    table = mtiLib.build(feafile, self.outline)
                    assert table.tableTag == tag
                    self.outline[tag] = table

        elif self.features.strip():
            # Pass the on-disk path (when available) so error messages and
            # include statements resolve correctly.
            feapath = os.path.join(self.font.path, "features.fea") if self.font.path is not None else None
            addOpenTypeFeaturesFromString(self.outline, self.features,
                                          filename=feapath)
| 5bd9bf8bcd39deb10b13ed9c6898339ad41ac158 | [
"Python",
"Text"
] | 4 | Python | folengo/ufo2ft | 37242fe9ad6b10963f3c4d5feeddb44d5079276e | b63ecedf7117464cdf0fbb267be6ef298a451ebb | |
refs/heads/master | <repo_name>lewdawso/gpu-bot<file_sep>/config.toml
[[products]]
name = "6800 XT"
urls = [
# NOTE: this listing is an RX 6900 XT card but is grouped under the "6800 XT" product — verify this is intentional.
"https://www.ebuyer.com/1138835-xfx-radeon-rx-6900-xt-16gb-merc-319-black-graphics-card-rx-69xtacbd9",
"https://www.ebuyer.com/1137015-msi-radeon-rx-6800-xt-gaming-x-trio-16gb-graphics-card-rx-6800-xt-gaming-x-trio-16g",
"https://www.ebuyer.com/1135766-asrock-radeon-rx-6800-xt-phantom-gaming-d-16gb-oc-graphics-card-rx6800xt-pgd-16go",
"https://www.ebuyer.com/1134062-powercolor-radeon-rx-6800-xt-16gb-red-devil-graphics-card-axrx-6800xt-16gbd6-3dhe-oc",
"https://www.ebuyer.com/1138784-gigabyte-radeon-rx-6800xt-16gb-aorus-master-type-c-graphics-card-gv-r68xtaorus-m-16gc",
"https://www.ebuyer.com/1138504-asrock-radeon-rx-6800-xt-16gb-graphics-card-90-ga2czz-00uanf",
"https://www.ebuyer.com/1128772-powercolor-radeon-rx-6800-xt-16gb-graphics-card-axrx-6800xt-16gbd6-m2dhc",
"https://www.ebuyer.com/1129448-msi-radeon-rx-6800-xt-16gb-graphics-card-rx-6800-xt-16g",
"https://www.ebuyer.com/1134038-sapphire-nitro-radeon-rx-6800-xt-oc-se-16gb-graphics-card-11304-01-20g",
"https://www.ebuyer.com/1134212-powercolor-radeon-rx-6800-xt-16gb-red-devil-limited-edition-graphics-card-axrx-6800xt-16gbd6-2dhce-oc",
"https://www.ebuyer.com/1134378-asus-radeon-rx-6800-xt-16gb-rog-strix-lc-oc-graphics-card-rog-strix-lc-rx6800xt-o16g-gaming",
"https://www.ebuyer.com/1134382-gigabyte-radeon-rx-6800xt-16gb-gaming-oc-graphics-card-gv-r68xtgaming-oc-16gd",
"https://www.ebuyer.com/1135765-asrock-radeon-rx-6800-xt-taichi-x-16gb-oc-graphics-card-rx6800xt-tcx-16go",
"https://www.ebuyer.com/1134039-sapphire-nitro-radeon-rx-6800-xt-oc-16gb-graphics-card-11304-02-20g",
"https://www.ebuyer.com/1134063-powercolor-radeon-rx-6800-xt-red-dragon-16gb-graphics-card-axrx-6800xt-16gbd6-3dhr-oc",
"https://www.ebuyer.com/1134379-asus-radeon-rx-6800-xt-tuf-gaming-oc-16gb-graphics-card-tuf-rx6800xt-o16g-gaming",
"https://www.ebuyer.com/1135696-xfx-radeon-rx-6800-xt-merc319-black-16gb-graphics-card-rx-68xtacbd9",
"https://www.ebuyer.com/1138786-gigabyte-radeon-rx-6800xt-16gb-aorus-master-graphics-card-gv-r68xtaorus-m-16gd",
]
[[products]]
name = "6800"
urls = [
"https://www.ebuyer.com/1139106-xfx-radeon-rx-6800-qick319-16gb-graphics-card-rx-68xlalbd9",
"https://www.ebuyer.com/1128774-powercolor-radeon-rx-6800-16gb-graphics-card-axrx-6800-16gbd6-m2dhc",
"https://www.ebuyer.com/1129451-msi-radeon-rx-6800-16gb-graphics-card-rx-6800-16g",
"https://www.ebuyer.com/1133851-xfx-radeon-rx-6800-16gb-graphics-card-rx-68lmatfd8",
"https://www.ebuyer.com/1134064-powercolor-radeon-rx-6800-16gb-red-devil-graphics-card-axrx-6800-16gbd6-3dhe-oc",
"https://www.ebuyer.com/1134067-powercolor-radeon-rx-6800-fighter-16gb-graphics-card-axrx-6800-16gbd6-3dh-oc",
"https://www.ebuyer.com/1134381-asus-radeon-rx-6800-16gb-tuf-gaming-oc-graphics-card-tuf-rx6800-o16g-gaming",
"https://www.ebuyer.com/1135697-xfx-radeon-rx-6800-merc319-black-16gb-graphics-card-rx-68xlatbd9",
"https://www.ebuyer.com/1135767-asrock-radeon-rx-6800-phantom-gaming-d-16gb-oc-graphics-card-rx6800-pgd-16go",
"https://www.ebuyer.com/1133959-asus-radeon-rx-6800-16gb-graphics-card-rx6800-16g",
"https://www.ebuyer.com/1134040-sapphire-nitro-radeon-rx-6800-oc-16gb-graphics-card-11305-01-20g",
"https://www.ebuyer.com/1134066-powercolor-radeon-rx-6800-red-dragon-16gb-graphics-card-axrx-6800-16gbd6-3dhr-oc",
"https://www.ebuyer.com/1134213-powercolor-radeon-rx-6800-16gb-red-devil-limited-edition-graphics-card-axrx-6800-16gbd6-2dhce-oc",
"https://www.ebuyer.com/1134380-asus-radeon-rx-6800-16gb-rog-strix-oc-graphics-card-rog-strix-rx6800-o16g-gaming",
"https://www.ebuyer.com/1134383-gigabyte-radeon-rx-6800-16gb-gaming-oc-graphics-card-gv-r68gaming-oc-16gd",
"https://www.ebuyer.com/1137016-msi-radeon-rx-6800-gaming-x-trio-16gb-graphics-card-rx-6800-gaming-x-trio-16g",
"https://www.ebuyer.com/1138499-asrock-radeon-rx-6800-16gb-graphics-card-radeon-rx-6800-16g",
"https://www.ebuyer.com/1135768-asrock-radeon-rx-6800-challenger-pro-16gb-oc-graphics-rx6800-clp-16go",
"https://www.ebuyer.com/1138788-gigabyte-radeon-rx-6800-16gb-aorus-master-graphics-card-gv-r68aorus-m-16gd",
]
[[products]]
name = "RX 580"
urls = [
"https://www.ebuyer.com/788993-msi-amd-radeon-rx-580-8gb-armor-8g-oc-graphics-card-rx-580-armor-8g-oc",
]
[[products]]
name = "RX 550"
urls = [
"https://www.ebuyer.com/974387-powercolor-radeon-rx-550-4gb-red-dragon-graphics-card-axrx-550-4gbd5-dh",
]
[[products]]
name = "3060"
urls = [
"https://www.ebuyer.com/1142117-gigabyte-geforce-rtx-3060-12gb-gaming-oc-ampere-graphics-card-gv-n3060gaming-oc-12gd",
"https://www.ebuyer.com/1142164-asus-geforce-rtx-3060-12gb-tuf-gaming-oc-ampere-graphics-card-tuf-rtx3060-o12g-gaming",
"https://www.ebuyer.com/1139155-zotac-geforce-rtx-3060-12gb-amp-white-edition-ampere-graphics-card-zt-a30600f-10p",
"https://www.ebuyer.com/1138915-pny-geforce-rtx-3060-12gb-uprising-edition-ampere-graphics-card-vcg306012dfmpb",
"https://www.ebuyer.com/1140152-msi-geforce-rtx-3060-12gb-ventus-2x-oc-ampere-graphics-card-rtx-3060-ventus-2x-12g-oc",
"https://www.ebuyer.com/1142118-gigabyte-geforce-rtx-3060-12gb-eagle-oc-ampere-graphics-card-gv-n3060eagle-oc-12gd",
"https://www.ebuyer.com/1142162-asus-geforce-rtx-3060-12gb-rog-strix-oc-ampere-graphics-card-rog-strix-rtx3060-o12g-gaming",
"https://www.ebuyer.com/1142168-asus-geforce-rtx-3060-12gb-dual-ampere-graphics-card-dual-rtx3060-12g",
"https://www.ebuyer.com/1143088-evga-geforce-rtx-3060-12gb-xc-gaming-ampere-graphics-card-12g-p5-3657-kr",
"https://www.ebuyer.com/1139157-zotac-geforce-rtx-3060-12gb-twin-edge-ampere-graphics-card-zt-a30600e-10m",
"https://www.ebuyer.com/1140151-msi-geforce-rtx-3060-12gb-ventus-3x-oc-ampere-graphics-card-rtx-3060-ventus-3x-12g-oc",
"https://www.ebuyer.com/1140154-msi-geforce-rtx-3060-12gb-ventus-2x-ampere-graphics-card-rtx-3060-ventus-2x-12g",
"https://www.ebuyer.com/1141627-palit-geforce-rtx-3060-12gb-dual-ampere-graphics-card-ne63060019k9-190ad",
"https://www.ebuyer.com/1139156-zotac-geforce-rtx-3060-12gb-twin-edge-oc-ampere-graphics-card-zt-a30600h-10m",
"https://www.ebuyer.com/1140149-msi-geforce-rtx-3060-12gb-gaming-x-trio-ampere-graphics-card-rtx-3060-gaming-x-trio-12g",
"https://www.ebuyer.com/1140150-msi-geforce-rtx-3060-12gb-gaming-x-ampere-graphics-card-rtx-3060-gaming-x-12g",
"https://www.ebuyer.com/1140800-pny-geforce-rtx-3060-12gb-xlr8-gaming-revel-epic-x-single-fan-ampere-vcg306012sfxppb",
"https://www.ebuyer.com/1141626-palit-geforce-rtx-3060-12gb-dual-oc-ampere-graphics-card-ne63060t19k9-190ad",
"https://www.ebuyer.com/1142116-gigabyte-geforce-rtx-3060-12gb-vision-oc-ampere-graphics-card-gv-n3060vision-oc-12gd",
"https://www.ebuyer.com/1142119-gigabyte-geforce-rtx-3060-12gb-eagle-ampere-graphics-card-gv-n3060eagle-12gd",
"https://www.ebuyer.com/1142163-asus-geforce-rtx-3060-12gb-rog-strix-ampere-graphics-card-rog-strix-rtx3060-12g-gaming",
"https://www.ebuyer.com/1142166-asus-geforce-rtx-3060-12gb-tuf-gaming-ampere-graphics-card-tuf-rtx3060-12g-gaming",
]
[[products]]
name = "3060 Ti"
urls = [
"https://www.ebuyer.com/1133708-gigabyte-geforce-rtx-3060-ti-8gb-gaming-oc-pro-ampere-graphics-card-gv-n306tgamingoc-pro-8gd",
"https://www.ebuyer.com/1136049-asus-geforce-rtx-3060-ti-8gb-gddr6-tuf-gaming-oc-ampere-graphics-tuf-rtx3060ti-o8g-gaming",
"https://www.ebuyer.com/1129353-gigabyte-geforce-rtx-3060-ti-8gb-aorus-master-ampere-graphics-card-gv-n306taorus-m-8gd",
"https://www.ebuyer.com/1129355-gigabyte-geforce-rtx-3060-ti-8gb-gaming-oc-ampere-graphics-card-gv-n306tgaming-oc-8gd",
"https://www.ebuyer.com/1133705-gigabyte-geforce-rtx-3060-ti-8gb-eagle-oc-ampere-graphics-card-gv-n306teagle-oc-8gd",
"https://www.ebuyer.com/1129488-zotac-geforce-rtx-3060-ti-8gb-twin-edge-ampere-graphics-card-zt-a30610e-10m",
"https://www.ebuyer.com/1134235-palit-geforce-rtx-3060-ti-8gb-dual-oc-ampere-graphics-card-ne6306ts19p2-190ad",
"https://www.ebuyer.com/1136048-asus-geforce-rtx-3060-ti-8gb-rog-strix-ampere-graphics-card-rog-strix-rtx3060ti-8g-gaming",
"https://www.ebuyer.com/1136052-asus-geforce-rtx-3060-ti-8gb-gddr6-dual-ampere-graphics-card-dual-rtx3060ti-8g",
"https://www.ebuyer.com/1139372-msi-geforce-rtx-3060-ti-8gb-gaming-x-ampere-graphics-card-rtx-3060-ti-gaming-x",
"https://www.ebuyer.com/1139658-gigabyte-geforce-rtx-3060-ti-8gb-vision-oc-ampere-graphics-card-gv-n306tvision-oc-8gd",
"https://www.ebuyer.com/1139775-evga-geforce-rtx-3060-ti-8gb-ftw3-ultra-ampere-graphics-card-08g-p5-3667-kr",
"https://www.ebuyer.com/1129189-pny-geforce-rtx-3060-ti-8gb-xlr8-revel-epic-x-ampere-graphics-card-vcg3060t8dfxppb",
"https://www.ebuyer.com/1133706-gigabyte-geforce-rtx-3060-ti-8gb-eagle-ampere-graphics-card-gv-n306teagle-8gd",
"https://www.ebuyer.com/1134060-palit-geforce-rtx-3060-ti-8gb-gamingpro-oc-ampere-graphics-card-ne6306tt19p2-1041a",
"https://www.ebuyer.com/1134069-msi-geforce-rtx-3060-ti-8gb-gaming-x-trio-ampere-graphics-card-rtx-3060-ti-gaming-x-trio",
"https://www.ebuyer.com/1134070-msi-geforce-rtx-3060-ti-8gb-ventus-3x-oc-ampere-graphics-card-rtx-3060-ti-ventus-3x-oc",
"https://www.ebuyer.com/1134236-palit-geforce-rtx-3060-ti-8gb-dual-ampere-graphics-card-ne6306t019p2-190ad",
"https://www.ebuyer.com/1136050-asus-geforce-rtx-3060-ti-8gb-gddr6-tuf-gaming-ampere-graphics-card-tuf-rtx3060ti-8g-gaming",
"https://www.ebuyer.com/1129487-zotac-geforce-rtx-3060-ti-8gb-twin-edge-oc-ampere-graphics-zt-a30610h-10m",
"https://www.ebuyer.com/1134061-palit-geforce-rtx-3060-ti-8gb-gamingpro-ampere-graphics-card-ne6306t019p2-1041a",
"https://www.ebuyer.com/1134071-msi-geforce-rtx-3060-ti-8gb-ventus-2x-oc-ampere-graphics-card-rtx-3060-ti-ventus-2x-oc",
"https://www.ebuyer.com/1136047-asus-geforce-rtx-3060-ti-8gb-rog-strix-oc-ampere-graphics-card-rog-strix-rtx3060ti-o8g-gaming",
"https://www.ebuyer.com/1136051-asus-geforce-rtx-3060-ti-8gb-gddr6-dual-oc-ampere-graphics-card-dual-rtx3060ti-o8g",
]
[[products]]
name = "AMD RX 5500XT"
urls = [
"https://www.ebuyer.com/937200-msi-radeon-rx-5500-xt-gaming-x-8gb-graphics-card-rx-5500-xt-gaming-x-8g",
"https://www.ebuyer.com/937375-asus-radeon-rx-5500-xt-dual-evo-oc-8gb-graphics-card-dual-rx5500xt-o8g-evo",
"https://www.ebuyer.com/937387-sapphire-radeon-rx-5500-xt-pulse-8gb-graphics-card-11295-01-20g",
]
[[products]]
name = "AMD RX 5600XT"
urls = [
"https://www.ebuyer.com/951044-msi-radeon-rx-5600-xt-mech-oc-6gb-14gbps-graphics-card-rx-5600-xt-mech-oc",
]
[[products]]
name = "GTX 1650"
urls = [
"https://www.ebuyer.com/881982-gigabyte-geforce-gtx-1650-oc-4gb-gddr5-graphics-card-gv-n1650oc-4gd",
"https://www.ebuyer.com/973418-palit-geforce-gtx-1650-4gb-gaming-pro-graphics-card-ne6165001bg1-1175a",
"https://www.ebuyer.com/1135907-msi-geforce-gtx-1650-d6-ventus-xs-ocv2-4gb-graphics-card-gtx-1650-d6-ventus-xs-ocv2",
"https://www.ebuyer.com/1138931-palit-geforce-gtx-1650-stormx-4gb-oc-graphics-card-ne61650u18g1-166f",
"https://www.ebuyer.com/1139131-msi-geforce-gtx-1650-d6-aero-itx-oc-4gb-graphics-card-gtx-1650-d6-aero-itx-oc",
"https://www.ebuyer.com/1142151-asus-geforce-gtx-1650-4gb-tuf-gaming-graphics-card-tuf-gtx1650-4gd6-p-gaming",
"https://www.ebuyer.com/882051-msi-geforce-gtx-1650-gaming-x-4gb-gddr5-graphics-card-gtx-1650-gaming-x-4g",
"https://www.ebuyer.com/973331-gigabyte-geforce-gtx-1650-4gb-d6-oc-graphics-card-gv-n1656oc-4gd",
"https://www.ebuyer.com/992087-palit-geforce-gtx-1650-stormx-d6-4gb-graphics-card-ne61650018g1-166f",
"https://www.ebuyer.com/1125375-pny-geforce-gtx-1650-4gb-gddr6-graphics-card-vcg16504d6dfppb",
"https://www.ebuyer.com/1133812-asus-geforce-gtx-1650-mini-4gb-gddr6-graphics-card-dual-gtx1650-o4gd6-mini",
"https://www.ebuyer.com/881983-gigabyte-geforce-gtx-1650-mini-itx-oc-4gb-gddr5-graphics-card-gv-n1650ixoc-4gd",
"https://www.ebuyer.com/911286-msi-geforce-gtx-1650-oc-4gb-low-profile-graphics-card-gtx-1650-4gt-lp-oc",
"https://www.ebuyer.com/1126289-msi-geforce-gtx-1650-ventus-xs-4gb-ocv1-graphics-card-gtx-1650-ventus-xs-4g-ocv1",
"https://www.ebuyer.com/1126769-asus-geforce-gtx-1650-tuf-oc-4gb-graphics-card-tuf-gtx1650-o4gd6-p-gaming",
"https://www.ebuyer.com/1138930-palit-geforce-gtx-1650-gaming-pro-4gb-oc-graphics-card-ne61650s1bg1-1175a",
"https://www.ebuyer.com/1137493-palit-geforce-gtx-1650-4gb-gamingpro-graphics-card-ne6165001bg1-166a",
]
[[products]]
name = "GTX 1650 Super"
urls = [
"https://www.ebuyer.com/921105-gigabyte-gtx-1650-super-oc-windforce-oc-4gb-graphics-card-gv-n165swf2oc-4gd",
"https://www.ebuyer.com/921162-msi-geforce-gtx-1650-super-ventus-xs-oc-4gb-graphics-card-gtx-1650-super-ventus-xs-oc",
"https://www.ebuyer.com/921091-palit-geforce-gtx-1650-super-stormx-4gb-graphics-card-ne6165s018g1-166f",
"https://www.ebuyer.com/921154-asus-tuf-geforce-gtx-1650-super-4gb-oc-graphics-card-tuf-gtx1650s-o4g-gaming",
"https://www.ebuyer.com/921161-msi-geforce-gtx-1650-super-gaming-x-4gb-graphics-card-gtx-1650-super-gaming-x",
"https://www.ebuyer.com/921146-pny-geforce-gtx-1650-super-single-fan-graphics-card-vcg16504ssfppb",
"https://www.ebuyer.com/921153-asus-geforce-gtx-1650-super-phoenix-oc-4gb-graphics-card-ph-gtx1650s-o4g",
"https://www.ebuyer.com/921163-msi-geforce-gtx-1650-super-aero-itx-oc-4gb-graphics-card-gtx-1650-super-aero-itx-oc",
]
[[products]]
name = "GTX 1660"
urls = [
"https://www.ebuyer.com/920484-zotac-geforce-gtx-1660-6gb-graphics-card-zt-t16600k-10m",
"https://www.ebuyer.com/880055-gigabyte-geforce-gtx-1660-oc-6gb-gddr5-graphics-card-gv-n1660oc-6gd",
"https://www.ebuyer.com/879572-msi-geforce-gtx-1660-ventus-xs-6gb-oc-gddr5-graphics-card-gtx-1660-ventus-xs-6g-oc",
"https://www.ebuyer.com/879570-msi-geforce-gtx-1660-gaming-x-6gb-gddr5-graphics-card-gtx-1660-gaming-x-6g",
]
[[products]]
name = "GTX 1660 Super"
urls = [
"https://www.ebuyer.com/914171-msi-geforce-gtx-1660-super-ventus-xs-6gb-oc-graphics-card-gtx-1660-super-ventus-xs-oc",
"https://www.ebuyer.com/1142531-asus-geforce-gtx-rog-strix-advanced-1660-super-graphics-card-rog-strix-gtx1660s-a6g-gaming",
"https://www.ebuyer.com/915241-gigabyte-geforce-gtx-1660-super-oc-6gb-graphics-card-gv-n166soc-6gd",
"https://www.ebuyer.com/1142368-asus-geforce-gtx-1660-super-6gb-tuf-oc-graphics-card-tuf-gtx1660s-o6g-gaming",
"https://www.ebuyer.com/919629-palit-geforce-gtx-1660-super-6gb-stormx-graphics-card-ne6166s018j9-161f",
"https://www.ebuyer.com/912618-zotac-geforce-gtx-1660-super-6gb-amp-edition-graphics-card-zt-t16620d-10m",
"https://www.ebuyer.com/919258-asus-geforce-gtx-1660-super-dual-evo-6gb-oc-graphics-card-dual-gtx1660s-o6g-evo",
"https://www.ebuyer.com/1138061-asus-geforce-gtx-1660-super-rog-strix-6gb-oc-graphics-card-rog-strix-gtx1660s-o6g-gaming",
"https://www.ebuyer.com/1138717-msi-geforce-gtx-1660-super-gaming-z-plus-6gb-graphics-card-gtx-1660-super-gaming-z-plus",
"https://www.ebuyer.com/1139774-evga-geforce-gtx-1660-super-6gb-sc-ultra-graphics-card-06g-p4-1068-kr",
"https://www.ebuyer.com/1139901-palit-geforce-gtx-1660-super-gamingpro-6gb-graphics-card-ne6166s018j9-1160a",
"https://www.ebuyer.com/919631-palit-geforce-gtx-1660-super-gamingpro-oc-6gb-graphics-card-ne6166ss18j9-1160a",
"https://www.ebuyer.com/919721-palit-geforce-gtx-1660-super-6gb-stormx-oc-graphics-card-ne6166ss18j9-161f",
"https://www.ebuyer.com/1128362-kfa2-geforce-gtx-1660-super-6gb-oc-graphics-card-60srl7dsy91k",
"https://www.ebuyer.com/1133815-asus-geforce-gtx-1660-super-mini-6gb-oc-graphics-card-dual-gtx1660s-o6g-mini",
"https://www.ebuyer.com/915240-gigabyte-geforce-gtx-1660-super-6gb-oc-gaming-graphics-card-gv-n166sgaming-oc-6gd",
"https://www.ebuyer.com/919160-msi-geforce-gtx-1660-super-gaming-x-6gb-graphics-card-gtx-1660-super-gaming-x",
"https://www.ebuyer.com/919326-pny-geforce-gtx-1660-super-6gb-graphics-card-vcg16606ssfppb",
"https://www.ebuyer.com/1129733-msi-geforce-gtx-1660-super-ventus-6gb-oc-graphics-card-gtx-1660-super-ventus-oc",
]
[[products]]
name = "GTX 1660 Ti"
urls = [
"https://www.ebuyer.com/1138927-palit-geforce-gtx-1660ti-dual-graphics-card-ne6166t018j9-1160a",
"https://www.ebuyer.com/877353-msi-geforce-gtx-1660-ti-ventus-xs-6gb-oc-graphics-card-gtx-1660-ti-ventus-xs-6g-oc",
]
[[products]]
name = "RTX 2060"
urls = [
"https://www.ebuyer.com/874003-zotac-geforce-rtx-2060-amp-6gb-graphics-card-zt-t20600d-10m",
"https://www.ebuyer.com/875113-palit-geforce-rtx-2060-stormx-6gb-graphics-card-ne62060018j9-161f",
"https://www.ebuyer.com/874210-gigabyte-geforce-rtx-2060-oc-v2-6gb-graphics-card-gv-n2060oc-6gd-v2",
"https://www.ebuyer.com/1141899-pny-geforce-rtx-2060-6gb-blower-graphics-card-vcg20606blmpb",
]
[[products]]
name = "RTX 2060 Super"
urls = [
"https://www.ebuyer.com/901616-palit-geforce-rtx-2060-super-8gb-graphics-card-at-ebuyer-ne6206s018p2-1160a",
]
[[products]]
name = "RTX 3080"
urls = [
"https://www.ebuyer.com/1121306-asus-geforce-rtx-3080-10gb-gddr6x-tuf-gaming-oc-ampere-graphics-card-tuf-rtx3080-o10g-gaming",
"https://www.ebuyer.com/1121357-asus-geforce-rtx-3080-10gb-gddr6x-rog-strix-oc-ampere-graphics-card-rog-strix-rtx3080-o10g-gaming",
"https://www.ebuyer.com/1125453-zotac-geforce-rtx-3080-10gb-gddr6x-trinity-oc-ampere-graphics-card-zt-a30800j-10p",
"https://www.ebuyer.com/1121311-msi-geforce-rtx-3080-10gb-gddr6x-ventus-3x-oc-ampere-graphics-card-rtx-3080-ventus-3x-10g-oc",
"https://www.ebuyer.com/1121314-zotac-geforce-rtx-3080-10gb-gddr6x-trinity-ampere-graphics-card-zt-a30800d-10p",
"https://www.ebuyer.com/1128087-palit-geforce-rtx-3080-gamerock-oc-10gb-gddr6x-ampere-graphics-card-ned3080h19ia-1020g",
"https://www.ebuyer.com/1128207-msi-geforce-rtx-3080-suprim-x-10gb-ampere-graphics-card-rtx-3080-suprim-x-10g",
"https://www.ebuyer.com/1128214-gigabyte-geforce-rtx-3080-10gb-gddr6x-vision-oc-ampere-graphics-card-gv-n3080vision-oc-10gd",
"https://www.ebuyer.com/1128361-kfa2-geforce-rtx-3080-10gb-gddr6x-sg-ampere-graphics-card-38nwm3md99nk",
"https://www.ebuyer.com/1128751-gigabyte-geforce-rtx-3080-10gb-gddr6x-eagle-ampere-graphics-card-gv-n3080eagle-10gd",
"https://www.ebuyer.com/1121308-gigabyte-geforce-rtx-3080-10gb-gddr6x-gaming-oc-ampere-graphics-card-gv-n3080gaming-oc-10gd",
"https://www.ebuyer.com/1121358-asus-geforce-rtx-3080-10gb-gddr6x-rog-strix-ampere-graphics-card-rog-strix-rtx3080-10g-gaming",
"https://www.ebuyer.com/1127888-gigabyte-geforce-rtx-3080-10gb-gddr6x-aorus-xtreme-ampere-graphics-card-gv-n3080aorus-x-10gd",
"https://www.ebuyer.com/1140018-asus-geforce-rtx-3080-10gb-gddr6x-rog-strix-oc-white-ampere-graphics-rog-strix-rtx3080-o10g-white",
"https://www.ebuyer.com/1121290-asus-geforce-rtx-3080-10gb-gddr6x-tuf-gaming-ampere-graphics-card-tuf-rtx3080-10g-gaming",
"https://www.ebuyer.com/1121300-pny-geforce-rtx-3080-10gb-xlr8-gaming-triple-fan-ampere-graphics-card-vcg308010tfxppb",
"https://www.ebuyer.com/1121309-gigabyte-geforce-rtx-3080-10gb-gddr6x-eagle-oc-ampere-graphics-card-gv-n3080eagle-oc-10gd",
"https://www.ebuyer.com/1121310-msi-geforce-rtx-3080-10gb-gddr6x-gaming-x-trio-ampere-graphics-card-rtx-3080-gaming-x-trio-10g",
"https://www.ebuyer.com/1128086-palit-geforce-rtx-3080-gamerock-10gb-gddr6x-ampere-graphics-card-ned3080u19ia-1020g",
"https://www.ebuyer.com/1135883-evga-geforce-rtx-3080-10gb-xc3-ultra-gaming-ampere-graphics-card-10g-p5-3885-kr",
"https://www.ebuyer.com/1136570-asus-geforce-rtx-3080-ekwb-10gb-ampere-graphics-card-rtx3080-10g-ek",
"https://www.ebuyer.com/1137509-gigabyte-aorus-geforce-rtx-3080-gaming-box-external-graphics-card-gv-n3080ixeb-10gd",
"https://www.ebuyer.com/1140930-gigabyte-geforce-rtx-3080-10gb-gddr6x-aorus-master-ampere-graphics-card-gv-n3080aorus-m-10gd-v2",
]
<file_sep>/main.go
package main
import (
"flag"
"fmt"
"net/http"
"os"
"text/tabwriter"
"time"
"github.com/BurntSushi/toml"
"github.com/PuerkitoBio/goquery"
log "github.com/sirupsen/logrus"
)
// bot watches a set of product listings and reports stock changes to a
// Discord webhook.
type bot struct {
	// DiscordURL is the webhook endpoint, loaded from config.toml.
	DiscordURL string `toml:"discordURL"`
	// Stock maps product name -> listing name -> last observed state.
	Stock map[string]map[string]*gpu
	// Products is the watch list, loaded from config.toml.
	Products []Product
	// writer renders the status table for printStatus.
	writer *tabwriter.Writer
	// lastChecked records when the stock list was last refreshed.
	lastChecked time.Time
	// refresh is the polling interval.
	refresh time.Duration
}

// Product is one named group of listing URLs from the config file.
type Product struct {
	Name string
	URLS []string
}

// gpu is the last observed state of a single listing.
type gpu struct {
	inStock bool
	url string
}
// main loads the product configuration, optionally announces startup to
// Discord, then polls every watched listing on a fixed interval.
func main() {
	welcome := flag.Bool("welcome", false, "send welcome message")
	flag.Parse()

	b := bot{
		Stock:   make(map[string]map[string]*gpu),
		refresh: 10 * time.Minute,
	}

	// Read config file.
	if _, err := toml.DecodeFile("./config.toml", &b); err != nil {
		log.Fatal(err)
	}

	b.writer = new(tabwriter.Writer)
	b.writer.Init(os.Stdout, 8, 8, 3, '\t', 0)

	// Total number of listing URLs being watched.
	var count int
	for _, prod := range b.Products {
		count += len(prod.URLS)
	}
	log.Infof("watching %d gpus", count)

	if *welcome {
		if err := b.startUp(count); err != nil {
			log.Error(err)
		}
	}

	// Initial pass, then refresh the stock list on every tick.
	b.checkStock()
	b.printSummary()
	b.lastChecked = time.Now()

	ticker := time.NewTicker(b.refresh)
	for range ticker.C {
		b.checkStock()
		b.printSummary()
		b.lastChecked = time.Now()
	}
}
// printStatus clears the terminal and renders a per-product table of
// watched listings with their current stock state.
// NOTE(review): appears unused — main calls printSummary instead.
func (b *bot) printStatus() {
	// ANSI escapes: clear the screen, then move the cursor up one line.
	fmt.Printf("\033[H\033[2J")
	fmt.Printf("\033[A")
	// fmt.Fprintf(b.writer, "%s\t%s\n\n", "GPU", "IN STOCK")
	for _, prod := range b.Products {
		fmt.Fprintf(b.writer, "%s\n\n", prod.Name)
		for name, gpu := range b.Stock[prod.Name] {
			fmt.Fprintf(b.writer, "%s\t%v\n", name, gpu.inStock)
		}
		fmt.Fprintf(b.writer, "\n\n")
	}
	fmt.Fprintf(b.writer, "Last checked: %v", b.lastChecked.Format("Mon Jan 2 15:04:05 MST 2006"))
	b.writer.Flush()
}
// printSummary logs, for each configured product, how many of its
// watched listings are currently in stock.
func (b *bot) printSummary() {
	for _, product := range b.Products {
		inStockCount := 0
		for _, item := range b.Stock[product.Name] {
			if item.inStock {
				inStockCount++
			}
		}
		log.Infof("%s: %d", product.Name, inStockCount)
	}
}
// checkStock fetches every watched product page, updates the in-memory
// stock map, and sends a Discord notification whenever a listing's
// stock state flips.
func (b *bot) checkStock() {
	for _, prod := range b.Products {
		stock, ok := b.Stock[prod.Name]
		if !ok {
			stock = make(map[string]*gpu)
			b.Stock[prod.Name] = stock
		}
		for _, u := range prod.URLS {
			doc, err := getDoc(u)
			if err != nil {
				// Previously swallowed silently; log so fetch failures are visible.
				log.Error(err)
				continue
			}
			name := getName(doc)
			inStock, err := inStock(doc)
			if err != nil {
				log.Error(err)
				continue
			}
			g, ok := stock[name]
			if !ok {
				// First time this listing is seen: record it without notifying.
				stock[name] = &gpu{inStock: inStock, url: u}
				continue
			}
			switch {
			case inStock && !g.inStock:
				// Listing has come into stock.
				g.inStock = inStock
				fmt.Printf("%s is now in stock: %s\n", prod.Name, u)
				if err := b.sendDiscord(name, u, prod.Name); err != nil {
					log.Error(err)
				}
			case !inStock && g.inStock:
				// Listing has gone out of stock.
				g.inStock = inStock
				fmt.Printf("%s is out of stock: %s\n", prod.Name, u)
				if err := b.outOfStock(name, u, prod.Name); err != nil {
					log.Error(err)
				}
			}
		}
	}
}
/*func (b *bot) sendUpdate(gpu string, url string) {
attachment := slack.Attachment{
Color: "good",
AuthorName: gpu,
AuthorLink: url,
AuthorIcon: "https://avatars2.githubusercontent.com/u/652790",
Text: "This GPU is now in stock :smile",
FooterIcon: "https://platform.slack-edge.com/img/default_application_icon.png",
Ts: json.Number(strconv.FormatInt(time.Now().Unix(), 10)),
}
msg := slack.WebhookMessage{
Attachments: []slack.Attachment{attachment},
}
err := slack.PostWebhook(webhookURL, &msg)
if err != nil {
fmt.Println(err)
}
}*/
// getDoc fetches url and parses the response HTML into a goquery
// document. The response body is always closed before returning, which
// the original version failed to do (connection leak).
func getDoc(url string) (*goquery.Document, error) {
	// Get the HTML
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Convert HTML into goquery document
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return nil, err
	}
	return doc, nil
}
// getName extracts the product title text from an ebuyer product page.
func getName(doc *goquery.Document) string {
	return doc.Find(".product-hero__title").Text()
}
// getModel extracts the key-selling-point text (model description)
// from an ebuyer product page.
func getModel(doc *goquery.Document) string {
	return doc.Find(".product-hero__key-selling-point").Text()
}
// getPic is an unfinished helper meant to extract a product image URL
// from the page gallery; it currently only dumps candidate nodes to
// stdout and always returns the empty string.
func getPic(doc *goquery.Document) string {
	sel := doc.Find(".image-gallery__hero .js-gallery-trigger")
	for _, n := range sel.Nodes {
		if n.FirstChild != nil {
			// Debug output only; no URL is actually extracted yet.
			fmt.Println(*n.FirstChild)
		}
	}
	return ""
}
// inStock reports whether the product page renders a price in the
// purchase panel; ebuyer only shows a price for purchasable items, so
// its presence is used as the in-stock signal. The error is always nil
// today but is kept in the signature for callers.
func inStock(doc *goquery.Document) (bool, error) {
	sel := doc.Find(".purchase-info__price .inc-vat .price")
	return len(sel.Nodes) > 0, nil
}
<file_sep>/go.mod
module go-bot
go 1.16
require (
	github.com/BurntSushi/toml v0.3.1
	github.com/PuerkitoBio/goquery v1.6.1
	github.com/bwmarrin/discordgo v0.23.2
	github.com/konsorten/go-windows-terminal-sequences v1.0.1 // indirect
	github.com/sirupsen/logrus v1.8.1
	github.com/slack-go/slack v0.8.1 // indirect
	github.com/stretchr/objx v0.1.1 // indirect
)
<file_sep>/discord.go
package main
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"github.com/bwmarrin/discordgo"
)
const (
ebuyerAvatar = "https://media.glassdoor.com/sqll/764749/ebuyer-squarelogo-1396431281679.png"
)
// startUp announces to the Discord webhook that the bot has started
// and how many GPUs it is watching. The HTTP response body is closed
// to avoid leaking the connection (the original dropped it).
func (b *bot) startUp(num int) error {
	hook := discordgo.WebhookParams{
		Content: fmt.Sprintf("GPU stock bot is live! Watching %d GPU's.", num),
	}
	reqBody, err := json.Marshal(hook)
	if err != nil {
		return err
	}
	resp, err := http.Post(b.DiscordURL, "application/json", bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	resp.Body.Close()
	return nil
}
// sendDiscord posts an "in stock" notification embed (green accent) to
// the Discord webhook. The HTTP response body is closed to avoid
// leaking the connection (the original dropped it).
func (b *bot) sendDiscord(gpu, url, model string) error {
	embed := &discordgo.MessageEmbed{
		URL:    url,
		Author: &discordgo.MessageEmbedAuthor{Name: "eBuyer", IconURL: ebuyerAvatar, ProxyIconURL: ebuyerAvatar, URL: url},
		Title:  gpu,
		Color:  39219,
	}
	hook := discordgo.WebhookParams{
		Content: fmt.Sprintf("%s now in stock!", model),
		Embeds:  []*discordgo.MessageEmbed{embed},
	}
	reqBody, err := json.Marshal(hook)
	if err != nil {
		return err
	}
	resp, err := http.Post(b.DiscordURL, "application/json", bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	resp.Body.Close()
	return nil
}
// outOfStock posts an "out of stock" notification embed (red accent)
// to the Discord webhook. The HTTP response body is closed to avoid
// leaking the connection (the original dropped it).
func (b *bot) outOfStock(gpu, url, model string) error {
	embed := &discordgo.MessageEmbed{
		URL:    url,
		Author: &discordgo.MessageEmbedAuthor{Name: "eBuyer", IconURL: ebuyerAvatar, ProxyIconURL: ebuyerAvatar, URL: url},
		Title:  gpu,
		Color:  13369344,
	}
	hook := discordgo.WebhookParams{
		Content: fmt.Sprintf("%s now out of stock :rage:", model),
		Embeds:  []*discordgo.MessageEmbed{embed},
	}
	reqBody, err := json.Marshal(hook)
	if err != nil {
		return err
	}
	resp, err := http.Post(b.DiscordURL, "application/json", bytes.NewBuffer(reqBody))
	if err != nil {
		return err
	}
	resp.Body.Close()
	return nil
}
| 77a339e01d81d31e14204e5a2d815b4f40e9af05 | [
"TOML",
"Go Module",
"Go"
] | 4 | TOML | lewdawso/gpu-bot | 8bde8b7eec0d100527325cad7b7bd3263771f968 | f20c2f45ea0dbefe18792356cd305c669deeb37f | |
refs/heads/main | <file_sep>package com.flight;
/**
 * Record of a single flight booking made by a user.
 *
 * Flight number, route and date are fixed at construction; cost and
 * payment mode are filled in later, once payment is processed.
 * (Fix: removed the redundant super() call — the class has no explicit
 * superclass.)
 */
public class BookingHistory {

    private String flightNo;
    private String source;
    private String destination;
    private String bookingDate;
    // Set after construction, when the booking is paid for.
    private int cost;
    private String paymentMode;

    /**
     * Creates a booking record for the given flight and travel details.
     *
     * @param flightNo    flight number of the booked flight
     * @param source      departure city
     * @param destination arrival city
     * @param bookingDate date the booking was made
     */
    public BookingHistory(String flightNo, String source, String destination, String bookingDate) {
        this.flightNo = flightNo;
        this.source = source;
        this.destination = destination;
        this.bookingDate = bookingDate;
    }

    public String getFlightNo() {
        return flightNo;
    }

    public void setFlightNo(String flightNo) {
        this.flightNo = flightNo;
    }

    public String getSource() {
        return source;
    }

    public void setSource(String source) {
        this.source = source;
    }

    public String getDestination() {
        return destination;
    }

    public void setDestination(String destination) {
        this.destination = destination;
    }

    public String getBookingDate() {
        return bookingDate;
    }

    public void setBookingDate(String bookingDate) {
        this.bookingDate = bookingDate;
    }

    public int getCost() {
        return cost;
    }

    public void setCost(int cost) {
        this.cost = cost;
    }

    public String getPaymentMode() {
        return paymentMode;
    }

    public void setPaymentMode(String paymentMode) {
        this.paymentMode = paymentMode;
    }
}
<file_sep>package com.flight;
/**
 * Trivial credential check against a single hard-coded account.
 *
 * SECURITY NOTE: credentials are hard-coded in source; this is demo
 * code only and must be replaced with a real user store before any
 * production use.
 */
public class Login {

    // Hard-coded demo credentials.
    private String userName = "ABC";
    private String password = "XYZ";

    /**
     * Returns true when the supplied credentials match the stored ones.
     * Comparing from the stored (never-null) fields makes the check
     * null-safe: null arguments now yield false instead of a
     * NullPointerException.
     *
     * @param userName user name typed by the user (may be null)
     * @param password password typed by the user (may be null)
     * @return true if both values match the stored credentials
     */
    public boolean getLogin(String userName, String password) {
        return this.userName.equals(userName) && this.password.equals(password);
    }
}
<file_sep>package com.flight;
/**
 * Details captured when a new user registers.
 *
 * Fix: the constructor previously assigned the literal placeholder
 * {@code <PASSWORD>} to the password field (a leftover from secret
 * scrubbing that does not compile); it now stores the supplied
 * password argument. The redundant super() call was also removed.
 */
public class Registration {

    private String firstName;
    private String lastName;
    private String dateOfBirth;
    private String contactNumber;
    private String userName;
    private String password;
    private String mailId;

    /**
     * Creates a registration record from the user-supplied details.
     */
    public Registration(String firstName, String lastName, String dateOfBirth, String contactNumber, String userName,
            String password, String mailId) {
        this.firstName = firstName;
        this.lastName = lastName;
        this.dateOfBirth = dateOfBirth;
        this.contactNumber = contactNumber;
        this.userName = userName;
        this.password = password;
        this.mailId = mailId;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public String getDateOfBirth() {
        return dateOfBirth;
    }

    public void setDateOfBirth(String dateOfBirth) {
        this.dateOfBirth = dateOfBirth;
    }

    public String getContactNumber() {
        return contactNumber;
    }

    public void setContactNumber(String contactNumber) {
        this.contactNumber = contactNumber;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getMailId() {
        return mailId;
    }

    public void setMailId(String mailId) {
        this.mailId = mailId;
    }
}
<file_sep>package com.flight;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
public class FlightBookingMain {
    // Username of the currently logged-in user.
    private String userName;
    // Registered users keyed by username.
    Map<String, Registration> registerMap = new HashMap<>();
    // Usernames that have logged in during this session.
    List<String> loginList = new ArrayList<>();
    // Airline name -> list of departure times ("HH.mm" strings).
    Map<String, List<String>> flightInfoMap = new HashMap<>();
    // Supported airlines.
    private final String flight1 = "Emirates";
    private final String flight2 = "Indigo";
    private final String flight3 = "GoAir";
    private final String flight4 = "AirIndia";
    // Fixed daily timetable for each airline, parallel to flight1..flight4.
    private String[] flight1Timing = {"06.00","11.00", "14.30", "21.00"};
    private String[] flight2Timing = {"08.00","15.00", "16.30", "21.00"};
    private String[] flight3Timing = {"09.00","14.00", "16.30", "22.00"};
    private String[] flight4Timing = {"03.00","11.00", "18.30", "20.00"};
    // Past bookings per username.
    private Map<String,List<BookingHistory>> historyMap = new HashMap<>();
    /**
     * Entry point: loads the static flight timetable, then starts the
     * interactive login flow on stdin.
     */
    public static void main(String[] args) {
        FlightBookingMain fbm = new FlightBookingMain();
        fbm.loadFlightData();
        fbm.getLogin();
        //calling login method
    }
private void loadFlightData() {
flightInfoMap.put(flight1, Arrays.asList(flight1Timing));
flightInfoMap.put(flight2, Arrays.asList(flight2Timing));
flightInfoMap.put(flight3, Arrays.asList(flight3Timing));
flightInfoMap.put(flight4, Arrays.asList(flight4Timing));
}
    /**
     * Prompts for credentials on stdin and validates them against
     * {@link Login}. On success the user proceeds to ticket booking;
     * on failure the user is sent to registration instead.
     */
    private void getLogin() {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Enter user name: ");
        String userName = scanner.nextLine();
        System.out.println("Enter password");
        String password = scanner.nextLine();
        Login login = new Login();
        boolean isValid = login.getLogin(userName, password);
        // Deliberately left open: closing a Scanner that wraps System.in
        // would close System.in for the whole JVM and break later reads.
        // scanner.close();
        if(isValid) {
            this.userName = userName;
            System.out.println("Login success");
            loginList.add(userName);
            // Logged in: continue to the booking menu.
            flightTicketBooking();
        }
        else {
            // Unknown user: fall through to registration.
            System.out.println("Login failed");
            register();
        }
    }
/**
 * Interactive registration: reads the new user's details from stdin and
 * hands them to {@link RegisterUserInformation} for storage in
 * {@code registerMap}.
 *
 * NOTE(review): the local {@code userName} shadows the instance field, so a
 * freshly registered user is NOT logged in afterwards -- confirm that
 * requiring a second login is the intended flow.
 */
private void register() {
    String firstName;
    String lastName;
    String dateOfBirth;
    String contactNumber;
    String userName;
    String password;
    String mailId;
    Scanner scanner = new Scanner(System.in);
    System.out.println("Enter user first name: ");
    firstName = scanner.nextLine();
    System.out.println("Enter user last name");
    lastName = scanner.nextLine();
    System.out.println("Enter Date Of Birth");
    dateOfBirth = scanner.nextLine();
    System.out.println("Enter contact number: ");
    contactNumber = scanner.nextLine();
    System.out.println("Enter mail id: ");
    mailId = scanner.nextLine();
    System.out.println("Enter user name");
    userName = scanner.nextLine();
    System.out.println("Enter password");
    password = scanner.nextLine();
    Registration reg = new Registration(firstName, lastName, dateOfBirth, contactNumber, userName, password, mailId);
    RegisterUserInformation regInfo = new RegisterUserInformation();
    regInfo.registerUser(reg, registerMap, userName);
    // registerMap.put(userName, reg);
    // Closing this Scanner also closes System.in; safe only because the
    // program reads no further console input after registration.
    scanner.close();
}
/**
 * Post-login menu: option 1 shows the user's booking history, option 2
 * starts a new booking. Any other input falls through silently.
 */
private void flightTicketBooking() {
    System.out.println(" Choose Your option - 1. Booking History 2. New Booking");
    Scanner input = new Scanner(System.in);
    int choice = Integer.parseInt(input.nextLine());
    if (choice == 1) {
        displayBookingHistory();
    } else if (choice == 2) {
        flightBooking();
    }
}
/**
 * Delegates rendering of the current user's past bookings to the
 * {@link BookingHistoryInfo} service.
 */
private void displayBookingHistory() {
    new BookingHistoryInfo().displayBookingHistory(historyMap, userName);
}
/**
 * Interactive new-booking flow: shows the timetable, reads the booking
 * details from stdin, records the booking against the current user, and
 * optionally re-displays the booking history.
 */
private void flightBooking() {
    // display flight information
    FlightDetails();
    Scanner scanner = new Scanner(System.in);
    System.out.println("Enter Flight No");
    String flightNo = scanner.nextLine();
    System.out.println("Enter Flight Timing");
    String timing = scanner.nextLine();
    System.out.println("Enter Number of seats");
    String seats = scanner.nextLine();
    System.out.println("Enter Destination: ");   // FIX: was "Destionation"
    String destination = scanner.nextLine();
    System.out.println("Enter source");
    String source = scanner.nextLine();
    // Flat fare. NOTE(review): 'timing' and 'seats' are read but never
    // stored on the booking -- confirm whether BookingHistory should carry them.
    int cost = 9000;
    System.out.println("Total cost (Inc. Tax): INR " + cost);   // FIX: was "Total cose"
    String bookingDate = new Date().toString();
    BookingHistory bookingHistory = new BookingHistory(flightNo, source, destination, bookingDate);
    System.out.println("Enter payment mode");
    System.out.println("Credit Card, Debit Card, NetBanking, QR Code");
    String paymentMode = scanner.nextLine();   // FIX: was misspelled 'paymnetMode'
    bookingHistory.setPaymentMode(paymentMode);
    bookingHistory.setCost(cost);
    BookingHistoryInfo bookingInfo = new BookingHistoryInfo();
    bookingInfo.flightBooking(bookingHistory, historyMap, userName);
    System.out.println("Cancel Ticket, Booking History, Exit");
    String option = scanner.nextLine();
    if ("Booking History".equals(option)) {
        displayBookingHistory();
    }
    // Closing this Scanner also closes System.in; safe only because no
    // further console input is read after this point.
    scanner.close();
}
/**
 * Prints the departure timetable for every flight currently loaded into
 * {@code flightInfoMap}.
 */
private void FlightDetails() {
    for (Map.Entry<String, List<String>> entry : flightInfoMap.entrySet()) {
        flightTimings(entry.getValue(), entry.getKey());
    }
}
/**
 * Prints one flight's departure times, framed by separator lines.
 *
 * @param timingsList departure times to print, one per line
 * @param flightNo    the flight/carrier name shown in the header
 */
private void flightTimings(List<String> timingsList, String flightNo) {
    System.out.println("-----------------------");
    System.out.println("Flight No: "+flightNo +" Timings are: ");
    for (String timing : timingsList) {
        System.out.println(timing);
    }
    System.out.println("----------------");
    System.out.println();
}
} | 63601b41c3a3d5f8638f03ec946c49040d2e6b15 | [
"Java"
] | 4 | Java | nimmakayalamaheswari35/Flight-Architecture | b04b6576aedc7c380217b3a362e553bff813aa23 | 29d176bd62e529b8919a2b71a82d0cfd41ee8289 | |
refs/heads/master | <file_sep><?php namespace App\Domain\Entities\Kependudukan;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for a family ("keluarga") record.
 */
class Keluarga extends Model
{
    /**
     * Database table backing this model.
     *
     * @var string
     */
    protected $table = 'keluarga';

    /**
     * Columns that may be mass assigned via create()/update().
     *
     * @var array
     */
    protected $fillable = [
        'nik_kk',
        'nama_kk',
        'alamat',
        'rt',
        'rw',
        'dusun',
        'telepon',
        'status',
    ];

    /**
     * Cache tag used by the repositories to group and flush cached queries.
     *
     * @var string
     */
    public static $tags = 'keluarga';

    /**
     * Attributes hidden from array/JSON serialization.
     * (The previous comment said "aren't mass assignable", but $hidden only
     * affects serialization -- mass assignment is governed by $fillable.)
     *
     * @var array
     */
    protected $hidden = [
        'created_at',
        'updated_at'
    ];
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration: creates the "mutasi" (resident relocation) table.
 */
class CreateTableMutasi extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('mutasi', function (Blueprint $table) {
            $table->engine = 'InnoDB';
            $table->increments('id');
            // 16-digit national identity number; one mutasi row per person.
            $table->string('nik', 16)->unique();
            // FIX: was string('alamat', 1) -- a one-character address column,
            // almost certainly a typo; use the default length instead.
            $table->string('alamat');
            $table->string('rt', 3);
            // NOTE(review): rw allows 20 chars while rt allows 3 -- confirm
            // whether this asymmetry is intended.
            $table->string('rw', 20);
            $table->string('dusun');
            $table->integer('desa_id');
            $table->boolean('status')->default(0);
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('mutasi');
    }
}
<file_sep><?php namespace App\Http\Requests\Kependudukan;
use App\Http\Requests\Request;
use Illuminate\Contracts\Validation\Validator;
/**
 * Validation rules and error formatting for parent ("ortu") submissions.
 */
class OrtuFormRequest extends Request
{
    /**
     * Human-readable attribute names used in validation messages.
     *
     * @var array
     */
    protected $attrs = [
        'nik'          => 'NIK',
        'nik_bapak'    => 'NIK Bapak',
        'nama_bapak'   => '<NAME>',
        'status_bapak' => 'Status Bapak',
        'alamat_bapak' => 'Alamat Bapak',
        'nik_ibu'      => 'NIK Ibu',
        'nama_ibu'     => 'Nama Ibu',
        'status_ibu'   => 'Status Ibu',
        'alamat_ibu'   => 'Alamat Ibu',
    ];

    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        return [
            'nik'          => 'required|max:16|unique:ortu,nik',
            'nik_bapak'    => 'required|max:16',
            'nama_bapak'   => 'required|max:50',
            'status_bapak' => 'required|max:25',
            'alamat_bapak' => 'required|max:50',
            'nik_ibu'      => 'required|max:16',
            'nama_ibu'     => 'required|max:50',
            'status_ibu'   => 'required|max:25',
            'alamat_ibu'   => 'required|max:50',
        ];
    }

    /**
     * Build the validator using our rules, messages and attribute names.
     *
     * @param $validator
     *
     * @return mixed
     */
    public function validator($validator)
    {
        return $validator->make($this->all(), $this->container->call([$this, 'rules']), $this->messages(), $this->attrs);
    }

    /**
     * Shape validation failures as {success: false, validation: {field: msg}}.
     *
     * @param Validator $validator
     *
     * @return array
     */
    protected function formatErrors(Validator $validator)
    {
        $errors = $validator->errors();

        // One entry per declared attribute, in declaration order; fields
        // without an error yield an empty string, exactly as before.
        $validation = [];
        foreach (array_keys($this->attrs) as $field) {
            $validation[$field] = $errors->first($field);
        }

        return [
            'success'    => false,
            'validation' => $validation,
        ];
    }
}
<file_sep><?php namespace App\Http\Requests\Kependudukan;
use App\Http\Requests\Request;
use Illuminate\Contracts\Validation\Validator;
/**
 * Validation rules and error formatting for person-detail submissions.
 */
class PribadiRincianFormRequest extends Request
{
    /**
     * Human-readable attribute names used in validation messages.
     *
     * @var array
     */
    protected $attrs = [
        'nik'            => 'NIK',
        'kelainan_fisik' => 'Kelainan Fisik',
        'cacat_fisik'    => 'Cacat Fisik',
        'warga_negara'   => 'Warga Negara',
        'website'        => 'Website',
        'email'          => 'Email',
        'telp'           => 'Telepon',
    ];

    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        return [
            'nik'            => 'required|max:16|unique:pribadi_rincian,nik',
            'kelainan_fisik' => 'max:100',
            'cacat_fisik'    => 'max:100',
            'warga_negara'   => 'required|max:30',
            'website'        => 'max:80|url',
            'email'          => 'max:80|email',
            'telp'           => 'max:15',
        ];
    }

    /**
     * Build the validator using our rules, messages and attribute names.
     *
     * @param $validator
     *
     * @return mixed
     */
    public function validator($validator)
    {
        return $validator->make($this->all(), $this->container->call([$this, 'rules']), $this->messages(), $this->attrs);
    }

    /**
     * Shape validation failures as {success: false, validation: {field: msg}}.
     *
     * @param Validator $validator
     *
     * @return array
     */
    protected function formatErrors(Validator $validator)
    {
        $errors = $validator->errors();

        // One entry per declared attribute, in declaration order; fields
        // without an error yield an empty string, exactly as before.
        $validation = [];
        foreach (array_keys($this->attrs) as $field) {
            $validation[$field] = $errors->first($field);
        }

        return [
            'success'    => false,
            'validation' => $validation,
        ];
    }
}
<file_sep><?php namespace App\Domain\Entities\Kependudukan;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model linking one person's NIK to both parents' identity data.
 */
class Ortu extends Model
{
    /**
     * Database table backing this model.
     *
     * @var string
     */
    protected $table = 'ortu';

    /**
     * Columns that may be mass assigned via create()/update().
     *
     * @var array
     */
    protected $fillable = [
        'nik',
        'nik_bapak',
        'nama_bapak',
        'status_bapak',
        'alamat_bapak',
        'nik_ibu',
        'nama_ibu',
        'status_ibu',
        'alamat_ibu',
        'status',
    ];

    /**
     * Cache tag used by the repositories to group and flush cached queries.
     *
     * @var string
     */
    public static $tags = 'ortu';

    /**
     * Attributes hidden from array/JSON serialization.
     * (The previous comment said "aren't mass assignable", but $hidden only
     * affects serialization -- mass assignment is governed by $fillable.)
     *
     * @var array
     */
    protected $hidden = [
        'created_at',
        'updated_at'
    ];
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration: creates the "pribadi" (person) table, the core resident record.
 */
class CreateTablePribadi extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('pribadi', function (Blueprint $table) {
            $table->engine = 'InnoDB';
            $table->increments('id');
            // NOTE(review): stored as string(18) while the keluarga table uses
            // an auto-increment integer id -- confirm whether this holds the
            // family-card number (nik_kk) rather than keluarga.id.
            $table->string('keluarga_id', 18);
            // NOTE(review): 25 chars here, but every other table (ortu, mutasi,
            // biometric, dokumen) declares nik as string(16) -- confirm.
            $table->string('nik', 25)->unique();
            $table->string('title_depan', 20)->nullable();
            $table->string('title_belakang', 20)->nullable();
            $table->string('nama');
            $table->string('kelamin');
            $table->string('tempat_lahir');
            // Birth date kept as a plain string, not a date column.
            $table->string('tanggal_lahir');
            $table->string('golongan_darah')->nullable();
            $table->string('agama');
            $table->string('status_kawin');
            $table->string('status_keluarga');
            $table->string('pendidikan')->nullable();
            $table->string('pekerjaan')->nullable();
            $table->boolean('status')->default(0);
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('pribadi');
    }
}
<file_sep><?php namespace App\Http\Controllers\Kependudukan;
use App\Domain\Repositories\Kependudukan\KeluargaRepository;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Http\Controllers\Controller;
use App\Http\Requests\Kependudukan\KeluargaFormRequest;
/**
 * CRUD endpoints for family ("keluarga") records; all persistence is
 * delegated to KeluargaRepository.
 */
class KeluargaController extends Controller
{
    /**
     * Repository handling keluarga persistence and caching.
     *
     * @var KeluargaRepository
     */
    protected $keluarga;

    /**
     * @param KeluargaRepository $keluarga
     */
    public function __construct(KeluargaRepository $keluarga)
    {
        $this->keluarga = $keluarga;
        // $this->middleware('auth');
    }

    /**
     * Paginated listing, optionally filtered by the 'term' query parameter.
     *
     * @param Request $request
     *
     * @return mixed
     */
    public function index(Request $request)
    {
        $page = $request->input('page');
        $term = $request->input('term');

        return $this->keluarga->getByPage(10, $page, ['*'], '', $term);
    }

    /**
     * Create a new record from validated input.
     *
     * @param KeluargaFormRequest $request
     *
     * @return mixed
     */
    public function store(KeluargaFormRequest $request)
    {
        return $this->keluarga->create($request->all());
    }

    /**
     * Fetch a single record by id.
     *
     * @param int $id
     *
     * @return mixed
     */
    public function show($id)
    {
        return $this->keluarga->find($id);
    }

    /**
     * Update a record from validated input.
     *
     * @param KeluargaFormRequest $request
     * @param int                 $id
     *
     * @return mixed
     */
    public function update(KeluargaFormRequest $request, $id)
    {
        return $this->keluarga->update($id, $request->all());
    }

    /**
     * Delete a record by id.
     *
     * @param int $id
     *
     * @return mixed
     */
    public function destroy($id)
    {
        return $this->keluarga->delete($id);
    }
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration: creates the "kabkot" (kabupaten/kota -- regency/city) table.
 */
class CreateTableKabkot extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('kabkot', function (Blueprint $table) {
            $table->engine = 'InnoDB';
            $table->increments('id');
            // References provinsi.id; no FK constraint declared at the DB level.
            $table->integer('provinsi_id');
            $table->string('kode_kabupaten', 3);
            $table->string('kabupaten');
            $table->integer('is_status');
            // Presumably the time-zone code (WIB/WITA/WIT) -- TODO confirm.
            $table->string('waktu', 4);
            // Cross-reference codes into external numbering schemes
            // (pbb/arsip/kodepos/ramil) -- semantics to be confirmed.
            $table->integer('pbb_kabukot_kode');
            $table->integer('arsip_kabukot_kode');
            $table->integer('kodepos_kabukot_kode');
            $table->integer('ramil_kabukot_kode');
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('kabkot');
    }
}
<file_sep><?php namespace App\Domain\Repositories\Kependudukan;
use App\Domain\Contracts\Cacheable;
use App\Domain\Contracts\Crudable;
use App\Domain\Contracts\Paginable;
use App\Domain\Entities\Kependudukan\Pribadi;
use App\Domain\Repositories\AbstractRepository;
use Illuminate\Support\Facades\Log;
/**
 * Repository for Pribadi (person) records with a read-through cache layer.
 * Reads are served from cache when possible; every write flushes all
 * entries tagged Pribadi::$tags.
 */
class PribadiRepository extends AbstractRepository implements Crudable, Paginable
{
    /**
     * Cache backend; entries are tagged with Pribadi::$tags.
     *
     * @var Cacheable
     */
    protected $cache;

    /**
     * @param Pribadi   $pribadi
     * @param Cacheable $cache
     */
    public function __construct(Pribadi $pribadi, Cacheable $cache)
    {
        $this->model = $pribadi;
        $this->cache = $cache;
    }

    /**
     * Build the column => value array shared by create() and update().
     * Optional fields become null when empty; the status flag defaults to '0'.
     *
     * @param array $data
     *
     * @return array
     */
    protected function buildAttributes(array $data)
    {
        return [
            'keluarga_id'     => e($data['keluarga_id']),
            'nik'             => e($data['nik']),
            'title_depan'     => empty($data['title_depan']) ? null : e($data['title_depan']),
            'title_belakang'  => empty($data['title_belakang']) ? null : e($data['title_belakang']),
            'nama'            => e($data['nama']),
            'kelamin'         => e($data['kelamin']),
            'tempat_lahir'    => e($data['tempat_lahir']),
            'tanggal_lahir'   => e($data['tanggal_lahir']),
            'golongan_darah'  => empty($data['golongan_darah']) ? null : e($data['golongan_darah']),
            'agama'           => e($data['agama']),
            'status_kawin'    => e($data['status_kawin']),
            'status_keluarga' => e($data['status_keluarga']),
            'pendidikan'      => empty($data['pendidikan']) ? null : e($data['pendidikan']),
            'pekerjaan'       => empty($data['pekerjaan']) ? null : e($data['pekerjaan']),
            'status'          => empty($data['status']) ? '0' : e($data['status']),
        ];
    }

    /**
     * Fetch one record by id, serving from cache when possible.
     *
     * @param int   $id
     * @param array $columns
     *
     * @return \Illuminate\Database\Eloquent\Model
     */
    public function find($id, array $columns = ['*'])
    {
        $key = 'pribadi-find-' . $id;
        if ($this->cache->has(Pribadi::$tags, $key)) {
            return $this->cache->get(Pribadi::$tags, $key);
        }
        $record = parent::find($id, $columns);
        $this->cache->put(Pribadi::$tags, $key, $record, 10);
        return $record;
    }

    /**
     * Insert a new record and invalidate all cached pribadi queries.
     *
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function create(array $data)
    {
        try {
            $created = parent::create($this->buildAttributes($data));
            $this->cache->flush(Pribadi::$tags);
            return $created;
        } catch (\Exception $e) {
            Log::error('class : ' . PribadiRepository::class . ' method : create | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Update an existing record and invalidate all cached pribadi queries.
     *
     * @param $id
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function update($id, array $data)
    {
        try {
            $updated = parent::update($id, $this->buildAttributes($data));
            $this->cache->flush(Pribadi::$tags);
            return $updated;
        } catch (\Exception $e) {
            Log::error('class : ' . PribadiRepository::class . ' method : update | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Delete a record and invalidate all cached pribadi queries.
     *
     * @param $id
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function delete($id)
    {
        try {
            $deleted = parent::delete($id);
            $this->cache->flush(Pribadi::$tags);
            return $deleted;
        } catch (\Exception $e) {
            Log::error('class : ' . PribadiRepository::class . ' method : delete | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Paginated listing -- searches on 'nik', ordered by 'nama' -- cached
     * per page/limit/search combination. The $field argument is accepted
     * for interface compatibility but the search column is fixed to 'nik'.
     *
     * @param int    $limit
     * @param int    $page
     * @param array  $column
     * @param string $field
     * @param string $search
     *
     * @return \Illuminate\Pagination\Paginator
     */
    public function getByPage($limit = 10, $page = 1, array $column = ['*'], $field, $search = '')
    {
        $key = 'pribadi-get-by-page-' . $page . $limit . $search;
        if ($this->cache->has(Pribadi::$tags, $key)) {
            return $this->cache->get(Pribadi::$tags, $key);
        }
        $result = parent::getByPageOrderBy($limit, $page, $column, 'nik', $search, 'nama');
        $this->cache->put(Pribadi::$tags, $key, $result, 10);
        return $result;
    }
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration: creates the "ortu" (parents) table, one row per child NIK.
 */
class CreateTableOrtu extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('ortu', function (Blueprint $table) {
            $table->engine = 'InnoDB';
            $table->increments('id');
            // One parents-record per child.
            $table->string('nik', 16)->unique();
            // FIX: parent NIK columns were declared unique(), which would
            // reject the second child of the same father/mother (siblings
            // share parent NIKs). Use plain indexes for lookups instead.
            $table->string('nik_bapak', 16)->index();
            $table->string('nama_bapak', 55);
            $table->string('status_bapak', 30);
            $table->string('alamat_bapak', 55);
            $table->string('nik_ibu', 16)->index();
            $table->string('nama_ibu', 55);
            $table->string('status_ibu', 30);
            $table->string('alamat_ibu', 55);
            $table->boolean('status')->default(0);
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('ortu');
    }
}
<file_sep><?php namespace App\Domain\Entities\Kependudukan;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for a person ("pribadi") record, keyed by NIK and linked
 * to a family via keluarga_id.
 */
class Pribadi extends Model
{
    /**
     * Database table backing this model.
     *
     * @var string
     */
    protected $table = 'pribadi';

    /**
     * Columns that may be mass assigned via create()/update().
     *
     * @var array
     */
    protected $fillable = [
        'keluarga_id',
        'nik',
        'title_depan',
        'title_belakang',
        'nama',
        'kelamin',
        'tempat_lahir',
        'tanggal_lahir',
        'golongan_darah',
        'agama',
        'status_kawin',
        'status_keluarga',
        'pendidikan',
        'pekerjaan',
        'status',
    ];

    /**
     * Cache tag used by the repositories to group and flush cached queries.
     *
     * @var string
     */
    public static $tags = 'pribadi';

    /**
     * Attributes hidden from array/JSON serialization.
     * (The previous comment said "aren't mass assignable", but $hidden only
     * affects serialization -- mass assignment is governed by $fillable.)
     *
     * @var array
     */
    protected $hidden = [
        'created_at',
        'updated_at'
    ];
}
<file_sep><?php namespace App\Http\Controllers\Kependudukan;
use App\Domain\Repositories\Kependudukan\PribadiRincianRepository;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Http\Controllers\Controller;
use App\Http\Requests\Kependudukan\PribadiRincianFormRequest;
/**
 * CRUD endpoints for person-detail ("pribadi rincian") records; all
 * persistence is delegated to PribadiRincianRepository.
 */
class PribadiRincianController extends Controller
{
    /**
     * Repository handling pribadi-rincian persistence and caching.
     *
     * @var PribadiRincianRepository
     */
    protected $pribadi;

    /**
     * @param PribadiRincianRepository $pribadi
     */
    public function __construct(PribadiRincianRepository $pribadi)
    {
        $this->pribadi = $pribadi;
        // $this->middleware('auth');
    }

    /**
     * Paginated listing, optionally filtered by the 'term' query parameter.
     *
     * @param Request $request
     *
     * @return mixed
     */
    public function index(Request $request)
    {
        $page = $request->input('page');
        $term = $request->input('term');

        return $this->pribadi->getByPage(10, $page, ['*'], '', $term);
    }

    /**
     * Create a new record from validated input.
     *
     * @param PribadiRincianFormRequest $request
     *
     * @return mixed
     */
    public function store(PribadiRincianFormRequest $request)
    {
        return $this->pribadi->create($request->all());
    }

    /**
     * Fetch a single record by id.
     *
     * @param int $id
     *
     * @return mixed
     */
    public function show($id)
    {
        return $this->pribadi->find($id);
    }

    /**
     * Update a record from validated input.
     *
     * @param PribadiRincianFormRequest $request
     * @param int                       $id
     *
     * @return mixed
     */
    public function update(PribadiRincianFormRequest $request, $id)
    {
        return $this->pribadi->update($id, $request->all());
    }

    /**
     * Delete a record by id.
     *
     * @param int $id
     *
     * @return mixed
     */
    public function destroy($id)
    {
        return $this->pribadi->delete($id);
    }
}
<file_sep><?php namespace App\Domain\Entities\Kependudukan;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for supplementary person details (citizenship, contact
 * info, physical notes), keyed by NIK.
 */
class PribadiRincian extends Model
{
    /**
     * Database table backing this model.
     *
     * @var string
     */
    protected $table = 'pribadi_rincian';

    /**
     * Columns that may be mass assigned via create()/update().
     *
     * @var array
     */
    protected $fillable = [
        'nik',
        'kelainan_fisik',
        'cacat_fisik',
        'warga_negara',
        'website',
        'email',
        'telp',
        'status',
    ];

    /**
     * Cache tag used by the repositories to group and flush cached queries.
     *
     * @var string
     */
    public static $tags = 'pribadi-rincian';

    /**
     * Attributes hidden from array/JSON serialization.
     * (The previous comment said "aren't mass assignable", but $hidden only
     * affects serialization -- mass assignment is governed by $fillable.)
     *
     * @var array
     */
    protected $hidden = [
        'created_at',
        'updated_at'
    ];
}
<file_sep><?php namespace App\Http\Controllers\Kependudukan;
use App\Domain\Repositories\Kependudukan\OrtuRepository;
use App\Domain\Repositories\Kependudukan\PribadiRepository;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Http\Controllers\Controller;
use App\Http\Requests\Kependudukan\OrtuFormRequest;
/**
 * CRUD endpoints for parent ("ortu") records; all persistence is delegated
 * to OrtuRepository.
 */
class OrtuController extends Controller
{
    /**
     * Repository handling ortu persistence and caching.
     * (Fixed annotation: previously documented as PribadiRepository, but the
     * constructor injects an OrtuRepository.)
     *
     * @var OrtuRepository
     */
    protected $ortu;

    /**
     * @param OrtuRepository $ortu
     */
    public function __construct(OrtuRepository $ortu)
    {
        $this->ortu = $ortu;
        // $this->middleware('auth');
    }

    /**
     * Paginated listing, optionally filtered by the 'term' query parameter.
     *
     * @param Request $request
     *
     * @return mixed
     */
    public function index(Request $request)
    {
        return $this->ortu->getByPage(10, $request->input('page'), $column = ['*'], $key = '', $request->input('term'));
    }

    /**
     * Create a new record from validated input.
     *
     * @param OrtuFormRequest $request
     *
     * @return mixed
     */
    public function store(OrtuFormRequest $request)
    {
        return $this->ortu->create($request->all());
    }

    /**
     * Fetch a single record by id.
     *
     * @param int $id
     *
     * @return mixed
     */
    public function show($id)
    {
        return $this->ortu->find($id);
    }

    /**
     * Update a record from validated input.
     *
     * @param OrtuFormRequest $request
     * @param int $id
     *
     * @return mixed
     */
    public function update(OrtuFormRequest $request, $id)
    {
        return $this->ortu->update($id, $request->all());
    }

    /**
     * Delete a record by id.
     *
     * @param int $id
     *
     * @return mixed
     */
    public function destroy($id)
    {
        return $this->ortu->delete($id);
    }
}
<file_sep><?php namespace App\Domain\Repositories\Kependudukan;
use App\Domain\Contracts\Cacheable;
use App\Domain\Contracts\Crudable;
use App\Domain\Contracts\Paginable;
use App\Domain\Entities\Kependudukan\Keluarga;
use App\Domain\Repositories\AbstractRepository;
use Illuminate\Support\Facades\Log;
/**
 * Repository for Keluarga (family) records with a read-through cache layer.
 * Reads are served from cache when possible; every write flushes all
 * entries tagged Keluarga::$tags.
 */
class KeluargaRepository extends AbstractRepository implements Crudable, Paginable
{
    /**
     * Cache backend; entries are tagged with Keluarga::$tags.
     *
     * @var Cacheable
     */
    protected $cache;

    /**
     * @param Keluarga  $keluarga
     * @param Cacheable $cache
     */
    public function __construct(Keluarga $keluarga, Cacheable $cache)
    {
        $this->model = $keluarga;
        $this->cache = $cache;
    }

    /**
     * Fetch one record by id, serving from cache when possible.
     *
     * @param int   $id
     * @param array $columns
     *
     * @return \Illuminate\Database\Eloquent\Model
     */
    public function find($id, array $columns = ['*'])
    {
        // set key
        $key = 'keluarga-find-' . $id;
        // has section and key
        if ($this->cache->has(Keluarga::$tags, $key)) {
            return $this->cache->get(Keluarga::$tags, $key);
        }
        // query to sql
        $result = parent::find($id, $columns);
        // store to cache (10 minute TTL, matching the other repositories)
        $this->cache->put(Keluarga::$tags, $key, $result, 10);
        return $result;
    }

    /**
     * Insert a new record and invalidate all cached keluarga queries.
     * Optional 'telepon' becomes null when empty; 'status' defaults to '0'.
     *
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function create(array $data)
    {
        try {
            // execute sql insert
            $result = parent::create([
                'nik_kk' => e($data['nik_kk']),
                'nama_kk' => e($data['nama_kk']),
                'alamat' => e($data['alamat']),
                'rt' => e($data['rt']),
                'rw' => e($data['rw']),
                'dusun' => e($data['dusun']),
                'telepon' => (empty($data['telepon'])) ? null : e($data['telepon']),
                'status' => (empty($data['status'])) ? '0' : e($data['status']),
            ]);
            // flush cache with tags
            $this->cache->flush(Keluarga::$tags);
            return $result;
        } catch (\Exception $e) {
            // store errors to log
            Log::error('class : ' . KeluargaRepository::class . ' method : create | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Update an existing record and invalidate all cached keluarga queries.
     *
     * @param $id
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function update($id, array $data)
    {
        try {
            $result = parent::update($id, [
                'nik_kk' => e($data['nik_kk']),
                'nama_kk' => e($data['nama_kk']),
                'alamat' => e($data['alamat']),
                'rt' => e($data['rt']),
                'rw' => e($data['rw']),
                'dusun' => e($data['dusun']),
                // FIX: now mirrors create() -- previously a missing 'telepon'
                // key raised an undefined-index notice and an empty value was
                // stored as '' instead of null.
                'telepon' => (empty($data['telepon'])) ? null : e($data['telepon']),
                'status' => (empty($data['status'])) ? '0' : e($data['status']),
            ]);
            // flush cache with tags
            $this->cache->flush(Keluarga::$tags);
            return $result;
        } catch (\Exception $e) {
            // store errors to log
            Log::error('class : ' . KeluargaRepository::class . ' method : update | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Delete a record and invalidate all cached keluarga queries.
     *
     * @param $id
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function delete($id)
    {
        try {
            $result = parent::delete($id);
            // flush cache with tags
            $this->cache->flush(Keluarga::$tags);
            return $result;
        } catch (\Exception $e) {
            // store errors to log
            Log::error('class : ' . KeluargaRepository::class . ' method : delete | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Paginated listing -- searches on 'nik_kk', ordered by 'nama_kk' --
     * cached per page/limit/search combination. The $field argument is
     * accepted for interface compatibility but the search column is fixed.
     *
     * @param int    $limit
     * @param int    $page
     * @param array  $column
     * @param string $field
     * @param string $search
     *
     * @return \Illuminate\Pagination\Paginator
     */
    public function getByPage($limit = 10, $page = 1, array $column = ['*'], $field, $search = '')
    {
        // set key
        $key = 'keluarga-get-by-page-' . $page . $limit . $search;
        // has section and key
        if ($this->cache->has(Keluarga::$tags, $key)) {
            return $this->cache->get(Keluarga::$tags, $key);
        }
        // query to sql (fixed typo: was "query to aql")
        $result = parent::getByPageOrderBy($limit, $page, $column, 'nik_kk', $search, 'nama_kk');
        // store to cache
        $this->cache->put(Keluarga::$tags, $key, $result, 10);
        return $result;
    }
}
<file_sep><?php
use Illuminate\Database\Seeder;
/**
 * Seeder: resets the "provinsi" table and inserts the single province this
 * deployment targets (35 = Jawa Timur).
 */
class ProvinsiSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        // Truncate first so reseeding is idempotent (destroys existing rows).
        DB::table('provinsi')->truncate();
        // External cross-reference codes are left empty for now.
        DB::table('provinsi')->insert([
            ['id' => 1, 'kode_provinsi' => '35', 'provinsi' => 'Jawa Timur', 'pbb_prov_kode' => '', 'arsip_prov_kode' => '', 'kodepos_prov_kode' => '', 'ramil_prov_kode' => '', 'created_at' => \Carbon\Carbon::now()],
        ]);
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Dingo\Api\Http\Request;
use Dingo\Api\Exception\ValidationHttpException;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
/**
* Class Controller
*
* @package App\Http\Controllers
*/
/**
 * Class Controller
 *
 * Base controller for the API: overrides validation so failures raise a
 * Dingo ValidationHttpException (JSON error response) instead of Laravel's
 * default redirect-back behaviour.
 *
 * @package App\Http\Controllers
 */
abstract class Controller extends BaseController
{
    use AuthorizesRequests, ValidatesRequests;

    /**
     * Validate the incoming request against the given rules.
     *
     * @param Request $request
     * @param array   $rules
     * @param array   $messages
     * @param array   $customAttributes
     *
     * @throws ValidationHttpException when any rule fails (carries the
     *                                 validator's error bag)
     */
    public function validate(Request $request, array $rules, array $messages = [], array $customAttributes = [])
    {
        $validator = $this->getValidationFactory()->make($request->all(), $rules, $messages, $customAttributes);

        if ($validator->fails()) {
            throw new ValidationHttpException($validator->errors());
        }
    }
}
<file_sep><?php namespace App\Domain\Repositories\Kependudukan;
use App\Domain\Contracts\Cacheable;
use App\Domain\Contracts\Crudable;
use App\Domain\Contracts\Paginable;
use App\Domain\Entities\Kependudukan\Ortu;
use App\Domain\Entities\Kependudukan\Pribadi;
use App\Domain\Repositories\AbstractRepository;
use Illuminate\Support\Facades\Log;
/**
 * Repository for Ortu (parents) records with a read-through cache layer.
 * Reads are served from cache when possible; every write flushes all
 * entries tagged Ortu::$tags.
 */
class OrtuRepository extends AbstractRepository implements Crudable, Paginable
{
    /**
     * Cache backend; entries are tagged with Ortu::$tags.
     *
     * @var Cacheable
     */
    protected $cache;

    /**
     * @param Ortu      $ortu
     * @param Cacheable $cache
     */
    public function __construct(Ortu $ortu, Cacheable $cache)
    {
        $this->model = $ortu;
        $this->cache = $cache;
    }

    /**
     * Build the column => value array shared by create() and update().
     * The status flag defaults to '0' when empty.
     *
     * @param array $data
     *
     * @return array
     */
    protected function buildAttributes(array $data)
    {
        return [
            'nik'          => e($data['nik']),
            'nik_bapak'    => e($data['nik_bapak']),
            'nama_bapak'   => e($data['nama_bapak']),
            'status_bapak' => e($data['status_bapak']),
            'alamat_bapak' => e($data['alamat_bapak']),
            'nik_ibu'      => e($data['nik_ibu']),
            'nama_ibu'     => e($data['nama_ibu']),
            'status_ibu'   => e($data['status_ibu']),
            'alamat_ibu'   => e($data['alamat_ibu']),
            'status'       => empty($data['status']) ? '0' : e($data['status']),
        ];
    }

    /**
     * Fetch one record by id, serving from cache when possible.
     *
     * @param int   $id
     * @param array $columns
     *
     * @return \Illuminate\Database\Eloquent\Model
     */
    public function find($id, array $columns = ['*'])
    {
        $key = 'ortu-find-' . $id;
        if ($this->cache->has(Ortu::$tags, $key)) {
            return $this->cache->get(Ortu::$tags, $key);
        }
        $record = parent::find($id, $columns);
        $this->cache->put(Ortu::$tags, $key, $record, 10);
        return $record;
    }

    /**
     * Insert a new record and invalidate all cached ortu queries.
     *
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function create(array $data)
    {
        try {
            $created = parent::create($this->buildAttributes($data));
            $this->cache->flush(Ortu::$tags);
            return $created;
        } catch (\Exception $e) {
            Log::error('class : ' . OrtuRepository::class . ' method : create | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Update an existing record and invalidate all cached ortu queries.
     *
     * @param $id
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function update($id, array $data)
    {
        try {
            $updated = parent::update($id, $this->buildAttributes($data));
            $this->cache->flush(Ortu::$tags);
            return $updated;
        } catch (\Exception $e) {
            Log::error('class : ' . OrtuRepository::class . ' method : update | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Delete a record and invalidate all cached ortu queries.
     *
     * @param $id
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function delete($id)
    {
        try {
            $deleted = parent::delete($id);
            $this->cache->flush(Ortu::$tags);
            return $deleted;
        } catch (\Exception $e) {
            Log::error('class : ' . OrtuRepository::class . ' method : delete | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Paginated listing -- searches on 'nik', ordered by 'created_at' --
     * cached per page/limit/search combination. The $field argument is
     * accepted for interface compatibility but the search column is fixed.
     *
     * @param int    $limit
     * @param int    $page
     * @param array  $column
     * @param string $field
     * @param string $search
     *
     * @return \Illuminate\Pagination\Paginator
     */
    public function getByPage($limit = 10, $page = 1, array $column = ['*'], $field, $search = '')
    {
        $key = 'ortu-get-by-page-' . $page . $limit . $search;
        if ($this->cache->has(Ortu::$tags, $key)) {
            return $this->cache->get(Ortu::$tags, $key);
        }
        $result = parent::getByPageOrderBy($limit, $page, $column, 'nik', $search, 'created_at');
        $this->cache->put(Ortu::$tags, $key, $result, 10);
        return $result;
    }
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration: creates the "biometric" table holding photo, signature, iris
 * scans and all ten fingerprints for one person (keyed by NIK).
 */
class CreateTableBiometric extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('biometric', function (Blueprint $table) {
            $table->engine = 'InnoDB';
            $table->increments('id');
            $table->string('nik', 16)->unique();
            $table->string('foto');
            // FIX: was string('tanda tangan') -- the column name contained a
            // space; renamed to snake_case to match every other column.
            $table->string('tanda_tangan');
            $table->text('iris_mata_kanan')->nullable();
            $table->text('iris_mata_kiri')->nullable();
            // Left-hand fingerprints (little finger through thumb).
            $table->text('kelingking_kiri')->nullable();
            $table->text('manis_kiri')->nullable();
            $table->text('tengah_kiri')->nullable();
            $table->text('telunjuk_kiri')->nullable();
            $table->text('jempol_kiri')->nullable();
            // Right-hand fingerprints (little finger through thumb).
            $table->text('kelingking_kanan')->nullable();
            $table->text('manis_kanan')->nullable();
            $table->text('tengah_kanan')->nullable();
            $table->text('telunjuk_kanan')->nullable();
            $table->text('jempol_kanan')->nullable();
            $table->boolean('status')->default(0);
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('biometric');
    }
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration: creates the "dokumen" table, one issued document per person
 * (keyed by NIK).
 */
class CreateTableDokumen extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('dokumen', function (Blueprint $table) {
            $table->engine = 'InnoDB';
            $table->increments('id');
            // NOTE(review): unique on nik limits a person to a single
            // document row -- confirm that is intended.
            $table->string('nik', 16)->unique();
            // Single-character document-type code.
            $table->string('jenis_dokumen', 1);
            $table->string('nomor_dokumen', 32);
            $table->string('tempat_terbit', 58);
            // Issue date kept as a plain string, not a date column.
            $table->string('tanggal_terbit', 16);
            $table->string('nomor_seri', 58);
            // Name/title of the issuing official.
            $table->string('pejabat', 200);
            $table->boolean('status')->default(0);
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('dokumen');
    }
}
<file_sep><?php
use Illuminate\Database\Seeder;
/**
 * Seeder: resets the "kecamatan" table and inserts the five districts of
 * Kota Malang (kabupaten_id 73).
 */
class KecamatanSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        // Truncate first so reseeding is idempotent (destroys existing rows).
        DB::table('kecamatan')->truncate();
        // NOTE(review): unlike ProvinsiSeeder these rows omit created_at --
        // confirm whether timestamps should be set here too. External
        // cross-reference codes are left empty for now.
        DB::table('kecamatan')->insert([
            ['id' => 1, 'kabupaten_id' => '73', 'kode_kecamatan' => '01', 'kecamatan' => 'Blimbing', 'pbb_kec_kode' => '', 'arsip_kec_kode' => '', 'kodepos_kec_kode' => '', 'ramil_kec_kode' => ''],
            ['id' => 2, 'kabupaten_id' => '73', 'kode_kecamatan' => '02', 'kecamatan' => 'Klojen', 'pbb_kec_kode' => '', 'arsip_kec_kode' => '', 'kodepos_kec_kode' => '', 'ramil_kec_kode' => ''],
            ['id' => 3, 'kabupaten_id' => '73', 'kode_kecamatan' => '03', 'kecamatan' => 'Kedungkandang', 'pbb_kec_kode' => '', 'arsip_kec_kode' => '', 'kodepos_kec_kode' => '', 'ramil_kec_kode' => ''],
            ['id' => 4, 'kabupaten_id' => '73', 'kode_kecamatan' => '04', 'kecamatan' => 'Sukun', 'pbb_kec_kode' => '', 'arsip_kec_kode' => '', 'kodepos_kec_kode' => '', 'ramil_kec_kode' => ''],
            ['id' => 5, 'kabupaten_id' => '73', 'kode_kecamatan' => '05', 'kecamatan' => 'Lowokwaru', 'pbb_kec_kode' => '', 'arsip_kec_kode' => '', 'kodepos_kec_kode' => '', 'ramil_kec_kode' => ''],
        ]);
    }
}
<file_sep><?php namespace App\Http\Requests\Kependudukan;
use App\Http\Requests\Request;
use Illuminate\Contracts\Validation\Validator;
/**
 * Form request validating a citizen ("pribadi") record.
 */
class PribadiFormRequest extends Request
{
    /**
     * Human-readable attribute names substituted into validation messages.
     *
     * @var array
     */
    protected $attrs = [
        'keluarga_id' => 'Keluarga',
        'nik' => 'NIK',
        'title_depan' => 'Title Depan',
        'title_belakang' => 'Title Belakang',
        'nama' => 'Nama',
        'kelamin' => 'Jenis Kelamin',
        'tempat_lahir' => 'Tempat Lahir',
        'tanggal_lahir' => 'Tanggal Lahir',
        'golongan_darah' => 'Golongan Darah',
        'agama' => 'Agama',
        'status_kawin' => 'Status Kawin',
        'status_keluarga' => 'Status Keluarga',
        'pendidikan' => 'Pendidikan',
        'pekerjaan' => 'Pekerjaan',
    ];

    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        return [
            'keluarga_id' => 'required|max:50',
            'nik' => 'required|max:25|unique:pribadi,nik',
            'nama' => 'required|max:50',
            'kelamin' => 'required|max:10',
            'tempat_lahir' => 'required|max:35',
            'tanggal_lahir' => 'required|max:10',
            'golongan_darah' => 'max:2',
            'agama' => 'required|max:12',
            'status_kawin' => 'required|max:12',
            'status_keluarga' => 'required|max:50',
            'title_depan' => 'max:20',
            'title_belakang' => 'max:20',
            'pendidikan' => 'max:35',
            'pekerjaan' => 'max:35',
        ];
    }

    /**
     * Build the validator from the request input, these rules, custom
     * messages and the readable attribute names above.
     *
     * @param $validator
     *
     * @return mixed
     */
    public function validator($validator)
    {
        return $validator->make($this->all(), $this->container->call([$this, 'rules']), $this->messages(), $this->attrs);
    }

    /**
     * Shape validation failures as a JSON-friendly payload containing the
     * first error message for each field.
     *
     * @param Validator $validator
     *
     * @return array
     */
    protected function formatErrors(Validator $validator)
    {
        $errors = $validator->errors();

        // Explicit field list so the response key order stays exactly as
        // API consumers currently receive it.
        $fields = [
            'keluarga_id', 'nik', 'nama', 'kelamin', 'tempat_lahir',
            'tanggal_lahir', 'golongan_darah', 'agama', 'status_kawin',
            'status_keluarga', 'title_depan', 'title_belakang',
            'pendidikan', 'pekerjaan',
        ];

        $validation = [];
        foreach ($fields as $field) {
            $validation[$field] = $errors->first($field);
        }

        return [
            'success' => false,
            'validation' => $validation,
        ];
    }
}
<file_sep><?php
use Illuminate\Database\Seeder;
/**
 * Seeds the "kabkot" (regency/city) table with the single supported
 * entry, Malang (province 35). Existing rows are wiped first.
 */
class KabkotaSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        $malang = [
            'id' => 1,
            'provinsi_id' => '35',
            'kode_kabupaten' => '73',
            'kabupaten' => 'Malang',
            'is_status' => '2',
            'pbb_kabukot_kode' => '',
            'arsip_kabukot_kode' => '',
            'kodepos_kabukot_kode' => '',
            'ramil_kabukot_kode' => '',
        ];

        DB::table('kabkot')->truncate();
        DB::table('kabkot')->insert([$malang]);
    }
}
<file_sep><?php namespace App\Domain\Repositories\Kependudukan;
use App\Domain\Contracts\Cacheable;
use App\Domain\Contracts\Crudable;
use App\Domain\Contracts\Paginable;
use App\Domain\Entities\Kependudukan\PribadiRincian;
use App\Domain\Repositories\AbstractRepository;
use Illuminate\Support\Facades\Log;
/**
 * Repository for PribadiRincian (citizen detail) records with a simple
 * tag-based cache in front of the underlying AbstractRepository queries.
 */
class PribadiRincianRepository extends AbstractRepository implements Crudable, Paginable
{
    /**
     * @var Cacheable
     */
    protected $cache;

    /**
     * @param PribadiRincian $pribadi
     * @param Cacheable      $cache
     */
    public function __construct(PribadiRincian $pribadi, Cacheable $cache)
    {
        $this->model = $pribadi;
        $this->cache = $cache;
    }

    /**
     * Find a record by id, serving from cache when possible (10 min TTL).
     *
     * @param int   $id
     * @param array $columns
     *
     * @return \Illuminate\Database\Eloquent\Model
     */
    public function find($id, array $columns = ['*'])
    {
        // set key
        $key = 'pribadi-rincian-find-' . $id;
        // has section and key
        if ($this->cache->has(PribadiRincian::$tags, $key)) {
            return $this->cache->get(PribadiRincian::$tags, $key);
        }
        // query to sql
        $result = parent::find($id, $columns);
        // store to cache
        $this->cache->put(PribadiRincian::$tags, $key, $result, 10);
        return $result;
    }

    /**
     * Map raw request input to sanitized column values. Shared by
     * create() and update() so the two cannot drift apart. Optional
     * fields are stored as NULL when empty; status defaults to '0'.
     *
     * @param array $data
     *
     * @return array
     */
    private function buildAttributes(array $data)
    {
        return [
            'nik' => e($data['nik']),
            'kelainan_fisik' => (empty($data['kelainan_fisik'])) ? null : e($data['kelainan_fisik']),
            'cacat_fisik' => (empty($data['cacat_fisik'])) ? null : e($data['cacat_fisik']),
            'warga_negara' => e($data['warga_negara']),
            'website' => (empty($data['website'])) ? null : e($data['website']),
            'email' => (empty($data['email'])) ? null : e($data['email']),
            'telp' => (empty($data['telp'])) ? null : e($data['telp']),
            'status' => (empty($data['status'])) ? '0' : e($data['status']),
        ];
    }

    /**
     * Insert a new record and invalidate the cache.
     *
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function create(array $data)
    {
        try {
            // execute sql insert
            $result = parent::create($this->buildAttributes($data));
            // flush cache with tags
            $this->cache->flush(PribadiRincian::$tags);
            return $result;
        } catch (\Exception $e) {
            // store errors to log
            Log::error('class : ' . PribadiRincianRepository::class . ' method : create | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Update an existing record and invalidate the cache.
     *
     * @param       $id
     * @param array $data
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function update($id, array $data)
    {
        try {
            $result = parent::update($id, $this->buildAttributes($data));
            // flush cache with tags
            $this->cache->flush(PribadiRincian::$tags);
            return $result;
        } catch (\Exception $e) {
            // store errors to log
            Log::error('class : ' . PribadiRincianRepository::class . ' method : update | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Delete a record and invalidate the cache.
     *
     * @param $id
     *
     * @return \Symfony\Component\HttpFoundation\Response
     */
    public function delete($id)
    {
        try {
            $result = parent::delete($id);
            // flush cache with tags
            $this->cache->flush(PribadiRincian::$tags);
            return $result;
        } catch (\Exception $e) {
            // store errors to log
            Log::error('class : ' . PribadiRincianRepository::class . ' method : delete | ' . $e);
            return $this->createError();
        }
    }

    /**
     * Paginated listing, searched on 'nik' and ordered by created_at,
     * served from cache when possible (10 min TTL).
     *
     * Fixed: $field was a required parameter declared after optional ones
    * (deprecated in PHP 8). It now defaults to 'nik'; existing callers
     * that pass it are unaffected. NOTE(review): the body hardcodes 'nik'
     * and never reads $field — confirm whether it should be honored.
     *
     * @param int    $limit
     * @param int    $page
     * @param array  $column
     * @param string $field
     * @param string $search
     *
     * @return \Illuminate\Pagination\Paginator
     */
    public function getByPage($limit = 10, $page = 1, array $column = ['*'], $field = 'nik', $search = '')
    {
        // set key
        $key = 'pribadi-rincian-get-by-page-' . $page . $limit . $search;
        // has section and key
        if ($this->cache->has(PribadiRincian::$tags, $key)) {
            return $this->cache->get(PribadiRincian::$tags, $key);
        }
        // query to sql
        $result = parent::getByPageOrderBy($limit, $page, $column, 'nik', $search, 'created_at');
        // store to cache
        $this->cache->put(PribadiRincian::$tags, $key, $result, 10);
        return $result;
    }
}
<file_sep><?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration for the "kecamatan" (district) table.
 *
 * NOTE(review): KecamatanSeeder inserts '' for the *_kec_kode columns
 * (declared integer here) and supplies no value for the non-nullable
 * 'status' string column — confirm the seeder and this schema agree.
 */
class CreateTableKecamatan extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('kecamatan', function (Blueprint $blueprint) {
            $blueprint->engine = 'InnoDB';
            $blueprint->increments('id');
            $blueprint->integer('kabupaten_id');
            $blueprint->string('kode_kecamatan', 2);
            $blueprint->string('kecamatan', 200);
            $blueprint->string('status');
            $blueprint->integer('pbb_kec_kode');
            $blueprint->integer('arsip_kec_kode');
            $blueprint->integer('kodepos_kec_kode');
            $blueprint->integer('ramil_kec_kode');
            $blueprint->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('kecamatan');
    }
}
<file_sep>## Aplikasi Kependudukan untuk Hackathon Merdeka 2.0
<file_sep><?php namespace App\Http\Requests\Kependudukan;
use App\Http\Requests\Request;
use Illuminate\Contracts\Validation\Validator;
/**
 * Form request validating a family ("keluarga") record.
 */
class KeluargaFormRequest extends Request
{
    /**
     * Human-readable attribute names substituted into validation messages.
     * Also defines the field order of the error payload below.
     *
     * @var array
     */
    protected $attrs = [
        'nik_kk' => 'NIK KK',
        'nama_kk' => 'Nama KK',
        'alamat' => 'Alamat',
        'rt' => 'RT',
        'rw' => 'RW',
        'dusun' => 'Dusun',
        'telepon' => 'Telepon',
    ];

    /**
     * Get the validation rules that apply to the request.
     *
     * @return array
     */
    public function rules()
    {
        return [
            'nik_kk' => 'required|max:16|unique:keluarga,nik_kk',
            'nama_kk' => 'required|max:60',
            'alamat' => 'required|max:225',
            'rt' => 'required|max:3',
            'rw' => 'required|max:3',
            'dusun' => 'required|max:50',
            'telepon' => 'max:15',
        ];
    }

    /**
     * Build the validator from the request input, these rules, custom
     * messages and the readable attribute names above.
     *
     * @param $validator
     *
     * @return mixed
     */
    public function validator($validator)
    {
        return $validator->make($this->all(), $this->container->call([$this, 'rules']), $this->messages(), $this->attrs);
    }

    /**
     * Shape validation failures as a JSON-friendly payload containing the
     * first error message for each field, in $attrs order (which matches
     * the order consumers already receive).
     *
     * @param Validator $validator
     *
     * @return array
     */
    protected function formatErrors(Validator $validator)
    {
        $errors = $validator->errors();

        $validation = [];
        foreach (array_keys($this->attrs) as $field) {
            $validation[$field] = $errors->first($field);
        }

        return [
            'success' => false,
            'validation' => $validation,
        ];
    }
}
| e31227c3cad13993cc8d768526245e1351e52c9c | [
"Markdown",
"PHP"
] | 27 | PHP | bodrexsquad/kependudukan | bd782aec75392582763f4dacc3bb759fc5b9023d | 8c05a7ad4f6472ec725650ce8b5c36af7073ed9b | |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
namespace CanariosMenu
{
/// <summary>
/// Lógica de interacción para MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
public MainWindow()
{
InitializeComponent();
}
private void MenuAutosCrear_Click(object sender, RoutedEventArgs e)
{
}
private void MenuAutosConsultar_Click(object sender, RoutedEventArgs e)
{
}
private void MenuAutosEditar_Click(object sender, RoutedEventArgs e)
{
}
private void MenuPropietariosAdicionar_Click(object sender, RoutedEventArgs e)
{
}
private void MenuPropietariosConsultar_Click(object sender, RoutedEventArgs e)
{
}
private void MenuPropietariosModificar_Click(object sender, RoutedEventArgs e)
{
}
private void MenuAyudaAcercaDe_Click(object sender, RoutedEventArgs e)
{
Acercade acercade = new Acercade();
acercade.Owner = this;
acercade.ShowDialog();
}
}
}
| c7749b31df524f420eb113742f04f4c7339fcdbf | [
"C#"
] | 1 | C# | ChristopherSanchez99/EjercicioVs_MenuCanarios | 67c6241bcce3e0efd562e7ad7a5e019280b9465b | 6e6af1944a6b0deaa5918731578998094a5a06c5 | |
refs/heads/master | <repo_name>stochanskyi/AI_Lab_3<file_sep>/main.py
from task import *
from tkinter import *
from tkmacosx import Button as MacosButton
def calculate_score():
k = int(k_input.get())
if k <= 0:
return
score = get_learning_score(neighbours_classifier(k))
score_label['text'] = 'Learning score: ' + str(int(score * 100)) + '%'
root = Tk()
root.title("Lab 2")
root.geometry("1300x800")
# Task one layout
task_one_frame = Frame(root)
task_one_frame.place(relx=0.05, rely=0.05, relwidth=0.5, relheight=1)
task_one_title = Label(task_one_frame, text='TASK', font='Helvetica 14 bold')
task_one_title.pack()
task_one_description = Label(task_one_frame, text='Please enter k that is number of neighbours that will be used to '
'calculate item class\n(k > 0)')
task_one_description.pack()
input_frame = Frame(task_one_frame)
input_frame.pack(pady=20)
k_label = Label(input_frame, text='k', fg='grey')
k_label.pack(side=LEFT)
k_input = Entry(input_frame)
k_input.pack(side=LEFT)
button_zero_to_one = MacosButton(task_one_frame, text='Check score for k', command=calculate_score)
button_zero_to_one.pack(pady=10)
score_label = Label(task_one_frame)
score_label.pack(pady=20)
# Task two layout
task_two_frame = Frame(root)
task_two_frame.place(relx=0.5, rely=0.05, relwidth=0.5, relheight=1)
Label(task_two_frame, text='Classes description', font='Helvetica 14 bold').pack()
classes_label = Label(task_two_frame)
classes_label.pack(pady=10)
classes_label['text'] = '\n'.join(map(str, newgroups_bunch.target_names))
Label(task_two_frame, text='Top 5 input data\nFormat: (i, j) value', font='Helvetica 14 bold').pack()
data_label = Label(task_two_frame)
data_label.pack()
input_data = newgroups_bunch.data
counter = 0
for i, j in zip(*input_data.nonzero()):
data_label['text'] += '\n' + "(%d, %d) %s" % (i, j, input_data[i, j])
counter += 1
if counter >= 5:
break
root.mainloop()
<file_sep>/task.py
import sklearn
import sklearn.datasets
import sklearn.model_selection
import sklearn.neighbors
newgroups_bunch = sklearn.datasets.fetch_20newsgroups_vectorized()
x_train, x_test, y_train, y_test = sklearn.model_selection.train_test_split(
newgroups_bunch.data,
newgroups_bunch.target,
random_state=0)
def newgroups_target_names():
return newgroups_bunch.target_names
def newgroups_data():
return newgroups_bunch.data
def neighbours_classifier(neighbours_count):
knn = sklearn.neighbors.KNeighborsClassifier(n_neighbors=neighbours_count)
knn.fit(x_train, y_train)
return knn
def get_learning_score(knn):
return knn.score(x_test, y_test)
| 62836640bcdd1c25821026ef8bcf275595816ac1 | [
"Python"
] | 2 | Python | stochanskyi/AI_Lab_3 | 958aad9421ede4341dc10ed3890bf2c43d329191 | 22d49c1817730cbf22067eaae1cbf397e6427e6c | |
refs/heads/master | <file_sep>class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
def move
ActionController::Parameters.permit_all_parameters = true
row = real_params[:row].to_i
column = real_params[:column].to_i
board = JSON.parse(real_params[:board].as_json)
if board[row][column].blank?
board[row][column] = 'O'
board = do_computer_move(board)
winner = determine_winner(board)
end
render json: { move: { board: board, winner: winner } }
end
def do_computer_move(board)
3.times.each do |row|
3.times.each do |column|
if board[row][column].blank?
board[row][column] = 'X'
return board
end
end
end
board
end
def determine_winner(board)
3.times.each do |x|
if board[x][0] == board[x][1] && board[x][0] == board[x][2]
return board[x][0]
end
end
3.times.each do |y|
if board[0][y] == board[1][y] && board[0][y] == board[2][y]
return board[0][y]
end
end
if board[0][0] == board[1][1] && board[0][0] == board[2][2]
return board[0][0]
end
if board[0][2] == board[1][1] && board[0][2] == boaad[2][0]
return board[0][2]
end
end
def real_params
params.require(:move).permit(:row, :column, :board)
end
end
| 565657c5de1dbeb10357b6a88bd3dfcd1e1f8886 | [
"Ruby"
] | 1 | Ruby | mshappe/ttt-dobt | 5d7a3a150799b8048964d08a4991afd732809df1 | 001afa4308989b3be94288717ae5d1b65d101274 | |
refs/heads/master | <file_sep># Unoffical AdminTool2 API!
This is a unoffical API for Tele2's customercare tool AdminTool2
This API has been created to make easy automatic interactions with the AdminTool2 WEB GUI
WORK IN PROGRESS!!!!
<file_sep>#!env/bin/python
# -*- coding: utf-8 -*-
"""
Made By <NAME>!
Unofficial API for Tele2's customerservice tool AdminTool2
All functions return a dictionary where status = statuscode
"""
import urllib, urllib2, re, string
from bs4 import BeautifulSoup
from argparse import ArgumentParser
from getpass import getpass
from sys import exit
class SessionHandler():
def __init__(self, username, password, baseurl='https://admintool2.tele2.se'):
self.username = username
self.password = <PASSWORD>
self.baseurl = baseurl
self.sessionstate = False
def login(self):
"""
Login function used for establishing first connection to Admintool2.
Stores the
"""
uri='/AdminTool2/base/j_security_check'
uriencoding={'j_username':self.username,
'j_password':<PASSWORD>,
'&__checkbox_chksaveuser':'false',
'login.button':'Log+in'
}
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
urllib2.install_opener(opener)
try:
urllib2.urlopen(self.baseurl + '/AdminTool2/base/')
except (urllib2.HTTPError), e:
return {'status':e.code}
try:
f = urllib2.urlopen(self.baseurl + uri,
urllib.urlencode(uriencoding))
bs = BeautifulSoup(f, 'html.parser')
if bs.find(id='errdiv') is not None:
return {'status':503}
else:
self.sessionstate = True
return {'status':200}
except (urllib2.HTTPError), e:
return {'status':e.code}
class Access():
"""
Handles Everything related to Access searches, needs a valid SessionHandler at __init__
"""
def __init__(self, SessionHandler):
self.SessionHandler = SessionHandler
def find_by_msisdn(self, msisdn):
"""
Searches for a access, returns the customer as a dictionary, or error message
Login before calling this!
"""
#Check if logged in
if not self.SessionHandler.sessionstate:
return {'status':503}
uri='/AdminTool2/base/Access.search.action'
uriencoding={'searchAccessName':msisdn,
'searchUserName':'',
'searchAccessType':'all',
'button.find':'Find HTTP/1.1'}
try:
f = urllib2.urlopen(self.SessionHandler.baseurl + uri,urllib.urlencode(uriencoding))
if urllib2.getcode() == 403:
return {'status':403}
bs = BeautifulSoup(f, "html.parser")
if 'Service list' in bs.text: # Only found if there are no access found
return self.parse_single_access(bs)
elif bs.find('tr', {'class','odd'}) is not None: # Only found if there are multiple accesses on the same MSISDN
return self.return_multiple_access(bs)
else:
return {'status':404}
except (urllib2.HTTPError), e:
return {'status':e.code}
def parse_single_access(self,bs):
if 'Service list' in bs.text:
dataobject = {}
tables = bs.find_all('table', {'class','table'})
for count, table in enumerate(tables):
# Each access has 3 tables, 2 with account information, 1 with service information, This if statement handles the account information!
if count < 2:
for tr in table.find_all('tr', recursive=False):
td = tr.find_all('td', recursive=False)
if len(td) == 2:
key=td[0].text.strip().lower()
# Regexp... Making code unreadable since 1980
# Multiple substituions separated by |
key = re.compile('(\:|\(.*?\)|\ |\.|\n)').sub('', key)
data=td[1].text.strip()
data = re.compile('(\n|\ +)').sub('',data)
#Special case due to reprovision button in the middle of the table
if key =='accessname':
data = re.sub("\D", "", data)
if key != "":
dataobject[key]=data
# Only handle cases where td[0]==key & td[1]==value
else:
continue
# Else statement for when we reach the service list
else:
# If access has no services return empty list
if table.find('tr', {'class', 'empty'}) is not None:
dataobject['services']=[]
else:
dataobject['services'] = []
tbody = table.find('tbody', recursive=False)
trs = tbody.find_all('tr', recursive=False)
for tr in trs:
tds = tr.find_all('td', recursive=False)
# Regexp for removing unwanted characters
rep = re.compile('(\r|\n|\t|\ +)')
serviceid = re.sub(rep,'', tds[0].text)
servicename = re.sub(rep,'', tds[1].text)
product = re.sub(rep,'', tds[2].text)
servicetype = re.sub(rep,'', tds[3].text)
billing = re.sub(rep,'', tds[4].text)
provisioning = re.sub(rep,'', tds[5].text)
abuse = re.sub(rep,'', tds[6].text)
capusername = re.sub(rep,'', tds[7].text)
# Appends one new list object for each service
dataobject['services'].append({'serviceid':serviceid, 'servicename':servicename,
'product':product, 'servicetype':servicetype, 'billing':billing,'provisioning':provisioning,
'abuse':abuse,'capusername':capusername})
dataobject['status']=200
return dataobject
else:
return {'status':404}
def find_by_access_id(self, accessid):
"""
This method is called by takes billing id, returns access dictionary or 404 for not found, or 503 for SessionHandler error
"""
if not self.SessionHandler.sessionstate:
return {'status':503}
try:
uri='/AdminTool2/base/Access.details.action'
uriencoding={'detailsAccessId':accessid}
f = urllib2.urlopen(self.SessionHandler.baseurl + uri,urllib.urlencode(uriencoding))
if f.getcode() == 403:
return {'status':403}
bs = BeautifulSoup(f, "html.parser")
return self.parse_single_access(bs)
except (urllib2.HTTPError), e:
return {'status':e.code}
def return_multiple_access(self, bs):
"""
In cases where a single access can't be determined, this function returns all access with the billingID
"""
multipleaccessobject={'access':[], 'status':303}
trs = bs.find('tr',{'class', 'odd'}).find_parent().find_all('tr')
reg = re.compile('(\r|\n|\t|\ +)')
for tr in trs:
tds = tr.find_all('td')
accessid = re.sub(reg,'',tds[0].text)
product = re.sub(reg,'',tds[1].text)
accesstype = re.sub(reg,'',tds[2].text)
billing = re.sub(reg,'',tds[3].text)
provisioning = re.sub(reg,'',tds[4].text)
abuse = re.sub(reg,'',tds[5].text)
multipleaccessobject['access'].append({'accessid':accessid, 'product':product, 'accesstype':accesstype, 'billing':billing, 'provisioning':provisioning, 'abuse':abuse})
return multipleaccessobject
class Customer():
def __init__(self, SessionHandler):
self.SessionHandler = SessionHandler
def parse_single_customer(self, bs):
reg = re.compile('(\:|\ |\.|\n|\ +|\r|\t)')
customerObject = {}
customerObject['accesslist'] = []
customerObject['accessindependentlist'] = []
mainTable = bs.find('table', {'class','wwFormTable'})
billingInput = mainTable.find(id='Customer_capBillingData_id')
billingID = billingInput['value']
customerObject['billingID'] = billingID
trs = mainTable.find_all('tr', recursive=True)
for tr in trs:
tds = tr.find_all('td',recursive=True)
key = re.compile('(\:|\(.*?\)|\ |\.|\n|\ +)').sub('', tds[0].text.lower())
value = tds[1].text.strip()
# Removes the spacer columns
if key != '':
# A customer can have up to 3 addresses, these are free text fields for our customercare groups, I've never seen them used properly
# So i'm just ignoring them instead if we already have 1 customer address
if customerObject.has_key(key) == False:
customerObject[key]=value
elif customerObject[key]=='':
customerObject[key]=value
mainDiv = bs.find('div', {'class','col-md-10'})
displays = mainDiv.find_all('div', {'class', 'display'})
accessList = displays[0].find('tbody')
accessIndependetList = displays[1].find('tbody')
for trs in accessList.find_all('tr'):
tds = trs.find_all('td')
if len(tds) != 1:
accessName = re.sub(reg, '', tds[1].text)
product = re.sub(reg,'',tds[2].text)
accessType = re.sub(reg,'',tds[3].text)
billing = re.sub(reg,'',tds[4].text)
provisioning = re.sub(reg,'',tds[5].text)
abuse = re.sub(reg,'',tds[6].text)
customerObject['accesslist'].append({'accessName':accessName, 'product':product,
'accessType':accessType, 'billing':billing,'provisioning':provisioning,'abuse':abuse})
for trs in accessIndependetList.find_all('tr'):
tds = trs.find_all('td')
if len(tds) != 1:
serviceName = re.sub(reg, '', tds[1].text)
product = re.sub(reg, '', tds[2].text)
servicename = re.sub(reg, '', tds[3].text)
billing = re.sub(reg, '', tds[4].text)
provisioning = re.sub(reg, '', tds[5].text)
abuse = re.sub(reg, '', tds[6].text)
capusername = re.sub(reg, '', tds[7].text)
customerObject['accessindependentlist'].append({'serviceName':serviceName, 'product':product, 'servicename':servicename,
'billing':billing, 'provisioning':provisioning, 'abuse':abuse, 'capusername':capusername})
customerObject['status']=200
return customerObject
def find_by_customer_id(self, customerid):
uri = '/AdminTool2/base/Customer.search.action'
uriencoding = {'submitted':'true',
'formAdvanced':'false',
'ownerId':customerid,
'ssn':'',
'surname':'',
'address1':'',
'company':'',
'searchLegalentity':'',
'button.find':'Find HTTP/1.1'}
try:
# Check if user is logged in
if not self.SessionHandler.sessionstate:
return {'status':503}
f = urllib2.urlopen(self.SessionHandler.baseurl + uri, urllib.urlencode(uriencoding))
if f.getcode() == 403:
return {'status':403}
bs = BeautifulSoup(f, "html.parser")
# class empty is only available when the search return no customer
if bs.find('td', {'class', 'empty'}) is not None:
return {'status':404}
else:
return self.parse_single_customer(bs)
except (urllib2.HTTPError), e:
return {'status':e.code()}
def find_by_billing_id(self, billingid):
uri = '/AdminTool2/base/Customer.details.action'
uriencoding = {'billingId':billingid}
try:
if not self.SessionHandler.sessionstate:
return {'status':503}
f = urllib2.urlopen(self.SessionHandler.baseurl + uri, urllib.urlencode(uriencoding))
if f.getcode() == 403:
return {'status':403}
bs = BeautifulSoup(f, "html.parser")
parent = bs.find('table', {'class', 'wwFormTable'}).find_parent()
for trs in parent.find_all('tr'):
tds = trs.find_all('td')
if len(tds) == 2:
if 'Customer ID' in tds[0].text.strip() and tds[1].text.strip() == '':
return {'status':404}
else:
return self.parse_single_customer(bs)
except (urllib2.HTTPError), e:
return {'status':e.code()}
def find_by_ssn(self,ssn):
pass
# --------TODO : Remove when API is finished!
if __name__=="__main__":
argparser = ArgumentParser()
argparser.add_argument("-u", "--username", dest="username", required=True, help="User to use in J_Auth")
args = argparser.parse_args()
password = getpass("Please enter password for %s: " % args.username)
s = SessionHandler(args.username, password)
s.login()
a = Access(s)
c = Customer(s)
#received = a.find_by_access_id('35998200')
#received = c.find_by_customer_id('100')
received = c.find_by_billing_id('314')
print received
| f99cea05b6a81e2b5c1063b8542d62554da5e64d | [
"Markdown",
"Python"
] | 2 | Markdown | Pixxle/API-admintool2 | 2ac977eca677db1318d80b32927867b34421d763 | 4ca3cfd00ebf015b1410408a4e00b167a41df2c4 | |
refs/heads/master | <file_sep><?php
$value = array (
'pay' => '支持货到付款,请准备好零钱,谢谢合作。',
);
?><file_sep><?php include template("header");?>
<style>
.whybind{margin:60px 10px 10px;padding:10px;zoom:1;font-size:12px;background:#fffbcc;border:1px solid #ffec19;}
.whybind h3{margin-bottom:5px;font-size:14px;}
.whybind ol{padding-left:10px;}
.whybind ol li{list-style-position:inside;list-style-type:decimal;}
</style>
<div id="bdw" class="bdw">
<div id="bd" class="cf">
<div id="consult">
<div class="dashboard" id="dashboard">
<ul><?php echo current_account('/account/settings.php'); ?></ul>
</div>
<div id="content">
<div class="box clear">
<div class="box-top"></div>
<div class="box-content">
<div class="head">
<h2>手机绑定</h2>
<ul class="filter">
<li><a href="/account/settings.php">帐户设置</a></li>
<li><a href="/credit/index.php">帐户余额</a></li>
<li><a href="/account/myask.php">我的问答</a></li>
<li class="current"><a href="/account/setbinds.php">手机绑定</a></li>
<li><a href="/account/setaddress.php">收货地址</a></li>
</ul>
</div>
<div class="sect consult-list">
<?php if(!$havemobile){?>
<h3>尚未绑定手机号,请先绑定:</h3>
<div style="margin-top:20px;"><a href="javascript:;" onclick="X.bindajax('sms','loginbindmobile','<?php echo $login_user_id; ?>');">点击绑定手机号码</a></div>
<?php } else { ?>
<h3>已绑定的手机号:</h3>
<table cellspacing="0" cellpadding="0" border="0" class="coupons-table" id="bind-mobile-list">
<tr class="alt"><td class="mobile"><strong><?php echo $havemobile['tools']; ?></strong></td></tr></table>
<div class="clear"></div>
<h3>绑定新手机号:</h3>
<div style="margin-top:20px;"><a href="javascript:;" onclick="X.bindajax('sms','loginbindmobile','<?php echo $login_user_id; ?>');">点击绑定新手机号</a></div>
<span class="hint">注意:绑定新手机号将会清空所有已绑定的手机号</span>
<?php }?>
<div>
<div class="whybind">
<h3>为什么要绑定您的手机呢?</h3>
<ol>
<li>手机找回密码(遗忘密码的时候通过手机轻易找回)</li>
<li>绑定手机后,可以购买特殊商品</li>
<li>便于我们为您提供更好的服务</li>
</ol>
</div>
</div>
</div>
</div>
<div class="box-bottom"></div>
</div>
</div>
<div id="sidebar">
<?php include template("block_side_invite");?>
</div>
</div>
</div> <!-- bd end -->
</div> <!-- bdw end -->
<?php include template("footer");?>
<file_sep><div id="order-pay-dialog" class="order-pay-dialog-c" style="width:600px;">
<h3><span id="order-pay-dialog-close" class="close" onclick="return X.boxClose();">关闭</span></h3>
<div style="overflow-x:hidden;padding:10px;" id="dialog-order-id" oid="<?php echo $order['id']; ?>">
<table width="96%" align="center" class="coupons-table">
<tr><td><b>项目列表:</b></td>
<td>
<?php if(is_array($teams)){foreach($teams AS $index=>$one) { ?>
<input type="checkbox" value="<?php echo $one['id']; ?>" name="team" > <?php echo mb_strimwidth($one['product'],0,90,'...'); ?>
<br />
<?php }}?>
</td>
</tr>
<tr>
<td><button style="padding:0;" id="dialog_subscribe_button_id" onclick="if(confirm('发送邮件过程中,请耐心等待,同意吗?')){this.disabled=true;return X.misc.noticeall(0);}">发送邮件 (<?php echo $subcount; ?>)</button>
</td>
<td></td>
</tr>
</table>
</div>
</div>
<script type="text/javascript">
X.misc.noticeall = function(nid) {
var teams =[];
$('input[name="team"]:checked').each(function(){
teams.push($(this).val());
});
return X.get(WEB_ROOT + '/ajax/subscribe.php?action=noticesubscribe&tid='+teams+'&nid='+nid);
};
</script> | 951ea2fb49b2de7e7e076bd29e2a85c4c90d12c0 | [
"PHP"
] | 3 | PHP | jammp/food | b37ccab4370a26aaada552029569c30037692e76 | 42867a9a687ef061db609bb17379570dd6356f5b | |
refs/heads/master | <repo_name>Bucknalla/edge-node-manager<file_sep>/device/device.go
package device
import (
"encoding/json"
"fmt"
"github.com/resin-io/edge-node-manager/board"
"github.com/resin-io/edge-node-manager/board/esp8266"
"github.com/resin-io/edge-node-manager/board/microbit"
"github.com/resin-io/edge-node-manager/board/nrf51822dk"
"github.com/resin-io/edge-node-manager/device/hook"
"github.com/resin-io/edge-node-manager/device/status"
"github.com/resin-io/edge-node-manager/micro/nrf51822"
"github.com/resin-io/edge-node-manager/supervisor"
)
type Device struct {
Board board.Interface `json:"-"`
ApplicationUUID int `storm:"index"`
BoardType board.Type `storm:"index"`
Name string `storm:"index"`
LocalUUID string `storm:"index"`
ResinUUID string `storm:"id,unique,index"`
Commit string `storm:"index"`
TargetCommit string `storm:"index"`
Status status.Status `storm:"index"`
Config map[string]interface{} `storm:"index"`
TargetConfig map[string]interface{} `storm:"index"`
Environment map[string]interface{} `storm:"index"`
TargetEnvironment map[string]interface{} `storm:"index"`
RestartFlag bool `storm:"index"`
DeleteFlag bool `storm:"index"`
}
func (d Device) String() string {
return fmt.Sprintf(
"Application UUID: %d, "+
"Board type: %s, "+
"Name: %s, "+
"Local UUID: %s, "+
"Resin UUID: %s, "+
"Commit: %s, "+
"Target commit: %s, "+
"Status: %s, "+
"Config: %v, "+
"Target config: %v, "+
"Environment: %v, "+
"Target environment: %v, "+
"Restart: %t, "+
"Delete: %t",
d.ApplicationUUID,
d.BoardType,
d.Name,
d.LocalUUID,
d.ResinUUID,
d.Commit,
d.TargetCommit,
d.Status,
d.Config,
d.TargetConfig,
d.Environment,
d.TargetEnvironment,
d.RestartFlag,
d.DeleteFlag)
}
func New(applicationUUID int, boardType board.Type, name, localUUID, resinUUID string) Device {
return Device{
ApplicationUUID: applicationUUID,
BoardType: boardType,
Name: name,
LocalUUID: localUUID,
ResinUUID: resinUUID,
Commit: "",
TargetCommit: "",
Status: status.OFFLINE,
}
}
func (d *Device) PopulateBoard() error {
log := hook.Create(d.ResinUUID)
switch d.BoardType {
case board.MICROBIT:
d.Board = microbit.Microbit{
Log: log,
Micro: nrf51822.Nrf51822{
Log: log,
LocalUUID: d.LocalUUID,
Firmware: nrf51822.FIRMWARE{},
NotificationChannel: make(chan []byte),
},
}
case board.NRF51822DK:
d.Board = nrf51822dk.Nrf51822dk{
Log: log,
Micro: nrf51822.Nrf51822{
Log: log,
LocalUUID: d.LocalUUID,
Firmware: nrf51822.FIRMWARE{},
NotificationChannel: make(chan []byte),
},
}
case board.ESP8266:
d.Board = esp8266.Esp8266{
Log: log,
LocalUUID: d.LocalUUID,
}
default:
return fmt.Errorf("Unsupported board type")
}
return nil
}
// Sync device with resin to ensure we have the latest values for:
// - Device name
// - Device target config
// - Device target environment
// Only target state is refreshed here; current Config/Environment/Commit are
// owned locally and left untouched.
func (d *Device) Sync() []error {
	bytes, errs := supervisor.DependentDeviceInfo(d.ResinUUID)
	if errs != nil {
		return errs
	}
	var temp Device
	if err := json.Unmarshal(bytes, &temp); err != nil {
		// Ignore the error here as it means the device we are trying
		// to sync has been deleted
		return nil
	}
	d.Name = temp.Name
	d.TargetConfig = temp.TargetConfig
	d.TargetEnvironment = temp.TargetEnvironment
	return nil
}
<file_sep>/radio/bluetooth/bluetooth.go
package bluetooth
import (
"fmt"
"os"
"os/exec"
"strings"
"time"
"github.com/pkg/errors"
"golang.org/x/net/context"
log "github.com/Sirupsen/logrus"
"github.com/currantlabs/ble"
"github.com/currantlabs/ble/linux"
"github.com/currantlabs/ble/linux/hci"
"github.com/currantlabs/ble/linux/hci/cmd"
"github.com/resin-io/edge-node-manager/config"
)
var (
	initialised  bool                // true once the RPi3 radio has been attached via hciattach
	doneChannel  chan struct{}       // closed when the currently connected client disconnects
	name         *ble.Characteristic // GAP device-name characteristic (0x2A00), set up in init
	shortTimeout time.Duration       // timeout for instantaneous bluetooth operations
	longTimeout  time.Duration       // timeout for long-running bluetooth operations
)
// Initialise brings up the bluetooth stack and installs a new linux HCI
// device as the package-wide default ble device.
// On a Raspberry Pi 3 the UART-attached radio must first be bound with
// hciattach; this is retried up to 3 times because the command can fail
// spuriously shortly after boot. If all attempts fail, execution still falls
// through to device creation, which will then surface the error.
func Initialise() error {
	if !initialised && os.Getenv("RESIN_DEVICE_TYPE") == "raspberrypi3" {
		log.Info("Initialising bluetooth")
		for i := 1; i <= 3; i++ {
			if err := exec.Command("bash", "-c", "/usr/bin/hciattach /dev/ttyAMA0 bcm43xx 921600 noflow -").Run(); err == nil {
				if err := exec.Command("bash", "-c", "hciconfig hci0 up").Run(); err != nil {
					return err
				}
				log.Info("Initialised bluetooth")
				initialised = true
				break
			}
		}
		// Small sleep to give the bluetooth interface time to settle
		time.Sleep(shortTimeout)
	}
	device, err := linux.NewDevice()
	if err != nil {
		return err
	}
	if err := updateLinuxParam(device); err != nil {
		return err
	}
	ble.SetDefaultDevice(device)
	return nil
}
// Cleanup detaches the default ble device, releasing the radio for other
// users (e.g. the wifi/esp8266 path or user code).
func Cleanup() error {
	return ble.Stop()
}
// Connect dials the device with BLE address id (random address type),
// negotiates the maximum MTU, and arms doneChannel so that a later
// Disconnect can block until the link has actually dropped.
func Connect(id string) (ble.Client, error) {
	client, err := ble.Dial(ble.WithSigHandler(context.WithTimeout(context.Background(), longTimeout)), hci.RandomAddress{ble.NewAddr(id)})
	if err != nil {
		return nil, err
	}
	if _, err := client.ExchangeMTU(ble.MaxMTU); err != nil {
		return nil, err
	}
	doneChannel = make(chan struct{})
	go func() {
		// Signal Disconnect once the peripheral drops the link
		<-client.Disconnected()
		close(doneChannel)
	}()
	return client, nil
}
// Disconnect clears all notification subscriptions, cancels the connection,
// and blocks until the disconnect event armed by Connect has fired.
func Disconnect(client ble.Client) error {
	if err := client.ClearSubscriptions(); err != nil {
		return err
	}
	if err := client.CancelConnection(); err != nil {
		return err
	}
	// Wait for the goroutine started in Connect to observe the disconnect
	<-doneChannel
	return nil
}
// WriteCharacteristic writes value to the given characteristic, optionally
// without waiting for a response (noRsp), failing if the write does not
// complete within shortTimeout.
// The result channel is buffered (capacity 1) so that the worker goroutine
// can always deliver its result and exit even after the timeout branch has
// abandoned the wait; an unbuffered channel would leak the goroutine.
func WriteCharacteristic(client ble.Client, characteristic *ble.Characteristic, value []byte, noRsp bool) error {
	err := make(chan error, 1)
	go func() {
		err <- client.WriteCharacteristic(characteristic, value, noRsp)
	}()
	select {
	case done := <-err:
		return done
	case <-time.After(shortTimeout):
		return fmt.Errorf("Write characteristic timed out")
	}
}
// ReadCharacteristic reads the current value of the given characteristic,
// failing if the read does not complete within shortTimeout.
// The result channel is buffered (capacity 1) so that the worker goroutine
// can always deliver its result and exit even after the timeout branch has
// abandoned the wait; an unbuffered channel would leak the goroutine.
func ReadCharacteristic(client ble.Client, characteristic *ble.Characteristic) ([]byte, error) {
	type Result struct {
		Val []byte
		Err error
	}
	result := make(chan Result, 1)
	go func() {
		val, err := client.ReadCharacteristic(characteristic)
		result <- Result{val, err}
	}()
	select {
	case done := <-result:
		return done.Val, done.Err
	case <-time.After(shortTimeout):
		return nil, fmt.Errorf("Read characteristic timed out")
	}
}
// WriteDescriptor writes value to the given descriptor, failing if the write
// does not complete within shortTimeout.
// The result channel is buffered (capacity 1) so that the worker goroutine
// can always deliver its result and exit even after the timeout branch has
// abandoned the wait; an unbuffered channel would leak the goroutine.
func WriteDescriptor(client ble.Client, descriptor *ble.Descriptor, value []byte) error {
	err := make(chan error, 1)
	go func() {
		err <- client.WriteDescriptor(descriptor, value)
	}()
	select {
	case done := <-err:
		return done
	case <-time.After(shortTimeout):
		return fmt.Errorf("Write descriptor timed out")
	}
}
// Scan performs a passive BLE scan for up to longTimeout and returns the set
// of addresses of advertisers whose local name matches id (case-insensitive).
// DeadlineExceeded/Canceled from ble.Scan are the expected ways a scan ends
// and are not treated as failures.
// NOTE(review): the devices map is written by the collector goroutine and
// read by the caller after ble.Scan returns; presumed safe because the
// collector has exited by then — confirm against ble.Scan's guarantees.
func Scan(id string) (map[string]struct{}, error) {
	devices := make(map[string]struct{})
	advChannel := make(chan ble.Advertisement)
	ctx := ble.WithSigHandler(context.WithTimeout(context.Background(), longTimeout))
	go func() {
		for {
			select {
			case <-ctx.Done():
				return
			case adv := <-advChannel:
				if strings.EqualFold(adv.LocalName(), id) {
					var s struct{}
					devices[adv.Address().String()] = s
				}
			}
		}
	}()
	err := ble.Scan(ctx, false, func(adv ble.Advertisement) { advChannel <- adv }, nil)
	if errors.Cause(err) != context.DeadlineExceeded && errors.Cause(err) != context.Canceled {
		return devices, err
	}
	return devices, nil
}
// Online scans for up to longTimeout and reports whether a device advertising
// from address id was seen; the scan is cancelled early as soon as a match is
// found. DeadlineExceeded/Canceled are expected scan terminations, not
// failures.
func Online(id string) (bool, error) {
	online := false
	advChannel := make(chan ble.Advertisement)
	ctx, cancel := context.WithCancel(context.Background())
	ctx = ble.WithSigHandler(context.WithTimeout(ctx, longTimeout))
	go func() {
		for {
			select {
			case <-ctx.Done():
				return
			case adv := <-advChannel:
				if strings.EqualFold(adv.Address().String(), id) {
					online = true
					// Stop scanning early - we found the device
					cancel()
				}
			}
		}
	}()
	err := ble.Scan(ctx, false, func(adv ble.Advertisement) { advChannel <- adv }, nil)
	if errors.Cause(err) != context.DeadlineExceeded && errors.Cause(err) != context.Canceled {
		return online, err
	}
	return online, nil
}
// GetName connects to the device with address id, reads its GAP device-name
// characteristic (prepared in init), disconnects, and returns the name.
func GetName(id string) (string, error) {
	client, err := Connect(id)
	if err != nil {
		return "", err
	}
	resp, err := ReadCharacteristic(client, name)
	if err != nil {
		return "", err
	}
	if err := Disconnect(client); err != nil {
		return "", err
	}
	return string(resp), nil
}
// GetCharacteristic builds a ble.Characteristic from its UUID string,
// property mask, and attribute/value handles.
func GetCharacteristic(uuid string, property ble.Property, handle, vhandle uint16) (*ble.Characteristic, error) {
	parsed, err := ble.Parse(uuid)
	if err != nil {
		return nil, err
	}
	c := ble.NewCharacteristic(parsed)
	c.Property, c.Handle, c.ValueHandle = property, handle, vhandle
	return c, nil
}
// GetDescriptor builds a ble.Descriptor from its UUID string and attribute
// handle.
func GetDescriptor(uuid string, handle uint16) (*ble.Descriptor, error) {
	parsed, err := ble.Parse(uuid)
	if err != nil {
		return nil, err
	}
	d := ble.NewDescriptor(parsed)
	d.Handle = handle
	return d, nil
}
// init loads the short/long bluetooth timeouts from config and prepares the
// GAP device-name characteristic (UUID 0x2a00) used by GetName. Any failure
// here is fatal since the radio helpers cannot work without these values.
func init() {
	log.SetLevel(config.GetLogLevel())
	var err error
	if shortTimeout, err = config.GetShortBluetoothTimeout(); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load bluetooth timeout")
	}
	if longTimeout, err = config.GetLongBluetoothTimeout(); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load bluetooth timeout")
	}
	name, err = GetCharacteristic("2a00", ble.CharRead+ble.CharWrite, 0x02, 0x03)
	if err != nil {
		log.Fatal(err)
	}
	log.Debug("Initialised bluetooth radio")
}
// updateLinuxParam applies the HCI scan and connection parameters used for
// talking to the dependent devices; units for each raw value are documented
// inline. Scanning is passive and uses a random own-address type to match
// Connect's hci.RandomAddress dialing.
func updateLinuxParam(device *linux.Device) error {
	if err := device.HCI.Send(&cmd.LESetScanParameters{
		LEScanType:           0x00,   // 0x00: passive, 0x01: active
		LEScanInterval:       0x0060, // 0x0004 - 0x4000; N * 0.625msec
		LEScanWindow:         0x0060, // 0x0004 - 0x4000; N * 0.625msec
		OwnAddressType:       0x01,   // 0x00: public, 0x01: random
		ScanningFilterPolicy: 0x00,   // 0x00: accept all, 0x01: ignore non-white-listed.
	}, nil); err != nil {
		return errors.Wrap(err, "can't set scan param")
	}
	if err := device.HCI.Option(hci.OptConnParams(
		cmd.LECreateConnection{
			LEScanInterval:        0x0060, // 0x0004 - 0x4000; N * 0.625 msec
			LEScanWindow:          0x0060, // 0x0004 - 0x4000; N * 0.625 msec
			InitiatorFilterPolicy: 0x00,   // White list is not used
			PeerAddressType:       0x00,   // Public Device Address
			PeerAddress:           [6]byte{}, //
			OwnAddressType:        0x00,   // Public Device Address
			ConnIntervalMin:       0x0028, // 0x0006 - 0x0C80; N * 1.25 msec
			ConnIntervalMax:       0x0038, // 0x0006 - 0x0C80; N * 1.25 msec
			ConnLatency:           0x0000, // 0x0000 - 0x01F3; N * 1.25 msec
			SupervisionTimeout:    0x002A, // 0x000A - 0x0C80; N * 10 msec
			MinimumCELength:       0x0000, // 0x0000 - 0xFFFF; N * 0.625 msec
			MaximumCELength:       0x0000, // 0x0000 - 0xFFFF; N * 0.625 msec
		})); err != nil {
		return errors.Wrap(err, "can't set connection param")
	}
	return nil
}
<file_sep>/supervisor/supervisor.go
package supervisor
import (
"encoding/json"
"fmt"
"net/url"
"os"
"path"
"strconv"
"time"
log "github.com/Sirupsen/logrus"
"github.com/cavaliercoder/grab"
"github.com/parnurzeal/gorequest"
"github.com/resin-io/edge-node-manager/config"
)
var (
	address string // supervisor API base address, e.g. http://127.0.0.1:4000
	version string // proxyvisor API version path segment, e.g. "v1"
	key     string // API key marshalled as the JSON query payload {"apikey": ...}
	rawKey  string // API key exactly as provided by config
)
// WaitUntilReady blocks until the supervisor answers HTTP requests, polling
// its base address every configured delay. An unauthenticated request is
// expected to come back 401 once the supervisor is up, so that status code is
// the readiness signal.
func WaitUntilReady() {
	log.Info("Waiting until supervisor is ready")
	delay, err := config.GetSupervisorCheckDelay()
	if err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load supervisor check delay")
	}
	for {
		resp, _, errs := gorequest.New().Timeout(1 * time.Second).Get(address).End()
		if errs == nil && resp.StatusCode == 401 {
			// The supervisor is up once a 401 status code is returned
			log.Info("Supervisor is ready")
			return
		}
		time.Sleep(delay)
	}
}
// DependentApplicationsList fetches the raw JSON list of dependent
// applications from the supervisor's dependent-apps endpoint.
func DependentApplicationsList() ([]byte, []error) {
	target, err := buildPath(address, []string{version, "dependent-apps"})
	if err != nil {
		return nil, []error{err}
	}
	request := gorequest.New().Get(target).Query(key)
	log.WithFields(log.Fields{
		"URL":    request.Url,
		"Method": request.Method,
		"Query":  request.QueryData,
	}).Debug("Requesting dependent applications list")
	resp, body, errs := request.EndBytes()
	if errs = handleResp(resp, errs, 200); errs != nil {
		return nil, errs
	}
	return body, nil
}
// DependentApplicationUpdate downloads the binary.tar for a specific application and target commit
// Saving it to {ENM_ASSETS_DIRECTORY}/{applicationUUID}/{targetCommit}/binary.tar
// The API key is passed as a raw query parameter (not the JSON payload used
// by the gorequest helpers) because grab builds a plain HTTP request.
func DependentApplicationUpdate(applicationUUID int, targetCommit string) error {
	url, err := buildPath(address, []string{version, "dependent-apps", strconv.Itoa(applicationUUID), "assets", targetCommit})
	if err != nil {
		return err
	}
	req, err := grab.NewRequest(url)
	if err != nil {
		return err
	}
	q := req.HTTPRequest.URL.Query()
	q.Set("apikey", rawKey)
	req.HTTPRequest.URL.RawQuery = q.Encode()
	// Build {assets}/{applicationUUID}/{targetCommit}/binary.tar, creating
	// the directories as needed
	filePath := config.GetAssetsDir()
	filePath = path.Join(filePath, strconv.Itoa(applicationUUID))
	filePath = path.Join(filePath, targetCommit)
	if err = os.MkdirAll(filePath, os.ModePerm); err != nil {
		return err
	}
	filePath = path.Join(filePath, "binary.tar")
	req.Filename = filePath
	log.WithFields(log.Fields{
		"URL":         req.HTTPRequest.URL,
		"Method":      req.HTTPRequest.Method,
		"Query":       req.HTTPRequest.URL.RawQuery,
		"Destination": req.Filename,
	}).Debug("Requesting dependent application update")
	client := grab.NewClient()
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	if resp.HTTPResponse.StatusCode != 200 {
		return fmt.Errorf("Dependent application update failed")
	}
	log.Debug("Dependent application update succeeded")
	return nil
}
// DependentDeviceLog posts a log message for the dependent device identified
// by its resin UUID. The supervisor acknowledges with 202 Accepted.
func DependentDeviceLog(UUID, message string) []error {
	url, err := buildPath(address, []string{version, "devices", UUID, "logs"})
	if err != nil {
		return []error{err}
	}
	type dependentDeviceLog struct {
		Message string `json:"message"`
	}
	content := &dependentDeviceLog{
		Message: message,
	}
	bytes, err := json.Marshal(content)
	if err != nil {
		return []error{err}
	}
	req := gorequest.New()
	req.Post(url)
	req.Set("Content-Type", "application/json")
	req.Query(key)
	req.Send((string)(bytes))
	log.WithFields(log.Fields{
		"URL":    req.Url,
		"Method": req.Method,
		"Query":  req.QueryData,
		"Body":   (string)(bytes),
	}).Debug("Transmitting dependent device log")
	resp, _, errs := req.End()
	return handleResp(resp, errs, 202)
}
// DependentDeviceInfoUpdateWithOnlineState PUTs the device's status, commit
// and online flag to the supervisor. The commit field is omitted from the
// JSON payload when empty (omitempty).
func DependentDeviceInfoUpdateWithOnlineState(UUID, status, commit string, online bool) []error {
	url, err := buildPath(address, []string{version, "devices", UUID})
	if err != nil {
		return []error{err}
	}
	type dependentDeviceInfo struct {
		Status string `json:"status"`
		Online bool   `json:"is_online"`
		Commit string `json:"commit,omitempty"`
	}
	content := &dependentDeviceInfo{
		Status: status,
		Online: online,
		Commit: commit,
	}
	bytes, err := json.Marshal(content)
	if err != nil {
		return []error{err}
	}
	req := gorequest.New()
	req.Put(url)
	req.Set("Content-Type", "application/json")
	req.Query(key)
	req.Send((string)(bytes))
	log.WithFields(log.Fields{
		"URL":    req.Url,
		"Method": req.Method,
		"Query":  req.QueryData,
		"Body":   (string)(bytes),
	}).Debug("Transmitting dependent device info")
	resp, _, errs := req.End()
	return handleResp(resp, errs, 200)
}
// DependentDeviceInfoUpdateWithoutOnlineState PUTs the device's status and
// commit to the supervisor without touching its online flag. The commit
// field is omitted from the JSON payload when empty (omitempty).
func DependentDeviceInfoUpdateWithoutOnlineState(UUID, status, commit string) []error {
	url, err := buildPath(address, []string{version, "devices", UUID})
	if err != nil {
		return []error{err}
	}
	type dependentDeviceInfo struct {
		Status string `json:"status"`
		Commit string `json:"commit,omitempty"`
	}
	content := &dependentDeviceInfo{
		Status: status,
		Commit: commit,
	}
	bytes, err := json.Marshal(content)
	if err != nil {
		return []error{err}
	}
	req := gorequest.New()
	req.Put(url)
	req.Set("Content-Type", "application/json")
	req.Query(key)
	req.Send((string)(bytes))
	log.WithFields(log.Fields{
		"URL":    req.Url,
		"Method": req.Method,
		"Query":  req.QueryData,
		"Body":   (string)(bytes),
	}).Debug("Transmitting dependent device info")
	resp, _, errs := req.End()
	return handleResp(resp, errs, 200)
}
// DependentDeviceInfo fetches the raw JSON record for a single dependent
// device identified by its resin UUID.
func DependentDeviceInfo(UUID string) ([]byte, []error) {
	target, err := buildPath(address, []string{version, "devices", UUID})
	if err != nil {
		return nil, []error{err}
	}
	request := gorequest.New().Get(target).Query(key)
	log.WithFields(log.Fields{
		"URL":    request.Url,
		"Method": request.Method,
		"Query":  request.QueryData,
	}).Debug("Requesting dependent device info")
	resp, body, errs := request.EndBytes()
	if errs = handleResp(resp, errs, 200); errs != nil {
		return nil, errs
	}
	return body, nil
}
// DependentDeviceProvision asks the supervisor to provision a new dependent
// device under the given application, returning the resin UUID and device
// name the supervisor assigned.
// The uuid/device_name fields are extracted with checked type assertions so
// an unexpected response body yields an error instead of a panic.
func DependentDeviceProvision(applicationUUID int) (resinUUID, name string, errs []error) {
	url, err := buildPath(address, []string{version, "devices"})
	if err != nil {
		errs = []error{err}
		return
	}
	type dependentDeviceProvision struct {
		ApplicationUUID int `json:"appId"`
	}
	content := &dependentDeviceProvision{
		ApplicationUUID: applicationUUID,
	}
	bytes, err := json.Marshal(content)
	if err != nil {
		errs = []error{err}
		return
	}
	req := gorequest.New()
	req.Post(url)
	req.Set("Content-Type", "application/json")
	req.Query(key)
	req.Send((string)(bytes))
	log.WithFields(log.Fields{
		"URL":    req.Url,
		"Method": req.Method,
		"Query":  req.QueryData,
		"Body":   (string)(bytes),
	}).Debug("Requesting dependent device provision")
	resp, body, errs := req.EndBytes()
	if errs = handleResp(resp, errs, 201); errs != nil {
		return
	}
	var buffer map[string]interface{}
	if err := json.Unmarshal(body, &buffer); err != nil {
		errs = []error{err}
		return
	}
	var ok bool
	if resinUUID, ok = buffer["uuid"].(string); !ok {
		errs = []error{fmt.Errorf("Provision response missing uuid")}
		return
	}
	if name, ok = buffer["device_name"].(string); !ok {
		errs = []error{fmt.Errorf("Provision response missing device_name")}
		return
	}
	return
}
// DependentDevicesList fetches the raw JSON list of all dependent devices
// known to the supervisor.
func DependentDevicesList() ([]byte, []error) {
	target, err := buildPath(address, []string{version, "devices"})
	if err != nil {
		return nil, []error{err}
	}
	request := gorequest.New().Get(target).Query(key)
	log.WithFields(log.Fields{
		"URL":    request.Url,
		"Method": request.Method,
		"Query":  request.QueryData,
	}).Debug("Requesting dependent devices list")
	resp, body, errs := request.EndBytes()
	if errs = handleResp(resp, errs, 200); errs != nil {
		return nil, errs
	}
	return body, nil
}
// init loads the supervisor address, API version and key from config, and
// pre-marshals the key into the JSON form ({"apikey": ...}) that the
// gorequest helpers pass via Query.
func init() {
	log.SetLevel(config.GetLogLevel())
	address = config.GetSuperAddr()
	version = config.GetVersion()
	rawKey = config.GetSuperAPIKey()
	type apiKey struct {
		APIKey string `json:"apikey"`
	}
	content := &apiKey{
		APIKey: rawKey,
	}
	bytes, err := json.Marshal(content)
	if err != nil {
		log.WithFields(log.Fields{
			"Key":   rawKey,
			"Error": err,
		}).Fatal("Unable to marshall API key")
	}
	key = (string)(bytes)
	log.WithFields(log.Fields{
		"Address": address,
		"Version": version,
		"Key":     key,
		"Raw key": rawKey,
	}).Debug("Initialised outgoing supervisor API")
}
// buildPath joins the given path segments onto the base URL and returns the
// resulting URL string.
// Fix: the previous implementation parsed the package-level address instead
// of the base parameter, silently ignoring the caller's argument (all current
// callers happen to pass address, so behaviour is unchanged for them).
func buildPath(base string, paths []string) (string, error) {
	parsed, err := url.ParseRequestURI(base)
	if err != nil {
		return "", err
	}
	for _, p := range paths {
		parsed.Path = path.Join(parsed.Path, p)
	}
	return parsed.String(), nil
}
// handleResp validates a supervisor response. Transport errors are passed
// straight through; otherwise any status other than the expected one is
// rejected, except 404/410 which mean the dependent app/device was just
// deleted and are treated as valid.
func handleResp(resp gorequest.Response, errs []error, statusCode int) []error {
	if errs != nil {
		return errs
	}
	switch resp.StatusCode {
	case statusCode, 404, 410:
		log.WithFields(log.Fields{
			"Response": resp.Status,
		}).Debug("Valid response received")
		return nil
	}
	return []error{fmt.Errorf("Invalid response received: %s", resp.Status)}
}
<file_sep>/micro/nrf51822/nrf51822.go
package nrf51822
import (
"bytes"
"encoding/binary"
"fmt"
"io/ioutil"
"path"
"time"
log "github.com/Sirupsen/logrus"
"github.com/currantlabs/ble"
"github.com/mholt/archiver"
"github.com/resin-io/edge-node-manager/config"
"github.com/resin-io/edge-node-manager/radio/bluetooth"
)
// Firmware-over-the-air update info
// https://infocenter.nordicsemi.com/index.jsp?topic=%2Fcom.nordic.infocenter.sdk5.v11.0.0%2Fbledfu_transport_bleprofile.html
// https://infocenter.nordicsemi.com/index.jsp?topic=%2Fcom.nordic.infocenter.sdk5.v11.0.0%2Fbledfu_transport_bleservice.html&anchor=ota_spec_control_state
// DFU control-point opcodes and response codes for the Nordic legacy DFU
// service. "Recipt" is a historical misspelling of "Receipt" kept because the
// identifiers are referenced throughout this package.
const (
	Success            byte = 0x01 // response value: operation succeeded
	Start                   = 0x01 // opcode: start DFU
	Initialise              = 0x02 // opcode: initialise DFU parameters
	Receive                 = 0x03 // opcode: receive firmware image
	Validate                = 0x04 // opcode: validate firmware
	Activate                = 0x05 // opcode: activate image and reset
	Restart                 = 0x06 // opcode: reset system
	ReceivedSize            = 0x07 // opcode: report received image size
	RequestBlockRecipt      = 0x08 // opcode: request packet receipt notifications
	Response                = 0x10 // notification prefix: response
	BlockRecipt             = 0x11 // notification prefix: packet receipt
)
// Nrf51822 is a BLE SoC from Nordic
// https://www.nordicsemi.com/eng/Products/Bluetooth-low-energy/nRF51822
type Nrf51822 struct {
	Log                 *log.Logger // per-device logger (created by the hook package)
	LocalUUID           string      // local BLE identity of the device
	Firmware            FIRMWARE    // firmware image and transfer progress state
	NotificationChannel chan []byte // DFU control-point notifications delivered by subscribe
}
// FIRMWARE holds the extracted firmware image and the transfer state used to
// resume an interrupted FOTA.
type FIRMWARE struct {
	currentBlock int    // byte offset already received by the device
	size         int    // total size of the binary in bytes
	binary       []byte // firmware image contents
	data         []byte // init packet (DFU metadata) contents
}
var (
	dfuPkt       *ble.Characteristic // DFU packet characteristic (firmware payload)
	dfuCtrl      *ble.Characteristic // DFU control-point characteristic (opcodes + notifications)
	shortTimeout time.Duration       // timeout for instantaneous operations
	longTimeout  time.Duration       // timeout for long-running operations
)
// InitialiseRadio brings up the bluetooth radio used to talk to the SoC.
func (m *Nrf51822) InitialiseRadio() error {
	return bluetooth.Initialise()
}
// CleanupRadio releases the bluetooth radio.
func (m *Nrf51822) CleanupRadio() error {
	return bluetooth.Cleanup()
}
// ExtractFirmware unzips application.zip found in filePath and loads the
// firmware binary (bin) and init packet (data) into m.Firmware, recording
// the binary size used for transfer progress tracking.
func (m *Nrf51822) ExtractFirmware(filePath, bin, data string) error {
	m.Log.WithFields(log.Fields{
		"Firmware path": filePath,
		"Bin":           bin,
		"Data":          data,
	}).Debug("Extracting firmware")
	var err error
	if err = archiver.Zip.Open(path.Join(filePath, "application.zip"), filePath); err != nil {
		return err
	}
	m.Firmware.binary, err = ioutil.ReadFile(path.Join(filePath, bin))
	if err != nil {
		return err
	}
	m.Firmware.data, err = ioutil.ReadFile(path.Join(filePath, data))
	if err != nil {
		return err
	}
	m.Firmware.size = len(m.Firmware.binary)
	m.Log.WithFields(log.Fields{
		"Size": m.Firmware.size,
	}).Debug("Extracted firmware")
	return nil
}
// Update runs the full legacy-DFU flow over an established BLE connection:
// subscribe to control-point notifications, check for a resumable transfer,
// initialise if starting fresh, stream the image, validate, and activate.
func (m *Nrf51822) Update(client ble.Client) error {
	if err := m.subscribe(client); err != nil {
		return err
	}
	// Best-effort cleanup; the device may already be rebooting by then
	defer client.ClearSubscriptions()
	if err := m.checkFOTA(client); err != nil {
		return err
	}
	// Non-zero currentBlock means a previous transfer can be resumed, so
	// skip re-initialisation
	if m.Firmware.currentBlock == 0 {
		if err := m.initFOTA(client); err != nil {
			return err
		}
	}
	if err := m.transferFOTA(client); err != nil {
		return err
	}
	if err := m.validateFOTA(client); err != nil {
		return err
	}
	return m.finaliseFOTA(client)
}
// init loads the bluetooth timeouts from config and prepares the DFU
// control-point (with its CCCD descriptor for notifications) and packet
// characteristics. Failures are fatal since updates cannot proceed without
// them.
func init() {
	log.SetLevel(config.GetLogLevel())
	var err error
	if shortTimeout, err = config.GetShortBluetoothTimeout(); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load bluetooth timeout")
	}
	if longTimeout, err = config.GetLongBluetoothTimeout(); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load bluetooth timeout")
	}
	dfuCtrl, err = bluetooth.GetCharacteristic("000015311212efde1523785feabcd123", ble.CharWrite+ble.CharNotify, 0x0F, 0x10)
	if err != nil {
		log.Fatal(err)
	}
	descriptor, err := bluetooth.GetDescriptor("2902", 0x11)
	if err != nil {
		log.Fatal(err)
	}
	dfuCtrl.CCCD = descriptor
	dfuPkt, err = bluetooth.GetCharacteristic("000015321212efde1523785feabcd123", ble.CharWriteNR, 0x0D, 0x0E)
	if err != nil {
		log.Fatal(err)
	}
	log.Debug("Initialised nRF51822 characteristics")
}
// subscribe enables notifications on the DFU control point by writing its
// CCCD and forwarding incoming notifications to m.NotificationChannel.
// NOTE(review): []byte{0x0001} is a single byte (0x01); the CCCD
// notification-enable value is normally the 2-byte little-endian
// {0x01, 0x00} — confirm against the ble library/peripheral before changing.
func (m *Nrf51822) subscribe(client ble.Client) error {
	if err := bluetooth.WriteDescriptor(client, dfuCtrl.CCCD, []byte{0x0001}); err != nil {
		return err
	}
	return client.Subscribe(dfuCtrl, false, func(b []byte) {
		m.NotificationChannel <- b
	})
}
// checkFOTA queries the device for the number of firmware bytes it has
// already received and stores it in m.Firmware.currentBlock; a non-zero
// value lets Update resume an interrupted transfer.
func (m *Nrf51822) checkFOTA(client ble.Client) error {
	m.Log.Debug("Checking FOTA")
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{ReceivedSize}, false); err != nil {
		return err
	}
	resp, err := m.getNotification([]byte{Response, ReceivedSize, Success}, true)
	if err != nil {
		return err
	}
	// Bytes 3+ carry the received size as a little-endian int32
	m.Firmware.currentBlock, err = unpack(resp[3:])
	if err != nil {
		return err
	}
	m.Log.WithFields(log.Fields{
		"Start block": m.Firmware.currentBlock,
	}).Debug("Checked FOTA")
	return nil
}
// initFOTA performs the legacy-DFU start sequence: announce an application
// (0x04) update with the image size, send the init packet, request a packet
// receipt notification every 100 packets (0x64), and switch the device into
// receive mode. Each step waits for the matching success notification.
func (m *Nrf51822) initFOTA(client ble.Client) error {
	m.Log.Debug("Initialising FOTA")
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{Start, 0x04}, false); err != nil {
		return err
	}
	// Size payload: 8 zero bytes (softdevice/bootloader sizes) followed by
	// the application image size as little-endian int32
	buf := new(bytes.Buffer)
	if _, err := buf.Write(make([]byte, 8)); err != nil {
		return err
	}
	if err := binary.Write(buf, binary.LittleEndian, (int32)(m.Firmware.size)); err != nil {
		return err
	}
	if err := bluetooth.WriteCharacteristic(client, dfuPkt, buf.Bytes(), false); err != nil {
		return err
	}
	if _, err := m.getNotification([]byte{Response, Start, Success}, true); err != nil {
		return err
	}
	// Init packet: begin (0x00), payload, complete (0x01)
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{Initialise, 0x00}, false); err != nil {
		return err
	}
	if err := bluetooth.WriteCharacteristic(client, dfuPkt, m.Firmware.data, false); err != nil {
		return err
	}
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{Initialise, 0x01}, false); err != nil {
		return err
	}
	if _, err := m.getNotification([]byte{Response, Initialise, Success}, true); err != nil {
		return err
	}
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{RequestBlockRecipt, 0x64, 0x00}, false); err != nil {
		return err
	}
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{Receive}, false); err != nil {
		return err
	}
	m.Log.Debug("Initialised FOTA")
	return nil
}
// transferFOTA streams the firmware binary to the device in 20-byte BLE
// packets, resuming from Firmware.currentBlock if a previous transfer was
// interrupted. Every 100 packets (as configured in initFOTA) a packet
// receipt notification is consumed and used to verify the device is still in
// sync with what we have sent.
// Fix: corrected the "transer" typo in the out-of-sync error message.
func (m *Nrf51822) transferFOTA(client ble.Client) error {
	blockCounter := 1
	blockSize := 20
	if m.Firmware.currentBlock != 0 {
		// Resuming - fast forward the packet counter so receipt
		// notifications line up with what the device has acknowledged
		blockCounter += (m.Firmware.currentBlock / blockSize)
	}
	m.Log.WithFields(log.Fields{
		"Progress %": m.getProgress(),
	}).Info("Transferring FOTA")
	for i := m.Firmware.currentBlock; i < m.Firmware.size; i += blockSize {
		sliceIndex := i + blockSize
		if sliceIndex > m.Firmware.size {
			// Final packet may be shorter than blockSize
			sliceIndex = m.Firmware.size
		}
		block := m.Firmware.binary[i:sliceIndex]
		if err := bluetooth.WriteCharacteristic(client, dfuPkt, block, true); err != nil {
			return err
		}
		if (blockCounter % 100) == 0 {
			resp, err := m.getNotification(nil, false)
			if err != nil {
				return err
			}
			if resp[0] != BlockRecipt {
				return fmt.Errorf("Incorrect notification received")
			}
			if m.Firmware.currentBlock, err = unpack(resp[1:]); err != nil {
				return err
			}
			if (i + blockSize) != m.Firmware.currentBlock {
				return fmt.Errorf("FOTA transfer out of sync")
			}
			m.Log.WithFields(log.Fields{
				"Progress %": m.getProgress(),
			}).Info("Transferring FOTA")
		}
		blockCounter++
	}
	if _, err := m.getNotification([]byte{Response, Receive, Success}, true); err != nil {
		return err
	}
	m.Log.WithFields(log.Fields{
		"Progress %": 100,
	}).Info("Transferring FOTA")
	return nil
}
// validateFOTA re-queries the received byte count, checks it matches the
// binary size, and asks the device to validate the received image.
func (m *Nrf51822) validateFOTA(client ble.Client) error {
	m.Log.Debug("Validating FOTA")
	if err := m.checkFOTA(client); err != nil {
		return err
	}
	if m.Firmware.currentBlock != m.Firmware.size {
		return fmt.Errorf("Bytes received does not match binary size")
	}
	if err := bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{Validate}, false); err != nil {
		return err
	}
	if _, err := m.getNotification([]byte{Response, Validate, Success}, true); err != nil {
		return err
	}
	m.Log.Debug("Validated FOTA")
	return nil
}
// finaliseFOTA activates the new firmware image, which makes the device
// reset and drop the connection, then gives it time to disconnect.
// Fix: pointer receiver for consistency with every other Nrf51822 method
// (the method is unexported and only called through *Nrf51822, so callers
// are unaffected).
func (m *Nrf51822) finaliseFOTA(client ble.Client) error {
	m.Log.Debug("Finalising FOTA")
	// Ignore the error because this command causes the device to disconnect
	bluetooth.WriteCharacteristic(client, dfuCtrl, []byte{Activate}, false)
	// Give the device time to disconnect
	time.Sleep(shortTimeout)
	m.Log.Debug("Finalised FOTA")
	return nil
}
// getNotification waits up to longTimeout for a DFU control-point
// notification. When compare is true the notification's first three bytes
// must equal exp; mismatches are logged at debug level and rejected.
// Fix: notifications shorter than 3 bytes previously panicked on the
// resp[:3] slice (and on per-byte logging); they are now rejected safely.
func (m *Nrf51822) getNotification(exp []byte, compare bool) ([]byte, error) {
	select {
	case <-time.After(longTimeout):
		return nil, fmt.Errorf("Timed out waiting for notification")
	case resp := <-m.NotificationChannel:
		if !compare {
			return resp, nil
		}
		if len(resp) >= 3 && bytes.Equal(resp[:3], exp) {
			return resp, nil
		}
		if len(resp) >= 3 {
			m.Log.WithFields(log.Fields{
				"[0]": fmt.Sprintf("0x%X", resp[0]),
				"[1]": fmt.Sprintf("0x%X", resp[1]),
				"[2]": fmt.Sprintf("0x%X", resp[2]),
			}).Debug("Received")
		} else {
			m.Log.WithFields(log.Fields{
				"Notification": fmt.Sprintf("0x%X", resp),
			}).Debug("Received (short)")
		}
		m.Log.WithFields(log.Fields{
			"[0]": fmt.Sprintf("0x%X", exp[0]),
			"[1]": fmt.Sprintf("0x%X", exp[1]),
			"[2]": fmt.Sprintf("0x%X", exp[2]),
		}).Debug("Expected")
		return nil, fmt.Errorf("Incorrect notification received")
	}
}
// getProgress returns the transfer progress as a percentage of the binary
// size.
func (m *Nrf51822) getProgress() float32 {
	return ((float32)(m.Firmware.currentBlock) / (float32)(m.Firmware.size)) * 100.0
}
// unpack decodes a little-endian int32 from the start of resp and widens it
// to int, returning an error if fewer than four bytes are available.
func unpack(resp []byte) (int, error) {
	var decoded int32
	reader := bytes.NewReader(resp)
	err := binary.Read(reader, binary.LittleEndian, &decoded)
	if err != nil {
		return 0, err
	}
	return int(decoded), nil
}
<file_sep>/README.md
# edge-node-manager
[](https://goreportcard.com/report/github.com/resin-io/edge-node-manager)
[](https://travis-ci.com/resin-io/edge-node-manager)
resin.io dependent device edge-node-manager written in Go.
## Getting started
- Sign up on [resin.io](https://dashboard.resin.io/signup)
- Work through the [getting started
guide](https://docs.resin.io/raspberrypi3/nodejs/getting-started/)
- Create a new application
- Set these variables in the `Fleet Configuration` application side tab
- `RESIN_SUPERVISOR_DELTA=1`
- `RESIN_UI_ENABLE_DEPENDENT_APPLICATIONS=1`
- Clone this repository to your local workspace
- Add the dependent application `resin remote` to your local workspace
- Provision a gateway device
- Push code to resin as normal :)
- Follow the readme of the [supported dependent
device](#supported-dependent-devices) you would like to use
## Configuration variables
More info about environment variables can be found in
the [documentation](https://docs.resin.io/management/env-vars/). If you
don't set environment variables the default will be used.
Environment Variable | Default | Description
------------ | ------------- | -------------
ENM_LOG_LEVEL | `info` | the edge-node-manager log level
DEPENDENT_LOG_LEVEL | `info` | the dependent device log level
ENM_SUPERVISOR_CHECK_DELAY | `1` | the time delay in seconds between each supervisor check at startup
ENM_HOTSPOT_DELETE_DELAY | `10` | the time delay in seconds between hotspot deletion and creation
ENM_CONFIG_LOOP_DELAY | `10` | the time delay in seconds between each application process loop
ENM_CONFIG_PAUSE_DELAY | `10` | the time delay in seconds between each pause check
ENM_HOTSPOT_SSID | `resin-hotspot` | the SSID used for the hotspot
ENM_HOTSPOT_PASSWORD | `<PASSWORD>` | the password used for the hotspot
ENM_BLUETOOTH_SHORT_TIMEOUT | `1` | the timeout in seconds for instantaneous bluetooth operations
ENM_BLUETOOTH_LONG_TIMEOUT | `10` | the timeout in seconds for long running bluetooth operations
ENM_AVAHI_TIMEOUT | `10` | the timeout in seconds for Avahi scan operations
ENM_UPDATE_RETRIES | `1` | the number of times the firmware update process should be retried
ENM_ASSETS_DIRECTORY | `/data/assets` | the root directory used to store the dependent device firmware
ENM_DB_DIRECTORY | `/data/database` | the root directory used to store the database
ENM_DB_FILE | `enm.db` | the database file name
ENM_API_VERSION | `v1` | the proxyvisor API version
RESIN_SUPERVISOR_ADDRESS | `http://127.0.0.1:4000` | the address used to communicate with the proxyvisor
RESIN_SUPERVISOR_API_KEY | `na` | the api key used to communicate with the proxyvisor
ENM_LOCK_FILE_LOCATION | `/tmp/resin/resin-updates.lock` | the [lock file](https://github.com/resin-io/resin-supervisor/blob/master/docs/update-locking.md) location
## API
The edge-node-manager provides an API that allows the user to set the
target status of the main process. This is useful to free up the on-board radios
allowing user code to interact directly with the dependent devices e.g. to
collect sensor data.
**Warning** - Do not try and interact with the on-board radios whilst the
edge-node-manager is running (this leads to inconsistent, unexpected behaviour).
### PUT /v1/enm/status
Set the edge-node-manager process status.
#### Example
```
curl -i -H "Content-Type: application/json" -X PUT --data \
'{"targetStatus":"Paused"}' localhost:1337/v1/enm/status
curl -i -H "Content-Type: application/json" -X PUT --data \
'{"targetStatus":"Running"}' localhost:1337/v1/enm/status
```
#### Response
```
HTTP/1.1 200 OK
```
### GET /v1/enm/status
Get the edge-node-manager process status.
#### Example
```
curl -i -X GET localhost:1337/v1/enm/status
```
#### Response
```
HTTP/1.1 200 OK
{
"currentStatus":"Running",
"targetStatus":"Paused",
}
```
### GET /v1/devices
Get all dependent devices.
#### Example
```
curl -i -X GET localhost:1337/v1/devices
```
#### Response
```
HTTP/1.1 200 OK
[{
"ApplicationUUID": 511898,
"BoardType": "esp8266",
"Name": "holy-sunset",
"LocalUUID": "1265892",
"ResinUUID": "64a1ae375b213d7e5af8409da3ad63108df4c8462089a05aa9af358c3f0df1",
"Commit": "16b5cd4df8085d2872a6f6fc0c378629a185d78b",
"TargetCommit": "16b5cd4df8085d2872a6f6fc0c378629a185d78b",
"Status": "Idle",
"Config": null,
"TargetConfig": {
"RESIN_HOST_TYPE": "esp8266",
"RESIN_SUPERVISOR_DELTA": "1"
},
"Environment": null,
"TargetEnvironment": {},
"RestartFlag": false,
"DeleteFlag": false
}]
```
### GET /v1/devices/{uuid}
Get a dependent device.
#### Example
```
curl -i -X GET localhost:1337/v1/devices/1265892
```
#### Response
```
HTTP/1.1 200 OK
{
"ApplicationUUID": 511898,
"BoardType": "esp8266",
"Name": "holy-sunset",
"LocalUUID": "1265892",
"ResinUUID": "64a1ae375b213d7e5af8409da3ad63108df4c8462089a05aa9af358c3f0df1",
"Commit": "<PASSWORD>",
"TargetCommit": "<PASSWORD>",
"Status": "Idle",
"Config": null,
"TargetConfig": {
"RESIN_HOST_TYPE": "esp8266",
"RESIN_SUPERVISOR_DELTA": "1"
},
"Environment": null,
"TargetEnvironment": {},
"RestartFlag": false,
"DeleteFlag": false
}
```
## Supported dependent devices
- [micro:bit](https://github.com/resin-io-projects/micro-bit)
- [nRF51822-DK](https://github.com/resin-io-projects/nRF51822-DK)
- [ESP8266](https://github.com/resin-io-projects/esp8266)
## Further reading
### About
The edge-node-manager is an example of a gateway
application designed to bridge the gap between Resin OS capable single board
computers (e.g. the Raspberry Pi) and non Resin OS capable devices (e.g.
micro-controllers). It has been designed to make it as easy as possible to add
new supported dependent device types and to run alongside your user application.
The following functionality is implemented:
- Dependent device detection
- Dependent device provisioning
- Dependent device restart
- Dependent device over-the-air (OTA) updating
- Dependent device logging and information updating
- API
### Definitions
#### Dependent application
A dependent application is a Resin application that targets devices not capable
of interacting directly with the Resin API.
The dependent application is scoped under a parent Resin application, which is
then referred to as the gateway application.
A dependent application follows the same development cycle as a conventional
Resin application:
- It binds to your git workspace via the `resin remote`
- It consists of a Docker application
- It offers the same environment and configuration variables management
There are some key differences:
- It does not support Dockerfile templating
- The Dockerfile must target an x86 base image
- The actual firmware must be stored in the `/assets` folder within the built
docker image
#### Dependent device
A dependent device is a device not capable of interacting directly with the
Resin API - the reasons can be several, the most common are:
- No direct Internet capabilities
- Not able to run the Resin OS (being a microcontroller, for example)
#### Gateway application
The gateway application is responsible for detecting, provisioning and managing
dependent devices belonging to one of its dependent applications. This is
made possible by leveraging a new set of endpoints exposed by the [Resin
Supervisor](https://github.com/resin-io/resin-supervisor).
The edge-node-manager (this repository) is an example of a gateway application.
#### Gateway device
The gateway device runs the gateway application and has the needed on-board
radios to communicate with the managed dependent devices, for example:
- Bluetooth
- WiFi
- LoRa
- ZigBee
Throughout development a Raspberry Pi 3 has been used as the gateway device.
<file_sep>/board/esp8266/esp8266.go
package esp8266
import (
"fmt"
"path"
"strconv"
log "github.com/Sirupsen/logrus"
"github.com/resin-io/edge-node-manager/radio/wifi"
)
// Esp8266 represents an ESP8266 dependent device reached over wifi.
type Esp8266 struct {
	Log       *log.Logger // per-device logger
	LocalUUID string      // local network identity of the device
}
// InitialiseRadio brings up the wifi radio used to talk to the device.
func (b Esp8266) InitialiseRadio() error {
	return wifi.Initialise()
}
// CleanupRadio releases the wifi radio.
func (b Esp8266) CleanupRadio() error {
	return wifi.Cleanup()
}
// Update flashes new firmware onto the dependent device by resolving its IP
// from the local UUID and posting firmware.bin (taken from the supplied
// directory) to the device's /update endpoint.
func (b Esp8266) Update(filePath string) error {
	b.Log.Info("Starting update")

	ip, err := wifi.GetIP(b.LocalUUID)
	if err != nil {
		return err
	}

	firmware := path.Join(filePath, "firmware.bin")
	endpoint := "http://" + ip + "/update"
	if err := wifi.PostForm(endpoint, firmware); err != nil {
		return err
	}

	b.Log.Info("Finished update")

	return nil
}
// Scan looks for online dependent devices belonging to the given
// application, keyed by local UUID.
func (b Esp8266) Scan(applicationUUID int) (map[string]struct{}, error) {
	return wifi.Scan(strconv.Itoa(applicationUUID))
}

// Online reports whether this device is currently reachable over wifi.
func (b Esp8266) Online() (bool, error) {
	return wifi.Online(b.LocalUUID)
}

// Restart is not supported for the ESP8266 board; it always returns an error.
func (b Esp8266) Restart() error {
	b.Log.Info("Restarting...")
	return fmt.Errorf("Restart not implemented")
}

// Identify is not supported for the ESP8266 board; it always returns an error.
func (b Esp8266) Identify() error {
	b.Log.Info("Identifying...")
	return fmt.Errorf("Identify not implemented")
}
// UpdateConfig is not supported for the ESP8266 board; the requested config
// is logged and an error is returned.
func (b Esp8266) UpdateConfig(config interface{}) error {
	b.Log.WithFields(log.Fields{
		"Config": config,
	}).Info("Updating config...")
	return fmt.Errorf("Update config not implemented")
}

// UpdateEnvironment is not supported for the ESP8266 board; the requested
// environment is logged and an error is returned.
func (b Esp8266) UpdateEnvironment(config interface{}) error {
	b.Log.WithFields(log.Fields{
		"Config": config,
	}).Info("Updating environment...")
	return fmt.Errorf("Update environment not implemented")
}
<file_sep>/config/config.go
package config
import (
"os"
"path"
"strconv"
"time"
log "github.com/Sirupsen/logrus"
)
// GetLogLevel returns the log level for the edge-node-manager itself
// (env var ENM_LOG_LEVEL; unrecognised or empty values fall back to Info).
func GetLogLevel() log.Level {
	return switchLogLevel(getEnv("ENM_LOG_LEVEL", ""))
}

// GetDependentLogLevel returns the log level for dependent devices
// (env var DEPENDENT_LOG_LEVEL; unrecognised or empty values fall back to Info).
func GetDependentLogLevel() log.Level {
	return switchLogLevel(getEnv("DEPENDENT_LOG_LEVEL", ""))
}
// GetSupervisorCheckDelay returns the time delay in seconds between each
// supervisor check at startup (env var ENM_SUPERVISOR_CHECK_DELAY, default 1).
func GetSupervisorCheckDelay() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_SUPERVISOR_CHECK_DELAY", "1"))
	return time.Duration(value) * time.Second, err
}

// GetHotspotDeleteDelay returns the time delay in seconds between hotspot
// deletion and creation (env var ENM_HOTSPOT_DELETE_DELAY, default 10).
func GetHotspotDeleteDelay() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_HOTSPOT_DELETE_DELAY", "10"))
	return time.Duration(value) * time.Second, err
}

// GetLoopDelay returns the time delay in seconds between each application
// process loop (env var ENM_CONFIG_LOOP_DELAY, default 10).
func GetLoopDelay() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_CONFIG_LOOP_DELAY", "10"))
	return time.Duration(value) * time.Second, err
}

// GetPauseDelay returns the time delay in seconds between each pause check
// (env var ENM_CONFIG_PAUSE_DELAY, default 10).
func GetPauseDelay() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_CONFIG_PAUSE_DELAY", "10"))
	return time.Duration(value) * time.Second, err
}
// GetHotspotSSID returns the SSID to be used for the hotspot
// (env var ENM_HOTSPOT_SSID, default "resin-hotspot").
func GetHotspotSSID() string {
	return getEnv("ENM_HOTSPOT_SSID", "resin-hotspot")
}

// GetHotspotPassword returns the password to be used for the hotspot
// (env var ENM_HOTSPOT_PASSWORD).
// NOTE(review): the literal default below looks like a redacted placeholder
// rather than a real default - confirm the intended value before release.
func GetHotspotPassword() string {
	return getEnv("ENM_HOTSPOT_PASSWORD", "<PASSWORD>")
}
// GetShortBluetoothTimeout returns the timeout for each instantaneous
// bluetooth operation (env var ENM_BLUETOOTH_SHORT_TIMEOUT, default 1s).
func GetShortBluetoothTimeout() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_BLUETOOTH_SHORT_TIMEOUT", "1"))
	return time.Duration(value) * time.Second, err
}

// GetLongBluetoothTimeout returns the timeout for each long running
// bluetooth operation (env var ENM_BLUETOOTH_LONG_TIMEOUT, default 10s).
func GetLongBluetoothTimeout() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_BLUETOOTH_LONG_TIMEOUT", "10"))
	return time.Duration(value) * time.Second, err
}

// GetAvahiTimeout returns the timeout for each Avahi scan operation
// (env var ENM_AVAHI_TIMEOUT, default 10s).
func GetAvahiTimeout() (time.Duration, error) {
	value, err := strconv.Atoi(getEnv("ENM_AVAHI_TIMEOUT", "10"))
	return time.Duration(value) * time.Second, err
}

// GetUpdateRetries returns the number of times the firmware update process
// should be attempted (env var ENM_UPDATE_RETRIES, default 1).
func GetUpdateRetries() (int, error) {
	return strconv.Atoi(getEnv("ENM_UPDATE_RETRIES", "1"))
}
// GetAssetsDir returns the root directory used to store the database and
// application commits (env var ENM_ASSETS_DIRECTORY, default /data/assets).
func GetAssetsDir() string {
	return getEnv("ENM_ASSETS_DIRECTORY", "/data/assets")
}

// GetDbDir returns the directory used to store the database
// (env var ENM_DB_DIRECTORY, default /data/database).
func GetDbDir() string {
	return getEnv("ENM_DB_DIRECTORY", "/data/database")
}
// GetDbPath returns the path used to store the database
func GetDbPath() string {
directory := GetDbDir()
file := getEnv("ENM_DB_FILE", "enm.db")
return path.Join(directory, file)
}
// GetVersion returns the API version used to communicate with the
// supervisor (env var ENM_API_VERSION, default "v1").
func GetVersion() string {
	return getEnv("ENM_API_VERSION", "v1")
}

// GetSuperAddr returns the address used to communicate with the supervisor
// (env var RESIN_SUPERVISOR_ADDRESS, default http://127.0.0.1:4000).
func GetSuperAddr() string {
	return getEnv("RESIN_SUPERVISOR_ADDRESS", "http://127.0.0.1:4000")
}

// GetSuperAPIKey returns the API key used to communicate with the
// supervisor (env var RESIN_SUPERVISOR_API_KEY, no default).
func GetSuperAPIKey() string {
	return getEnv("RESIN_SUPERVISOR_API_KEY", "")
}

// GetLockFileLocation returns the location of the resin update lock file
// (env var ENM_LOCK_FILE_LOCATION, default /tmp/resin/resin-updates.lock).
func GetLockFileLocation() string {
	return getEnv("ENM_LOCK_FILE_LOCATION", "/tmp/resin/resin-updates.lock")
}
// getEnv reads an environment variable, substituting the supplied fallback
// when the variable is unset or empty.
func getEnv(key, fallback string) string {
	if value := os.Getenv(key); value != "" {
		return value
	}
	return fallback
}
// switchLogLevel maps a textual log level name to the corresponding logrus
// level, defaulting to Info for empty or unrecognised input.
func switchLogLevel(level string) log.Level {
	levels := map[string]log.Level{
		"Debug": log.DebugLevel,
		"Info":  log.InfoLevel,
		"Warn":  log.WarnLevel,
		"Error": log.ErrorLevel,
		"Fatal": log.FatalLevel,
		"Panic": log.PanicLevel,
	}

	if parsed, ok := levels[level]; ok {
		return parsed
	}
	return log.InfoLevel
}
<file_sep>/main.go
package main
import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"sort"
	"time"

	log "github.com/Sirupsen/logrus"
	"github.com/asdine/storm"
	"github.com/jmoiron/jsonq"
	"github.com/resin-io/edge-node-manager/api"
	"github.com/resin-io/edge-node-manager/application"
	"github.com/resin-io/edge-node-manager/config"
	"github.com/resin-io/edge-node-manager/device"
	"github.com/resin-io/edge-node-manager/process"
	"github.com/resin-io/edge-node-manager/supervisor"
)
var (
	// This variable will be populated at build time with the current version tag
	version string
	// This variable defines the delay between each processing loop; it is
	// loaded from config in init()
	loopDelay time.Duration
)
// main performs a best-effort version check, waits for the supervisor to
// come up, then runs the processing loop forever.
func main() {
	log.Info("Starting edge-node-manager")

	// A failed version check is logged but never fatal
	if err := checkVersion(); err != nil {
		log.Error("Unable to check if edge-node-manager is up to date")
	}

	supervisor.WaitUntilReady()

	for {
		// Run processing loop
		loop()
		// Delay between processing each set of applications to prevent 100% CPU usage
		time.Sleep(loopDelay)
	}
}
// init configures logging, loads the loop delay, prepares the database
// (directory, file and device bucket - the handle is closed again when init
// returns) and starts the incoming supervisor HTTP API in a goroutine.
// Any failure here is fatal since the daemon cannot run without them.
func init() {
	log.SetLevel(config.GetLogLevel())
	log.SetFormatter(&log.TextFormatter{ForceColors: true, DisableTimestamp: true})

	var err error
	loopDelay, err = config.GetLoopDelay()
	if err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load loop delay")
	}

	dbDir := config.GetDbDir()
	if err := os.MkdirAll(dbDir, os.ModePerm); err != nil {
		log.WithFields(log.Fields{
			"Directory": dbDir,
			"Error":     err,
		}).Fatal("Unable to create database directory")
	}

	db, err := storm.Open(config.GetDbPath())
	if err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to open database")
	}
	// The database is only opened here to initialise the device bucket;
	// other functions open their own short-lived handles
	defer db.Close()

	if err := db.Init(&device.Device{}); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to initialise database")
	}

	// Serve the supervisor-facing API in the background for the lifetime of
	// the process
	go func() {
		router := api.NewRouter()
		port := ":8081"

		log.WithFields(log.Fields{
			"Port": port,
		}).Debug("Initialising incoming supervisor API")

		if err := http.ListenAndServe(port, router); err != nil {
			log.WithFields(log.Fields{
				"Error": err,
			}).Fatal("Unable to initialise incoming supervisor API")
		}
	}()
}
// checkVersion compares the running version (populated at build time)
// against the latest release tag published on the GitHub API, logging a
// warning with the update command when a newer release exists.
func checkVersion() error {
	resp, err := http.Get("https://api.github.com/repos/resin-io/edge-node-manager/releases/latest")
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Robustness fix: a rate-limited or failed API response was previously
	// decoded blindly; fail fast on anything other than 200 OK
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("Unexpected response from the GitHub API: %s", resp.Status)
	}

	data := map[string]interface{}{}
	if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
		return err
	}

	latest, err := jsonq.NewQuery(data).String("tag_name")
	if err != nil {
		return err
	}

	if version == latest {
		log.WithFields(log.Fields{
			"Current version": version,
		}).Info("edge-node-manager upto date")
	} else {
		log.WithFields(log.Fields{
			"Current version": version,
			"Latest version":  latest,
			"Update command":  "git push resin master:resin-nocache",
		}).Warn("Please update edge-node-manager")
	}

	return nil
}
// loop performs one processing pass: fetch the dependent application list
// from the supervisor, unmarshal it, and process each application in
// ascending UUID order. Errors are logged and the pass is abandoned; the
// caller retries on the next iteration.
func loop() {
	// Get applications from the supervisor
	bytes, errs := supervisor.DependentApplicationsList()
	if errs != nil {
		log.WithFields(log.Fields{
			"Errors": errs,
		}).Error("Unable to get applications")
		return
	}

	// Unmarshal applications
	applications, err := application.Unmarshal(bytes)
	if err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Error("Unable to unmarshal applications")
		return
	}

	// Sort application UUIDs to ensure they run in a deterministic order
	var keys []int
	for key := range applications {
		keys = append(keys, key)
	}
	sort.Ints(keys)

	// Process applications; a failure in one does not stop the others
	for _, key := range keys {
		if errs := process.Run(applications[key]); errs != nil {
			log.WithFields(log.Fields{
				"Application": applications[key],
				"Errors":      errs,
			}).Error("Unable to process application")
		}
	}
}
<file_sep>/CHANGELOG.md
# Change Log
All notable changes to this project will be documented in this file
automatically by Versionist. DO NOT EDIT THIS FILE MANUALLY!
This project adheres to [Semantic Versioning](http://semver.org/).
## v3.0.0 - 2018-03-27
* Update device name parameter to comply with API v4 and supervisor v7 #199 [Lucian Buzzo]
## v2.4.0 - 2017-09-08
* Configure time between hotspot deletion/creation #193 [Joe Roberts]
## v2.3.0 - 2017-09-08
* Configure time between supervisor checks #194 [Joe Roberts]
## v2.2.0 - 2017-08-29
* Improve device endpoints #190 [Joe Roberts]
## v2.1.0 - 2017-08-18
* Implement query dependent device endpoint #187 [Joe Roberts]
## v2.0.1 - 2017-08-18
* Check for index-out-of-range #186 [Joe Roberts]
## v2.0.0 - 2017-08-14
* Implement avahi/mdns discovery #185 [Joe Roberts]
* Document config variables in the readme #185 [Joe Roberts]
## v1.6.2 - 2017-07-10
* Updated readme [Joe Roberts]
## v1.6.1 - 2017-07-10
* Implement context timeout for nmap scan [Joe Roberts]
## v1.6.0 - 2017-07-06
* Tag architectures with the correct name [Joe Roberts]
## v1.5.3 - 2017-07-06
* Only perform extra bluetooth initialisation if the device type is raspberrypi [Joe Roberts]
## v1.5.2 - 2017-07-06
* Another attempt to fix conflicts between jobs [Joe Roberts]
## v1.5.1 - 2017-07-06
* Fix issue with conflicting travis jobs [Joe Roberts]
* Fix typo in variable check [Joe Roberts]
## v1.5.0 - 2017-07-06
* Get architecture during the docker build [Joe Roberts]
## v1.4.5 - 2017-05-19
* Add more ignores to .gitignore [<NAME>]
## v1.4.4 - 2017-05-19
* Update `versionist` config to latest, ensuring semver types are case insensitive. [Heds Simons]
## v1.4.3 - 2017-04-25
* Refactor hotspot creation [Joseph Roberts]
* Removed unused resin-hotspot system connection [Joseph Roberts]
* Update dependencies [Joseph Roberts]
## v1.4.2 - 2017-04-21
* Ignore db open errors allowing the supervisor to silently retry [Joseph Roberts]
## v1.4.1 - 2017-04-21
* Improve radio initialisation [Joseph Roberts]
## v1.4.0 - 2017-04-07
* ESP Support [Joseph Roberts]
## v1.3.0 - 2017-03-23
* Remove state from memory by writing to database [Joseph Roberts]
## v1.2.1 - 2017-03-16
* Wrap timeout around all bluetooth operations [Joseph Roberts]
## v1.2.0 - 2017-03-16
* Implement git auto release [Joseph Roberts]
## v1.1.0 - 2017-03-15
* Implement out of date warning [Joseph Roberts]
## v1.0.3 - 2017-03-15
* Remove edge-node-manager binary
* Add edge-node-manager to .gitignore [Joseph Roberts]
## v1.0.2 - 2017-03-13
* Sync changelog with release [Joseph Roberts]
## v1.0.1 - 2017-03-10
* Get latest tag from the git API
* Add update lock info to readme
## v1.0.0 - 2017-03-10
* Set up versionist
* Implement lock mechanism
* Defer save state
* Increased time to allow device to disconnect
* Fix att-request-failed error
* Quick hacky temporary fix to solve slow DFU
* Remove disconnect requests as the dep. device disconnects on its own
* Replace wget with curl
* Temp fix device status by sending every minute
* Retry updates
* Bluetooth stability fixes
* Add dep device log level
* Removed timestamp so that the supervisor will set the timestamp instead
## v0.1.9 2017-03-01
* Updated travis icon
* Fixed nrf51822dk issue and added some debugging
* Removed bluez apt source
## v0.1.8 2017-02-23
* Updated readme
* Updated dockerfile
* Micro:bit now restarts after an update
* Removed trailing new lines from log output
* Changed default log level to Info
* Updated gitignore
* Removed images
## v0.1.7 2017-02-21
* Removed resin sync from repo
* Refactored bluetooth
* Auto build binaries when a tag is created on master
## v0.1.6 2016-11-28
* Merge pull request #86 from resin-io/develop
## v0.1.5 2016-11-01
* Set up flag and logic to ensure is_online is always sent first time (#50)
* 17 send progress dep device logs (#58)
* Enforce order (#59)
* Tidied logging (#60)
* Turned off notifications
* Fixing dep. device delete (#61)
* 36 dep device env and config (#62)
* 63 pull down fields (#64)
* 36 dep device env and config (#65)
## v0.1.4 2016-10-26
* Fixed device deletion (#44)
* Auto board type (#45)
* Auto board type (#46)
## v0.1.3 2016-10-25
* Merge pluginification into develop
* Custom device name issue#35 (#37)
* Quick online state bug fix
* Another quick bug fix
* Microbit (#40)
## v0.1.2 2016-10-18
* Merge develop into master
## v0.1.1 2016-10-11
* Merge develop into master
## v0.1.0 2016-10-04
* Updated git ignore
* Merge initial-architecture into develop
* Merge bluetooth-radio into develop
* Merge godoc-comments into develop
* Merge linting into develop
* Merge proxyvisor-integration into develop
* Merge docker into develop
* Bumped docker version to 1.7
* Refactored build method slightly
* Merge testing into develop
<file_sep>/api/routes.go
package api
import "net/http"
// Route contains all the variables needed to define a single API route:
// a human readable name, the HTTP method, the URL pattern (gorilla/mux
// syntax) and the handler function.
type Route struct {
	Name        string
	Method      string
	Pattern     string
	HandlerFunc http.HandlerFunc
}

// Routes holds all the routes assigned to the API
type Routes []Route

// routes is the route table served to the supervisor: per-device update,
// delete, restart and query endpoints, plus process status get/set.
var routes = Routes{
	Route{
		"DependentDeviceUpdate",
		"PUT",
		"/v1/devices/{uuid}",
		DependentDeviceUpdate,
	},
	Route{
		"DependentDeviceDelete",
		"DELETE",
		"/v1/devices/{uuid}",
		DependentDeviceDelete,
	},
	Route{
		"DependentDeviceRestart",
		"PUT",
		"/v1/devices/{uuid}/restart",
		DependentDeviceRestart,
	},
	Route{
		"DependentDevicesQuery",
		"GET",
		"/v1/devices",
		DependentDevicesQuery,
	},
	Route{
		"DependentDeviceQuery",
		"GET",
		"/v1/devices/{uuid}",
		DependentDeviceQuery,
	},
	Route{
		"SetStatus",
		"PUT",
		"/v1/enm/status",
		SetStatus,
	},
	Route{
		"GetStatus",
		"GET",
		"/v1/enm/status",
		GetStatus,
	},
}
<file_sep>/application/application.go
package application
import (
"encoding/json"
"fmt"
"github.com/resin-io/edge-node-manager/board"
"github.com/resin-io/edge-node-manager/board/esp8266"
"github.com/resin-io/edge-node-manager/board/microbit"
"github.com/resin-io/edge-node-manager/board/nrf51822dk"
)
// Application models a dependent application as reported by the supervisor,
// augmented with the board implementation resolved from RESIN_HOST_TYPE.
// Board and BoardType are populated locally (see Unmarshal) and are never
// part of the supervisor JSON.
type Application struct {
	Board        board.Interface        `json:"-"`
	BoardType    board.Type             `json:"-"`
	Name         string                 `json:"name"`
	ResinUUID    int                    `json:"id"`
	Commit       string                 `json:"-"`      // Ignore this when unmarshalling from the supervisor as we want to set the target commit
	TargetCommit string                 `json:"commit"` // Set json tag to commit as the supervisor has no concept of target commit
	Config       map[string]interface{} `json:"config"`
}
// String renders the application in a single human readable line,
// mainly for debug logging.
func (a Application) String() string {
	format := "Board type: %s, " +
		"Name: %s, " +
		"Resin UUID: %d, " +
		"Commit: %s, " +
		"Target commit: %s, " +
		"Config: %v"
	return fmt.Sprintf(format, a.BoardType, a.Name, a.ResinUUID, a.Commit, a.TargetCommit, a.Config)
}
// Unmarshal parses the JSON application list supplied by the supervisor and
// returns a map keyed by each application's resin UUID. Applications whose
// RESIN_HOST_TYPE config entry is missing or not a recognised board type
// are skipped.
func Unmarshal(bytes []byte) (map[int]Application, error) {
	var parsed []Application
	if err := json.Unmarshal(bytes, &parsed); err != nil {
		return nil, err
	}

	applications := make(map[int]Application)
	for index := range parsed {
		hostType, ok := parsed[index].Config["RESIN_HOST_TYPE"]
		if !ok {
			continue
		}

		boardType := (board.Type)(hostType.(string))

		var b board.Interface
		switch boardType {
		case board.MICROBIT:
			b = microbit.Microbit{}
		case board.NRF51822DK:
			b = nrf51822dk.Nrf51822dk{}
		case board.ESP8266:
			b = esp8266.Esp8266{}
		default:
			continue
		}

		app := parsed[index]
		app.BoardType = boardType
		app.Board = b
		applications[app.ResinUUID] = app
	}

	return applications, nil
}
<file_sep>/radio/wifi/nm.go
package wifi
import (
	"fmt"
	"time"

	"github.com/godbus/dbus"
)
// NmDeviceState mirrors NetworkManager's NMDeviceState enumeration; the
// values are the raw uint32 codes exposed over D-Bus.
type NmDeviceState uint32

const (
	NmDeviceStateUnknown      NmDeviceState = 0
	NmDeviceStateUnmanaged    NmDeviceState = 10
	NmDeviceStateUnavailable  NmDeviceState = 20
	NmDeviceStateDisconnected NmDeviceState = 30
	NmDeviceStatePrepare      NmDeviceState = 40
	NmDeviceStateConfig       NmDeviceState = 50
	NmDeviceStateNeed_auth    NmDeviceState = 60
	NmDeviceStateIp_config    NmDeviceState = 70
	NmDeviceStateIp_check     NmDeviceState = 80
	NmDeviceStateSecondaries  NmDeviceState = 90
	NmDeviceStateActivated    NmDeviceState = 100
	NmDeviceStateDeactivating NmDeviceState = 110
	NmDeviceStateFailed       NmDeviceState = 120
)

// NmDeviceType mirrors NetworkManager's NMDeviceType enumeration.
type NmDeviceType uint32

const (
	NmDeviceTypeUnknown    NmDeviceType = 0
	NmDeviceTypeEthernet   NmDeviceType = 1
	NmDeviceTypeWifi       NmDeviceType = 2
	NmDeviceTypeUnused1    NmDeviceType = 3
	NmDeviceTypeUnused2    NmDeviceType = 4
	NmDeviceTypeBt         NmDeviceType = 5
	NmDeviceTypeOlpcMesh   NmDeviceType = 6
	NmDeviceTypeWimax      NmDeviceType = 7
	NmDeviceTypeModem      NmDeviceType = 8
	NmDeviceTypeInfiniband NmDeviceType = 9
	NmDeviceTypeBond       NmDeviceType = 10
	NmDeviceTypeVlan       NmDeviceType = 11
	NmDeviceTypeAdsl       NmDeviceType = 12
	NmDeviceTypeBridge     NmDeviceType = 13
	NmDeviceTypeGeneric    NmDeviceType = 14
	NmDeviceTypeTeam       NmDeviceType = 15
)

// NmActiveConnectionState mirrors NetworkManager's
// NMActiveConnectionState enumeration.
type NmActiveConnectionState uint32

const (
	NmActiveConnectionStateUnknown      NmActiveConnectionState = 0
	NmActiveConnectionStateActivating   NmActiveConnectionState = 1
	NmActiveConnectionStateActivated    NmActiveConnectionState = 2
	NmActiveConnectionStateDeactivating NmActiveConnectionState = 3
	NmActiveConnectionStateDeactivated  NmActiveConnectionState = 4
)

// NmDevice is a snapshot of a NetworkManager device: its D-Bus object
// path, current state, device type and interface name.
type NmDevice struct {
	nmPath      dbus.ObjectPath
	nmState     NmDeviceState
	nmType      NmDeviceType
	nmInterface string
}
// removeHotspotConnections deletes the NetworkManager connection whose id
// matches the given SSID (if any) and blocks until the deletion has been
// observed. Returns nil when no matching connection exists.
func removeHotspotConnections(ssid string) error {
	settingsObject, err := getConnection(ssid)
	if err != nil {
		return err
	} else if settingsObject == nil {
		return nil
	}

	if err := settingsObject.Call("org.freedesktop.NetworkManager.Settings.Connection.Delete", 0).Store(); err != nil {
		return err
	}

	// Wait for the deletion to propagate. The original loop polled with no
	// delay, hammering the D-Bus daemon at 100% CPU; sleep between polls.
	for {
		settingsObject, err := getConnection(ssid)
		if err != nil {
			return err
		}
		if settingsObject == nil {
			break
		}
		time.Sleep(100 * time.Millisecond)
	}

	return nil
}
// getConnection scans all NetworkManager connection settings and returns
// the D-Bus object whose connection id equals the given SSID, or (nil, nil)
// when no such connection exists.
func getConnection(ssid string) (dbus.BusObject, error) {
	connection, err := dbus.SystemBus()
	if err != nil {
		return nil, err
	}

	// List the object paths of every saved connection
	var settingsPaths []dbus.ObjectPath
	settingsObject := connection.Object("org.freedesktop.NetworkManager", "/org/freedesktop/NetworkManager/Settings")
	if err := settingsObject.Call("org.freedesktop.NetworkManager.Settings.ListConnections", 0).Store(&settingsPaths); err != nil {
		return nil, err
	}

	// Fetch each connection's settings and match on connection.id
	for _, settingsPath := range settingsPaths {
		var settings map[string]map[string]dbus.Variant
		settingsObject := connection.Object("org.freedesktop.NetworkManager", settingsPath)
		if err := settingsObject.Call("org.freedesktop.NetworkManager.Settings.Connection.GetSettings", 0).Store(&settings); err != nil {
			return nil, err
		}

		if settings["connection"]["id"].Value().(string) == ssid {
			return settingsObject, nil
		}
	}

	return nil, nil
}
// isEthernetConnected reports whether any ethernet device is currently in
// the activated state.
func isEthernetConnected() (bool, error) {
	devices, err := getDevices()
	if err != nil {
		return false, err
	}

	for _, candidate := range devices {
		isEthernet := candidate.nmType == NmDeviceTypeEthernet
		isActive := candidate.nmState == NmDeviceStateActivated
		if isEthernet && isActive {
			return true, nil
		}
	}

	return false, nil
}
// getWifiDevice returns the first wifi-capable device known to
// NetworkManager, regardless of its current state.
func getWifiDevice() (NmDevice, error) {
	devices, err := getDevices()
	if err != nil {
		return NmDevice{}, err
	}

	for _, candidate := range devices {
		if candidate.nmType == NmDeviceTypeWifi {
			return candidate, nil
		}
	}

	return NmDevice{}, fmt.Errorf("No wifi device found")
}
// getFreeWifiDevice returns the first wifi device that is currently
// disconnected and therefore available to host the hotspot.
func getFreeWifiDevice() (NmDevice, error) {
	devices, err := getDevices()
	if err != nil {
		return NmDevice{}, err
	}

	for _, candidate := range devices {
		isWifi := candidate.nmType == NmDeviceTypeWifi
		isFree := candidate.nmState == NmDeviceStateDisconnected
		if isWifi && isFree {
			return candidate, nil
		}
	}

	return NmDevice{}, fmt.Errorf("No free wifi device found")
}
// createHotspotConnection creates and activates a WPA-PSK access-point
// connection on the given wifi device, blocking until NetworkManager
// reports the connection as activated.
func createHotspotConnection(device NmDevice, ssid, password string) error {
	connection, err := dbus.SystemBus()
	if err != nil {
		return err
	}

	hotspot := make(map[string]map[string]interface{})
	hotspot["802-11-wireless"] = make(map[string]interface{})
	hotspot["802-11-wireless"]["band"] = "bg"
	hotspot["802-11-wireless"]["hidden"] = false
	hotspot["802-11-wireless"]["mode"] = "ap"
	hotspot["802-11-wireless"]["security"] = "802-11-wireless-security"
	hotspot["802-11-wireless"]["ssid"] = []byte(ssid)
	hotspot["802-11-wireless-security"] = make(map[string]interface{})
	hotspot["802-11-wireless-security"]["key-mgmt"] = "wpa-psk"
	hotspot["802-11-wireless-security"]["psk"] = password
	hotspot["connection"] = make(map[string]interface{})
	hotspot["connection"]["autoconnect"] = false
	hotspot["connection"]["id"] = ssid
	hotspot["connection"]["interface-name"] = device.nmInterface
	// Bug fix: this was "801-11-wireless" (typo); NetworkManager's wifi
	// connection type is "802-11-wireless"
	hotspot["connection"]["type"] = "802-11-wireless"
	hotspot["ipv4"] = make(map[string]interface{})
	hotspot["ipv4"]["method"] = "shared"

	var path, activeConnectionPath dbus.ObjectPath
	rootObject := connection.Object("org.freedesktop.NetworkManager", "/org/freedesktop/NetworkManager")
	if err := rootObject.Call(
		"org.freedesktop.NetworkManager.AddAndActivateConnection",
		0,
		hotspot,
		device.nmPath,
		dbus.ObjectPath("/")).
		Store(&path, &activeConnectionPath); err != nil {
		return err
	}

	// Poll the active connection until it reaches the activated state,
	// sleeping between polls so we do not spin on the D-Bus daemon
	activeConnectionObject := connection.Object("org.freedesktop.NetworkManager", activeConnectionPath)
	for {
		value, err := getProperty(activeConnectionObject, "org.freedesktop.NetworkManager.Connection.Active.State")
		if err != nil {
			return err
		}

		if NmActiveConnectionState(value.(uint32)) == NmActiveConnectionStateActivated {
			break
		}

		time.Sleep(100 * time.Millisecond)
	}

	return nil
}
// getDevices returns a snapshot (path, state, type, interface name) of
// every device known to NetworkManager.
func getDevices() ([]NmDevice, error) {
	connection, err := dbus.SystemBus()
	if err != nil {
		return nil, err
	}

	var paths []dbus.ObjectPath
	rootObject := connection.Object("org.freedesktop.NetworkManager", "/org/freedesktop/NetworkManager")
	if err := rootObject.Call("org.freedesktop.NetworkManager.GetAllDevices", 0).Store(&paths); err != nil {
		return nil, err
	}

	// Bug fix: this was make([]NmDevice, 5), which prepended five
	// zero-value devices to the result; preallocate capacity only
	devices := make([]NmDevice, 0, len(paths))
	for _, path := range paths {
		deviceObject := connection.Object("org.freedesktop.NetworkManager", path)

		device := NmDevice{}
		device.nmPath = path

		value, err := getProperty(deviceObject, "org.freedesktop.NetworkManager.Device.State")
		if err != nil {
			return nil, err
		}
		device.nmState = NmDeviceState(value.(uint32))

		value, err = getProperty(deviceObject, "org.freedesktop.NetworkManager.Device.DeviceType")
		if err != nil {
			return nil, err
		}
		device.nmType = NmDeviceType(value.(uint32))

		value, err = getProperty(deviceObject, "org.freedesktop.NetworkManager.Device.Interface")
		if err != nil {
			return nil, err
		}
		device.nmInterface = value.(string)

		devices = append(devices, device)
	}

	return devices, nil
}
// getProperty reads a D-Bus property from the given object and unwraps the
// variant into its underlying Go value.
func getProperty(object dbus.BusObject, property string) (interface{}, error) {
	value, err := object.GetProperty(property)
	if err != nil {
		return nil, err
	}

	return value.Value(), nil
}
<file_sep>/process/process.go
package process
import (
"os"
"path"
"strconv"
"time"
log "github.com/Sirupsen/logrus"
"github.com/asdine/storm"
"github.com/asdine/storm/index"
"github.com/asdine/storm/q"
"github.com/fredli74/lockfile"
"github.com/resin-io/edge-node-manager/application"
"github.com/resin-io/edge-node-manager/config"
"github.com/resin-io/edge-node-manager/device"
deviceStatus "github.com/resin-io/edge-node-manager/device/status"
processStatus "github.com/resin-io/edge-node-manager/process/status"
"github.com/resin-io/edge-node-manager/supervisor"
tarinator "github.com/verybluebot/tarinator-go"
)
var (
	// CurrentStatus is the process state the manager is actually in
	CurrentStatus processStatus.Status
	// TargetStatus is the state requested via the API (see pause())
	TargetStatus processStatus.Status
	// updateRetries, pauseDelay and lockLocation are loaded from config in init()
	updateRetries int
	pauseDelay    time.Duration
	lockLocation  string
	// lock is the resin update lock held while an application is processed
	lock *lockfile.LockFile
)
// Run executes one full processing pass for a single gateway application:
// pause handling, radio setup, update locking, deletion handling, device
// provisioning, state sync and (when commits differ) firmware updates.
// It returns a slice of errors so multi-error supervisor responses can be
// propagated unchanged; nil means the pass completed.
func Run(a application.Application) []error {
	log.Info("----------------------------------------")

	// Pause the process if necessary
	if err := pause(); err != nil {
		return []error{err}
	}

	// Initialise the radio
	if err := a.Board.InitialiseRadio(); err != nil {
		return []error{err}
	}
	defer a.Board.CleanupRadio()

	if log.GetLevel() == log.DebugLevel {
		log.WithFields(log.Fields{
			"Application": a,
		}).Debug("Processing application")
	} else {
		log.WithFields(log.Fields{
			"Application": a.Name,
		}).Info("Processing application")
	}

	// Enable update locking so the host OS does not reboot mid-update
	var err error
	lock, err = lockfile.Lock(lockLocation)
	if err != nil {
		return []error{err}
	}
	defer lock.Unlock()

	// Handle delete flags
	if err := handleDelete(a); err != nil {
		return []error{err}
	}

	// Get all online devices associated with this application
	onlineDevices, err := getOnlineDevices(a)
	if err != nil {
		return []error{err}
	}

	// Get all provisioned devices associated with this application
	provisionedDevices, err := getProvisionedDevices(a)
	if err != nil {
		return []error{err}
	}

	if log.GetLevel() == log.DebugLevel {
		log.WithFields(log.Fields{
			"Provisioned devices": provisionedDevices,
		}).Debug("Processing application")
	} else {
		log.WithFields(log.Fields{
			"Number of provisioned devices": len(provisionedDevices),
		}).Info("Processing application")
	}

	// Convert provisioned devices to a hash map keyed by local UUID so the
	// provisioning loop below has O(1) membership checks
	hashmap := make(map[string]struct{})
	var s struct{}
	for _, value := range provisionedDevices {
		hashmap[value.LocalUUID] = s
	}

	// Provision all unprovisioned devices associated with this application
	for key := range onlineDevices {
		if _, ok := hashmap[key]; ok {
			// Device already provisioned
			continue
		}

		// Device not already provisioned
		if errs := provisionDevice(a, key); errs != nil {
			return errs
		}
	}

	// Refresh all provisioned devices associated with this application
	// (the database contents changed during provisioning)
	provisionedDevices, err = getProvisionedDevices(a)
	if err != nil {
		return []error{err}
	}

	// Sync all provisioned devices associated with this application
	for _, value := range provisionedDevices {
		if errs := value.Sync(); errs != nil {
			return errs
		}

		if err := updateDevice(value); err != nil {
			return []error{err}
		}
	}

	// Refresh all provisioned devices associated with this application
	provisionedDevices, err = getProvisionedDevices(a)
	if err != nil {
		return []error{err}
	}

	// Set state for all provisioned devices associated with this application
	for _, value := range provisionedDevices {
		if _, ok := onlineDevices[value.LocalUUID]; ok {
			value.Status = deviceStatus.IDLE
		} else {
			value.Status = deviceStatus.OFFLINE
		}

		if err := updateDevice(value); err != nil {
			return []error{err}
		}

		if errs := sendState(value); errs != nil {
			return errs
		}
	}

	// Refresh all provisioned devices associated with this application
	provisionedDevices, err = getProvisionedDevices(a)
	if err != nil {
		return []error{err}
	}

	// Update all online, outdated, provisioned devices associated with this application
	for _, value := range provisionedDevices {
		if (value.Commit != value.TargetCommit) && (value.Status != deviceStatus.OFFLINE) {
			// Populate board (and micro) for the device
			if err := value.PopulateBoard(); err != nil {
				return []error{err}
			}

			// Perform the update
			if errs := updateFirmware(value); errs != nil {
				return errs
			}
		}
	}

	return nil
}
// init loads the pause delay, update retry count and lock file location
// from config, and marks the process RUNNING. Configuration failures are
// fatal because the process loop cannot operate without them.
func init() {
	log.SetLevel(config.GetLogLevel())

	var err error
	if pauseDelay, err = config.GetPauseDelay(); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to load pause delay")
	}

	if updateRetries, err = config.GetUpdateRetries(); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Fatal("Unable to update retries")
	}

	lockLocation = config.GetLockFileLocation()

	CurrentStatus = processStatus.RUNNING
	TargetStatus = processStatus.RUNNING

	log.Debug("Initialised process")
}
// pause blocks while TargetStatus is PAUSED (as requested through the API),
// mirroring the transition into CurrentStatus so it can be reported, and
// returns once the process has been resumed.
func pause() error {
	if TargetStatus != processStatus.PAUSED {
		return nil
	}

	setStatus := func(status processStatus.Status) {
		CurrentStatus = status
		log.WithFields(log.Fields{
			"Status": CurrentStatus,
		}).Info("Process status")
	}

	setStatus(processStatus.PAUSED)
	for TargetStatus == processStatus.PAUSED {
		time.Sleep(pauseDelay)
	}
	setStatus(processStatus.RUNNING)

	return nil
}
// getOnlineDevices scans for online dependent devices belonging to the
// given application, returning a set keyed by local UUID.
func getOnlineDevices(a application.Application) (map[string]struct{}, error) {
	devices, err := a.Board.Scan(a.ResinUUID)
	if err != nil {
		return nil, err
	}

	log.WithFields(log.Fields{
		"Number of online devices": len(devices),
	}).Info("Processing application")

	return devices, nil
}
// getProvisionedDevices loads every device in the local database that is
// associated with the given application. An empty result is not an error.
func getProvisionedDevices(a application.Application) ([]device.Device, error) {
	db, err := storm.Open(config.GetDbPath())
	if err != nil {
		return nil, err
	}
	defer db.Close()

	var provisionedDevices []device.Device
	// "not found" simply means no devices have been provisioned yet; it is
	// compared by message text - presumably because the sentinel may be
	// re-wrapped by storm (TODO confirm)
	if err := db.Find("ApplicationUUID", a.ResinUUID, &provisionedDevices); err != nil && err.Error() != index.ErrNotFound.Error() {
		return nil, err
	}

	return provisionedDevices, nil
}
// provisionDevice registers a newly discovered device (identified by its
// local UUID) with resin via the supervisor, then persists the resulting
// device record in the local database.
func provisionDevice(a application.Application, localUUID string) []error {
	log.WithFields(log.Fields{
		"Local UUID": localUUID,
	}).Info("Provisioning device")

	resinUUID, name, errs := supervisor.DependentDeviceProvision(a.ResinUUID)
	if errs != nil {
		return errs
	}

	db, err := storm.Open(config.GetDbPath())
	if err != nil {
		return []error{err}
	}
	defer db.Close()

	d := device.New(a.ResinUUID, a.BoardType, name, localUUID, resinUUID)
	if err := db.Save(&d); err != nil {
		return []error{err}
	}

	log.WithFields(log.Fields{
		"Name":       d.Name,
		"Local UUID": d.LocalUUID,
	}).Info("Provisioned device")

	return nil
}
// updateDevice persists the given device record back to the local database.
func updateDevice(d device.Device) error {
	database, err := storm.Open(config.GetDbPath())
	if err != nil {
		return err
	}
	defer database.Close()

	return database.Update(&d)
}
// sendState reports the device's status, commit and online flag (derived
// from the status not being OFFLINE) to the supervisor.
func sendState(d device.Device) []error {
	online := d.Status != deviceStatus.OFFLINE
	return supervisor.DependentDeviceInfoUpdateWithOnlineState(d.ResinUUID, (string)(d.Status), d.Commit, online)
}
// updateFirmware downloads (if not cached) and flashes the target firmware
// onto a single device, retrying up to updateRetries times, and reports the
// INSTALLING/IDLE transitions to the supervisor. Offline devices are
// skipped silently.
// NOTE(review): when every retry fails the status is still reset to IDLE
// and Commit is left unchanged, so the update is retried on the next loop -
// confirm this is the intended behaviour.
func updateFirmware(d device.Device) []error {
	online, err := d.Board.Online()
	if err != nil {
		return []error{err}
	} else if !online {
		return nil
	}

	filepath, err := getFirmware(d)
	if err != nil {
		return []error{err}
	}

	d.Status = deviceStatus.INSTALLING
	if err := updateDevice(d); err != nil {
		return []error{err}
	}
	if errs := sendState(d); errs != nil {
		return errs
	}

	for i := 1; i <= updateRetries; i++ {
		log.WithFields(log.Fields{
			"Name":    d.Name,
			"Attempt": i,
		}).Info("Starting update")

		if err := d.Board.Update(filepath); err != nil {
			log.WithFields(log.Fields{
				"Name":  d.Name,
				"Error": err,
			}).Error("Update failed")
			continue
		} else {
			log.WithFields(log.Fields{
				"Name": d.Name,
			}).Info("Finished update")
			// Only record the new commit after a successful flash
			d.Commit = d.TargetCommit
			break
		}
	}

	d.Status = deviceStatus.IDLE
	if err := updateDevice(d); err != nil {
		return []error{err}
	}
	return sendState(d)
}
// getFirmware ensures the firmware for the device's target commit is
// present on disk (downloading and extracting it via the supervisor when
// missing) and returns the directory containing the extracted files.
func getFirmware(d device.Device) (string, error) {
	// assets/<application UUID>/<target commit>/binary.tar
	dir := path.Join(config.GetAssetsDir(), strconv.Itoa(d.ApplicationUUID), d.TargetCommit)
	tarPath := path.Join(dir, "binary.tar")

	// Download and extract only when the tarball is not already cached
	if _, err := os.Stat(tarPath); os.IsNotExist(err) {
		if err := supervisor.DependentApplicationUpdate(d.ApplicationUUID, d.TargetCommit); err != nil {
			return "", err
		}

		if err := tarinator.UnTarinate(dir, tarPath); err != nil {
			return "", err
		}
	}

	return dir, nil
}
// handleDelete removes from the local database every device of this
// application whose DeleteFlag has been set (via the supervisor API).
// A "not found" result simply means nothing was flagged and is not an error.
func handleDelete(a application.Application) error {
	db, err := storm.Open(config.GetDbPath())
	if err != nil {
		return err
	}
	defer db.Close()

	if err := db.Select(q.Eq("ApplicationUUID", a.ResinUUID), q.Eq("DeleteFlag", true)).Delete(&device.Device{}); err != nil && err.Error() != index.ErrNotFound.Error() {
		return err
	}

	return nil
}
<file_sep>/board/board.go
package board
// Type identifies a supported dependent device board; its value matches the
// RESIN_HOST_TYPE config entry supplied by the supervisor.
type Type string

const (
	// Idiom fix: the original declared only MICROBIT with an explicit Type;
	// the other two were untyped string constants. Give all three the same
	// explicit type (string values unchanged, so existing comparisons and
	// switches behave identically).
	MICROBIT   Type = "microbit"
	NRF51822DK Type = "nrf51822dk"
	ESP8266    Type = "esp8266"
)
// Interface is the contract every supported board implements: radio
// lifecycle, firmware update, device discovery/liveness and the
// restart/identify/config operations exposed through the supervisor API.
type Interface interface {
	InitialiseRadio() error
	CleanupRadio() error
	Update(filePath string) error
	Scan(applicationUUID int) (map[string]struct{}, error)
	Online() (bool, error)
	Restart() error
	Identify() error
	UpdateConfig(interface{}) error
	UpdateEnvironment(interface{}) error
}
<file_sep>/process/status/status.go
package status
// Status defines the process statuses reported and set through the
// /v1/enm/status API.
type Status string

const (
	// Idiom fix: PAUSED was an untyped string constant; give both constants
	// the same explicit type (values unchanged).
	RUNNING Status = "Running"
	PAUSED  Status = "Paused"
)
<file_sep>/api/handlers.go
package api
import (
	"encoding/json"
	"fmt"
	"net/http"

	log "github.com/Sirupsen/logrus"
	"github.com/asdine/storm"
	"github.com/asdine/storm/q"
	"github.com/gorilla/mux"
	"github.com/resin-io/edge-node-manager/config"
	"github.com/resin-io/edge-node-manager/device"
	"github.com/resin-io/edge-node-manager/process"
	"github.com/resin-io/edge-node-manager/process/status"
)
// DependentDeviceUpdate handles the supervisor hook that sets a new target
// commit for the dependent device identified by the {uuid} route variable.
// Body: {"commit": "...", "environment": {...}}.
// NOTE(review): the decoded environment is currently ignored — confirm
// whether it should also be persisted.
// Responds 202 Accepted on success, 500 on decode or database failure.
func DependentDeviceUpdate(w http.ResponseWriter, r *http.Request) {
type dependentDeviceUpdate struct {
Commit      string      `json:"commit"`
Environment interface{} `json:"environment"`
}
decoder := json.NewDecoder(r.Body)
var content dependentDeviceUpdate
if err := decoder.Decode(&content); err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to decode Dependent device update hook")
w.WriteHeader(http.StatusInternalServerError)
return
}
// setField looks the device up by UUID and stores the new target commit.
if err := setField(r, "TargetCommit", content.Commit); err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusAccepted)
}
// DependentDeviceDelete flags the device identified by the {uuid} route
// variable for deletion; the actual removal happens later (see handleDelete).
func DependentDeviceDelete(w http.ResponseWriter, r *http.Request) {
if err := setField(r, "Delete", true); err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
}
// DependentDeviceRestart flags the device identified by the {uuid} route
// variable for restart on the next processing pass.
func DependentDeviceRestart(w http.ResponseWriter, r *http.Request) {
if err := setField(r, "Restart", true); err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
}
// DependentDevicesQuery returns every dependent device in the local database
// as a JSON array. Responds 500 on any database or encoding failure.
func DependentDevicesQuery(w http.ResponseWriter, r *http.Request) {
db, err := storm.Open(config.GetDbPath())
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
defer db.Close()
var d []device.Device
if err := db.All(&d); err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to find devices in database")
w.WriteHeader(http.StatusInternalServerError)
return
}
bytes, err := json.Marshal(d)
if err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to encode devices")
w.WriteHeader(http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
if written, err := w.Write(bytes); (err != nil) || (written != len(bytes)) {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to write response")
// NOTE(review): headers were already sent by Write above, so this
// WriteHeader is ineffective (logs a "superfluous" warning).
w.WriteHeader(http.StatusInternalServerError)
return
}
log.Debug("Get dependent device")
}
// DependentDeviceQuery returns, as JSON, the single dependent device whose
// local UUID or resin UUID matches the {uuid} route variable.
// Responds 500 on lookup, encoding, or write failure.
func DependentDeviceQuery(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
UUID := vars["uuid"]
db, err := storm.Open(config.GetDbPath())
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
defer db.Close()
var d device.Device
// The hook may address the device by either identifier, so match both.
if err := db.Select(
q.Or(
q.Eq("LocalUUID", UUID),
q.Eq("ResinUUID", UUID),
),
).First(&d); err != nil {
log.WithFields(log.Fields{
"Error": err,
"UUID":  UUID,
}).Error("Unable to find device in database")
// NOTE(review): a missing device also lands here — 404 would be more
// accurate than 500.
w.WriteHeader(http.StatusInternalServerError)
return
}
bytes, err := json.Marshal(d)
if err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to encode device")
w.WriteHeader(http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
if written, err := w.Write(bytes); (err != nil) || (written != len(bytes)) {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to write response")
// NOTE(review): ineffective after Write has sent the headers.
w.WriteHeader(http.StatusInternalServerError)
return
}
log.WithFields(log.Fields{
"Device": d,
}).Debug("Get dependent device")
}
// SetStatus handles the hook that changes the process target status.
// Body: {"targetStatus": "Running"|"Paused"}.
// NOTE(review): the value is not validated against the known statuses, a
// JSON body of "null" leaves content nil and would panic below, and
// process.TargetStatus is a package-level variable written without
// synchronisation — confirm single-threaded access.
func SetStatus(w http.ResponseWriter, r *http.Request) {
type s struct {
TargetStatus status.Status `json:"targetStatus"`
}
var content *s
decoder := json.NewDecoder(r.Body)
if err := decoder.Decode(&content); err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to decode status hook")
w.WriteHeader(http.StatusInternalServerError)
return
}
process.TargetStatus = content.TargetStatus
w.WriteHeader(http.StatusOK)
log.WithFields(log.Fields{
"Target status": process.TargetStatus,
}).Debug("Set status")
}
// GetStatus reports the process's current and target statuses as JSON:
// {"currentStatus": "...", "targetStatus": "..."}.
// Responds 500 on encoding or write failure.
func GetStatus(w http.ResponseWriter, r *http.Request) {
	type s struct {
		CurrentStatus status.Status `json:"currentStatus"`
		TargetStatus  status.Status `json:"targetStatus"`
	}
	content := &s{
		CurrentStatus: process.CurrentStatus,
		TargetStatus:  process.TargetStatus,
	}
	bytes, err := json.Marshal(content)
	if err != nil {
		log.WithFields(log.Fields{
			"Error": err,
		}).Error("Unable to encode status hook")
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	if written, err := w.Write(bytes); (err != nil) || (written != len(bytes)) {
		log.WithFields(log.Fields{
			"Error": err,
		}).Error("Unable to write response")
		// NOTE(review): ineffective after Write has sent the headers.
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	// Typo fix: log field was previously "Curent status".
	log.WithFields(log.Fields{
		"Target status":  process.TargetStatus,
		"Current status": process.CurrentStatus,
	}).Debug("Get status")
}
// setField updates one mutable field on the dependent device identified by
// the {uuid} route variable and persists it to the local database.
//
// Supported keys: "TargetCommit" (string), "Delete" (bool), "Restart" (bool).
// Returns an error when the device cannot be found, the key is unknown, or
// the database update fails.
func setField(r *http.Request, key string, value interface{}) error {
	vars := mux.Vars(r)
	deviceUUID := vars["uuid"]
	db, err := storm.Open(config.GetDbPath())
	if err != nil {
		return err
	}
	defer db.Close()
	var d device.Device
	if err := db.One("ResinUUID", deviceUUID, &d); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
			"UUID":  deviceUUID,
		}).Error("Unable to find device in database")
		return err
	}
	switch key {
	case "TargetCommit":
		d.TargetCommit = value.(string)
	case "Delete":
		d.DeleteFlag = value.(bool)
	case "Restart":
		d.RestartFlag = value.(bool)
	default:
		// Bug fix: this branch previously did `return err`, and err is
		// always nil at this point — unknown keys were silently reported
		// as success to the caller.
		err := fmt.Errorf("unable to set field: unknown key %q", key)
		log.WithFields(log.Fields{
			"Error": err,
			"UUID":  deviceUUID,
			"Key":   key,
			"value": value,
		}).Error("Unable to set field")
		return err
	}
	if err := db.Update(&d); err != nil {
		log.WithFields(log.Fields{
			"Error": err,
			"UUID":  deviceUUID,
		}).Error("Unable to update device in database")
		return err
	}
	log.WithFields(log.Fields{
		"UUID":  deviceUUID,
		"Key":   key,
		"value": value,
	}).Debug("Dependent device field updated")
	return nil
}
<file_sep>/release.sh
#!/bin/bash
# Create (or append to) the GitHub release for the current Travis tag.
# Required environment: ACCOUNT, REPO, ACCESS_TOKEN, TRAVIS_TAG.
# $1: extra text appended to the release body.
set -o errexit
if [ -z "$ACCOUNT" ] || [ -z "$REPO" ] || [ -z "$ACCESS_TOKEN" ] || [ -z "$TRAVIS_TAG" ]; then
	echo "Please set value for ACCOUNT, REPO, ACCESS_TOKEN and TRAVIS_TAG"
	exit 1
fi
echo "Attempting to create a new $TRAVIS_TAG release"
json="{
	\"tag_name\": \"$TRAVIS_TAG\",
	\"name\": \"$TRAVIS_TAG\",
	\"body\": \"Release of $TRAVIS_TAG: [changelog](https://github.com/resin-io/edge-node-manager/blob/master/CHANGELOG.md)\n$1\"
}"
# -i prepends the response headers; the status code is field 2 of the first
# line ("HTTP/1.1 201 Created").
resp=$(curl -i --data "$json" --header "Content-Type:application/json" \
	"https://api.github.com/repos/$ACCOUNT/$REPO/releases?access_token=$ACCESS_TOKEN" | \
	head -n 1 | cut -d' ' -f2)
if [ "$resp" = "201" ]; then
	echo "Success"
elif [ "$resp" = "422" ]; then
	echo "Release already exists, appending instead"
	release=$(curl "https://api.github.com/repos/$ACCOUNT/$REPO/releases/tags/$TRAVIS_TAG")
	id=$(echo "$release" | jq .id)
	body=$(echo "$release" | jq .body)
	# Strip the surrounding quotes jq leaves on the string value.
	body="${body%\"}"
	body="${body#\"}"
	json="{
		\"body\": \"$body\n$1\"
	}"
	# Bug fix: the original PATCH call omitted -i, so the status code was
	# being "parsed" out of the JSON body and could never equal 200.
	resp=$(curl -i --data "$json" --header "Content-Type:application/json" \
		-X PATCH "https://api.github.com/repos/$ACCOUNT/$REPO/releases/$id?access_token=$ACCESS_TOKEN" | \
		head -n 1 | cut -d' ' -f2)
	if [ "$resp" = "200" ]; then
		exit 0
	else
		exit 1
	fi
else
	# Bug fix: any other status previously fell through and exited 0.
	echo "Unexpected response: $resp"
	exit 1
fi
<file_sep>/device/status/status.go
package status
// Status defines the device statuses.
type Status string

// Device states.
// Bug fix: every constant after DOWNLOADING previously omitted the Status
// type, leaving them untyped string constants.
const (
	DOWNLOADING Status = "Downloading"
	INSTALLING  Status = "Installing"
	STARTING    Status = "Starting"
	STOPPING    Status = "Stopping"
	IDLE        Status = "Idle"
	OFFLINE     Status = "Offline"
)
<file_sep>/board/microbit/microbit.go
package microbit
import (
"fmt"
"strconv"
"time"
log "github.com/Sirupsen/logrus"
"github.com/currantlabs/ble"
"github.com/resin-io/edge-node-manager/config"
"github.com/resin-io/edge-node-manager/micro/nrf51822"
"github.com/resin-io/edge-node-manager/radio/bluetooth"
)
// Microbit couples a per-device logger with the shared nRF51822 micro logic.
type Microbit struct {
Log *log.Logger
Micro nrf51822.Nrf51822
}
// dfu is the micro:bit DFU control characteristic (resolved in init);
// shortTimeout is the pause given to the device to disconnect after being
// told to reboot into the bootloader.
var (
dfu *ble.Characteristic
shortTimeout time.Duration
)
// InitialiseRadio delegates radio setup to the micro implementation.
func (b Microbit) InitialiseRadio() error {
return b.Micro.InitialiseRadio()
}
// CleanupRadio delegates radio teardown to the micro implementation.
func (b Microbit) CleanupRadio() error {
return b.Micro.CleanupRadio()
}
// Update flashes the firmware found under filePath onto the micro:bit.
// It extracts the .bin/.dat pair, reboots the device into its DFU bootloader
// if it is not already advertising as "DfuTarg", reconnects, and hands the
// transfer off to the nRF51822 DFU implementation.
func (b Microbit) Update(filePath string) error {
b.Log.Info("Starting update")
if err := b.Micro.ExtractFirmware(filePath, "micro-bit.bin", "micro-bit.dat"); err != nil {
return err
}
name, err := bluetooth.GetName(b.Micro.LocalUUID)
if err != nil {
return err
}
// "DfuTarg" is the name the device advertises once in bootloader mode.
if name != "DfuTarg" {
b.Log.Debug("Starting bootloader")
client, err := bluetooth.Connect(b.Micro.LocalUUID)
if err != nil {
return err
}
// Ignore the error because this command causes the device to disconnect
bluetooth.WriteCharacteristic(client, dfu, []byte{nrf51822.Start}, false)
// Give the device time to disconnect
time.Sleep(shortTimeout)
b.Log.Debug("Started bootloader")
} else {
b.Log.Debug("Bootloader already started")
}
// Reconnect to the (now rebooted) device and run the DFU transfer.
client, err := bluetooth.Connect(b.Micro.LocalUUID)
if err != nil {
return err
}
if err := b.Micro.Update(client); err != nil {
return err
}
b.Log.Info("Finished update")
return nil
}
// Scan searches for micro:bits advertising the application's device name
// ("BBC micro:bit [<applicationUUID>]") and returns the set of IDs found.
func (b Microbit) Scan(applicationUUID int) (map[string]struct{}, error) {
id := "BBC micro:bit [" + strconv.Itoa(applicationUUID) + "]"
return bluetooth.Scan(id)
}
// Online reports whether the device is currently visible over bluetooth.
func (b Microbit) Online() (bool, error) {
return bluetooth.Online(b.Micro.LocalUUID)
}
// Restart is not supported for this board yet.
func (b Microbit) Restart() error {
b.Log.Info("Restarting...")
return fmt.Errorf("Restart not implemented")
}
// Identify is not supported for this board yet.
func (b Microbit) Identify() error {
b.Log.Info("Identifying...")
return fmt.Errorf("Identify not implemented")
}
// UpdateConfig is not supported for this board yet.
func (b Microbit) UpdateConfig(config interface{}) error {
b.Log.WithFields(log.Fields{
"Config": config,
}).Info("Updating config...")
return fmt.Errorf("Update config not implemented")
}
// UpdateEnvironment is not supported for this board yet.
func (b Microbit) UpdateEnvironment(config interface{}) error {
b.Log.WithFields(log.Fields{
"Config": config,
}).Info("Updating environment...")
return fmt.Errorf("Update environment not implemented")
}
// init loads the short bluetooth timeout from config and resolves the
// micro:bit DFU control characteristic. Failures are fatal because the board
// support cannot function without either.
func init() {
log.SetLevel(config.GetLogLevel())
var err error
if shortTimeout, err = config.GetShortBluetoothTimeout(); err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Fatal("Unable to load bluetooth timeout")
}
// UUID of the micro:bit DFU control characteristic, with its value/CCCD
// handles.
dfu, err = bluetooth.GetCharacteristic("e95d93b1251d470aa062fa1922dfa9a8", ble.CharRead+ble.CharWrite, 0x0D, 0x0E)
if err != nil {
log.Fatal(err)
}
log.Debug("Initialised micro:bit characteristics")
}
<file_sep>/radio/wifi/wifi.go
package wifi
import (
"context"
"fmt"
"net/http"
"os"
"strings"
"time"
log "github.com/Sirupsen/logrus"
"github.com/grandcat/zeroconf"
"github.com/parnurzeal/gorequest"
"github.com/resin-io/edge-node-manager/config"
)
// initialised guards Initialise so the hotspot is only created once;
// avahiTimeout bounds each mDNS browse in scan().
var (
initialised bool
avahiTimeout time.Duration
)
// Host is one device discovered via mDNS. The service instance name is
// expected to look like "<deviceType>_<applicationUUID>_<id>".
type Host struct {
ip string
deviceType string
applicationUUID string
id string
}
func Initialise() error {
if initialised {
return nil
}
log.Info("Initialising wifi hotspot")
os.Setenv("DBUS_SYSTEM_BUS_ADDRESS", "unix:path=/host/run/dbus/system_bus_socket")
ssid := config.GetHotspotSSID()
password := config.GetHotspotPassword()
if err := removeHotspotConnections(ssid); err != nil {
return err
}
if delay, err := config.GetHotspotDeleteDelay(); err != nil {
return err
} else {
time.Sleep(delay)
}
// If ethernet is connected, create the hotspot on the first wifi interface found
// If ethernet is not connected, create the hotspot on the first FREE wifi interface found
var device NmDevice
if ethernet, err := isEthernetConnected(); err != nil {
return err
} else if ethernet {
if device, err = getWifiDevice(); err != nil {
return err
}
} else {
if device, err = getFreeWifiDevice(); err != nil {
return err
}
}
if err := createHotspotConnection(device, ssid, password); err != nil {
return err
}
log.WithFields(log.Fields{
"SSID": ssid,
"Password": <PASSWORD>,
"Device": device,
}).Info("Initialised wifi hotspot")
initialised = true
return nil
}
// Cleanup is deliberately a no-op: the hotspot is left running so dependent
// devices stay reachable.
func Cleanup() error {
// Return as we do not want to disable the hotspot
return nil
}
// Scan runs an mDNS browse and returns the set of device IDs that belong to
// the application identified by id.
func Scan(id string) (map[string]struct{}, error) {
	discovered, err := scan()
	if err != nil {
		return nil, err
	}
	matches := make(map[string]struct{})
	for _, h := range discovered {
		if h.applicationUUID != id {
			continue
		}
		matches[h.id] = struct{}{}
	}
	return matches, nil
}
// Online reports whether a device with the given ID was seen during an mDNS
// browse.
func Online(id string) (bool, error) {
	discovered, err := scan()
	if err != nil {
		return false, err
	}
	found := false
	for i := 0; i < len(discovered) && !found; i++ {
		found = discovered[i].id == id
	}
	return found, nil
}
// GetIP returns the IPv4 address of the device with the given ID, or an
// error when the device is not currently discoverable.
func GetIP(id string) (string, error) {
hosts, err := scan()
if err != nil {
return "", err
}
for _, host := range hosts {
if host.id == id {
return host.ip, nil
}
}
return "", fmt.Errorf("Device offline")
}
// PostForm uploads the file at filePath to url as a multipart form field
// named "image" (filename "firmware.bin") and validates a 200 OK response.
func PostForm(url, filePath string) error {
req := gorequest.New()
req.Post(url)
req.Type("multipart")
req.SendFile(filePath, "firmware.bin", "image")
log.WithFields(log.Fields{
"URL":    req.Url,
"Method": req.Method,
}).Info("Posting form")
resp, _, errs := req.End()
return handleResp(resp, errs, http.StatusOK)
}
// init loads the Avahi browse timeout from config; failure is fatal because
// discovery cannot run without it.
func init() {
log.SetLevel(config.GetLogLevel())
var err error
if avahiTimeout, err = config.GetAvahiTimeout(); err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Fatal("Unable to load Avahi timeout")
}
log.Debug("Initialised wifi")
}
// scan browses "_http._tcp" on .local for avahiTimeout and returns every
// service whose instance name parses as "<deviceType>_<applicationUUID>_<id>"
// and that advertises at least one IPv4 address.
func scan() ([]Host, error) {
ctx, cancel := context.WithTimeout(context.Background(), avahiTimeout)
defer cancel()
resolver, err := zeroconf.NewResolver(nil)
if err != nil {
return nil, err
}
entries := make(chan *zeroconf.ServiceEntry)
var hosts []Host
// Collect entries as they arrive; the resolver closes the channel when
// the browse context expires.
// NOTE(review): this goroutine appends to *hosts while the main goroutine
// returns the slice right after <-ctx.Done(); there is no explicit
// happens-before between the final append and that read — confirm the
// zeroconf library drains/closes the channel before the context fires,
// otherwise this is a data race.
go func(entries <-chan *zeroconf.ServiceEntry, hosts *[]Host) {
for entry := range entries {
parts := strings.Split(entry.ServiceRecord.Instance, "_")
if len(entry.AddrIPv4) < 1 || len(parts) < 3 {
continue
}
host := Host{
ip: entry.AddrIPv4[0].String(),
deviceType: parts[0],
applicationUUID: parts[1],
id: parts[2],
}
*hosts = append(*hosts, host)
}
}(entries, &hosts)
err = resolver.Browse(ctx, "_http._tcp", "local", entries)
if err != nil {
log.WithFields(log.Fields{
"Error": err,
}).Error("Unable to scan")
return nil, err
}
// Wait for the browse window to elapse before returning the results.
<-ctx.Done()
return hosts, nil
}
// handleResp validates a gorequest result: it surfaces the first transport
// error, then checks the HTTP status code against the expected one.
func handleResp(resp gorequest.Response, errs []error, statusCode int) error {
	// Bug fix: guard with len() — a non-nil but empty error slice would have
	// panicked on errs[0] under the original nil check.
	if len(errs) > 0 {
		return errs[0]
	}
	if resp.StatusCode != statusCode {
		return fmt.Errorf("Invalid response received: %s", resp.Status)
	}
	log.WithFields(log.Fields{
		"Response": resp.Status,
	}).Debug("Valid response received")
	return nil
}
<file_sep>/device/hook/hook.go
package hook
import (
"io/ioutil"
"regexp"
"github.com/Sirupsen/logrus"
"github.com/resin-io/edge-node-manager/config"
"github.com/resin-io/edge-node-manager/supervisor"
)
// Hook is a logrus hook that forwards a dependent device's log entries to
// the resin supervisor, tagged with the device's resin UUID.
type Hook struct {
ResinUUID string
}
// Fire formats the entry, strips newlines so it fits a single supervisor log
// line, and ships it upstream.
// NOTE(review): the Format error is deliberately ignored — a failed format
// sends an empty message rather than failing the log call.
func (h *Hook) Fire(entry *logrus.Entry) error {
serialised, _ := entry.Logger.Formatter.Format(entry)
message := regexp.MustCompile(`\r?\n`).ReplaceAllString((string)(serialised), "")
supervisor.DependentDeviceLog(h.ResinUUID, message)
return nil
}
// Levels registers the hook for every log level.
func (h *Hook) Levels() []logrus.Level {
return []logrus.Level{
logrus.PanicLevel,
logrus.FatalLevel,
logrus.ErrorLevel,
logrus.WarnLevel,
logrus.InfoLevel,
logrus.DebugLevel,
}
}
// Create builds a logger for one dependent device: local output is discarded
// and every entry is instead forwarded to the supervisor via Hook.
func Create(resinUUID string) *logrus.Logger {
log := logrus.New()
log.Out = ioutil.Discard
log.Level = config.GetDependentLogLevel()
log.Formatter = &logrus.TextFormatter{ForceColors: true, DisableTimestamp: true}
log.Hooks.Add(&Hook{
ResinUUID: resinUUID,
})
return log
}
| 865b98296c3270b9ac59ea02da41f2ed16ea1156 | [
"Markdown",
"Go",
"Shell"
] | 21 | Go | Bucknalla/edge-node-manager | 18ac4ad23c89c3701e58546f9b13c8a4426c6da2 | 086fdd347cd60192ea33a7f9d57a59b577c0b176 | |
refs/heads/master | <file_sep>package lindsay.devon.spring.repository;
import lindsay.devon.spring.domain.Journal;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by devon on 10/20/16.
*/
public interface JournalRepository extends JpaRepository <Journal,Long>{
}
| 548e9ca5d22217d39d51df0aae834a699385c2f3 | [
"Java"
] | 1 | Java | DLindza/SpringBootJournalDemo | afdb5d062c950f3e6712b40012890f23bcb8ecb6 | f858f39bc052efbbfc930098823917aa72eb5223 | |
refs/heads/master | <file_sep><?php
namespace App\Http\Controllers;
use App\Http\Requests\PersonalInformationUser as RequestsPersonalInformationUser;
use App\Http\Requests\PersonalInformationUserRequest;
use App\Models\PersonalInformationUser;
use Illuminate\Http\Request;
class PersonalInformationUserController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
//
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(PersonalInformationUserRequest $request)
{
// dd($request->all());
$data = PersonalInformationUser::create($request->all());
return response()->json(['response'=>$data],200);
}
/**
* Display the specified resource.
*
* @param \App\Models\PersonalInformationUser $personalInformationUser
* @return \Illuminate\Http\Response
*/
public function show(PersonalInformationUser $personalInformationUser)
{
//
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param \App\Models\PersonalInformationUser $personalInformationUser
* @return \Illuminate\Http\Response
*/
public function update(Request $request, PersonalInformationUser $personalInformationUser)
{
//
}
/**
* Remove the specified resource from storage.
*
* @param \App\Models\PersonalInformationUser $personalInformationUser
* @return \Illuminate\Http\Response
*/
public function destroy(PersonalInformationUser $personalInformationUser)
{
//
}
public function getPersonalDataByUserId(Request $request)
{
$userId = $request->post('user_id');
$personalInformation = PersonalInformationUser::firstOrFail()->where('users_id',$userId);
return response()->json(['response'=>$personalInformation],200);
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
class PersonalInformationUser extends Model
{
use HasFactory;
protected $fillable = [
'users_id',
'name',
'lastname',
'day_of_birth',
'document_number',
'document_type_id',
'profile_picture',
'phone_number',
'address'
];
}
<file_sep>
export const fetch_form = ({formulario}) => {
var formulario = document.getElementById('formulario');
formulario.addEventListener('submit', function(e){
console.log('evento de click')
})
}
<file_sep><?php
use App\Http\Controllers\PersonalInformationUserController;
use App\Http\Controllers\UsersController;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Route;
/*
|--------------------------------------------------------------------------
| API Routes
|--------------------------------------------------------------------------
|
| Here is where you can register API routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| is assigned the "api" middleware group. Enjoy building your API!
|
*/
Route::middleware('auth:api')->get('/user', function (Request $request) {
return $request->user();
});
Route::post('login', [UsersController::class, 'login']);
Route::group(['middleware' => ['cors']],function(){
Route::post('register', [UsersController::class, 'register']);
});
Route::Resource('personalInformation', PersonalInformationUserController::class)->except(['create', 'edit']);
Route::post('getPersonalInformation',[PersonalInformationUserController::class,'getPersonalDataByUserId']);
<file_sep>import React, {useState} from 'react'
import {Container} from '@material-ui/core';
const Form_registro = () => {
const [email, setEmail] = useState("")
const [password, setPass] = useState("")
const [form_data, setForm_data] = useState([])
const handdleSubmit = async e => {
// fetch o axios
e.preventDefault()
let form = document.getElementById('form_registro')
// let form = document.getElementById('form_registro')
// console.log(form)
// console.log('hola');
let data = new FormData(form)
data = JSON.stringify({email,password})
console.log(data)
await fetch('http://127.0.0.1:8000/api/register',{
method: 'POST',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: data,
}) .then(datos => datos.json())
.then(dataFormRegistro => {
console.log(dataFormRegistro.errors.email)
setForm_data(dataFormRegistro)
})
}
return (
<div>
<h1>Formulario de registro</h1>
<Container>
<form onSubmit= {handdleSubmit} id="form_registro" >
<label > Email</label>
<input
onChange = {({target : {value}}) => setEmail(value) }
value = {email}
name = "correo"
/>
<label > Password </label>
<input
onChange = {({target : {value}}) => setPass(value) }
value = {password}
name = "password"
/>
<input type="submit" value="submit"/>
</form>
<div>Form data: {JSON.stringify({email, pass: password})} </div>
<div>
{form_data.map( data => (
<p> ${data} </p> //data. cosasssss ctmr
))}
</div>
{/* <form action="hola.php" method="post" id="formulario">
<InputLabel htmlFor="email">Ingrese su Mail</InputLabel>
<Input id="email" type="email" aria-describedby="email-helper" />
<FormHelperText id="email-helper">Ingrese su mail </FormHelperText>
<InputLabel htmlFor="pwd">Ingrese su contraseña</InputLabel>
<Input id="pwd" type="<PASSWORD>" aria-describedby="password-helper" />
<FormHelperText id="password-helper">Ingrese su pass</FormHelperText>
<Button variant="contained" color="primary" type="submit">
Inciar sesión
</Button>
</form> */}
{/* <Grid container>
<Grid item md={12}>
<FormControl >
<InputLabel htmlFor="email">Ingrese su Mail</InputLabel>
<Input id="email" type="email" aria-describedby="email-helper" />
<FormHelperText id="email-helper">Ingrese su mail </FormHelperText>
</FormControl>
</Grid>
<Grid item md={12}>
<FormControl>
<InputLabel htmlFor="pwd">Ingrese su contraseña</InputLabel>
<Input id="pwd" type="<PASSWORD>" aria-describedby="password-helper" />
<FormHelperText id="password-helper">Ingrese su pass</FormHelperText>
</FormControl>
</Grid>
<Grid item md={12} >
<Button variant="contained" color="primary" type="submit">
Inciar sesión
</Button>
</Grid>
</Grid> */}
</Container>
</div>
)
}
export default Form_registro
<file_sep><?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class CreatePersonalInformationUsersTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('personal_information_users', function (Blueprint $table) {
$table->id();
$table->foreignId('users_id')->constrained('users');
$table->string('name');
$table->string('lastname');
$table->date('day_of_birth')->nullable();
$table->integer('document_number');
$table->enum('document_type_id',['DNI','PASAPORTE','LIBRETA CIVICA']);
$table->string('profile_picture')->nullable();
$table->string('phone_number');
$table->string('address');
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('personal_information_users');
}
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Http\Requests\RegisterRequest;
use App\Models\User;
use Illuminate\Http\Request;
class UsersController extends Controller
{
public function register(RegisterRequest $request): \Illuminate\Http\JsonResponse
{
// dd($request->all());
$request->merge(['password' => <PASSWORD>($request->input('password'))]);
$User = User::create($request->only(['email','password']));
return response()->json(['response'=>$User]);
}
public function login()
{
}
}
<file_sep>import './App.scss';
import NavBar from './components/navBar/NavBar';
// import Button from '@material-ui/core/Button';
// import Box from '@material-ui/core/Box';
// import Publication from './components/Publication/Publication';
import Grid_publication from './components/Grid_publication/Grid_publication';
// import Button_donar from './components/Button_donar/Button_donar';
import Button_donar from './components/Button_donar/Button_donar'
import {
BrowserRouter as Router,
Switch,
Route,
Link
} from "react-router-dom";
import Form_registro from './components/Formulario_registro/Form_registro';
function App() {
// const name_button = "Publicar Donación"
return (
<Router>
<Switch>
<Route path="/registro">
<Form_registro />
</Route>
<Route path="/">
<div className="App">
<NavBar />
<Link to="/registro" style={{ textDecoration: 'none' }}>
<Button_donar nameButton = {"Publicar Donación"} />
</Link>
<Link to="/registro" style={{ textDecoration: 'none' }}>
<Button_donar nameButton = {"Publicar Pedido"} />
</Link>
<Grid_publication />
</div>
</Route>
</Switch>
</Router>
);
}
export default App;
<file_sep>import React from 'react'
import Button from '@material-ui/core/Button';
import '../../App'
import './Button_donar.scss'
const Button_donar = ({nameButton}) => {
return (
<Button p={1} bgcolor="grey.300" variant="contained" color="secondary" className="Button_donar">
{nameButton}
</Button>
)
}
export default Button_donar
| aa1fa4b18d99a1d4061985d75ca9672a981c1987 | [
"JavaScript",
"PHP"
] | 9 | PHP | CristianDavidChinoBejar/DonAR | d2f14dac6cb166f0d1bd9fd601c98f561e381a56 | 7839ac473f7728d48ca7dc703f0e3d676f7e3581 | |
refs/heads/master | <file_sep>//
// MyClass.swift
// First
//
// Created by Eugen on 31.10.18.
// Copyright © 2018 Blanorama. All rights reserved.
//
class Debug {
init() {
}
}
| 11d2a51ac27819fa2a385ceb181deb8a9b1d999e | [
"Swift"
] | 1 | Swift | lecodeski/First | 57dbd4d7f84d9e491bc4dd6fb0a5ff29f8e97470 | 8336ded431e58110410f53e09ae4885b999a15d9 | |
refs/heads/master | <file_sep>-- Import foods with volumes in cups
INSERT INTO ingredient (NDB_No, name, gPerCup)
SELECT FOOD_DES.NDB_No, FOOD_DES.Long_Desc, WEIGHT.Gm_Wgt / WEIGHT.Amount
FROM FOOD_DES INNER JOIN WEIGHT ON FOOD_DES.NDB_No = WEIGHT.NDB_No
WHERE WEIGHT.Msre_Desc = "cup";
-- Import foods with volumes in tbsp (16 tbsp / cup)
INSERT INTO ingredient (NDB_No, name, gPerCup)
SELECT FOOD_DES.NDB_No, FOOD_DES.Long_Desc, WEIGHT.Gm_Wgt / WEIGHT.Amount * 16.0
FROM FOOD_DES INNER JOIN WEIGHT ON FOOD_DES.NDB_No = WEIGHT.NDB_No
WHERE WEIGHT.Msre_Desc = "tbsp";
-- Import foods with volumes in fl oz (8 fl oz / cup)
INSERT INTO ingredient (NDB_No, name, gPerCup)
SELECT FOOD_DES.NDB_No, FOOD_DES.Long_Desc, WEIGHT.Gm_Wgt / WEIGHT.Amount * 8.0
FROM FOOD_DES INNER JOIN WEIGHT ON FOOD_DES.NDB_No = WEIGHT.NDB_No
WHERE WEIGHT.Msre_Desc = "fl oz";
-- Import foods with volumes in tsp (48 tsp / cup)
INSERT INTO ingredient (NDB_No, name, gPerCup)
SELECT FOOD_DES.NDB_No, FOOD_DES.Long_Desc, WEIGHT.Gm_Wgt / WEIGHT.Amount * 48.0
FROM FOOD_DES INNER JOIN WEIGHT ON FOOD_DES.NDB_No = WEIGHT.NDB_No
WHERE WEIGHT.Msre_Desc = "tsp";
-- Delete ingredients containing (capitalized) brand names
DELETE FROM ingredient WHERE name REGEXP '.*[A-Z][A-Z].*';
-- Delete all but first of duplicates (by name)
DELETE FROM ingredient WHERE id NOT IN
(SELECT * FROM
(SELECT MIN(id) FROM ingredient group by name)
)<file_sep># Cupful

## Install
[Cupful - Google Play Store](https://play.google.com/store/apps/details?id=com.natalieperna.cupful)
## About
Cupful is an app that allows you to easily convert amounts of ingredients from volume (i.e. cups, ml, tsp, tbsp) to weight (i.e. g, oz, lb), and vice versa, with over 500 different ingredients supported!
It's an essential tool for every baker and chef who values precise measurements in their baking and cooking.
## Screenshots
| Conversion | Ingredients |
|:--------------------------:|:--------------------------:|
| ![Screenshot][screenshot1] | ![Screenshot][screenshot4] |
[screenshot1]: https://github.com/natalieperna/cupful-android/blob/master/images/screenshots/screenshot_1.png
[screenshot4]: https://github.com/natalieperna/cupful-android/blob/master/images/screenshots/screenshot_4.png
<file_sep>package com.natalieperna.cupful.data;
import com.natalieperna.cupful.models.DisplayUnit;
import javax.measure.quantity.Volume;
import javax.measure.quantity.VolumetricDensity;
import javax.measure.unit.NonSI;
import javax.measure.unit.SI;
import javax.measure.unit.Unit;
public class Units {
// Custom volume units
public static final Unit<Volume> TABLESPOON_UK = NonSI.OUNCE_LIQUID_UK.divide(1.6);
public static final Unit<Volume> TEASPOON_UK = TABLESPOON_UK.divide(3);
public static final Unit<Volume> TABLESPOON_US = NonSI.OUNCE_LIQUID_US.divide(2);
public static final Unit<Volume> TEASPOON_US = TABLESPOON_US.divide(3);
public static final Unit<Volume> CUP_UK = NonSI.OUNCE_LIQUID_UK.times(10);
public static final Unit<Volume> CUP_US = NonSI.OUNCE_LIQUID_US.times(8);
// Density unit
public static final Unit<VolumetricDensity> G_PER_CUP = SI.GRAM.divide(CUP_US).asType(VolumetricDensity.class);
private static final DisplayUnit[] UNITS = {
new DisplayUnit<>(CUP_US, "cup (US)"),
new DisplayUnit<>(TABLESPOON_US, "tbsp (US)"),
new DisplayUnit<>(TEASPOON_US, "tsp (US)"),
new DisplayUnit<>(SI.GRAM, "g"),
new DisplayUnit<>(SI.KILOGRAM, "kg"),
new DisplayUnit<>(NonSI.OUNCE, "oz"),
new DisplayUnit<>(NonSI.POUND, "lb"),
new DisplayUnit<>(SI.MILLI(NonSI.LITER), "mL"),
new DisplayUnit<>(NonSI.LITER, "L"),
new DisplayUnit<>(NonSI.OUNCE_LIQUID_US, "fl oz (US)"),
new DisplayUnit<>(CUP_UK, "cup (UK)"),
new DisplayUnit<>(TABLESPOON_UK, "tbsp (UK)"),
new DisplayUnit<>(TEASPOON_UK, "tsp (UK)"),
new DisplayUnit<>(NonSI.OUNCE_LIQUID_UK, "fl oz (UK)"),
};
public static DisplayUnit[] getAll() {
return UNITS;
}
}
| 9730ec425a4f8e467eb001582599ff143905775c | [
"Markdown",
"SQL",
"Java"
] | 3 | SQL | natalieperna/cupful-android | 535e6efde7759bb1ba1ab9266bc755f8abb6c19e | 8867eb9a850dc7d40c71027b457ab751efa1a85d | |
refs/heads/master | <repo_name>examples-unsorted/perl.cli.examples<file_sep>/cli/perl/perl -e printf/perl -e "printf \"%c\n\", $int;" # int 2 ASCII char
#!/usr/bin/env bash
{ set +x; } 2>/dev/null
int=65
perl -e "printf \"%c\n\", $int;" # int 2 ASCII char
for int in {1..255}; do
perl -e "printf \"%c\n\", $int;" # int 2 ASCII char
done
<file_sep>/cli/perl/perl -e 'use Time::HiRes qw(time)/time perl -e 'use Time--HiRes qw(time); print time'
#!/usr/bin/env bash
time perl -e 'use Time::HiRes qw(time); print time'
<file_sep>/cli/perl/function timeout() { perl -e 'alarm shift; exec @ARGV' "$@"; }
#!/bin/sh
function timeout() { perl -e 'alarm shift; exec @ARGV' "$@"; }
timeout 1 sleep 2
<file_sep>/cli/perl/perl -e 'use Time::HiRes qw(time)/perl -e 'use Time--HiRes qw(time); print time'
#!/usr/bin/env bash
perl -e 'use Time::HiRes qw(time); print time'
| 66ab683b74b82825c2a36612f6c079440ce8a1bd | [
"Shell"
] | 4 | Shell | examples-unsorted/perl.cli.examples | 9cca77c27e772586fe4bda619a1cccadcf92814b | 0797e12bc66c6d54f2c5165f0aac5e044a9eb0a9 | |
refs/heads/master | <repo_name>yaolya/exam<file_sep>/include/Hp.h
#pragma once
#include <iostream>
#include "storage.h"
#include "error.h"
class Hp {
private:
Storage<int> m_n; //нормальный вектор
Storage<int> m_x0; //точка,через которую проходит гиперплоскость
int m_size;
public:
Hp(Storage<int>& n, Storage<int>& x0, int razm);
double distance(Storage<int>& point);
void position(Storage<int>& point);
bool operator==(Hp& hp2);
int getSizeofHp() { return --m_size; }
};
<file_sep>/src/main.cpp
#include <iostream>
#include "Hp.h"
/*3. Реализовать класс "гиперплоскость в n-мерном пространстве", с
возможностями проверки совпадения двух гиперплоскостей, а также
проверки для заданной точки нахождения на гиперплоскости, или в
положительном (отрицательном) полупространстве относительно нее.*/
void testCreation() {
Storage<int> nv1;
Storage<int> point;
int n = 2;
//std::cin >> n;
/*std::cout << "Normalny vector" << std::endl;
for (int i = 0; i < n - 1; ++i) {
std::cin >> nv[i];
}
std::cout << "Tochka" << std::endl;
for (int i = 0; i < n - 1; ++i) {
std::cin >> p[i];
}*/
nv1.add(1);
nv1.add(2);
point.add(2);
point.add(4);
Hp hp1(nv1, point, n);
std::cout << hp1.getSizeofHp() << std::endl;
}
void testEqual() {
Storage<int> nv1;
Storage<int> nv2;
Storage<int> nv3;
Storage<int> p;
Storage<int> point;
int n = 2;
nv1.add(1);
nv1.add(2);
nv2.add(2);
nv2.add(4);
nv3.add(2);
nv3.add(7);
p.add(2);
p.add(4);
point.add(2);
point.add(4);
Hp hp1(nv1, p, n);
Hp hp2(nv2, p, n);
Hp hp3(nv3, p, n);
std::cout << "Equal? " << (hp1 == hp2) << std::endl;
std::cout << "Equal? " << (hp1 == hp3) << std::endl;
}
void testDistance() {
Storage<int> nv;
Storage<int> p;
Storage<int> point;
Storage<int> point2;
int n = 2;
nv.add(1);
nv.add(2);
p.add(2);
p.add(4);
point.add(2);
point.add(4);
point2.add(3);
point2.add(8);
Hp hp(nv, p, n);
std::cout << "Distance " << hp.distance(point) << std::endl;
std::cout << "Distance " << hp.distance(point2) << std::endl;
}
void testPosition() {
Storage<int> nv;
Storage<int> p;
Storage<int> point;
Storage<int> point2;
int n = 2;
nv.add(1);
nv.add(2);
p.add(2);
p.add(4);
point.add(2);
point.add(4);
point2.add(3);
point2.add(8);
Hp hp(nv, p, n);
hp.position(point);
hp.position(point2);
}
// Entry point: runs each manual test scenario in sequence. Results are
// written to stdout for visual inspection; there are no assertions.
int main() {
    testCreation();
    testDistance();
    testEqual();
    testPosition();
    return 0;
}
<file_sep>/include/error.h
#pragma once
// Marker exception type thrown on invalid input (see Hp::distance, which
// throws it on a dimension mismatch). Carries no state.
class Error
{
public:
    Error()
    {
    }
};
// Marker exception type for bad arguments. NOTE(review): it is not thrown
// anywhere in the visible sources — confirm whether it is still needed.
class BadArgument
{
public:
    BadArgument()
    {
    }
};<file_sep>/src/Hp.cpp
#include "Hp.h"

#include <cmath>
// Construct a hyperplane from its normal vector, a point lying on it, and
// the dimension of the space.
Hp::Hp(Storage<int>& normal, Storage<int>& origin, int dimension) {
    m_n = normal;
    m_x0 = origin;
    m_size = dimension;
}
// Signed distance from `point` to the hyperplane: ((point - x0) . n) / |n|.
// Throws Error when the point's dimension does not match the hyperplane's.
//
// Fix: the original stored sqrt(sum) in an int (truncation) and then
// performed an integer division, so the returned double lost its
// fractional part. All intermediate values are now doubles.
double Hp::distance(Storage<int>& point) {
    if (point.size() != m_size) throw Error();
    // Dot product of (point - x0) with the normal vector.
    double projection = 0.0;
    for (int i = 0; i < m_size; ++i) {
        projection += (point[i] - m_x0[i]) * m_n[i];
    }
    // Squared length of the normal vector.
    double normSquared = 0.0;
    for (int i = 0; i < m_size; ++i) {
        normSquared += static_cast<double>(m_n[i]) * m_n[i];
    }
    // NOTE(review): a zero normal vector divides by zero here; the original
    // code had the same behavior — confirm callers never construct one.
    return projection / std::sqrt(normSquared);
}
// Two hyperplanes compare equal when they are stored with the same
// reference point and collinear (proportional) normal vectors. As in the
// original, only the first min(m_size, hp2.m_size) components are compared
// when the stored dimensions differ.
//
// Fix: the original tested proportionality with integer modulo, which
// (a) invoked modulo-by-zero (undefined behavior) for any zero component
// and (b) falsely matched non-collinear normals such as (2,4) and (4,2).
// Collinearity is now checked with 2x2 cross-determinants.
//
// NOTE(review): identical hyperplanes stored via *different* reference
// points still compare unequal, exactly as before — confirm that is the
// intended semantics.
bool Hp::operator==(Hp& hp2) {
    int size = (m_size < hp2.m_size) ? m_size : hp2.m_size;
    // The reference points must match component-wise.
    for (int i = 0; i < size; ++i) {
        if (m_x0[i] != hp2.m_x0[i]) return false;
    }
    // Normals are collinear iff every 2x2 cross-determinant vanishes.
    for (int i = 0; i < size; ++i) {
        for (int j = i + 1; j < size; ++j) {
            if (m_n[i] * hp2.m_n[j] != m_n[j] * hp2.m_n[i]) return false;
        }
    }
    return true;
}
// Report where `point` lies relative to the hyperplane, based on the sign
// of (point - x0) . n: positive half-space, negative half-space, or on the
// plane itself. The result is printed to stdout.
void Hp::position(Storage<int>& point) {
    int dot = 0;
    for (int i = 0; i < m_size; ++i) {
        dot += (point[i] - m_x0[i]) * m_n[i];
    }
    if (dot > 0) {
        std::cout << "point is situated in +" << std::endl;
    } else if (dot < 0) {
        std::cout << "point is situated in -" << std::endl;
    } else {
        std::cout << "point belongs" << std::endl;
    }
}
<file_sep>/README.md
# exam
георгин
Exam in computer programming.
| 13aa3e66c33318905b32dd6463e3a30988cfa2f8 | [
"Markdown",
"C++"
] | 5 | C++ | yaolya/exam | 99a2909391b83f9dc43df321b3549d7105d672f4 | 7eb12dcaf6b814cdc3c2b0528579d721cc92e5b7 | |
refs/heads/master | <repo_name>b1rdex/sync<file_sep>/test/FileMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\FileMutex;
use Amp\Sync\Mutex;
class FileMutexTest extends AbstractMutexTest
{
    /**
     * Creates a file-based mutex backed by a unique lock file in the
     * system temporary directory.
     */
    public function createMutex(): Mutex
    {
        $lockFile = \tempnam(\sys_get_temp_dir(), 'mutex-') . '.lock';

        return new FileMutex($lockFile);
    }
}
<file_sep>/test/AbstractKeyedSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Loop;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\KeyedSemaphore;
/**
 * Base test suite for KeyedSemaphore implementations; concrete subclasses
 * supply the implementation under test via createSemaphore().
 */
abstract class AbstractKeyedSemaphoreTest extends AsyncTestCase
{
    /**
     * @param int $size Number of locks available per key.
     */
    abstract public function createSemaphore(int $size): KeyedSemaphore;

    public function testAcquire(): \Generator
    {
        $semaphore = $this->createSemaphore(1);

        $lock = yield $semaphore->acquire('test');
        $lock->release();

        $this->assertTrue($lock->isReleased());
    }

    public function testAcquireMultiple(): \Generator
    {
        // 15 acquisitions against 5 permits with a 100 ms hold each run in
        // three batches, so the whole test must take at least 300 ms.
        $this->setMinimumRuntime(300);
        $this->setTimeout(500);

        $semaphore = $this->createSemaphore(5);

        $iteration = 0;
        while ($iteration++ < 15) {
            $lock = yield $semaphore->acquire('test');

            Loop::delay(100, static function () use ($lock) {
                $lock->release();
            });
        }
    }
}
<file_sep>/test/AbstractSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Loop;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\Semaphore;
/**
 * Base test suite for Semaphore implementations; concrete subclasses
 * supply the implementation under test via createSemaphore().
 */
abstract class AbstractSemaphoreTest extends AsyncTestCase
{
    /**
     * @var \Amp\Sync\Semaphore
     */
    protected $semaphore;
    /**
     * @param int $locks Number of locks in the semaphore.
     *
     * @return \Amp\Sync\Semaphore
     */
    abstract public function createSemaphore(int $locks): Semaphore;
    public function tearDown(): void
    {
        parent::tearDown();
        $this->semaphore = null; // Force Semaphore::__destruct() to be invoked.
    }
    // A semaphore must reject a non-positive permit count with an Error.
    public function testConstructorOnInvalidMaxLocks(): void
    {
        $this->expectException(\Error::class);
        $this->semaphore = $this->createSemaphore(-1);
    }
    // Acquiring and releasing a single lock toggles its released state.
    public function testAcquire(): \Generator
    {
        $this->semaphore = $this->createSemaphore(1);
        $lock = yield $this->semaphore->acquire();
        $this->assertFalse($lock->isReleased());
        $lock->release();
        $this->assertTrue($lock->isReleased());
    }
    // With a single permit, every acquire must wait for the previous
    // holder's release (100 ms apiece), so three holders take >= 300 ms,
    // and each lock reuses ID 0.
    public function testAcquireMultipleFromSingleLockSemaphore(): \Generator
    {
        $this->setMinimumRuntime(300);
        $this->semaphore = $this->createSemaphore(1);
        $lock1 = yield $this->semaphore->acquire();
        $this->assertSame(0, $lock1->getId());
        Loop::delay(100, function () use ($lock1) {
            $lock1->release();
        });
        $lock2 = yield $this->semaphore->acquire();
        $this->assertSame(0, $lock2->getId());
        Loop::delay(100, function () use ($lock2) {
            $lock2->release();
        });
        $lock3 = yield $this->semaphore->acquire();
        $this->assertSame(0, $lock3->getId());
        Loop::delay(100, function () use ($lock3) {
            $lock3->release();
        });
    }
    // With three permits the first three acquires get distinct lock IDs;
    // the fourth acquire waits for lock1's release (100 ms) and reuses its
    // ID, while locks 2-4 hold for a further 200 ms (>= 300 ms in total).
    public function testAcquireMultipleFromMultipleLockSemaphore(): \Generator
    {
        $this->setMinimumRuntime(300);
        $this->semaphore = $this->createSemaphore(3);
        $lock1 = yield $this->semaphore->acquire();
        Loop::delay(100, function () use ($lock1) {
            $lock1->release();
        });
        $lock2 = yield $this->semaphore->acquire();
        $this->assertNotSame($lock1->getId(), $lock2->getId());
        Loop::delay(200, function () use ($lock2) {
            $lock2->release();
        });
        $lock3 = yield $this->semaphore->acquire();
        $this->assertNotSame($lock1->getId(), $lock3->getId());
        $this->assertNotSame($lock2->getId(), $lock3->getId());
        Loop::delay(200, function () use ($lock3) {
            $lock3->release();
        });
        $lock4 = yield $this->semaphore->acquire();
        $this->assertSame($lock1->getId(), $lock4->getId());
        Loop::delay(200, function () use ($lock4) {
            $lock4->release();
        });
    }
    // Data provider: semaphore sizes exercised by the saturation test below.
    public function getSemaphoreSizes(): array
    {
        return [
            [5],
            [10],
            [20],
            [30],
        ];
    }
    /**
     * Saturates a semaphore of $count permits, then acquires once more: the
     * extra acquire has to wait ~100 ms for a release, and its own lock is
     * held for another 100 ms, hence the 200 ms minimum runtime.
     *
     * @dataProvider getSemaphoreSizes
     *
     * @param int $count Number of locks to test.
     */
    public function testAcquireFromMultipleSizeSemaphores(int $count): \Generator
    {
        $this->setMinimumRuntime(200);
        $this->semaphore = $this->createSemaphore($count);
        foreach (\range(0, $count - 1) as $value) {
            $this->semaphore->acquire()->onResolve(function ($exception, $lock) {
                if ($exception) {
                    throw $exception;
                }
                Loop::delay(100, [$lock, "release"]);
            });
        }
        $lock = yield $this->semaphore->acquire();
        Loop::delay(100, [$lock, "release"]);
    }
    // Two acquires issued before either resolves: the second promise must
    // only resolve after the first holder releases at 100 ms.
    public function testSimultaneousAcquire(): \Generator
    {
        $this->setMinimumRuntime(100);
        $this->semaphore = $this->createSemaphore(1);
        $promise1 = $this->semaphore->acquire();
        $promise2 = $this->semaphore->acquire();
        Loop::delay(100, function () use ($promise1) {
            (yield $promise1)->release();
        });
        (yield $promise2)->release();
    }
}
<file_sep>/test/ThreadedSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Delayed;
use Amp\Loop;
use Amp\Sync\Semaphore;
use Amp\Sync\ThreadedSemaphore;
/**
* @requires extension pthreads
*/
/**
 * Runs the shared Semaphore suite against ThreadedSemaphore and verifies
 * that a semaphore is actually shared across pthreads threads.
 */
class ThreadedSemaphoreTest extends AbstractSemaphoreTest
{
    public function createSemaphore(int $locks): Semaphore
    {
        return new ThreadedSemaphore($locks);
    }
    // A lock held inside a spawned thread (for ~1 s) must block this
    // thread's acquire until it is released, so the test takes >= 1100 ms.
    public function testWithinThread(): \Generator
    {
        $semaphore = $this->createSemaphore(1);
        $thread = new class($semaphore) extends \Thread {
            private $semaphore;
            public function __construct(Semaphore $semaphore)
            {
                $this->semaphore = $semaphore;
            }
            public function run()
            {
                // The thread has a fresh process-like context, so Composer's
                // autoloader must be located and loaded again before any
                // library classes can be used.
                // Protect scope by using an unbound closure (protects static access as well).
                (static function () {
                    // Checks both the in-repo and the installed-as-dependency
                    // vendor locations.
                    $paths = [
                        \dirname(__DIR__) . \DIRECTORY_SEPARATOR . "vendor" . \DIRECTORY_SEPARATOR . "autoload.php",
                        \dirname(__DIR__, 3) . \DIRECTORY_SEPARATOR . "autoload.php",
                    ];
                    foreach ($paths as $path) {
                        if (\file_exists($path)) {
                            $autoloadPath = $path;
                            break;
                        }
                    }
                    if (!isset($autoloadPath)) {
                        throw new \Error("Could not locate autoload.php");
                    }
                    require $autoloadPath;
                })->bindTo(null, null)();
                // Hold the lock for one second inside this thread's own loop.
                Loop::run(function () {
                    $lock = yield $this->semaphore->acquire();
                    Loop::delay(1000, [$lock, "release"]);
                });
            }
        };
        $this->setMinimumRuntime(1100);
        $thread->start(\PTHREADS_INHERIT_INI);
        yield new Delayed(500); // Wait for thread to start and obtain lock.
        $lock = yield $semaphore->acquire();
        Loop::delay(100, [$lock, "release"]);
    }
}
<file_sep>/test/LocalKeyedSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\KeyedSemaphore;
use Amp\Sync\LocalKeyedSemaphore;
class LocalKeyedSemaphoreTest extends AbstractKeyedSemaphoreTest
{
    /**
     * Provides the in-process keyed semaphore implementation under test.
     */
    public function createSemaphore(int $size): KeyedSemaphore
    {
        $semaphore = new LocalKeyedSemaphore($size);

        return $semaphore;
    }
}
<file_sep>/test/ThreadedMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Delayed;
use Amp\Loop;
use Amp\Sync\Mutex;
use Amp\Sync\ThreadedMutex;
/**
* @requires extension pthreads
*/
/**
 * Runs the shared Mutex suite against ThreadedMutex and verifies that a
 * mutex is actually shared across pthreads threads.
 */
class ThreadedMutexTest extends AbstractMutexTest
{
    public function createMutex(): Mutex
    {
        return new ThreadedMutex;
    }
    // A lock held inside a spawned thread (for ~1 s) must block this
    // thread's acquire until it is released, so the test takes >= 1100 ms.
    public function testWithinThread(): \Generator
    {
        $mutex = $this->createMutex();
        $thread = new class($mutex) extends \Thread {
            private $mutex;
            public function __construct(Mutex $mutex)
            {
                $this->mutex = $mutex;
            }
            public function run()
            {
                // The thread has a fresh context, so Composer's autoloader
                // must be located and loaded again.
                // Protect scope by using an unbound closure (protects static access as well).
                (static function () {
                    // Checks both the in-repo and the installed-as-dependency
                    // vendor locations.
                    $paths = [
                        \dirname(__DIR__) . \DIRECTORY_SEPARATOR . "vendor" . \DIRECTORY_SEPARATOR . "autoload.php",
                        \dirname(__DIR__, 3) . \DIRECTORY_SEPARATOR . "autoload.php",
                    ];
                    foreach ($paths as $path) {
                        if (\file_exists($path)) {
                            $autoloadPath = $path;
                            break;
                        }
                    }
                    if (!isset($autoloadPath)) {
                        throw new \Error("Could not locate autoload.php");
                    }
                    require $autoloadPath;
                })->bindTo(null, null)();
                // Hold the mutex for one second inside this thread's own loop.
                Loop::run(function () {
                    $lock = yield $this->mutex->acquire();
                    Loop::delay(1000, [$lock, "release"]);
                });
            }
        };
        $this->setMinimumRuntime(1100);
        $thread->start(\PTHREADS_INHERIT_INI);
        yield new Delayed(500); // Wait for thread to start and obtain lock.
        $lock = yield $mutex->acquire();
        Loop::delay(100, [$lock, "release"]);
    }
}
<file_sep>/test/ConcurrentEachTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Iterator;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\LocalSemaphore;
use function Amp\delay;
use function Amp\Sync\ConcurrentIterator\each;
/**
 * Tests for Amp\Sync\ConcurrentIterator\each(): concurrent per-item
 * processing limited by a semaphore, resolving to the number of items.
 */
class ConcurrentEachTest extends AsyncTestCase
{
    // each() must invoke the processor for every item and resolve to the
    // item count.
    public function test(): \Generator
    {
        $this->expectOutputString('123');
        $processor = static function ($job) {
            print $job;
        };
        $this->assertSame(
            3,
            yield each(Iterator\fromIterable([1, 2, 3]), new LocalSemaphore(3), $processor)
        );
    }
    // Items finishing out of order (largest delay first) must still yield
    // the full count when processed concurrently.
    public function testOutputOrder(): \Generator
    {
        $processor = static function ($job) {
            yield delay($job * 100);
        };
        $this->assertSame(
            3,
            yield each(Iterator\fromIterable([3, 2, 1]), new LocalSemaphore(3), $processor)
        );
    }
    // Same input with a single permit, i.e. strictly sequential processing.
    public function testOutputOrderWithoutConcurrency(): \Generator
    {
        $processor = static function ($job) {
            yield delay($job * 100);
        };
        $this->assertSame(
            3,
            yield each(Iterator\fromIterable([3, 2, 1]), new LocalSemaphore(1), $processor)
        );
    }
    // A failing processor must reject each()'s promise; jobs already
    // started (3, 4) still run, but no further items are consumed.
    public function testErrorHandling(): \Generator
    {
        $processor = static function ($job) {
            print $job;
            yield delay(0);
            if ($job === 2) {
                throw new \Exception('Failure');
            }
            return $job;
        };
        // Job 2 errors, so only job 3 and 4 should be executed
        $this->expectOutputString('1234');
        $this->expectException(\Exception::class);
        $this->expectExceptionMessage('Failure');
        yield each(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
    }
    protected function tearDownAsync()
    {
        // Required to make testBackpressure fail instead of the following test
        \gc_collect_cycles();
    }
}
<file_sep>/test/LocalMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\LocalMutex;
use Amp\Sync\Mutex;
class LocalMutexTest extends AbstractMutexTest
{
    /**
     * Provides the in-process mutex implementation under test.
     */
    public function createMutex(): Mutex
    {
        $mutex = new LocalMutex;

        return $mutex;
    }
}
<file_sep>/docs/mutex.md
---
title: Mutex
permalink: /mutex
---
[Mutual exclusion](https://en.wikipedia.org/wiki/Mutual_exclusion) can be achieved using `Amp\Sync\synchronized()` and any `Mutex` implementation, or by manually using the `Mutex` instance to acquire a lock.
Locks are acquired using `Mutex::acquire()`, which returns a `Promise` that resolves to an instance of `Lock` once the lock has been successfully acquired.
As long as the resulting `Lock` object isn't released using `Lock::release()` or by being garbage collected, the holder of the lock can exclusively run some code as long as all other parties running the same code also acquire a lock before doing so.
### Examples
```php
function writeExclusively(Amp\Sync\Mutex $mutex, string $filePath, string $data) {
return Amp\call(function () use ($mutex, $filePath, $data) {
/** @var Amp\Sync\Lock $lock */
$lock = yield $mutex->acquire();
$this->fileHandle = yield Amp\File\open($filePath, 'w');
yield $this->fileHandle->write($data);
$lock->release();
});
}
```
```php
function writeExclusively(Amp\Sync\Mutex $mutex, string $filePath, string $data) {
return Amp\Sync\synchronized($mutex, function () use ($filePath, $data) {
$this->fileHandle = yield Amp\File\open($filePath, 'w');
yield $this->fileHandle->write($data);
});
}
```<file_sep>/test/PrefixedKeyedSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\KeyedSemaphore;
use Amp\Sync\LocalKeyedSemaphore;
use Amp\Sync\PrefixedKeyedSemaphore;
class PrefixedKeyedSemaphoreTest extends AbstractKeyedSemaphoreTest
{
    /**
     * Wraps a local keyed semaphore so that every key is namespaced with
     * the 'prefix.' string.
     */
    public function createSemaphore(int $size): KeyedSemaphore
    {
        $inner = new LocalKeyedSemaphore($size);

        return new PrefixedKeyedSemaphore($inner, 'prefix.');
    }
}
<file_sep>/examples/mutex.php
<?php
use Amp\Sync\LocalMutex;
use Amp\Sync\Lock;
use function Amp\call;
use function Amp\delay;
use function Amp\Promise\all;
use function Amp\Promise\wait;
require __DIR__ . '/../vendor/autoload.php';
// Demo: three concurrently running tasks (A, B, C) repeatedly acquire the
// same LocalMutex; the interleaved log output shows that only one task
// holds the lock at any time.
$mutex = new LocalMutex;
// Each task acquires/releases the mutex three times, holding it for a
// random 0-1000 ms. The try/finally guarantees release even on failure.
$task = function (string $identifier) use ($mutex) {
    print "[$identifier] Starting" . \PHP_EOL;
    for ($i = 0; $i < 3; $i++) {
        print "[$identifier][$i] Acquiring lock" . \PHP_EOL;
        /** @var Lock $lock */
        $lock = yield $mutex->acquire();
        try {
            print "[$identifier][$i] Acquired lock" . \PHP_EOL;
            // do anything exclusively
            yield delay(\random_int(0, 1000));
        } finally {
            print "[$identifier][$i] Releasing lock" . \PHP_EOL;
            $lock->release();
        }
    }
    print "[$identifier] Finished" . \PHP_EOL;
};
// Start all three coroutines, then block until every one has finished.
$promiseA = call($task, 'A');
$promiseB = call($task, 'B');
$promiseC = call($task, 'C');
wait(all([$promiseA, $promiseB, $promiseC]));<file_sep>/test/LocalKeyedMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\KeyedMutex;
use Amp\Sync\LocalKeyedMutex;
class LocalKeyedMutexTest extends AbstractKeyedMutexTest
{
    /**
     * Provides the in-process keyed mutex implementation under test.
     */
    public function createMutex(): KeyedMutex
    {
        $mutex = new LocalKeyedMutex();

        return $mutex;
    }
}
<file_sep>/test/BarrierTest.php
<?php
namespace Amp\Sync\Test;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\Barrier;
/**
 * Tests for Amp\Sync\Barrier. Each test starts from a fresh barrier with a
 * count of 2 (see setUp()): the await() promise resolves once arrive()
 * calls have consumed the whole count.
 */
class BarrierTest extends AsyncTestCase
{
    /** @var Barrier */
    private $barrier;
    // await() resolves only after the count reaches zero.
    public function testArriveUntilResolved(): void
    {
        $resolved = false;
        $this->barrier->await()->onResolve(static function () use (&$resolved) {
            $resolved = true;
        });
        $this->assertFalse($resolved);
        $this->barrier->arrive();
        $this->assertSame(1, $this->barrier->getCount());
        $this->assertFalse($resolved);
        $this->barrier->arrive();
        $this->assertTrue($resolved);
        $this->assertSame(0, $this->barrier->getCount());
    }
    // Arriving at an already-broken barrier is an error.
    public function testArriveAfterResolved(): void
    {
        $this->barrier->arrive();
        $this->barrier->arrive();
        $this->expectException(\Error::class);
        $this->barrier->arrive();
    }
    // arrive($count) may consume multiple slots at once.
    public function testArriveWithCount(): void
    {
        $resolved = false;
        $this->barrier->await()->onResolve(static function () use (&$resolved) {
            $resolved = true;
        });
        $this->assertFalse($resolved);
        $this->barrier->arrive(2);
        $this->assertTrue($resolved);
    }
    // arrive() rejects a count below 1.
    public function testArriveWithInvalidCount(): void
    {
        $this->expectException(\Error::class);
        $this->barrier->arrive(0);
    }
    // arrive() rejects a count larger than the remaining slots.
    public function testArriveTooHighCount(): void
    {
        $this->expectException(\Error::class);
        $this->barrier->arrive(3);
    }
    // getCount() reflects the remaining slots after a partial arrival.
    public function testGetCurrentCount(): void
    {
        $this->barrier->arrive();
        $this->assertEquals(1, $this->barrier->getCount());
    }
    // The constructor rejects a non-positive initial count.
    public function testInvalidSignalCountInConstructor(): void
    {
        $this->expectException(\Error::class);
        new Barrier(0);
    }
    // register() adds one slot, postponing resolution by one arrival.
    public function testRegisterCount(): void
    {
        $resolved = false;
        $this->barrier->await()->onResolve(static function () use (&$resolved) {
            $resolved = true;
        });
        $this->barrier->arrive();
        $this->barrier->register();
        $this->barrier->arrive();
        $this->assertFalse($resolved);
        $this->barrier->arrive();
        $this->assertTrue($resolved);
    }
    // register($count) adds several slots at once.
    public function testRegisterCountWithCustomCount(): void
    {
        $resolved = false;
        $this->barrier->await()->onResolve(static function () use (&$resolved) {
            $resolved = true;
        });
        $this->barrier->arrive();
        $this->barrier->register(2);
        $this->barrier->arrive();
        $this->assertFalse($resolved);
        $this->barrier->arrive();
        $this->assertFalse($resolved);
        $this->barrier->arrive();
        $this->assertTrue($resolved);
    }
    // register() rejects a count below 1, with a specific message.
    public function testRegisterCountWithInvalidCount(): void
    {
        $this->expectException(\Error::class);
        $this->expectExceptionMessage('Count must be at least 1, got 0');
        $this->barrier->register(0);
    }
    // register() on an already-broken barrier is an error.
    public function testRegisterCountWithResolvedBarrier(): void
    {
        $this->barrier->arrive();
        $this->barrier->arrive();
        $this->expectException(\Error::class);
        $this->expectExceptionMessage('Can\'t increase count, because the barrier already broke');
        $this->barrier->register(1);
    }
    protected function setUp(): void
    {
        parent::setUp();
        $this->barrier = new Barrier(2);
    }
}
<file_sep>/docs/concurrent-iterator.md
---
title: Concurrent Iterators
permalink: /concurrent-iterator
---
As already stated in the [preamble of our documentation](https://amphp.org/amp/), the weak link when managing concurrency is humans; so `amphp/sync` provides abstractions to hide the complexity of concurrent iteration.
## Concurrency Approaches
Given you have a list of URLs you want to crawl, let's discuss a few possible approaches. For simplicity, we will assume a `fetch` function already exists, which takes a URL and returns the HTTP status code (which is everything we want to know for these examples).
### Approach 1: Sequential
Simple loop using non-blocking I/O, but no concurrency while fetching the individual URLs; starts the second request as soon as the first completed.
```php
$urls = [...];
Amp\call(function () use ($urls) {
$results = [];
foreach ($urls as $url) {
// always wait for the promise to resolve before fetching the next one
$statusCode = yield fetch($url);
$results[$url] = $statusCode;
}
return $results;
});
```
### Approach 2: Everything Concurrently
Almost the same loop, but awaiting all promises at once; starts all requests immediately. Might not be feasible with too many URLs.
```php
$urls = [...];
Amp\call(function () use ($urls) {
$results = [];
foreach ($urls as $url) {
// note the missing yield, we're adding the promises to the array
$statusCodePromise = fetch($url);
$results[$url] = $statusCodePromise;
}
// yielding an array of promises awaits them all at once
$results = yield $results;
return $results;
});
```
### Approach 3: Concurrent Chunks
Splitting the jobs into chunks of ten; all requests within a chunk are made concurrently, but each chunk sequentially, so the timing for each chunk depends on the slowest response; starts the eleventh request as soon as the first ten requests completed.
```php
$urls = [...];
Amp\call(function () use ($urls) {
$results = [];
foreach (\array_chunk($urls, 10) as $chunk) {
$promises = [];
foreach ($chunk as $url) {
// note the missing yield, we're adding the promises to the array
$statusCodePromise = fetch($url);
$promises[$url] = $statusCodePromise;
}
// yielding an array of promises awaits them all at once
$results = \array_merge($results, yield $promises);
}
return $results;
});
```
### Approach 4: Concurrent Iterator
Concurrent iteration, keeping the concurrency at a maximum of ten; starts the eleventh request as soon as any of the first ten requests completes.
```php
$urls = [...];
Amp\call(function () use ($urls) {
$results = [];
yield Amp\Sync\ConcurrentIterator\each(
Amp\Iterator\fromIterable($urls),
new Amp\Sync\LocalSemaphore(10),
function (string $url) use (&$results) {
$statusCode = yield fetch($url);
$results[$url] = $statusCode;
}
);
return $results;
});
```
## Provided APIs
### `Amp\Sync\ConcurrentIterator\each`
Calls `$processor` for each item in the iterator while acquiring a lock from `$semaphore` during each operation.
The returned `Promise` resolves as soon as the iterator is empty and all operations are completed.
Use `LocalSemaphore` if you don't need to synchronize beyond a single process.
```php
function each(Iterator $iterator, Semaphore $semaphore, callable $processor): Promise
{
// ...
}
```
### `Amp\Sync\ConcurrentIterator\map`
Calls `$processor` for each item in the iterator while acquiring a lock from `$semaphore` during each operation.
Returns a new `Iterator` instance with the return values of `$processor`.
Use `LocalSemaphore` if you don't need to synchronize beyond a single process.
```php
function map(Iterator $iterator, Semaphore $semaphore, callable $processor): Iterator
{
// ...
}
```
### `Amp\Sync\ConcurrentIterator\filter`
Calls `$filter` for each item in the iterator while acquiring a lock from `$semaphore` during each operation.
Returns a new `Iterator` instance with the original values where `$filter` resolves to `true`.
Use `LocalSemaphore` if you don't need to synchronize beyond a single process.
```php
function filter(Iterator $iterator, Semaphore $semaphore, callable $filter): Iterator
{
// ...
}
```
### `Amp\Sync\ConcurrentIterator\transform`
Calls `$processor` for each item in the iterator while acquiring a lock from `$semaphore` during each operation.
`$processor` receives the current element and an `$emit` callable as arguments.
This function can be used to implement additional concurrent iteration functions and is the base for `map`, `filter`, and `each`.
Use `LocalSemaphore` if you don't need to synchronize beyond a single process.
```php
function transform(Iterator $iterator, Semaphore $semaphore, callable $processor): Iterator
{
// ...
}
```<file_sep>/docs/index.md
---
title: Synchronization Tools
permalink: /
---
This package defines synchronization primitives for PHP applications and libraries using Amp, such as locks, semaphores and concurrency limited iterator operations.
## Installation
This package can be installed as a [Composer](https://getcomposer.org/) dependency.
```bash
composer require amphp/sync
```
## Usage
See [`Mutex`](./mutex.md), [`Semaphore`](./semaphore.md), and [concurrent iteration](./concurrent-iterator.md).<file_sep>/test/PosixSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Loop;
use Amp\Sync\PosixSemaphore;
use Amp\Sync\Semaphore;
use Amp\Sync\SyncException;
/**
* @group posix
* @requires extension sysvmsg
*/
class PosixSemaphoreTest extends AbstractSemaphoreTest
{
    // Shared SysV semaphore identifier used by every test in this class.
    const ID = __CLASS__ . '/4';
    /**
     * Builds a per-instance identifier.
     *
     * NOTE(review): not referenced by any test in this file — confirm it is
     * still needed.
     */
    public function makeId(): string
    {
        return \spl_object_hash($this);
    }
    /**
     * @param $locks
     *
     * @return \Amp\Sync\PosixSemaphore
     */
    public function createSemaphore(int $locks): Semaphore
    {
        return PosixSemaphore::create(self::ID, $locks);
    }
    public function testConstructorOnInvalidMaxLocks(): void
    {
        $this->expectException(\Error::class);
        $this->expectExceptionMessage("Number of locks must be greater than 0");
        $this->semaphore = $this->createSemaphore(-1);
    }
    public function testCreateOnInvalidMaxLocks(): void
    {
        $this->expectException(\Error::class);
        PosixSemaphore::create(self::ID, -1);
    }
    // Permissions set through a second handle must be visible through the
    // creating handle.
    public function testGetPermissions(): void
    {
        $this->semaphore = PosixSemaphore::create(self::ID, 1);
        $used = PosixSemaphore::use(self::ID);
        $used->setPermissions(0644);
        $this->assertSame(0644, $this->semaphore->getPermissions());
    }
    public function testGetId(): void
    {
        $this->semaphore = $this->createSemaphore(1);
        $this->assertSame(self::ID, $this->semaphore->getId());
    }
    public function testUseOnInvalidSemaphoreId(): void
    {
        $this->expectException(SyncException::class);
        $this->expectExceptionMessage("No semaphore with that ID found");
        PosixSemaphore::use(1);
    }
    public function testCreateOnDuplicatedSemaphoreId(): void
    {
        $this->expectException(SyncException::class);
        $this->expectExceptionMessage("A semaphore with that ID already exists");
        // Keep the first semaphore on $this->semaphore so tearDown() (in
        // AbstractSemaphoreTest) destroys it. The original left it in a
        // local variable, leaking the underlying system resource between
        // runs, and invoked the static create() through that instance.
        $this->semaphore = PosixSemaphore::create(self::ID, 1);
        PosixSemaphore::create(self::ID, 1);
    }
    // A lock acquired through a use()-handle must block an acquire through
    // the creating handle until released (after 500 ms).
    public function testUse()
    {
        $this->setMinimumRuntime(500);
        $this->semaphore = $this->createSemaphore(1);
        $used = PosixSemaphore::use(self::ID);
        $promise1 = $used->acquire();
        $promise2 = $this->semaphore->acquire();
        Loop::delay(500, function () use ($promise1) {
            (yield $promise1)->release();
        });
        (yield $promise2)->release();
    }
}
<file_sep>/test/ConcurrentMapTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Iterator;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\LocalSemaphore;
use function Amp\delay;
use function Amp\Iterator\toArray;
use function Amp\Sync\ConcurrentIterator\map;
/**
 * Tests for Amp\Sync\ConcurrentIterator\map(): concurrent per-item
 * transformation limited by a semaphore, producing a new iterator.
 */
class ConcurrentMapTest extends AsyncTestCase
{
    // map() must invoke the processor for every item; a processor without a
    // return value yields null per item.
    public function test(): \Generator
    {
        $this->expectOutputString('123');
        $processor = static function ($job) {
            print $job;
        };
        $this->assertSame(
            [null, null, null],
            yield toArray(map(Iterator\fromIterable([1, 2, 3]), new LocalSemaphore(3), $processor))
        );
    }
    // With full concurrency, results are emitted in completion order
    // (shortest delay first), not input order.
    public function testOutputOrder(): \Generator
    {
        $processor = static function ($job) {
            yield delay($job * 100);
            return $job;
        };
        $this->assertSame(
            [1, 2, 3],
            yield toArray(map(Iterator\fromIterable([3, 2, 1]), new LocalSemaphore(3), $processor))
        );
    }
    // With a single permit, processing is sequential and input order is
    // preserved.
    public function testOutputOrderWithoutConcurrency(): \Generator
    {
        $processor = static function ($job) {
            yield delay($job * 100);
            return $job;
        };
        $this->assertSame(
            [3, 2, 1],
            yield toArray(map(Iterator\fromIterable([3, 2, 1]), new LocalSemaphore(1), $processor))
        );
    }
    // An unconsumed result iterator only processes as many items as the
    // semaphore admits (2 here) — the rest wait for a consumer.
    public function testBackpressure(): void
    {
        $this->expectOutputString('12');
        $processor = static function ($job) {
            print $job;
            return $job;
        };
        map(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
    }
    // Consuming one result admits exactly one more item.
    public function testBackpressurePartialConsume1(): \Generator
    {
        $this->expectOutputString('123');
        $processor = static function ($job) {
            print $job;
            return $job;
        };
        $iterator = map(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        yield $iterator->advance();
    }
    // Consuming two results admits two more items.
    public function testBackpressurePartialConsume2(): \Generator
    {
        $this->expectOutputString('1234');
        $processor = static function ($job) {
            print $job;
            return $job;
        };
        $iterator = map(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        yield $iterator->advance();
        yield $iterator->advance();
    }
    // A failing processor must fail the iterator; jobs already started
    // (3, 4) still run, but item 5 is never consumed.
    public function testErrorHandling(): \Generator
    {
        $processor = static function ($job) {
            print $job;
            yield delay(0);
            if ($job === 2) {
                throw new \Exception('Failure');
            }
            return $job;
        };
        $iterator = map(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        // Job 2 errors, so only job 3 and 4 should be executed
        $this->expectOutputString('1234');
        $this->expectException(\Exception::class);
        $this->expectExceptionMessage('Failure');
        yield $iterator->advance();
        yield $iterator->advance();
        yield $iterator->advance();
        yield $iterator->advance();
    }
    // A job already in flight (job 1, suspended at its yield) completes
    // even though job 2 failed synchronously before its first yield.
    public function testErrorHandlingCompletesPending(): \Generator
    {
        $processor = static function ($job) {
            print $job;
            if ($job === 2) {
                throw new \Exception('Failure');
            }
            yield delay(0);
            print $job;
            return $job;
        };
        $iterator = map(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        // Job 2 errors, so only job 3 and 4 should be executed
        $this->expectOutputString('121');
        $this->expectException(\Exception::class);
        $this->expectExceptionMessage('Failure');
        yield $iterator->advance();
        yield $iterator->advance();
    }
    protected function tearDownAsync()
    {
        // Required to make testBackpressure fail instead of the following test
        \gc_collect_cycles();
    }
}
<file_sep>/test/LocalSemaphoreTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\LocalSemaphore;
use Amp\Sync\Semaphore;
class LocalSemaphoreTest extends AbstractSemaphoreTest
{
    /**
     * Provides the in-process semaphore implementation under test.
     */
    public function createSemaphore(int $locks): Semaphore
    {
        $semaphore = new LocalSemaphore($locks);

        return $semaphore;
    }
}
<file_sep>/test/ConcurrentFilterTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Iterator;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\LocalSemaphore;
use function Amp\delay;
use function Amp\Iterator\toArray;
use function Amp\Sync\ConcurrentIterator\filter;
/**
 * Tests for Amp\Sync\ConcurrentIterator\filter(): concurrent predicate
 * evaluation limited by a semaphore, producing an iterator of the items
 * for which the predicate returned true.
 */
class ConcurrentFilterTest extends AsyncTestCase
{
    // filter() evaluates the predicate for every item and keeps only the
    // matching ones.
    public function test(): \Generator
    {
        $this->expectOutputString('123');
        $processor = static function ($job) {
            print $job;
            return $job === 2;
        };
        $this->assertSame(
            [2],
            yield toArray(filter(Iterator\fromIterable([1, 2, 3]), new LocalSemaphore(3), $processor))
        );
    }
    // With full concurrency, results are emitted in completion order
    // (shortest delay first), not input order.
    public function testOutputOrder(): \Generator
    {
        $processor = static function ($job) {
            yield delay($job * 100);
            return true;
        };
        $this->assertSame(
            [1, 2, 3],
            yield toArray(filter(Iterator\fromIterable([3, 2, 1]), new LocalSemaphore(3), $processor))
        );
    }
    // With a single permit, processing is sequential and input order is
    // preserved.
    public function testOutputOrderWithoutConcurrency(): \Generator
    {
        $processor = static function ($job) {
            yield delay($job * 100);
            return true;
        };
        $this->assertSame(
            [3, 2, 1],
            yield toArray(filter(Iterator\fromIterable([3, 2, 1]), new LocalSemaphore(1), $processor))
        );
    }
    // An unconsumed result iterator only processes as many items as the
    // semaphore admits (2 here) — the rest wait for a consumer.
    public function testBackpressure(): void
    {
        $this->expectOutputString('12');
        $processor = static function ($job) {
            print $job;
            return true;
        };
        filter(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
    }
    // Consuming one result admits exactly one more item.
    public function testBackpressurePartialConsume1(): \Generator
    {
        $this->expectOutputString('123');
        $processor = static function ($job) {
            print $job;
            return true;
        };
        $iterator = filter(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        yield $iterator->advance();
    }
    // Consuming two results admits two more items.
    public function testBackpressurePartialConsume2(): \Generator
    {
        $this->expectOutputString('1234');
        $processor = static function ($job) {
            print $job;
            return true;
        };
        $iterator = filter(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        yield $iterator->advance();
        yield $iterator->advance();
    }
    // A throwing predicate must fail the iterator; jobs already started
    // (3, 4) still run, but item 5 is never consumed.
    public function testErrorHandling(): \Generator
    {
        $processor = static function ($job) {
            print $job;
            yield delay(0);
            if ($job === 2) {
                throw new \Exception('Failure');
            }
            return true;
        };
        $iterator = filter(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        // Job 2 errors, so only job 3 and 4 should be executed
        $this->expectOutputString('1234');
        $this->expectException(\Exception::class);
        $this->expectExceptionMessage('Failure');
        yield $iterator->advance();
        yield $iterator->advance();
        yield $iterator->advance();
        yield $iterator->advance();
    }
    // A predicate resolving to a non-boolean must raise a TypeError with a
    // descriptive message.
    public function testInvalidReturn(): \Generator
    {
        $processor = static function ($job) {
            print $job;
            yield delay(0);
            if ($job === 2) {
                return 0;
            }
            return true;
        };
        $iterator = filter(Iterator\fromIterable([1, 2, 3, 4, 5]), new LocalSemaphore(2), $processor);
        // Job 2 errors, so only job 3 and 4 should be executed
        $this->expectOutputString('1234');
        $this->expectException(\TypeError::class);
        $this->expectExceptionMessage('Amp\Sync\ConcurrentIterator\filter\'s callable must resolve to a boolean value, got integer');
        yield $iterator->advance();
        yield $iterator->advance();
        yield $iterator->advance();
        yield $iterator->advance();
    }
    protected function tearDownAsync()
    {
        // Required to make testBackpressure fail instead of the following test
        \gc_collect_cycles();
    }
}
<file_sep>/examples/queue.php
<?php
use Amp\Emitter;
use Amp\Sync\LocalSemaphore;
use function Amp\delay;
use function Amp\Promise\wait;
use function Amp\Sync\ConcurrentIterator\each;
require __DIR__ . '/../vendor/autoload.php';
// Demo: a work queue processed with bounded concurrency (3 workers) where
// running jobs may enqueue additional jobs onto the same Emitter.
$emitter = new Emitter;
$jobId = 0;
// Seed the queue with jobs 0..9.
for ($i = 0; $i < 10; $i++) {
    print 'enqueued ' . $jobId . \PHP_EOL;
    $emitter->emit($jobId++);
}
wait(each(
    $emitter->iterate(),
    new LocalSemaphore(3),
    static function ($job) use ($emitter, &$jobId) {
        print 'starting ' . $job . \PHP_EOL;
        yield delay(1000);
        // Jobs below 10 flip a coin and may enqueue one follow-up job; the
        // job numbered 10 closes the queue.
        // NOTE(review): if no job below 10 wins the coin flip, job 10 is
        // never emitted and complete() is never called, so the script would
        // not terminate — confirm this is acceptable for a demo.
        if ($job < 10) {
            if (\random_int(0, 1)) {
                print 'enqueued ' . $jobId . \PHP_EOL;
                $emitter->emit($jobId++);
            }
        } elseif ($job === 10) {
            $emitter->complete();
        }
        print 'finished ' . $job . \PHP_EOL;
    }
));<file_sep>/test/PrefixedKeyedMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\KeyedMutex;
use Amp\Sync\LocalKeyedMutex;
use Amp\Sync\PrefixedKeyedMutex;
class PrefixedKeyedMutexTest extends AbstractKeyedMutexTest
{
    /**
     * Wraps a local keyed mutex so that every key is namespaced with the
     * 'prefix.' string.
     */
    public function createMutex(): KeyedMutex
    {
        $inner = new LocalKeyedMutex;

        return new PrefixedKeyedMutex($inner, 'prefix.');
    }
}
<file_sep>/test/AbstractMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Loop;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\Mutex;
/**
 * Base test suite for Mutex implementations; concrete subclasses supply
 * the implementation under test via createMutex().
 */
abstract class AbstractMutexTest extends AsyncTestCase
{
    abstract public function createMutex(): Mutex;

    public function testAcquire(): \Generator
    {
        $mutex = $this->createMutex();

        $lock = yield $mutex->acquire();
        $lock->release();

        $this->assertTrue($lock->isReleased());
    }

    public function testAcquireMultiple(): \Generator
    {
        // Three sequential holders, each keeping the lock for 100 ms, so
        // the whole test must take at least 300 ms.
        $this->setMinimumRuntime(300);

        $mutex = $this->createMutex();

        for ($i = 0; $i < 3; $i++) {
            $lock = yield $mutex->acquire();

            Loop::delay(100, static function () use ($lock) {
                $lock->release();
            });
        }
    }
}
<file_sep>/test/SemaphoreMutexTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Sync\LocalSemaphore;
use Amp\Sync\Mutex;
use Amp\Sync\SemaphoreMutex;
class SemaphoreMutexTest extends AbstractMutexTest
{
    // Class identifier constant; not referenced within this class itself.
    const ID = __CLASS__;

    /** A semaphore restricted to exactly one lock behaves as a mutex. */
    public function createMutex(): Mutex
    {
        return new SemaphoreMutex(new LocalSemaphore(1));
    }

    public function testSemaphoreWithMultipleLocks(): \Generator
    {
        $this->expectException(\Error::class);
        $this->expectExceptionMessage('Cannot use a semaphore with more than a single lock');
        // A two-permit semaphore allows acquire() to succeed more than once;
        // the mutex wrapper must reject that, so this loop is expected to
        // abort with the Error declared above rather than run forever.
        $mutex = new SemaphoreMutex(new LocalSemaphore(2));
        while (yield $mutex->acquire());
    }
}
<file_sep>/test/SynchronizedTest.php
<?php
namespace Amp\Sync\Test;
use Amp\Delayed;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\LocalMutex;
use function Amp\Sync\synchronized;
class SynchronizedTest extends AsyncTestCase
{
    public function testSynchronized(): \Generator
    {
        // Three 100 ms critical sections serialized by one mutex must take
        // at least 300 ms in total.
        $this->setMinimumRuntime(300);

        $mutex = new LocalMutex;
        $callback = function (int $value) {
            return yield new Delayed(100, $value);
        };

        $promises = [];
        for ($value = 0; $value <= 2; $value++) {
            $promises[] = synchronized($mutex, $callback, $value);
        }

        $result = yield $promises;

        // Each call resolves to its own input, in submission order.
        $this->assertSame([0, 1, 2], $result);
    }
}
<file_sep>/docs/semaphore.md
---
title: Semaphore
permalink: /semaphore
---
[Semaphores](https://en.wikipedia.org/wiki/Semaphore_%28programming%29) are another synchronization primitive in addition to [mutual exclusion](./mutex.md).
Instead of providing exclusive access to a single party, they provide access to a limited set of N parties at the same time.
This makes them great for controlling concurrency, e.g. limiting an HTTP client to X concurrent requests so that the HTTP server doesn't get overwhelmed.
Similar to [`Mutex`](./mutex.md), `Lock` instances can be acquired using `Semaphore::acquire()`.
Please refer to the `Mutex` documentation for additional usage documentation, as they're basically equivalent except for the fact that `Mutex` is always a `Semaphore` with a count of exactly one party.
In many cases you can use [concurrent iterators](./concurrent-iterator.md) instead of directly using a `Semaphore`.<file_sep>/test/LockTest.php
<?php
namespace Amp\Sync\Test;
use Amp\PHPUnit\AsyncTestCase;
use Amp\Sync\Lock;
class LockTest extends AsyncTestCase
{
    public function testIsReleased()
    {
        // The releaser callback must be invoked exactly once.
        $lock = new Lock(0, $this->createCallback(1));

        $this->assertFalse($lock->isReleased());
        $lock->release();
        $this->assertTrue($lock->isReleased());
    }

    public function testIsReleasedOnDestruct()
    {
        // Dropping the last reference must trigger the releaser (expected once).
        $lock = new Lock(0, $this->createCallback(1));
        unset($lock);
    }

    public function testThrowsOnMultiRelease()
    {
        $lock = new Lock(0, $this->createCallback(1));

        $lock->release();
        $this->assertTrue($lock->isReleased());

        // NOTE(review): despite the method name, no exception is expected here;
        // a second release() must be a harmless no-op and the lock stays released.
        $lock->release();
        $this->assertTrue($lock->isReleased());
    }

    public function testGetId()
    {
        $expectedId = 42;

        $lock = new Lock($expectedId, $this->createCallback(1));

        $this->assertSame($expectedId, $lock->getId());
        $lock->release();
    }
}
| f39b3c6bd8f1f81ea74a7dd68c957bb20399dff4 | [
"Markdown",
"PHP"
] | 26 | PHP | b1rdex/sync | fe182e0bfea7fbed90f926a8faa5c8a2399820a9 | f3aab23671ef174b3b11543a21be5188211c070b | |
refs/heads/master | <file_sep>using Financial.Business.Models;
using Financial.Business.ServiceInterfaces;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Services
{
public class AccountTypeSettingTypeService : IAccountTypeSettingTypeService
{
private IUnitOfWork _unitOfWork;
public AccountTypeSettingTypeService(IUnitOfWork unitOfWork)
{
_unitOfWork = unitOfWork;
}
public List<Business.Models.AttributeType> GetListOfActiveLinkedSettingTypes(int assetTypeId)
{
return _unitOfWork.SettingTypes.GetAll()// your starting point - table in the "from" statement
.Where(r => r.IsActive)
.ToList()
.Join(_unitOfWork.AssetTypeSettingTypes.FindAll(r => r.IsActive), // the source table of the inner join
st => st.Id, // Select the primary key (the first part of the "on" clause in an sql "join" statement)
atst => atst.Id, // Select the foreign key (the second part of the "on" clause)
(st, atst) => new { SType = st, ATypeSType = atst }) // selection
.Where(link => link.ATypeSType.AssetTypeId == assetTypeId)
.Select(j => new Business.Models.AttributeType(j.SType, j.ATypeSType))
.ToList();
}
public List<Business.Models.AccountType> GetListOfActiveLinkedAssetTypes(int settingTypeId)
{
return _unitOfWork.AssetTypes.GetAll()// your starting point - table in the "from" statement
.Where(r => r.IsActive)
.ToList()
.Join(_unitOfWork.AssetTypeSettingTypes.FindAll(r => r.IsActive), // the source table of the inner join
at => at.Id, // Select the primary key (the first part of the "on" clause in an sql "join" statement)
atst => atst.Id, // Select the foreign key (the second part of the "on" clause)
(at, atst) => new { AType = at, ATypeSType = atst }) // selection
.Where(link => link.ATypeSType.SettingTypeId == settingTypeId)
.Select(j => new Business.Models.AccountType(j.AType, j.ATypeSType))
.ToList();
}
public Business.Models.AccountType CreateLinkedSettingTypesGetModel(int assetTypeId)
{
var dtoAssetType = _unitOfWork.AssetTypes.Find(r => r.IsActive && r.Id == assetTypeId);
if (dtoAssetType == null)
{
return null;
}
return new Business.Models.AccountType(dtoAssetType);
}
public Business.Models.AccountType EditLinkedSettingTypesGetModel(int assetTypeId)
{
var dtoAssetType = _unitOfWork.AssetTypes.Find(r => r.IsActive && r.Id == assetTypeId);
if (dtoAssetType == null)
{
return null;
}
return new Business.Models.AccountType(dtoAssetType);
}
public int GetAssetTypeSettingTypeIdForLinkedAssetType(int assetTypeId, int settingTypeId)
{
// get link information
var dbAssetTypeAttributeTypes = _unitOfWork.AssetTypeSettingTypes.GetAllActive()
.Where(r => r.AssetTypeId == assetTypeId)
.FirstOrDefault(r => r.SettingTypeId == settingTypeId);
// validate
if (dbAssetTypeAttributeTypes == null)
{
return 0;
}
return dbAssetTypeAttributeTypes.Id;
}
public List<AccountType> GetListOfLinkedAssetTypes(int settingTypeId)
{
// get linked setting types from db
var dbAssetTypeAttributeTypes = _unitOfWork.AssetTypeSettingTypes.GetAll()
.Where(r => r.IsActive)
.Where(r => r.SettingTypeId == settingTypeId)
.ToList();
// transfer dto to bm
var bmAssetTypes = new List<AccountType>();
foreach (var dtoAssetTypeSettingType in dbAssetTypeAttributeTypes)
{
var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAssetTypeSettingType.AssetTypeId);
if (dtoAssetType != null)
{
bmAssetTypes.Add(new AccountType(dtoAssetType, dtoAssetTypeSettingType));
}
}
return bmAssetTypes;
}
public List<AttributeType> GetListOfLinkedSettingTypes(int assetTypeId)
{
// get linked setting types from db
var dbAssetTypeSettingTypes = _unitOfWork.AssetTypeSettingTypes.GetAll()
.Where(r => r.IsActive)
.Where(r => r.AssetTypeId == assetTypeId)
.ToList();
// transfer dto to bm
var bmSettingTypes = new List<AttributeType>();
foreach (var dtoAssetTypeSettingType in dbAssetTypeSettingTypes)
{
var dtoSettingType = _unitOfWork.SettingTypes.Get(dtoAssetTypeSettingType.SettingTypeId);
if (dtoSettingType != null)
{
bmSettingTypes.Add(new AttributeType(dtoSettingType, dtoAssetTypeSettingType));
}
}
return bmSettingTypes;
}
public List<AttributeType> GetListOfSettingTypesWithLinkedAssetType(int assetTypeId)
{
var bmSettingTypes = new List<AttributeType>();
// transfer from db
var dbSettingTypes = _unitOfWork.SettingTypes.GetAllActive();
foreach (var dtoSettingType in dbSettingTypes)
{
// check for existing link
var dtoAssetTypeSettingType = _unitOfWork.AssetTypeSettingTypes.GetAllActive()
.Where(r => r.AssetTypeId == assetTypeId)
.FirstOrDefault(r => r.SettingTypeId == dtoSettingType.Id);
if(dtoAssetTypeSettingType == null)
{
// no link found
dtoAssetTypeSettingType = new Core.Models.AssetTypeSettingType();
}
// transfer dto to bm
bmSettingTypes.Add(new AttributeType(dtoSettingType, dtoAssetTypeSettingType));
}
return bmSettingTypes;
}
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.RelationshipType;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
namespace Financial.WebApplication.Controllers
{
public class RelationshipTypeController : BaseController
{
private IUnitOfWork _unitOfWork;
private IBusinessService _businessService;
public RelationshipTypeController(IUnitOfWork unitOfWork, IBusinessService businessService)
: base()
{
_unitOfWork = unitOfWork;
_businessService = businessService;
}
[HttpGet]
public ViewResult Index()
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
if (TempData["ErrorMessage"] != null)
{
ViewData["ErrorMessage"] = TempData["ErrorMessage"];
}
// transfer dto to vm
var vmIndex = _unitOfWork.RelationshipTypes.GetAll()
.Select(r => new IndexViewModel(r))
.ToList();
// display view
return View("Index", vmIndex);
}
[HttpGet]
public ViewResult Create()
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
// display view
return View("Create");
}
[HttpPost]
public ActionResult Create(CreateViewModel vmCreate)
{
// validation
if(!ModelState.IsValid)
{
return View("Create", vmCreate);
}
// check for duplicate
var count = _unitOfWork.RelationshipTypes.GetAll()
.Count(r => r.Name == vmCreate.Name);
if (count > 0)
{
// display view with message
ViewData["ErrorMessage"] = "Record already exists";
return View("Create", vmCreate);
}
// transfer vm to dto
_unitOfWork.RelationshipTypes.Add(new RelationshipType()
{
Name = vmCreate.Name,
IsActive = true
});
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "Record created";
return RedirectToAction("Index", "RelationshipType");
}
[HttpGet]
public ViewResult Edit(int id)
{
// transfer dto to vm
var vmEdit = _unitOfWork.RelationshipTypes.GetAll()
.Select(r => new EditViewModel(r))
.FirstOrDefault(r => r.Id == id);
// display view
return View("Edit", vmEdit);
}
[HttpPost]
public ActionResult Edit(EditViewModel vmEdit)
{
// validation
if(!ModelState.IsValid)
{
return View("Edit", vmEdit);
}
// check for duplicate
var count = _unitOfWork.RelationshipTypes.GetAll()
.Where(r => r.Name == vmEdit.Name)
.Count(r => r.Id != vmEdit.Id);
if(count > 0)
{
// display view with message
ViewData["ErrorMessage"] = "Record already exists";
return View("Edit", vmEdit);
}
// transfer vm to dto
var dtoRelationshipType = _unitOfWork.RelationshipTypes.Get(vmEdit.Id);
dtoRelationshipType.Name = vmEdit.Name;
dtoRelationshipType.IsActive = vmEdit.IsActive;
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "Record updated";
return RedirectToAction("Index", "RelationshipType");
}
[HttpGet]
public ViewResult Details(int id)
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
if (TempData["ErrorMessage"] != null)
{
ViewData["ErrorMessage"] = TempData["ErrorMessage"];
}
// transfer dto to vm
var vmDetails = _unitOfWork.RelationshipTypes.GetAll()
.Select(r => new DetailsViewModel(r))
.FirstOrDefault(r => r.Id == id);
// display view
return View("Details", vmDetails);
}
}
}<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
    /// <summary>
    /// Repository contract for <see cref="TransactionType"/> entities.
    /// </summary>
    public interface ITransactionTypeRepository : IRepository<TransactionType>
    {
        /// <summary>
        /// Returns all active transaction types, ordered by name.
        /// </summary>
        IEnumerable<TransactionType> GetAllActiveOrderedByName();
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
using NUnit.Framework;
using Financial.Business.Services;
using Financial.Data;
using Moq;
using Financial.Core.Models;
using Financial.Business.Models;
using Financial.Core;
using Financial.Data.Repositories;
using Financial.Tests.Mocks;
namespace Financial.Tests.Business.Services
{
    /// <summary>
    /// Unit tests for <see cref="AccountTransactionService"/>. Most tests run the
    /// service against a fake DbContext-backed unit of work built in SetUp; the
    /// "ShouldCall..." tests swap in a mocked unit of work to verify interactions.
    /// </summary>
    [TestFixture]
    public class AccountTransactionServiceTests
    {
        // Seed entities shared by most tests; wired together (including
        // navigation properties) in SetUp.
        private Asset _dbAsset;
        private AssetType _dbAssetType;
        private AssetSetting _dbAssetSetting;
        private AssetTransaction _dbAssetTransaction;
        private SettingType _dbSettingType;
        private TransactionType _dbTransactionType;
        private TransactionCategory _dbTransactionCategory;
        private TransactionDescription _dbTransactionDescription;
        // System under test and its collaborators.
        private AccountTransactionService _service;
        private Mock<IUnitOfWork> _mockUnitOfWork;
        private IUnitOfWork _fakeUnitOfWork;
        private FinancialDbContext _fakeDbContext;
        [SetUp]
        public void SetUp()
        {
            // setup db models
            _dbAssetType = new AssetType { Id = 1, Name = "a", IsActive = true };
            _dbAsset = new Asset
            {
                Id = 2,
                AssetTypeId = _dbAssetType.Id,
                AssetType = _dbAssetType, // setup include
                Name = "b",
                IsActive = true
            };
            _dbSettingType = new SettingType { Id = 3, Name = "c", IsActive = true };
            _dbAssetSetting = new AssetSetting
            {
                Id = 4,
                AssetId = _dbAsset.Id,
                Asset = _dbAsset, // setup include
                SettingTypeId = _dbSettingType.Id,
                SettingType = _dbSettingType, // setup include
                Value = "d",
                IsActive = true
            };
            _dbTransactionType = new TransactionType { Id = 5, Name = "e", IsActive = true };
            _dbTransactionCategory = new TransactionCategory {Id = 6, Name = "f", IsActive = true};
            _dbTransactionDescription = new TransactionDescription {Id = 7, Name = "g", IsActive = true};
            _dbAssetTransaction = new AssetTransaction
            {
                Id = 8,
                AssetId = _dbAsset.Id,
                Asset = _dbAsset, // setup include
                TransactionTypeId = _dbTransactionType.Id,
                TransactionType = _dbTransactionType, // setup include
                TransactionCategoryId = _dbTransactionCategory.Id,
                TransactionCategory = _dbTransactionCategory, // setup include
                TransactionDescriptionId = _dbTransactionDescription.Id,
                TransactionDescription = _dbTransactionDescription, // setup include
                DueDate = new DateTime(1234, 5, 6),
                ClearDate = new DateTime(1234, 7, 8),
                Amount = 1.23M,
                Note = "abc",
                IsActive = true
            };
            // setup DbContext
            Setup_FakeDbContext();
            // setup uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // setup service
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        [TearDown]
        public void TearDown()
        {
        }
        // ----- GetListOfActiveTransactions -----
        [Test]
        public void GetListOfActiveTransactions_WhenCalled_ReturnAccountTransactionList_Test()
        {
            var result = _service.GetListOfActiveTransactions();
            Assert.That(result, Is.TypeOf<List<AccountTransaction>>());
        }
        [Test]
        public void GetListOfActiveTransactions_WhenCalled_ShouldCallOneTimeUnitOfWorkRepositoryAssetTransactionsMethodGetAllActiveByDueDate_Test()
        {
            _mockUnitOfWork = new Mock<IUnitOfWork>();
            Setup_MockUnitOfWork_AssetTransaction_GetAllActiveByDueDate();
            Setup_MockUnitOfWork_Assets_Get();
            Setup_MockUnitOfWork_AssetTypes_Get();
            Setup_MockUnitOfWork_AssetSettings_GetActive();
            _service = new AccountTransactionService(_mockUnitOfWork.Object);
            _service.GetListOfActiveTransactions();
            _mockUnitOfWork.Verify(uow => uow.AssetTransactions.GetAllActiveByDueDate(),
                Times.Once);
        }
        [Test]
        public void GetListOfActiveTransactions_WhenAccountTransactionListHasAccountTransactions_ShouldCallUnitOfWorkRepositoryAssetsMethodGet_Test()
        {
            _mockUnitOfWork = new Mock<IUnitOfWork>();
            Setup_MockUnitOfWork_AssetTransaction_GetAllActiveByDueDate();
            Setup_MockUnitOfWork_Assets_Get();
            Setup_MockUnitOfWork_AssetTypes_Get();
            Setup_MockUnitOfWork_AssetSettings_GetActive();
            _service = new AccountTransactionService(_mockUnitOfWork.Object);
            _service.GetListOfActiveTransactions();
            _mockUnitOfWork.Verify(uow => uow.Assets.Get(It.IsAny<int>()),
                Times.AtLeastOnce);
        }
        [Test]
        public void GetListOfActiveTransactions_WhenAccountTransactionListHasAccountTransactions_ShouldCallUnitOfWorkRepositoryAssetSettingsMethodGetActive_Test()
        {
            _mockUnitOfWork = new Mock<IUnitOfWork>();
            Setup_MockUnitOfWork_AssetTransaction_GetAllActiveByDueDate();
            Setup_MockUnitOfWork_Assets_Get();
            Setup_MockUnitOfWork_AssetTypes_Get();
            Setup_MockUnitOfWork_AssetSettings_GetActive();
            _service = new AccountTransactionService(_mockUnitOfWork.Object);
            _service.GetListOfActiveTransactions();
            _mockUnitOfWork.Verify(uow => uow.AssetSettings.GetActive(It.IsAny<int>(), It.IsAny<int>()),
                Times.AtLeastOnce);
        }
        [Test]
        public void GetListOfActiveTransactions_WhenAccountListHasAccount_ReturnAccountTransactionValues_Test()
        {
            var result = _service.GetListOfActiveTransactions();
            Assert.Multiple(() =>
            {
                Assert.That(result.Count, Is.EqualTo(1), "Count");
                Assert.That(result[0].AssetTransactionId, Is.EqualTo(_dbAssetTransaction.Id), "AssetTransaction Id");
                Assert.That(result[0].AssetId, Is.EqualTo(_dbAssetTransaction.AssetId), "Asset Id");
                Assert.That(result[0].AssetName, Is.EqualTo(_dbAsset.Name), "Asset Name");
                Assert.That(result[0].AssetTypeId, Is.EqualTo(_dbAsset.AssetTypeId), "AssetType Id");
                Assert.That(result[0].AssetTypeName, Is.EqualTo(_dbAssetType.Name), "AssetType Name");
                Assert.That(result[0].DueDate, Is.EqualTo(_dbAssetTransaction.DueDate), "DueDate");
                Assert.That(result[0].ClearDate, Is.EqualTo(_dbAssetTransaction.ClearDate), "ClearDate");
                Assert.That(result[0].Amount, Is.EqualTo(_dbAssetTransaction.Amount), "Amount");
                Assert.That(result[0].Note, Is.EqualTo(_dbAssetTransaction.Note), "Note");
            });
        }
        [Test]
        public void GetListOfActiveTransactions_WhenAccountTypeEqualsCreditCard_ReturnNameWithAccountNumber_Test()
        {
            Setup_Service_FakeUnitOfWork_AssetType_CreditCard(assetName: "a", assetSettingValue: "1234");
            var expectedAssetName = "a (1234)";
            var result = _service.GetListOfActiveTransactions();
            Assert.That(result[0].AssetName, Is.EqualTo(expectedAssetName));
        }
        [Test]
        public void GetListOfActiveTransactions_WhenTransactionTypeEqualsIncome_ReturnPositiveAmount_Test()
        {
            Setup_Service_FakeUnitOfWork_TransactionType_Income(transactionAmount: 1.25M);
            var result = _service.GetListOfActiveTransactions();
            Assert.That(result[0].Amount, Is.EqualTo(1.25));
        }
        [Test]
        public void GetListOfActiveTransactions_WhenTransactionTypeEqualsExpense_ReturnNegativeAmount_Test()
        {
            Setup_Service_FakeUnitOfWork_TransactionType_Expense(transactionAmount: 1.25M);
            var result = _service.GetListOfActiveTransactions();
            Assert.That(result[0].Amount, Is.EqualTo(-1.25));
        }
        [Test]
        public void GetListOfActiveTransactions_WhenAccountIdEqualsZero_ReturnEmptyAccountList_Test()
        {
            _dbAssetTransaction.AssetId = 0;
            var result = _service.GetListOfActiveTransactions();
            Assert.That(result, Is.EquivalentTo(new List<AccountTransaction>()));
        }
        // ----- GetAccountForTransaction -----
        [Test]
        public void GetAccountForTransaction_WhenCalled_ReturnAccount_Test()
        {
            var result = _service.GetAccountForTransaction(assetId: _dbAsset.Id);
            Assert.That(result, Is.InstanceOf<Account>());
        }
        [Test]
        public void GetAccountForTransaction_WhenAssetIdEqualsNull_ReturnNull_Test()
        {
            var result = _service.GetAccountForTransaction(assetId: null);
            Assert.That(result, Is.EqualTo(null));
        }
        [Test]
        public void GetAccountForTransaction_WhenAssetIdEqualsZero_ReturnNull_Test()
        {
            var result = _service.GetAccountForTransaction(assetId: 0);
            Assert.That(result, Is.EqualTo(null));
        }
        [Test]
        public void GetAccountForTransaction_WhenAssetIdProvided_ReturnAccountValues_Test()
        {
            var result = _service.GetAccountForTransaction(_dbAsset.Id);
            Assert.Multiple(() =>
            {
                Assert.That(result.AssetId, Is.EqualTo(_dbAsset.Id), "Asset Id");
                Assert.That(result.AssetName, Is.EqualTo(_dbAsset.Name), "Asset Name");
                Assert.That(result.AssetTypeId, Is.EqualTo(_dbAsset.AssetTypeId), "AssetType Id");
                Assert.That(result.AssetTypeName, Is.EqualTo(_dbAssetType.Name), "AssetType Name");
            });
        }
        [Test]
        public void GetAccountForTransaction_WhenAssetTypeEqualsCreditCard_ReturnFormattedAccountName_Test()
        {
            Setup_Service_FakeUnitOfWork_AssetType_CreditCard(assetName: "a", assetSettingValue: "1234");
            var expectedFormattedAssetName = "a (1234)";
            var result = _service.GetAccountForTransaction(assetId: _dbAsset.Id);
            Assert.That(result.AssetName, Is.EqualTo(expectedFormattedAssetName));
        }
        [Test]
        public void GetAccountForTransaction_WhenUnitOfWorkReturnsAssetEqualsNull_ReturnNull_Test()
        {
            // Assets.Get is set up without a return value, so it yields null
            _mockUnitOfWork = new Mock<IUnitOfWork>();
            _mockUnitOfWork.Setup(uow => uow.Assets.Get(It.IsAny<int>()));
            _service = new AccountTransactionService(_mockUnitOfWork.Object);
            var result = _service.GetAccountForTransaction(assetId: 1);
            Assert.That(result, Is.EqualTo(null));
        }
        // ----- GetAccountSelectList -----
        [Test]
        public void GetAccountSelectList_WhenCalled_ReturnSelectListItems_Test()
        {
            var result = _service.GetAccountSelectList(selectedId: null);
            Assert.That(result, Is.InstanceOf<List<SelectListItem>>());
        }
        [Test]
        public void GetAccountSelectList_WhenCalled_ReturnAssetValues_Test()
        {
            var result = _service.GetAccountSelectList(selectedId: null);
            Assert.Multiple(() =>
            {
                Assert.That(result[0].Value, Is.EqualTo(_dbAsset.Id.ToString()));
                Assert.That(result[0].Text, Is.EqualTo(_dbAsset.Name));
            });
        }
        [Test]
        public void GetAccountSelectList_WhenAssetTypeEqualsCreditCard_ReturnFormattedAssetName_Test()
        {
            Setup_Service_FakeUnitOfWork_AssetType_CreditCard(assetName: "a", assetSettingValue: "1234");
            var expectedFormattedAssetName = "a (1234)";
            var result = _service.GetAccountSelectList(selectedId: null);
            Assert.That(result[0].Text, Is.EqualTo(expectedFormattedAssetName));
        }
        [Test]
        public void GetAccountSelectList_WhenSelectedIdProvided_ReturnSelectedIndex_Test()
        {
            var result = _service.GetAccountSelectList(selectedId: _dbAsset.Id.ToString());
            Assert.That(result[0].Selected, Is.EqualTo(true));
        }
        [Test]
        public void GetAccountSelectList_WhenMultipleAssetsFound_ReturnAssetsOrderedAscendingByName_Test()
        {
            var fakeAssetTypeList = new List<AssetType> {_dbAssetType};
            var asset1 = new Asset { Id = 1, Name = "z", AssetTypeId = _dbAssetType.Id, AssetType = _dbAssetType, IsActive = true };
            var asset2 = new Asset { Id = 2, Name = "a", AssetTypeId = _dbAssetType.Id, AssetType = _dbAssetType, IsActive = true };
            var fakeAssetList = new List<Asset> { asset1, asset2 };
            var fakeSettingTypeList = new List<SettingType> {_dbSettingType};
            var fakeAssetSettingList = new List<AssetSetting>
            {
                new AssetSetting { Id = 3, AssetId = asset1.Id, Asset = asset1, SettingTypeId = _dbSettingType.Id, SettingType = _dbSettingType, Value = "abc", IsActive = true },
                new AssetSetting { Id = 4, AssetId = asset2.Id, Asset = asset2, SettingTypeId = _dbSettingType.Id, SettingType = _dbSettingType, Value = "abc", IsActive = true },
            };
            Setup_Service_FakeUnitOfWork(fakeAssetList, fakeAssetSettingList, fakeAssetTypeList, fakeSettingTypeList);
            var result = _service.GetAccountSelectList(selectedId: null);
            Assert.Multiple(() =>
            {
                Assert.That(result[0].Text, Is.EqualTo("a"), "First Index");
                Assert.That(result[1].Text, Is.EqualTo("z"), "Second Index");
            });
        }
        // ----- GetTransactionTypeSelectList -----
        [Test]
        public void GetTransactionTypeSelectList_WhenCalled_ReturnSelectListItems_Test()
        {
            var result = _service.GetTransactionTypeSelectList(selectedId: null);
            Assert.That(result, Is.InstanceOf<List<SelectListItem>>());
        }
        [Test]
        public void GetTransactionTypeSelectList_WhenCalled_ReturnTransactionTypeValues_Test()
        {
            var result = _service.GetTransactionTypeSelectList(selectedId: null);
            Assert.Multiple(() =>
            {
                Assert.That(result[0].Value, Is.EqualTo(_dbTransactionType.Id.ToString()), "Id");
                Assert.That(result[0].Text, Is.EqualTo(_dbTransactionType.Name), "Name");
            });
        }
        [Test]
        public void GetTransactionTypeSelectList_WhenSelectedIdProvided_ReturnSelectedIndex_Test()
        {
            var result = _service.GetTransactionTypeSelectList(selectedId: _dbTransactionType.Id.ToString());
            Assert.That(result[0].Selected, Is.EqualTo(true));
        }
        [Test]
        public void GetTransactionTypeSelectList_WhenMultipleTransactionTypesFound_ReturnOrderedAscendingByName_Test()
        {
            var fakeTransactionTypeList = new List<TransactionType>
            {
                new TransactionType { Id = 1, Name = "z", IsActive = true },
                new TransactionType { Id = 2, Name = "a", IsActive = true },
            };
            Setup_Service_FakeUnitOfWork(fakeTransactionTypeList);
            var result = _service.GetTransactionTypeSelectList(selectedId: null);
            Assert.Multiple(() =>
            {
                Assert.That(result[0].Text, Is.EqualTo("a"), "First Index");
                Assert.That(result[1].Text, Is.EqualTo("z"), "Second Index");
            });
        }
        // ----- GetTransactionCategorySelectList -----
        [Test]
        public void GetTransactionCategorySelectList_WhenCalled_ReturnSelectListItems_Test()
        {
            var result = _service.GetTransactionCategorySelectList(selectedId: null);
            Assert.That(result, Is.InstanceOf<List<SelectListItem>>());
        }
        [Test]
        public void GetTransactionCategorySelectList_WhenCalled_ReturnTransactionCategoryValues_Test()
        {
            var result = _service.GetTransactionCategorySelectList(selectedId: null);
            Assert.Multiple(() =>
            {
                Assert.That(result[0].Value, Is.EqualTo(_dbTransactionCategory.Id.ToString()), "Id");
                Assert.That(result[0].Text, Is.EqualTo(_dbTransactionCategory.Name), "Name");
            });
        }
        [Test]
        public void GetTransactionCategorySelectList_WhenSelectedIdProvided_ReturnSelectedIndex_Test()
        {
            var result = _service.GetTransactionCategorySelectList(selectedId: _dbTransactionCategory.Id.ToString());
            Assert.That(result[0].Selected, Is.EqualTo(true));
        }
        [Test]
        public void GetTransactionCategorySelectList_WhenMultipleTransactionCategoriesFound_ReturnOrderedAscendingByName_Test()
        {
            var fakeTransactionCategoryList = new List<TransactionCategory>
            {
                new TransactionCategory { Id = 1, Name = "z", IsActive = true },
                new TransactionCategory { Id = 2, Name = "a", IsActive = true },
            };
            Setup_Service_FakeUnitOfWork(fakeTransactionCategoryList);
            var result = _service.GetTransactionCategorySelectList(selectedId: null);
            Assert.Multiple(() =>
            {
                Assert.That(result[0].Text, Is.EqualTo("a"), "First Index");
                Assert.That(result[1].Text, Is.EqualTo("z"), "Second Index");
            });
        }
        // private
        // Builds the fake DbContext from the seed entities created in SetUp.
        private void Setup_FakeDbContext()
        {
            Setup_FakeDbContext(
                new List<Asset> { _dbAsset },
                new List<AssetSetting> { _dbAssetSetting },
                new List<AssetTransaction> { _dbAssetTransaction },
                new List<AssetType> { _dbAssetType },
                new List<SettingType> { _dbSettingType },
                new List<TransactionCategory> { _dbTransactionCategory },
                new List<TransactionDescription> { _dbTransactionDescription },
                new List<TransactionType> { _dbTransactionType });
        }
        // Builds the fake DbContext from explicitly supplied entity lists.
        private void Setup_FakeDbContext(
            List<Asset> fakeAssetList,
            List<AssetSetting> fakeAssetSettingList,
            List<AssetTransaction> fakeAssetTransactionList,
            List<AssetType> fakeAssetTypeList,
            List<SettingType> fakeSettingTypeList,
            List<TransactionCategory> fakeTransactionCategoryList,
            List<TransactionDescription> fakeTransactionDescriptionList,
            List<TransactionType> fakeTransactionTypeList)
        {
            _fakeDbContext = MockFinancialDbContext.Create(
                assets: fakeAssetList,
                assetSettings: fakeAssetSettingList,
                assetTransactions: fakeAssetTransactionList,
                assetTypes: fakeAssetTypeList,
                settingTypes: fakeSettingTypeList,
                transactionCategories: fakeTransactionCategoryList,
                transactionDescriptions: fakeTransactionDescriptionList,
                transactionTypes: fakeTransactionTypeList);
        }
        // Rebuilds context, unit of work and service from supplied asset data.
        private void Setup_Service_FakeUnitOfWork(
            List<Asset> fakeAssets,
            List<AssetSetting> fakeAssetSettings,
            List<AssetType> fakeAssetTypes,
            List<SettingType> fakeSettingTypes)
        {
            // setup DbContext
            _fakeDbContext = MockFinancialDbContext.Create(
                assets: fakeAssets,
                assetSettings: fakeAssetSettings,
                assetTypes: fakeAssetTypes,
                settingTypes: fakeSettingTypes);
            // set up uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // set up repository
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        private void Setup_Service_FakeUnitOfWork(List<TransactionType> fakeTransactionTypes)
        {
            // setup DbContext
            _fakeDbContext = MockFinancialDbContext.Create(transactionTypes: fakeTransactionTypes);
            // set up uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // set up repository
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        private void Setup_Service_FakeUnitOfWork(List<TransactionCategory> fakeTransactionCategories)
        {
            // setup DbContext
            _fakeDbContext = MockFinancialDbContext.Create(transactionCategories: fakeTransactionCategories);
            // set up uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // set up repository
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        // Reshapes the seed data into a credit-card asset with an account number.
        private void Setup_Service_FakeUnitOfWork_AssetType_CreditCard(string assetName, string assetSettingValue)
        {
            // setup fake model
            _dbAssetType.Id = AssetType.IdForCreditCard;
            _dbAsset.AssetTypeId = _dbAssetType.Id;
            _dbAsset.AssetType = _dbAssetType;
            _dbAsset.Name = assetName;
            _dbSettingType.Id = SettingType.IdForAccountNumber;
            _dbAssetSetting.SettingTypeId = _dbSettingType.Id;
            _dbAssetSetting.SettingType = _dbSettingType;
            _dbAssetSetting.Asset = _dbAsset;
            _dbAssetSetting.Value = assetSettingValue;
            _dbAssetTransaction.AssetId = _dbAsset.Id;
            _dbAssetTransaction.Asset = _dbAsset;
            // setup DbContext
            Setup_FakeDbContext();
            // set up uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // set up repository
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        // Reshapes the seed transaction into an income transaction.
        private void Setup_Service_FakeUnitOfWork_TransactionType_Income(decimal transactionAmount)
        {
            // setup fake model
            _dbTransactionType.Id = TransactionType.IdForIncome;
            _dbAssetTransaction.TransactionTypeId = _dbTransactionType.Id;
            _dbAssetTransaction.TransactionType = _dbTransactionType;
            _dbAssetTransaction.Amount = transactionAmount;
            // setup DbContext
            Setup_FakeDbContext();
            // set up uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // set up repository
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        // Reshapes the seed transaction into an expense transaction.
        private void Setup_Service_FakeUnitOfWork_TransactionType_Expense(decimal transactionAmount)
        {
            // setup fake model
            _dbTransactionType.Id = TransactionType.IdForExpense;
            _dbAssetTransaction.TransactionTypeId = _dbTransactionType.Id;
            _dbAssetTransaction.TransactionType = _dbTransactionType;
            _dbAssetTransaction.Amount = transactionAmount;
            // setup DbContext
            Setup_FakeDbContext();
            // set up uow
            _fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
            // set up repository
            _service = new AccountTransactionService(_fakeUnitOfWork);
        }
        private void Setup_MockUnitOfWork_Assets_Get()
        {
            _mockUnitOfWork.Setup(uow => uow.Assets.Get(_dbAsset.Id))
                .Returns(_dbAsset);
        }
        private void Setup_MockUnitOfWork_AssetTypes_Get()
        {
            _mockUnitOfWork.Setup(uow => uow.AssetTypes.Get(_dbAssetType.Id))
                .Returns(_dbAssetType);
        }
        private void Setup_MockUnitOfWork_AssetSettings_GetActive()
        {
            _mockUnitOfWork.Setup(uow => uow.AssetSettings.GetActive(_dbAssetSetting.AssetId, _dbAssetSetting.SettingTypeId))
                .Returns(_dbAssetSetting);
        }
        private void Setup_MockUnitOfWork_AssetTransaction_GetAllActiveByDueDate()
        {
            _mockUnitOfWork.Setup(uow => uow.AssetTransactions.GetAllActiveByDueDate())
                .Returns(new List<AssetTransaction> { _dbAssetTransaction });
        }
    }
}
<file_sep>using Financial.Business;
using Financial.Business.ServiceInterfaces;
using Financial.Business.Services;
using Financial.Data;
using System.Web.Mvc;
using Unity;
using Unity.Mvc5;
namespace Financial.WebApplication
{
public static class UnityConfig
{
public static void RegisterComponents()
{
var container = new UnityContainer();
// register all your components with the container here
// it is NOT necessary to register your controllers
// e.g. container.RegisterType<ITestService, TestService>();
container.RegisterType<IUnitOfWork, UnitOfWork>();
container.RegisterType<IBusinessService, BusinessService>();
container.RegisterType<IAccountTypeService, AccountTypeService>();
container.RegisterType<IAccountTypeSettingTypeService, AccountTypeSettingTypeService>();
container.RegisterType<IAccountTransactionService, AccountTransactionService>();
DependencyResolver.SetResolver(new UnityDependencyResolver(container));
}
}
}<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
// Test double for IAssetRepository backed by an in-memory list.
public class InMemoryAssetRepository : InMemoryRepository<Asset>, IAssetRepository
{
    // Shadow reference to the same list the base class holds, so base mutations
    // (Add/Remove) are visible to the query below.
    // NOTE(review): the 'as' cast assumes callers always pass a List<Asset>;
    // any other IEnumerable<Asset> leaves this null — confirm all call sites.
    private List<Asset> _entities = null;

    public InMemoryAssetRepository(IEnumerable<Asset> entities)
        : base(entities)
    {
        _entities = entities as List<Asset>;
    }

    // Active assets sorted by Name ascending, materialized to a list.
    public IEnumerable<Asset> GetAllActiveOrderedByName()
    {
        return _entities
            .Where(r => r.IsActive)
            .OrderBy(r => r.Name)
            .ToList();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.ParentChildRelationshipType
{
/// <summary>
/// Form model for creating a parent/child relationship-type link.
/// </summary>
public class CreateViewModel
{
    /// <summary>Parameterless constructor required for MVC model binding.</summary>
    public CreateViewModel() { }

    /// <summary>
    /// Seeds the form with the supplied relationship type and the two
    /// drop-down option lists.
    /// </summary>
    public CreateViewModel(Core.Models.RelationshipType dtoSuppliedRelationshipType,
        List<SelectListItem> sliRelationshipLevels,
        List<SelectListItem> sliLinkedRelationshipTypes)
    {
        LinkedRelationshipTypes = sliLinkedRelationshipTypes;
        RelationshipLevels = sliRelationshipLevels;
        SuppliedRelationshipTypeName = dtoSuppliedRelationshipType.Name;
        SuppliedRelationshipTypeId = dtoSuppliedRelationshipType.Id;
    }

    // The relationship type the new link is being created for.
    public int SuppliedRelationshipTypeId { get; set; }

    [Display(Name = "Relationship Type")]
    public string SuppliedRelationshipTypeName { get; set; }

    // Selected value and option list for the "Relationship Level" drop-down.
    [Required]
    [Display(Name = "Relationship Level")]
    public string SelectedRelationshipLevel { get; set; }
    public IEnumerable<SelectListItem> RelationshipLevels { get; set; }

    // Selected value and option list for the "Linked Relationship Type" drop-down.
    [Required]
    [Display(Name = "Linked Relationship Type")]
    public string SelectedLinkedRelationshipType { get; set; }
    public IEnumerable<SelectListItem> LinkedRelationshipTypes { get; set; }
}
}
<file_sep>using Financial.Core;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// Generic Entity Framework repository over a shared DbContext; all write
/// operations only stage changes on the context.
/// </summary>
public class Repository<TEntity> : IRepository<TEntity> where TEntity : BaseEntity
{
    // EF context shared with the owning unit of work; protected so derived
    // repositories can compose their own queries.
    protected readonly DbContext _context;
    // DbSet for TEntity, cached at construction.
    private DbSet<TEntity> _entities;

    public Repository(DbContext context)
    {
        _context = context;
        _entities = _context.Set<TEntity>();
    }

    /// <summary>Entity with the given id, or null.</summary>
    public TEntity Get(int id)
    {
        return _entities.FirstOrDefault(r => r.Id == id);
    }

    /// <summary>Entity with the given id only when IsActive; otherwise null.</summary>
    public TEntity GetActive(int id)
    {
        return _entities.Where(r => r.IsActive).FirstOrDefault(r => r.Id == id);
    }

    /// <summary>All entities, materialized.</summary>
    public IEnumerable<TEntity> GetAll()
    {
        return _entities.ToList();
    }

    /// <summary>All active entities, materialized.</summary>
    public IEnumerable<TEntity> GetAllActive()
    {
        return _entities.Where(r => r.IsActive).ToList();
    }

    /// <summary>
    /// Single entity matching the predicate, or null.
    /// Note: SingleOrDefault throws InvalidOperationException when more than
    /// one row matches — callers must supply a unique predicate.
    /// </summary>
    public TEntity Find(Expression<Func<TEntity, bool>> predicate)
    {
        return _context.Set<TEntity>().SingleOrDefault(predicate);
    }

    /// <summary>
    /// Deferred query of entities matching the predicate; executed when enumerated.
    /// </summary>
    public IEnumerable<TEntity> FindAll(Expression<Func<TEntity, bool>> predicate)
    {
        return _context.Set<TEntity>().Where(predicate);
    }

    /// <summary>True when an entity with the given id exists (active or not).</summary>
    public bool Exists(int id)
    {
        return _entities.FirstOrDefault(r => r.Id == id) != null;
    }

    /// <summary>Stages an entity for insert on the context.</summary>
    public void Add(TEntity entity)
    {
        _context.Set<TEntity>().Add(entity);
    }

    /// <summary>Stages several entities for insert on the context.</summary>
    public void AddRange(IEnumerable<TEntity> entities)
    {
        _context.Set<TEntity>().AddRange(entities);
    }

    /// <summary>Stages an entity for delete on the context.</summary>
    public void Remove(TEntity entity)
    {
        _context.Set<TEntity>().Remove(entity);
    }

    /// <summary>Stages several entities for delete on the context.</summary>
    public void RemoveRange(IEnumerable<TEntity> entities)
    {
        _context.Set<TEntity>().RemoveRange(entities);
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data for parent/child relationship-type links used by the fakes.
/// </summary>
public class FakeParentChildRelationshipTypes
{
    public static IEnumerable<ParentChildRelationshipType> InitialFakeParentChildRelationshipTypes()
    {
        // (Id, ParentRelationshipTypeId, ChildRelationshipTypeId, IsActive)
        var rows = new[]
        {
            new { Id = 1, ParentId = 2, ChildId = 4, Active = true },
            new { Id = 2, ParentId = 4, ChildId = 5, Active = true },
            new { Id = 3, ParentId = 3, ChildId = 3, Active = false },
            new { Id = 4, ParentId = 5, ChildId = 1, Active = true },
            new { Id = 5, ParentId = 1, ChildId = 2, Active = true },
        };

        foreach (var row in rows)
        {
            yield return new ParentChildRelationshipType()
            {
                Id = row.Id,
                ParentRelationshipTypeId = row.ParentId,
                ChildRelationshipTypeId = row.ChildId,
                IsActive = row.Active,
            };
        }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
// Maps to the legacy "AccountAttributes" table: property names use the newer
// Asset/Setting terminology while the Column attributes preserve the original
// database column names.
[Table("AccountAttributes")]
public class AssetSetting : BaseEntity
{
    // FK to the owning asset (db column "AccountId").
    [Required]
    [Column("AccountId")]
    public int AssetId { get; set; }

    // FK to the setting type (db column "AttributeTypeId").
    [Required]
    [Column("AttributeTypeId")]
    public int SettingTypeId { get; set; }

    // The setting's value, stored as free-form text.
    public string Value { get; set; }

    // Navigation properties.
    [ForeignKey("AssetId")]
    public Asset Asset { get; set; }

    [ForeignKey("SettingTypeId")]
    public SettingType SettingType { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
/// <summary>
/// In-memory test double for the asset-type/setting-type link repository.
/// </summary>
public class InMemoryAssetTypeSettingTypeRepository : InMemoryRepository<AssetTypeSettingType>, IAssetTypeSettingTypeRepository
{
    // Shadow reference to the same list held by the base class.
    private List<AssetTypeSettingType> _entities = null;

    public InMemoryAssetTypeSettingTypeRepository(IEnumerable<AssetTypeSettingType> entities)
        : base(entities)
    {
        _entities = entities as List<AssetTypeSettingType>;
    }

    /// <summary>Link row for the given key pair, or null.</summary>
    public AssetTypeSettingType Get(int assetTypeId, int settingTypeId)
    {
        return _entities.FirstOrDefault(r =>
            r.AssetTypeId == assetTypeId && r.SettingTypeId == settingTypeId);
    }

    /// <summary>Active link row for the given key pair, or null.</summary>
    public AssetTypeSettingType GetActive(int assetTypeId, int settingTypeId)
    {
        return _entities.FirstOrDefault(r =>
            r.IsActive && r.AssetTypeId == assetTypeId && r.SettingTypeId == settingTypeId);
    }

    /// <summary>All link rows for an asset type, active or not.</summary>
    public IEnumerable<AssetTypeSettingType> GetAllForAssetType(int assetTypeId)
    {
        return _entities.Where(r => r.AssetTypeId == assetTypeId).ToList();
    }

    /// <summary>All link rows for a setting type, active or not.</summary>
    public IEnumerable<AssetTypeSettingType> GetAllForSettingType(int settingTypeId)
    {
        return _entities.Where(r => r.SettingTypeId == settingTypeId).ToList();
    }

    /// <summary>Active link rows for an asset type.</summary>
    public IEnumerable<AssetTypeSettingType> GetAllActiveForAssetType(int assetTypeId)
    {
        return _entities.Where(r => r.IsActive && r.AssetTypeId == assetTypeId).ToList();
    }

    /// <summary>Active link rows for a setting type.</summary>
    public IEnumerable<AssetTypeSettingType> GetAllActiveForSettingType(int settingTypeId)
    {
        return _entities.Where(r => r.IsActive && r.SettingTypeId == settingTypeId).ToList();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework.Internal;
using NUnit.Framework;
using Moq;
namespace Financial.Tests._DmitriNesteruk
{
// Minimal logging abstraction used to demonstrate Moq-based mocking.
public interface ILogForMoq
{
    // Writes a message; the bool result is ignored by BankAccountWithMoq.
    bool Write(string msg);
}
/// <summary>
/// Demo bank account with an injected log, used to illustrate mocking with Moq.
/// </summary>
public class BankAccountWithMoq
{
    // Current balance in whole currency units.
    public int Balance { get; set; }

    // Destination for audit messages.
    private ILogForMoq _log;

    public BankAccountWithMoq(ILogForMoq log)
    {
        _log = log;
    }

    /// <summary>
    /// Adds <paramref name="amount"/> to the balance and logs the deposit.
    /// </summary>
    public void Deposit(int amount)
    {
        // Fixed: the log message previously said "withdrawn" for a deposit.
        _log.Write($"User has deposited {amount}");
        Balance += amount;
    }
}
[TestFixture]
public class BankAccountWithMoqTests
{
    // System under test.
    private BankAccountWithMoq ba;

    [Test]
    public void Deposit_MockTest()
    {
        // Mock satisfies the logging dependency without a real implementation;
        // only the balance arithmetic is asserted.
        var log = new Mock<ILogForMoq>();
        ba = new BankAccountWithMoq(log.Object) {Balance = 100};

        ba.Deposit(100);

        Assert.That(ba.Balance, Is.EqualTo(200));
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
/// <summary>
/// One display row flattening an asset type and its link to a setting type.
/// </summary>
public class IndexLinkedAssetTypesViewModel
{
    /// <summary>Parameterless constructor required for MVC model binding.</summary>
    public IndexLinkedAssetTypesViewModel() { }

    public IndexLinkedAssetTypesViewModel(Core.Models.AssetType dtoAssetType,
        Core.Models.AssetTypeSettingType dtoAssetTypeSettingType)
    {
        // Values from the asset type itself.
        AssetTypeId = dtoAssetType.Id;
        AssetTypeName = dtoAssetType.Name;
        AssetTypeIsActive = dtoAssetType.IsActive;
        // Values from the link row.
        Id = dtoAssetTypeSettingType.Id;
        SettingTypeId = dtoAssetTypeSettingType.SettingTypeId;
        IsActive = dtoAssetTypeSettingType.IsActive;
    }

    // Link-row identity.
    public int Id { get; set; }
    public int SettingTypeId { get; set; }

    // Asset-type display values.
    public int AssetTypeId { get; set; }

    [Display(Name = "Asset Type")]
    public string AssetTypeName { get; set; }

    [Display(Name = "AssetType IsActive")]
    public bool AssetTypeIsActive { get; set; }

    [Display(Name = "AssetTypeSettingType IsActive")]
    public bool IsActive { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
namespace Financial.WebApplication.Models.ViewModels.Account
{
/// <summary>
/// One row of the account index: the asset's id, name, and type name.
/// </summary>
public class IndexViewModel
{
    /// <summary>Parameterless constructor required for MVC model binding.</summary>
    public IndexViewModel() {}

    /// <summary>Copies display values from the account business model.</summary>
    public IndexViewModel(Business.Models.Account bmAccount)
    {
        AssetTypeName = bmAccount.AssetTypeName;
        AssetName = bmAccount.AssetName;
        Id = bmAccount.AssetId;
    }

    public int Id { get; set; }

    [Display(Name = "Name")]
    public string AssetName { get; set; }

    [Display(Name = "Type")]
    public string AssetTypeName { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
// Asset-specific repository operations on top of the generic contract.
public interface IAssetRepository : IRepository<Asset>
{
    // NOTE(review): IRepository<Asset> already declares GetActive(int); this
    // redeclaration hides the inherited member (compiler warning CS0108) —
    // consider removing it or marking it 'new'.
    Asset GetActive(int assetId);

    // Active assets sorted by name ascending.
    IEnumerable<Asset> GetAllActiveOrderedByName();
}
}
<file_sep>using Financial.Business.ServiceInterfaces;
using Financial.Data;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;
namespace Financial.Business.Services
{
/// <summary>
/// CRUD-style operations for asset (account) types, exposed to the
/// controllers as business models.
/// </summary>
public class AccountTypeService : IAccountTypeService
{
    // Unit of work supplying repository access; injected by the IoC container.
    private IUnitOfWork _unitOfWork;

    public AccountTypeService(IUnitOfWork unitOfWork)
    {
        _unitOfWork = unitOfWork;
    }

    /// <summary>
    /// Asset type with the given id (active or not) as a business model, or null.
    /// </summary>
    public Business.Models.AccountType GetAssetType(int assetTypeId)
    {
        var dtoAssetType = _unitOfWork.AssetTypes.Get(assetTypeId);
        if (dtoAssetType == null)
        {
            return null;
        }
        return new Business.Models.AccountType(dtoAssetType);
    }

    /// <summary>
    /// All active asset types ordered by name, for the Index view.
    /// </summary>
    public List<Business.Models.AccountType> IndexGetModelList()
    {
        // Consistency: use GetAllActive() like the rest of this class instead
        // of GetAll().Where(r => r.IsActive); the result set is identical.
        return _unitOfWork.AssetTypes.GetAllActive()
            .OrderBy(r => r.Name)
            .Select(r => new Business.Models.AccountType(r))
            .ToList();
    }

    /// <summary>
    /// Creates a new active asset type. Returns the database-assigned id,
    /// or 0 when an active type with the same name already exists.
    /// </summary>
    public int CreatePostUpdateDatabase(Business.Models.AccountType bmAssetType)
    {
        // check for existing name
        var exists = _unitOfWork.AssetTypes.GetAllActive()
            .Any(r => r.Name == bmAssetType.AssetTypeName);
        if (exists)
        {
            return 0;
        }
        // transfer bm to dto
        var dtoAssetType = new Core.Models.AssetType()
        {
            Name = bmAssetType.AssetTypeName,
            IsActive = true,
        };
        // update db
        _unitOfWork.AssetTypes.Add(dtoAssetType);
        _unitOfWork.CommitTrans();
        // return the id assigned by the database
        return dtoAssetType.Id;
    }

    /// <summary>
    /// Active asset type with the given id for the Edit form, or null.
    /// </summary>
    public Business.Models.AccountType EditGetModel(int assetTypeId)
    {
        var dtoAssetType = _unitOfWork.AssetTypes.Find(r => r.IsActive && r.Id == assetTypeId);
        if (dtoAssetType == null)
        {
            return null;
        }
        return new Business.Models.AccountType(dtoAssetType);
    }

    /// <summary>
    /// Renames an asset type. Returns "Success" or an error message
    /// (status-string convention used by the callers).
    /// </summary>
    public string EditPostUpdateDatabase(Business.Models.AccountType bmAssetType)
    {
        // get dto
        var dtoAssetType = _unitOfWork.AssetTypes.Get(bmAssetType.AssetTypeId);
        if (dtoAssetType == null)
        {
            return "Invalid Asset Type";
        }
        // transfer bm to dto; the tracked entity is saved by CommitTrans
        dtoAssetType.Name = bmAssetType.AssetTypeName;
        // update db
        _unitOfWork.CommitTrans();
        return "Success";
    }

    /// <summary>
    /// Active asset type with the given id for the Details view, or null.
    /// </summary>
    public Business.Models.AccountType DetailsGetModel(int assetTypeId)
    {
        var dtoAssetType = _unitOfWork.AssetTypes.Find(r => r.IsActive && r.Id == assetTypeId);
        if (dtoAssetType == null)
        {
            return null;
        }
        return new Business.Models.AccountType(dtoAssetType);
    }

    /// <summary>
    /// Drop-down items for all active asset types; the item matching
    /// <paramref name="selectedId"/> (if any) is preselected.
    /// </summary>
    public List<SelectListItem> GetAssetTypesDropDownList(int? selectedId)
    {
        return _unitOfWork.AssetTypes.FindAll(r => r.IsActive)
            .Select(r => new SelectListItem()
            {
                Value = r.Id.ToString(),
                Selected = r.Id == selectedId,
                Text = r.Name
            })
            .ToList();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
// Join entity linking an asset type to a setting type. Maps to the legacy
// "AccountTypeAttributeTypes" table; Column attributes preserve the old names.
[Table("AccountTypeAttributeTypes")]
public class AssetTypeSettingType : BaseEntity
{
    // FK to the asset type (db column "AccountTypeId").
    [Required]
    [Column("AccountTypeId")]
    public int AssetTypeId { get; set; }

    // FK to the setting type (db column "AttributeTypeId").
    [Required]
    [Column("AttributeTypeId")]
    public int SettingTypeId { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data for transaction descriptions used by the in-memory fakes.
/// Ids run 1..5; id 3 is inactive.
/// </summary>
public class FakeTransactionDescriptions
{
    public static IEnumerable<TransactionDescription> InitialFakeTransactionDescriptions()
    {
        for (var i = 1; i <= 5; i++)
        {
            yield return new TransactionDescription()
            {
                Id = i,
                Name = "TransactionDescription" + i,
                IsActive = i != 3,
            };
        }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
/// <summary>
/// Edit form model: one setting type plus the asset types linked to it.
/// </summary>
public class EditLinkedAssetTypesViewModel
{
    /// <summary>Parameterless constructor required for MVC model binding.</summary>
    public EditLinkedAssetTypesViewModel() { }

    // The setting type whose asset-type links are being edited.
    public int SettingTypeId { get; set; }

    [Display(Name = "Setting Type")]
    public string SettingTypeName { get; set; }

    // One row per asset type linked to this setting type.
    public List<Business.Models.AccountTypeSettingType> LinkedAssetTypeSettingTypes { get; set; }
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.AssetSetting;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
namespace Financial.WebApplication.Controllers
{
/// <summary>
/// Manages the settings (attribute values) attached to a single asset:
/// a partial index plus full-page Create and Edit flows.
/// </summary>
public class AssetSettingController : BaseController
{
    // Repository access; injected by the IoC container.
    private IUnitOfWork _unitOfWork;
    // Business-layer facade; injected but not used directly in this controller.
    private IBusinessService _businessService;

    public AssetSettingController(IUnitOfWork unitOfWork, IBusinessService businessService)
        : base()
    {
        _unitOfWork = unitOfWork;
        _businessService = businessService;
    }

    /// <summary>
    /// Renders the "_Index" partial listing an asset's active settings;
    /// unknown asset ids render an empty list.
    /// </summary>
    [HttpGet]
    public ActionResult Index(int assetId)
    {
        try
        {
            // transfer id to dto
            var dtoAsset = _unitOfWork.Assets.Get(assetId);
            if (dtoAsset != null)
            {
                // get list of linked setting types
                var dbAssetSettings = _unitOfWork.AssetSettings.GetAllActiveForAsset(dtoAsset.Id);
                // create & transfer values to vm
                var vmIndex = new List<IndexViewModel>();
                foreach (var dtoAssetSetting in dbAssetSettings)
                {
                    // transfer to dto
                    var dtoSettingType = _unitOfWork.SettingTypes.GetActive(dtoAssetSetting.SettingTypeId);
                    // validate dto & update vm
                    // NOTE(review): dtoAssetSetting is the loop variable over a
                    // repository result, so the null branch looks unreachable —
                    // confirm the original intent.
                    var vm = dtoAssetSetting == null
                        ? new IndexViewModel(new AssetSetting(), assetId, dtoSettingType)
                        : new IndexViewModel(dtoAssetSetting, assetId, dtoSettingType);
                    vmIndex.Add(vm);
                }
                // display view
                return PartialView("_Index", vmIndex);
            }
            return PartialView("_Index", new List<IndexViewModel>());
        }
        catch (Exception)
        {
            // NOTE(review): this redirects back to this same action without the
            // required assetId route value — verify the intended target.
            TempData["ErrorMessage"] = "Encountered problem";
            return RedirectToAction("Index", "AssetSetting");
        }
    }

    /// <summary>
    /// GET: builds one input row per setting type linked to the asset's type.
    /// </summary>
    [HttpGet]
    public ActionResult Create(int assetId)
    {
        // get messages from other controllers to display in view
        if (TempData["SuccessMessage"] != null)
        {
            ViewData["SuccessMessage"] = TempData["SuccessMessage"];
        }
        try
        {
            // transfer id to dto
            var dtoAsset = _unitOfWork.Assets.Get(assetId);
            if (dtoAsset != null)
            {
                var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
                // transfer dto to vm: one CreateViewModel per active linked setting type
                var vmCreate = new List<CreateViewModel>();
                var dbAssetTypeSettingTypes = _unitOfWork.AssetTypeSettingTypes.GetAllActiveForAssetType(dtoAsset.AssetTypeId);
                foreach(var dtoAssetTypeSettingType in dbAssetTypeSettingTypes)
                {
                    var dtoSettingType = _unitOfWork.SettingTypes.GetActive(dtoAssetTypeSettingType.SettingTypeId);
                    if (dtoSettingType != null)
                    {
                        vmCreate.Add(new CreateViewModel(dtoAsset, dtoSettingType));
                    }
                }
                // display view
                return View("Create", new CreateLinkedSettingTypesViewModel(dtoAsset, dtoAssetType, vmCreate));
            }
            TempData["ErrorMessage"] = "Unable to create record. Try again.";
            return RedirectToAction("Index", "Asset");
        }
        catch (Exception)
        {
            TempData["ErrorMessage"] = "Encountered problem";
            return RedirectToAction("Index", "Asset");
        }
    }

    /// <summary>
    /// POST: inserts one active AssetSetting per posted row, then redirects
    /// to the asset's Details page.
    /// </summary>
    [HttpPost]
    [ValidateAntiForgeryToken]
    public ActionResult Create(CreateLinkedSettingTypesViewModel vmCreate)
    {
        try
        {
            if (ModelState.IsValid)
            {
                // validate vm
                if (vmCreate.CreateViewModels != null)
                {
                    // transfer vm to dto
                    foreach (var vm in vmCreate.CreateViewModels)
                    {
                        _unitOfWork.AssetSettings.Add(new AssetSetting()
                        {
                            AssetId = vm.AssetId,
                            SettingTypeId = vm.SettingTypeId,
                            Value = vm.Value,
                            IsActive = true
                        });
                    }
                    // update db (single commit for all rows)
                    _unitOfWork.CommitTrans();
                    // display view with message
                    TempData["SuccessMessage"] = "Records created";
                    return RedirectToAction("Details", "Asset", new { id = vmCreate.AssetId });
                }
                TempData["SuccessMessage"] = "No Linked Setting Types to Update";
                return RedirectToAction("Details", "Asset", new { id = vmCreate.AssetId });
            }
            TempData["ErrorMessage"] = "Unable to create record. Try again.";
            return RedirectToAction("Index", "Asset");
        }
        catch (Exception)
        {
            TempData["ErrorMessage"] = "Encountered Problem";
            return RedirectToAction("Index", "Asset");
        }
    }

    /// <summary>
    /// GET: builds one edit row per linked setting type that already has an
    /// active value for this asset (types without a value are skipped here).
    /// </summary>
    [HttpGet]
    public ActionResult Edit(int assetId)
    {
        try
        {
            // transfer id to dto
            var dtoAsset = _unitOfWork.Assets.Get(assetId);
            if (dtoAsset != null)
            {
                var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
                // get list of linked setting types
                var dbAssetTypeSettingTypes = _unitOfWork.AssetTypeSettingTypes.GetAllActiveForAssetType(dtoAsset.AssetTypeId);
                // create & transfer values to vm
                var vmEdit = new List<EditViewModel>();
                foreach (var dtoAssetTypeSettingType in dbAssetTypeSettingTypes)
                {
                    // transfer to dto
                    var dtoSettingType = _unitOfWork.SettingTypes.GetActive(dtoAssetTypeSettingType.SettingTypeId);
                    if (dtoSettingType != null)
                    {
                        var dtoAssetSetting = _unitOfWork.AssetSettings.GetActive(dtoAsset.Id, dtoSettingType.Id);
                        if (dtoAssetSetting != null)
                        {
                            vmEdit.Add(new EditViewModel(dtoAssetSetting, dtoAsset, dtoSettingType));
                        }
                    }
                }
                // display view
                return View("Edit", new EditLinkedSettingTypesViewModel(dtoAsset, dtoAssetType, vmEdit));
            }
            TempData["ErrorMessage"] = "Unable to edit record. Try again.";
            return RedirectToAction("Index", "Asset");
        }
        catch (Exception)
        {
            TempData["ErrorMessage"] = "Encountered Problem";
            return RedirectToAction("Index", "Asset");
        }
    }

    /// <summary>
    /// POST: creates rows posted with Id == 0, updates the rest, then
    /// redirects to the asset's Details page.
    /// </summary>
    [HttpPost]
    [ValidateAntiForgeryToken]
    public ActionResult Edit(EditLinkedSettingTypesViewModel vmEditLinkedSettingTypes)
    {
        try
        {
            if (ModelState.IsValid)
            {
                // transfer vm to dto
                foreach (var vmEdit in vmEditLinkedSettingTypes.EditViewModels)
                {
                    // new entry?
                    if (vmEdit.Id == 0)
                    {
                        // YES. Create record
                        _unitOfWork.AssetSettings.Add(new AssetSetting()
                        {
                            AssetId = vmEditLinkedSettingTypes.AssetId,
                            SettingTypeId = vmEdit.SettingTypeId,
                            Value = vmEdit.Value,
                            IsActive = true
                        });
                    }
                    else
                    {
                        // NOTE(review): Get can return null for a stale id,
                        // which would throw here (caught below as the generic
                        // error path) — consider an explicit null check.
                        var dtoAssetSetting = _unitOfWork.AssetSettings.Get(vmEdit.Id);
                        dtoAssetSetting.Value = vmEdit.Value;
                    }
                }
                // update db
                _unitOfWork.CommitTrans();
                // display view with message
                TempData["SuccessMessage"] = "Records updated";
                return RedirectToAction("Details", "Asset", new { id = vmEditLinkedSettingTypes.AssetId });
            }
            TempData["ErrorMessage"] = "Unable to edit record. Try again.";
            return RedirectToAction("Index", "Asset");
        }
        catch (Exception)
        {
            TempData["ErrorMessage"] = "Encountered Problem";
            return RedirectToAction("Index", "Asset");
        }
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Utilities
{
/// <summary>
/// Small conversion helpers shared by the business layer.
/// </summary>
public class DataTypeUtility
{
    /// <summary>
    /// Parses <paramref name="stringValue"/> as an integer; returns 0 when
    /// the input is null, empty, or not a valid integer.
    /// </summary>
    public static int GetIntegerFromString(string stringValue)
    {
        int integerValue = 0;
        int.TryParse(stringValue, out integerValue);
        return integerValue;
    }

    /// <summary>
    /// Formats a nullable date as "MM/dd/yyyy". Returns an empty string for
    /// null or for DateTime.MinValue (treated as "not set").
    /// </summary>
    public static string GetDateValidatedToShortDateString(DateTime? date)
    {
        var formatedDate = string.Empty;
        if (date != null)
        {
            var validDate = (DateTime)date;
            // new DateTime(0001, 1, 1) is DateTime.MinValue; compare explicitly.
            if (validDate > DateTime.MinValue)
            {
                // Fixed: format with the invariant culture so this
                // machine-readable MM/dd/yyyy string does not vary with the
                // server's locale (calendar or date-separator differences).
                formatedDate = validDate.ToString("MM/dd/yyyy", System.Globalization.CultureInfo.InvariantCulture);
            }
        }
        return formatedDate;
    }
}
}
<file_sep>using Financial.Business.ServiceInterfaces;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.Business.Services
{
/// <summary>
/// Builds MVC select lists from the active transaction lookup tables.
/// </summary>
public class SelectListService : ISelectListService
{
    private IUnitOfWork _unitOfWork;

    /// <summary>Default constructor; builds its own unit of work.</summary>
    public SelectListService()
        : this(new UnitOfWork())
    {
    }

    /// <summary>Injection constructor used by tests and the IoC container.</summary>
    public SelectListService(IUnitOfWork unitOfWork)
    {
        _unitOfWork = unitOfWork;
    }

    /// <summary>
    /// Active transaction categories as select items ordered by display text;
    /// the item whose id matches <paramref name="selectedId"/> is preselected.
    /// </summary>
    public List<SelectListItem> TransactionCategories(string selectedId)
    {
        var items =
            from r in _unitOfWork.TransactionCategories.GetAll()
            where r.IsActive
            select new SelectListItem()
            {
                Value = r.Id.ToString(),
                Selected = r.Id.ToString() == selectedId,
                Text = r.Name
            };
        return items.OrderBy(r => r.Text).ToList();
    }

    /// <summary>
    /// Active transaction descriptions as select items (repository order).
    /// </summary>
    public List<SelectListItem> TransactionDescriptions(string selectedId)
    {
        var items =
            from r in _unitOfWork.TransactionDescriptions.GetAll()
            where r.IsActive
            select new SelectListItem()
            {
                Value = r.Id.ToString(),
                Selected = r.Id.ToString() == selectedId,
                Text = r.Name
            };
        return items.ToList();
    }

    /// <summary>
    /// Active transaction types as select items (repository order).
    /// </summary>
    public List<SelectListItem> TransactionTypes(string selectedId)
    {
        var items =
            from r in _unitOfWork.TransactionTypes.GetAll()
            where r.IsActive
            select new SelectListItem()
            {
                Value = r.Id.ToString(),
                Selected = r.Id.ToString() == selectedId,
                Text = r.Name
            };
        return items.ToList();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
using Financial.Core.Models;
namespace Financial.Business.ServiceInterfaces
{
// Service contract for deriving identification text from an account's settings.
public interface IAccountSettingService
{
    // Builds a display string identifying the account from its settings.
    // NOTE(review): the only implementation visible in this file throws
    // NotImplementedException — contract not yet finalized.
    string GetAccountIdentificationInformation(Account bmAccount);
}
}
<file_sep>using Financial.Business.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.Business.ServiceInterfaces
{
// Service contract backing the AccountTransaction controller's CRUD screens.
public interface IAccountTransactionService
{
    // All active transactions, for the Index view.
    List<AccountTransaction> GetListOfActiveTransactions();
    // Account lookup used while creating a transaction; id may be absent.
    Account GetAccountForTransaction(int? assetId);
    // Mutations return a bool success flag rather than throwing.
    bool AddTransaction(AccountTransaction bmAssetTransaction);
    AccountTransaction GetTransactionToEdit(int assetTransactionId);
    bool UpdateTransaction(AccountTransaction bmAssetTransaction);
    AccountTransaction GetTransactionToDelete(int assetTransactionId);
    bool DeleteTransaction(int assetTransactionId);
    // Drop-down builders; selectedId is compared against each item's string id.
    List<SelectListItem> GetAccountSelectList(string selectedId);
    List<SelectListItem> GetTransactionTypeSelectList(string selectedId);
    List<SelectListItem> GetTransactionCategorySelectList(string selectedId);
}
}
<file_sep>using Financial.Business.ServiceInterfaces;
using Financial.Business.Models;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Core.Models;
using AssetType = Financial.Business.Models.AccountType;
using SettingType = Financial.Core.Models.SettingType;
namespace Financial.Business.Services
{
/// <summary>
/// Stub implementation of <see cref="IAccountSettingService"/>; the single
/// contract method is not implemented yet.
/// </summary>
public class AccountSettingService : IAccountSettingService
{
    private IUnitOfWork _unitOfWork;

    /// <summary>Default constructor; builds its own unit of work.</summary>
    public AccountSettingService()
        : this(new UnitOfWork())
    {
    }

    /// <summary>Injection constructor used by tests and the IoC container.</summary>
    public AccountSettingService(IUnitOfWork unitOfWork)
    {
        _unitOfWork = unitOfWork;
    }

    /// <summary>Not implemented; always throws.</summary>
    public string GetAccountIdentificationInformation(Account bmAccount)
    {
        throw new NotImplementedException();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.RelationshipType
{
/// <summary>
/// Edit form model for a relationship type's name and active flag.
/// </summary>
public class EditViewModel
{
    /// <summary>Parameterless constructor required for MVC model binding.</summary>
    public EditViewModel() { }

    /// <summary>Copies the editable fields from the entity.</summary>
    public EditViewModel(Core.Models.RelationshipType dtoRelationshipType)
    {
        IsActive = dtoRelationshipType.IsActive;
        Name = dtoRelationshipType.Name;
        Id = dtoRelationshipType.Id;
    }

    public int Id { get; set; }

    [Required]
    [StringLength(50)]
    public string Name { get; set; }

    [Display(Name = "Active")]
    public bool IsActive { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data for transaction categories used by the in-memory fakes.
/// Ids run 1..5; id 3 is inactive.
/// </summary>
public class FakeTransactionCategories
{
    public static IEnumerable<TransactionCategory> InitialFakeTransactionCategories()
    {
        for (var i = 1; i <= 5; i++)
        {
            yield return new TransactionCategory()
            {
                Id = i,
                Name = "TransactionCategory" + i,
                IsActive = i != 3,
            };
        }
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data for relationship types used by the in-memory fakes.
/// Ids run 1..5; id 3 is inactive.
/// </summary>
public class FakeRelationshipTypes
{
    public static IEnumerable<RelationshipType> InitialFakeRelationshipTypes()
    {
        for (var i = 1; i <= 5; i++)
        {
            yield return new RelationshipType()
            {
                Id = i,
                Name = "RelationshipTypeName" + i,
                IsActive = i != 3,
            };
        }
    }
}
}
<file_sep>using Financial.Core;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
namespace Financial.Business.Tests.Fakes.Repositories
{
/// <summary>
/// In-memory IRepository implementation used as a test double in place of
/// the EF-backed Repository.
/// </summary>
public class InMemoryRepository<TEntity> : IRepository<TEntity> where TEntity : BaseEntity
{
    // Backing store, shared with the caller's list.
    // NOTE(review): the 'as' cast assumes callers always pass a List<TEntity>;
    // any other IEnumerable leaves this null — confirm call sites.
    private List<TEntity> _entities;

    public InMemoryRepository(IEnumerable<TEntity> entities)
    {
        _entities = entities as List<TEntity>;
    }

    /// <summary>Entity with the given id, or null.</summary>
    public TEntity Get(int id)
    {
        return _entities.FirstOrDefault(r => r.Id == id);
    }

    /// <summary>Entity with the given id only when IsActive; otherwise null.</summary>
    public TEntity GetActive(int id)
    {
        return _entities.Where(r => r.IsActive).FirstOrDefault(r => r.Id == id);
    }

    /// <summary>Snapshot copy of all entities.</summary>
    public IEnumerable<TEntity> GetAll()
    {
        return _entities.ToList();
    }

    /// <summary>Snapshot copy of all active entities.</summary>
    public IEnumerable<TEntity> GetAllActive()
    {
        return _entities.Where(r => r.IsActive).ToList();
    }

    /// <summary>
    /// First entity matching the predicate, or null.
    /// NOTE(review): the EF Repository uses SingleOrDefault here, which throws
    /// on multiple matches — fake and real behavior differ for non-unique
    /// predicates.
    /// </summary>
    public TEntity Find(Expression<Func<TEntity, bool>> predicate)
    {
        return _entities.AsQueryable().FirstOrDefault(predicate);
    }

    /// <summary>Deferred query of entities matching the predicate.</summary>
    public IEnumerable<TEntity> FindAll(Expression<Func<TEntity, bool>> predicate)
    {
        return _entities.AsQueryable().Where(predicate);
    }

    /// <summary>True when an entity with the given id exists (active or not).</summary>
    public bool Exists(int id)
    {
        // Simplified from "FirstOrDefault(...) == null ? false : true".
        return _entities.Any(r => r.Id == id);
    }

    /// <summary>
    /// Adds an entity, simulating the database identity column.
    /// NOTE(review): Count + 1 can collide with an existing id after a Remove;
    /// adequate for the current fixtures, fragile in general.
    /// </summary>
    public void Add(TEntity entity)
    {
        // database sets following values
        entity.Id = _entities.Count + 1;
        _entities.Add(entity);
    }

    /// <summary>Adds several entities without assigning ids.</summary>
    public void AddRange(IEnumerable<TEntity> entities)
    {
        _entities.AddRange(entities);
    }

    /// <summary>
    /// Replaces the stored entity with the same id. Note: the replacement is
    /// appended, so list order changes.
    /// </summary>
    public void Update(TEntity entity)
    {
        TEntity oldEntity = _entities.FirstOrDefault(r => r.Id == entity.Id);
        _entities.Remove(oldEntity);
        _entities.Add(entity);
    }

    public void Remove(TEntity entity)
    {
        _entities.Remove(entity);
    }

    public void RemoveRange(IEnumerable<TEntity> entities)
    {
        foreach (var entity in entities)
        {
            _entities.Remove(entity);
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
/// <summary>
/// One row of the asset-type / setting-type link creation form. Can be built
/// from either side of the link; the two int+dto constructors are
/// distinguished only by the dto parameter's type.
/// </summary>
public class CreateViewModel
{
    // Parameterless constructor required for MVC model binding.
    public CreateViewModel()
    {
    }

    /// <summary>Row for linking a setting type to a known asset type.</summary>
    public CreateViewModel(int assetTypeId, Core.Models.SettingType dtoSettingType)
    {
        AssetTypeId = assetTypeId;
        SettingTypeId = dtoSettingType.Id;
        SettingTypeName = dtoSettingType.Name;
        IsActive = false;
    }

    /// <summary>Row for linking an asset type to a known setting type.</summary>
    public CreateViewModel(int settingTypeId, Core.Models.AssetType dtoAssetType)
    {
        SettingTypeId = settingTypeId;
        AssetTypeId = dtoAssetType.Id;
        AssetTypeName = dtoAssetType.Name;
        IsActive = false;
    }

    public int AssetTypeId { get; set; }
    public string AssetTypeName { get; set; }
    public int SettingTypeId { get; set; }
    public string SettingTypeName { get; set; }
    // Initialized to false by both constructors.
    public bool IsActive { get; set; }
}
}
<file_sep>using Financial.Business.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.ServiceInterfaces
{
// Service contract for setting-type CRUD, exposed as AttributeType business models.
public interface ISettingTypeService
{
    AttributeType GetSettingType(int settingTypeId);
    List<AttributeType> GetListOfSettingTypes();
    // Returns the new record's id.
    int AddSettingType(AttributeType bmSettingType);
    // Returns a bool success flag.
    bool EditSettingType(AttributeType bmSettingType);
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data for assets used by the in-memory fakes. Ids run 1..5; id 3 is
/// inactive; id 2 has AssetTypeId 1, all others AssetTypeId 2.
/// </summary>
public static class FakeAssets
{
    public static IEnumerable<Asset> InitialFakeAssets()
    {
        for (var i = 1; i <= 5; i++)
        {
            yield return new Asset()
            {
                Id = i,
                AssetTypeId = i == 2 ? 1 : 2,
                Name = "AssetTypeName" + i,
                IsActive = i != 3,
            };
        }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
/// <summary>
/// Edit form model: one asset type plus the setting types linked to it.
/// </summary>
public class EditLinkedSettingTypesViewModel
{
    // Parameterless constructor required for MVC model binding.
    public EditLinkedSettingTypesViewModel()
    {
    }

    /// <summary>Seeds the header fields from the asset-type business model.</summary>
    public EditLinkedSettingTypesViewModel(Business.Models.AccountType bmAssetType)
    {
        AssetTypeId = bmAssetType.AssetTypeId;
        AssetTypeName = bmAssetType.AssetTypeName;
    }

    public int AssetTypeId { get; set; }

    [Display(Name = "Name")]
    public string AssetTypeName { get; set; }

    [Display(Name = "Active")]
    public bool IsActive { get; set; }

    // One row per setting type available for this asset type.
    public List<Business.Models.AttributeType> SettingTypes { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using Financial.Data;
using Financial.WebApplication.Controllers;
using Moq;
using Financial.Business;
using Financial.Business.Models;
using Financial.WebApplication.Models.ViewModels.AccountTransaction;
using System.Web.Mvc;
using Financial.Core.Models;
using Financial.Tests.Mocks;
using Financial.Core;
namespace Financial.Tests.WebApplication.Controllers
{
[TestFixture]
public class AccountTransactionControllerTests
{
// Canned EF entities (dto layer) shared by the setup helpers.
private Asset _dbAsset;
private AssetSetting _dbAssetSetting;
private AssetTransaction _dbAssetTransaction;
private AssetType _dbAssetType;
private SettingType _dbSettingType;
private TransactionCategory _dbTransactionCategory;
private TransactionDescription _dbTransactionDescription;
private TransactionType _dbTransactionType;
// Fake data layer wired together in SetUp.
private FinancialDbContext _fakeDbContext;
private IUnitOfWork _fakeUnitOfWork;
// Canned business-layer models.
private Account _bmAccount;
private AccountTransaction _bmAccountTransaction;
// Service doubles: strict mock for verification, fake for end-to-end paths.
private Mock<IBusinessService> _mockBusinessService;
private IBusinessService _fakeBusinessService;
// System under test.
private AccountTransactionController _controller;
[SetUp]
public void SetUp()
{
// setup fake model
_bmAccount = new Account
{
AssetId = 1,
AssetName = "a",
AssetTypeId = 2,
AssetTypeName = "b",
};
_bmAccountTransaction = new AccountTransaction
{
AssetTransactionId = 3,
AssetId = _bmAccount.AssetId,
AssetName = _bmAccount.AssetName,
AssetTypeId = _bmAccount.AssetTypeId,
AssetTypeName = _bmAccount.AssetTypeName,
DueDate = new DateTime(2018, 1, 2),
ClearDate = new DateTime(2018, 3, 4),
Amount = 123.45M,
Note = "abc",
};
// setup DbContext
Setup_FakeDbContext();
// setup uow
_fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
// setup Service
_fakeBusinessService = new BusinessService(_fakeUnitOfWork);
// setup controller
_controller = new AccountTransactionController(_fakeBusinessService);
}
[TearDown]
public void TearDown()
{
}
[Test]
public void Index_WhenCalled_ReturnIndexView_Test()
{
var result = _controller.Index();
Assert.That(result.ViewName, Is.EqualTo("Index"));
}
[Test]
public void Index_WhenCalled_ReturnsIndexViewModelList_Test()
{
var result = _controller.Index();
Assert.That(result.ViewData.Model, Is.TypeOf<List<IndexViewModel>>());
}
[Test]
public void Index_WhenCalled_ShouldCallOneTimeAccountTransactionServiceMethodGetListOfActiveTransactions_Test()
{
_mockBusinessService = new Mock<IBusinessService>();
_mockBusinessService.Setup(bs => bs.AccountTransactionService.GetListOfActiveTransactions())
.Returns(It.IsAny<List<AccountTransaction>>);
_controller = new AccountTransactionController(_mockBusinessService.Object);
_controller.Index();
_mockBusinessService.Verify(bs => bs.AccountTransactionService.GetListOfActiveTransactions(),
Times.Once);
}
[Test]
public void Index_WhenAccountTransactionsFound_ReturnAccountTransactionsOrderedDescendingByDueDate_Test()
{
_mockBusinessService = new Mock<IBusinessService>();
var olderDueDate = new DateTime(2018, 1, 2);
var newerDueDate = new DateTime(2018, 3, 4);
SetUp_Service_AccountTransactions_OrderedAscendingByDueDate(olderDueDate, newerDueDate);
var result = _controller.Index();
var vmActual = (List<IndexViewModel>)result.Model;
Assert.Multiple(() =>
{
Assert.That(vmActual.Count, Is.EqualTo(2), "Count");
Assert.That(vmActual[0].DueDate, Is.EqualTo(newerDueDate), "First Index");
Assert.That(vmActual[1].DueDate, Is.EqualTo(olderDueDate), "Second Index");
});
}
[Test]
public void Index_WhenTempDataSuccessMessageIsNotNull_ReturnViewDataSuccessMessage_Test()
{
var expectedMessage = "test message";
_controller.TempData["SuccessMessage"] = expectedMessage;
var result = _controller.Index();
var vResult = (ViewResult)result;
Assert.That(vResult.ViewData["SuccessMessage"].ToString(), Is.EqualTo(expectedMessage));
}
[Test]
public void Index_WhenTempDataErrorMessageIsNotNull_ReturnViewDataErrorMessage_Test()
{
var expectedMessage = "test message";
_controller.TempData["ErrorMessage"] = expectedMessage;
var result = _controller.Index();
var vResult = (ViewResult)result;
Assert.That(vResult.ViewData["ErrorMessage"].ToString(), Is.EqualTo(expectedMessage));
}
// Create
[Test]
public void Create_WhenCalled_ReturnCreateView_Test()
{
var result = (ViewResult)_controller.Create(assetId: null);
Assert.That(result.ViewName, Is.EqualTo("Create"));
}
[Test]
public void Create_WhenCalled_ReturnCreateViewModel_Test()
{
var result = (ViewResult)_controller.Create(assetId: null);
Assert.That(result.Model, Is.InstanceOf<CreateViewModel>());
}
[Test]
public void Create_WhenCalled_ShouldCallOneTimeGetAccountForTransaction_Test()
{
_mockBusinessService = new Mock<IBusinessService>();
_mockBusinessService.Setup(bs => bs.AccountTransactionService.GetAccountForTransaction(null))
.Returns(It.IsAny<Account>());
_controller = new AccountTransactionController(_mockBusinessService.Object);
_controller.Create(assetId: null);
_mockBusinessService.Verify(bs => bs.AccountTransactionService.GetAccountForTransaction(It.IsAny<int?>()),
Times.Once);
}
[Test]
public void Create_WhenCalled_ShouldCallOneTimeGetAccountSelectList_Test()
{
_mockBusinessService = new Mock<IBusinessService>();
_mockBusinessService.Setup(bs => bs.AccountTransactionService.GetAccountSelectList(null))
.Returns(It.IsAny<List<SelectListItem>>());
_controller = new AccountTransactionController(_mockBusinessService.Object);
_controller.Create(assetId: null);
_mockBusinessService.Verify(bs => bs.AccountTransactionService.GetAccountSelectList(It.IsAny<string>()),
Times.Once);
}
[Test]
public void Create_WhenCalled_ReturnAccountSelectList_Test()
{
Setup_FakeService();
_controller = new AccountTransactionController(_fakeBusinessService);
var result = (ViewResult)_controller.Create(assetId: null);
var vmResult = (CreateViewModel)result.ViewData.Model;
Assert.That(vmResult.Accounts.Count(), Is.Not.EqualTo(null));
}
[Test]
public void Create_WhenCalled_ShouldCallOneTimeGetTransactionTypeSelectList_Test()
{
_mockBusinessService = new Mock<IBusinessService>();
_mockBusinessService.Setup(bs => bs.AccountTransactionService.GetTransactionTypeSelectList(null))
.Returns(It.IsAny<List<SelectListItem>>());
_controller = new AccountTransactionController(_mockBusinessService.Object);
_controller.Create(assetId: null);
_mockBusinessService.Verify(bs => bs.AccountTransactionService.GetTransactionTypeSelectList(It.IsAny<string>()),
Times.Once);
}
[Test]
public void Create_WhenCalled_ReturnTransactionTypeSelectList_Test()
{
Setup_FakeService();
_controller = new AccountTransactionController(_fakeBusinessService);
var result = (ViewResult)_controller.Create(assetId: null);
var vmResult = (CreateViewModel)result.ViewData.Model;
Assert.That(vmResult.TransactionTypes.Count(), Is.Not.EqualTo(null));
}
[Test]
public void Create_WhenCalled_ShouldCallOneTimeGetTransactionCategorySelectList_Test()
{
_mockBusinessService = new Mock<IBusinessService>();
_mockBusinessService.Setup(bs => bs.AccountTransactionService.GetTransactionCategorySelectList(null))
.Returns(It.IsAny<List<SelectListItem>>());
_controller = new AccountTransactionController(_mockBusinessService.Object);
_controller.Create(assetId: null);
_mockBusinessService.Verify(bs => bs.AccountTransactionService.GetTransactionCategorySelectList(It.IsAny<string>()),
Times.Once);
}
[Test]
public void Create_WhenCalled_ReturnTransactionCategorySelectList_Test()
{
Setup_FakeService();
_controller = new AccountTransactionController(_fakeBusinessService);
var result = (ViewResult)_controller.Create(assetId: null);
var vmResult = (CreateViewModel)result.ViewData.Model;
Assert.That(vmResult.TransactionCategories, Is.Not.EqualTo(null));
}
// private
private void Setup_FakeDb()
{
_dbAssetType = new AssetType { Id = 1, Name = "a", IsActive = true };
_dbAsset = new Asset { Id = 2, AssetTypeId = _dbAssetType.Id, Name = "b", IsActive = true };
_dbSettingType = new SettingType { Id = 3, Name = "c", IsActive = true };
_dbAssetSetting = new AssetSetting { Id = 4, AssetId = _dbAsset.Id, SettingTypeId = _dbSettingType.Id, Value = "d", IsActive = true };
_dbTransactionCategory = new TransactionCategory { Id = 5, Name = "e", IsActive = true };
_dbTransactionDescription = new TransactionDescription { Id = 6, Name = "f", IsActive = true };
_dbTransactionType = new TransactionType { Id = 7, Name = "g", IsActive = true };
_dbAssetTransaction = new AssetTransaction
{
Id = 8,
TransactionCategoryId = _dbTransactionCategory.Id,
TransactionDescriptionId = _dbTransactionDescription.Id,
TransactionTypeId = _dbTransactionType.Id,
CheckNumber = "123",
DueDate = new DateTime(1234, 5, 6),
ClearDate = new DateTime(1234, 7, 8),
Amount = 123.45M,
Note = "abcdef",
IsActive = true
};
}
private void Setup_FakeDbContext()
{
// setup db
Setup_FakeDb();
// setup dbContext
Setup_FakeDbContext(
new List<Asset> { _dbAsset },
new List<AssetSetting> { _dbAssetSetting },
new List<AssetTransaction> { _dbAssetTransaction },
new List<AssetType> { _dbAssetType },
new List<SettingType> { _dbSettingType },
new List<TransactionCategory> { _dbTransactionCategory },
new List<TransactionDescription> { _dbTransactionDescription },
new List<TransactionType> { _dbTransactionType });
}
private void Setup_FakeDbContext(
List<Asset> fakeAssetList,
List<AssetSetting> fakeAssetSettingList,
List<AssetTransaction> fakeAssetTransactionList,
List<AssetType> fakeAssetTypeList,
List<SettingType> fakeSettingTypeList,
List<TransactionCategory> fakeTransactionCategoryList,
List<TransactionDescription> fakeTransactionDescriptionList,
List<TransactionType> fakeTransactionTypeList)
{
// setup dbContext
_fakeDbContext = MockFinancialDbContext.Create(
assets: fakeAssetList,
assetSettings: fakeAssetSettingList,
assetTransactions: fakeAssetTransactionList,
assetTypes: fakeAssetTypeList,
settingTypes: fakeSettingTypeList,
transactionCategories: fakeTransactionCategoryList,
transactionDescriptions: fakeTransactionDescriptionList,
transactionTypes: fakeTransactionTypeList);
}
private void Setup_FakeUnitOfWork()
{
Setup_FakeDbContext();
_fakeUnitOfWork = new UnitOfWork(_fakeDbContext);
}
private void Setup_FakeService()
{
Setup_FakeUnitOfWork();
_fakeBusinessService = new BusinessService(_fakeUnitOfWork);
}
private void SetUp_Service_AccountTransactions_OrderedAscendingByDueDate(DateTime olderDueDate, DateTime newerDueDate)
{
_mockBusinessService.Setup(bs => bs.AccountTransactionService.GetListOfActiveTransactions())
.Returns(new List<AccountTransaction>
{
new AccountTransaction
{
AssetTransactionId = 1, AssetId = 2, AssetName = "a", AssetTypeId = 3, AssetTypeName = "b",
DueDate = olderDueDate, ClearDate = new DateTime(2018, 5, 6), Amount = 123.45M, Note = "abc",
},
new AccountTransaction
{
AssetTransactionId = 1, AssetId = 2, AssetName = "a", AssetTypeId = 3, AssetTypeName = "b",
DueDate = newerDueDate, ClearDate = new DateTime(2018, 5, 6), Amount = 123.45M, Note = "abc",
},
});
_controller = new AccountTransactionController(_mockBusinessService.Object);
}
}
}
<file_sep>using Financial.Business.Utilities;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.Business.Models
{
/// <summary>
/// Business model for a single account (asset) transaction. The overloaded
/// constructors map different combinations of EF entities into this model,
/// each one populating only the fields its inputs can supply.
/// </summary>
public class AccountTransaction
{
    // Parameterless constructor for model binding / object initializers.
    public AccountTransaction()
    {
    }

    // Asset identity only (raw name from the entity).
    public AccountTransaction(Core.Models.Asset dbAsset)
    {
        AssetId = dbAsset.Id;
        AssetName = dbAsset.Name;
    }

    // Asset identity with a caller-formatted display name.
    public AccountTransaction(Core.Models.Asset dbAsset, string assetNameFormatted)
    {
        AssetId = dbAsset.Id;
        AssetName = assetNameFormatted;
    }

    // Transaction plus its asset; asset-type values are read off the
    // navigation property, so dbAsset.AssetType must be loaded.
    public AccountTransaction(Core.Models.AssetTransaction dbAssetTransaction,
        Core.Models.Asset dbAsset)
    {
        AssetTransactionId = dbAssetTransaction.Id;
        AssetId = dbAsset.Id;
        AssetName = dbAsset.Name;
        AssetTypeId = dbAsset.AssetTypeId;
        AssetTypeName = dbAsset.AssetType.Name;
        DueDate = dbAssetTransaction.DueDate;
        ClearDate = dbAssetTransaction.ClearDate;
        Amount = dbAssetTransaction.Amount;
        Note = dbAssetTransaction.Note;
    }

    // Edit-page shape: also carries the selected transaction category as a
    // string for the drop-down.
    public AccountTransaction(Core.Models.AssetTransaction dbAssetTransaction,
        Core.Models.Asset dbAsset,
        Core.Models.AssetType dbAssetType,
        string assetNameFormatted)
    {
        AssetTransactionId = dbAssetTransaction.Id;
        Amount = dbAssetTransaction.Amount;
        CheckNumber = dbAssetTransaction.CheckNumber;
        ClearDate = dbAssetTransaction.ClearDate;
        DueDate = dbAssetTransaction.DueDate;
        Note = dbAssetTransaction.Note;
        AssetId = dbAsset.Id;
        AssetName = assetNameFormatted;
        AssetTypeId = dbAssetType.Id;
        AssetTypeName = dbAssetType.Name;
        SelectedTransactionCategoryId = dbAssetTransaction.TransactionCategoryId.ToString();
    }

    // Read-only shape: resolves type and category to display names.
    public AccountTransaction(Core.Models.AssetTransaction dbAssetTransaction,
        Core.Models.Asset dbAsset,
        Core.Models.AssetType dbAssetType,
        Core.Models.TransactionType dbTransactionType,
        Core.Models.TransactionCategory dbTransactionCategory,
        string assetNameFormatted)
    {
        AssetTransactionId = dbAssetTransaction.Id;
        Amount = dbAssetTransaction.Amount;
        CheckNumber = dbAssetTransaction.CheckNumber;
        ClearDate = dbAssetTransaction.ClearDate;
        DueDate = dbAssetTransaction.DueDate;
        Note = dbAssetTransaction.Note;
        AssetId = dbAsset.Id;
        AssetName = assetNameFormatted;
        AssetTypeId = dbAssetType.Id;
        AssetTypeName = dbAssetType.Name;
        TransactionTypeName = dbTransactionType.Name;
        TransactionCategoryName = dbTransactionCategory.Name;
    }

    public int AssetTransactionId { get; set; }
    public int AssetId { get; set; }
    public string AssetName { get; set; }
    //public List<SelectListItem> AssetSelectList { get; set; }
    //public string SelectedAssetId { get; set; }
    public int AssetTypeId { get; set; }
    public string AssetTypeName { get; set; }
    public int TransactionTypeId { get; set; }
    public string TransactionTypeName { get; set; }
    //public List<SelectListItem> TransactionTypeSelectList { get; set; }
    //public string SelectedTransactionTypeId { get; set; }
    public int TransactionCategoryId { get; set; }
    public string TransactionCategoryName { get; set; }
    //public List<SelectListItem> TransactionCategorySelectList { get; set; }
    public string SelectedTransactionCategoryId { get; set; }
    public string CheckNumber { get; set; }
    public DateTime DueDate { get; set; }
    public DateTime ClearDate { get; set; }
    public decimal Amount { get; set; }
    public string Note { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
namespace Financial.Tests._DmitriNesteruk
{
/// <summary>
/// Minimal in-memory bank account used by the NUnit demo tests below.
/// </summary>
public class BankAccount
{
    /// <summary>Current balance; mutated only through Deposit/Withdraw.</summary>
    public int Balance { get; private set; }

    public BankAccount(int startingBalance)
    {
        Balance = startingBalance;
    }

    /// <summary>Adds a positive amount to the balance.</summary>
    /// <param name="amount">Amount to deposit; must be greater than zero.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="amount"/> is not positive.</exception>
    public void Deposit(int amount)
    {
        if (amount <= 0)
            throw new ArgumentException(
                "Deposit amount must be positive",
                nameof(amount));
        Balance += amount;
    }

    /// <summary>
    /// Withdraws the amount when funds are sufficient.
    /// </summary>
    /// <param name="amount">Amount to withdraw; must be greater than zero.</param>
    /// <returns>
    /// true when the balance was reduced; false (balance unchanged) for a
    /// non-positive amount or insufficient funds.
    /// </returns>
    public bool Withdraw(int amount)
    {
        // Fix: reject non-positive amounts. Previously Withdraw(-n) passed
        // the Balance >= amount check and silently *increased* the balance.
        if (amount <= 0)
            return false;
        if (Balance >= amount)
        {
            Balance -= amount;
            return true;
        }
        return false;
    }
}
/// <summary>
/// NUnit demo fixture exercising BankAccount plus a few NUnit features
/// (Warn, Assert.Multiple, Assert.Throws, TestCase parametrization).
/// </summary>
[TestFixture]
public class BankAccountTests
{
    private BankAccount ba;

    [SetUp]
    public void SetUp()
    {
        // https://github.com/nunit
        // Fresh account with a 100 balance before every test.
        ba = new BankAccount(100);
    }

    // Demonstrates NUnit warnings: these report but do not fail the test.
    [Test]
    public void WhenWarn_ReturnWarningMessages()
    {
        Warn.If(2 + 2 != 5);
        Warn.If(2 + 2, Is.Not.EqualTo(5));
        Warn.If(() => 2 + 2, Is.Not.EqualTo(5).After(2000));
        Warn.Unless(2 + 2 == 5);
        Warn.Unless(2 + 2, Is.EqualTo(5));
        Warn.Unless(() => 2 + 2, Is.EqualTo(5).After(2000));
        Assert.Warn("This is a warning");
    }

    // Assert.Multiple evaluates every assertion before reporting failures.
    [Test]
    public void WhenMultipleAsserts_ReturnAllFailedMessages()
    {
        Assert.Multiple(() =>
        {
            Assert.That(2 + 2, Is.EqualTo(4));
            Assert.That(3 + 3, Is.EqualTo(6));
        });
    }

    [Test]
    public void Deposit_WhenAmountIsPositiveNumber_BalanceIncreases()
    {
        ba.Deposit(100);
        Assert.That(ba.Balance, Is.EqualTo(200));
    }

    [Test]
    public void Deposit_WhenAmountIsNotPositive_ThrowArgumentException()
    {
        var ex = Assert.Throws<ArgumentException>(
            () => ba.Deposit(-1)
        );
        StringAssert.StartsWith("Deposit amount must be positive",
            ex.Message);
    }

    // Covers partial withdrawal, exact-balance withdrawal, and overdraft.
    [Test]
    [TestCase(50, true, 50)]
    [TestCase(100, true, 0)]
    [TestCase(1000, false, 100)]
    public void Withdraw_WhenAmountToWithdrawShouldSucceed_UpdateBalance(
        int amountToWithdraw, bool shouldSucceed, int expectedBalance)
    {
        var result = ba.Withdraw(amountToWithdraw);
        Assert.Multiple(() =>
        {
            Assert.That(result, Is.EqualTo(shouldSucceed));
            Assert.That(expectedBalance, Is.EqualTo(ba.Balance));
        });
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Core.Metadata.Edm;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
/// <summary>
/// EF entity for an account. The code uses "Asset" naming while the
/// database keeps the legacy names: table "Accounts", FK column
/// "AccountTypeId" (mapped below via [Table]/[Column]).
/// </summary>
[Table("Accounts")]
public class Asset : BaseEntity
{
    // FK to AssetType; stored in the legacy "AccountTypeId" column.
    [Required]
    [Display(Name = "AssetType ID")]
    [Column("AccountTypeId")]
    public int AssetTypeId { get; set; }

    [Required]
    public string Name { get; set; }

    // Navigation property for the FK above.
    [ForeignKey("AssetTypeId")]
    public AssetType AssetType { get; set; }

    //public ICollection<AssetSetting> AssetSettings { get; set; }
    //public ICollection<AssetTransaction> AssetTransactions { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetSetting
{
/// <summary>
/// Row model for the asset-setting index view: one setting type and the
/// asset's value for it.
/// </summary>
public class IndexViewModel
{
    public IndexViewModel() { }

    // Flattens an AssetSetting entity and its SettingType into one row.
    public IndexViewModel(Core.Models.AssetSetting dtoAssetSetting, int assetId,
        Core.Models.SettingType dtoSettingType )
    {
        Id = dtoAssetSetting.Id;
        AssetId = assetId;
        SettingTypeId = dtoSettingType.Id;
        SettingTypeName = dtoSettingType.Name;
        AssetSettingValue = dtoAssetSetting.Value;
    }

    public int Id { get; set; }
    public int AssetId { get; set; }
    public int SettingTypeId { get; set; }
    public string SettingTypeName { get; set; }
    public string AssetSettingValue { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.Account
{
/// <summary>
/// View model for the account Edit form: the asset's current values plus
/// the asset-type drop-down items.
/// </summary>
public class EditViewModel
{
    // Parameterless constructor required for MVC model binding on POST.
    public EditViewModel() { }

    public EditViewModel(Core.Models.Asset dtoAsset, List<SelectListItem> sliAssetTypes)
    {
        Id = dtoAsset.Id;
        Name = dtoAsset.Name;
        // Drop-down selections bind as strings, hence the ToString().
        SelectedAssetTypeId = dtoAsset.AssetTypeId.ToString();
        AssetTypes = sliAssetTypes;
    }

    public int Id { get; set; }
    public string Name { get; set; }

    [Required]
    [Display(Name = "Type")]
    public string SelectedAssetTypeId { get; set; }

    public IEnumerable<SelectListItem> AssetTypes { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository for the AssetType–SettingType link table. "Active" variants
/// filter to live links only.
/// </summary>
public interface IAssetTypeSettingTypeRepository : IRepository<AssetTypeSettingType>
{
    /// <summary>Gets the link for the given asset type / setting type pair.</summary>
    AssetTypeSettingType Get(int assetTypeId, int settingTypeId);
    /// <summary>Gets the active link for the pair, if any.</summary>
    AssetTypeSettingType GetActive(int assetTypeId, int settingTypeId);
    /// <summary>All links (active or not) for an asset type.</summary>
    IEnumerable<AssetTypeSettingType> GetAllForAssetType(int assetTypeId);
    /// <summary>All links (active or not) for a setting type.</summary>
    IEnumerable<AssetTypeSettingType> GetAllForSettingType(int settingTypeId);
    /// <summary>Active links only, for an asset type.</summary>
    IEnumerable<AssetTypeSettingType> GetAllActiveForAssetType(int assetTypeId);
    /// <summary>Active links only, for a setting type.</summary>
    IEnumerable<AssetTypeSettingType> GetAllActiveForSettingType(int settingTypeId);
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
/// <summary>
/// Row model for the account-transaction index view.
/// </summary>
public class IndexViewModel
{
    public IndexViewModel() { }

    public IndexViewModel(Business.Models.AccountTransaction bmAssetTransaction)
    {
        Id = bmAssetTransaction.AssetTransactionId;
        AssetId = bmAssetTransaction.AssetId;
        AssetName = bmAssetTransaction.AssetName;
        DueDate = bmAssetTransaction.DueDate;
        ClearDate = bmAssetTransaction.ClearDate;
        TransactionType = bmAssetTransaction.TransactionTypeName;
        Amount = bmAssetTransaction.Amount;
        Note = bmAssetTransaction.Note;
    }

    // Position of the row in the rendered list.
    // NOTE(review): Index, Income, Expense and Total are not set by the
    // constructor — presumably populated by the controller; confirm.
    public int Index { get; set; }
    public int Id { get; set; }
    public int AssetId { get; set; }

    [Display(Name = "Asset Name")]
    public string AssetName { get; set; }

    [Required]
    [Display(Name = "Due")]
    [DisplayFormat(DataFormatString = "{0:yyyy-MM-dd}")]
    public DateTime DueDate { get; set; }

    [Display(Name = "Cleared")]
    [DisplayFormat(DataFormatString = "{0:yyyy-MM-dd}")]
    public DateTime ClearDate { get; set; }

    public string TransactionType { get; set; }
    public string Income { get; set; }
    public string Expense { get; set; }

    [DisplayFormat(ApplyFormatInEditMode = true, DataFormatString = "{0:c}")]
    public decimal Amount { get; set; }

    // Running balance, formatted as currency.
    [Display(Name = "Balance")]
    [DisplayFormat(ApplyFormatInEditMode = true, DataFormatString = "{0:c}")]
    public decimal Total { get; set; }

    public string Note { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
using Financial.Business.Models;
using Financial.Business.ServiceInterfaces;
using Financial.Business.Utilities;
using Financial.Data;
using Financial.Core.Models;
namespace Financial.Business.Services
{
/// <summary>
/// Business service mapping account (asset) transactions between the EF
/// entities behind the unit of work and the business models consumed by
/// AccountTransactionController. Grouped by the controller action each
/// method supports (Index / Create / Edit / Delete).
/// </summary>
public class AccountTransactionService : IAccountTransactionService
{
    private IUnitOfWork _unitOfWork;

    public AccountTransactionService(IUnitOfWork unitOfWork)
    {
        _unitOfWork = unitOfWork;
    }

    // Index

    /// <summary>
    /// Returns all active transactions ordered by due date, with each
    /// account name formatted (account number appended) and the amount
    /// signed according to the transaction type.
    /// </summary>
    public List<AccountTransaction> GetListOfActiveTransactions()
    {
        // create object to return
        var bmAssetTransactionList = new List<AccountTransaction>();
        // get active transactions
        var dbAssetTransactionList = _unitOfWork.AssetTransactions.GetAllActiveByDueDate().ToList();
        // transfer dto to bm
        foreach (var dbAssetTransaction in dbAssetTransactionList)
        {
            // account information
            var dbAsset = _unitOfWork.Assets.Get(dbAssetTransaction.AssetId);
            // NOTE(review): a single transaction with a missing asset
            // discards the entire result set; skipping that record
            // (continue) may be what was intended — confirm.
            if (dbAsset == null)
                return new List<AccountTransaction>();
            // format values
            dbAsset.Name = GetAssetNameWithAccountNumber(dbAsset);
            dbAssetTransaction.Amount = TransactionUtility.FormatAmount(
                dbAssetTransaction.TransactionTypeId,
                dbAssetTransaction.Amount);
            // transfer to bm
            bmAssetTransactionList.Add(new AccountTransaction(
                dbAssetTransaction,
                dbAsset));
        }
        return bmAssetTransactionList;
    }

    // Create

    /// <summary>
    /// Looks up the account a new transaction will belong to; returns null
    /// when no id is supplied or the asset does not exist.
    /// </summary>
    public Account GetAccountForTransaction(int? assetId)
    {
        if (assetId == null || assetId == 0)
            return null;
        var dbAsset = _unitOfWork.Assets.Get((int)assetId);
        if (dbAsset == null)
            return null;
        dbAsset.Name = GetAssetNameWithAccountNumber(dbAsset);
        return new Account(dbAsset);
    }

    /*
    public AccountTransaction GetTransactionOptions(int? assetId)
    {
        var intAssetId = DataTypeUtility.GetIntegerFromString(assetId.ToString());
        // get asset information
        var dtoAsset = GetAssetFromDatabase(intAssetId);
        if (dtoAsset == null)
        {
            throw new ArgumentNullException();
            //return new Business.Models.AssetTransaction();
        }
        // asset name with additional information
        var formattedAssetName = GetAssetNameWithAccountNumber(dtoAsset);
        // transfer dto to bm
        var bmAssetTransaction = new AccountTransaction(dtoAsset, formattedAssetName);
        // get sli
        //bmAssetTransaction.AssetSelectList = GetAssetSelectList(intAssetId.ToString());
        //bmAssetTransaction.TransactionTypeSelectList = GetTransactionTypeSelectList(null);
        //bmAssetTransaction.TransactionCategorySelectList = GetTransactionCategorySelectList(null);
        // valid asset information
        return bmAssetTransaction;
    }
    */

    /*
    private Core.Models.Asset GetAssetFromDatabase(int assetId)
    {
        return _unitOfWork.Assets.Get(assetId);
    }
    */

    // Appends the active account-number setting to the asset name, or
    // returns the bare name when no such setting exists.
    private string GetAssetNameWithAccountNumber(Asset dbAsset)
    {
        var dbAssetSetting = _unitOfWork.AssetSettings.GetActive(dbAsset.Id, SettingType.IdForAccountNumber);
        if (dbAssetSetting == null)
            return dbAsset.Name;
        return AccountUtility.FormatAccountName(dbAsset.Name, dbAsset.AssetTypeId, dbAssetSetting.Value);
    }

    /// <summary>Drop-down items for all active accounts, ordered by name.</summary>
    public List<SelectListItem> GetAccountSelectList(string selectedId)
    {
        return _unitOfWork.Assets.GetAllActiveOrderedByName()
            .Select(r => new SelectListItem
            {
                Value = r.Id.ToString(),
                Text = GetAssetNameWithAccountNumber(r),
                Selected = r.Id.ToString() == selectedId
            })
            .ToList();
    }

    /// <summary>Drop-down items for all active transaction types.</summary>
    public List<SelectListItem> GetTransactionTypeSelectList(string selectedId)
    {
        return _unitOfWork.TransactionTypes.GetAllActiveOrderedByName()
            .Select(r => new SelectListItem
            {
                Value = r.Id.ToString(),
                Text = r.Name,
                Selected = r.Id.ToString() == selectedId,
            })
            .ToList();
    }

    /// <summary>Drop-down items for all active transaction categories.</summary>
    public List<SelectListItem> GetTransactionCategorySelectList(string selectedId)
    {
        return _unitOfWork.TransactionCategories.GetAllActiveOrderedByName()
            .Select(r => new SelectListItem
            {
                Value = r.Id.ToString(),
                Text = r.Name,
                Selected = r.Id.ToString() == selectedId,
            })
            .ToList();
        /*
        return _unitOfWork.TransactionCategories.GetAll()
            .Where(r => r.IsActive)
            .Select(r => new SelectListItem()
            {
                Value = r.Id.ToString(),
                Selected = r.Id.ToString() == selectedId,
                Text = r.Name
            })
            .OrderBy(r => r.Text)
            .ToList();
        */
    }

    /// <summary>
    /// Persists a new transaction after validating that the referenced
    /// asset, transaction type and category all exist.
    /// </summary>
    /// <returns>true on success; false when input or any FK is invalid.</returns>
    public bool AddTransaction(Business.Models.AccountTransaction bmAssetTransaction)
    {
        // validate input
        if(bmAssetTransaction == null)
        {
            return false;
        }
        // validate ID
        if (_unitOfWork.Assets.Get(bmAssetTransaction.AssetId) == null)
        {
            return false;
        }
        if (_unitOfWork.TransactionTypes.Get(bmAssetTransaction.TransactionTypeId) == null)
        {
            return false;
        }
        if (_unitOfWork.TransactionCategories.Get(bmAssetTransaction.TransactionCategoryId) == null)
        {
            return false;
        }
        // transfer vm to dto
        _unitOfWork.AssetTransactions.Add(new Core.Models.AssetTransaction()
        {
            AssetId = bmAssetTransaction.AssetId,
            TransactionTypeId = bmAssetTransaction.TransactionTypeId,
            TransactionCategoryId = bmAssetTransaction.TransactionCategoryId,
            CheckNumber = bmAssetTransaction.CheckNumber,
            DueDate = bmAssetTransaction.DueDate,
            ClearDate = bmAssetTransaction.ClearDate,
            Amount = bmAssetTransaction.Amount,
            Note = bmAssetTransaction.Note,
            IsActive = true
        });
        // update db
        _unitOfWork.CommitTrans();
        return true;
    }

    // Edit

    /// <summary>
    /// Loads a transaction with its asset and asset type for the Edit form;
    /// returns null if any link in the chain is missing.
    /// </summary>
    public Business.Models.AccountTransaction GetTransactionToEdit(int assetTransactionId)
    {
        var dtoAssetTransaction = _unitOfWork.AssetTransactions.Get(assetTransactionId);
        if (dtoAssetTransaction != null)
        {
            var dtoAsset = _unitOfWork.Assets.Get(dtoAssetTransaction.AssetId);
            if (dtoAsset != null)
            {
                var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
                if (dtoAssetType != null)
                {
                    // add additional identifying info to asset title
                    var assetNameFormatted = GetAssetIdentificationInformation(dtoAsset);
                    // transfer dto to sli
                    // NOTE(review): these two select lists are built but not
                    // passed on (commented out below) — dead work; confirm.
                    var sliTransactionTypes = GetTransactionTypeSelectList(dtoAssetTransaction.TransactionTypeId.ToString());
                    var sliTransactionCategories = GetTransactionCategorySelectList(dtoAssetTransaction.TransactionCategoryId.ToString());
                    return new Business.Models.AccountTransaction(dtoAssetTransaction,
                        dtoAsset,
                        dtoAssetType,
                        assetNameFormatted
                        //sliTransactionTypes,
                        //sliTransactionCategories
                        );
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Formats the asset name with the first active asset setting's value;
    /// returns the bare name when no active setting exists or input is null.
    /// </summary>
    public string GetAssetIdentificationInformation(Core.Models.Asset dtoAsset)
    {
        // validate input
        if(dtoAsset == null)
        {
            return string.Empty;
        }
        // get additional information
        // NOTE(review): unlike GetAssetNameWithAccountNumber, this takes the
        // first active setting of ANY type, not specifically the account
        // number — confirm that is intended.
        var dtoAssetSetting = _unitOfWork.AssetSettings.GetAll()
            .Where(r => r.IsActive)
            .FirstOrDefault(r => r.AssetId == dtoAsset.Id);
        if(dtoAssetSetting != null)
        {
            return AccountUtility.FormatAccountName(dtoAsset.Name, dtoAsset.AssetTypeId, dtoAssetSetting.Value);
        }
        // get standard information
        return dtoAsset.Name;
    }

    /// <summary>
    /// Updates an existing transaction after validating every FK.
    /// Note: AssetId itself is validated but not written back.
    /// </summary>
    /// <returns>true on success; false when input or any lookup fails.</returns>
    public bool UpdateTransaction(Business.Models.AccountTransaction bmAssetTransaction)
    {
        // validate input
        if(bmAssetTransaction == null)
        {
            return false;
        }
        // validate Id
        if (_unitOfWork.Assets.Get(bmAssetTransaction.AssetId) == null ||
            _unitOfWork.TransactionTypes.Get(bmAssetTransaction.TransactionTypeId) == null ||
            _unitOfWork.TransactionCategories.Get(bmAssetTransaction.TransactionCategoryId) == null)
        {
            return false;
        }
        // get dto
        var dtoAssetTransaction = _unitOfWork.AssetTransactions.Get(bmAssetTransaction.AssetTransactionId);
        if(dtoAssetTransaction == null)
        {
            return false;
        }
        // update dto
        dtoAssetTransaction.TransactionTypeId = bmAssetTransaction.TransactionTypeId;
        dtoAssetTransaction.TransactionCategoryId = bmAssetTransaction.TransactionCategoryId;
        dtoAssetTransaction.CheckNumber = bmAssetTransaction.CheckNumber;
        dtoAssetTransaction.DueDate = bmAssetTransaction.DueDate;
        dtoAssetTransaction.ClearDate = bmAssetTransaction.ClearDate;
        dtoAssetTransaction.Amount = bmAssetTransaction.Amount;
        dtoAssetTransaction.Note = bmAssetTransaction.Note;
        // update db
        _unitOfWork.CommitTrans();
        return true;
    }

    // Delete

    /// <summary>
    /// Loads a transaction for the delete-confirmation page; returns null
    /// if any related record in the chain is missing.
    /// </summary>
    public AccountTransaction GetTransactionToDelete(int assetTransactionId)
    {
        var dtoAssetTransaction = _unitOfWork.AssetTransactions.Get(assetTransactionId);
        if (dtoAssetTransaction != null)
        {
            var dtoAsset = _unitOfWork.Assets.Get(dtoAssetTransaction.AssetId);
            if (dtoAsset != null)
            {
                var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
                if (dtoAssetType != null)
                {
                    var dtoTransactionType = _unitOfWork.TransactionTypes.Get(dtoAssetTransaction.TransactionTypeId);
                    if(dtoTransactionType != null)
                    {
                        var dtoTransactionCategory = _unitOfWork.TransactionCategories.Get(dtoAssetTransaction.TransactionCategoryId);
                        if (dtoTransactionCategory != null)
                        {
                            // add additional identifying info to asset name
                            dtoAsset.Name = GetAssetIdentificationInformation(dtoAsset);
                            return new AccountTransaction(dtoAssetTransaction, dtoAsset);
                        }
                    }
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Soft-deletes a transaction by clearing its IsActive flag.
    /// </summary>
    /// <returns>true on success; false when the record does not exist.</returns>
    public bool DeleteTransaction(int assetTransactionId)
    {
        // get dto
        var dtoAssetTransaction = _unitOfWork.AssetTransactions.Get(assetTransactionId);
        if (dtoAssetTransaction == null)
        {
            return false;
        }
        // update dto
        dtoAssetTransaction.IsActive = false;
        // update db
        _unitOfWork.CommitTrans();
        return true;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.Account
{
/// <summary>
/// Read-only view model for the account Details page.
/// </summary>
public class DetailsViewModel
{
    public DetailsViewModel() { }

    public DetailsViewModel(Core.Models.Asset dtoAsset, Core.Models.AssetType dtoAssetType)
    {
        Id = dtoAsset.Id;
        Name = dtoAsset.Name;
        AssetTypeName = dtoAssetType.Name;
    }

    public int Id { get; set; }
    public string Name { get; set; }

    [Display(Name = "Type")]
    public string AssetTypeName { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository for asset transactions with due-date-ordered queries.
/// </summary>
public interface IAssetTransactionRepository : IRepository<AssetTransaction>
{
    /// <summary>All active transactions ordered by due date.</summary>
    IEnumerable<AssetTransaction> GetAllActiveByDueDate();
    /// <summary>Active transactions for one asset, newest due date first.</summary>
    IEnumerable<AssetTransaction> GetAllActiveByDescendingDueDate(int assetId);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Core.Models;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Supplies the canned AssetType rows used to seed in-memory test
/// repositories.
/// </summary>
public static class FakeAssetTypes
{
    /// <summary>
    /// Returns the five seed asset types; record 3 is flagged inactive.
    /// </summary>
    public static IEnumerable<AssetType> InitialFakeAssetTypes()
    {
        var seedRecords = new List<AssetType>
        {
            new AssetType() { Id = 1, Name = "AssetTypeName1", IsActive = true },
            new AssetType() { Id = 2, Name = "AssetTypeName2", IsActive = true },
            new AssetType() { Id = 3, Name = "AssetTypeName3", IsActive = false },
            new AssetType() { Id = 4, Name = "AssetTypeName4", IsActive = true },
            new AssetType() { Id = 5, Name = "AssetTypeName5", IsActive = true },
        };
        return seedRecords;
    }
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.AssetType;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
using Financial.Business.ServiceInterfaces;
namespace Financial.WebApplication.Controllers
{
public class AssetTypeController : BaseController
{
// Data access and business-layer dependencies, injected via constructor.
private IUnitOfWork _unitOfWork;
private IAccountTypeService _assetTypeService;

/// <summary>
/// Wires the controller to its unit of work and asset-type service.
/// </summary>
public AssetTypeController(IUnitOfWork unitOfWork, IAccountTypeService assetTypeService)
    : base()
{
    _unitOfWork = unitOfWork;
    _assetTypeService = assetTypeService;
}
/// <summary>
/// Lists all asset types; surfaces success/error messages handed over by
/// other actions through TempData.
/// </summary>
[HttpGet]
public ViewResult Index()
{
    try
    {
        // get messages from other controllers to display in view
        if (TempData["SuccessMessage"] != null)
        {
            ViewData["SuccessMessage"] = TempData["SuccessMessage"];
        }
        if (TempData["ErrorMessage"] != null)
        {
            ViewData["ErrorMessage"] = TempData["ErrorMessage"];
        }
        // transfer bm to vm
        var vmIndex = _assetTypeService.IndexGetModelList()
            .Select(r => new IndexViewModel(r))
            .ToList();
        return View("Index", vmIndex);
    }
    catch(Exception)
    {
        // fall back to an empty list rather than an error page
        TempData["ErrorMessage"] = "Encountered problem";
        return View("Index", new List<IndexViewModel>());
    }
}
/// <summary>
/// Shows the empty Create form; redirects to Index on failure.
/// </summary>
[HttpGet]
public ActionResult Create()
{
    try
    {
        return View("Create");
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "AssetType");
    }
}
/// <summary>
/// Creates a new asset type, then forwards to linking setting types for it.
/// A zero id returned by the service signals a duplicate name.
/// </summary>
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(CreateViewModel vmCreate)
{
    try
    {
        if (!ModelState.IsValid)
        {
            return RedirectToAction("Index", "AssetType");
        }
        // transfer vm to bm
        var bmAssetType = new Business.Models.AccountType()
        {
            AssetTypeName = vmCreate.Name,
        };
        // update db
        bmAssetType.AssetTypeId = _assetTypeService.CreatePostUpdateDatabase(bmAssetType);
        if(bmAssetType.AssetTypeId == 0)
        {
            // service returns 0 when the name already exists
            ViewData["ErrorMessage"] = "Name already exists";
            return View("Create", vmCreate);
        }
        // display View with message
        TempData["SuccessMessage"] = "Asset Type Created";
        return RedirectToAction("CreateLinkedSettingTypes", "AssetTypeSettingType", new { assetTypeId = bmAssetType.AssetTypeId });
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "AssetType");
    }
}
[HttpGet]
public ActionResult Edit(int id)
{
try
{
// get bm
var bmAssetType = _assetTypeService.EditGetModel(id);
if(bmAssetType == null)
{
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Index", "AssetType");
}
// transfer bm to vm
return View("Edit", new EditViewModel(bmAssetType));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AssetType");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(EditViewModel vmEdit)
{
try
{
if (!ModelState.IsValid)
{
return RedirectToAction("Index", "AssetType");
}
// transfer vm to bm
var bmAssetType = new Business.Models.AccountType()
{
AssetTypeId = vmEdit.Id,
AssetTypeName = vmEdit.Name,
};
// update db
var message = _assetTypeService.EditPostUpdateDatabase(bmAssetType);
if(message != "Success")
{
TempData["ErrorMessage"] = message;
return RedirectToAction("Index", "AssetType");
}
TempData["SuccessMessage"] = "Record updated.";
return RedirectToAction("Index", "AssetType");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AssetType");
}
}
[HttpGet]
public ActionResult Details(int id)
{
try
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
if (TempData["ErrorMessage"] != null)
{
ViewData["ErrorMessage"] = TempData["ErrorMessage"];
}
// transfer bm to vm
var bmAssetType = _assetTypeService.DetailsGetModel(id);
if(bmAssetType == null)
{
TempData["ErrorMessage"] = "Unable to display record. Try again.";
return RedirectToAction("Index", "AssetType");
}
return View("Details", new DetailsViewModel(bmAssetType));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AssetType");
}
}
}
}<file_sep>using Financial.Business.Utilities;
using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.SettingType;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
namespace Financial.WebApplication.Controllers
{
    /// <summary>
    /// MVC controller for CRUD screens over setting types (asset attributes).
    /// All failure paths redirect back to the Index view with a user message.
    /// </summary>
    public class SettingTypeController : BaseController
    {
        private IUnitOfWork _unitOfWork;
        private IBusinessService _businessService;

        public SettingTypeController(IUnitOfWork unitOfWork, IBusinessService businessService)
            : base()
        {
            _unitOfWork = unitOfWork;
            _businessService = businessService;
        }

        /// <summary>
        /// Lists all setting types ordered by name.
        /// </summary>
        [HttpGet]
        public ViewResult Index()
        {
            try
            {
                // get messages from other controllers to display in view
                // (TempData survives the redirect; copy into ViewData for this render)
                if (TempData["SuccessMessage"] != null)
                {
                    ViewData["SuccessMessage"] = TempData["SuccessMessage"];
                }
                if (TempData["ErrorMessage"] != null)
                {
                    ViewData["ErrorMessage"] = TempData["ErrorMessage"];
                }
                // transfer bm to vm
                var vmIndex = _businessService.SettingTypeService.GetListOfSettingTypes()
                    .OrderBy(r => r.SettingTypeName)
                    .Select(r => new IndexViewModel(r))
                    .ToList();
                return View("Index", vmIndex);
            }
            catch(Exception)
            {
                ViewData["ErrorMessage"] = "Encountered problem";
                return View("Index", new List<IndexViewModel>());
            }
        }

        /// <summary>
        /// Shows the empty create form.
        /// </summary>
        [HttpGet]
        public ActionResult Create()
        {
            try
            {
                return View("Create");
            }
            catch (Exception)
            {
                TempData["ErrorMessage"] = "Encountered problem";
                return RedirectToAction("Index", "SettingType");
            }
        }

        /// <summary>
        /// Persists a new setting type, then forwards to the screen that links it
        /// to asset types. A returned id of 0 means the name already exists.
        /// </summary>
        [HttpPost]
        [ValidateAntiForgeryToken]
        public ActionResult Create(CreateViewModel vmCreate)
        {
            try
            {
                if (!ModelState.IsValid)
                {
                    return RedirectToAction("Index", "SettingType");
                }
                // transfer vm to bm
                var bmSettingType = new Business.Models.AttributeType()
                {
                    SettingTypeName = vmCreate.Name,
                };
                // update db; id 0 signals a duplicate name
                bmSettingType.SettingTypeId = _businessService.SettingTypeService.AddSettingType(bmSettingType);
                if (bmSettingType.SettingTypeId == 0)
                {
                    ViewData["ErrorMessage"] = "Name already exists";
                    return View("Create", vmCreate);
                }
                // display View with message
                TempData["SuccessMessage"] = "Setting Type Created";
                return RedirectToAction("CreateLinkedAssetTypes", "AssetTypeSettingType", new { settingTypeId = bmSettingType.SettingTypeId });
            }
            catch (Exception)
            {
                TempData["ErrorMessage"] = "Encountered problem";
                return RedirectToAction("Index", "SettingType");
            }
        }

        /// <summary>
        /// Loads an existing setting type into the edit form.
        /// </summary>
        [HttpGet]
        public ActionResult Edit(int id)
        {
            try
            {
                // get bm
                var bmSettingType = _businessService.SettingTypeService.GetSettingType(id);
                if (bmSettingType == null)
                {
                    TempData["ErrorMessage"] = "Unable to edit record. Try again.";
                    return RedirectToAction("Index", "SettingType");
                }
                // transfer bm to vm
                return View("Edit", new EditViewModel(bmSettingType));
            }
            catch (Exception)
            {
                TempData["ErrorMessage"] = "Encountered problem";
                return RedirectToAction("Index", "SettingType");
            }
        }

        /// <summary>
        /// Applies edits to a setting type and returns to the list.
        /// </summary>
        [HttpPost]
        [ValidateAntiForgeryToken]
        public ActionResult Edit(EditViewModel vmEdit)
        {
            try
            {
                if (!ModelState.IsValid)
                {
                    return RedirectToAction("Index", "SettingType");
                }
                // transfer vm to bm
                var bmSettingType = new Business.Models.AttributeType()
                {
                    SettingTypeId = vmEdit.Id,
                    SettingTypeName = vmEdit.Name,
                };
                // update db; service reports success as a bool
                var updated = _businessService.SettingTypeService.EditSettingType(bmSettingType);
                if (!updated)
                {
                    TempData["ErrorMessage"] = "Problem updating record. Try again.";
                    return RedirectToAction("Index", "SettingType");
                }
                TempData["SuccessMessage"] = "Record updated.";
                return RedirectToAction("Index", "SettingType");
            }
            catch (Exception)
            {
                TempData["ErrorMessage"] = "Encountered problem";
                return RedirectToAction("Index", "SettingType");
            }
        }

        /// <summary>
        /// Shows the read-only details page for one setting type.
        /// </summary>
        [HttpGet]
        public ActionResult Details(int id)
        {
            try
            {
                // get messages from other controllers to display in view
                if (TempData["SuccessMessage"] != null)
                {
                    ViewData["SuccessMessage"] = TempData["SuccessMessage"];
                }
                if (TempData["ErrorMessage"] != null)
                {
                    ViewData["ErrorMessage"] = TempData["ErrorMessage"];
                }
                // transfer bm to vm
                var bmSettingType = _businessService.SettingTypeService.GetSettingType(id);
                if (bmSettingType == null)
                {
                    TempData["ErrorMessage"] = "Unable to display record. Try again.";
                    return RedirectToAction("Index", "SettingType");
                }
                return View("Details", new DetailsViewModel(bmSettingType));
            }
            catch (Exception)
            {
                TempData["ErrorMessage"] = "Encountered problem";
                return RedirectToAction("Index", "SettingType");
            }
        }
    }
}<file_sep>using Financial.Business;
using Financial.Core;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Financial.WebApplication.Controllers
{
    /// <summary>
    /// Common base class for the application's MVC controllers. Adds no
    /// behavior today; serves as an extension point for cross-cutting concerns
    /// shared by derived controllers.
    /// </summary>
    public class BaseController : Controller
    {
        public BaseController()
        {
        }
    }
}<file_sep>using Financial.Business.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.Business.ServiceInterfaces
{
    /// <summary>
    /// Business-layer operations backing the AssetType controller screens;
    /// method names mirror the controller actions they serve.
    /// </summary>
    public interface IAccountTypeService
    {
        /// <summary>All asset types for the Index listing.</summary>
        List<Business.Models.AccountType> IndexGetModelList();
        /// <summary>Persists a new asset type; callers treat a returned id of 0 as "name already exists".</summary>
        int CreatePostUpdateDatabase(AccountType bmAssetType);
        /// <summary>Loads one asset type for editing; callers handle null as "not found".</summary>
        Business.Models.AccountType EditGetModel(int assetTypeId);
        /// <summary>Saves edits; callers expect the string "Success", otherwise a user-facing error message.</summary>
        string EditPostUpdateDatabase(Business.Models.AccountType bmAssetType);
        /// <summary>Loads one asset type for the details page; callers handle null as "not found".</summary>
        Business.Models.AccountType DetailsGetModel(int assetTypeId);
        /// <summary>Drop-down items for asset types; selectedId (nullable) marks the pre-selected entry.</summary>
        List<SelectListItem> GetAssetTypesDropDownList(int? selectedId);
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
    /// <summary>
    /// Lookup entity for setting types; note it maps to the "AttributeTypes"
    /// table rather than a table of the same name.
    /// </summary>
    [Table("AttributeTypes")]
    public class SettingType : BaseEntity
    {
        [Required]
        public string Name { get; set; }
        // navigation property: values recorded against this setting type
        public ICollection<AssetSetting> AssetSettings { get; set; }
        // NOTE(review): hard-coded row id for the account-number setting;
        // assumed to match seeded data — confirm against migrations/seeding.
        public static readonly int IdForAccountNumber = 1;
    }
}
<file_sep>using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data
{
    /// <summary>
    /// Unit-of-work contract exposing one repository per aggregate plus
    /// explicit transaction control.
    /// </summary>
    public interface IUnitOfWork : IDisposable
    {
        IAssetSettingRepository AssetSettings { get; }
        IAssetRelationshipRepository AssetRelationships { get; }
        IAssetRepository Assets { get; }
        IAssetTransactionRepository AssetTransactions { get; }
        IAssetTypeSettingTypeRepository AssetTypeSettingTypes { get; }
        IAssetTypeRelationshipTypeRepository AssetTypeRelationshipTypes { get; }
        IAssetTypeRepository AssetTypes { get; }
        // NOTE(review): this is the only repository property with a setter;
        // looks unintentional — confirm whether a get-only property was meant.
        IParentChildRelationshipTypeRepository ParentChildRelationshipTypes { get; set; }
        IRelationshipTypeRepository RelationshipTypes { get; }
        ISettingTypeRepository SettingTypes { get; }
        ITransactionCategoryRepository TransactionCategories { get; }
        ITransactionDescriptionRepository TransactionDescriptions { get; }
        ITransactionTypeRepository TransactionTypes { get; }
        // explicit transaction lifecycle
        void BeginTrans();
        void CommitTrans();
        void RollBackTrans();
        // persists pending changes
        void Complete();
    }
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
public class AssetTypeSettingTypeRepository : Repository<AssetTypeSettingType>, IAssetTypeSettingTypeRepository
{
public AssetTypeSettingTypeRepository(FinancialDbContext context)
: base(context)
{
}
private FinancialDbContext FinancialDbContext
{
get { return _context as FinancialDbContext; }
}
public AssetTypeSettingType Get(int assetTypeId, int settingTypeId)
{
return FinancialDbContext.AssetTypesSettingTypes
.Where(r => r.AssetTypeId == assetTypeId)
.FirstOrDefault(r => r.SettingTypeId == settingTypeId);
}
public AssetTypeSettingType GetActive(int assetTypeId, int settingTypeId)
{
return FinancialDbContext.AssetTypesSettingTypes
.Where(r => r.IsActive)
.Where(r => r.AssetTypeId == assetTypeId)
.FirstOrDefault(r => r.SettingTypeId == settingTypeId);
}
public IEnumerable<AssetTypeSettingType> GetAllForAssetType(int assetTypeId)
{
return FinancialDbContext.AssetTypesSettingTypes
.Where(r => r.AssetTypeId == assetTypeId)
.ToList();
}
public IEnumerable<AssetTypeSettingType> GetAllForSettingType(int settingTypeId)
{
return FinancialDbContext.AssetTypesSettingTypes
.Where(r => r.SettingTypeId == settingTypeId)
.ToList();
}
public IEnumerable<AssetTypeSettingType> GetAllActiveForAssetType(int assetTypeId)
{
return FinancialDbContext.AssetTypesSettingTypes
.Where(r => r.IsActive)
.Where(r => r.AssetTypeId == assetTypeId)
.ToList();
}
public IEnumerable<AssetTypeSettingType> GetAllActiveForSettingType(int settingTypeId)
{
return FinancialDbContext.AssetTypesSettingTypes
.Where(r => r.IsActive)
.Where(r => r.SettingTypeId == settingTypeId)
.ToList();
}
}
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System.Collections.Generic;
using System.Linq;
namespace Financial.Business.Tests.Fakes.Repositories
{
public class InMemoryAssetTransactionRepository : InMemoryRepository<AssetTransaction>, IAssetTransactionRepository
{
private List<AssetTransaction> _entities = null;
public InMemoryAssetTransactionRepository(IEnumerable<AssetTransaction> entities)
: base(entities)
{
_entities = entities as List<AssetTransaction>;
}
public IEnumerable<AssetTransaction> GetAllActiveByDescendingDueDate(int assetId)
{
return _entities
.Where(r => r.IsActive)
.Where(r => r.AssetId == assetId)
.OrderByDescending(r => r.DueDate)
.ToList();
}
public IEnumerable<AssetTransaction> GetAllActiveByDueDate()
{
return _entities
.Where(r => r.IsActive)
.OrderBy(r => r.DueDate)
.ToList();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
    /// <summary>
    /// Row model for the transaction list shown on an asset's page.
    /// </summary>
    public class DisplayForAssetViewModel
    {
        public DisplayForAssetViewModel() { }

        /// <summary>
        /// Builds a row from the transaction entity plus its category;
        /// clearDate arrives already formatted by the caller.
        /// </summary>
        public DisplayForAssetViewModel(Core.Models.AssetTransaction dtoAssetTransaction,
            string clearDate, Core.Models.TransactionCategory dtoTransactionCategory)
        {
            Id = dtoAssetTransaction.Id;
            DueDate = dtoAssetTransaction.DueDate.ToString("MM/dd/yyyy");
            ClearDate = clearDate;
            Category = dtoTransactionCategory.Name;
            Amount = dtoAssetTransaction.Amount;
            Note = dtoAssetTransaction.Note;
        }

        public int Id { get; set; }
        // dates are stored pre-formatted as strings, not DateTime
        [Display(Name = "Due")]
        [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}")]
        public string DueDate { get; set; }
        [Display(Name = "Cleared")]
        [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}")]
        public string ClearDate { get; set; }
        public string Category { get; set; }
        // rendered as currency in edit and display modes
        [DisplayFormat(ApplyFormatInEditMode = true, DataFormatString = "{0:c}")]
        public decimal Amount { get; set; }
        public string Note { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
    /// <summary>
    /// Row model for linked setting types on an asset type's page (and, via the
    /// overloads, linked asset types on a setting type's page).
    /// </summary>
    public class IndexLinkedSettingTypesViewModel
    {
        public IndexLinkedSettingTypesViewModel()
        {
        }

        /// <summary>Row built from the setting-type side of the link.</summary>
        public IndexLinkedSettingTypesViewModel(Business.Models.AttributeType bmSettingType)
        {
            Id = bmSettingType.AssetTypeSettingTypeId;
            //AssetTypeId = dtoAssetTypeSettingType.AssetTypeId;
            //IsActive = dtoAssetTypeSettingType.IsActive;
            SettingTypeId = bmSettingType.SettingTypeId;
            SettingTypeName = bmSettingType.SettingTypeName;
            //SettingTypeIsActive = dtoSettingType.IsActive;
        }

        /// <summary>Row built from the asset-type side of the link.</summary>
        public IndexLinkedSettingTypesViewModel(Business.Models.AccountType bmAssetType)
        {
            Id = bmAssetType.AssetTypeSettingTypeId;
            //AssetTypeId = dtoAssetTypeSettingType.AssetTypeId;
            //IsActive = dtoAssetTypeSettingType.IsActive;
            AssetTypeId = bmAssetType.AssetTypeId;
            AssetTypeName = bmAssetType.AssetTypeName;
            //SettingTypeIsActive = dtoSettingType.IsActive;
        }

        /// <summary>Row built directly from the EF entities.</summary>
        public IndexLinkedSettingTypesViewModel(Core.Models.SettingType dtoSettingType,
            Core.Models.AssetTypeSettingType dtoAssetTypeSettingType)
        {
            Id = dtoAssetTypeSettingType.Id;
            AssetTypeId = dtoAssetTypeSettingType.AssetTypeId;
            IsActive = dtoAssetTypeSettingType.IsActive;
            SettingTypeId = dtoSettingType.Id;
            SettingTypeName = dtoSettingType.Name;
            SettingTypeIsActive = dtoSettingType.IsActive;
        }

        // id of the link row itself
        public int Id { get; set; }
        public int AssetTypeId { get; set; }
        [Display(Name = "Asset Type")]
        public string AssetTypeName { get; set; }
        public int SettingTypeId { get; set; }
        [Display(Name = "Setting Type")]
        public string SettingTypeName { get; set; }
        [Display(Name = "SettingType IsActive")]
        public bool SettingTypeIsActive { get; set; }
        [Display(Name = "AssetTypeSettingType IsActive")]
        public bool IsActive { get; set; }
    }
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
    /// <summary>
    /// List-backed fake repository; adds no members beyond the generic base.
    /// </summary>
    public class InMemoryParentChildRelationshipTypeRepository : InMemoryRepository<ParentChildRelationshipType>, IParentChildRelationshipTypeRepository
    {
        private List<ParentChildRelationshipType> _entities = null;

        public InMemoryParentChildRelationshipTypeRepository(IEnumerable<ParentChildRelationshipType> entities)
            : base(entities)
        {
            // cast succeeds only when callers pass a List<T>; otherwise null
            _entities = entities as List<ParentChildRelationshipType>;
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.Business.ServiceInterfaces
{
    /// <summary>
    /// Builds MVC drop-down item lists; each selectedId (a string, matching how
    /// form values bind) marks the pre-selected entry.
    /// </summary>
    public interface ISelectListService
    {
        List<SelectListItem> TransactionCategories(string selectedId);
        List<SelectListItem> TransactionDescriptions(string selectedId);
        List<SelectListItem> TransactionTypes(string selectedId);
    }
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
public class InMemoryAssetSettingRepository : InMemoryRepository<AssetSetting>, IAssetSettingRepository
{
private List<AssetSetting> _entities = null;
public InMemoryAssetSettingRepository(IEnumerable<AssetSetting> entities)
: base(entities)
{
_entities = entities as List<AssetSetting>;
}
public AssetSetting GetActive(int assetId, int settingTypeId)
{
return _entities
.Where(r => r.IsActive)
.Where(r => r.AssetId == assetId)
.FirstOrDefault(r => r.SettingTypeId == settingTypeId);
}
public IEnumerable<AssetSetting> GetAllActiveForAsset(int assetId)
{
return _entities
.Where(r => r.IsActive)
.Where(r => r.AssetId == assetId)
.ToList();
}
public IEnumerable<AssetSetting> GetAllActiveForSettingType(int settingTypeId)
{
return _entities
.Where(r => r.IsActive)
.Where(r => r.SettingTypeId == settingTypeId)
.ToList();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.ParentChildRelationshipType
{
    /// <summary>
    /// Confirmation model for deleting a parent/child relationship-type link;
    /// shows the names of all three relationship types involved.
    /// </summary>
    public class DeleteViewModel
    {
        public DeleteViewModel() { }

        public DeleteViewModel(Core.Models.ParentChildRelationshipType dtoParentChildRelationshipType,
            Core.Models.RelationshipType dtoRelationshipType,
            Core.Models.RelationshipType dtoParentRelationshipType,
            Core.Models.RelationshipType dtoChildRelationshipType)
        {
            Id = dtoParentChildRelationshipType.Id;
            RelationshipTypeId = dtoRelationshipType.Id;
            RelationshipTypeName = dtoRelationshipType.Name;
            ParentRelationshipTypeName = dtoParentRelationshipType.Name;
            ChildRelationshipTypeName = dtoChildRelationshipType.Name;
        }

        // id of the parent/child link row being deleted
        public int Id { get; set; }
        public int RelationshipTypeId { get; set; }
        public string RelationshipTypeName { get; set; }
        [Display(Name = "Parent")]
        public string ParentRelationshipTypeName { get; set; }
        [Display(Name = "Child")]
        public string ChildRelationshipTypeName { get; set; }
    }
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
public class InMemorySettingTypeRepository : InMemoryRepository<SettingType>, ISettingTypeRepository
{
private List<SettingType> _entities = null;
public InMemorySettingTypeRepository(IEnumerable<SettingType> entities)
: base(entities)
{
_entities = entities as List<SettingType>;
}
public IEnumerable<SettingType> GetAllOrderedByName()
{
return _entities
.OrderBy(r => r.Name)
.ToList();
}
public int CountMatching(string name)
{
return _entities
.Count(r => r.Name == name);
}
public int CountMatching(int excludeId, string name)
{
return _entities
.Where(r => r.Id != excludeId)
.Count(r => r.Name == name);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
    /// <summary>
    /// Link entity pairing a parent and child asset type under a given
    /// parent/child relationship type. Table and column names still use the
    /// older "AccountType" terminology.
    /// </summary>
    [Table("AccountTypeRelationshipTypes")]
    public class AssetTypeRelationshipType : BaseEntity
    {
        [Required]
        [Display(Name = "ParentAssetType ID")]
        [Column("ParentAccountTypeId")]
        public int ParentAssetTypeId { get; set; }
        // NOTE(review): unlike the parent id this one is not [Required] — confirm intentional
        [Display(Name = "ChildAssetType ID")]
        [Column("ChildAccountTypeId")]
        public int ChildAssetTypeId { get; set; }
        [Required]
        [Display(Name = "ParentChildRelationshipType ID")]
        public int ParentChildRelationshipTypeId { get; set; }
    }
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Models
{
public class AccountTypeSettingType
{
public AccountTypeSettingType() { }
/*
public AssetTypeSettingType(AssetType assetType, Core.Models.SettingType settingType)
{
AssetTypeId = assetType.Id;
AssetTypeName = assetType.Name;
SettingTypeId = settingType.Id;
SettingTypeName = settingType.Name;
}
*/
/*
public AssetTypeSettingType(Core.Models.AssetTypeSettingType assetTypeSettingType,
AssetType assetType,
Core.Models.SettingType settingType)
{
AssetTypeSettingTypeId = assetTypeSettingType.Id;
AssetTypeId = assetType.Id;
AssetTypeName = assetType.Name;
SettingTypeId = settingType.Id;
SettingTypeName = settingType.Name;
//IsActive = assetTypeSettingType.IsActive;
}
*/
public int AssetTypeSettingTypeId { get; set; }
public int AssetTypeId { get; set; }
public string AssetTypeName { get; set; }
public int SettingTypeId { get; set; }
public string SettingTypeName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Core;
using Financial.Core.Models;
namespace Financial.Tests.Mocks
{
    /// <summary>
    /// Test helper that assembles a FinancialDbContext whose DbSet properties
    /// are mock sets seeded from the supplied lists. Parameters left null are
    /// passed through to MockDbSet.Create (assumed to tolerate null — confirm).
    /// </summary>
    public static class MockFinancialDbContext
    {
        public static FinancialDbContext Create(
            List<Asset> assets = null,
            List<AssetSetting> assetSettings = null,
            List<AssetTransaction> assetTransactions = null,
            List<AssetType> assetTypes = null,
            List<SettingType> settingTypes = null,
            List<TransactionCategory> transactionCategories = null,
            List<TransactionDescription> transactionDescriptions = null,
            List<TransactionType> transactionTypes = null)
        {
            // setup dbContext
            return new FinancialDbContext
            {
                Assets = MockDbSet.Create<Asset>(assets),
                AssetSettings = MockDbSet.Create<AssetSetting>(assetSettings),
                AssetTransactions = MockDbSet.Create<AssetTransaction>(assetTransactions),
                AssetTypes = MockDbSet.Create<AssetType>(assetTypes),
                SettingTypes = MockDbSet.Create<SettingType>(settingTypes),
                TransactionCategories = MockDbSet.Create<TransactionCategory>(transactionCategories),
                TransactionDescriptions = MockDbSet.Create<TransactionDescription>(transactionDescriptions),
                TransactionTypes = MockDbSet.Create<TransactionType>(transactionTypes),
            };
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
namespace Financial.Tests.Business.Services
{
    /// <summary>
    /// Placeholder NUnit fixture for AccountSettingService; contains no
    /// arrange/act/assert code yet.
    /// </summary>
    [TestFixture]
    public class AccountSettingServiceTests
    {
        [SetUp]
        public void SetUp()
        {
        }

        [TearDown]
        public void TearDown()
        {
        }

        // TODO: implement — currently an empty skeleton
        [Test]
        public void GetAccountIdentificationInformation_Test()
        {
        }
    }
}
<file_sep>using Financial.Business;
using Financial.Data;
using Financial.WebApplication.Models.ViewModels.TransactionCategory;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Financial.WebApplication.Controllers
{
public class TransactionCategoryController : BaseController
{
private IUnitOfWork _unitOfWork;
private IBusinessService _businessService;
public TransactionCategoryController(IUnitOfWork unitOfWork, IBusinessService businessService)
: base()
{
_unitOfWork = unitOfWork;
_businessService = businessService;
}
[HttpGet]
public ActionResult Index()
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
if (TempData["ErrorMessage"] != null)
{
ViewData["ErrorMessage"] = TempData["ErrorMessage"];
}
// transfer dto to vm
var vmIndex = _unitOfWork.TransactionCategories.GetAll()
.Where(r => r.IsActive)
.Select(r => new IndexViewModel(r))
.OrderBy(r => r.Name)
.ToList();
// display view
return View("Index", vmIndex);
}
[HttpGet]
public ActionResult Create()
{
// display view
return View("Create");
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(CreateViewModel vmCreate)
{
if(!ModelState.IsValid)
{
return View("Create", vmCreate);
}
// transfer vm to dto
_unitOfWork.TransactionCategories.Add(new Core.Models.TransactionCategory()
{
Name = vmCreate.Name,
IsActive = true
});
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "New Category Added";
return RedirectToAction("Index", "TransactionCategory");
}
[HttpGet]
public ActionResult Edit(int id)
{
// transfer id to dto
var dtoTransactionCategory = _unitOfWork.TransactionCategories.Get(id);
// validate dto
if(dtoTransactionCategory == null)
{
TempData["ErrorMessage"] = "Encountered problem updated transaction category. Try again.";
return RedirectToAction("Index", "TransactionCategory");
}
// display view
return View("Edit", new EditViewModel(dtoTransactionCategory));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(EditViewModel vmEdit)
{
// transfer vm to dto
var dtoTransactionCategory = _unitOfWork.TransactionCategories.Get(vmEdit.Id);
dtoTransactionCategory.Name = vmEdit.Name;
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "Record updated.";
return RedirectToAction("Index", "TransactionCategory");
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
    /// <summary>
    /// Form model for creating an account transaction; carries the drop-down
    /// sources alongside the user's selected values.
    /// </summary>
    public class CreateViewModel
    {
        public CreateViewModel() { }

        /// <summary>Seeds only the three drop-down lists.</summary>
        public CreateViewModel(List<SelectListItem> sliAccount, List<SelectListItem> sliTransactionType, List<SelectListItem> sliTransactionCategory)
        {
            Accounts = sliAccount;
            TransactionTypes = sliTransactionType;
            TransactionCategories = sliTransactionCategory;
        }
        /*
        public CreateViewModel(Core.Models.Asset dtoAsset,string assetNameAdditionalInformaiton,
            Core.Models.AssetType dtoAssetType, DateTime date,
            List<SelectListItem> sliTransactionTypes, List<SelectListItem> sliTransactionCategories)
        {
            AssetId = dtoAsset.Id;
            AssetName = dtoAsset.Name + assetNameAdditionalInformaiton;
            AssetTypeName = dtoAssetType.Name;
            DueDate = date.ToString("MM/dd/yyyy");
            TransactionTypes = sliTransactionTypes;
            TransactionCategories = sliTransactionCategories;
        }*/

        /// <summary>
        /// Seeds display fields and selected ids from the business model;
        /// drop-down population appears to happen elsewhere (those assignments
        /// are commented out) — confirm against callers.
        /// </summary>
        public CreateViewModel(Business.Models.AccountTransaction bmAssetTransaction)
        {
            AssetId = bmAssetTransaction.AssetId;
            AccountName = bmAssetTransaction.AssetName;
            //Assets = bmAssetTransaction.AssetSelectList;
            AssetTypeName = bmAssetTransaction.AssetTypeName;
            //TransactionTypes = bmAssetTransaction.TransactionTypeSelectList;
            SelectedTransactionTypeId = bmAssetTransaction.TransactionTypeId.ToString();
            //TransactionCategories = bmAssetTransaction.TransactionCategorySelectList;
            SelectedTransactionCategoryId = bmAssetTransaction.TransactionCategoryId.ToString();
        }

        public int AssetId { get; set; }
        [Display(Name = "Account Name")]
        public string AccountName { get; set; }
        [Display(Name = "Account Type")]
        public string AssetTypeName { get; set; }
        [Display(Name = "Check Number")]
        public string CheckNumber { get; set; }
        [Required]
        [Display(Name = "Due")]
        [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}")]
        public DateTime DueDate { get; set; }
        [Display(Name = "Cleared")]
        [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}")]
        public DateTime ClearDate { get; set; }
        [Required]
        public decimal Amount { get; set; }
        public string Note { get; set; }
        // selected ids bind as strings from the posted form
        [Required]
        [Display(Name = "Asset")]
        public string SelectedAccountId { get; set; }
        public IEnumerable<SelectListItem> Accounts { get; set; }
        [Required]
        [Display(Name = "Type")]
        public string SelectedTransactionTypeId { get; set; }
        public IEnumerable<SelectListItem> TransactionTypes { get; set; }
        [Required]
        [Display(Name = "Category")]
        public string SelectedTransactionCategoryId { get; set; }
        public IEnumerable<SelectListItem> TransactionCategories { get; set; }
        //[Required]
        //[Display(Name = "Description")]
        //public string SelectedTransactionDescriptionId { get; set; }
        //public IEnumerable<SelectListItem> TransactionDescriptions { get; set; }
    }
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
    /// <summary>
    /// Repository contract for asset types, extending the generic repository
    /// with active-row filters and duplicate-name counting.
    /// </summary>
    public interface IAssetTypeRepository : IRepository<AssetType>
    {
        /// <summary>Asset type by id, restricted to active rows.</summary>
        AssetType GetActive(int id);
        IEnumerable<AssetType> GetAllOrderedByName();
        IEnumerable<AssetType> GetAllActiveOrderedByName();
        /// <summary>Count of rows with this exact name (duplicate check on create).</summary>
        int CountMatching(string name);
        /// <summary>Duplicate check that ignores the row being edited.</summary>
        int CountMatching(int excludeId, string name);
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
    /// <summary>
    /// Edit model for one asset-type/setting-type link; the two constructors
    /// populate the name of whichever side the caller is editing from.
    /// </summary>
    public class EditViewModel
    {
        public EditViewModel()
        {
        }

        /// <summary>Link viewed from the setting-type side.</summary>
        public EditViewModel(Core.Models.SettingType dtoSettingType,
            Core.Models.AssetTypeSettingType dtoAssetTypeSettingType)
        {
            Id = dtoAssetTypeSettingType.Id;
            AssetTypeId = dtoAssetTypeSettingType.AssetTypeId;
            SettingTypeId = dtoSettingType.Id;
            SettingTypeName = dtoSettingType.Name;
            IsActive = dtoAssetTypeSettingType.IsActive;
        }

        /// <summary>Link viewed from the asset-type side.</summary>
        public EditViewModel(Core.Models.AssetType dtoAssetType,
            Core.Models.AssetTypeSettingType dtoAssetTypeSettingType)
        {
            Id = dtoAssetTypeSettingType.Id;
            SettingTypeId = dtoAssetTypeSettingType.SettingTypeId;
            AssetTypeId = dtoAssetType.Id;
            AssetTypeName = dtoAssetType.Name;
            IsActive = dtoAssetTypeSettingType.IsActive;
        }

        // id of the link row itself
        public int Id { get; set; }
        public int AssetTypeId { get; set; }
        public string AssetTypeName { get; set; }
        public int SettingTypeId { get; set; }
        public string SettingTypeName { get; set; }
        public bool IsActive { get; set; }
    }
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
    /// <summary>
    /// List-backed fake repository; adds no members beyond the generic base.
    /// </summary>
    public class InMemoryTransactionCategoryRepository : InMemoryRepository<TransactionCategory>, ITransactionCategoryRepository
    {
        private List<TransactionCategory> _entities = null;

        public InMemoryTransactionCategoryRepository(IEnumerable<TransactionCategory> entities)
            : base(entities)
        {
            // cast succeeds only when callers pass a List<T>; otherwise null
            _entities = entities as List<TransactionCategory>;
        }
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
    /// <summary>
    /// Lookup entity for transaction categories; maps to "TransactionCategories".
    /// </summary>
    [Table("TransactionCategories")]
    public class TransactionCategory : BaseEntity
    {
        [Required]
        public string Name { get; set; }
        // navigation property: transactions filed under this category
        public ICollection<AssetTransaction> AssetTransactions { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeRelationshipType
{
    /// <summary>
    /// Edit model for an asset-type relationship: one asset type is fixed
    /// ("supplied") and the user picks the level, relationship type, and the
    /// asset type to link to.
    /// </summary>
    public class EditViewModel
    {
        public EditViewModel() { }

        public EditViewModel(Core.Models.AssetTypeRelationshipType dtoAssetTypeRelationshipType,
            Core.Models.AssetType dtoAssetType, List<SelectListItem> sliRelationshipLevels,
            string selectedRelationshipLevel, string selectedParentChildRelationshipTypeId,
            List<SelectListItem> sliLinkAssetTypes, string selectedLinkedAssetType)
        {
            Id = dtoAssetTypeRelationshipType.Id;
            SuppliedAssetTypeId = dtoAssetType.Id;
            SuppliedAssetTypeName = dtoAssetType.Name;
            RelationshipLevels = sliRelationshipLevels;
            SelectedRelationshipLevel = selectedRelationshipLevel;
            // NOTE(review): only the selected id is set here; the
            // ParentChildRelationshipTypes list itself is never assigned by this
            // constructor — confirm it is populated elsewhere before render.
            SelectedParentChildRelationshipTypeId = selectedParentChildRelationshipTypeId;
            LinkAssetTypes = sliLinkAssetTypes;
            SelectedLinkedAssetTypeId = selectedLinkedAssetType;
        }

        public int Id { get; set; }
        public int SuppliedAssetTypeId { get; set; }
        [Display(Name = "Asset Type")]
        public string SuppliedAssetTypeName { get; set; }
        [Required]
        [Display(Name = "Relationship Level")]
        public string SelectedRelationshipLevel { get; set; }
        public IEnumerable<SelectListItem> RelationshipLevels { get; set; }
        [Required]
        [Display(Name = "Relationship Type")]
        public string SelectedParentChildRelationshipTypeId { get; set; }
        public IEnumerable<SelectListItem> ParentChildRelationshipTypes { get; set; }
        [Required]
        [Display(Name = "Link Asset Type")]
        public string SelectedLinkedAssetTypeId { get; set; }
        public IEnumerable<SelectListItem> LinkAssetTypes { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Moq;
using Moq.Protected;
using NUnit.Framework;
namespace Financial.Tests._DmitriNesteruk
{
/// <summary>
/// Sample type with full value equality over <see cref="Name"/>; used as a
/// by-reference argument in the Moq demonstration tests.
/// </summary>
public class Bar : IEquatable<Bar>
{
    /// <summary>The single value equality is based on.</summary>
    public string Name { get; set; }

    /// <summary>Two bars are equal when their names are equal.</summary>
    public bool Equals(Bar other)
    {
        if (ReferenceEquals(other, null))
        {
            return false;
        }
        // Same instance, or same name.
        return ReferenceEquals(other, this) || string.Equals(Name, other.Name);
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(obj, null))
        {
            return false;
        }
        if (ReferenceEquals(obj, this))
        {
            return true;
        }
        // Exact-type check: a derived type is never equal to a Bar.
        return obj.GetType() == this.GetType() && Equals((Bar)obj);
    }

    /// <summary>Hash code derived from <see cref="Name"/> (0 when null).</summary>
    public override int GetHashCode()
    {
        return Name == null ? 0 : Name.GetHashCode();
    }

    public static bool operator ==(Bar left, Bar right) => Equals(left, right);

    public static bool operator !=(Bar left, Bar right) => !Equals(left, right);
}
/// <summary>
/// Nested dependency exposed via <see cref="IFooWithMoq.SomeBaz"/>; used in the
/// recursive-mocking demonstrations.
/// </summary>
public interface IBaz
{
    string Name { get; }
}
/// <summary>
/// Interface mocked throughout <c>FooWithMoqTests</c>. Its members are chosen
/// to cover the Moq scenarios being demonstrated: plain methods, out and ref
/// parameters, read/write properties, and a read-only nested dependency.
/// </summary>
public interface IFooWithMoq
{
    bool DoSomething(string value);
    string ProcessString(string value);
    // out-parameter shape for the TryParse-style setup demos.
    bool TryParse(string value, out string outputValue);
    // ref-parameter shape for the by-reference matching demos.
    bool Submit(ref Bar bar);
    int GetCount();
    bool Add(int amount);
    string Name { get; set; }
    // Getter-only nested dependency for recursive mocking.
    IBaz SomeBaz { get; }
    int SomeOtherProperty { get; set; }
}
/// <summary>
/// Empty placeholder implementation; the tests exercise mocks of
/// <see cref="IFooWithMoq"/> rather than this concrete type.
/// </summary>
public class FooWithMoq
{
}
/// <summary>
/// Custom (non-EventHandler) event delegate, used to demonstrate raising
/// custom-signature events with Moq.
/// </summary>
public delegate void AlienAbductionEventHandler(int galaxy, bool returned);
/// <summary>
/// Interface with events and a method that raises one; mocked in the
/// event-related tests below.
/// </summary>
public interface IAnimal
{
    event EventHandler FallsIll;
    void Stumble();
    event AlienAbductionEventHandler AbductedByAliens;
}
/// <summary>
/// Subscribes to an <see cref="IAnimal"/>'s events and counts how often they
/// fire; used to verify that Moq-raised events reach real subscribers.
/// </summary>
public class Doctor
{
    /// <summary>Number of FallsIll events handled.</summary>
    public int TimesCured;
    /// <summary>Number of AbductedByAliens events observed.</summary>
    public int AbductionsObserved;

    public Doctor(IAnimal animal)
    {
        animal.FallsIll += OnFallsIll;
        // Anonymous delegate without a parameter list is compatible with the
        // custom (int, bool) delegate signature.
        animal.AbductedByAliens += delegate { AbductionsObserved++; };
    }

    // Handler for the standard EventHandler-shaped FallsIll event.
    private void OnFallsIll(object sender, EventArgs args)
    {
        Console.WriteLine("I will cure you!");
        TimesCured++;
    }
}
/// <summary>
/// Small consumer of <see cref="IFooWithMoq"/> whose <see cref="Hello"/>
/// touches a method, a getter and a setter, so the verification tests can
/// check each kind of member access.
/// </summary>
public class Consumer
{
    private readonly IFooWithMoq _foo;

    public Consumer(IFooWithMoq foo)
    {
        _foo = foo;
    }

    /// <summary>Invokes one method call, one property read, one property write.</summary>
    public void Hello()
    {
        _foo.DoSomething("ping");
        string name = _foo.Name;
        _foo.SomeOtherProperty = 123;
    }
}
/// <summary>
/// Abstract type whose members are protected, used to demonstrate mocking
/// protected members via Moq's Protected() extension (see the commented-out
/// tests at the bottom of the fixture).
/// </summary>
public abstract class Person
{
    // Reachable from tests only through Moq.Protected (by member-name string).
    protected int SSN { get; set; }
    protected abstract void Execute(string cmd);
}
/// <summary>
/// Demonstration fixture for the Moq mocking framework, organized by feature
/// area (methods, properties, events, callbacks, verification, behavior,
/// protected members). These tests document Moq usage patterns rather than
/// production behavior; several deliberately show pitfalls via Assert.Warn.
/// </summary>
[TestFixture]
public class FooWithMoqTests
{
    // METHODS
    [Test]
    public void SetUpMockMethod_WhenCheckingForStringValue_ReturnTrue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(foo => foo.DoSomething("ping"))
            .Returns(true);
        Assert.IsTrue(mock.Object.DoSomething("ping"));
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingForStringValue_ReturnFalse_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(foo => foo.DoSomething("pong"))
            .Returns(false);
        Assert.IsFalse(mock.Object.DoSomething("pong"));
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingForMultipleStringValues_ReturnFalseForEachValue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // It.IsIn matches any of the listed argument values.
        mock.Setup(foo => foo.DoSomething(It.IsIn("foo", "bar")))
            .Returns(false);
        Assert.Multiple(() =>
        {
            Assert.IsFalse(mock.Object.DoSomething("foo"));
            Assert.IsFalse(mock.Object.DoSomething("bar"));
        });
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingForAnyStringInput_ReturnTrue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(foo => foo.DoSomething(It.IsAny<string>()))
            .Returns(true);
        Assert.IsTrue(mock.Object.DoSomething("abc"));
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingForAnyNumericInput_ReturnFalse_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // It.IsRegex matches string arguments against a regular expression.
        mock.Setup(foo => foo.DoSomething(It.IsRegex("[0-9]+")))
            .Returns(false);
        Assert.IsFalse(mock.Object.DoSomething("123"));
    }
    [Test]
    public void SetUpMockMethod_WhenNumberIsEven_ReturnTrue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(foo => foo.Add(It.Is<int>(x => x % 2 == 0)))
            .Returns(true);
        Assert.IsTrue(mock.Object.Add(2));
    }
    [Test]
    public void SetUpMockMethod_WhenNumberIsOdd_ReturnFalse_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // Setup matches only even numbers, so an odd input falls through to
        // the mock's default return value (false).
        mock.Setup(foo => foo.Add(It.Is<int>(x => x % 2 == 0)))
            .Returns(true);
        Assert.IsFalse(mock.Object.Add(3));
    }
    [Test]
    public void SetUpMockMethod_WhenNumberIsWithinRange_ReturnTrue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(foo => foo.Add(It.IsInRange<int>(1, 10, Range.Inclusive)))
            .Returns(true);
        Assert.IsTrue(mock.Object.Add(3));
    }
    [Test]
    public void SetUpMockMethod_WhenNumberIsOutsideOfRange_ReturnFalse_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(foo => foo.Add(It.IsInRange<int>(1, 10, Range.Inclusive)))
            .Returns(true);
        Assert.IsFalse(mock.Object.Add(11));
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingForStringValue_SetOutArgumentAndReturnTrue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // The out value is captured at Setup time and handed back on match.
        var requiredOutput = "ok";
        mock.Setup(foo => foo.TryParse("ping", out requiredOutput))
            .Returns(true);
        string result;
        Assert.Multiple(() =>
        {
            Assert.IsTrue(mock.Object.TryParse("ping", out result));
            Assert.That(result, Is.EqualTo(requiredOutput));
        });
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingForInvalidStringValue_SetOutArgumentToNullAndReturnFalse_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var requiredOutput = "ok";
        mock.Setup(foo => foo.TryParse("ping", out requiredOutput))
            .Returns(true);
        string result;
        // Non-matching input: default return (false) and out left at default (null).
        Assert.Multiple(() =>
        {
            Assert.IsFalse(mock.Object.TryParse("pong", out result));
            Assert.That(result, Is.EqualTo(null));
        });
    }
    [Test]
    public void SetUpMockMethod_WhenCheckingMultipleStringValuesWithSameOutArgument_ProblemSettingOutArgument_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var requiredOutput = "ok";
        mock.Setup(foo => foo.TryParse("ping", out requiredOutput))
            .Returns(true);
        string result;
        // Pitfall demo: reusing the same out variable across calls leaves the
        // previous call's value in place for the non-matching call.
        Assert.Multiple(() =>
        {
            Assert.IsTrue(mock.Object.TryParse("ping", out result));
            Assert.That(result, Is.EqualTo(requiredOutput));
            Assert.IsFalse(mock.Object.TryParse("pong", out result));
            //Assert.That(result, Is.Not.EqualTo(requiredOutput));
            Assert.Warn($"Using the same result field for both [ping] & [pong] can have unexpected results. " +
                $"[pong] result should be [string.empty] but instead equals [{result}]");
        });
    }
    [Test]
    public void SetupMockMethod_WhenReferenceArgumentIsValid_ReturnTrue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var bar = new Bar() { Name = "abc" };
        mock.Setup(foo => foo.Submit(ref bar))
            .Returns(true);
        Assert.That(mock.Object.Submit(ref bar), Is.EqualTo(true));
    }
    [Test]
    public void SetupMockMethod_WhenReferenceArgumentIsNotValid_ReturnFalse_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var bar = new Bar() { Name = "abc" };
        mock.Setup(foo => foo.Submit(ref bar))
            .Returns(true);
        var anotherBar = new Bar() { Name = "def" };
        Assert.IsFalse(mock.Object.Submit(ref anotherBar));
    }
    [Test]
    public void SetupMockMethod_WhenCheckingTwoDifferentReferenceArgumentWithSameName_ProblemReturningUnexpectedBoolean_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var bar = new Bar() { Name = "abc" };
        mock.Setup(foo => foo.Submit(ref bar))
            .Returns(true);
        // Pitfall demo: ref matching is by reference identity, not value equality.
        var anotherBar = new Bar() { Name = "abc" };
        Assert.Warn("compares reference location, " +
            "names are the same but for two different reference objects");
        Assert.IsFalse(mock.Object.Submit(ref anotherBar));
    }
    [Test]
    public void SetUpMockMethod_WhenAnyStringValueProvided_ReturnLowerCaseString()
    {
        var mock = new Mock<IFooWithMoq>();
        // setup method that takes input string, formats and returns new string
        mock.Setup(foo => foo.ProcessString(It.IsAny<string>()))
            .Returns((string s) => s.ToLowerInvariant());
        Assert.That(mock.Object.ProcessString("ABC"),
            Is.EqualTo("abc"));
    }
    [Test]
    public void SetupMockMethod_WhenNoInputsProvidedAndMethodIsCalled_IncrementAndReturnCount_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // setup method to increment calls when no inputs provided
        var calls = 0;
        mock.Setup(foo => foo.GetCount())
            .Returns(() => calls)
            .Callback(() => ++calls);
        // increment calls
        mock.Object.GetCount();
        mock.Object.GetCount();
        // Returns evaluates before Callback, so the third call sees count == 2.
        Assert.That(mock.Object.GetCount(), Is.EqualTo(2));
    }
    [Test]
    public void SetupMockMethod_WhenStringValueProvided_ReturnException_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // setup method to throw an exception for input value
        var calls = 0;
        mock.Setup(foo => foo.DoSomething("kill"))
            .Throws<InvalidOperationException>();
        Assert.Throws<InvalidOperationException>(
            () => mock.Object.DoSomething("kill")
        );
    }
    [Test]
    public void SetupMockMethod_WhenStringValueProvidedEqualsNull_ReturnException_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // setup method to throw an exception for input value
        var calls = 0;
        mock.Setup(foo => foo.DoSomething(null))
            .Throws(new ArgumentException("cmd"));
        Assert.Throws<ArgumentException>(
            () =>
            {
                mock.Object.DoSomething(null);
            }, "cmd");
    }
    // PROPERTIES
    [Test]
    public void SetupMockProperty_SetNameValue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        mock.Setup(Foo => Foo.Name)
            .Returns("bar");
        Assert.That(mock.Object.Name, Is.EqualTo("bar"));
    }
    [Test]
    public void SetupMockProperty_WrongWayToSetNameValue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // Pitfall demo: assigning to a mocked property has no effect until the
        // setter/property is set up (e.g. via SetupProperty).
        mock.Object.Name = "invalid bar";
        Assert.Warn("Name is not set this way, setter not setup yet");
        Assert.That(mock.Object.Name, Is.Not.EqualTo("invalid bar"));
    }
    [Test]
    public void SetupMockProperty_SetPropertyOfAPropertyNameValue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // Recursive setup: Moq auto-mocks the intermediate SomeBaz.
        mock.Setup(Foo => Foo.SomeBaz.Name)
            .Returns("hello");
        Assert.That(mock.Object.SomeBaz.Name, Is.EqualTo("hello"));
    }
    /*
    [Test]
    public void SetupMockProperty_SetPropertySetter_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var setterCalled = false;
        mock.SetupSet(foo =>
        {
            foo.Name = It.IsAny<string>();
        })
        .Callback<string>(value =>
        {
            setterCalled = true;
        });
        IFooWithMoq fooWithMoq = mock.Object;
        fooWithMoq.Name = "def";
        mock.VerifySet(foo =>
        {
            foo.Name = "def";
        }, Times.AtLeastOnce);
        Assert.Multiple(() =>
        {
            Assert.That(mock.Object.Name, Is.EqualTo("def"));
            Assert.IsTrue(setterCalled);
        });
    }
    */
    [Test]
    public void SetupMockProperty_SetUpAllProperties_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // SetupAllProperties makes every property track assigned values.
        mock.SetupAllProperties();
        IFooWithMoq foo = mock.Object;
        foo.Name = "def";
        foo.SomeOtherProperty = 123;
        Assert.That(mock.Object.Name, Is.EqualTo("def"));
        Assert.That(mock.Object.SomeOtherProperty, Is.EqualTo(123));
    }
    [Test]
    public void SetupMockProperty_SetUpSingleProperty_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        // Only Name is stubbed; SomeOtherProperty keeps default behavior.
        mock.SetupProperty(f => f.Name);
        IFooWithMoq foo = mock.Object;
        foo.Name = "def";
        foo.SomeOtherProperty = 123;
        Assert.That(mock.Object.Name, Is.EqualTo("def"));
        Assert.That(mock.Object.SomeOtherProperty, Is.Not.EqualTo(123));
    }
    // EVENTS
    [Test]
    public void SetupMockEvents_WhenEventRaised_Test()
    {
        var mock = new Mock<IAnimal>();
        var doctor = new Doctor(mock.Object);
        mock.Raise(
            a => a.FallsIll += null, // event action/subscription
            new EventArgs()
        );
        Assert.That(doctor.TimesCured, Is.EqualTo(1));
    }
    [Test]
    public void SetupMockEvents_WhenCalledMethodRaisesEvent_Test()
    {
        var mock = new Mock<IAnimal>();
        var doctor = new Doctor(mock.Object);
        // Raises wires the event to fire whenever Stumble() is invoked.
        mock.Setup(a => a.Stumble())
            .Raises(a => a.FallsIll += null,
                new EventArgs()
            );
        mock.Object.Stumble();
        Assert.That(doctor.TimesCured, Is.EqualTo(1));
    }
    [Test]
    public void SetupMockEvents_WhenCustomEventRaised_Test()
    {
        var mock = new Mock<IAnimal>();
        var doctor = new Doctor(mock.Object);
        // Custom delegate events take the raw argument list.
        mock.Raise(a => a.AbductedByAliens += null,
            42, true
        );
        Assert.That(doctor.AbductionsObserved, Is.EqualTo(1));
    }
    // CALLBACKS
    [Test]
    public void CallBack_WhenMethodCalledIncrementCount_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        int x = 0;
        mock.Setup(foo => foo.DoSomething("ping"))
            .Returns(true)
            .Callback(() => x++);
        mock.Object.DoSomething("ping");
        Assert.That(x, Is.EqualTo(1));
    }
    [Test]
    public void CallBack_WhenMethodCalledManipulateArgument_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        int x = 0;
        mock.Setup(foo => foo.DoSomething(It.IsAny<string>()))
            .Returns(true)
            .Callback((string s) => x += s.Length);
        mock.Object.DoSomething("ping");
        Assert.That(x, Is.EqualTo(4));
    }
    [Test]
    public void CallBack_WhenMethodCalledManipulateArgumentWithGenericOverLoad_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        int x = 0;
        mock.Setup(foo => foo.DoSomething(It.IsAny<string>()))
            .Returns(true)
            .Callback<string>(s => x += s.Length);
        mock.Object.DoSomething("ping");
        Assert.That(x, Is.EqualTo(4));
    }
    [Test]
    public void CallBack_BeforeInvocationOfMethod_Test()
    {
        // Syntax demo only (no assertion): Callback before Returns runs
        // before the return value is produced.
        var mock = new Mock<IFooWithMoq>();
        int x = 0;
        mock.Setup(foo => foo.DoSomething(It.IsAny<string>()))
            .Callback(() => Console.WriteLine("before ping"))
            .Returns(true);
    }
    [Test]
    public void CallBack_AfterInvocationOfMethod_Test()
    {
        // Syntax demo only (no assertion): Callback after Returns runs
        // after the return value is produced.
        var mock = new Mock<IFooWithMoq>();
        int x = 0;
        mock.Setup(foo => foo.DoSomething(It.IsAny<string>()))
            .Returns(true)
            .Callback(() => Console.WriteLine("after ping"));
    }
    // VERIFICATION
    [Test]
    public void Verification_VerifyMethodInvoked_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var consumer = new Consumer(mock.Object);
        consumer.Hello();
        mock.Verify(foo => foo.DoSomething("ping"), Times.AtLeastOnce);
    }
    [Test]
    public void Verification_VerifyMethodNotInvoked_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var consumer = new Consumer(mock.Object);
        consumer.Hello();
        mock.Verify(foo => foo.DoSomething("pong"), Times.Never);
    }
    [Test]
    public void Verification_VerifyGetterUsedToAccessVariable_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var consumer = new Consumer(mock.Object);
        consumer.Hello();
        mock.VerifyGet(foo => foo.Name);
    }
    [Test]
    public void Verification_VerifySetterCalledWithSpecificValue_Test()
    {
        var mock = new Mock<IFooWithMoq>();
        var consumer = new Consumer(mock.Object);
        consumer.Hello();
        // Hello() assigns 123, which is within [100, 200].
        mock.VerifySet(foo => foo.SomeOtherProperty = It.IsInRange(100, 200, Range.Inclusive));
    }
    // BEHAVIOR
    [Test]
    public void Behavior_SetupAllInvocationsOnTheMock_Test()
    {
        // Strict mode: any call without a matching Setup throws.
        var mock = new Mock<IFooWithMoq>(MockBehavior.Strict);
        mock.Setup(f => f.DoSomething("abc"))
            .Returns(true);
        mock.Object.DoSomething("abc");
    }
    [Test]
    public void Behavior_AutomaticRecursiveMocking_Test()
    {
        // DefaultValue.Mock makes unconfigured members return auto-mocks
        // instead of null; Mock.Get retrieves the generated inner mock.
        var mock = new Mock<IFooWithMoq>()
        {
            DefaultValue = DefaultValue.Mock
        };
        var baz = mock.Object.SomeBaz;
        var bazMock = Mock.Get(baz);
        bazMock.SetupGet(f => f.Name)
            .Returns("abc");
    }
    [Test]
    public void Behavior_MockRepository_Test()
    {
        // A MockRepository applies shared defaults and verifies all created
        // mocks in one call; per-mock overrides are still allowed.
        var mockRepository = new MockRepository(MockBehavior.Strict)
        {
            DefaultValue = DefaultValue.Mock
        };
        var fooMock = mockRepository.Create<IFooWithMoq>();
        var otherMock = mockRepository.Create<IBaz>(MockBehavior.Loose);
        mockRepository.Verify();
    }
    // PROTECTED MEMBERS
    [Test]
    public void ProtectedMembers_SetupProtectedPropertyToReturnSpecifiedValue_Test()
    {
        var mock = new Mock<Person>();
        /*
        mock.Protected()
            .SetupGet<int>("SSN") // can not use nameof(Person.SSN)
            .Returns(42);
        */
    }
    [Test]
    public void ProtectedMembers_SetupProtectedPropertyWithGenericArgument_Test()
    {
        var mock = new Mock<Person>();
        /*
        mock.Protected()
            .Setup<string>("Execute", ItExpr.IsAny<string>()); // can not use It.IsAny
        */
    }
}
}
<file_sep>using Microsoft.VisualStudio.TestTools.UnitTesting;
using Financial.Business.Utilities;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Utilities
{
/// <summary>
/// Object-mother constants shared by the AccountUtility tests.
/// </summary>
public static class AccountUtilityObjectMother
{
    // Asset type id that the utility treats as a credit card; for this id the
    // setting value is appended to the account name (see the CreditCard test).
    public static int AssetTypeIdForCreditCard = 3;
}
/// <summary>
/// Unit tests for <c>AccountUtility.FormatAccountName</c>: credit-card asset
/// types get the setting value appended as "name (value)"; other valid types
/// return the bare name; invalid name or type id returns string.Empty.
/// </summary>
[TestClass()]
public class AccountUtilityTests
{
    [TestMethod()]
    public void FormatAccountName_WhenProvidedValidInput_ReturnValue_Test()
    {
        // Arrange
        var assetName = "name";
        int assetTypeId = 1;
        var assetSettingValue = "value";

        // Act
        var result = AccountUtility.FormatAccountName(assetName, assetTypeId, assetSettingValue);

        // Assert
        Assert.IsInstanceOfType(result, typeof(string), "Result Type");
    }

    [TestMethod()]
    public void FormatAccountName_WhenProvidedValidInputForCreditCard_ReturnValue_Test()
    {
        // Arrange
        var assetName = "name";
        int assetTypeId = AccountUtilityObjectMother.AssetTypeIdForCreditCard;
        var assetSettingValue = "value";

        // Act
        var result = AccountUtility.FormatAccountName(assetName, assetTypeId, assetSettingValue);

        // Assert: credit cards embed the setting value in the display name.
        Assert.IsInstanceOfType(result, typeof(string), "Result Type");
        Assert.AreEqual("name (value)", result, "Result");
    }

    [TestMethod()]
    public void FormatAccountName_WhenProvidedValidInputNotCreditCard_ReturnValue_Test()
    {
        // Arrange
        var assetName = "name";
        int assetTypeId = 1;
        var assetSettingValue = "value";

        // Act
        var result = AccountUtility.FormatAccountName(assetName, assetTypeId, assetSettingValue);

        // Assert: non-credit-card types ignore the setting value.
        Assert.IsInstanceOfType(result, typeof(string), "Result Type");
        Assert.AreEqual("name", result, "Result");
    }

    [TestMethod()]
    public void FormatAccountName_WhenProvidedInvalidInputAssetName_ReturnValue_Test()
    {
        // Arrange
        var assetName = string.Empty;
        int assetTypeId = 1;
        var assetSettingValue = "value";

        // Act
        var result = AccountUtility.FormatAccountName(assetName, assetTypeId, assetSettingValue);

        // Assert
        Assert.IsInstanceOfType(result, typeof(string), "Result Type");
        Assert.AreEqual(string.Empty, result, "Result");
    }

    [TestMethod()]
    public void FormatAccountName_WhenProvidedInvalidInputAssetTypeId_ReturnValue_Test()
    {
        // Arrange
        var assetName = "name";
        int assetTypeId = 0;
        var assetSettingValue = "value";

        // Act
        var result = AccountUtility.FormatAccountName(assetName, assetTypeId, assetSettingValue);

        // Assert
        Assert.IsInstanceOfType(result, typeof(string), "Result Type");
        Assert.AreEqual(string.Empty, result, "Result");
    }

    [TestMethod()]
    public void FormatAccountName_WhenProvidedInvalidInputAssetSettingValue_ReturnValue_Test()
    {
        // Arrange
        var assetName = "name";
        int assetTypeId = 1;
        // FIX: this test previously passed the valid value "value", so it never
        // exercised the invalid-setting-value path its name describes.
        var assetSettingValue = string.Empty;

        // Act
        var result = AccountUtility.FormatAccountName(assetName, assetTypeId, assetSettingValue);

        // Assert: non-credit-card types ignore the setting value, so the bare
        // name is still expected. TODO(review): confirm AccountUtility does not
        // special-case an empty setting value for non-credit-card type ids.
        Assert.IsInstanceOfType(result, typeof(string), "Result Type");
        Assert.AreEqual("name", result, "Result");
    }
}
}<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository contract for <see cref="AssetTypeRelationshipType"/> entities.
/// All members are inherited from the generic <c>IRepository&lt;T&gt;</c>;
/// this interface exists so the type can be resolved/mocked independently.
/// </summary>
public interface IAssetTypeRelationshipTypeRepository : IRepository<AssetTypeRelationshipType>
{
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.AssetTypeRelationshipType;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
namespace Financial.WebApplication.Controllers
{
public class AssetTypeRelationshipTypeController : BaseController
{
private IUnitOfWork _unitOfWork;
private IBusinessService _businessService;
public AssetTypeRelationshipTypeController(IUnitOfWork unitOfWork, IBusinessService businessService)
: base()
{
_unitOfWork = unitOfWork;
_businessService = businessService;
}
[ChildActionOnly]
public ActionResult Index(int assetTypeId)
{
// transfer supplied Id to dto
var dtoSuppliedAssetType = _unitOfWork.AssetTypes.Get(assetTypeId);
// transfer dto to vm for supplied asset type id == child
var vmIndex = _unitOfWork.AssetTypes.FindAll(r => r.IsActive)
.Join(_unitOfWork.AssetTypeRelationshipTypes.FindAll(r => r.IsActive),
at => at.Id, atrt => atrt.ParentAssetTypeId,
(at, atrt) => new { at, atrt })
.Where(j => j.atrt.ChildAssetTypeId == dtoSuppliedAssetType.Id)
.ToList()
.Join(_unitOfWork.ParentChildRelationshipTypes.FindAll(r => r.IsActive),
j => j.atrt.ParentChildRelationshipTypeId, pcrt => pcrt.Id,
(j, pcrt) => new { j, pcrt })
.ToList()
.Join(_unitOfWork.RelationshipTypes.FindAll(r => r.IsActive),
j2 => j2.pcrt.ChildRelationshipTypeId, rt => rt.Id,
(j2, rt) => new IndexViewModel(j2.j.atrt, dtoSuppliedAssetType, j2.j.at, rt))
.OrderBy(r => r.LinkedAssetTypeName)
.ToList();
// transfer dto to vm for supplied asset type id == parent
var vmIndexParent = _unitOfWork.AssetTypes.FindAll(r => r.IsActive)
.Join(_unitOfWork.AssetTypeRelationshipTypes.FindAll(r => r.IsActive),
at => at.Id, atrt => atrt.ChildAssetTypeId,
(at, atrt) => new { at, atrt })
.Where(j => j.atrt.ParentAssetTypeId == dtoSuppliedAssetType.Id)
.ToList()
.Join(_unitOfWork.ParentChildRelationshipTypes.FindAll(r => r.IsActive),
j => j.atrt.ParentChildRelationshipTypeId, pcrt => pcrt.Id,
(j, pcrt) => new { j, pcrt })
.ToList()
.Join(_unitOfWork.RelationshipTypes.FindAll(r => r.IsActive),
j2 => j2.pcrt.ParentRelationshipTypeId, rt => rt.Id,
(j2, rt) => new IndexViewModel(j2.j.atrt, dtoSuppliedAssetType, j2.j.at, rt))
.OrderBy(r => r.LinkedAssetTypeName)
.ToList();
foreach(var vmParent in vmIndexParent)
{
vmIndex.Add(vmParent);
}
return PartialView("_Index", vmIndex);
}
[HttpGet]
public ViewResult Create(int assetTypeId)
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
// transfer id to dto
var dtoSuppliedAssetType = _unitOfWork.AssetTypes.Get(assetTypeId);
// get drop down lists
List<SelectListItem> sliRelationshipLevels = GetRelationshipLevels(null);
List<SelectListItem> sliLinkAssetTypes = GetAssetTypes(assetTypeId, null, null, null);
// display view
return View("Create", new CreateViewModel(dtoSuppliedAssetType, sliRelationshipLevels, sliLinkAssetTypes, null, null));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(CreateViewModel vmCreate)
{
// validation
if (!ModelState.IsValid)
{
return View("Create", vmCreate);
}
// count existing link
int id = 0;
int count = CountExistingLinks(id, vmCreate.SuppliedAssetTypeId, vmCreate.SelectedLinkedAssetTypeId, vmCreate.SelectedParentChildRelationshipTypeId);
// links found
if (count > 0)
{
// YES. get drop down lists
vmCreate.RelationshipLevels = GetRelationshipLevels(null);
// redisplay view with message
ViewData["ErrorMessage"] = "Link already exists";
return View("Create", vmCreate);
}
// transfer vm to dto
if (vmCreate.SelectedRelationshipLevel == "Parent")
{
_unitOfWork.AssetTypeRelationshipTypes.Add(new AssetTypeRelationshipType
{
ParentAssetTypeId = vmCreate.SuppliedAssetTypeId,
ChildAssetTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedLinkedAssetTypeId.ToString()),
ParentChildRelationshipTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedParentChildRelationshipTypeId),
IsActive = true
});
}
else // supplied AssetType == Child
{
_unitOfWork.AssetTypeRelationshipTypes.Add(new AssetTypeRelationshipType
{
ParentAssetTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedLinkedAssetTypeId),
ChildAssetTypeId = vmCreate.SuppliedAssetTypeId,
ParentChildRelationshipTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedParentChildRelationshipTypeId),
IsActive = true
});
}
// update db
_unitOfWork.CommitTrans();
// return view with message
TempData["SuccessMessage"] = "Parent-Child link created.";
return RedirectToAction("Details", "AssetType", new { id = vmCreate.SuppliedAssetTypeId });
}
private int CountExistingLinks(int id, int suppliedAssetTypeId, string selectedAssetTypeId, string selectedParentChildRelationshipType)
{
var countParent = _unitOfWork.AssetTypeRelationshipTypes.GetAll()
.Where(r => r.Id != id)
.Where(r => r.ParentAssetTypeId == suppliedAssetTypeId)
.Where(r => r.ChildAssetTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(selectedAssetTypeId))
.Where(r => r.ParentChildRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(selectedParentChildRelationshipType))
.Count(r => r.IsActive);
var countChild = _unitOfWork.AssetTypeRelationshipTypes.GetAll()
.Where(r => r.Id != id)
.Where(r => r.ParentAssetTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(selectedAssetTypeId))
.Where(r => r.ChildAssetTypeId == suppliedAssetTypeId)
.Where(r => r.ParentChildRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(selectedParentChildRelationshipType))
.Count(r => r.IsActive);
return countParent + countChild;
}
[HttpGet]
public ActionResult DisplayParentChildRelationshipTypes(int suppliedAssetTypeId, string selectedRelationshipLevelId, int? selectedParentChildRelationshipTypeId)
{
// get filtered list to display
List<SelectListItem> sliParentChildRelationshipTypes = GetParentChildRelationshipTypes(suppliedAssetTypeId, selectedRelationshipLevelId, selectedParentChildRelationshipTypeId);
// display view
return PartialView("_DisplayParentChildRelationshipTypes", new DisplayParentChildRelationshipTypesViewModel(sliParentChildRelationshipTypes, selectedParentChildRelationshipTypeId.ToString()));
}
[HttpGet]
public ViewResult Edit(int id, int suppliedAssetTypeId)
{
// get dto for supplied id
var dtoSuppliedAssetType = _unitOfWork.AssetTypes.Get(suppliedAssetTypeId);
var dtoAssetTypeRelationshipType = _unitOfWork.AssetTypeRelationshipTypes.Get(id);
// Selected values
var selectedRelationshipLevelId = dtoAssetTypeRelationshipType.ParentAssetTypeId == dtoSuppliedAssetType.Id ?
"Parent" : "Child";
var selectedParentChildRelationshipTypeId = dtoAssetTypeRelationshipType.ParentChildRelationshipTypeId;
var selectedLinkedAssetTypeId = dtoAssetTypeRelationshipType.ParentAssetTypeId == dtoSuppliedAssetType.Id ?
dtoAssetTypeRelationshipType.ChildAssetTypeId : dtoAssetTypeRelationshipType.ParentAssetTypeId;
// get drop down lists
List<SelectListItem> sliRelationshipLevels = GetRelationshipLevels(selectedRelationshipLevelId);
List<SelectListItem> sliLinkAssetTypes = GetAssetTypes(suppliedAssetTypeId, selectedRelationshipLevelId,
dtoAssetTypeRelationshipType.ParentChildRelationshipTypeId.ToString(), selectedLinkedAssetTypeId);
// display view
return View("Edit", new EditViewModel(dtoAssetTypeRelationshipType, dtoSuppliedAssetType, sliRelationshipLevels, selectedRelationshipLevelId,
selectedParentChildRelationshipTypeId.ToString(), sliLinkAssetTypes, selectedLinkedAssetTypeId.ToString()));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(EditViewModel vmEdit)
{
// validation
if (!ModelState.IsValid)
{
return View("Edit", vmEdit);
}
// count existing link
int count = CountExistingLinks(vmEdit.Id, vmEdit.SuppliedAssetTypeId, vmEdit.SelectedLinkedAssetTypeId, vmEdit.SelectedParentChildRelationshipTypeId);
// links found
if (count > 0)
{
// YES. get drop down lists
vmEdit.RelationshipLevels = GetRelationshipLevels(vmEdit.SelectedRelationshipLevel);
// redisplay view with message
ViewData["ErrorMessage"] = "Link already exists";
return View("Edit", vmEdit);
}
// check for identical record
var countParentRelationship = _unitOfWork.AssetTypeRelationshipTypes.GetAll()
.Where(r => r.Id == vmEdit.Id)
.Where(r => r.ParentAssetTypeId == vmEdit.SuppliedAssetTypeId)
.Where(r => r.ChildAssetTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedLinkedAssetTypeId))
.Where(r => r.ParentChildRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedParentChildRelationshipTypeId))
.Count(r => r.IsActive);
var countChildRelationship = _unitOfWork.AssetTypeRelationshipTypes.GetAll()
.Where(r => r.Id == vmEdit.Id)
.Where(r => r.ParentAssetTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedLinkedAssetTypeId))
.Where(r => r.ChildAssetTypeId == vmEdit.SuppliedAssetTypeId)
.Where(r => r.ParentChildRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedParentChildRelationshipTypeId))
.Count(r => r.IsActive);
// record changed?
if (countParentRelationship + countChildRelationship == 0)
{
// transfer vm to dto
var dtoAssetTypeRelationshipType = _unitOfWork.AssetTypeRelationshipTypes.Get(vmEdit.Id);
// transfer vm to dto
dtoAssetTypeRelationshipType.ParentChildRelationshipTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedParentChildRelationshipTypeId);
if (vmEdit.SelectedRelationshipLevel == "Parent")
{
dtoAssetTypeRelationshipType.ParentAssetTypeId = vmEdit.SuppliedAssetTypeId;
dtoAssetTypeRelationshipType.ChildAssetTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedLinkedAssetTypeId);
}
else
{
dtoAssetTypeRelationshipType.ParentAssetTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedLinkedAssetTypeId);
dtoAssetTypeRelationshipType.ChildAssetTypeId = vmEdit.SuppliedAssetTypeId;
}
// update db
_unitOfWork.CommitTrans();
}
// display view with message
TempData["SuccessMessage"] = "Parent-Child link updated.";
return RedirectToAction("Details", "AssetType", new { id = vmEdit.SuppliedAssetTypeId });
}
[HttpGet]
public ViewResult Delete(int id, int suppliedAssetTypeId)
{
// transfer values to dto
var dtoAssetTypeRelationshipType = _unitOfWork.AssetTypeRelationshipTypes.Get(id);
var dtoSuppliedAssetType = _unitOfWork.AssetTypes.Get(suppliedAssetTypeId);
var dtoParentChildRelationshipType = _unitOfWork.ParentChildRelationshipTypes.Get(dtoAssetTypeRelationshipType.ParentChildRelationshipTypeId);
var dtoLinkedAssetType = new AssetType();
var dtoRelationshipType = new RelationshipType();
// transfer Parent or Child info to dto
if(dtoAssetTypeRelationshipType.ParentAssetTypeId == dtoSuppliedAssetType.Id)
{
dtoLinkedAssetType = _unitOfWork.AssetTypes.Get(dtoAssetTypeRelationshipType.ChildAssetTypeId);
dtoRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ParentRelationshipTypeId);
}
else if (dtoAssetTypeRelationshipType.ChildAssetTypeId == dtoSuppliedAssetType.Id)
{
dtoLinkedAssetType = _unitOfWork.AssetTypes.Get(dtoAssetTypeRelationshipType.ParentAssetTypeId);
dtoRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ChildRelationshipTypeId);
}
// display view
return View("Delete", new DeleteViewModel(dtoAssetTypeRelationshipType, dtoSuppliedAssetType, dtoLinkedAssetType, dtoRelationshipType));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Delete(DeleteViewModel vmDelete)
{
// transfer vm to dto
var dtoAssetTypeRelationshipType = _unitOfWork.AssetTypeRelationshipTypes.Get(vmDelete.Id);
dtoAssetTypeRelationshipType.IsActive = false;
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "Relationship deleted.";
return RedirectToAction("Details", "AssetType", new { id = vmDelete.SuppliedAssetTypeId });
}
private List<SelectListItem> GetAssetTypes(int suppliedAssetTypeId, string selectedRelationshipLevelId, string selectedParentChildRelationshipTypeId, int? selectedAssetTypeId)
{
// create sli
List<SelectListItem> sliAssetTypes = new List<SelectListItem>();
// store all available asset types
var dbAssetTypes = _unitOfWork.AssetTypes.FindAll(r => r.IsActive);
// add to sli if link does NOT exist
foreach(var dtoAssetType in dbAssetTypes)
{
// check for matching Parent-Child link
var countParentLinks = _unitOfWork.AssetTypeRelationshipTypes.GetAll()
.Where(r => r.ParentAssetTypeId == suppliedAssetTypeId)
.Where(r => r.ChildAssetTypeId == dtoAssetType.Id)
.Where(r => r.ChildAssetTypeId != selectedAssetTypeId)
.Count(r => r.IsActive);
// check for matching Child-Parent link
var countChildLinks = _unitOfWork.AssetTypeRelationshipTypes.GetAll()
.Where(r => r.ParentAssetTypeId == dtoAssetType.Id)
.Where(r => r.ParentAssetTypeId != selectedAssetTypeId)
.Where(r => r.ChildAssetTypeId == suppliedAssetTypeId)
.Count(r => r.IsActive);
// add if link not found
if(countParentLinks + countChildLinks == 0)
{
sliAssetTypes.Add(new SelectListItem()
{
Value = dtoAssetType.Id.ToString(),
Selected = dtoAssetType.Id == selectedAssetTypeId,
Text = dtoAssetType.Name
});
}
}
return sliAssetTypes;
}
/// <summary>
/// Builds the relationship-type dropdown for the chosen relationship level.
/// "Parent" joins on ParentRelationshipTypeId (listing child relationship types);
/// "Child" joins on ChildRelationshipTypeId (listing parent relationship types).
/// Any other level yields an empty list.
/// </summary>
private List<SelectListItem> GetParentChildRelationshipTypes(int suppliedAssetTypeId, string selectedRelationshipLevelId, int? selectedParentChildRelationshipTypeId)
{
    bool joinOnParentKey = selectedRelationshipLevelId == "Parent";
    if (!joinOnParentKey && selectedRelationshipLevelId != "Child")
    {
        // unknown level: nothing to offer
        return new List<SelectListItem>();
    }

    // Single join shared by both levels; the original duplicated this block
    // verbatim, differing only in the join key.
    return _unitOfWork.RelationshipTypes.GetAll()
        .Where(r => r.IsActive)
        .Join(_unitOfWork.ParentChildRelationshipTypes.FindAll(r => r.IsActive),
            rt => rt.Id,
            pcrt => joinOnParentKey ? pcrt.ParentRelationshipTypeId : pcrt.ChildRelationshipTypeId,
            (rt, pcrt) => new SelectListItem()
            {
                Value = pcrt.Id.ToString(),
                Selected = pcrt.Id == selectedParentChildRelationshipTypeId,
                Text = rt.Name
            })
        .ToList();
}
/// <summary>
/// Fixed two-entry dropdown: a relationship is viewed from either the
/// "Parent" or the "Child" side; the entry matching selectedValue is pre-selected.
/// </summary>
private static List<SelectListItem> GetRelationshipLevels(string selectedValue)
{
    return new List<SelectListItem>
    {
        new SelectListItem() { Value = "Parent", Text = "Parent", Selected = selectedValue == "Parent" },
        new SelectListItem() { Value = "Child", Text = "Child", Selected = selectedValue == "Child" }
    };
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
using Financial.Business.Models;
namespace Financial.Business.ServiceInterfaces
{
/// <summary>
/// Business-layer read operations for accounts.
/// </summary>
public interface IAccountService
{
    /// <summary>Returns all accounts as business models.</summary>
    List<Account> GetListOfAccounts();
    /// <summary>
    /// Returns accounts shaped for an MVC dropdown; <paramref name="selectedId"/>
    /// presumably marks the pre-selected entry (null for none) — confirm in implementation.
    /// </summary>
    List<SelectListItem> GetSelectListOfAccounts(int? selectedId);
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// EF repository for RelationshipType rows; all behavior comes from the generic base.
/// </summary>
public class RelationshipTypeRepository : Repository<RelationshipType>, IRelationshipTypeRepository
{
    public RelationshipTypeRepository(FinancialDbContext context) : base(context)
    {
    }

    // Typed view of the protected base context for any future type-specific queries.
    private FinancialDbContext FinancialDbContext => _context as FinancialDbContext;
}
}
<file_sep>using Financial.Business.Tests.Fakes;
using Financial.Business.Tests.Fakes.Database;
using Financial.Business.Tests.Fakes.Repositories;
using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Services
{
/// <summary>
/// Shared base for business-service tests: seeds in-memory fake data, wraps it
/// in in-memory repositories, and exposes them through a fake unit of work.
/// Commented-out members are placeholders for repositories not yet faked.
/// </summary>
public class ServiceTestsBase
{
    public ServiceTestsBase()
    {
        // Fake Data — materialized so tests can mutate the lists directly.
        _dataAssets = FakeAssets.InitialFakeAssets().ToList();
        _dataAssetAttributes = FakeAssetSettings.InitialFakeAssetSettings().ToList();
        _dataAssetTransactions = FakeAssetTransactions.InitialFakeAssetTransactions().ToList();
        _dataAssetTypes = FakeAssetTypes.InitialFakeAssetTypes().ToList();
        //_dataAssetTypesRelationsihpTypes = FakeAssetTypesRelationshipTypes.InitialFakeAssetTypesRelationshipTypes().ToList();
        //_dataAssetTypesSettingTypes = FakeAssetTypesSettingTypes.InitialFakeAssetTypesSettingTypes().ToList();
        //_dataParentChildRelationshipTypes = FakeParentChildRelationshipTypes.InitialFakeParentChildRelationshipTypes().ToList();
        //_dataRelationshipTypes = FakeRelationshipTypes.InitialFakeRelationshipTypes().ToList();
        //_dataSettingTypes = FakeSettingTypes.InitialFakeSettingTypes().ToList();
        _dataTransactionCategories = FakeTransactionCategories.InitialFakeTransactionCategories().ToList();
        //_dataTransactionDescriptions = FakeTransactionDescriptions.InitialFakeTransactionDescriptions().ToList();
        _dataTransactionTypes = FakeTransactionTypes.InitialFakeTransactionTypes().ToList();
        // Fake Repositories — each wraps one of the seeded lists above.
        _repositoryAsset = new InMemoryAssetRepository(_dataAssets);
        _repositoryAssetSetting = new InMemoryAssetSettingRepository(_dataAssetAttributes);
        _repositoryAssetTransaction = new InMemoryAssetTransactionRepository(_dataAssetTransactions);
        _repositoryAssetType = new InMemoryAssetTypeRepository(_dataAssetTypes);
        //_repositoryAssetTypeRelationshipType = new InMemoryAssetTypeRelationshipTypeRepository(_dataAssetTypesRelationsihpTypes);
        //_repositoryAssetTypeSettingType = new InMemoryAssetTypeSettingTypeRepository(_dataAssetTypesSettingTypes);
        //_repositoryParentChildRelationshipType = new InMemoryParentChildRelationshipTypeRepository(_dataParentChildRelationshipTypes);
        //_repositoryRelationshipType = new InMemoryRelationshipTypeRepository(_dataRelationshipTypes);
        //_repositorySettingType = new InMemorySettingTypeRepository(_dataSettingTypes);
        _repositoryTransactionCategory = new InMemoryTransactionCategoryRepository(_dataTransactionCategories);
        //_repositoryTransactionDescription = new InMemoryTransactionDescriptionRepository(_dataTransactionDescriptions);
        _repositoryTransactionType = new InMemoryTransactionTypeRepository(_dataTransactionTypes);
        // Fake Unit of Work
        ResetUnitOfWork();
    }
    // Fake Data
    // NOTE(review): several fields below (_dataAssetTypesRelationshipTypes,
    // _dataAssetTypesAttributeTypes, _dataParentChildRelationshipTypes,
    // _dataRelationshipTypes, _dataAttributeTypes) are declared but never
    // initialized — their initializers above are commented out (one with a
    // misspelled name). Confirm whether they should be removed or wired up.
    protected IList<Asset> _dataAssets;
    protected IList<AssetSetting> _dataAssetAttributes;
    protected IList<AssetTransaction> _dataAssetTransactions;
    protected IList<AssetType> _dataAssetTypes;
    protected IList<AssetTypeRelationshipType> _dataAssetTypesRelationshipTypes;
    protected IList<AssetTypeSettingType> _dataAssetTypesAttributeTypes;
    protected IList<ParentChildRelationshipType> _dataParentChildRelationshipTypes;
    protected IList<RelationshipType> _dataRelationshipTypes;
    protected IList<SettingType> _dataAttributeTypes;
    protected IList<TransactionCategory> _dataTransactionCategories;
    //protected IList<TransactionDescription> _dataTransactionDescriptions;
    protected IList<TransactionType> _dataTransactionTypes;
    // Fake Repositories
    protected InMemoryAssetRepository _repositoryAsset;
    protected InMemoryAssetSettingRepository _repositoryAssetSetting;
    protected InMemoryAssetTransactionRepository _repositoryAssetTransaction;
    protected InMemoryAssetTypeRepository _repositoryAssetType;
    //protected InMemoryAssetTypeRelationshipTypeRepository _repositoryAssetTypeRelationshipType;
    //protected InMemoryAssetTypeSettingTypeRepository _repositoryAssetTypeSettingType;
    //protected InMemoryParentChildRelationshipTypeRepository _repositoryParentChildRelationshipType;
    //protected InMemoryRelationshipTypeRepository _repositoryRelationshipType;
    //protected InMemorySettingTypeRepository _repositorySettingType;
    protected InMemoryTransactionCategoryRepository _repositoryTransactionCategory;
    //protected InMemoryTransactionDescriptionRepository _repositoryTransactionDescription;
    protected InMemoryTransactionTypeRepository _repositoryTransactionType;
    // Fake UOW
    protected InMemoryUnitOfWork _unitOfWork;
    /// <summary>
    /// Rebuilds the fake unit of work around the current repositories;
    /// call after swapping a repository mid-test.
    /// </summary>
    public void ResetUnitOfWork()
    {
        _unitOfWork = new InMemoryUnitOfWork()
        {
            Assets = _repositoryAsset,
            AssetSettings = _repositoryAssetSetting,
            AssetTransactions = _repositoryAssetTransaction,
            AssetTypes = _repositoryAssetType,
            //AssetTypesRelationshipTypes = _repositoryAssetTypeRelationshipType,
            //AssetTypesSettingTypes = _repositoryAssetTypeSettingType,
            //ParentChildRelationshipTypes = _repositoryParentChildRelationshipType,
            //RelationshipTypes = _repositoryRelationshipType,
            //SettingTypes = _repositorySettingType,
            TransactionCategories = _repositoryTransactionCategory,
            //TransactionDescriptions = _repositoryTransactionDescription,
            TransactionTypes = _repositoryTransactionType
        };
    }
}
}
<file_sep>using Financial.Business.ServiceInterfaces;
using Financial.Business.Services;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business
{
/// <summary>
/// Facade that constructs and exposes every business service over a single unit of work.
/// </summary>
public class BusinessService : IBusinessService
{
    public BusinessService(IUnitOfWork unitOfWork)
    {
        // The setting service is built first because the account service depends on it.
        AccountSettingService = new AccountSettingService(unitOfWork);
        AccountService = new AccountService(unitOfWork, AccountSettingService);
        AccountTransactionService = new AccountTransactionService(unitOfWork);
        AccountTypeService = new AccountTypeService(unitOfWork);
        AccountTypeSettingTypeService = new AccountTypeSettingTypeService(unitOfWork);
        SettingTypeService = new SettingTypeService(unitOfWork);
    }

    public IAccountService AccountService { get; private set; }
    public IAccountSettingService AccountSettingService { get; private set; }
    public IAccountTransactionService AccountTransactionService { get; private set; }
    public IAccountTypeService AccountTypeService { get; private set; }
    public IAccountTypeSettingTypeService AccountTypeSettingTypeService { get; private set; }
    public ISettingTypeService SettingTypeService { get; private set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.ParentChildRelationshipType
{
/// <summary>
/// View model for editing a parent/child relationship-type link:
/// carries the link being edited plus the two dropdowns the form renders.
/// </summary>
public class EditViewModel
{
    public EditViewModel() { }

    public EditViewModel(Core.Models.ParentChildRelationshipType dtoSuppliedParentChildRelationshipType,
        Core.Models.RelationshipType dtoSuppliedRelationshipType,
        List<SelectListItem> sliRelationshipLevels, string selectedRelationshipLevelId,
        List<SelectListItem> sliRelationshipTypes, int selectedRelationshipTypeId)
    {
        // identity of the link and the relationship type being edited
        Id = dtoSuppliedParentChildRelationshipType.Id;
        RelationshipTypeId = dtoSuppliedRelationshipType.Id;
        RelationshipTypeName = dtoSuppliedRelationshipType.Name;
        // dropdown contents plus current selections
        RelationshipLevels = sliRelationshipLevels;
        SelectedRelationshipLevel = selectedRelationshipLevelId;
        RelationshipTypes = sliRelationshipTypes;
        SelectedRelationshipType = selectedRelationshipTypeId.ToString();
    }

    public int Id { get; set; }
    public int RelationshipTypeId { get; set; }

    [Display(Name = "Relationship Type")]
    public string RelationshipTypeName { get; set; }

    [Required]
    [Display(Name = "Relationship Level")]
    public string SelectedRelationshipLevel { get; set; }
    public IEnumerable<SelectListItem> RelationshipLevels { get; set; }

    [Required]
    [Display(Name = "Linked Relationship Type")]
    public string SelectedRelationshipType { get; set; }
    public IEnumerable<SelectListItem> RelationshipTypes { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AssetSetting
{
/// <summary>
/// View model for creating one asset-setting value:
/// identifies the asset and setting type, and receives the entered value.
/// </summary>
public class CreateViewModel
{
    public CreateViewModel() { }

    public CreateViewModel(Core.Models.Asset dtoAsset, Core.Models.SettingType dtoSettingType)
    {
        // setting type shown on the form
        SettingTypeId = dtoSettingType.Id;
        SettingTypeName = dtoSettingType.Name;
        // asset the new value belongs to
        AssetId = dtoAsset.Id;
    }

    public int AssetId { get; set; }
    public int SettingTypeId { get; set; }
    public string SettingTypeName { get; set; }
    // user-entered setting value, posted back on submit
    public string Value { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
/// <summary>
/// Join entity pairing two RelationshipType rows into a parent/child link.
/// Inherits Id/IsActive from BaseEntity.
/// </summary>
[Table("ParentChildRelationshipTypes")]
public class ParentChildRelationshipType : BaseEntity
{
    // FK to the RelationshipType acting as the parent side of the pair.
    [Required]
    [Display(Name = "ParentRelationshipType ID")]
    public int ParentRelationshipTypeId { get; set; }
    // FK to the RelationshipType acting as the child side of the pair.
    [Required]
    [Display(Name = "ChildRelationshipType ID")]
    public int ChildRelationshipTypeId { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeRelationshipType
{
/// <summary>
/// Partial-view model carrying the relationship-type dropdown and its current selection.
/// </summary>
public class DisplayParentChildRelationshipTypesViewModel
{
    public DisplayParentChildRelationshipTypesViewModel() { }

    public DisplayParentChildRelationshipTypesViewModel(List<SelectListItem> sliParentChildRelationshipTypes,
        string selectedParentChildRelationshipTypeId)
    {
        // current selection first, then the dropdown contents
        SelectedParentChildRelationshipTypeId = selectedParentChildRelationshipTypeId;
        ParentChildRelationshipTypes = sliParentChildRelationshipTypes;
    }

    [Required]
    [Display(Name = "Relationship Type")]
    public string SelectedParentChildRelationshipTypeId { get; set; }
    public IEnumerable<SelectListItem> ParentChildRelationshipTypes { get; set; }
}
}
<file_sep>using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Data.RepositoryInterfaces;
namespace Financial.Business.Tests.Fakes
{
/// <summary>
/// Test double for IUnitOfWork. Repositories are injected via the public
/// setters; <see cref="Committed"/> records whether a commit was requested.
/// Transaction methods are no-ops.
/// </summary>
public class InMemoryUnitOfWork : IUnitOfWork
{
    // NOTE(review): _trans is never assigned, so Complete() always takes the
    // "not in a transaction" path and sets Committed — confirm whether
    // BeginTrans()/RollBackTrans() were meant to toggle this flag.
    private bool _trans;
    public InMemoryUnitOfWork()
    {
        Committed = false;
    }
    // True once CommitTrans()/Complete() has run.
    public bool Committed { get; set; }
    public IAssetSettingRepository AssetSettings { get; set; }
    public IAssetRelationshipRepository AssetRelationships { get; set; }
    public IAssetRepository Assets { get; set; }
    public IAssetTransactionRepository AssetTransactions { get; set; }
    public IAssetTypeSettingTypeRepository AssetTypeSettingTypes { get; set; }
    public IAssetTypeRelationshipTypeRepository AssetTypeRelationshipTypes { get; set; }
    public IAssetTypeRepository AssetTypes { get; set; }
    public IParentChildRelationshipTypeRepository ParentChildRelationshipTypes { get; set; }
    public IRelationshipTypeRepository RelationshipTypes { get; set; }
    public ISettingTypeRepository SettingTypes { get; set; }
    public ITransactionCategoryRepository TransactionCategories { get; set; }
    public ITransactionDescriptionRepository TransactionDescriptions { get; set; }
    public ITransactionTypeRepository TransactionTypes { get; set; }
    // No-op in the fake: there is no real transaction to open.
    public void BeginTrans()
    {
    }
    // Delegates to Complete(); mirrors the production unit of work's shape.
    public void CommitTrans()
    {
        Complete();
    }
    // No-op in the fake: nothing to roll back.
    public void RollBackTrans()
    {
    }
    // Marks the unit of work committed instead of calling SaveChanges.
    public void Complete()
    {
        if (!_trans)
        {
            //_context.SaveChanges();
            Committed = true;
        }
    }
    public void Dispose()
    {
        //_context.Dispose();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.TransactionCategory
{
/// <summary>
/// Row model for the transaction-category index listing (id + display name).
/// </summary>
public class IndexViewModel
{
    public IndexViewModel() { }

    public IndexViewModel(Core.Models.TransactionCategory dtoTransactionCategory)
    {
        // copy the two fields the list view renders
        Name = dtoTransactionCategory.Name;
        Id = dtoTransactionCategory.Id;
    }

    public int Id { get; set; }
    public string Name { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
/// <summary>
/// Lookup entity naming a transaction description; inherits Id/IsActive from BaseEntity.
/// </summary>
[Table("TransactionDescriptions")]
public class TransactionDescription : BaseEntity
{
    [Required]
    public string Name { get; set; }
    // Navigation: transactions that reference this description.
    public ICollection<AssetTransaction> AssetTransactions { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Models
{
/// <summary>
/// Business model projecting an AssetType (and optionally its
/// AssetTypeSettingType link) out of the data layer.
/// </summary>
public class AccountType
{
    public AccountType() { }

    public AccountType(Core.Models.AssetType dtoAssetType)
    {
        AssetTypeId = dtoAssetType.Id;
        AssetTypeName = dtoAssetType.Name;
    }

    // Chains to the single-dto constructor, then adds the setting-type link id.
    public AccountType(Core.Models.AssetType dtoAssetType,
        Core.Models.AssetTypeSettingType dtoAssetTypeSettingType)
        : this(dtoAssetType)
    {
        AssetTypeSettingTypeId = dtoAssetTypeSettingType.Id;
    }

    public int AssetTypeId { get; set; }
    public string AssetTypeName { get; set; }
    public int AssetTypeSettingTypeId { get; set; }
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// EF repository for TransactionType rows with a name-ordered active lookup.
/// </summary>
public class TransactionTypeRepository : Repository<TransactionType>, ITransactionTypeRepository
{
    public TransactionTypeRepository(FinancialDbContext context) : base(context)
    {
    }

    // Typed view of the protected base context.
    private FinancialDbContext FinancialDbContext => _context as FinancialDbContext;

    /// <summary>Returns all active transaction types, ordered ascending by name.</summary>
    public IEnumerable<TransactionType> GetAllActiveOrderedByName()
    {
        var query = FinancialDbContext.TransactionTypes
            .Where(r => r.IsActive)
            .OrderBy(r => r.Name);
        return query.ToList();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using Financial.Core.Models;
using System.Data.Entity;
using Moq;
using Financial.Core;
using Financial.Data.Repositories;
using Financial.Tests.Mocks;
namespace Financial.Tests.Data.Repositories
{
/// <summary>
/// Repository tests for AssetTransactionRepository over a mocked DbContext.
/// SetUp seeds a minimal, fully-linked entity graph.
/// </summary>
[TestFixture]
public class AssetTransactionRepositoryTests
{
    // seeded entity graph shared by all tests
    private Asset _dbAsset;
    private AssetSetting _dbAssetSetting;
    private AssetTransaction _dbAssetTransaction;
    private AssetType _dbAssetType;
    private SettingType _dbSettingType;
    private TransactionCategory _dbTransactionCategory;
    private TransactionDescription _dbTransactionDescription;
    private TransactionType _dbTransactionType;
    // NOTE: the unused _mockAssetTransactionDbSet / _mockDbContext fields were
    // removed; MockFinancialDbContext.Create builds the context directly.
    private FinancialDbContext _fakeDbContext;
    private AssetTransactionRepository _repository;

    [SetUp]
    public void SetUp()
    {
        // setup fake model — ids are arbitrary but unique across entities
        _dbAssetType = new AssetType { Id = 1, Name = "a", IsActive = true };
        _dbAsset = new Asset { Id = 2, AssetTypeId = _dbAssetType.Id, Name = "b", IsActive = true };
        _dbSettingType = new SettingType { Id = 3, Name = "c", IsActive = true };
        _dbAssetSetting = new AssetSetting { Id = 4, AssetId = _dbAsset.Id, SettingTypeId = _dbSettingType.Id, Value = "d", IsActive = true };
        _dbTransactionCategory = new TransactionCategory { Id = 5, Name = "e", IsActive = true };
        _dbTransactionDescription = new TransactionDescription { Id = 6, Name = "f", IsActive = true };
        _dbTransactionType = new TransactionType { Id = 7, Name = "g", IsActive = true };
        _dbAssetTransaction = new AssetTransaction
        {
            Id = 8,
            // FIX: the seeded transaction was never linked to the seeded asset
            // (AssetId defaulted to 0), so asset joins/Includes would miss it.
            AssetId = _dbAsset.Id,
            TransactionCategoryId = _dbTransactionCategory.Id,
            TransactionDescriptionId = _dbTransactionDescription.Id,
            TransactionTypeId = _dbTransactionType.Id,
            CheckNumber = "123",
            DueDate = new DateTime(1234, 5, 6),
            ClearDate = new DateTime(1234, 7, 8),
            Amount = 123.45M,
            Note = "abcdef",
            IsActive = true
        };
        // setup DbContext
        Setup_FakeDbContext();
        // set up repository
        _repository = new AssetTransactionRepository(_fakeDbContext);
    }

    [TearDown]
    public void TearDown()
    {
    }

    [Test]
    public void GetActive_WhenCalled_ReturnAssetTransaction_Test()
    {
        var result = _repository.GetAllActiveByDueDate();

        Assert.That(result, Is.InstanceOf<List<AssetTransaction>>());
    }

    // private methods

    // Seeds the mocked context with the single-entity-per-table default graph.
    private void Setup_FakeDbContext()
    {
        Setup_FakeDbContext(
            new List<Asset> { _dbAsset },
            new List<AssetSetting> { _dbAssetSetting },
            new List<AssetTransaction> { _dbAssetTransaction },
            new List<AssetType> { _dbAssetType },
            new List<SettingType> { _dbSettingType },
            new List<TransactionCategory> { _dbTransactionCategory },
            new List<TransactionDescription> { _dbTransactionDescription },
            new List<TransactionType> { _dbTransactionType });
    }

    // Builds the mocked context from explicit lists (tests may pass variants).
    private void Setup_FakeDbContext(
        List<Asset> fakeAssetList,
        List<AssetSetting> fakeAssetSettingList,
        List<AssetTransaction> fakeAssetTransactionList,
        List<AssetType> fakeAssetTypeList,
        List<SettingType> fakeSettingTypeList,
        List<TransactionCategory> fakeTransactionCategoryList,
        List<TransactionDescription> fakeTransactionDescriptionList,
        List<TransactionType> fakeTransactionTypeList)
    {
        _fakeDbContext = MockFinancialDbContext.Create(
            assets: fakeAssetList,
            assetSettings: fakeAssetSettingList,
            assetTransactions: fakeAssetTransactionList,
            assetTypes: fakeAssetTypeList,
            settingTypes: fakeSettingTypeList,
            transactionCategories: fakeTransactionCategoryList,
            transactionDescriptions: fakeTransactionDescriptionList,
            transactionTypes: fakeTransactionTypeList);
    }
}
}<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data factory for TransactionType fakes.
/// Five rows; row 3 is inactive to exercise IsActive filtering.
/// </summary>
public class FakeTransactionTypes
{
    public static IEnumerable<TransactionType> InitialFakeTransactionTypes()
    {
        return new[]
        {
            new TransactionType() { Id = 1, Name = "TransactionType1", IsActive = true },
            new TransactionType() { Id = 2, Name = "TransactionType2", IsActive = true },
            new TransactionType() { Id = 3, Name = "TransactionType3", IsActive = false },
            new TransactionType() { Id = 4, Name = "TransactionType4", IsActive = true },
            new TransactionType() { Id = 5, Name = "TransactionType5", IsActive = true }
        };
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository contract for RelationshipType rows; currently adds nothing
/// beyond the generic <see cref="IRepository{T}"/> operations.
/// </summary>
public interface IRelationshipTypeRepository : IRepository<RelationshipType>
{
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
/// <summary>
/// A single account (asset) transaction. Maps to the "AccountTransactions"
/// table; the AssetId property is stored in the legacy "AccountId" column.
/// Inherits Id/IsActive from BaseEntity.
/// </summary>
[Table("AccountTransactions")]
public class AssetTransaction : BaseEntity
{
    // FK to the owning asset; column kept as "AccountId" for schema compatibility.
    [Required]
    [Display(Name = "Asset ID")]
    [Column("AccountId")]
    public int AssetId { get; set; }
    [Required]
    public int TransactionTypeId { get; set; }
    [Required]
    public int TransactionCategoryId { get; set; }
    [Required]
    public int TransactionDescriptionId { get; set; }
    [Display(Name = "Check Number")]
    public string CheckNumber { get; set; }
    // NOTE(review): DataType.Date combined with an "hh:mm tt" time component in
    // the format string looks inconsistent — confirm whether time is intended.
    [Required]
    [Display(Name = "Due")]
    [DataType(DataType.Date)]
    [DisplayFormat(DataFormatString = "{0:yyyy-MM-dd hh:mm tt}", ApplyFormatInEditMode = true)]
    public DateTime DueDate { get; set; }
    [Required]
    [Display(Name = "Cleared")]
    [DataType(DataType.Date)]
    [DisplayFormat(DataFormatString = "{0:yyyy-MM-dd hh:mm tt}", ApplyFormatInEditMode = true)]
    public DateTime ClearDate { get; set; }
    [Required]
    public decimal Amount { get; set; }
    public string Note { get; set; }
    // Navigation properties for the FK columns above.
    [ForeignKey("AssetId")]
    public Asset Asset { get; set; }
    [ForeignKey("TransactionTypeId")]
    public TransactionType TransactionType { get; set; }
    [ForeignKey("TransactionCategoryId")]
    public TransactionCategory TransactionCategory { get; set; }
    [ForeignKey("TransactionDescriptionId")]
    public TransactionDescription TransactionDescription { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using Financial.Core.Models;
using System.Data.Entity;
using Financial.Core;
using Moq;
using Financial.Data.Repositories;
using Financial.Tests.Mocks;
namespace Financial.Tests.Data.Repositories
{
/// <summary>
/// Repository tests for TransactionCategoryRepository.GetAllActiveOrderedByName
/// over a mocked DbContext seeded with one category by default.
/// </summary>
[TestFixture]
public class TransactionCategoryRepositoryTests
{
    private TransactionCategory _dbTransactionCategory;
    // NOTE(review): the two fields below are never used (and the first is
    // misnamed "TransactionType") — candidates for removal.
    private DbSet<TransactionCategory> _mockTransactionTypeDbSet;
    private Mock<FinancialDbContext> _mockDbContext;
    private FinancialDbContext _fakeDbContext;
    private TransactionCategoryRepository _repository;
    [SetUp]
    public void SetUp()
    {
        // setup fake model
        _dbTransactionCategory = new TransactionCategory { Id = 1, Name = "a", IsActive = true };
        // setup DbContext
        Setup_FakeDbContext();
        // set up repository
        _repository = new TransactionCategoryRepository(_fakeDbContext);
    }
    [TearDown]
    public void TearDown()
    {
    }
    // Smoke test: result is the expected enumerable type.
    [Test]
    public void GetAllActiveOrderedByName_WhenCalled_ReturnTransactionCategoryIEnumerable_Test()
    {
        var result = _repository.GetAllActiveOrderedByName();
        Assert.That(result, Is.InstanceOf<IEnumerable<TransactionCategory>>());
    }
    // Field-by-field check that the seeded row comes back unchanged.
    [Test]
    public void GetAllActiveOrderedByName_WhenCalled_ReturnTransactionCategoryValues_Test()
    {
        var result = _repository.GetAllActiveOrderedByName().ToList();
        Assert.Multiple(() =>
        {
            Assert.That(result[0].Id, Is.EqualTo(_dbTransactionCategory.Id), "Id");
            Assert.That(result[0].Name, Is.EqualTo(_dbTransactionCategory.Name), "Name");
            Assert.That(result[0].IsActive, Is.EqualTo(_dbTransactionCategory.IsActive), "IsActive");
        });
    }
    // Verifies ascending name ordering with deliberately reversed seed order.
    [Test]
    public void GetAllActiveOrderedByName_WhenMultipleTransactionCategoriesFound_ReturnListSortedAscendingByName_Test()
    {
        var fakeTransactionCategories = new List<TransactionCategory>
        {
            new TransactionCategory { Id = 1, Name = "z", IsActive = true },
            new TransactionCategory { Id = 2, Name = "a", IsActive = true }
        };
        Setup_Repository_FakeDbContext(fakeTransactionCategories);
        var result = _repository.GetAllActiveOrderedByName().ToList();
        Assert.Multiple(() =>
        {
            Assert.That(result[0].Name, Is.EqualTo("a"), "First Index");
            Assert.That(result[1].Name, Is.EqualTo("z"), "Second Index");
        });
    }
    // Verifies inactive rows are filtered out.
    [Test]
    public void GetAllActiveOrderedByName_WhenIsActiveEqualsFalse_DoNotReturnRecord_Test()
    {
        _dbTransactionCategory.IsActive = false;
        Setup_Repository_FakeDbContext();
        var result = _repository.GetAllActiveOrderedByName();
        Assert.That(result.Count(), Is.EqualTo(0));
    }
    // private methods
    // Seeds the mocked context with the default single category.
    private void Setup_FakeDbContext()
    {
        // setup dbContext
        Setup_FakeDbContext(new List<TransactionCategory> {_dbTransactionCategory});
    }
    // Seeds the mocked context from an explicit list.
    private void Setup_FakeDbContext(List<TransactionCategory> fakeTransactionCategoryList)
    {
        // setup dbContext
        _fakeDbContext = MockFinancialDbContext.Create(transactionCategories: fakeTransactionCategoryList);
    }
    // Rebuilds context and repository with default data (after mutating the seed).
    private void Setup_Repository_FakeDbContext()
    {
        // setup dbContext
        Setup_FakeDbContext();
        // set up repository
        _repository = new TransactionCategoryRepository(_fakeDbContext);
    }
    // Rebuilds context and repository with the supplied data.
    private void Setup_Repository_FakeDbContext(List<TransactionCategory> fakeTransactionCategoryList)
    {
        // setup dbContext
        Setup_FakeDbContext(fakeTransactionCategoryList);
        // set up repository
        _repository = new TransactionCategoryRepository(_fakeDbContext);
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data.Entity;
using System.Linq;
using System.Runtime.Remoting.Contexts;
using System.Text;
using System.Threading.Tasks;
using System.Web.Configuration;
namespace Financial.Core
{
/// <summary>
/// Entity Framework context for the Financial database; one DbSet per entity.
/// DbSets are virtual so they can be overridden/mocked in tests.
/// </summary>
public class FinancialDbContext : DbContext, IFinancialDbContext
{
    public virtual DbSet<Asset> Assets { get; set; }
    public virtual DbSet<AssetSetting> AssetSettings { get; set; }
    public virtual DbSet<AssetRelationship> AssetRelationships { get; set; }
    public virtual DbSet<AssetTransaction> AssetTransactions { get; set; }
    public virtual DbSet<AssetType> AssetTypes { get; set; }
    public virtual DbSet<AssetTypeSettingType> AssetTypesSettingTypes { get; set; }
    public virtual DbSet<AssetTypeRelationshipType> AssetTypesRelationshipTypes { get; set; }
    public virtual DbSet<ParentChildRelationshipType> ParentChildRelationshipTypes { get; set; }
    public virtual DbSet<RelationshipType> RelationshipTypes { get; set; }
    public virtual DbSet<SettingType> SettingTypes { get; set; }
    public virtual DbSet<TransactionCategory> TransactionCategories { get; set; }
    public virtual DbSet<TransactionDescription> TransactionDescriptions { get; set; }
    public virtual DbSet<TransactionType> TransactionTypes { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Core.Models;
namespace Financial.Business.Utilities
{
/// <summary>
/// Display-name helpers for accounts.
/// </summary>
public static class AccountUtility
{
    /// <summary>
    /// Builds the account display name. Empty input yields an empty string;
    /// only credit-card accounts get the attribute value appended in parentheses.
    /// </summary>
    public static string FormatAccountName(string assetName, int assetTypeId, string assetAttributeValue)
    {
        if (string.IsNullOrEmpty(assetName))
            return string.Empty;

        bool appendAttribute = !string.IsNullOrEmpty(assetAttributeValue)
            && assetTypeId == AssetType.IdForCreditCard;

        return appendAttribute
            ? $"{assetName} ({assetAttributeValue})"
            : assetName;
    }
}
}
<file_sep>using Financial.Business.ServiceInterfaces;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
namespace Financial.Business.Services
{
/// <summary>
/// Business service for setting types: lookup, list, add (with duplicate-name
/// guard), and edit over the shared unit of work.
/// </summary>
public class SettingTypeService : ISettingTypeService
{
    private IUnitOfWork _unitOfWork;
    public SettingTypeService(IUnitOfWork unitOfWork)
    {
        _unitOfWork = unitOfWork;
    }
    /// <summary>Returns the setting type as a business model, or null when not found.</summary>
    public AttributeType GetSettingType(int settingTypeId)
    {
        var dtoSettingType = _unitOfWork.SettingTypes.Get(settingTypeId);
        if (dtoSettingType == null)
        {
            return null;
        }
        return new AttributeType(dtoSettingType);
    }
    /// <summary>Returns all active setting types as business models.</summary>
    public List<AttributeType> GetListOfSettingTypes()
    {
        // get all active setting types from db
        return _unitOfWork.SettingTypes.GetAllActive()
            .Select(r => new AttributeType(r))
            .ToList();
    }
    /// <summary>
    /// Adds a new active setting type unless an active one with the same
    /// (case-sensitive, exact-match) name already exists.
    /// </summary>
    /// <param name="bmSettingType">Business model carrying the new name.</param>
    /// <returns>
    /// positive integer = record added (the new ID, populated by the commit).
    /// zero integer = name already exists.
    /// </returns>
    public int AddSettingType(AttributeType bmSettingType)
    {
        // check for existing name
        var exists = _unitOfWork.SettingTypes.GetAllActive()
            .Any(r => r.Name == bmSettingType.SettingTypeName);
        if (exists)
        {
            return 0;
        }
        // transfer bm to dto
        var dtoSettingType = new Core.Models.SettingType()
        {
            Name = bmSettingType.SettingTypeName,
            IsActive = true,
        };
        // update db — Add then commit; the dto's Id is read after the commit
        _unitOfWork.SettingTypes.Add(dtoSettingType);
        _unitOfWork.CommitTrans();
        // return new ID
        return dtoSettingType.Id;
    }
    /// <summary>
    /// Renames an existing setting type. Returns false when the id is unknown.
    /// NOTE(review): unlike AddSettingType, no duplicate-name check is done here —
    /// confirm whether that is intentional.
    /// </summary>
    public bool EditSettingType(AttributeType bmSettingType)
    {
        // get dto
        var dtoSettingType = _unitOfWork.SettingTypes.Get(bmSettingType.SettingTypeId);
        if (dtoSettingType == null)
        {
            return false;
        }
        // transfer bm to dto
        dtoSettingType.Name = bmSettingType.SettingTypeName;
        // update db
        _unitOfWork.CommitTrans();
        return true;
    }
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Data.Entity;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// EF repository for AssetSetting rows with eager-loaded navigation lookups.
/// </summary>
public class AssetSettingRepository : Repository<AssetSetting>, IAssetSettingRepository
{
    public AssetSettingRepository(FinancialDbContext context) : base(context)
    {
    }

    // Typed view of the protected base context.
    private FinancialDbContext FinancialDbContext => _context as FinancialDbContext;

    /// <summary>
    /// Returns the single active setting for the given asset/setting-type pair
    /// (or null), with Asset, Asset.AssetType, and SettingType eager-loaded.
    /// </summary>
    public AssetSetting GetActive(int assetId, int settingTypeId)
    {
        return FinancialDbContext.AssetSettings
            .Include(r => r.Asset)
            .Include(r => r.Asset.AssetType)
            .Include(r => r.SettingType)
            .FirstOrDefault(r => r.IsActive
                && r.AssetId == assetId
                && r.SettingTypeId == settingTypeId);
    }

    /// <summary>Returns all active settings for one asset, with navigations eager-loaded.</summary>
    public IEnumerable<AssetSetting> GetAllActiveForAsset(int assetId)
    {
        return FinancialDbContext.AssetSettings
            .Include(r => r.Asset)
            .Include(r => r.SettingType)
            .Where(r => r.IsActive && r.AssetId == assetId)
            .ToList();
    }

    /// <summary>Returns all active settings of one setting type, with navigations eager-loaded.</summary>
    public IEnumerable<AssetSetting> GetAllActiveForSettingType(int settingTypeId)
    {
        return FinancialDbContext.AssetSettings
            .Include(r => r.Asset)
            .Include(r => r.SettingType)
            .Where(r => r.IsActive && r.SettingTypeId == settingTypeId)
            .ToList();
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeRelationshipType
{
/// <summary>
/// Confirmation view model for deleting an asset-type/relationship-type link;
/// carries display names for both sides plus the relationship label.
/// </summary>
public class DeleteViewModel
{
    public DeleteViewModel() { }

    public DeleteViewModel(Core.Models.AssetTypeRelationshipType dtoAssetTypeRelationshipType,
        Core.Models.AssetType dtoSuppliedAssetType,
        Core.Models.AssetType dtoLinkedAssetType,
        Core.Models.RelationshipType dtoRelationshipType)
    {
        // the link being deleted
        Id = dtoAssetTypeRelationshipType.Id;
        // display-only details for the confirmation page
        RelationshipTypeName = dtoRelationshipType.Name;
        LinkedAssetTypeName = dtoLinkedAssetType.Name;
        SuppliedAssetTypeName = dtoSuppliedAssetType.Name;
        SuppliedAssetTypeId = dtoSuppliedAssetType.Id;
    }

    public int Id { get; set; }
    public int SuppliedAssetTypeId { get; set; }

    [Display(Name = "Asset Type")]
    public string SuppliedAssetTypeName { get; set; }

    [Display(Name = "Relationship")]
    public string RelationshipTypeName { get; set; }

    [Display(Name = "Linked Asset Type")]
    public string LinkedAssetTypeName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.Account
{
/// <summary>
/// View model for the Account "Create" form: the new account's name plus a
/// dropdown of available asset types.
/// </summary>
public class CreateViewModel
{
    // Parameterless constructor required for MVC model binding on POST.
    public CreateViewModel()
    {
    }

    // GET path: pre-populates the asset-type dropdown.
    public CreateViewModel(List<SelectListItem> sliAssetTypes)
    {
        AssetTypes = sliAssetTypes;
    }

    public int Id { get; set; }

    [Required]
    [Display(Name = "Name")]
    public string AssetName { get; set; }

    // Bound to the dropdown selection; string because SelectListItem.Value
    // is a string.
    [Required]
    [Display(Name = "Type")]
    public string SelectedAssetTypeId { get; set; }

    public IEnumerable<SelectListItem> AssetTypes { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
/// <summary>
/// EF entity for a relationship-type lookup row (table "RelationshipTypes").
/// Inherits Id and IsActive from <see cref="BaseEntity"/>.
/// </summary>
[Table("RelationshipTypes")]
public class RelationshipType : BaseEntity
{
    [Required]
    public string Name { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using Moq;
using System.Web.Mvc;
using System.Web.Razor.Generator;
using Financial;
using Financial.Business.Models;
using Financial.Business.Services;
using Financial.Business.ServiceInterfaces;
using Financial.Core.Models;
using Financial.Data;
using Financial.Data.RepositoryInterfaces;
namespace Financial.Tests.Business.Services
{
/// <summary>
/// Unit tests for AccountService. The unit of work and the account-setting
/// service are mocked; no database is touched.
/// </summary>
[TestFixture]
public class AccountServiceTests
{
    // System under test and its mocked collaborators.
    private AccountService _service;
    private Mock<IUnitOfWork> _unitOfWork;
    private Mock<IAccountSettingService> _accountSettingService;

    // Baseline fake entities returned by the mocked repositories.
    // (Unused _dbSettingType / _dbAssetTypeSettingType fields removed:
    // they were assigned in SetUp but never read by any test.)
    private Asset _dbAsset;
    private AssetType _dbAssetType;

    [SetUp]
    public void SetUp()
    {
        _dbAsset = new Asset() { Id = 1, AssetTypeId = 2, Name = "a", IsActive = true };
        _dbAssetType = new AssetType() { Id = 2, Name = "b", IsActive = true };

        _unitOfWork = new Mock<IUnitOfWork>();
        _unitOfWork.Setup(uow => uow.Assets.GetAllActiveOrderedByName())
            .Returns(new List<Asset> { _dbAsset });
        _unitOfWork.Setup(uow => uow.AssetTypes.Get(_dbAsset.AssetTypeId))
            .Returns(_dbAssetType);

        _accountSettingService = new Mock<IAccountSettingService>();

        _service = new AccountService(
            _unitOfWork.Object,
            _accountSettingService.Object);
    }

    [TearDown]
    public void TearDown()
    {
    }

    [Test]
    public void GetListOfAccounts_WhenCalled_ReturnAccountList_Test()
    {
        var result = _service.GetListOfAccounts();

        Assert.That(result, Is.TypeOf<List<Account>>());
    }

    [Test]
    public void GetListOfAccounts_WhenCalled_ShouldCallOneTimeUnitOfWorkRepositoryAssetsMethodGetAllActiveOrderedByName_Test()
    {
        _service.GetListOfAccounts();

        _unitOfWork.Verify(uow => uow.Assets.GetAllActiveOrderedByName(),
            Times.Once);
    }

    [Test]
    public void GetListOfAccounts_WhenAccountListHasAccounts_ShouldCallUnitOfWorkRepositoryAssetTypesMethodGet_Test()
    {
        _service.GetListOfAccounts();

        _unitOfWork.Verify(uow => uow.AssetTypes.Get(_dbAsset.AssetTypeId),
            Times.AtLeastOnce);
    }

    [Test]
    public void GetListOfAccounts_WhenAccountListHasAccount_ReturnAccountValues_Test()
    {
        var result = _service.GetListOfAccounts();

        Assert.Multiple(() =>
        {
            Assert.That(result.Count, Is.EqualTo(1), "Count");
            Assert.That(result[0].AssetId, Is.EqualTo(_dbAsset.Id), "Asset Id");
            Assert.That(result[0].AssetName, Is.EqualTo(_dbAsset.Name), "Asset Name");
            Assert.That(result[0].AssetTypeId, Is.EqualTo(_dbAsset.AssetTypeId), "AssetType Id");
            Assert.That(result[0].AssetTypeName, Is.EqualTo(_dbAssetType.Name), "AssetType Name");
        });
    }

    [Test]
    public void GetListOfAccounts_WhenAccountTypeEqualsCreditCard_ReturnNameWithAccountNumber_Test()
    {
        SetUpForOneAccountWithAccountSettingEqualsCreditCard(accountName: "a", accountSettingValue: "1234");
        var expectedAssetName = "a (1234)";

        var result = (List<Account>)_service.GetListOfAccounts();

        Assert.That(result[0].AssetName, Is.EqualTo(expectedAssetName));
    }

    [Test]
    public void GetListOfAccounts_WhenAccountTypeIdEqualsZero_ReturnEmptyAccountList_Test()
    {
        _dbAsset.AssetTypeId = 0;

        var result = _service.GetListOfAccounts();

        Assert.That(result, Is.EquivalentTo(new List<Account>()));
    }

    [Test]
    public void GetListOfAccounts_WhenAccountListIsEmpty_ReturnEmptyAccountList_Test()
    {
        _unitOfWork.Setup(uow => uow.Assets.GetAllActiveOrderedByName())
            .Returns(new List<Financial.Core.Models.Asset>());
        _service = new AccountService(
            _unitOfWork.Object,
            _accountSettingService.Object);

        var result = _service.GetListOfAccounts();

        Assert.That(result.Count, Is.EqualTo(0));
    }

    [Test]
    public void GetSelectListOfAccounts_WhenCalled_ReturnSelectListItemList_Test()
    {
        var result = _service.GetSelectListOfAccounts(selectedId: null);

        Assert.That(result, Is.TypeOf<List<SelectListItem>>());
    }

    [Test]
    public void GetSelectListOfAccounts_WhenAssetFound_ShouldCallOneTimeAssetSettingServiceMethodGetAccountIdentificationInformation_Test()
    {
        _service.GetSelectListOfAccounts(_dbAsset.Id);

        _accountSettingService.Verify(
            asSvc => asSvc.GetAccountIdentificationInformation(new Account(_dbAsset)),
            Times.Once);
    }

    [Test]
    public void GetSelectListOfAccounts_WhenSelectedAssetIdProvided_ReturnListWithAccountSelected_Test()
    {
        var result = _service.GetSelectListOfAccounts(_dbAsset.Id);

        // Constraint style for consistency with the rest of the fixture
        // (was Assert.IsTrue).
        Assert.That(result.Any(r => r.Selected), Is.True);
    }

    // private

    // Reconfigures the mocks so GetListOfAccounts sees exactly one
    // credit-card account whose account-number setting holds
    // accountSettingValue.
    private void SetUpForOneAccountWithAccountSettingEqualsCreditCard(string accountName, string accountSettingValue)
    {
        var assetId = 1;
        var assetSettingId = 2;
        var assetTypeId = AssetType.IdForCreditCard;
        var settingTypeId = Core.Models.SettingType.IdForAccountNumber;
        _unitOfWork.Setup(uow => uow.Assets.GetAllActiveOrderedByName())
            .Returns(new List<Asset> { new Asset { Id = assetId, Name = accountName, AssetTypeId = assetTypeId, IsActive = true } });
        _unitOfWork.Setup(uow => uow.AssetTypes.Get(assetTypeId))
            .Returns(new AssetType { Id = assetTypeId, Name = "b", IsActive = true });
        // BUG FIX: the fake setting previously used SettingTypeId = assetTypeId,
        // mislabeling the account-number setting with an asset-type id.
        _unitOfWork.Setup(uow => uow.AssetSettings.GetActive(assetId, settingTypeId))
            .Returns(new AssetSetting { Id = assetSettingId, AssetId = assetId, SettingTypeId = settingTypeId, Value = accountSettingValue, IsActive = true });
        _service = new AccountService(
            _unitOfWork.Object,
            _accountSettingService.Object);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetSetting
{
/// <summary>
/// View model for editing the settings of one asset: the asset's display
/// header plus one <see cref="EditViewModel"/> row per setting.
/// </summary>
public class EditLinkedSettingTypesViewModel
{
    // Parameterless constructor required for MVC model binding.
    public EditLinkedSettingTypesViewModel() { }

    public EditLinkedSettingTypesViewModel(Core.Models.Asset dtoAsset,
        Core.Models.AssetType dtoAssetType, List<EditViewModel> vmEdit)
    {
        AssetId = dtoAsset.Id;
        AssetName = dtoAsset.Name;
        AssetTypeName = dtoAssetType.Name;
        EditViewModels = vmEdit;
    }

    public int AssetId { get; set; }
    [Display(Name = "Name")]
    public string AssetName { get; set; }
    [Display(Name = "Type")]
    public string AssetTypeName { get; set; }
    // One editable row per setting linked to the asset's type.
    public List<EditViewModel> EditViewModels { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using Financial.Core.Models;
using System.Data.Entity;
using Moq;
using Financial.Core;
using Financial.Data.Repositories;
using Financial.Tests.Mocks;
namespace Financial.Tests.Data.Repositories
{
/// <summary>
/// Unit tests for AssetSettingRepository, run against a fake
/// FinancialDbContext built by MockFinancialDbContext over in-memory lists.
/// Navigation properties are pre-wired on the fake entities so the
/// Include() calls in the repository have data to return.
/// </summary>
[TestFixture]
public class AssetSettingRepositoryTests
{
    // Fake entity graph shared by the tests.
    private Asset _dbAsset;
    private AssetSetting _dbAssetSetting;
    private AssetType _dbAssetType;
    private SettingType _dbSettingType;
    // (Unused _mockAssetSettingDbSet / _mockDbContext fields removed:
    // the fixture builds its context via MockFinancialDbContext.Create.)
    private FinancialDbContext _fakeDbContext;
    private AssetSettingRepository _repository;

    [SetUp]
    public void SetUp()
    {
        // setup fake model
        _dbAssetType = new AssetType { Id = 1, Name = "a", IsActive = true };
        _dbAsset = new Asset
        {
            Id = 2,
            AssetTypeId = _dbAssetType.Id,
            AssetType = _dbAssetType, // setup include
            Name = "b",
            IsActive = true
        };
        _dbSettingType = new SettingType { Id = 3, Name = "c", IsActive = true };
        _dbAssetSetting = new AssetSetting
        {
            Id = 4,
            AssetId = _dbAsset.Id,
            Asset = _dbAsset, // setup include
            SettingTypeId = _dbSettingType.Id,
            SettingType = _dbSettingType, // setup include
            Value = "d",
            IsActive = true
        };

        // setup DbContext
        Setup_FakeDbContext();

        // setup repository
        _repository = new AssetSettingRepository(_fakeDbContext);
    }

    [TearDown]
    public void TearDown()
    {
    }

    [Test]
    public void GetActive_WhenCalled_ReturnAssetSetting_Test()
    {
        var result = _repository.GetActive(_dbAssetSetting.AssetId, _dbAssetSetting.SettingTypeId);

        Assert.That(result, Is.InstanceOf<AssetSetting>());
    }

    [Test]
    public void GetActive_WhenCalled_ReturnAssetSettingValues_Test()
    {
        var result = _repository.GetActive(_dbAssetSetting.AssetId, _dbAssetSetting.SettingTypeId);

        Assert.Multiple(() =>
        {
            Assert.That(result.Id, Is.EqualTo(_dbAssetSetting.Id), "AssetSetting Id");
            Assert.That(result.AssetId, Is.EqualTo(_dbAssetSetting.AssetId), "Asset Id");
            Assert.That(result.Asset.Name, Is.EqualTo(_dbAsset.Name), "Asset Name");
            Assert.That(result.SettingTypeId, Is.EqualTo(_dbAssetSetting.SettingTypeId), "SettingType Id");
            Assert.That(result.SettingType.Name, Is.EqualTo(_dbSettingType.Name), "SettingType Name");
            Assert.That(result.Value, Is.EqualTo(_dbAssetSetting.Value), "AssetSetting Value");
            Assert.That(result.IsActive, Is.EqualTo(_dbAssetSetting.IsActive), "IsActive");
        });
    }

    // private methods

    // Builds the fake context around the baseline entity graph.
    private void Setup_FakeDbContext()
    {
        // setup dbContext
        Setup_FakeDbContext(
            new List<Asset> { _dbAsset },
            new List<AssetType> { _dbAssetType },
            new List<AssetSetting> { _dbAssetSetting },
            new List<SettingType> { _dbSettingType });
    }

    private void Setup_FakeDbContext(
        List<Asset> fakeAssetList,
        List<AssetType> fakeAssetTypeList,
        List<AssetSetting> fakeAssetSettingList,
        List<SettingType> fakeSettingTypeList)
    {
        // setup dbContext
        _fakeDbContext = MockFinancialDbContext.Create(
            assets: fakeAssetList,
            assetTypes: fakeAssetTypeList,
            assetSettings: fakeAssetSettingList,
            settingTypes: fakeSettingTypeList);
    }
}
}
<file_sep>using Financial.Business.ServiceInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business
{
/// <summary>
/// Aggregate facade over the business-layer services, so consumers
/// (e.g. controllers) can take a single dependency instead of six.
/// </summary>
public interface IBusinessService
{
    IAccountService AccountService { get; }
    IAccountSettingService AccountSettingService { get; }
    IAccountTransactionService AccountTransactionService { get; }
    IAccountTypeService AccountTypeService { get; }
    IAccountTypeSettingTypeService AccountTypeSettingTypeService { get; }
    ISettingTypeService SettingTypeService { get; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Core;
using NUnit.Framework;
using Moq;
using Financial.Data.Repositories;
using System.Data.Entity;
using Financial.Core.Models;
using Financial.Tests.Mocks;
namespace Financial.Tests.Data.Repositories
{
/// <summary>
/// Unit tests for the generic Repository&lt;T&gt; base class, exercised
/// against a mocked FinancialDbContext whose Set&lt;BaseEntity&gt; is
/// backed by an in-memory list.
/// </summary>
[TestFixture]
public class RepositoryTests
{
    // (Unused _baseEntity / _fakeDbContext fields removed: they were
    // declared but never assigned or read.)
    private DbSet<BaseEntity> _mockDbSet;
    private Mock<FinancialDbContext> _mockDbContext;
    private Repository<BaseEntity> _repository;

    [SetUp]
    public void SetUp()
    {
        // Default to an empty fake table; individual tests replace it.
        var fakeAssets = new List<BaseEntity>();
        SetupMockWithNewFakeDb(fakeAssets);
    }

    [TearDown]
    public void TearDown()
    {
    }

    [Test]
    public void Get_WhenIdIsValid_ReturnEntity_Test()
    {
        // Arrange
        var fakeEntity = new BaseEntity { Id = 1, IsActive = true };
        var fakeEntities = new List<BaseEntity>
        {
            fakeEntity,
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.Get(id: 1);

        // Assert
        Assert.That(result, Is.EqualTo(fakeEntity));
    }

    [Test]
    public void Get_WhenIdIsNotValid_ReturnNull_Test()
    {
        // Arrange
        var fakeEntity = new BaseEntity { Id = 1, IsActive = true };
        var fakeEntities = new List<BaseEntity>
        {
            fakeEntity,
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.Get(id: 2);

        // Assert
        Assert.That(result, Is.EqualTo(null));
    }

    [Test]
    public void GetActive_WhenEntityIsActiveEqualsTrue_ReturnEntity_Test()
    {
        // Arrange
        var fakeEntity = new BaseEntity { Id = 1, IsActive = true };
        var fakeEntities = new List<BaseEntity>
        {
            fakeEntity,
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.GetActive(id: 1);

        // Assert
        Assert.That(result, Is.EqualTo(fakeEntity));
    }

    [Test]
    public void GetActive_WhenEntityIsActiveEqualsFalse_ReturnNull_Test()
    {
        // Arrange
        var fakeEntity = new BaseEntity { Id = 1, IsActive = false };
        var fakeEntities = new List<BaseEntity>
        {
            fakeEntity,
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.GetActive(id: 1);

        // Assert
        Assert.That(result, Is.EqualTo(null));
    }

    [Test]
    public void GetAll_WhenCalled_ReturnAllEntities_Test()
    {
        // Arrange
        var fakeEntities = new List<BaseEntity>
        {
            new BaseEntity { Id = 1, IsActive = true },
            new BaseEntity { Id = 2, IsActive = true }
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.GetAll();

        // Assert
        Assert.That(result.Count(), Is.EqualTo(2));
    }

    [Test]
    public void GetAllActive_WhenCalled_ReturnEntitiesThatIsActiveEqualsTrue_Test()
    {
        // Arrange
        var fakeEntities = new List<BaseEntity>
        {
            new BaseEntity { Id = 1, IsActive = true },
            new BaseEntity { Id = 2, IsActive = false }
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.GetAllActive();

        // Assert
        Assert.That(result.Count(), Is.EqualTo(1));
    }

    [Test]
    public void Exists_WhenIdIsValid_ReturnTrue_Test()
    {
        // Arrange
        var fakeEntity = new BaseEntity { Id = 1, IsActive = true };
        var fakeEntities = new List<BaseEntity>
        {
            fakeEntity,
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.Exists(id: 1);

        // Assert
        Assert.That(result, Is.EqualTo(true));
    }

    [Test]
    public void Exists_WhenIdIsNotValid_ReturnFalse_Test()
    {
        // Arrange
        var fakeEntity = new BaseEntity { Id = 1, IsActive = true };
        var fakeEntities = new List<BaseEntity>
        {
            fakeEntity,
        };
        SetupMockWithNewFakeDb(fakeEntities);

        // Act
        var result = _repository.Exists(id: 2);

        // Assert
        Assert.That(result, Is.EqualTo(false));
    }

    [Test]
    public void Add_WhenEntityProvided_CallDbContextAddProperty_Test()
    {
        // Arrange
        var newEntity = new BaseEntity { IsActive = true };
        var count = 0;
        _mockDbContext.Setup(a => a.Set<BaseEntity>().Add(It.IsAny<BaseEntity>()))
            .Callback(() => count++);

        // Act
        _repository.Add(newEntity);

        // Assert
        Assert.That(count, Is.EqualTo(1));
    }

    [Test]
    public void Add_WhenEntityProvided_UpdateDbContextWithEntity_Test()
    {
        // BUG FIX: this test was a verbatim copy of the previous one and
        // never checked which entity reached the context. It now captures
        // the argument passed to Add and asserts it is the same instance.
        // Arrange
        var newEntity = new BaseEntity { IsActive = true };
        BaseEntity captured = null;
        _mockDbContext.Setup(a => a.Set<BaseEntity>().Add(It.IsAny<BaseEntity>()))
            .Callback<BaseEntity>(entity => captured = entity);

        // Act
        _repository.Add(newEntity);

        // Assert
        Assert.That(captured, Is.SameAs(newEntity));
    }

    [Test]
    public void AddRange_WhenEntitiesProvided_CallDbContextAddRangeProperty_Test()
    {
        // Arrange
        var newEntities = new List<BaseEntity>
        {
            new BaseEntity { IsActive = true },
            new BaseEntity { IsActive = true },
        };
        var count = 0;
        _mockDbContext.Setup(a => a.Set<BaseEntity>().AddRange(It.IsAny<List<BaseEntity>>()))
            .Callback(() => count++);

        // Act
        _repository.AddRange(newEntities);

        // Assert
        Assert.That(count, Is.EqualTo(1));
    }

    [Test]
    public void Remove_WhenEntityProvided_CallDbContextRemoveProperty_Test()
    {
        // Arrange
        var newEntity = new BaseEntity { IsActive = true };
        var count = 0;
        _mockDbContext.Setup(a => a.Set<BaseEntity>().Remove(It.IsAny<BaseEntity>()))
            .Callback(() => count++);

        // Act
        _repository.Remove(newEntity);

        // Assert
        Assert.That(count, Is.EqualTo(1));
    }

    [Test]
    public void RemoveRange_WhenEntitiesProvided_CallDbContextRemoveProperty_Test()
    {
        // Arrange
        var newEntities = new List<BaseEntity>
        {
            new BaseEntity { IsActive = true },
            new BaseEntity { IsActive = true },
        };
        var count = 0;
        _mockDbContext.Setup(a => a.Set<BaseEntity>().RemoveRange(It.IsAny<List<BaseEntity>>()))
            .Callback(() => count++);

        // Act
        _repository.RemoveRange(newEntities);

        // Assert
        Assert.That(count, Is.EqualTo(1));
    }

    // private methods

    // Rebuilds the mocked DbSet/DbContext around the supplied fake rows and
    // re-creates the repository under test.
    private void SetupMockWithNewFakeDb(List<BaseEntity> fakeBaseEntities)
    {
        // setup DbSet
        _mockDbSet = MockDbSet.Create<BaseEntity>(fakeBaseEntities);

        // setup DbContext
        _mockDbContext = new Mock<FinancialDbContext>();
        _mockDbContext.Setup(c => c.Set<BaseEntity>())
            .Returns(_mockDbSet);

        // set up repository
        _repository = new Repository<BaseEntity>(_mockDbContext.Object);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using Financial.Business.Models;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
/// <summary>
/// Row model for the "_IndexSettingTypesForAssetType" partial: one setting
/// type plus whether it is linked to the given asset type.
/// </summary>
public class IndexSettingTypesForAssetTypeViewModel
{
    // Parameterless constructor required for MVC model binding.
    public IndexSettingTypesForAssetTypeViewModel() { }

    public IndexSettingTypesForAssetTypeViewModel(Business.Models.AttributeType bmSettingType, Business.Models.AccountTypeSettingType bmAssetTypeSettingType)
    {
        AssetTypeSettingTypeId = bmAssetTypeSettingType.AssetTypeSettingTypeId;
        AssetTypeId = bmAssetTypeSettingType.AssetTypeId;
        SettingTypeId = bmSettingType.SettingTypeId;
        SettingTypeName = bmSettingType.SettingTypeName;
        // NOTE(review): this ctor derives IsLinked from AssetTypeId, while
        // the ctor below derives it from AssetTypeSettingTypeId (i.e. the
        // existence of a link record). The two disagree - confirm which is
        // intended against the callers.
        IsLinked = bmAssetTypeSettingType.AssetTypeId > 0;
    }

    public IndexSettingTypesForAssetTypeViewModel(int assetTypeId, Business.Models.AttributeType bmSettingTypeLinked)
    {
        AssetTypeSettingTypeId = bmSettingTypeLinked.AssetTypeSettingTypeId;
        AssetTypeId = assetTypeId;
        SettingTypeId = bmSettingTypeLinked.SettingTypeId;
        SettingTypeName = bmSettingTypeLinked.SettingTypeName;
        // Linked when a link record id exists (> 0).
        IsLinked = bmSettingTypeLinked.AssetTypeSettingTypeId > 0;
    }

    public int AssetTypeId { get; set; }
    public int SettingTypeId { get; set; }
    public string SettingTypeName { get; set; }
    // Id of the AssetTypeSettingType link record; 0 when not linked.
    public int AssetTypeSettingTypeId { get; set; }
    public bool IsLinked { get; set; }
}
}<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// Data-access repository for AssetTypeRelationshipType link records.
/// All CRUD behavior is inherited from Repository&lt;T&gt;; this class
/// exists so the unit of work can expose a strongly typed repository.
/// </summary>
public class AssetTypeRelationshipTypeRepository : Repository<AssetTypeRelationshipType>, IAssetTypeRelationshipTypeRepository
{
    public AssetTypeRelationshipTypeRepository(FinancialDbContext context)
        : base(context)
    {
    }

    // Typed view of the protected base-class context (null if the base was
    // built with a different DbContext subtype).
    private FinancialDbContext FinancialDbContext => _context as FinancialDbContext;
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
using NUnit.Framework;
using Moq;
using Financial;
using Financial.Business;
using Financial.Business.Models;
using Financial.Core;
using Financial.Data;
using Financial.WebApplication.Controllers;
using Financial.WebApplication.Models.ViewModels.Account;
namespace Financial.Tests.WebApplication.Controllers
{
/// <summary>
/// Unit tests for AccountController.Index; the unit of work and the
/// business-service facade are mocked.
/// </summary>
[TestFixture]
public class AccountControllerTests
{
    // (Unused _asset / _sliAsset fields removed: they were built in SetUp
    // but never read by any test.)
    private AccountController _controller;
    private Mock<IUnitOfWork> _unitOfWork;
    private Mock<IBusinessService> _businessService;
    private Account _account;

    [SetUp]
    public void SetUp()
    {
        _account = new Account { AssetId = 1, AssetName = "a", AssetTypeId = 2 };

        _unitOfWork = new Mock<IUnitOfWork>();
        _unitOfWork.SetupAllProperties();

        _businessService = new Mock<IBusinessService>();
        _businessService.Setup(bs => bs.AccountService.GetListOfAccounts())
            .Returns(new List<Account> { _account });

        _controller = new AccountController(_unitOfWork.Object, _businessService.Object);
    }

    [TearDown]
    public void TearDown()
    {
    }

    [Test]
    public void Index_WhenCalled_ReturnsIndexView_Test()
    {
        var result = _controller.Index();

        Assert.That(result.ViewName, Is.EqualTo("Index"));
    }

    [Test]
    public void Index_WhenCalled_ReturnsIndexViewModelList_Test()
    {
        var result = _controller.Index();

        Assert.That(result.ViewData.Model, Is.TypeOf<List<IndexViewModel>>());
    }

    [Test]
    public void Index_WhenCalled_ShouldCallOneTimeAccountServiceMethodGetListOfAccounts_Test()
    {
        _controller.Index();

        _businessService.Verify(bs => bs.AccountService.GetListOfAccounts(),
            Times.Once);
    }

    [Test]
    public void Index_WhenAccountsFound_ReturnAccountsOrderedAscendingByName_Test()
    {
        var alphaFirstAssetName = "a";
        var alphaLastAssetName = "z";
        SetUpAccountsOrderedDescendingByAssetName(alphaFirstAssetName, alphaLastAssetName);

        var result = _controller.Index();

        var vmActual = (List<IndexViewModel>)result.Model;
        Assert.Multiple(() =>
        {
            Assert.That(vmActual.Count, Is.EqualTo(2), "Count");
            Assert.That(vmActual[0].AssetName, Is.EqualTo(alphaFirstAssetName), "First Index");
            Assert.That(vmActual[1].AssetName, Is.EqualTo(alphaLastAssetName), "Second Index");
        });
    }

    [Test]
    public void Index_WhenTempDataSuccessMessageIsNotNull_ReturnViewDataSuccessMessage_Test()
    {
        var expectedMessage = "test message";
        _controller.TempData["SuccessMessage"] = expectedMessage;

        var result = _controller.Index();

        var vResult = (ViewResult)result;
        Assert.That(vResult.ViewData["SuccessMessage"].ToString(), Is.EqualTo(expectedMessage));
    }

    [Test]
    public void Index_WhenTempDataErrorMessageIsNotNull_ReturnViewDataErrorMessage_Test()
    {
        var expectedMessage = "test message";
        _controller.TempData["ErrorMessage"] = expectedMessage;

        var result = _controller.Index();

        var vResult = (ViewResult)result;
        Assert.That(vResult.ViewData["ErrorMessage"].ToString(), Is.EqualTo(expectedMessage));
    }

    [Test]
    public void Index_WhenAccountListEqualsNull_ReturnEmptyViewModelList_Test()
    {
        // No .Returns(...) -> the mocked service yields null.
        _businessService.Setup(bs => bs.AccountService.GetListOfAccounts());
        _controller = new AccountController(_unitOfWork.Object, _businessService.Object);

        var result = _controller.Index();

        var vmActual = (List<IndexViewModel>)result.Model;
        Assert.That(vmActual.Count, Is.EqualTo(0));
    }

    // private

    // Returns two accounts in descending name order so the controller's
    // sorting can be observed.
    private void SetUpAccountsOrderedDescendingByAssetName(string alphaFirstAssetName, string alphaLastAssetName)
    {
        _businessService.Setup(bs => bs.AccountService.GetListOfAccounts())
            .Returns(new List<Account>
            {
                new Account {AssetId = 1, AssetName = alphaLastAssetName, AssetTypeId = 3, AssetTypeName = "type" },
                new Account {AssetId = 1, AssetName = alphaFirstAssetName, AssetTypeId = 3, AssetTypeName = "type" },
            });
        _controller = new AccountController(_unitOfWork.Object, _businessService.Object);
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// In-memory seed data for tests: five baseline AssetTransaction rows.
/// </summary>
public static class FakeAssetTransactions
{
    /// <summary>
    /// Lazily yields the baseline transactions; a fresh set of instances is
    /// produced on every enumeration.
    /// </summary>
    public static IEnumerable<AssetTransaction> InitialFakeAssetTransactions()
    {
        yield return new AssetTransaction()
        {
            Id = 1,
            AssetId = 2,
            TransactionTypeId = 4,
            TransactionCategoryId = 5,
            TransactionDescriptionId = 2,
            Amount = 1.11M,
            IsActive = true
        };
        yield return new AssetTransaction()
        {
            Id = 2,
            AssetId = 1,
            TransactionTypeId = 5,
            TransactionCategoryId = 4,
            TransactionDescriptionId = 4,
            Amount = 2.22M,
            IsActive = true
        };
        yield return new AssetTransaction()
        {
            Id = 3,
            AssetId = 2,
            TransactionTypeId = 1,
            TransactionCategoryId = 4,
            TransactionDescriptionId = 5,
            Amount = 3.33M,
            IsActive = false
        };
        yield return new AssetTransaction()
        {
            Id = 4,
            AssetId = 5,
            TransactionTypeId = 2,
            TransactionCategoryId = 1,
            TransactionDescriptionId = 5,
            Amount = 4.44M,
            IsActive = true
        };
        yield return new AssetTransaction()
        {
            Id = 5,
            AssetId = 4,
            TransactionTypeId = 1,
            TransactionCategoryId = 2,
            TransactionDescriptionId = 1,
            Amount = 5.55M,
            IsActive = true
        };
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Models;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeSettingType
{
/// <summary>
/// View model for linking setting types to one asset type: the asset type's
/// display header, the candidate setting types, and the links posted back.
/// </summary>
public class CreateLinkedSettingTypesViewModel
{
    // Parameterless constructor required for MVC model binding on POST.
    public CreateLinkedSettingTypesViewModel()
    {
    }

    public CreateLinkedSettingTypesViewModel(Business.Models.AccountType bmAssetType,
        List<Business.Models.AttributeType> bmSettingTypes)
    {
        AssetTypeId = bmAssetType.AssetTypeId;
        AssetTypeName = bmAssetType.AssetTypeName;
        SettingTypes = bmSettingTypes;
    }

    public int AssetTypeId { get; set; }
    [Display(Name = "Asset Type")]
    public string AssetTypeName { get; set; }
    // Links selected by the user; read by the POST action.
    public List<Business.Models.AccountTypeSettingType> LinkedAssetTypeSettingTypes { get; set; }
    // Candidate setting types rendered by the form.
    public List<Business.Models.AttributeType> SettingTypes { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
//using Financial.Business.Models.BusinessModels;
using Financial.Business.Utilities;
using Financial.WebApplication.Models.ViewModels.AssetTypeSettingType;
using Financial.Data;
using Financial.Business.Models;
using Financial.Business.ServiceInterfaces;
namespace Financial.WebApplication.Controllers
{
public class AssetTypeSettingTypeController : BaseController
{
// Data access plus the business-layer service for asset-type/setting-type links.
private IUnitOfWork _unitOfWork;
private IAccountTypeSettingTypeService _assetTypeSettingTypeService;

// Dependencies are constructor-injected by the MVC dependency resolver.
public AssetTypeSettingTypeController(IUnitOfWork unitOfWork, IAccountTypeSettingTypeService assetTypeSettingTypeService)
    : base()
{
    _unitOfWork = unitOfWork;
    _assetTypeSettingTypeService = assetTypeSettingTypeService;
}
// Renders the "_IndexLinkedSettingTypes" partial: the setting types linked
// to the given asset type, sorted by name. On any failure an error message
// is surfaced and an empty list is rendered instead.
[ChildActionOnly]
public ActionResult IndexLinkedSettingTypes(int assetTypeId)
{
    try
    {
        // transfer bm to vm
        var vmIndexLinkedSettingTypes = _assetTypeSettingTypeService.GetListOfLinkedSettingTypes(assetTypeId)
            .OrderBy(r => r.SettingTypeName)
            .Select(r => new IndexLinkedSettingTypesViewModel(r))
            .ToList();
        return PartialView("_IndexLinkedSettingTypes", vmIndexLinkedSettingTypes);
    }
    catch (Exception)
    {
        ViewData["ErrorMessage"] = "Encountered problem";
        return PartialView("_IndexLinkedSettingTypes", new List<IndexLinkedSettingTypesViewModel>());
    }
}
// Renders the "_IndexLinkedAssetTypes" partial: the asset types linked to
// the given setting type, sorted by name. On any failure an error message
// is surfaced and an empty list is rendered instead.
[ChildActionOnly]
public ActionResult IndexLinkedAssetTypes(int settingTypeId)
{
    try
    {
        // transfer bm to vm
        // BUG FIX: the projection previously built
        // IndexLinkedSettingTypesViewModel, which does not match the model
        // type used by the "_IndexLinkedAssetTypes" partial and by the
        // catch path below.
        var vmIndexLinkedAssetTypes = _assetTypeSettingTypeService.GetListOfLinkedAssetTypes(settingTypeId)
            .OrderBy(r => r.AssetTypeName)
            .Select(r => new IndexLinkedAssetTypesViewModel(r))
            .ToList();
        return PartialView("_IndexLinkedAssetTypes", vmIndexLinkedAssetTypes);
    }
    catch (Exception)
    {
        ViewData["ErrorMessage"] = "Encountered problem";
        return PartialView("_IndexLinkedAssetTypes", new List<IndexLinkedAssetTypesViewModel>());
    }
}
// GET form for linking setting types to the asset type identified by
// assetTypeId; redirects back to the AssetType index on any failure.
[HttpGet]
public ActionResult CreateLinkedSettingTypes(int assetTypeId)
{
    try
    {
        // get messages from other controllers to display in view
        if (TempData["SuccessMessage"] != null)
        {
            ViewData["SuccessMessage"] = TempData["SuccessMessage"];
        }
        if (TempData["ErrorMessage"] != null)
        {
            ViewData["ErrorMessage"] = TempData["ErrorMessage"];
        }

        // get bm for supplied id
        var bmAssetType = _assetTypeSettingTypeService.CreateLinkedSettingTypesGetModel(assetTypeId);
        if(bmAssetType == null)
        {
            TempData["ErrorMessage"] = "Unable to create record. Try again.";
            return RedirectToAction("Index", "AssetType");
        }

        // get bm for linked setting types
        var bmSettingTypes = _assetTypeSettingTypeService.GetListOfSettingTypesWithLinkedAssetType(bmAssetType.AssetTypeId);

        // transfer bm to vm
        return View("CreateLinkedSettingTypes", new CreateLinkedSettingTypesViewModel(bmAssetType, bmSettingTypes));
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "AssetType");
    }
}
// POST handler: persists the posted asset-type/setting-type links and
// redirects to the AssetType index with a status message.
// CLEANUP: removed an empty foreach over SettingTypes (dead loop) and a
// redundant second ModelState.IsValid check that made the error-message
// tail unreachable; the invalid-model path now sets the error message the
// unreachable code intended.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult CreateLinkedSettingTypes(CreateLinkedSettingTypesViewModel vmCreateLinkedSettingTypes)
{
    try
    {
        if (!ModelState.IsValid)
        {
            TempData["ErrorMessage"] = "Unable to create record. Try again.";
            return RedirectToAction("Index", "AssetType");
        }

        // transfer vm to db
        foreach (var atstLink in vmCreateLinkedSettingTypes.LinkedAssetTypeSettingTypes)
        {
            _unitOfWork.AssetTypeSettingTypes.Add(new Core.Models.AssetTypeSettingType()
            {
                AssetTypeId = atstLink.AssetTypeId,
                SettingTypeId = atstLink.SettingTypeId,
            });
        }

        // complete db update
        _unitOfWork.CommitTrans();

        // display view with message
        TempData["SuccessMessage"] = "Linked setting types created.";
        return RedirectToAction("Index", "AssetType");
    }
    catch (Exception)
    {
        // Also reached when LinkedAssetTypeSettingTypes is null on postback.
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "AssetType");
    }
}
// GET form for linking asset types to the setting type identified by
// settingTypeId; redirects back to the SettingType index on any failure.
[HttpGet]
public ActionResult CreateLinkedAssetTypes(int? settingTypeId)
{
    // get messages from other controllers to display in view
    if (TempData["SuccessMessage"] != null)
    {
        ViewData["SuccessMessage"] = TempData["SuccessMessage"];
    }
    try
    {
        // BUG FIX: the supplied id was previously validated against the
        // AssetTypes repository; a setting-type id must be looked up in
        // SettingTypes (the variable name, action and redirects all target
        // setting types).
        var dtoSettingType = _unitOfWork.SettingTypes.Get(DataTypeUtility.GetIntegerFromString(settingTypeId.ToString()));
        if (dtoSettingType != null)
        {
            // display view (linked asset-type list still to be wired up)
            return View("CreateLinkedAssetTypes", new CreateLinkedAssetTypesViewModel());
        }
        TempData["ErrorMessage"] = "Unable to create record. Try again.";
        return RedirectToAction("Index", "SettingType");
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "SettingType");
    }
}
// POST handler: persists the posted asset-type/setting-type links for one
// setting type and redirects to the SettingType index with a status message.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult CreateLinkedAssetTypes(CreateLinkedAssetTypesViewModel vmCreateLinkedAssetTypes)
{
    try
    {
        if (ModelState.IsValid)
        {
            // transfer vm to db
            foreach (var atstLink in vmCreateLinkedAssetTypes.LinkedAssetTypeSettingTypes)
            {
                _unitOfWork.AssetTypeSettingTypes.Add(new Core.Models.AssetTypeSettingType()
                {
                    AssetTypeId = atstLink.AssetTypeId,
                    SettingTypeId = atstLink.SettingTypeId,
                    //IsActive = atstLink.IsActive
                });
            }

            // complete db update
            _unitOfWork.CommitTrans();

            // display view with message
            TempData["SuccessMessage"] = "Linked asset types created";
            return RedirectToAction("Index", "SettingType", new { id = vmCreateLinkedAssetTypes.SettingTypeId });
        }
        TempData["ErrorMessage"] = "Unable to create record. Try again.";
        return RedirectToAction("Index", "SettingType");
    }
    catch (Exception)
    {
        // Also reached when LinkedAssetTypeSettingTypes is null on postback.
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "SettingType");
    }
}
// GET form for editing which setting types are linked to the asset type
// identified by assetTypeId; redirects to the AssetType index on failure.
// NOTE(review): the commented-out region below is the earlier dto-based
// implementation kept during the migration to the business-model service;
// remove once the service path is confirmed complete.
[HttpGet]
public ActionResult EditLinkedSettingTypes(int assetTypeId)
{
    try
    {
        // transfer dto to bm
        var bmAssetType = _assetTypeSettingTypeService.EditLinkedSettingTypesGetModel(assetTypeId);
        if(bmAssetType == null)
        {
            TempData["ErrorMessage"] = "Problem displaying asset type";
            return RedirectToAction("Index", "AssetType");
        }
        //var bmSettingTypes = _businessService.AssetTypeSettingTypeService.GetListOfSettingTypesWithLinkedAssetType(assetTypeId);
        return View("EditLinkedSettingTypes", new EditLinkedSettingTypesViewModel(bmAssetType));
        /*
        // transfer dto for Id
        var dtoAssetType = _unitOfWork.AssetTypes.Get(assetTypeId);
        if (dtoAssetType != null)
        {
            // get list of all active setting types
            var atstLinks = new List<AssetTypeSettingType>();
            var dbSettingTypes = _unitOfWork.SettingTypes.GetAllActive();
            foreach (var dtoSettingType in dbSettingTypes)
            {
                // transfer dto to vm
                var dtoAssetTypeSettingType = _unitOfWork.AssetTypesSettingTypes.Get(dtoAssetType.Id, dtoSettingType.Id);
                var link = dtoAssetTypeSettingType != null
                    ? new AssetTypeSettingType(dtoAssetTypeSettingType, dtoAssetType, dtoSettingType)
                    : new AssetTypeSettingType(new Core.Models.AssetTypeSettingType(), dtoAssetType, dtoSettingType);
                atstLinks.Add(link);
            }
            // display view
            //return View("EditLinkedSettingTypes", new EditLinkedSettingTypesViewModel(dtoAssetType, atstLinks));
            return View("EditLinkedSettingTypes", new EditLinkedSettingTypesViewModel());
        }
        TempData["ErrorMessage"] = "Unable to edit record. Try again.";
        return RedirectToAction("Index", "AssetType");
        */
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "AssetType");
    }
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult EditLinkedSettingTypes(EditLinkedSettingTypesViewModel vmEditLinks)
{
    // Handles the edit-form post for asset-type/setting-type links.
    try
    {
        if (ModelState.IsValid)
        {
            // NOTE(review): the persistence logic below is commented out, so this
            // action currently reports success WITHOUT saving anything. Confirm
            // whether the update moved into a service or is simply unfinished.
            /*
            foreach (var atstLink in vmEditLinks.LinkedAssetTypeSettingTypes)
            {
                // transfer vm to dto
                var dtoAssetTypeSettingType = _unitOfWork.AssetTypesSettingTypes.Get(atstLink.Id);
                if (dtoAssetTypeSettingType != null)
                {
                    // update dto
                    dtoAssetTypeSettingType.IsActive = atstLink.IsActive;
                }
                else if(atstLink.Id == 0)
                {
                    // create new dto
                    _unitOfWork.AssetTypesSettingTypes.Add(new Core.Models.AssetTypeSettingType()
                    {
                        AssetTypeId = atstLink.AssetTypeId,
                        SettingTypeId = atstLink.SettingTypeId,
                        IsActive = true
                    });
                }
            }
            // complete db update
            _unitOfWork.CommitTrans();
            */
            // display view with message
            TempData["SuccessMessage"] = "Linked setting types updated.";
            return RedirectToAction("Details", "AssetType", new { id = vmEditLinks.AssetTypeId });
        }
        TempData["ErrorMessage"] = "Unable to edit record. Try again.";
        return RedirectToAction("Index", "AssetType");
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "AssetType");
    }
}
[ChildActionOnly]
public ActionResult IndexSettingTypesForAssetType(int assetTypeId)
{
    // Child action: renders the list of setting types linked to one asset type.
    try
    {
        var settingTypes = _assetTypeSettingTypeService.GetListOfSettingTypesWithLinkedAssetType(assetTypeId);

        // Transfer each business model into a row view model.
        var viewModels = new List<IndexSettingTypesForAssetTypeViewModel>();
        foreach (var settingType in settingTypes)
        {
            viewModels.Add(new IndexSettingTypesForAssetTypeViewModel(assetTypeId, settingType));
        }
        return PartialView("_IndexSettingTypesForAssetType", viewModels);
    }
    catch (Exception)
    {
        // Fail soft: render an empty list rather than break the parent view.
        return PartialView("_IndexSettingTypesForAssetType", new List<IndexSettingTypesForAssetTypeViewModel>());
    }
}
[HttpGet]
public ActionResult EditLinkedAssetTypes(int settingTypeId)
{
    // Displays the edit form for the asset types linked to one setting type.
    try
    {
        // transfer dto for id
        var dtoSettingType = _unitOfWork.SettingTypes.Get(settingTypeId);
        if (dtoSettingType != null)
        {
            // NOTE(review): the view model is currently created empty — the
            // link-loading logic was removed; confirm the view copes with it.
            return View("EditLinkedAssetTypes", new EditLinkedAssetTypesViewModel());
        }
        TempData["ErrorMessage"] = "Unable to edit record. Try again.";
        return RedirectToAction("Index", "SettingType");
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "SettingType");
    }
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult EditLinkedAssetTypes(EditLinkedAssetTypesViewModel vmEditLinkedAssetTypes)
{
    // Handles the edit-form post for setting-type/asset-type links.
    try
    {
        if(ModelState.IsValid)
        {
            // NOTE(review): the persistence logic below is commented out, so this
            // action currently reports success WITHOUT saving anything. Confirm
            // whether the update moved into a service or is simply unfinished.
            /*
            // transfer vm to dto
            foreach(var atstLink in vmEditLinkedAssetTypes.LinkedAssetTypeSettingTypes)
            {
                // transfer vm to dto
                var dtoAssetTypeSettingType = _unitOfWork.AssetTypesSettingTypes.Get(atstLink.Id);
                if (dtoAssetTypeSettingType != null)
                {
                    // update dto
                    dtoAssetTypeSettingType.IsActive = atstLink.IsActive;
                }
                else if (atstLink.Id == 0)
                {
                    // create new dto
                    _unitOfWork.AssetTypesSettingTypes.Add(new Core.Models.AssetTypeSettingType()
                    {
                        AssetTypeId = atstLink.AssetTypeId,
                        SettingTypeId = atstLink.SettingTypeId,
                        IsActive = true
                    });
                }
            }
            // update db
            _unitOfWork.CommitTrans();
            */
            // display view with message
            TempData["SuccessMessage"] = "Linked asset types updated.";
            return RedirectToAction("Details", "SettingType", new { id = vmEditLinkedAssetTypes.SettingTypeId });
        }
        TempData["ErrorMessage"] = "Unable to edit record. Try again.";
        return RedirectToAction("Index", "SettingType");
    }
    catch (Exception)
    {
        TempData["ErrorMessage"] = "Encountered problem";
        return RedirectToAction("Index", "SettingType");
    }
}
}
}<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.ParentChildRelationshipType;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business;
namespace Financial.WebApplication.Controllers
{
/// <summary>
/// CRUD actions for parent/child links between relationship types. A link is
/// stored once (ParentRelationshipTypeId / ChildRelationshipTypeId) but can be
/// viewed from either side, which is why most queries run twice — once with the
/// supplied id as parent and once as child.
/// NOTE(review): unlike the sibling AssetTypeSettingType controller, none of the
/// POST actions here carry [ValidateAntiForgeryToken] — confirm whether the
/// views emit tokens before adding it.
/// </summary>
public class ParentChildRelationshipTypeController : BaseController
{
    private IUnitOfWork _unitOfWork;
    // Injected but not currently used by any action in this controller.
    private IBusinessService _businessService;

    public ParentChildRelationshipTypeController(IUnitOfWork unitOfWork, IBusinessService businessService)
        : base()
    {
        _unitOfWork = unitOfWork;
        _businessService = businessService;
    }

    /// <summary>
    /// Child action: lists every active link in which the supplied relationship
    /// type appears, whether as parent or as child.
    /// </summary>
    [ChildActionOnly]
    public ActionResult Index(int relationshipTypeId)
    {
        //transfer dto for id
        var dtoSuppliedRelationshipType = _unitOfWork.RelationshipTypes.Get(relationshipTypeId);

        // transfer db for supplied ParentId
        var dbParentRelationshipTypes = _unitOfWork.ParentChildRelationshipTypes.GetAll()
            .Where(r => r.IsActive)
            .Where(r => r.ParentRelationshipTypeId == relationshipTypeId)
            .ToList();

        // transfer db for supplied ChildId
        var dbChildRelationshipTypes = _unitOfWork.ParentChildRelationshipTypes.GetAll()
            .Where(r => r.IsActive)
            .Where(r => r.ChildRelationshipTypeId == relationshipTypeId)
            .ToList();

        // transfer dbParent to vm
        var vmIndex = new List<IndexViewModel>();
        foreach(var dtoParentChildRelationshipType in dbParentRelationshipTypes)
        {
            var dtoParentRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ParentRelationshipTypeId);
            var dtoChildRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ChildRelationshipTypeId);
            vmIndex.Add(new IndexViewModel(dtoParentChildRelationshipType, dtoSuppliedRelationshipType, dtoParentRelationshipType, dtoChildRelationshipType));
        }

        // transfer dbChild to vm
        foreach (var dtoParentChildRelationshipType in dbChildRelationshipTypes)
        {
            var dtoParentRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ParentRelationshipTypeId);
            var dtoChildRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ChildRelationshipTypeId);
            vmIndex.Add(new IndexViewModel(dtoParentChildRelationshipType, dtoSuppliedRelationshipType, dtoParentRelationshipType, dtoChildRelationshipType));
        }

        // display view
        return PartialView("_Index", vmIndex);
    }

    /// <summary>
    /// Displays the create-link form for the supplied relationship type.
    /// </summary>
    [HttpGet]
    public ViewResult Create(int relationshipTypeId)
    {
        // transfer dto for id
        var dtoSuppliedRelationshipType = _unitOfWork.RelationshipTypes.Get(relationshipTypeId);

        // transfer db to sli
        List<SelectListItem> sliRelationshipTypes = GetDropDownListForRelationshipTypes(relationshipTypeId, null);

        // transfer levels to sli
        List<SelectListItem> sliRelationshipLevels = GetDropDownListForRelationshipLevels(null);

        // display view
        return View("Create", new CreateViewModel(dtoSuppliedRelationshipType, sliRelationshipLevels, sliRelationshipTypes));
    }

    /// <summary>
    /// Creates a new parent/child link unless the same pairing already exists
    /// (checked in both directions).
    /// NOTE(review): no [ValidateAntiForgeryToken] on this POST.
    /// </summary>
    [HttpPost]
    public ActionResult Create(CreateViewModel vmCreate)
    {
        // validation
        if(!ModelState.IsValid)
        {
            return View("Create", vmCreate);
        }

        // link duplicated? — look for the pairing with the supplied type on
        // either side of the relationship.
        var countExistingParentLinks = _unitOfWork.ParentChildRelationshipTypes.GetAll()
            .Where(r => r.ParentRelationshipTypeId == vmCreate.SuppliedRelationshipTypeId)
            .Where(r => r.ChildRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedLinkedRelationshipType))
            .Count(r => r.IsActive);
        var countExistingChildLinks = _unitOfWork.ParentChildRelationshipTypes.GetAll()
            .Where(r => r.ChildRelationshipTypeId == vmCreate.SuppliedRelationshipTypeId)
            .Where(r => r.ParentRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedLinkedRelationshipType))
            .Count(r => r.IsActive);
        if (countExistingParentLinks > 0 || countExistingChildLinks > 0)
        {
            // update Drop Down Lists for vm (lost between requests)
            vmCreate.LinkedRelationshipTypes = GetDropDownListForRelationshipTypes(vmCreate.SuppliedRelationshipTypeId, null);
            vmCreate.RelationshipLevels = GetDropDownListForRelationshipLevels(null);

            // redisplay view
            ViewData["ErrorMessage"] = "Record already exists";
            return View("Create", vmCreate);
        }

        // determine relationship level: which side of the link the supplied
        // relationship type sits on.
        int parentRelationshipType = 0;
        int childRelationshipType = 0;
        if(vmCreate.SelectedRelationshipLevel == "Parent-Child")
        {
            parentRelationshipType = vmCreate.SuppliedRelationshipTypeId;
            childRelationshipType = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedLinkedRelationshipType);
        }
        else // Child-Parent
        {
            parentRelationshipType = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmCreate.SelectedLinkedRelationshipType);
            childRelationshipType = vmCreate.SuppliedRelationshipTypeId;
        }

        // transfer vm to dto
        _unitOfWork.ParentChildRelationshipTypes.Add(new ParentChildRelationshipType()
        {
            ParentRelationshipTypeId = parentRelationshipType,
            ChildRelationshipTypeId = childRelationshipType,
            IsActive = true
        });

        // update db
        _unitOfWork.CommitTrans();

        // display view
        return RedirectToAction("Details", "RelationshipType", new { id = vmCreate.SuppliedRelationshipTypeId });
    }

    /// <summary>
    /// Displays the edit form for an existing link, pre-selecting the level
    /// (Parent-Child vs Child-Parent) and the linked type based on which side
    /// the supplied relationship type occupies.
    /// </summary>
    [HttpGet]
    public ViewResult Edit(int id, int relationshipTypeId)
    {
        // transfer dto for id
        var dtoSuppliedParentChildRelationshipType = _unitOfWork.ParentChildRelationshipTypes.Get(id);
        var dtoSuppliedRelationshipType = _unitOfWork.RelationshipTypes.Get(relationshipTypeId);

        // transfer levels to sli
        var selectedRelationshipLevelId = dtoSuppliedParentChildRelationshipType.ParentRelationshipTypeId == relationshipTypeId ?
            "Parent-Child" : "Child-Parent";
        List<SelectListItem> sliRelationshipLevels = GetDropDownListForRelationshipLevels(selectedRelationshipLevelId);

        // transfer db to sli — the "linked" type is whichever side is NOT the supplied one
        var selectedRelationshipTypeId = dtoSuppliedParentChildRelationshipType.ParentRelationshipTypeId == relationshipTypeId ?
            dtoSuppliedParentChildRelationshipType.ChildRelationshipTypeId :
            dtoSuppliedParentChildRelationshipType.ParentRelationshipTypeId;
        List<SelectListItem> sliRelationshipTypes = GetDropDownListForRelationshipTypes(relationshipTypeId, selectedRelationshipTypeId);

        // display view
        return View("Edit", new EditViewModel(dtoSuppliedParentChildRelationshipType, dtoSuppliedRelationshipType, sliRelationshipLevels, selectedRelationshipLevelId, sliRelationshipTypes, selectedRelationshipTypeId));
    }

    /// <summary>
    /// Applies edits to a link after re-checking (in both directions, excluding
    /// the record being edited) that the new pairing is not a duplicate.
    /// NOTE(review): no [ValidateAntiForgeryToken] on this POST.
    /// </summary>
    [HttpPost]
    public ActionResult Edit(EditViewModel vmEdit)
    {
        if(!ModelState.IsValid)
        {
            return View("Edit", vmEdit);
        }

        // duplicated relationship?
        var countExistingParentChildRelationship = _unitOfWork.ParentChildRelationshipTypes.GetAll()
            .Where(r => r.Id != vmEdit.Id)
            .Where(r => r.ParentRelationshipTypeId == vmEdit.RelationshipTypeId)
            .Where(r => r.ChildRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedRelationshipType))
            .Count(r => r.IsActive);
        var countExistingChildParentRelationship = _unitOfWork.ParentChildRelationshipTypes.GetAll()
            .Where(r => r.Id != vmEdit.Id)
            .Where(r => r.ChildRelationshipTypeId == vmEdit.RelationshipTypeId)
            .Where(r => r.ParentRelationshipTypeId == Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedRelationshipType))
            .Count(r => r.IsActive);
        if (countExistingParentChildRelationship > 0 || countExistingChildParentRelationship > 0)
        {
            // update Drop Down Lists for vm
            vmEdit.RelationshipTypes = GetDropDownListForRelationshipTypes(vmEdit.RelationshipTypeId, Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedRelationshipType));
            vmEdit.RelationshipLevels = GetDropDownListForRelationshipLevels(vmEdit.SelectedRelationshipLevel);

            // redisplay view
            ViewData["ErrorMessage"] = "Record already exists";
            return View("Edit", vmEdit);
        }

        // transfer vm to dto — assign the ids according to the chosen level
        var dtoParentChildRelationshipType = _unitOfWork.ParentChildRelationshipTypes.Get(vmEdit.Id);
        if(vmEdit.SelectedRelationshipLevel == "Parent-Child")
        {
            dtoParentChildRelationshipType.ParentRelationshipTypeId = vmEdit.RelationshipTypeId;
            dtoParentChildRelationshipType.ChildRelationshipTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedRelationshipType);
        }
        else // Child-Parent
        {
            dtoParentChildRelationshipType.ParentRelationshipTypeId = Business.Utilities.DataTypeUtility.GetIntegerFromString(vmEdit.SelectedRelationshipType);
            dtoParentChildRelationshipType.ChildRelationshipTypeId = vmEdit.RelationshipTypeId;
        }

        // update db
        _unitOfWork.CommitTrans();

        // display view
        return RedirectToAction("Details", "RelationshipType", new { id = vmEdit.RelationshipTypeId });
    }

    /// <summary>
    /// Displays the delete-confirmation page for a link.
    /// </summary>
    [HttpGet]
    public ViewResult Delete(int id, int relationshipTypeId)
    {
        // transfer values to dto
        var dtoParentChildRelationshipType = _unitOfWork.ParentChildRelationshipTypes.Get(id);
        var dtoRelationshipType = _unitOfWork.RelationshipTypes.Get(relationshipTypeId);
        var dtoParentRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ParentRelationshipTypeId);
        var dtoChildRelationshipType = _unitOfWork.RelationshipTypes.Get(dtoParentChildRelationshipType.ChildRelationshipTypeId);

        // display view
        return View("Delete", new DeleteViewModel(dtoParentChildRelationshipType, dtoRelationshipType, dtoParentRelationshipType, dtoChildRelationshipType));
    }

    /// <summary>
    /// Soft-deletes a link by flagging it inactive (the row is kept).
    /// NOTE(review): no [ValidateAntiForgeryToken] on this POST.
    /// </summary>
    [HttpPost]
    public ActionResult Delete(DeleteViewModel vmDelete)
    {
        if(!ModelState.IsValid)
        {
            return View("Delete", vmDelete);
        }

        // update dto — soft delete
        var dtoParentChildRelationshipType = _unitOfWork.ParentChildRelationshipTypes.Get(vmDelete.Id);
        dtoParentChildRelationshipType.IsActive = false;

        // update db
        _unitOfWork.CommitTrans();

        // display view
        return RedirectToAction("Details", "RelationshipType", new { id = vmDelete.RelationshipTypeId });
    }

    /// <summary>
    /// Builds the "link to" drop-down: all active relationship types except the
    /// supplied one and any that already have an active link with it.
    /// </summary>
    /// <param name="relationshipTypeId">The relationship type being linked from.</param>
    /// <param name="selectedId">Optional id to mark as selected (edit scenario).</param>
    private List<SelectListItem> GetDropDownListForRelationshipTypes(int relationshipTypeId, int? selectedId)
    {
        // validate selectedId — when selectedId is null, ToString() yields ""
        // so TryParse fails and intSelectedId stays 0 (nothing pre-selected).
        var intSelectedId = 0;
        int.TryParse(selectedId.ToString(), out intSelectedId);

        // transfer db
        var dbRelationshipTypes = _unitOfWork.RelationshipTypes.GetAll()
            .Where(r => r.IsActive)
            .Where(r => r.Id != relationshipTypeId)
            .ToList();

        // transfer db to sli
        var sliRelationshipTypes = new List<SelectListItem>();
        foreach (var dtoRelationshipType in dbRelationshipTypes)
        {
            // count active links in each direction, ignoring the currently
            // selected type so an edited link still shows in its own list
            var countLinkedParentRelationships = _unitOfWork.ParentChildRelationshipTypes.GetAll()
                .Where(r => r.ParentRelationshipTypeId == relationshipTypeId)
                .Where(r => r.ChildRelationshipTypeId == dtoRelationshipType.Id)
                .Where(r => r.ParentRelationshipTypeId != selectedId)
                .Where(r => r.ChildRelationshipTypeId != selectedId)
                .Count(r => r.IsActive);
            var countLinkedChildRelationships = _unitOfWork.ParentChildRelationshipTypes.GetAll()
                .Where(r => r.ChildRelationshipTypeId == relationshipTypeId)
                .Where(r => r.ParentRelationshipTypeId == dtoRelationshipType.Id)
                .Where(r => r.ChildRelationshipTypeId != selectedId)
                .Where(r => r.ParentRelationshipTypeId != selectedId)
                .Count(r => r.IsActive);

            // add if existing link not found
            if (countLinkedParentRelationships == 0 && countLinkedChildRelationships == 0)
            {
                sliRelationshipTypes.Add(new SelectListItem()
                {
                    Value = dtoRelationshipType.Id.ToString(),
                    Selected = dtoRelationshipType.Id == intSelectedId,
                    Text = dtoRelationshipType.Name
                });
            }
        }
        return sliRelationshipTypes;
    }

    /// <summary>
    /// Builds the fixed two-entry level drop-down ("Parent-Child" / "Child-Parent").
    /// </summary>
    /// <param name="selectedValue">The value to pre-select, or null for none.</param>
    private static List<SelectListItem> GetDropDownListForRelationshipLevels(string selectedValue)
    {
        var sliRelationshipLevels = new List<SelectListItem>();
        sliRelationshipLevels.Add(new SelectListItem()
        {
            Value = "Parent-Child",
            Selected = "Parent-Child" == selectedValue,
            Text = "Parent-Child"
        });
        sliRelationshipLevels.Add(new SelectListItem()
        {
            Value = "Child-Parent",
            Selected = "Child-Parent" == selectedValue,
            Text = "Child-Parent"
        });
        return sliRelationshipLevels;
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
[Table("AccountRelationships")]
public class AssetRelationship : BaseEntity
{
[Required]
[Display(Name = "Parent Asset ID")]
[Column("ParentAccountId")]
public int ParentAssetId { get; set; }
[Required]
[Display(Name = "Child Asset ID")]
[Column("ChildAccountId")]
public int ChildAssetId { get; set; }
[Required]
[Display(Name = "AccountTypeRelationshipType ID")]
public int AssetTypeRelationshipTypeId { get; set; }
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// Repository for <see cref="ParentChildRelationshipType"/> rows; all data
/// access comes from the generic base, this class only binds the entity type.
/// </summary>
public class ParentChildRelationshipTypeRepository : Repository<ParentChildRelationshipType>, IParentChildRelationshipTypeRepository
{
    public ParentChildRelationshipTypeRepository(FinancialDbContext context)
        : base(context)
    {
    }

    // Strongly-typed view of the context held by the generic base repository.
    private FinancialDbContext FinancialDbContext => _context as FinancialDbContext;
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
[Table("AccountTypes")]
public class AssetType : BaseEntity
{
[Required]
public string Name { get; set; }
//public ICollection<Asset> Assets { get; set; }
public static readonly int IdForCreditCard = 3;
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
/// <summary>
/// Repository for <see cref="SettingType"/> rows with name-based queries.
/// </summary>
public class SettingTypeRepository : Repository<SettingType>, ISettingTypeRepository
{
    public SettingTypeRepository(FinancialDbContext context)
        : base(context)
    {
    }

    // Strongly-typed view of the context held by the generic base repository.
    private FinancialDbContext FinancialDbContext => _context as FinancialDbContext;

    /// <summary>All setting types, sorted alphabetically, materialized eagerly.</summary>
    public IEnumerable<SettingType> GetAllOrderedByName()
    {
        var orderedByName = from settingType in FinancialDbContext.SettingTypes
                            orderby settingType.Name
                            select settingType;
        return orderedByName.ToList();
    }

    /// <summary>Number of setting types whose name matches exactly.</summary>
    public int CountMatching(string name) =>
        FinancialDbContext.SettingTypes.Count(settingType => settingType.Name == name);

    /// <summary>
    /// Number of setting types matching <paramref name="name"/>, ignoring the
    /// record with <paramref name="excludeId"/> (duplicate check during edit).
    /// </summary>
    public int CountMatching(int excludeId, string name) =>
        FinancialDbContext.SettingTypes
            .Where(settingType => settingType.Id != excludeId)
            .Count(settingType => settingType.Name == name);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core.Models
{
[Table("TransactionTypes")]
public class TransactionType : BaseEntity
{
[Required]
public string Name { get; set; }
public ICollection<AssetTransaction> AssetTransactions { get; set; }
public static readonly int IdForExpense = 1;
public static readonly int IdForIncome = 2;
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
/// <summary>
/// Form model for editing one account transaction. Populated from the business
/// model by the constructor; the drop-down lists for type/category are wired
/// separately (the corresponding constructor lines are commented out).
/// </summary>
public class EditViewModel
{
    public EditViewModel() { }

    public EditViewModel(Business.Models.AccountTransaction bmAssetTransaction)
    {
        Id = bmAssetTransaction.AssetTransactionId;
        AssetId = bmAssetTransaction.AssetId;
        AssetName = bmAssetTransaction.AssetName;
        AssetTypeName = bmAssetTransaction.AssetTypeName;
        DueDate = bmAssetTransaction.DueDate;
        ClearDate = bmAssetTransaction.ClearDate;
        CheckNumber = bmAssetTransaction.CheckNumber;
        Amount = bmAssetTransaction.Amount;
        Note = bmAssetTransaction.Note;
        //TransactionTypes = bmAssetTransaction.TransactionTypeSelectList;
        //SelectedTransactionTypeId = bmAssetTransaction.SelectedTransactionTypeId;
        //TransactionCategories = bmAssetTransaction.TransactionCategorySelectList;
        SelectedTransactionCategoryId = bmAssetTransaction.SelectedTransactionCategoryId;
    }

    // Transaction id being edited.
    public int Id { get; set; }

    // Owning asset (read-only context shown on the form).
    public int AssetId { get; set; }

    [Display(Name = "Asset Name")]
    public string AssetName { get; set; }

    [Display(Name = "Asset Type")]
    public string AssetTypeName { get; set; }

    [Display(Name = "Check Number")]
    public string CheckNumber { get; set; }

    [Required]
    [Display(Name = "Due")]
    [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}", ApplyFormatInEditMode = true)]
    public DateTime DueDate { get; set; }

    // NOTE(review): not nullable although a transaction may be uncleared —
    // an unset date renders as DateTime.MinValue; confirm the view handles it.
    [Display(Name = "Cleared")]
    [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}", ApplyFormatInEditMode = true)]
    public DateTime ClearDate { get; set; }

    [Required]
    public decimal Amount { get; set; }

    public string Note { get; set; }

    // Drop-down selections are bound as strings and parsed server-side.
    [Required]
    [Display(Name = "Type")]
    public string SelectedTransactionTypeId { get; set; }
    public IEnumerable<SelectListItem> TransactionTypes { get; set; }

    [Required]
    [Display(Name = "Category")]
    public string SelectedTransactionCategoryId { get; set; }
    public IEnumerable<SelectListItem> TransactionCategories { get; set; }
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using TransactionCategory = Financial.Core.Models.TransactionCategory;
namespace Financial.Data.Repositories
{
/// <summary>
/// Repository for <see cref="TransactionCategory"/> rows.
/// </summary>
public class TransactionCategoryRepository : Repository<TransactionCategory>, ITransactionCategoryRepository
{
    public TransactionCategoryRepository(FinancialDbContext context)
        : base(context)
    {
    }

    // Strongly-typed view of the context held by the generic base repository.
    private FinancialDbContext FinancialDbContext
    {
        get { return _context as FinancialDbContext; }
    }

    /// <summary>
    /// Active transaction categories ordered by name. Materialized with
    /// ToList() so the query runs once while the context is alive (the
    /// previous version returned a deferred query, unlike the other
    /// repositories such as SettingTypeRepository which call ToList()).
    /// </summary>
    public IEnumerable<TransactionCategory> GetAllActiveOrderedByName()
    {
        return FinancialDbContext.TransactionCategories
            .Where(r => r.IsActive)
            .OrderBy(r => r.Name)
            .ToList();
    }
}
}
<file_sep>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
using Financial.Business.Models;
using Financial.Business.ServiceInterfaces;
using Financial.Business.Utilities;
using Financial.Core.Models;
using Financial.Data;
namespace Financial.Business.Services
{
/// <summary>
/// Business-layer service exposing account (asset) lists decorated with
/// type-specific identification info (e.g. credit-card account numbers).
/// </summary>
public class AccountService : IAccountService
{
    private readonly IUnitOfWork _unitOfWork;
    private readonly IAccountSettingService _assetSettingService;

    // NOTE(review): this overload leaves _assetSettingService null, so
    // GetSelectListOfAccounts would throw a NullReferenceException on an
    // instance created this way — confirm which callers use it.
    public AccountService()
    {
        _unitOfWork = new UnitOfWork();
    }

    public AccountService(
        IUnitOfWork unitOfWork,
        IAccountSettingService assetSettingService)
    {
        _unitOfWork = unitOfWork;
        _assetSettingService = assetSettingService;
    }

    /// <summary>
    /// All active assets as business models, names decorated with the account
    /// number for credit cards. Returns an empty list if any asset references
    /// a missing asset type.
    /// </summary>
    public List<Account> GetListOfAccounts()
    {
        // create list object to return
        var bmAccountList = new List<Account>();

        // get assets from db
        var dbAssetList = _unitOfWork.Assets.GetAllActiveOrderedByName();
        foreach (var dbAsset in dbAssetList)
        {
            // get asset type from db; a dangling reference aborts the whole list
            var dbAssetType = _unitOfWork.AssetTypes.Get(dbAsset.AssetTypeId);
            if (dbAssetType == null)
                return new List<Account>();

            // add additional information to asset name (credit cards only)
            if (dbAssetType.Id == AssetType.IdForCreditCard)
            {
                var dbAssetSetting = _unitOfWork.AssetSettings.GetActive(dbAsset.Id, Core.Models.SettingType.IdForAccountNumber);
                if(dbAssetSetting != null)
                    dbAsset.Name = AccountUtility.FormatAccountName(dbAsset.Name, dbAssetType.Id, dbAssetSetting.Value);
            }

            // transfer dto to bm
            bmAccountList.Add(new Account(dbAsset, dbAssetType));
        }
        return bmAccountList;
    }

    /// <summary>
    /// Extra display text identifying an account: " (number)" for credit cards
    /// with an active account-number setting, otherwise an empty string.
    /// </summary>
    public string GetAccountIdentificationInformation(Account bmAccount)
    {
        if (bmAccount == null)
            return string.Empty;

        if (bmAccount.AssetTypeId == AssetType.IdForCreditCard)
        {
            var dtoAssetSetting = _unitOfWork.AssetSettings.GetActive(bmAccount.AssetId, Core.Models.SettingType.IdForAccountNumber);
            if (dtoAssetSetting == null)
                return string.Empty;
            return $" ({dtoAssetSetting.Value})";
        }
        return string.Empty;
    }

    /// <summary>
    /// Drop-down list of active accounts; <paramref name="selectedId"/> (if
    /// any) marks the matching entry as selected.
    /// </summary>
    public List<SelectListItem> GetSelectListOfAccounts(int? selectedId = null)
    {
        // transfer values from db
        var dbAssets = _unitOfWork.Assets.GetAllActiveOrderedByName();

        // transfer dto to sli
        var sliAssets = new List<SelectListItem>();
        foreach (var dtoAsset in dbAssets)
        {
            // add credit card account number to name
            // (delegated to the injected setting service — see ctor note)
            var assetName = dtoAsset.Name;
            var assetNameInformation = _assetSettingService.GetAccountIdentificationInformation(new Account(dtoAsset));
            sliAssets.Add(new SelectListItem()
            {
                Value = dtoAsset.Id.ToString(),
                Text = string.Format("{0}{1}", assetName, assetNameInformation),
                // a null selectedId stringifies to "", which never matches an id
                Selected = dtoAsset.Id.ToString() == selectedId.ToString(),
            });
        }
        return sliAssets;
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository contract for <see cref="ParentChildRelationshipType"/> rows;
/// adds nothing beyond the generic repository operations.
/// </summary>
public interface IParentChildRelationshipTypeRepository : IRepository<ParentChildRelationshipType>
{
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository contract for <see cref="AssetSetting"/> rows.
/// </summary>
public interface IAssetSettingRepository : IRepository<AssetSetting>
{
    /// <summary>The active setting for one asset/setting-type pair, or null.</summary>
    AssetSetting GetActive(int assetId, int settingTypeId);

    /// <summary>All active settings belonging to one asset.</summary>
    IEnumerable<AssetSetting> GetAllActiveForAsset(int assetId);

    /// <summary>All active settings of one setting type across assets.</summary>
    IEnumerable<AssetSetting> GetAllActiveForSettingType(int settingTypeId);
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Seed data for in-memory AssetTypeRelationshipType repositories used by unit
/// tests. Yielded lazily so each enumeration produces fresh instances; record
/// id 3 is inactive so tests can verify active-only filtering.
/// </summary>
public class FakeAssetTypesRelationshipTypes
{
    public static IEnumerable<AssetTypeRelationshipType> InitialFakeAssetTypesRelationshipTypes()
    {
        yield return new AssetTypeRelationshipType() { IsActive = true, Id = 1, ParentAssetTypeId = 2, ChildAssetTypeId = 4, ParentChildRelationshipTypeId = 5 };
        yield return new AssetTypeRelationshipType() { IsActive = true, Id = 2, ParentAssetTypeId = 4, ChildAssetTypeId = 5, ParentChildRelationshipTypeId = 1 };
        yield return new AssetTypeRelationshipType() { IsActive = false, Id = 3, ParentAssetTypeId = 3, ChildAssetTypeId = 3, ParentChildRelationshipTypeId = 3 };
        yield return new AssetTypeRelationshipType() { IsActive = true, Id = 4, ParentAssetTypeId = 5, ChildAssetTypeId = 1, ParentChildRelationshipTypeId = 2 };
        yield return new AssetTypeRelationshipType() { IsActive = true, Id = 5, ParentAssetTypeId = 1, ChildAssetTypeId = 2, ParentChildRelationshipTypeId = 4 };
    }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Utilities
{
/// <summary>
/// Helpers for working with transaction amounts.
/// (Previously-commented-out helpers for balance math and string formatting
/// were removed as dead code.)
/// </summary>
public class TransactionUtility
{
    /// <summary>
    /// Returns the signed amount for a transaction: expense amounts are
    /// negated, every other transaction type keeps its sign.
    /// </summary>
    /// <param name="TransactionTypeId">Transaction type id (see <see cref="TransactionType"/> well-known ids).</param>
    /// <param name="amount">The unsigned amount as entered.</param>
    /// <returns>The amount, negated when the type is an expense.</returns>
    public static decimal FormatAmount(int TransactionTypeId, decimal amount)
    {
        // Expenses reduce a balance, so they are represented as negative.
        if (TransactionTypeId == TransactionType.IdForExpense)
            return -amount;
        return amount;
    }
}
}
<file_sep>using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Collections.Generic;
using Financial.Business.Services;
using Financial.Business.Tests.Fakes.Repositories;
namespace Financial.Business.Tests.Services
{
/// <summary>
/// Shared fixture: wires an <see cref="AccountTypeService"/> to the fake unit
/// of work provided by <see cref="ServiceTestsBase"/>.
/// </summary>
public class AssetTypeServiceTestsBase : ServiceTestsBase
{
    // Service under test, available to every derived test class.
    protected AccountTypeService _service;

    public AssetTypeServiceTestsBase()
    {
        _service = new AccountTypeService(_unitOfWork);
    }
}
/// <summary>
/// Unit tests for AccountTypeService.GetAssetType.
/// </summary>
[TestClass()]
public class AssetTypeServiceTests : AssetTypeServiceTestsBase
{
    [TestMethod()]
    public void GetAssetType_WhenProvidedValidInput_ReturnValue_Test()
    {
        // Assemble
        var assetTypeId = 1;

        // Act
        var result = _service.GetAssetType(assetTypeId);

        // Assert
        Assert.IsInstanceOfType(result, typeof(Business.Models.AccountType), "Result Type");
        Assert.AreEqual(assetTypeId, result.AssetTypeId, "Asset Type Id");
    }

    [TestMethod()]
    public void GetAssetType_WhenProvidedInvalidAssetTypeId_ReturnValue_Test()
    {
        // Assemble
        var assetTypeId = 0;

        // Act
        var result = _service.GetAssetType(assetTypeId);

        // Assert
        Assert.IsNull(result, "Result");
    }

    [TestMethod()]
    public void GetAssetType_WhenProvidedValidInputIsNotActive_ReturnValue_Test()
    {
        // Assemble: a repository containing only one INACTIVE asset type.
        var fakeAssetTypes = new List<Core.Models.AssetType>() {
            new Core.Models.AssetType() { Id = 10, Name = "Name 1", IsActive = false }
        };
        _unitOfWork.AssetTypes = new InMemoryAssetTypeRepository(fakeAssetTypes);
        var service = new AccountTypeService(_unitOfWork);

        // Act: call the locally-wired service. (Bug fix: the previous version
        // built this local service but then invoked the inherited _service,
        // leaving the local unused and exercising the wrong instance.)
        var result = service.GetAssetType(fakeAssetTypes[0].Id);

        // Assert
        Assert.IsNull(result, "Result");
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.Account
{
/// <summary>
/// Confirmation model for deleting an account (asset): shows the name and
/// type being removed.
/// </summary>
public class DeleteViewModel
{
    public DeleteViewModel() { }

    public DeleteViewModel(Core.Models.Asset dtoAsset, Core.Models.AssetType dtoAssetType)
    {
        Id = dtoAsset.Id;
        AssetName = dtoAsset.Name;
        AssetTypeName = dtoAssetType.Name;
    }

    // Id of the asset to delete.
    public int Id { get; set; }

    [Display(Name = "Name")]
    public string AssetName { get; set; }

    [Display(Name = "Type")]
    public string AssetTypeName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeRelationshipType
{
/// <summary>
/// Form model for linking an asset type to another via a relationship type.
/// NOTE(review): the constructor does not populate
/// SelectedParentChildRelationshipTypeId / ParentChildRelationshipTypes —
/// confirm the caller assigns them separately.
/// </summary>
public class CreateViewModel
{
    public CreateViewModel() { }

    public CreateViewModel(Core.Models.AssetType dtoSuppliedAssetType,
        List<SelectListItem> sliRelationshipLevels, List<SelectListItem> sliLinkAssetTypes,
        string selectedRelationshipLevel, string selectedLinkedAssetType)
    {
        SuppliedAssetTypeId = dtoSuppliedAssetType.Id;
        SuppliedAssetTypeName = dtoSuppliedAssetType.Name;
        RelationshipLevels = sliRelationshipLevels;
        SelectedRelationshipLevel = selectedRelationshipLevel;
        LinkAssetTypes = sliLinkAssetTypes;
        SelectedLinkedAssetTypeId = selectedLinkedAssetType;
    }

    // The asset type the link is being created from.
    public int SuppliedAssetTypeId { get; set; }

    [Display(Name = "Asset Type")]
    public string SuppliedAssetTypeName { get; set; }

    // Drop-down selections are bound as strings and parsed server-side.
    [Required]
    [Display(Name = "Relationship Level")]
    public string SelectedRelationshipLevel { get; set; }
    public IEnumerable<SelectListItem> RelationshipLevels { get; set; }

    [Required]
    [Display(Name = "Relationship Type")]
    public string SelectedParentChildRelationshipTypeId { get; set; }
    public IEnumerable<SelectListItem> ParentChildRelationshipTypes { get; set; }

    [Required]
    [Display(Name = "Link Asset Type")]
    public string SelectedLinkedAssetTypeId { get; set; }
    public IEnumerable<SelectListItem> LinkAssetTypes { get; set; }
}
}
<file_sep>using Financial.Business;
using Financial.Business.ServiceInterfaces;
using Financial.Business.Utilities;
using Financial.Core;
using Financial.Core.Models;
using Financial.Data;
using Financial.WebApplication.Models.ViewModels.AccountTransaction;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Financial.Business.Models;
namespace Financial.WebApplication.Controllers
{
public class AccountTransactionController : BaseController
{
private IBusinessService _businessService;
/// <summary>
/// Wires the controller to the business-layer facade used by all actions.
/// </summary>
public AccountTransactionController(IBusinessService businessService)
    : base()
{
    _businessService = businessService;
}
[HttpGet]
public ViewResult Index()
{
    // Lists active transactions, newest due date first.
    try
    {
        // Surface one-shot messages left by a previous action (PRG pattern):
        // the views read ViewData, so TempData values are copied across.
        if (TempData["ErrorMessage"] != null)
            ViewData["ErrorMessage"] = TempData["ErrorMessage"];
        if (TempData["SuccessMessage"] != null)
            ViewData["SuccessMessage"] = TempData["SuccessMessage"];

        // transfer bm to vm
        var vmIndex = _businessService.AccountTransactionService.GetListOfActiveTransactions()
            .OrderByDescending(r => r.DueDate)
            .Select(r => new IndexViewModel(r))
            .ToList();
        return View("Index", vmIndex);
    }
    catch (Exception)
    {
        // Bug fix: the view renders in THIS request via the ViewData path
        // above, so the message goes into ViewData; the previous TempData
        // assignment was not displayed here and leaked into the next request.
        ViewData["ErrorMessage"] = "Encountered problem";
        return View("Index", new List<IndexViewModel>());
    }
}
[HttpGet]
public ActionResult Create(int? assetId)
{
try
{
var bmAccount = _businessService.AccountTransactionService.GetAccountForTransaction(assetId);
var sliAccounts = _businessService.AccountTransactionService.GetAccountSelectList(assetId.ToString());
var sliTransactionTypes = _businessService.AccountTransactionService.GetTransactionTypeSelectList(null);
var sliTransactionCategory = _businessService.AccountTransactionService.GetTransactionCategorySelectList(null);
/*
// get bm
var bmAssetTransaction = _businessService.AccountTransactionService.GetTransactionOptions(assetId);
if (bmAssetTransaction == null)
{
TempData["ErrorMessage"] = "Unable to create record. Try again.";
return RedirectToAction("Index", "AccountTransaction");
}
// transfer bm to vm
var vmCreate = new CreateViewModel(bmAssetTransaction);
*/
return View("Create", new CreateViewModel(sliAccounts, sliTransactionTypes, sliTransactionCategory));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(CreateViewModel vmCreate)
{
try
{
if (!ModelState.IsValid)
{
TempData["ErrorMessage"] = "Problem creating record. Try again.";
return RedirectToAction("Index", "AccountTransaction");
}
// transfer vm to bm
var bmAssetTransaction = new Business.Models.AccountTransaction()
{
AssetId = DataTypeUtility.GetIntegerFromString(vmCreate.SelectedAccountId),
TransactionTypeId = DataTypeUtility.GetIntegerFromString(vmCreate.SelectedTransactionTypeId),
TransactionCategoryId = DataTypeUtility.GetIntegerFromString(vmCreate.SelectedTransactionCategoryId),
CheckNumber = vmCreate.CheckNumber,
DueDate = Convert.ToDateTime(vmCreate.DueDate),
ClearDate = Convert.ToDateTime(vmCreate.ClearDate),
Amount = vmCreate.Amount,
Note = vmCreate.Note,
};
// update db
if (!_businessService.AccountTransactionService.AddTransaction(bmAssetTransaction))
{
//bmAssetTransaction = _businessService.AccountTransactionService.GetTransactionOptions(vmCreate.AssetId);
//vmCreate.Assets = bmAssetTransaction.AssetSelectList;
//vmCreate.TransactionTypes = bmAssetTransaction.TransactionTypeSelectList;
//vmCreate.TransactionCategories= bmAssetTransaction.TransactionCategorySelectList;
ViewData["ErrorMessage"] = "Problem creating record";
return View("Create", vmCreate);
}
TempData["SuccessMessage"] = "Record created";
return RedirectToAction("Details", "Account", new { Id = vmCreate.AssetId });
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
[HttpGet]
public ActionResult Edit(int id)
{
try
{
// get bm
var bmAssetTransaction = _businessService.AccountTransactionService.GetTransactionToEdit(id);
if (bmAssetTransaction == null)
{
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Index", "AccountTransaction");
}
return View("Edit", new EditViewModel(bmAssetTransaction));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered Problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(EditViewModel vmEdit)
{
try
{
if(!ModelState.IsValid)
{
TempData["ErrorMessage"] = "Problem editing record. Try again.";
return RedirectToAction("Index", "AccountTransaction");
}
// transfer vm to bm
var bmAssetTransaction = new Business.Models.AccountTransaction()
{
AssetTransactionId = vmEdit.Id,
AssetId = vmEdit.AssetId,
TransactionTypeId = DataTypeUtility.GetIntegerFromString(vmEdit.SelectedTransactionTypeId),
TransactionCategoryId = DataTypeUtility.GetIntegerFromString(vmEdit.SelectedTransactionCategoryId),
CheckNumber = vmEdit.CheckNumber,
DueDate = Convert.ToDateTime(vmEdit.DueDate),
ClearDate = Convert.ToDateTime(vmEdit.ClearDate),
Amount = vmEdit.Amount,
Note = vmEdit.Note,
};
// update db
if (!_businessService.AccountTransactionService.UpdateTransaction(bmAssetTransaction))
{
ViewData["ErrorMessage"] = "Problem updating record";
return View("Edit", vmEdit);
}
TempData["SuccessMessage"] = "Record updated";
return RedirectToAction("Details", "Account", new { id = vmEdit.AssetId });
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered Problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
[HttpGet]
public ActionResult Delete(int id)
{
try
{
// get bm
var bmAssetTransaction = _businessService.AccountTransactionService.GetTransactionToDelete(id);
// tranfer bm to vm
return View("Delete", new DeleteViewModel(bmAssetTransaction));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered Problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Delete(DeleteViewModel vmDelete)
{
try
{
if (!ModelState.IsValid)
{
TempData["ErrorMessage"] = "Problem deleting record. Try again.";
return RedirectToAction("Index", "AccountTransaction");
}
// update db
if (!_businessService.AccountTransactionService.DeleteTransaction(vmDelete.Id))
{
ViewData["ErrorMessage"] = "Problem deleting record";
return View("Delete", vmDelete);
}
TempData["SuccessMessage"] = "Record deleted";
return RedirectToAction("Index", "AccountTransaction");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered Problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
[HttpGet]
public ActionResult DisplayForAsset(int assetId)
{
try
{
/*
// transfer assetId from db
var dbAssetTransactions = _unitOfWork.AssetTransactions.GetAllActiveByDescendingDueDate(assetId);
// transfer dto to vm
var vmDisplayForAsset = new List<DisplayForAssetViewModel>();
foreach(var dtoAssetTransaction in dbAssetTransactions)
{
var dtoTransactionCategory = _unitOfWork.TransactionCategories.Get(dtoAssetTransaction.TransactionCategoryId);
// is Expense?
if(dtoAssetTransaction.TransactionTypeId == 1)
{
// update to "-0.00"
dtoAssetTransaction.Amount = dtoAssetTransaction.Amount * -1;
}
// format date
string clearDate = DataTypeUtility.GetDateValidatedToShortDateString(dtoAssetTransaction.ClearDate);
// transfer to vm
vmDisplayForAsset.Add(new DisplayForAssetViewModel(dtoAssetTransaction, clearDate, dtoTransactionCategory));
}
*/
// display view
return PartialView("_DisplayForAsset", new List<DisplayForAssetViewModel>());
}
catch (Exception)
{
ViewData["ErrorMessage"] = "Encountered problem";
return PartialView("_DisplayForAsset", new List<DisplayForAssetViewModel>());
}
}
/*
* TODO: implement service
[HttpGet]
public ActionResult SelectAssetToCreate()
{
try
{
// transfer dto to sli
var sliAssets = _businessService.AssetService.GetSelectListOfAssets(null);
// display view
return View("SelectAssetToCreate", new SelectAssetToCreateViewModel(sliAssets));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AssetTransaction");
}
}
*/
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult SelectAssetToCreate(SelectAssetToCreateViewModel vmSelectedAssetToCreate)
{
try
{
// get & validate selected id
var id = DataTypeUtility.GetIntegerFromString(vmSelectedAssetToCreate.SelectedAssetId);
if (id > 0)
{
// display view
return RedirectToAction("Create", "AccountTransaction", new { assetId = id });
}
TempData["ErrorMessage"] = "Value must be selected.";
return RedirectToAction("SelectAssetToCreate", "AccountTransaction");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
/*
* TODO: implement service
[HttpGet]
public ActionResult EditAsset(int id, int assetId)
{
try
{
// transfer id to dto
var dtoAssetTransaction = _unitOfWork.AssetTransactions.Get(id);
var dtoAsset = _unitOfWork.Assets.Get(assetId);
if (dtoAssetTransaction != null && dtoAsset != null)
{
var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
var dtoTransactionType = _unitOfWork.TransactionTypes.Get(dtoAssetTransaction.TransactionTypeId);
var dtoTransactionCategory = _unitOfWork.TransactionCategories.Get(dtoAssetTransaction.TransactionCategoryId);
if (dtoAssetType != null && dtoTransactionType != null && dtoTransactionCategory != null)
{
// validate values to display
dtoAssetTransaction.CheckNumber = TransactionUtility.FormatCheckNumber(dtoAssetTransaction.CheckNumber);
dtoAssetTransaction.Note = TransactionUtility.FormatTransactionNote(dtoAssetTransaction.Note);
// transfer dto to sli
var sliAssets = _businessService.AssetService.GetSelectListOfAssets(dtoAsset.Id.ToString());
// transfer to vm and display view
return View("EditAsset", new EditAssetViewModel(dtoAssetTransaction, sliAssets, dtoAsset.Id.ToString(), dtoAssetType, dtoTransactionType.Name, dtoTransactionCategory.Name));
}
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Edit", "AssetTransaction", new { id = id });
}
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Index", "AssetTransaction");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered Problem";
return RedirectToAction("Index", "AssetTransaction");
}
}
*/
[HttpPost]
public ActionResult EditAsset(EditAssetViewModel vmEditAsset)
{
try
{
/*
if (ModelState.IsValid)
{
// transfer vm to dto
var dtoAssetTransaction = _unitOfWork.AssetTransactions.Get(vmEditAsset.Id);
if (dtoAssetTransaction != null)
{
dtoAssetTransaction.AssetId = DataTypeUtility.GetIntegerFromString(vmEditAsset.SelectedAssetId);
// update db
_unitOfWork.CommitTrans();
// display view
TempData["SuccessMessage"] = "Asset Name updated";
return RedirectToAction("Edit", "AccountTransaction", new { id = vmEditAsset.Id });
}
}
*/
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Index", "AccountTransaction");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered Problem";
return RedirectToAction("Index", "AccountTransaction");
}
}
}
}<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
public class AssetTransactionRepository : Repository<AssetTransaction>, IAssetTransactionRepository
{
public AssetTransactionRepository(FinancialDbContext context)
: base(context)
{
}
private FinancialDbContext FinancialDbContext
{
get { return _context as FinancialDbContext; }
}
public IEnumerable<AssetTransaction> GetAllActiveByDueDate()
{
return FinancialDbContext.AssetTransactions
.Include(r => r.Asset)
.Include(r => r.Asset.AssetType)
.Include(r => r.TransactionType)
.Where(r => r.IsActive)
.OrderBy(r => r.DueDate)
.ToList();
}
public IEnumerable<AssetTransaction> GetAllActiveByDescendingDueDate(int assetId)
{
return FinancialDbContext.AssetTransactions
.Where(r => r.IsActive)
.Where(r => r.AssetId == assetId)
.OrderByDescending(r => r.DueDate)
.ToList();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetSetting
{
public class CreateLinkedSettingTypesViewModel
{
public CreateLinkedSettingTypesViewModel() { }
public CreateLinkedSettingTypesViewModel(Core.Models.Asset dtoAsset,
Core.Models.AssetType dtoAssetType,List<CreateViewModel> vmCreate)
{
AssetId = dtoAsset.Id;
AssetName = dtoAsset.Name;
AssetTypeName = dtoAssetType.Name;
CreateViewModels = vmCreate;
}
public int AssetId { get; set; }
[Display(Name = "Name")]
public string AssetName { get; set; }
[Display(Name = "Type")]
public string AssetTypeName { get; set; }
public List<CreateViewModel> CreateViewModels { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.ParentChildRelationshipType
{
public class IndexViewModel
{
public IndexViewModel() { }
public IndexViewModel(Core.Models.ParentChildRelationshipType dtoParentChildRelationshipType, Core.Models.RelationshipType dtoSuppliedRelationshipType,
Core.Models.RelationshipType dtoParentRelationshipType, Core.Models.RelationshipType dtoChildRelationshipType)
{
Id = dtoParentChildRelationshipType.Id;
RelationshipTypeId = dtoSuppliedRelationshipType.Id;
ParentRelationshipTypeName = dtoParentRelationshipType.Name;
ChildRelationshipTypeName = dtoChildRelationshipType.Name;
}
public int Id { get; set; }
public int RelationshipTypeId { get; set; }
[Display(Name = "Parent")]
public string ParentRelationshipTypeName { get; set; }
[Display(Name = "Child")]
public string ChildRelationshipTypeName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Models
{
public class AttributeType
{
public AttributeType() { }
public AttributeType(Core.Models.SettingType dtoSettingType)
{
SettingTypeId = dtoSettingType.Id;
SettingTypeName = dtoSettingType.Name;
}
public AttributeType(Core.Models.SettingType dtoSettingType,
Core.Models.AssetTypeSettingType dtoAssetTypeSettingType)
{
SettingTypeId = dtoSettingType.Id;
SettingTypeName = dtoSettingType.Name;
AssetTypeSettingTypeId = dtoAssetTypeSettingType.Id;
}
public int SettingTypeId { get; set; }
public string SettingTypeName { get; set; }
public int AssetTypeSettingTypeId { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Repositories
{
public class InMemoryRelationshipTypeRepository : InMemoryRepository<RelationshipType>, IRelationshipTypeRepository
{
private List<RelationshipType> _entities = null;
public InMemoryRelationshipTypeRepository(IEnumerable<RelationshipType> entities)
: base(entities)
{
_entities = entities as List<RelationshipType>;
}
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
    /// <summary>
    /// Seed data for in-memory AssetSetting repositories used by unit tests.
    /// </summary>
    public static class FakeAssetSettings
    {
        // Produced lazily via yield, so each enumeration yields FRESH AssetSetting
        // instances — a test that mutates one enumeration cannot leak state into another.
        public static IEnumerable<AssetSetting> InitialFakeAssetSettings()
        {
            // Id 3 is the only inactive row — presumably present to exercise
            // IsActive filtering in consumers; confirm against the tests that use it.
            yield return new AssetSetting() { Id = 1, AssetId = 2, SettingTypeId = 4, Value = "AssetSetting1", IsActive = true };
            yield return new AssetSetting() { Id = 2, AssetId = 1, SettingTypeId = 4, Value = "AssetSetting2", IsActive = true };
            yield return new AssetSetting() { Id = 3, AssetId = 1, SettingTypeId = 2, Value = "AssetSetting3", IsActive = false };
            yield return new AssetSetting() { Id = 4, AssetId = 5, SettingTypeId = 2, Value = "AssetSetting4", IsActive = true };
            yield return new AssetSetting() { Id = 5, AssetId = 4, SettingTypeId = 1, Value = "AssetSetting5", IsActive = true };
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetTypeRelationshipType
{
public class IndexViewModel
{
public IndexViewModel() { }
public IndexViewModel(Core.Models.AssetTypeRelationshipType dtoAssetTypeRelationshipType,
Core.Models.AssetType dtoSuppliedAssetType,
Core.Models.AssetType dtoLinkedAssetType,
Core.Models.RelationshipType dtoRelationshipType)
{
Id = dtoAssetTypeRelationshipType.Id;
SuppliedAssetTypeId = dtoSuppliedAssetType.Id;
SuppliedAssetTypeName = dtoSuppliedAssetType.Name;
LinkedAssetTypeName = dtoLinkedAssetType.Name;
RelationshipTypeName = dtoRelationshipType.Name;
}
public IndexViewModel(Core.Models.AssetTypeRelationshipType dtoAssetTypeRelationshipType,
Core.Models.AssetType dtoParentAssetType,
Core.Models.AssetType dtoChildAssetType,
Core.Models.RelationshipType dtoParentRelationshipType,
Core.Models.RelationshipType dtoChildRelationshipType)
{
Id = dtoAssetTypeRelationshipType.Id;
ParentAssetTypeName = dtoParentAssetType.Name;
ChildAssetTypeName = dtoChildAssetType.Name;
ParentRelationshipTypeName = dtoParentRelationshipType.Name;
ChildRelationshipTypeName = dtoChildRelationshipType.Name;
}
public int Id { get; set; }
public int SuppliedAssetTypeId { get; set; }
public string SuppliedAssetTypeName { get; set; }
[Display(Name = "Linked Asset Type")]
public string LinkedAssetTypeName { get; set; }
[Display(Name = "Relationship Type")]
public string RelationshipTypeName { get; set; }
[Display(Name = "Parent")]
public string ParentAssetTypeName { get; set; }
[Display(Name = "Child")]
public string ChildAssetTypeName { get; set; }
[Display(Name = "Parent-Child")]
public string ParentRelationshipTypeName { get; set; }
[Display(Name = "Child-Parent")]
public string ChildRelationshipTypeName { get; set; }
}
}
<file_sep>using Financial.Business;
using Financial.Business.Utilities;
using Financial.Core;
using Financial.Core.Models;
using Financial.WebApplication.Models.ViewModels.Account;
using Financial.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Financial.WebApplication.Controllers
{
public class AccountController : BaseController
{
private IUnitOfWork _unitOfWork;
private IBusinessService _businessService;
public AccountController(IUnitOfWork unitOfWork, IBusinessService businessService)
: base()
{
_unitOfWork = unitOfWork;
_businessService = businessService;
}
[HttpGet]
public ViewResult Index()
{
try
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
if (TempData["ErrorMessage"] != null)
{
ViewData["ErrorMessage"] = TempData["ErrorMessage"];
}
// transfer bm to vm
var vmIndexList = _businessService.AccountService.GetListOfAccounts()
.OrderBy(r => r.AssetName)
.Select(r => new IndexViewModel(r))
.ToList();
// display view
return View("Index", vmIndexList);
}
catch (Exception e)
{
// todo: setup logging
TempData["ErrorMessage"] = "Encountered problem";
return View("Index", new List<IndexViewModel>());
}
}
[HttpGet]
public ActionResult Create()
{
try
{
// transfer dto to sli
var sliAssetTypes = _businessService.AccountTypeService.GetAssetTypesDropDownList(null);
// display view
return View("Create", new CreateViewModel(sliAssetTypes));
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(CreateViewModel vmCreate)
{
try
{
if (ModelState.IsValid)
{
// transfer vm to dto
var dtoAsset = new Asset()
{
AssetTypeId = DataTypeUtility.GetIntegerFromString(vmCreate.SelectedAssetTypeId),
Name = vmCreate.AssetName,
IsActive = true
};
_unitOfWork.Assets.Add(dtoAsset);
// update db
_unitOfWork.CommitTrans();
// display view
TempData["SuccessMessage"] = "Asset Created";
return RedirectToAction("Create", "AssetSetting", new { assetId = dtoAsset.Id });
}
TempData["ErrorMessage"] = "Unable to create record. Try again.";
return RedirectToAction("Index", "Asset");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
[HttpGet]
public ActionResult Edit(int id)
{
try
{
// transfer id to dto
var dtoAsset = _unitOfWork.Assets.Get(id);
if (dtoAsset != null)
{
var sliAssetTypes = _businessService.AccountTypeService.GetAssetTypesDropDownList(dtoAsset.AssetTypeId);
// display view
return View("Edit", new EditViewModel(dtoAsset, sliAssetTypes));
}
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Index", "Asset");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(EditViewModel vmEdit)
{
try
{
// transfer vm to dto
var dtoAsset = _unitOfWork.Assets.Get(vmEdit.Id);
if (dtoAsset != null)
{
dtoAsset.Name = vmEdit.Name;
dtoAsset.AssetTypeId = DataTypeUtility.GetIntegerFromString(vmEdit.SelectedAssetTypeId);
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "Record updated.";
return RedirectToAction("Details", "Asset", new { id = vmEdit.Id });
}
TempData["ErrorMessage"] = "Unable to edit record. Try again.";
return RedirectToAction("Index", "Asset");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
[HttpGet]
public ActionResult Details(int id)
{
// get messages from other controllers to display in view
if (TempData["SuccessMessage"] != null)
{
ViewData["SuccessMessage"] = TempData["SuccessMessage"];
}
try
{
// transfer id to dto
var dtoAsset = _unitOfWork.Assets.Get(id);
if (dtoAsset != null)
{
var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
// display view with message
return View("Details", new DetailsViewModel(dtoAsset, dtoAssetType));
}
TempData["ErrorMessage"] = "Unable to display record. Try again.";
return RedirectToAction("Index", "Asset");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
[HttpGet]
public ActionResult Delete(int id)
{
try
{
// transfer id to dto
var dtoAsset = _unitOfWork.Assets.Get(id);
if (dtoAsset != null)
{
var dtoAssetType = _unitOfWork.AssetTypes.Get(dtoAsset.AssetTypeId);
// display view
return View("Delete", new DeleteViewModel(dtoAsset, dtoAssetType));
}
TempData["ErrorMessage"] = "Unable to delete record. Try again.";
return RedirectToAction("Index", "Asset");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Delete(DeleteViewModel vmDelete)
{
try
{
// transfer vm to dto
var dtoAsset = _unitOfWork.Assets.Get(vmDelete.Id);
if (dtoAsset != null)
{
dtoAsset.IsActive = false;
// update db
_unitOfWork.CommitTrans();
// display view with message
TempData["SuccessMessage"] = "Record Deleted";
return RedirectToAction("Index", "Asset");
}
TempData["ErrorMessage"] = "Unable to delete record. Try again.";
return RedirectToAction("Index", "Asset");
}
catch (Exception)
{
TempData["ErrorMessage"] = "Encountered problem";
return RedirectToAction("Index", "Asset");
}
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NSubstitute;
using System.Data.Entity.Infrastructure;
using System.Runtime.Remoting.Metadata.W3cXsd2001;
using Moq.Language.Flow;
using Moq.Language;
using Moq;
namespace Financial.Tests.Mocks
{
public static class MockDbSet
{
public static DbSet<T> Create<T>(IEnumerable<T> data = null) where T : class
{
// create a mock DbSet exposing both DbSet and IQueryable interfaces for setup
var mockDbSet = Substitute.For<DbSet<T>, IQueryable<T>>();
if (data != null)
{
// convert list to queryable
var qData = data.AsQueryable();
// setup all IQueryable methods using what you have from "data"
((IQueryable<T>)mockDbSet).Provider.Returns(qData.Provider);
((IQueryable<T>)mockDbSet).Expression.Returns(qData.Expression);
((IQueryable<T>)mockDbSet).ElementType.Returns(qData.ElementType);
((IQueryable<T>)mockDbSet).GetEnumerator().Returns(qData.GetEnumerator());
}
// bypass include
mockDbSet.Include(Arg.Any<string>())
.Returns(mockDbSet);
return mockDbSet;
}
}
}
<file_sep>using Financial.Core;
using Financial.Data.RepositoryInterfaces;
using Financial.Data.Repositories;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data
{
public class UnitOfWork : IUnitOfWork
{
private readonly FinancialDbContext _context;
private bool _trans;
public UnitOfWork()
{
_context = new FinancialDbContext();
SetRepositories();
}
public UnitOfWork(FinancialDbContext context)
{
_context = context;
SetRepositories();
}
private void SetRepositories()
{
AssetSettings = new AssetSettingRepository(_context);
AssetRelationships = new AssetRelationshipRepository(_context);
Assets = new AssetRepository(_context);
AssetTransactions = new AssetTransactionRepository(_context);
AssetTypeSettingTypes = new AssetTypeSettingTypeRepository(_context);
AssetTypeRelationshipTypes = new AssetTypeRelationshipTypeRepository(_context);
AssetTypes = new AssetTypeRepository(_context);
ParentChildRelationshipTypes = new ParentChildRelationshipTypeRepository(_context);
RelationshipTypes = new RelationshipTypeRepository(_context);
SettingTypes = new SettingTypeRepository(_context);
TransactionCategories = new TransactionCategoryRepository(_context);
TransactionDescriptions = new TransactionDescriptionRepository(_context);
TransactionTypes = new TransactionTypeRepository(_context);
}
public IAssetSettingRepository AssetSettings { get; private set; }
public IAssetRelationshipRepository AssetRelationships { get; private set; }
public IAssetRepository Assets { get; private set; }
public IAssetTransactionRepository AssetTransactions { get; private set; }
public IAssetTypeSettingTypeRepository AssetTypeSettingTypes { get; private set; }
public IAssetTypeRelationshipTypeRepository AssetTypeRelationshipTypes { get; private set; }
public IAssetTypeRepository AssetTypes { get; private set; }
public IParentChildRelationshipTypeRepository ParentChildRelationshipTypes { get; set; }
public IRelationshipTypeRepository RelationshipTypes { get; private set; }
public ISettingTypeRepository SettingTypes { get; private set; }
public ITransactionCategoryRepository TransactionCategories { get; private set; }
public ITransactionDescriptionRepository TransactionDescriptions { get; private set; }
public ITransactionTypeRepository TransactionTypes { get; private set; }
public void BeginTrans()
{
_trans = true;
}
public void CommitTrans()
{
_trans = false;
Complete();
}
public void RollBackTrans()
{
_trans = false;
}
public void Complete()
{
if(!_trans)
{
_context.SaveChanges();
}
}
public void Dispose()
{
_context.Dispose();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
namespace Financial.Business.Tests.Fakes.Repositories
{
public class InMemoryAssetTypeRepository : InMemoryRepository<AssetType>, IAssetTypeRepository
{
private List<AssetType> _entities = null;
public InMemoryAssetTypeRepository(IEnumerable<AssetType> entities)
: base(entities)
{
_entities = entities as List<AssetType>;
}
public int CountMatching(string name)
{
return _entities
.Count(r => r.Name == name);
}
public int CountMatching(int excludeId, string name)
{
return _entities
.Where(r => r.Id != excludeId)
.Count(r => r.Name == name);
}
public IEnumerable<AssetType> GetAllActiveOrderedByName()
{
throw new NotImplementedException();
}
public IEnumerable<AssetType> GetAllOrderedByName()
{
return _entities
.OrderBy(r => r.Name)
.ToList();
}
}
}
<file_sep>using Financial.Core;
using Financial.Core.Models;
using Financial.Data.RepositoryInterfaces;
using System;
using System.Data.Entity;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.Repositories
{
public class AssetRepository : Repository<Asset>, IAssetRepository
{
public AssetRepository(FinancialDbContext context)
: base(context)
{
}
private FinancialDbContext FinancialDbContext
{
get { return _context as FinancialDbContext; }
}
public new Asset Get(int id)
{
return FinancialDbContext.Assets
.Include(r => r.AssetType)
.FirstOrDefault(r => r.Id == id);
}
public IEnumerable<Asset> GetAllActiveOrderedByName()
{
return FinancialDbContext.Assets
.Include(r => r.AssetType)
.Where(r => r.IsActive)
.OrderBy(r => r.Name)
.ToList();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.SettingType
{
public class IndexViewModel
{
public IndexViewModel()
{
}
public IndexViewModel(Business.Models.AttributeType bmSettingType)
{
Id = bmSettingType.SettingTypeId;
Name = bmSettingType.SettingTypeName;
//IsActive = bmSettingType.IsActive;
}
public int Id { get; set; }
[Required]
public string Name { get; set; }
[Display(Name = "Active")]
public bool IsActive { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Core;
namespace Financial.Business.Models
{
public class Account
{
public Account()
{
}
public Account(Core.Models.Asset dtoAsset)
{
AssetId = dtoAsset.Id;
AssetName = dtoAsset.Name;
AssetTypeId = dtoAsset.AssetTypeId;
AssetTypeName = dtoAsset.AssetType.Name;
}
public Account(Core.Models.Asset dtoAsset, Core.Models.AssetType dtoAssetType)
{
AssetId = dtoAsset.Id;
AssetName = dtoAsset.Name;
AssetTypeId = dtoAssetType.Id;
AssetTypeName = dtoAssetType.Name;
}
public int AssetId { get; set; }
public string AssetName { get; set; }
public int AssetTypeId { get; set; }
public string AssetTypeName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using Financial.Core.Models;
using System.Data.Entity;
using Moq;
using Financial.Core;
using Financial.Data.Repositories;
using Financial.Tests.Mocks;
namespace Financial.Tests.Data.Repositories
{
[TestFixture]
public class AssetTypeRepositoryTests
{
private Asset _dbAsset;
private AssetType _dbAssetType;
private DbSet<Asset> _mockAssetDbSet;
private DbSet<AssetType> _mockAssetTypeDbSet;
private Mock<FinancialDbContext> _mockDbContext;
private FinancialDbContext _fakeDbContext;
private int _callCount;
private AssetTypeRepository _repository;
[SetUp]
public void SetUp()
{
// setup fake model
_dbAssetType = new AssetType { Id = 1, Name = "a", IsActive = true };
_dbAsset = new Asset { Id = 2, AssetTypeId = _dbAssetType.Id, Name = "b", IsActive = true };
// setup DbContext
Setup_FakeDbContext();
// set up repository
_repository = new AssetTypeRepository(_fakeDbContext);
}
[TearDown]
public void TearDown()
{
}
[Test]
public void Get_WhenCalled_ReturnAssetType_Test()
{
var result = _repository.Get(_dbAssetType.Id);
Assert.That(result, Is.InstanceOf<AssetType>());
}
// private methods
private void Setup_FakeDbContext()
{
// setup dbContext
Setup_FakeDbContext(
new List<Asset> { _dbAsset },
new List<AssetType> { _dbAssetType });
}
private void Setup_FakeDbContext(
List<Asset> fakeAssetList,
List<AssetType> fakeAssetTypeList)
{
// setup dbContext
_fakeDbContext = MockFinancialDbContext.Create(
assets: fakeAssetList,
assetTypes: fakeAssetTypeList);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Dynamitey;
using NUnit.Framework;
using Moq;
using NSubstitute;
using Financial.Data;
using Financial.Core;
using Financial.Core.Models;
using Financial.Data.Repositories;
using Financial.Data.RepositoryInterfaces;
using Financial.Tests.Mocks;
using Financial.Tests._DmitriNesteruk;
namespace Financial.Tests.Data.Repositories
{
[TestFixture]
public class AssetRepositoryTests
{
    // Seed entity graph shared by every test; recreated in SetUp so tests stay independent.
    // NOTE: removed the unused _mockAssetTypeDbSet field — only the Asset DbSet is ever mocked.
    private Asset _dbAsset;
    private AssetSetting _dbAssetSetting;
    private AssetType _dbAssetType;
    private SettingType _dbSettingType;
    private DbSet<Asset> _mockAssetDbSet;
    private Mock<FinancialDbContext> _mockDbContext;
    private FinancialDbContext _fakeDbContext;
    private int _callCount;
    private AssetRepository _repository;

    [SetUp]
    public void SetUp()
    {
        // setup fake model graph; navigation properties are wired so Include() works
        _dbAssetType = new AssetType { Id = 1, Name = "a", IsActive = true };
        _dbAsset = new Asset
        {
            Id = 2,
            AssetTypeId = _dbAssetType.Id,
            AssetType = _dbAssetType, // setup include
            Name = "b",
            IsActive = true
        };
        _dbSettingType = new SettingType { Id = 3, Name = "c", IsActive = true };
        _dbAssetSetting = new AssetSetting
        {
            Id = 4,
            AssetId = _dbAsset.Id,
            Asset = _dbAsset, // setup include
            SettingTypeId = _dbSettingType.Id,
            SettingType = _dbSettingType, // setup include
            Value = "d",
            IsActive = true
        };

        // setup DbContext
        Setup_FakeDbContext();

        // set up repository (system under test)
        _repository = new AssetRepository(_fakeDbContext);
    }

    [TearDown]
    public void TearDown()
    {
    }

    [Test]
    public void Add_WhenAssetProvided_ShouldCallDbContextSetAssetProperty_Test()
    {
        Setup_Repository_MockDbContext(new List<Asset>());

        // Arrange
        var newAsset = new Asset { /*Id = 1,*/ AssetTypeId = 2, Name = "b", IsActive = true };
        // reset count for repository call
        _callCount = 0;

        // Act
        _repository.Add(newAsset);

        // Assert: Add must hit DbContext.Set<Asset>() exactly once
        Assert.That(_callCount, Is.EqualTo(1));
    }

    [Test]
    public void Add_WhenAssetProvided_AddEntityToDbContext_Test()
    {
        // Arrange: real context; entity is only tracked locally, never saved
        _fakeDbContext = new FinancialDbContext();
        _repository = new AssetRepository(_fakeDbContext);
        var expectedAsset = new Asset { /*Id = 1,*/ AssetTypeId = 2, Name = "b", IsActive = true };

        // Act
        _repository.Add(expectedAsset);
        var actualAsset = _fakeDbContext.Assets.Local.ToList()[0];

        // Assert
        Assert.Multiple(() =>
        {
            Assert.That(actualAsset.Id, Is.EqualTo(0), "Asset Id");
            Assert.That(actualAsset.Name, Is.EqualTo(expectedAsset.Name), "Asset Name");
            Assert.That(actualAsset.AssetTypeId, Is.EqualTo(expectedAsset.AssetTypeId), "AssetType Id");
            Assert.That(actualAsset.IsActive, Is.EqualTo(expectedAsset.IsActive), "IsActive");
        });
    }

    [Test]
    public void Get_WhenCalled_ReturnAsset_Test()
    {
        var result = _repository.Get(_dbAsset.Id);

        Assert.That(result, Is.InstanceOf<Asset>());
    }

    [Test]
    public void Get_WhenCalled_ReturnAssetValues_Test()
    {
        var result = _repository.Get(_dbAsset.Id);

        Assert.Multiple(() =>
        {
            Assert.That(result.Id, Is.EqualTo(_dbAsset.Id), "Asset Id");
            Assert.That(result.AssetTypeId, Is.EqualTo(_dbAsset.AssetTypeId), "AssetType Id");
            Assert.That(result.AssetType.Name, Is.EqualTo(_dbAssetType.Name), "AssetType Name");
            Assert.That(result.Name, Is.EqualTo(_dbAsset.Name), "Asset Name");
            Assert.That(result.IsActive, Is.EqualTo(_dbAsset.IsActive), "IsActive");
        });
    }

    [Test]
    public void GetAllActiveOrderedByName_WhenCalled_ReturnAssetIEnumerable_Test()
    {
        var result = _repository.GetAllActiveOrderedByName();

        Assert.That(result, Is.InstanceOf<IEnumerable<Asset>>());
    }

    [Test]
    public void GetAllActiveOrderedByName_WhenCalled_ReturnAssetValues_Test()
    {
        var result = _repository.GetAllActiveOrderedByName().ToList();

        Assert.Multiple(() =>
        {
            Assert.That(result[0].Id, Is.EqualTo(_dbAsset.Id), "Asset Id");
            Assert.That(result[0].AssetTypeId, Is.EqualTo(_dbAsset.AssetTypeId), "AssetType Id");
            Assert.That(result[0].Name, Is.EqualTo(_dbAsset.Name), "Asset Name");
            Assert.That(result[0].IsActive, Is.EqualTo(_dbAsset.IsActive), "IsActive");
        });
    }

    [Test]
    public void GetAllActiveOrderedByName_WhenIsActiveEqualsTrue_ReturnAsset_Test()
    {
        var fakeAssetTypes = new List<AssetType> { _dbAssetType };
        var fakeAssets = new List<Asset>
        {
            new Asset { Id = 1, AssetTypeId = _dbAssetType.Id, Name = "a", IsActive = true },
        };
        Setup_Repository_FakeDbContext(fakeAssets, fakeAssetTypes);

        var result = _repository.GetAllActiveOrderedByName();

        Assert.That(result.Count(), Is.EqualTo(1));
    }

    [Test]
    public void GetAllActiveOrderedByName_WhenIsActiveEqualsFalse_DoNotReturnAsset_Test()
    {
        var fakeAssetTypes = new List<AssetType> { _dbAssetType };
        var fakeAssets = new List<Asset>
        {
            new Asset { Id = 1, AssetTypeId = _dbAssetType.Id, Name = "a", IsActive = false },
        };
        Setup_Repository_FakeDbContext(fakeAssets, fakeAssetTypes);

        var result = _repository.GetAllActiveOrderedByName();

        Assert.That(result.Count(), Is.EqualTo(0));
    }

    [Test]
    public void GetAllActiveOrderedByName_WhenMultipleAssetsFound_ReturnListSortedAscendingByAssetName_Test()
    {
        var fakeAssetTypes = new List<AssetType> { _dbAssetType };
        var fakeAssets = new List<Asset>
        {
            new Asset { Id = 1, AssetTypeId = _dbAssetType.Id, Name = "z", IsActive = true },
            new Asset { Id = 2, AssetTypeId = _dbAssetType.Id, Name = "a", IsActive = true }
        };
        Setup_Repository_FakeDbContext(fakeAssets, fakeAssetTypes);

        var result = _repository.GetAllActiveOrderedByName().ToList();

        Assert.Multiple(() =>
        {
            Assert.That(result[0].Name, Is.EqualTo("a"), "First Index");
            Assert.That(result[1].Name, Is.EqualTo("z"), "Second Index");
        });
    }

    // private methods

    /// <summary>
    /// Builds the fake DbContext from the default seed entity graph created in SetUp.
    /// </summary>
    private void Setup_FakeDbContext()
    {
        Setup_FakeDbContext(
            new List<Asset> { _dbAsset },
            new List<AssetType> { _dbAssetType },
            new List<AssetSetting> { _dbAssetSetting },
            new List<SettingType> { _dbSettingType });
    }

    /// <summary>
    /// Builds the fake DbContext from fully specified entity lists.
    /// </summary>
    private void Setup_FakeDbContext(
        List<Asset> fakeAssetList,
        List<AssetType> fakeAssetTypeList,
        List<AssetSetting> fakeAssetSettingList,
        List<SettingType> fakeSettingTypeList)
    {
        _fakeDbContext = MockFinancialDbContext.Create(
            assets: fakeAssetList,
            assetTypes: fakeAssetTypeList,
            assetSettings: fakeAssetSettingList,
            settingTypes: fakeSettingTypeList);
    }

    /// <summary>
    /// Builds the fake DbContext from asset and asset-type lists only.
    /// </summary>
    private void Setup_FakeDbContext(
        List<Asset> fakeAssetList,
        List<AssetType> fakeAssetTypeList)
    {
        _fakeDbContext = MockFinancialDbContext.Create(
            assets: fakeAssetList,
            assetTypes: fakeAssetTypeList);
    }

    /// <summary>
    /// Rebuilds the repository on top of a fake DbContext seeded with the given lists.
    /// </summary>
    private void Setup_Repository_FakeDbContext(List<Asset> fakeAssetList, List<AssetType> fakeAssetTypeList)
    {
        Setup_FakeDbContext(fakeAssetList, fakeAssetTypeList);
        _repository = new AssetRepository(_fakeDbContext);
    }

    /// <summary>
    /// Rebuilds the repository on top of a Moq DbContext whose Set&lt;Asset&gt;() calls
    /// are counted in <see cref="_callCount"/>.
    /// </summary>
    private void Setup_Repository_MockDbContext(List<Asset> fakeAssets)
    {
        // setup DbSet
        _mockAssetDbSet = MockDbSet.Create<Asset>(fakeAssets);

        // setup DbContext; every Set<Asset>() call increments the counter
        _callCount = 0;
        _mockDbContext = new Mock<FinancialDbContext>();
        _mockDbContext.Setup(c => c.Set<Asset>())
            .Returns(_mockAssetDbSet)
            .Callback(() => _callCount++);

        // set up repository
        _repository = new AssetRepository(_mockDbContext.Object);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.WebApplication.Models.ViewModels.AssetSetting
{
/// <summary>
/// View model for editing a single setting value attached to an asset.
/// </summary>
public class EditViewModel
{
    // Parameterless constructor required for MVC model binding on POST.
    public EditViewModel() { }

    /// <summary>
    /// Flattens the asset-setting entity plus its parent asset and setting type
    /// into the fields the Edit view binds to.
    /// </summary>
    public EditViewModel(Core.Models.AssetSetting dtoAssetSetting, Core.Models.Asset dtoAsset,
        Core.Models.SettingType dtoSettingType)
    {
        Id = dtoAssetSetting.Id;
        AssetId = dtoAsset.Id;
        SettingTypeId = dtoSettingType.Id;
        SettingTypeName = dtoSettingType.Name;
        Value = dtoAssetSetting.Value;
        IsActive = dtoAssetSetting.IsActive;
    }

    public int Id { get; set; }
    public int AssetId { get; set; }
    public int SettingTypeId { get; set; }
    public string SettingTypeName { get; set; }
    public string Value { get; set; }
    public bool IsActive { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Supplies the canonical seed data of setting types used by the fake in-memory repositories.
/// </summary>
public class FakeSettingTypes
{
    /// <summary>
    /// Lazily yields five setting types named "SettingTypeName1".."SettingTypeName5";
    /// only id 3 is inactive. Each enumeration produces fresh instances.
    /// </summary>
    public static IEnumerable<SettingType> InitialFakeSettingTypes()
    {
        for (var id = 1; id <= 5; id++)
        {
            yield return new SettingType()
            {
                Id = id,
                Name = "SettingTypeName" + id,
                IsActive = id != 3,
            };
        }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
/// <summary>
/// View model for editing which asset an account transaction belongs to.
/// </summary>
public class EditAssetViewModel
{
    // Parameterless constructor required for MVC model binding on POST.
    public EditAssetViewModel() { }

    /// <summary>
    /// Flattens the transaction entity and its lookup values into display-ready strings.
    /// </summary>
    /// <param name="dtoAssetTransaction">Transaction being edited.</param>
    /// <param name="sliAssets">Select-list of assets the user can pick from.</param>
    /// <param name="selectdAssetId">Currently selected asset id. (Typo'd name kept for
    /// backward compatibility with any named-argument callers.)</param>
    /// <param name="dtoAssetType">Asset type of the selected asset.</param>
    /// <param name="transactionType">Display name of the transaction type.</param>
    /// <param name="transactionCategory">Display name of the transaction category.</param>
    public EditAssetViewModel(Core.Models.AssetTransaction dtoAssetTransaction,
        List<SelectListItem> sliAssets, string selectdAssetId,
        Core.Models.AssetType dtoAssetType, string transactionType, string transactionCategory)
    {
        Id = dtoAssetTransaction.Id;
        Assets = sliAssets;
        SelectedAssetId = selectdAssetId;
        AssetTypeName = dtoAssetType.Name;
        // NOTE(review): "MM/dd/yyyy" and "{0:C}" format with the current culture
        // ("/" is the culture's date separator) — confirm invariant/US formatting
        // is not required here.
        DueDate = dtoAssetTransaction.DueDate.ToString("MM/dd/yyyy");
        ClearDate = dtoAssetTransaction.ClearDate.ToString("MM/dd/yyyy");
        CheckNumber = dtoAssetTransaction.CheckNumber;
        Amount = string.Format("{0:C}", dtoAssetTransaction.Amount);
        Note = dtoAssetTransaction.Note;
        // BUG FIX: TransactionType was previously assigned from transactionCategory,
        // so the view showed the category in both fields.
        TransactionType = transactionType;
        TransactionCategory = transactionCategory;
    }

    public int Id { get; set; }

    [Required]
    [Display(Name = "Asset Name")]
    public string SelectedAssetId { get; set; }

    public IEnumerable<SelectListItem> Assets { get; set; }

    [Display(Name = "Asset Type")]
    public string AssetTypeName { get; set; }

    [Display(Name = "Check Number")]
    public string CheckNumber { get; set; }

    [Display(Name = "Due")]
    [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}")]
    public string DueDate { get; set; }

    [Display(Name = "Cleared")]
    [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}")]
    public string ClearDate { get; set; }

    public string Amount { get; set; }
    public string Note { get; set; }

    [Display(Name = "Type")]
    public string TransactionType { get; set; }

    [Display(Name = "Category")]
    public string TransactionCategory { get; set; }
}
}
<file_sep>using AutoMapper;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Configuration;
namespace Financial.WebApplication.App_Start
{
/// <summary>
/// AutoMapper profile for the web application.
/// Currently an empty stub — no type maps are registered yet.
/// </summary>
public class MappingProfile : Profile
{
    public MappingProfile()
    {
        // No mappings configured yet; add CreateMap<TSource, TDest>() calls here.
        //Mapper.Map<>();
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
/// <summary>
/// Read-only view model shown on the delete-confirmation page for an account transaction.
/// </summary>
public class DeleteViewModel
{
    // Parameterless constructor required for MVC model binding on POST.
    public DeleteViewModel() { }

    /// <summary>
    /// Copies the business-layer transaction model into display fields.
    /// </summary>
    public DeleteViewModel(Business.Models.AccountTransaction bmAssetTransaction)
    {
        Id = bmAssetTransaction.AssetTransactionId;
        AssetId = bmAssetTransaction.AssetId;
        AssetName = bmAssetTransaction.AssetName;
        AssetTypeName = bmAssetTransaction.AssetTypeName;
        DueDate = bmAssetTransaction.DueDate;
        ClearDate = bmAssetTransaction.ClearDate;
        CheckNumber = bmAssetTransaction.CheckNumber;
        Amount = bmAssetTransaction.Amount;
        Note = bmAssetTransaction.Note;
        TransactionTypeName = bmAssetTransaction.TransactionTypeName;
        TransactionCategoryName = bmAssetTransaction.TransactionCategoryName;
    }

    public int Id { get; set; }
    public int AssetId { get; set; }

    [Display(Name = "Asset Name")]
    public string AssetName { get; set; }

    [Display(Name = "Asset Type")]
    public string AssetTypeName { get; set; }

    [Display(Name = "Check Number")]
    public string CheckNumber { get; set; }

    [Display(Name = "Due")]
    [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}", ApplyFormatInEditMode = true)]
    public DateTime DueDate { get; set; }

    [Display(Name = "Cleared")]
    [DisplayFormat(DataFormatString = "{0:MM/dd/yyyy}", ApplyFormatInEditMode = true)]
    public DateTime ClearDate { get; set; }

    public decimal Amount { get; set; }
    public string Note { get; set; }

    [Display(Name = "Type")]
    public string TransactionTypeName { get; set; }

    [Display(Name = "Category")]
    public string TransactionCategoryName { get; set; }
}
}<file_sep>using Financial.Business.Services;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Financial.Business.Tests.Fakes.Repositories;
using System.Web.Mvc;
using NUnit.Framework;
namespace Financial.Business.Tests.Services
{
/*
public class AssetTransactionServiceTestsBase : ServiceTestsBase
{
public AssetTransactionServiceTestsBase()
{
_service = new AssetTransactionService(_unitOfWork);
}
protected AssetTransactionService _service;
}
*/
/// <summary>
/// Well-known seed ids used by the fake repositories in these service tests.
/// </summary>
public static class AssetTransactionServiceObjectMother
{
    // NOTE(review): public mutable statics — the tests here only read them;
    // consider making them const/readonly if nothing else reassigns them.
    public static int AssetTypeIdForCreditCard = 3;
    public static int SettingTypeIdForAccountNumber = 1;
}
[TestFixture]
public class AssetTransactionServiceTests : ServiceTestsBase
{
private AccountTransactionService _service;
// SetUp Setting
// TearDown Setting
[SetUp]
public void SetUp()
{
ResetUnitOfWork();
_service = new AccountTransactionService(_unitOfWork);
}
[Test]
public void GetListOfActiveTransactions_WhenNoInputValues_ReturnList_Test()
{
// Arrange
// Act
var result = _service.GetListOfActiveTransactions();
// Assert
Assert.IsInstanceOf(typeof(List<Business.Models.AccountTransaction>), result, "Result Type");
Assert.That(result, Is.TypeOf<Business.Models.AccountTransaction>());
Assert.IsNotNull(result, "Asset Transaction List");
}
/*
[Test]
public void GetTransactionOptions_WhenValidAssetIdProvided_ReturnAssetId_Test()
{
// Arrange
// Act
var result = _service.GetTransactionOptions(1);
// Assert
Assert.That(result.AssetId, Is.EqualTo(1));
}
*/
/*
[Test]
[TestCase(99, 0)]
[TestCase(null, 0)]
public void GetTransactionOptions_WhenInvalidAssetIdProvided_ReturnAssetIdEqualsZero_Test(int? assetId, int expectedResult)
{
// Arrange
//var expAssetId = 1;
//var sut = _service;
// Act
var result = _service.GetTransactionOptions(assetId);
// Assert
Assert.That(result.AssetId, Is.EqualTo(0));
}
*/
/*
[Test]
public void GetTransactionOptions_WhenAssetIdExists_ReturnAssetTransaction_Test()
{
// Arrange
var expAssetId = 1;
//var sut = _service;
// Act
var result = _service.GetTransactionOptions(expAssetId);
// Assert
Assert.IsInstanceOf(typeof(Business.Models.AssetTransaction), result, "Result Type");
Assert.IsNotNull(result, "Asset Transaction");
}
[Test]
public void GetTransactionOptions_WhenAssetIdNotFound_ReturnNull_Test()
{
// Arrange
int? expAssetId = 99;
//var sut = _service;
// Act
var result = _service.GetTransactionOptions(expAssetId);
// Assert
Assert.IsNull(result, "Asset Transaction");
}
[Test]
public void GetTransactionOptions_WhenAssetIdIsNull_ReturnNull_Test()
{
// Arrange
int? expAssetId = null;
//var sut = _service;
// Act
var result = _service.GetTransactionOptions(expAssetId);
// Assert
Assert.IsNull(result, "Asset Transaction");
}
*/
/*
[Test]
public void GetTransactionOptions_WhenAssetIdNotFound_ThrowException()
{
var result = _service.GetTransactionOptions(99);
Assert.That(() => _service.GetTransactionOptions(99), Throws.ArgumentNullException);
}
*/
/*
[Test]
[TestCase("1", true)]
[TestCase("0", false)]
[TestCase("99", false)]
[TestCase(null, false)]
public void GetAssetSelectList_WhenCalled_ReturnSelectListWithSelectedValue_Test(string selectedId, bool expectedResult)
{
// Arrange
// Act
var result = _service.GetAssetSelectList(selectedId);
// Assert
Assert.That(result.Any(r => r.Selected), Is.EqualTo(expectedResult));
}
*/
/*
[Test]
public void GetAssetSelectList_WhenSuccess_ReturnSelectList_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = null;
// Act
var result = _service.GetAssetSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsNotNull(result, "Result");
Assert.AreNotEqual(0, result.Count, "Result Count");
}
[Test]
public void GetAssetSelectList_WhenSelectedValueIsNull_ReturnNoSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = null;
// Act
var result = _service.GetAssetSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsFalse(result.Any(r => r.Selected), "Result Selected Value");
}
[Test]
public void GetAssetSelectList_WhenSelectedValueIsFound_ReturnSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = "1";
// Act
var result = _service.GetAssetSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual(expSelectedId, result.FirstOrDefault(r => r.Selected).Value, "Result Selected Value");
}
[Test]
public void GetAssetSelectList_WhenSelectedValueIsNotFound_ReturnNoSelectedValue_Test()
{
// Arrange
// sut = _service;
string expSelectedId = "99";
// Act
var result = _service.GetAssetSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsFalse(result.Any(r => r.Selected), "Result Selected Value");
}
[Test]
public void GetAssetSelectList_WhenSuccess_ReturnActiveAssets_Test()
{
// Arrange
var _dataAssets = new List<Core.Models.Asset>()
{
new Core.Models.Asset() {Id = 10, Name = "Active", IsActive = true },
new Core.Models.Asset() {Id = 11, Name = "Not Active", IsActive = false },
};
_unitOfWork.Assets = new InMemoryAssetRepository(_dataAssets);
_service = new AssetTransactionService(_unitOfWork);
string expSelectedId = null;
// Act
var result = _service.GetAssetSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual(1, result.Count, "Result Count");
}
[Test]
public void GetAssetSelectList_WhenSuccess_ReturnOrderedByTransactionTypeName_Test()
{
// Arrange
var _dataAssets = new List<Core.Models.Asset>()
{
new Core.Models.Asset() {Id = 10, Name = "Z", IsActive = true },
new Core.Models.Asset() {Id = 11, Name = "A", IsActive = true },
new Core.Models.Asset() {Id = 12, Name = "B", IsActive = true },
};
_unitOfWork.Assets = new InMemoryAssetRepository(_dataAssets);
_service = new AssetTransactionService(_unitOfWork);
string expSelectedId = "99";
// Act
var result = _service.GetAssetSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual("A", result[0].Text, "Result Name A");
Assert.AreEqual("B", result[1].Text, "Result Name B");
Assert.AreEqual("Z", result[2].Text, "Result Name Z");
}
*/
[Test]
[Ignore("Because I am testing this feature")]
public void AddTransaction_WhenAssetTransactionIsValid_ReturnTrue_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetId = 1,
TransactionTypeId = 2,
TransactionCategoryId = 3
};
//var sut = _service;
// Act
var result = _service.AddTransaction(expAssetTransaction);
// Assert
Assert.IsTrue(result, "Result");
}
[Test]
public void AddTransaction_WhenAssetIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetId = 99,
TransactionTypeId = 2,
TransactionCategoryId = 3
};
//var sut = _service;
// Act
var result = _service.AddTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void AddTransaction_WhenTransactionTypeIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetId = 1,
TransactionTypeId = 99,
TransactionCategoryId = 3
};
//var sut = _service;
// Act
var result = _service.AddTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void AddTransaction_WhenTransactionCateogryIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetId = 1,
TransactionTypeId = 2,
TransactionCategoryId = 99
};
//var sut = _service;
// Act
var result = _service.AddTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void GetTransactionToEdit_WhenAssetTransactionIdIsValid_ReturnAssetTransaction_Test()
{
// Arrange
var expAssetTransactionId = 1;
//var sut = _service;
// Act
var result = _service.GetTransactionToEdit(expAssetTransactionId);
// Assert
Assert.IsInstanceOf(typeof(Business.Models.AccountTransaction), result, "Result Type");
Assert.IsNotNull(result, "Asset Transaction List");
//Assert.IsNotNull(result.TransactionTypeSelectList, "Transaction Type List");
//Assert.IsNotNull(result.TransactionCategorySelectList, "Transaction Category List");
}
[Test]
public void GetTransactionToEdit_WhenAssetTransactionIdNotFound_ReturnNull_Test()
{
// Arrange
var expAssetTransactionId = 99;
//var sut = _service;
// Act
var result = _service.GetTransactionToEdit(expAssetTransactionId);
// Assert
Assert.IsNull(result, "Result");
}
[Test]
public void GetAssetIdentificationInformation_WhenAssetIsValid_ReturnAssetName_Test()
{
// Arrange
var expAsset = new Core.Models.Asset()
{
Id = 1,
AssetTypeId = 2,
Name = "Asset Name",
IsActive = true,
};
// var sut = _service;
// Act
var result = _service.GetAssetIdentificationInformation(expAsset);
// Assert
Assert.IsInstanceOf(typeof(string), result, "Result Type");
Assert.IsNotNull(result, "Formatted Asset Name");
}
[Test]
public void GetAssetIdentificationInformation_WhenAssetIsNull_ReturnEmptyString_Test()
{
// Arrange
//var sut = _service;
// Act
var result = _service.GetAssetIdentificationInformation(null);
// Assert
Assert.IsInstanceOf(typeof(string), result, "Result Type");
Assert.AreEqual(string.Empty, result, "Formatted Asset Name");
}
[Test]
public void GetAssetIdentificationInformation_WhenAssetTypeIsCreditCard_ReturnUpdatedAssetName_Test()
{
// Arrange
var expAsset = new Core.Models.Asset()
{
Id = 1,
AssetTypeId = AssetTransactionServiceObjectMother.AssetTypeIdForCreditCard,
Name = "Asset Name",
IsActive = true,
};
var expSettingTypeId = AssetTransactionServiceObjectMother.SettingTypeIdForAccountNumber;
var _dataSettingTypes = new List<Core.Models.SettingType>()
{
new Core.Models.SettingType()
{
Id = expSettingTypeId,
IsActive = true,
}
};
var _dataAssetSettings = new List<Core.Models.AssetSetting>()
{
new Core.Models.AssetSetting() {
Id = 10,
AssetId = expAsset.Id,
SettingTypeId = expSettingTypeId,
Value = "1234",
IsActive = true,
}
};
_unitOfWork.AssetSettings = new InMemoryAssetSettingRepository(_dataAssetSettings);
var expAssetName = expAsset.Name + " (1234)";
_service = new AccountTransactionService(_unitOfWork);
// Act
var result = _service.GetAssetIdentificationInformation(expAsset);
// Assert
Assert.IsInstanceOf(typeof(string), result, "Result Type");
Assert.AreEqual(expAssetName, result, "Asset Name");
}
[Test]
public void GetTransactionTypeSelectList_WhenSuccess_ReturnSelectList_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = null;
// Act
var result = _service.GetTransactionTypeSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsNotNull(result, "Result");
Assert.AreNotEqual(0, result.Count, "Result Count");
}
[Test]
public void GetTransactionTypeSelectList_WhenSelectedValueIsNull_ReturnNoSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = null;
// Act
var result = _service.GetTransactionTypeSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsFalse(result.Any(r => r.Selected), "Result Selected Value");
}
[Test]
public void GetTransactionTypeSelectList_WhenSelectedValueIsFound_ReturnSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = "1";
// Act
var result = _service.GetTransactionTypeSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual(expSelectedId, result.FirstOrDefault(r => r.Selected).Value, "Result Selected Value");
}
[Test]
public void GetTransactionTypeSelectList_WhenSelectedValueIsNotFound_ReturnNoSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = "99";
// Act
var result = _service.GetTransactionTypeSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsFalse(result.Any(r => r.Selected), "Result Selected Value");
}
[Test]
public void GetTransactionTypeSelectList_WhenSuccess_ReturnActiveTransactionTypes_Test()
{
// Arrange
var _dataTransactionTypes = new List<Core.Models.TransactionType>()
{
new Core.Models.TransactionType() {Id = 10, Name = "Active", IsActive = true },
new Core.Models.TransactionType() {Id = 11, Name = "Not Active", IsActive = false },
};
_unitOfWork.TransactionTypes = new InMemoryTransactionTypeRepository(_dataTransactionTypes);
_service = new AccountTransactionService(_unitOfWork);
string expSelectedId = null;
// Act
var result = _service.GetTransactionTypeSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual(1, result.Count, "Result Count");
}
[Test]
public void GetTransactionTypeSelectList_WhenSuccess_ReturnOrderedByTransactionTypeName_Test()
{
// Arrange
var _dataTransactionTypes = new List<Core.Models.TransactionType>()
{
new Core.Models.TransactionType() {Id = 10, Name = "Z", IsActive = true },
new Core.Models.TransactionType() {Id = 11, Name = "A", IsActive = true },
new Core.Models.TransactionType() {Id = 12, Name = "B", IsActive = true },
};
_unitOfWork.TransactionTypes = new InMemoryTransactionTypeRepository(_dataTransactionTypes);
_service = new AccountTransactionService(_unitOfWork);
string expSelectedId = "99";
// Act
var result = _service.GetTransactionTypeSelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual("A", result[0].Text, "Result Name A");
Assert.AreEqual("B", result[1].Text, "Result Name B");
Assert.AreEqual("Z", result[2].Text, "Result Name Z");
}
[Test]
public void GetTransactionCategorySelectList_WhenSuccess_ReturnSelectList_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = null;
// Act
var result = _service.GetTransactionCategorySelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsNotNull(result, "Result");
Assert.AreNotEqual(0, result.Count, "Result Count");
}
[Test]
public void GetTransactionCategorySelectList_WhenSelectedValueIsNull_ReturnNoSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = null;
// Act
var result = _service.GetTransactionCategorySelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsFalse(result.Any(r => r.Selected), "Result Selected Value");
}
[Test]
public void GetTransactionCategorySelectList_WhenSelectedValueIsFound_ReturnSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = "1";
// Act
var result = _service.GetTransactionCategorySelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual(expSelectedId, result.FirstOrDefault(r => r.Selected).Value, "Result Selected Value");
}
[Test]
public void GetTransactionCategorySelectList_WhenSelectedValueIsNotFound_ReturnNoSelectedValue_Test()
{
// Arrange
//var sut = _service;
string expSelectedId = "99";
// Act
var result = _service.GetTransactionCategorySelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.IsFalse(result.Any(r => r.Selected), "Result Selected Value");
}
[Test]
public void GetTransactionCategorySelectList_WhenSuccess_ReturnActiveTransactionTypes_Test()
{
// Arrange
var _dataTransactionCategories = new List<Core.Models.TransactionCategory>()
{
new Core.Models.TransactionCategory() {Id = 10, Name = "Active", IsActive = true },
new Core.Models.TransactionCategory() {Id = 11, Name = "Not Active", IsActive = false },
};
_unitOfWork.TransactionCategories = new InMemoryTransactionCategoryRepository(_dataTransactionCategories);
_service = new AccountTransactionService(_unitOfWork);
string expSelectedId = null;
// Act
var result = _service.GetTransactionCategorySelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual(1, result.Count, "Result Count");
}
[Test]
public void GetTransactionCategorySelectList_WhenSuccess_ReturnOrderedByTransactionTypeName_Test()
{
// Arrange
var _dataTransactionCategories = new List<Core.Models.TransactionCategory>()
{
new Core.Models.TransactionCategory() {Id = 10, Name = "Z", IsActive = true },
new Core.Models.TransactionCategory() {Id = 11, Name = "A", IsActive = true },
new Core.Models.TransactionCategory() {Id = 12, Name = "B", IsActive = true },
};
_unitOfWork.TransactionCategories = new InMemoryTransactionCategoryRepository(_dataTransactionCategories);
_service = new AccountTransactionService(_unitOfWork);
string expSelectedId = "99";
// Act
var result = _service.GetTransactionCategorySelectList(expSelectedId);
// Assert
Assert.IsInstanceOf(typeof(List<SelectListItem>), result, "Result Type");
Assert.AreEqual("A", result[0].Text, "Result Name A");
Assert.AreEqual("B", result[1].Text, "Result Name B");
Assert.AreEqual("Z", result[2].Text, "Result Name Z");
}
[Test]
public void UpdateTransaction_WhenAssetTransactionIsValid_ReturnTrue_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetTransactionId = 1,
AssetId = 2,
TransactionTypeId = 3,
TransactionCategoryId = 4,
};
//var sut = _service;
// Act
var result = _service.UpdateTransaction(expAssetTransaction);
// Assert
Assert.IsTrue(result, "Result");
}
[Test]
public void UpdateTransaction_WhenAssetTransactionIsNull_ReturnFalse_Test()
{
// Arrange
//var sut = _service;
// Act
var result = _service.UpdateTransaction(null);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void UpdateTransaction_WhenAssetTransactionIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetTransactionId = 99,
AssetId = 2,
TransactionTypeId = 3,
TransactionCategoryId = 4,
};
//var sut = _service;
// Act
var result = _service.UpdateTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void UpdateTransaction_WhenAssetIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetTransactionId = 1,
AssetId = 99,
TransactionTypeId = 3,
TransactionCategoryId = 4,
};
//var sut = _service;
// Act
var result = _service.UpdateTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void UpdateTransaction_WhenTransactionTypeIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetTransactionId = 1,
AssetId = 2,
TransactionTypeId = 99,
TransactionCategoryId = 4,
};
//var sut = _service;
// Act
var result = _service.UpdateTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void UpdateTransaction_WhenTransactionCateogryIdIsNotFound_ReturnFalse_Test()
{
// Arrange
var expAssetTransaction = new Business.Models.AccountTransaction()
{
AssetTransactionId = 1,
AssetId = 2,
TransactionTypeId = 3,
TransactionCategoryId = 99,
};
//var sut = _service;
// Act
var result = _service.UpdateTransaction(expAssetTransaction);
// Assert
Assert.IsFalse(result, "Result");
}
[Test]
public void GetTransactionToDelete_WhenAssetTransactionIdIsValid_ReturnAssetTransaction_Test()
{
// Arrange
//var sut = _service;
var expAssetTransactionId = 1;
// Act
var result = _service.GetTransactionToDelete(expAssetTransactionId);
// Assert
Assert.IsInstanceOf(typeof(Business.Models.AccountTransaction), result, "Result Type");
Assert.IsNotNull(result, "Asset Transaction List");
}
[Test]
public void GetTransactionToDelete_WhenAssetTransactionIdNotFound_ReturnNull_Test()
{
// Arrange
//var sut = _service;
var expAssetTransactionId = 99;
// Act
var result = _service.GetTransactionToDelete(expAssetTransactionId);
// Assert
Assert.IsNull(result, "Result");
}
[Test]
public void DeleteTransaction_WhenAssetTransactionIdIsValid_ReturnTrue_Test()
{
    // Arrange: id 1 is a known transaction in the fake data set.
    const int transactionId = 1;

    // Act
    var result = _service.DeleteTransaction(transactionId);

    // Assert
    Assert.IsInstanceOf<bool>(result, "Result Type");
    Assert.IsTrue(result, "Result");
}
[Test]
public void DeleteTransaction_WhenAssetTransactionIdNotFound_ReturnFalse_Test()
{
    // Arrange: id 99 is not present in the fake data set.
    const int transactionId = 99;

    // Act
    var result = _service.DeleteTransaction(transactionId);

    // Assert: deleting a missing transaction reports failure rather than throwing.
    Assert.IsInstanceOf<bool>(result, "Result Type");
    Assert.IsFalse(result, "Result");
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Mvc;
namespace Financial.WebApplication.Models.ViewModels.AccountTransaction
{
/// <summary>
/// View model backing the "select an asset" step when creating an account transaction.
/// </summary>
public class SelectAssetToCreateViewModel
{
    /// <summary>Parameterless constructor required for model binding.</summary>
    public SelectAssetToCreateViewModel() { }

    /// <summary>Builds the view model from a prepared asset drop-down list.</summary>
    /// <param name="sliAssets">Items for the asset selector.</param>
    public SelectAssetToCreateViewModel(List<SelectListItem> sliAssets) => Assets = sliAssets;

    /// <summary>Id of the asset chosen by the user (posted back as a string).</summary>
    [Required]
    [Display(Name = "Asset Name")]
    public string SelectedAssetId { get; set; }

    /// <summary>Drop-down items shown to the user.</summary>
    public IEnumerable<SelectListItem> Assets { get; set; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Core
{
/// <summary>
/// Abstraction over the EF database context so services can be unit-tested
/// against fake contexts. Exposes every entity set as a read-only DbSet.
/// </summary>
public interface IFinancialDbContext
{
/// <summary>All assets (accounts).</summary>
DbSet<Asset> Assets { get; }
/// <summary>Per-asset setting values.</summary>
DbSet<AssetSetting> AssetSettings { get; }
/// <summary>Parent/child links between assets.</summary>
DbSet<AssetRelationship> AssetRelationships { get; }
/// <summary>Transactions recorded against assets.</summary>
DbSet<AssetTransaction> AssetTransactions { get; }
/// <summary>Asset type lookup table.</summary>
DbSet<AssetType> AssetTypes { get; }
/// <summary>Links asset types to the setting types they support.</summary>
DbSet<AssetTypeSettingType> AssetTypesSettingTypes { get; }
/// <summary>Links asset types to relationship types.</summary>
DbSet<AssetTypeRelationshipType> AssetTypesRelationshipTypes { get; }
/// <summary>Parent/child relationship type lookup.</summary>
DbSet<ParentChildRelationshipType> ParentChildRelationshipTypes { get; }
/// <summary>Relationship type lookup table.</summary>
DbSet<RelationshipType> RelationshipTypes { get; }
/// <summary>Setting type lookup table.</summary>
DbSet<SettingType> SettingTypes { get; }
/// <summary>Transaction category lookup table.</summary>
DbSet<TransactionCategory> TransactionCategories { get; }
/// <summary>Transaction description lookup table.</summary>
DbSet<TransactionDescription> TransactionDescriptions { get; }
/// <summary>Transaction type lookup table.</summary>
DbSet<TransactionType> TransactionTypes { get; }
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Data.RepositoryInterfaces
{
/// <summary>
/// Repository for <see cref="SettingType"/> rows, extending the generic repository
/// with setting-type-specific queries.
/// </summary>
public interface ISettingTypeRepository : IRepository<SettingType>
{
/// <summary>Returns the setting type with the given id if it is active.</summary>
SettingType GetActive(int id);
/// <summary>Returns all setting types ordered by name.</summary>
IEnumerable<SettingType> GetAllOrderedByName();
/// <summary>Counts setting types whose name matches <paramref name="name"/> (duplicate check).</summary>
int CountMatching(string name);
/// <summary>Counts name matches excluding one id (duplicate check during edit).</summary>
int CountMatching(int excludeId, string name);
}
}
<file_sep>using Financial.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.Tests.Fakes.Database
{
/// <summary>
/// Canned AssetTypeSettingType rows used as an in-memory fake table for unit tests.
/// </summary>
public class FakeAssetTypesSettingTypes
{
    /// <summary>
    /// Returns the seed rows; identical ids, values and ordering to the
    /// original iterator, built eagerly as a list.
    /// </summary>
    public static IEnumerable<AssetTypeSettingType> InitialFakeAssetTypesSettingTypes()
    {
        return new List<AssetTypeSettingType>
        {
            new AssetTypeSettingType() { Id = 1, AssetTypeId = 1, SettingTypeId = 4, IsActive = true },
            new AssetTypeSettingType() { Id = 2, AssetTypeId = 1, SettingTypeId = 5, IsActive = true },
            new AssetTypeSettingType() { Id = 3, AssetTypeId = 1, SettingTypeId = 2, IsActive = false },
            new AssetTypeSettingType() { Id = 4, AssetTypeId = 5, SettingTypeId = 2, IsActive = true },
            new AssetTypeSettingType() { Id = 5, AssetTypeId = 4, SettingTypeId = 1, IsActive = true },
        };
    }
}
}
<file_sep>using Financial.Business.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Financial.Business.ServiceInterfaces
{
/// <summary>
/// Service for managing the links between account (asset) types and
/// setting (attribute) types.
/// </summary>
public interface IAccountTypeSettingTypeService
{
/// <summary>Builds the model for creating setting-type links on an asset type.</summary>
Business.Models.AccountType CreateLinkedSettingTypesGetModel(int assetTypeId);
/// <summary>Builds the model for editing setting-type links on an asset type.</summary>
Business.Models.AccountType EditLinkedSettingTypesGetModel(int assetTypeId);
/// <summary>Asset types linked to the given setting type.</summary>
List<AccountType> GetListOfLinkedAssetTypes(int settingTypeId);
/// <summary>Setting types linked to the given asset type.</summary>
List<AttributeType> GetListOfLinkedSettingTypes(int assetTypeId);
/// <summary>All setting types, flagged with whether they link to the given asset type.</summary>
List<AttributeType> GetListOfSettingTypesWithLinkedAssetType(int assetTypeId);
}
}
| 986eb9b27ff4aa21f62202013bcfbf760c2f0424 | [
"C#"
] | 155 | C# | bloomjjl/Financial | 224ccbd6841b64b49a1b94694f4d50ede6fd7377 | bbe2400bda4ad2d8edd75484d692d1ac250eb39a | |
refs/heads/master | <file_sep># Swift
Home work #1
<file_sep>//
// main.swift
// L1_Kirill_Petrov
//
// Created by кирилл on 19.11.18.
// Copyright © 2018 <NAME>. All rights reserved.
//
// Warm-up declarations: an immutable constant and a mutable variable.
let aInt:Int = 10
var bInt:Int = 12
<file_sep>//
// main.swift
// HW_1
//
// Created by кирилл on 21.11.18.
// Copyright © 2018 <NAME>. All rights reserved.
// Решить квадратное уравнение ax² + bx + c = 0
// Homework 1 — corrected version.
// Fixes relative to the original:
//  * task-1 text and the printed polynomial hard-coded coefficients (1, 2, -3)
//    although the code actually uses a = 1, b = -8, c = 12;
//  * the hypotenuse was printed as a² + b² (25) instead of √(a² + b²) (5),
//    which also made the perimeter wrong;
//  * the 5 identical interest steps are generalised into a loop using the
//    same integer arithmetic, so the printed sums are unchanged.
import Darwin

// --- Task 1: quadratic equation ax² + bx + c = 0 ---
print ("ЗАДАНИЕ №1.Решить квадратное уравнение: ax² + bx + c = 0, если a = 1 b = -8 c = 12. ")
let a : Int = 1
let b : Int = -8
let c : Int = 12
// Discriminant D = b² - 4ac determines the number of real roots.
let discriminant = Float(b * b) - Float(4 * a * c)
print ("\(a)x² + (\(b))x + \(c) = 0, D = \(discriminant)")
if discriminant > 0 {
    print ("D > 0, значит уравнение имеет 2 корня")
} else if discriminant == 0 {
    print ("D = 0, значит уравнение имеет 1 корень")
} else {
    print ("D < 0, значит действительных корней нет")
}

// --- Task 2: right triangle with legs 3 and 4 ---
print ("ЗАДАНИЕ №2.Если катеты прямоугольного треугольника 3 и 4 соответственно. Найти площадь, периметр и гипотенузу треугольника. ")
let catetA = 3
let catetB = 4
let square = Float(catetA * catetB) / 2
// Hypotenuse is √(a² + b²) — the original forgot the square root.
let hypotenuse = sqrt(Float(catetA * catetA) + Float(catetB * catetB))
let perimeter = Float(catetA) + Float(catetB) + hypotenuse
print ("Площадь : (catetA * catetB) / 2 = \(square)")
print ("Гипотенуза : sqrt(catetA² + catetB²) = \(hypotenuse)")
print ("Периметр : catetA + catetB + hypotenuse = \(perimeter)")

// --- Task 3: 10 000 deposit at 12% compound interest for 5 years ---
print ("ЗАДАНИЕ №3.Если сумма вклада в банк 10 000 рублей, под 12% годовых. Найти сумму вклада через 5 лет. ")
let contSum = 10000
let persentCont = 12
// Same integer arithmetic as the original unrolled version,
// so every yearly amount printed is identical (11200 ... 17622).
var deposit = contSum
for year in 1...5 {
    deposit = deposit + (deposit * persentCont) / 100
    print ("Сумма вклада за \(year) год будет = \(deposit) рублей.")
}
| 2c31f669b411596d2fcad9539327c2a8eb6877be | [
"Markdown",
"Swift"
] | 3 | Markdown | Kirillpm/Swift | 28abb823fd844572add32497e3105ddf7414173b | f4247d3c96acfae7ac7044f8efe447d230ddd32a | |
refs/heads/master | <file_sep>ps -ef | grep postgresql | grep -v grep
[ $? -eq "0" ] && echo "PostgreSQL check!" || echo "process is not running"
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 17
# @Last at Aprl. 18
# @Music The tear of PPIERO - Outsider(Moo-oong) ( feat Bae-Chi-Gi )
# @Information This class is child class of BackupEngine.
# @details
# DBE(Database Backup Engine) is only for doing backup service.
# You have to connect master and slave server, and connect local database in parent initalizer.
# This is a previous work for using this class. After this work, you have to connect local database.
# Obviously this work is held in this class. It is easy but may take some time to do this work.
import os, sys
sys.path.insert(0, os.getcwd())
from BackupEngine import BackupEngine
from ObjectInfo import Server
from ObjectInfo import AdministratorClass
from ObjectInfo import DatabaseClass
from System import Logger
class DatabaseBackupEngine ( BackupEngine ) :
    """Backup engine specialised for databases.

    Currently adds nothing beyond BackupEngine; it exists as the extension
    point for database-specific backup behaviour (see the test plan below).
    """
    def __init__(self, Master=None, Slave=None, Admin=None) :
        # Master/Slave are Server objects, Admin an Administrator; all three
        # are forwarded unchanged to the generic BackupEngine initialiser.
        BackupEngine.__init__(self, Master, Slave, Admin)
#
# Test if DBE status is good
#
# Manual integration test of the DatabaseBackupEngine.
# SECURITY NOTE(review): the IP addresses, SSH passwords and DB credentials
# below are hard-coded test fixtures; they should be moved to a config file
# or environment variables before this repository is shared.
if __name__ == "__main__" :
    # Test Configuration :
    # Master server is server id (1)
    # Slave server is server id (4)
    # Admin is me
    #
    # Test algorithm
    # 1. Server connection test
    # - Master server connect [ Parent class done ]
    # - Slave server connect [ Parent class done ]
    #
    # 2. Get master's local db_key from local database.
    # - Using master server's owner_key to get DB_key -> will be written in DatabaseClass
    # - Using master server's db_key, get the db_data from local db.
    #
    # 3. Database connection test
    # The database which is gotten from task number 2, connect test it.
    #
    # 4. Send the command which backs up the database.
    # Make the command ( Using a scheduler is a good way. )
    # Send the command to the server.
    '''
    * Initializing for test
    '''
    # Server & Database Setting
    MasterServer = Server.Server(1, 22, 'ssh', '192.168.3.11', '3@mHze=5K{1wj){}', 'root', 'Wonseok.J', 970403, 'ubuntu', 'wonseokbuntu', None, '2018-03-02', None, None)
    SlaveServer = Server.Server(4, 22, 'ssh', '192.168.3.11', 'P]9p{PWKRu=+o7y]', 'root', 'Wonseok.J', 970403, 'ubuntu', 'WonseokTestCent', None, '2018-03-02', None, None)
    MasterServer.db = DatabaseClass.DB("psql", "'localhost'", "'5432'", "'testdb'", "'1234'", "'test'")
    SlaveServer.db = DatabaseClass.DB("psql", "'localhost'", "'5432'", "'testdb'", "'1234'", "'test'")
    # Connect local db
    MasterServer.db.Connect_DB()
    SlaveServer.db.Connect_DB()
    # DB = db alias kept because other code accesses the capitalised attribute
    MasterServer.DB = MasterServer.db
    SlaveServer.DB = SlaveServer.db
    # Initalize Admin
    MasterServer.Admin = AdministratorClass.Administrator('Wonseok', '/root/바탕화면/ServerPlayer/Report/', 'root', 'Admin', 'root')
    SlaveServer.Admin = AdministratorClass.Administrator('Wonseok', '/root/바탕화면/ServerPlayer/Report/', 'root', 'Admin', 'root')
    # Push the admin
    MasterServer.local_admin = MasterServer.GetServerOwner()
    SlaveServer.local_admin = SlaveServer.GetServerOwner()
    '''
    * Real test of DBE
    '''
    # Connection test (1-1, 1-2)
    DBE = DatabaseBackupEngine(MasterServer, SlaveServer, MasterServer.Admin)
<file_sep>'''
If you want to share this system to others or another people,
you have to execute this file to remove your own data.
unless, your data will be get fire by someone.
So please erase your local data from here. I believe you may understand.
'''
import os, sys
def ConfigureFire() :
    """Reset Configure.txt to the factory value ('Execute=no').

    Uses a context manager so the file handle is closed even if the write
    raises (the original opened and closed the file manually).
    """
    with open('Configure.txt', 'w') as Configure:
        Configure.write('Execute=no')
def UserConfigFire() :
    # Destructive: deletes every .txt under ./UserConfig with no confirmation.
    os.system('rm ./UserConfig/*.txt')
def ReportFire() :
    # Destructive: deletes every .txt report under ./Report with no confirmation.
    os.system('rm ./Report/*.txt')
def srcFire() :
    # Destructive: recursively removes the local ./src tree.
    os.system('rm -r src')
def FireForGit() :
    # Full pre-publish cleanup: wipe config, user data, reports and the src
    # tree, then regenerate requirements.txt. Order matters only in that the
    # requirements file is rebuilt last, after src has been removed.
    ConfigureFire()
    UserConfigFire()
    ReportFire()
    srcFire()
    MakeRequirements()
def MakeRequirements() :
    # Regenerates requirements.txt from the project's actual imports.
    # Requires the external `pipreqs` tool to be on PATH.
    os.system('pipreqs . --force')
# If you want to share your project to git,
# just execute it!
# Entry point: run the destructive cleanup when executed directly.
if __name__ == '__main__' :
    FireForGit()
<file_sep># SOUL the server manager.
* System requirements
- Python 2.7 needs.
- Your OS must be Linux (Ubuntu will be best choice)
* pre - requirements
pip install -r requirements.txt
apt get install sshpass
* How to execute?
cd -/ServerManager-for-Buzzz
python ./System/Engine.py
* Before share < Caution! >
cd -/ServerManager-for-Buzzz
python ./BeforeShare.py
<file_sep># @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 13
# @Last at Aprl. 13
# @Music Parallax, U-Won-Jae
# @Information
import os, sys
from SystemLoader import SystemLoader
from Connector import Connector
from Configurator import Configurator
sys.path.insert( 0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import Server
# if you want to use server class in server, you have to write Server.Server( ... ) - interesting issue
class Kernel(object) :
    """Core coordinator: loads the Connector from a SystemLoader, splits
    servers into good/bad connection lists, and re-checks the bad ones
    through Configurator.
    """
    # SystemLoader which kernel has provides information to the connector,
    # and checks whether the connection is successful. If not, the kernel
    # will load the logger to save logs for the programmer.
    def serverToServer(self, server) :
        """Convert a raw DB row (list) into a Server object.

        NOTE(review): only indices 0..10 of the row are used here, while
        rows seen elsewhere carry 14 fields (db key / object key are
        dropped) — confirm this is intentional.
        """
        _Server = Server.Server( server[0], server[1], server[2], server[3], server[4], server[5], server[6], server[7], server[8], server[9], server[10] )
        # Share the connector's database handle and admin with the new object.
        _Server.db = self.Conn.db
        _Server.admin = self.Conn.admin
        return _Server
    # def __init__(self):
    # print('Log : Kernel Initilizer is loaded!')
    # self.Conn = Connector()
    def __init__(self, object = None) :
        # A SystemLoader instance is mandatory; without it the kernel
        # returns half-initialised (callers get an unusable object).
        if( object == None ) :
            print('Kernel error. You must define kerenel with System Loader!')
            return
        self.SystemLoader = object
        #self.SystemLoader.printInfo()
        self.GoodServerList = []
        self.BadServerList = []
        self.Conn = Connector(self.SystemLoader) # Conn carries the logger DB
        self.GoodServerList = self.Conn.GoodServerList
        self.BadServerList = self.Conn.BadServerList
        Configurators_Badguys = []
        print(" Re-checking bad servers connection starts")
        # Retry every failed server once through a Configurator SSH attempt.
        for i in self.BadServerList :
            serv = self.serverToServer(i)
            Conf = Configurator(serv, self.Conn.admin)
            Conf.ConnectSSH()
            Configurators_Badguys.append(Conf)
        print('Kernel is successfully loaded!')
    def __str__ (self):
        # Fixed tag used by the logging subsystem to identify this component.
        return "KERNEL"
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys
# @Author Wonseok
# @Designer Wonseok
# @Start at May 2
# @Last at May 2
# @Music CRAYON - G.DRAGON
# @Information This class is only for User Interface.
# You don't need to be worried by designing UI.
from anytree import Node, RenderTree
sys.path.insert( 0, os.getcwd() )
from System import FileToServer
from ObjectInfo import Server
from ObjectInfo import DatabaseClass
from System import Kernel
class UserInterface(object) :
    """Console (CUI) front-end for the engine.

    Menu navigation is modelled as an anytree tree built in __init__; each
    menu method moves a one-element list `nod` (pass-by-reference cursor)
    to the next menu node.
    """
    def clear(self) :
        # Clears the terminal using the platform command chosen in __init__
        # ('clear' on Linux, 'cls' on Windows).
        os.system(self.cls)
def __init__(self, objects ) :
    """Build the menu tree and platform settings.

    `objects` is the Engine; its OS_SORT selects the clear-screen command.
    NOTE(review): if OS_SORT is neither 1 nor 2, self.cls is never set and
    clear() will raise AttributeError — confirm valid OS_SORT values.
    """
    # I want to make this class in GUI mode, but I don't have enough time and work force.
    self.SCPManager = FileToServer.SCPManager()
    self.MODE = 'CUI'
    self.Engine = objects
    if( objects.OS_SORT == 1 ) : # 1 is Linux
        self.cls = 'clear'
    elif( objects.OS_SORT == 2 ) : # Windows
        self.cls = 'cls'
    # level = 0, you have to exit menu.
    self.root = Node("Null")
    # level = UI
    self.nodUI = Node("UserInterface", parent=self.root)
    # level = 2, PSM, DM, CM, SM, PO
    self.ServerMenu = Node("PrintServerManageMenu", parent=self.nodUI)
    self.DBMenu = Node("DatabaseManage", parent=self.nodUI)
    self.ConfigMenu = Node("Configuration Mode", parent=self.nodUI)
    self.SecurityMode = Node("Security Mode", parent=self.nodUI)
    self.PowerOff = Node("Power Off", parent=self.nodUI)
    # level = 3, TM, ID, GBC, FM
    self.TargetManage = Node("Target Manage", parent=self.ServerMenu)
    self.InstallDatabase = Node("Install database", parent=self.ServerMenu)
    self.GoBackupCons = Node("Go Backup Console", parent=self.ServerMenu)
    self.FirewallManag = Node("Firewall manage", parent=self.ServerMenu)
    # level = 4, TMM
    self.TMM = Node("Target Manage Menu", parent=self.TargetManage)
    # level = 5, AM, DM, SystemUpdate, ThrowMessage
    self.AM = Node("AddtargetMenu", parent=self.TMM)
    self.DM = Node("DeltargetMenu", parent=self.TMM)
    self.SystemUpdate = Node("System Update", parent = self.TMM)
    self.ThrowCommandMenu = Node("Throw command menu", parent=self.TMM)
    # level = 6, OSUpgrade, Cron update
    self.OSUpgrade = Node("Operating System Upgrade", parent=self.SystemUpdate)
    self.UpdateUpgrade = Node("Update & Upgrade", parent=self.SystemUpdate)
    self.CronUpdate = Node("Cron update", parent=self.SystemUpdate)
    self.ThrowFile = Node("Throw File", parent=self.ThrowCommandMenu)
    self.ThrowMsg = Node("Throw Command", parent=self.ThrowCommandMenu)
    # level = 7
    self.SetCommandForFile = Node("Set the command for file", parent = self.ThrowFile)
    self.SendCommand = Node("Send command", parent= self.ThrowFile)
def SystemUpdateMenu(self,target, nod) :
    """Show the system-update submenu and move `nod` to the chosen node.

    `target` is the list of selected server rows; `nod` is the one-element
    navigation cursor. The trailing raw_input('') pauses until Enter.
    """
    self.PrintTargetMenu(target)
    print('')
    print( '1 - 1. Operating System Upgrade')
    print( '1 - 2. Update & Upgrade')
    print( '1 - 3. update at cron ')
    print(' 1 - 0. return to menu ')
    usr_input = self.InputByUsr( 'Which one do you want to go?', 3 )
    if( usr_input == 1 ) :
        nod[0] = self.OSUpgrade
    elif( usr_input == 2 ) :
        nod[0] = self.UpdateUpgrade
    elif( usr_input == 3 ) :
        nod[0] = self.CronUpdate
    elif( usr_input == 0 ) :
        # 0 walks one level up the menu tree.
        nod[0] = nod[0].parent
    raw_input('')
def OperatingSystemUpgrade(self, target=[[]], nod=None ) :
    """Bucket the selected targets by operating-system family.

    Returns a dict with keys 'ubuntu', 'cent' and 'debian' mapping to the
    matching target rows (index 8 holds the OS string).

    BUG FIX: the original matched 'Ubuntu'/'Cent' case-sensitively, so
    rows whose OS string is stored lowercase (e.g. 'ubuntu') were never
    bucketed; matching is now case-insensitive.  The previously dead
    'debian' bucket is also populated, as the key already existed.
    '''
    You can add more OS in here
    '''
    """
    self.PrintTargetMenu(target)
    OSList = {}
    OSList['ubuntu'] = []
    OSList['cent'] = []
    OSList['debian'] = []
    for i in target :
        if( i == [] ) :
            print("You must regist target in befre menu.")
            raw_input('')
        else :
            # Case-insensitive substring match on the OS column.
            os_name = str(i[8]).lower()
            if( os_name.find('ubuntu') != -1 ) :
                OSList['ubuntu'].append(i)
            elif( os_name.find('cent') != -1 ) :
                OSList['cent'].append(i)
            elif( os_name.find('debian') != -1 ) :
                OSList['debian'].append(i)
    print(OSList)
    raw_input()
    return OSList
def InputByUsr(self, msg='', numMax=None) :
    """Prompt the user for input.

    With numMax=None, returns the raw string typed at prompt `msg`.
    Otherwise loops until an integer in [0, numMax] is entered and
    returns it (0 is reserved for 'back'/'power off' in the menus).

    NOTE(review): on Python 2 `input()` evaluates the typed text as an
    expression — a malicious entry executes code; raw_input + int() would
    be safer. Also, on invalid input `msg` is reused as the retry prompt.
    """
    if numMax == None :
        return raw_input(msg)
    while( True ) :
        usr_input = int( input('What do you want to do? : '))
        if( usr_input < 0 or usr_input > numMax ) :
            # Zero always be 'power off'
            print('Input Error, try again!')
            flag = raw_input(msg)
        else :
            return usr_input
def PrintMainMenu(self, badnum, goodnum, nod) :
    """Draw the top-level SOUL menu and route `nod` by the user's choice.

    badnum/goodnum are the counts of failed/working server connections;
    choosing 0 moves the cursor to the root node (power off).
    """
    self.clear()
    print(' * * * ')
    print(' * SOUL * * ')
    print(' * * * * * * ')
    print(' ver 0.5')
    print('')
    print(' 1. Server manage. ')
    print(' 2. Database manage. ')
    print(' 3. Configuration mode. ')
    print(' 4. Security mode. ')
    print(' 0. Power off. ')
    print('')
    print('')
    print(' Num( bad servers ) : ' + str(badnum))
    print(' Num( good servers ) : ' + str(goodnum))
    print('')
    usr_input = self.InputByUsr( 'Which one do you want to go?', 4 ) # if you add functions, you have to add '1' in parameter.
    if usr_input == 1 :
        nod[0] = self.ServerMenu
        raw_input()
    elif usr_input == 2 :
        nod[0] = self.DBMenu
    elif usr_input == 3 :
        nod[0] = self.ConfigMenu
    elif usr_input == 4 :
        nod[0] = self.SecurityMode
    # add additional function at this line.
    else :
        nod[0] = self.root
def PrintTargetMenu(self, target=[]) :
    """Render the currently selected targets, one ' [*]. ' line each,
    framed by a header and footer rule."""
    print(' ')
    print(' Target Console ----------------------------------------------- ')
    for chosen in target :
        print( ' [*]. ' + str(chosen) )
    print(' --------------------------------------------------- target end.')
def PrintServerManageMenu(self, target=[[]], nod=None) :
    """Show the server-management submenu and move `nod` accordingly.

    `target` holds the rows the user has already picked; invalid input
    just reports an error and leaves the cursor where it was.
    """
    # Target list is chosen targets which user picked.
    self.PrintTargetMenu(target)
    print('')
    print( '1 - 1. Target manage')
    print( '1 - 2. Install database ')
    print( '1 - 3. Go backup console ')
    print(' 1 - 4. Firewall manage ')
    print(' 1 - 0. Return')
    key = self.InputByUsr('Which one do you want to go?', 4)
    if( key == 1 ) :
        nod[0] = self.TargetManage
    elif key == 2 :
        nod[0] = self.InstallDatabase
    elif key == 3 :
        nod[0] = self.GoBackupCons
    elif key == 4 :
        nod[0] = self.FirewallManag
    else :
        print('input error!')
        raw_input('')
def TargetManageMenu(self, target=[], nod=None) :
    """Show the target-management submenu (add/del targets, updates,
    command dispatch) and move the navigation cursor.

    The trailing raw_input() pauses after every choice, valid or not.
    """
    self.PrintTargetMenu(target)
    print( '1-1 - 1. Add target.')
    print( '1-1 - 2. Del target.')
    print( '1-1 - 3. System Update Menu')
    print(' 1-1 - 4. Throw Command Menu')
    print( '1-1 - 0. Return.')
    key = self.InputByUsr('Which one do you want to go?', 4)
    if( key == 1 ) :
        nod[0] = self.AM
    elif key == 2 :
        nod[0] = self.DM
    elif key == 3 :
        nod[0] = self.SystemUpdate
    elif key == 4 :
        nod[0] = self.ThrowCommandMenu
    elif key == 0 :
        nod[0] = self.ServerMenu
    else :
        print('Input error!')
    raw_input()
# Sample server row, for reference (14 fields):
#[[2, 22, u'ssh', u'123', u'123', u'123', u'123', 123, u'test', u'test', u'YES', u'2018-05-07 21:52:48.022944', None, None], [4, 22, u'ssh', u'172.16.58.3', u'P]9p{PWKRu=+o7y]', u'root', u'Wonseok.J', 970403, u'CentOS 7 x64', u'WonseokTest2Cent', u'', u'2018-05-07 21:52:58.786149', None, None], [6, 23, u'ssh', u'172.16.58.3', u'Wonseok786!', u'swc', u'Wonseok.J', 1065, u'Ubuntu14', u'WonseokTest3', u'YES', u'2018-05-07 21:53:49.822362', None, None]]
def PrintAllTargetsDetails(self, targets=[[]], badTargets=[[]], goodTargets=[[]] ,nod=None):
    """Dump all known servers (bad then good connections) plus the rows
    already chosen as targets, then park the cursor on the Target Manage
    Menu node and pause.

    NOTE(review): the column headers and the printed row indices do not
    line up everywhere (e.g. index 7 is the owner id but sits under the
    [SSH ID] column) — verify against the row layout below.
    """
    # Target list is chosen list user picked, and allTargets list is a list of all servers.
    # [ID], [PORT], [SORT], [IP], [PASSWORD]. [USRNAME], [OWNR NAME], [ OWNR_ID ], [SERVER OS], [SERVER NAME], [IS ERROR], [LAST_LOGIN], [dbkey], [obj key]
    print("CONNECTION STATUS : BAD ")
    print("[ADDED]\t\t[ER]\t[ID]\t\t[SERVER NAME]\t\t\t\t[IP]\t\t\t\t[SSH ID]\t\t[SSH PW]\t\t\t[LAST LOGIN]")
    for badtarget in badTargets :
        if badtarget == [] : break
        print("{}\t\t{}\t{}\t\t{}\t\t\t\t{}\t\t\t\t{}\t\t{}\t\t\t{}".format(str(badtarget in targets), str(badtarget[10]), str(badtarget[0]), str(badtarget[9]), str(badtarget[3]), str(badtarget[7]),str(badtarget[4]), str(badtarget[11])))
    print("")
    print("[ADDED] CONNECTION STATUS : GOOD")
    for badtarget in goodTargets : # variable name kept from the bad-list loop above; these are the good rows
        if badtarget == [] : break
        print("{}\t\t{}\t{}\t\t{}\t\t\t\t{}\t\t\t\t{}\t\t{}\t\t\t{}".format(str(badtarget in targets), str(badtarget[10]), str(badtarget[0]), str(badtarget[9]), str(badtarget[3]), str(badtarget[7]),str(badtarget[4]), str(badtarget[11])))
    print("\n\n")
    print("Already Targets : ")
    print("[ID] [SERVERNAME] [IP] [SORT]")
    for target in targets :
        if target == [] : break
        print("{} {} {} {}".format(str(target[0]), str(target[9]), str(target[3]), str(target[2])))
    nod[0] = self.TMM
    raw_input('')
def ChkValue(self, id, target, bad, good) :
'''
@return 1 - is in target, MSG = None <type>
2 - is in bad MSG = list[ that node ]
3 - is in good MSG = list[ that node ]
4 - not found MSG = That is not in both bad and good lists.
'''
# target, bad, good = [ [] ] is list in list.
# check is in target.
for i in target :
if i == [] : break
#print(id,type(id), i[0], type(i[0]))
if( str(id) == str(i[0]) ) :
return 1, i
# check is in bad
for i in bad :
#print(id,type(id), i[0], type(i[0]))
if i == [] : break
if( str(id) == str(i[0]) ) :
return 2, i
# check is in good
for i in good :
#print(id,type(id), i[0], type(i[0]))
if i == [] : break
if( str(id) == str(i[0]) ) :
return 3, i
return 4, 'That is not in both bad and good lists.'
def AddtargetMenu(self, target=[[]], badTargets = [[]], goodTargets = [[]], nod=None) :
    """Ask for a server id and move it into the chosen-target list.

    Only ids found in the bad or good lists (codes 2/3) can be added; an
    id already in `target` or unknown reports an error and stays put.

    NOTE(review): `target` is appended to in place — callers must pass
    their own list, since appending to the mutable default would leak
    state across calls.
    """
    self.PrintAllTargetsDetails(target, badTargets, goodTargets,nod)
    print("\n")
    want_id = self.InputByUsr('Which one do you want to add? input [ID] value : ', None)
    num, msg = self.ChkValue(want_id, target, badTargets, goodTargets)
    if( num == 2 or num == 3 ) :
        # Drop the [] placeholder before appending the first real row.
        if( target.count([]) != 0 ) : target.remove([])
        target.append( msg )
        nod[0] = nod[0].parent
    else :
        print("Caution! Error occured!")
        print("Error code : " + str(num))
        print("Error msg : " + str(msg))
        print("\nReturn before menu!")
def DeltargetMenu(self, target=[[]], badTargets = [[]], goodTargets = [[]], nod=None) :
    """Ask for a server id and remove it from the chosen-target list.

    Only an id currently in `target` (code 1) can be removed; anything
    else reports an error. Always pauses at the end via raw_input().
    """
    self.PrintAllTargetsDetails(target, badTargets, goodTargets,nod)
    print("\n")
    want_id = self.InputByUsr('Which one do you want to delete? input [ID] value : ', None)
    num, msg = self.ChkValue(want_id, target, badTargets, goodTargets)
    if( num == 1 ) :
        target.remove( msg )
        nod[0] = nod[0].parent
    else :
        print("Caution! Error occured!")
        print("Error code : " + str(num))
        print("Error msg : " + str(msg))
        print("\nReturn before menu!")
    raw_input()
def func_ThrowCommandMenu(self, nod=None) :
    """Choose between sending a file or a raw command to targets.

    0 (or any non-1/2 value) walks back up the menu tree; note that the
    'Input error!' message is also printed for the legitimate 0 choice.
    """
    self.clear()
    print( '1-1-4 - 1. Throw file')
    print( '1-1-4 - 2. Throw Command')
    key = self.InputByUsr('Which one do you want to go?', 2)
    if( key == 1 ) :
        nod[0] = self.ThrowFile
    elif key == 2 :
        nod[0] = self.SendCommand
    else :
        nod[0] = nod[0].parent
        print('Input error!')
        raw_input()
def getServerFromTarget(self, target=[], nod=None) :
    # Converts one raw target row into a Server object via the kernel.
    # `nod` is accepted for signature symmetry but unused here.
    return self.Engine.KernelObj.serverToServer(target)
# Return Server class by server's ID.
def getServerFromTargetsById(self, id, target=[[]]):
    """Find the row whose first field equals `id` and convert it to a
    Server object; returns None when no row matches.

    NOTE(review): with the default target=[[]] the empty placeholder row
    makes Serv[0] raise IndexError — callers must always pass a real
    target list (see func_SetCommandForFile / func_SendCommand).
    """
    # Target list is chosen list user picked, and allTargets list is a list of all servers.
    # [ID], [PORT], [SORT], [IP], [PASSWORD]. [USRNAME], [OWNR NAME], [ OWNR_ID ], [SERVER OS], [SERVER NAME], [IS ERROR], [LAST_LOGIN], [dbkey], [obj key]
    for Serv in target :
        if Serv[0] == id :
            return self.Engine.KernelObj.serverToServer(Serv)
    return None
def func_ThrowFilescp(self, nod=None) :
    """SCP submenu: configure the per-file command or send the command.

    Invalid input only prints an error; the cursor is left unchanged.
    """
    self.clear()
    print('1..- 1. Set the command for file')
    print('2..- 2. Send command')
    key = self.InputByUsr('Which one do you want to go?', 2)
    if( key == 1 ) :
        nod[0] = self.SetCommandForFile
    elif key == 2 :
        nod[0] = self.SendCommand
    else :
        print("Input error!")
        raw_input()
# This function is linked with 'FILE TO SERVER.py'
def func_SetCommandForFile(self, target=[[]], nod=None ) :
    """Register the chosen targets with the SCP manager, then let the user
    pick either all of them or a specific id list (terminated by -1).

    BUG FIX: the id lookup previously called getServerFromTargetsById(i)
    without the `target` argument, so it always searched the empty default
    list (crashing on Serv[0] / returning None); the found-None case also
    went on to register None as a target. Both are fixed below.
    """
    self.clear()
    print(' ! Caution! ' )
    print(' - your command is will be sent for your targets. ')
    print(' - even if you want to back, system can not be back. ')
    # Sync target with SCPManager.Target
    for row in target :
        self.SCPManager.TryAddSlave(self.getServerFromTarget(row))
    Flag = str(raw_input(' Do you want to have specific target? (y/n) '))
    if Flag in ('n', 'N') :
        self.SCPManager.TargetSetById( 'all' )
    else :
        print('If you want to end or exit, input -1')
        tmpServIds = []
        while True :
            tmpServIds.append(int( input('ID -> ')))
            if tmpServIds[-1] == -1 :   # -1 is the end-of-list sentinel
                tmpServIds.pop()
                break
        for server_id in tmpServIds :
            # Pass `target` so the lookup searches the user's actual list.
            tmpServer = self.getServerFromTargetsById(server_id, target)
            if tmpServer is None :
                print("Error occurs at 'UIManager.py[func_setCommandForFile]'")
                continue   # do not register a missing server
            self.SCPManager.TargetSetById( tmpServer )
def func_SendCommand(self, nod=None, target=[[]]) :
    """Show the queued per-target commands, then either send them all or
    delete selected ids from the SCP target set.

    BUG FIX: the id lookup previously called getServerFromTargetsById(i)
    without a target list, so it always searched the empty default and
    failed. A backward-compatible `target` parameter (default unchanged
    behaviour for old callers) is added and passed through.
    '''
    [ID] [COMMAND]
    1 asf
    2 12
    ...
    Are you sure? or Delete? ( sure = 1, delete = 2)
    '''
    """
    print('[ID] [COMMAND]')
    self.SCPManager.PrintTargetCommands()
    print('')
    flag = int(input('Are you sure? or Delete? ( sure = 1, delete = 2 )'))
    if( flag == 1 ) :
        self.SCPManager.SendToAllTargets()
    else :
        print('Input ids you want to delete.')
        print('If you want to end or exit, input -1')
        tmpServIds = []
        while True :
            tmpServIds.append(int( input('ID -> ')))
            if tmpServIds[-1] == -1 :   # -1 is the end-of-list sentinel
                tmpServIds.pop()
                break
        for server_id in tmpServIds :
            # Pass `target` so the lookup searches the caller's list.
            tmpServ = self.getServerFromTargetsById(server_id, target)
            if tmpServ is None :
                print("UImanager.py[func_sendcommand] has some issue")
                continue
            self.SCPManager.DeleteServerInTargets( tmpServ )
self.SCPManager.DeleteServerInTargets( tmpServ )<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys
sys.path.insert(0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import AdministratorClass
from ObjectInfo import Server
import time, datetime
from System import Logger
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 18
# @Last at Aprl. 18
# @Music
# @Information This class is made for checking the system. You can check the system to use this class.
# But if you want to make server checker, server checker must has server class,
# And define some modules.
# Database checker may need Database class. That's all.
class CheckerEngine (object) :
    """Base health-checker: validates that the local server, database and
    administrator objects are usable, and logs failures via Logger.
    """
    def __init__(self, LocalServer=None, LocalDatabase=None, LocalAdmin=None) :
        # Checker must has local settings.
        self.LocalServer = LocalServer
        self.LocalDatabase = LocalDatabase
        # Alias: Logger reads the connection through `self.db`.
        self.db = self.LocalDatabase # for Logger
        self.LocalAdmin = LocalAdmin
        self.Logger = Logger.Logger(self)
        # Tag written into every log record produced by this engine.
        self.EngineName = "CHECKERENGINE"
def CheckerConditionCheck(self) :
    """Verify that the local server, database and admin are all usable.

    Returns (True, "Good") when everything checks out, otherwise
    (False, <reason>). Failures are logged *before* returning.
    """
    # 1. The local server must accept a connection.
    isOkay, Msg = self.LocalServer.isTryConnect()
    if isOkay != True :
        self.SendLog_ServerConnectionBad(self.Logger, Msg)
        return False, "Program can not make a link with local server. Check the log."
    # 2. The local database must accept a connection.
    isOkay, Msg = self.LocalDatabase.isTryConnect()
    if isOkay != True :
        # BUG FIX: this log call used to sit *after* the return statement
        # and therefore never executed.
        self.SendLog_DatabaseConnectionBad(self.Logger, Msg)
        return False, "Program can not link with local database. check the log."
    # 3. The admin object must identify itself as an Administrator.
    if str(self.LocalAdmin) != "ADMINISTRATORCLASS" :
        return False, "Program can not make a link with local administrator. Check the admin."
    # All checks passed.
    return True, "Good"
def SendLog_ServerConnectionBad (self, Logger, ExceptionMsg) :
    """Write a 'server connection failed' report and push it to the log DB.

    Log structure:
      [ADMIN.ID] tried to connect [ServerID] by [ServerRole]@[Host] at [Date.time]
      Server was [Server.isOkay]. And program tried to connect, but server connection is BAD.
      specific report which pssh says is here : [Exception E]
    """
    strLogMsg = str(self.LocalAdmin.ID) + " tried to connect " + str(self.LocalServer.ID) + " by " + str(self.LocalServer.CONNECTION_USERNAME)+"@" + str(self.LocalServer.CONNECTION_IPADDRESS) + " at " + str(datetime.datetime.now()) + "\n" + \
    "Server was " + self.LocalServer.IS_ERROR + ". And program tried to connect, but server connection is BAD." + "\n" + \
    "specific report which pssh says is here : " + str(ExceptionMsg)
    Logger.SetOrigin('KNOWN_LOG')
    # Persist the human-readable report, then index it in the log table.
    RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', self.LocalAdmin.PATH, self.LocalAdmin.NAME, strLogMsg)
    Logger.push_log( self.EngineName, self.LocalServer.ID, RK, 'KNOWN_LOG', 'BAD', 'CheckerEngine.SendLog_ConnectionBad', 'SERVER')
def SendLog_DatabaseConnectionBad (self, Logger, ExceptionMsg) :
    """Write a 'database connection failed' report and push it to the log DB.

    Log structure:
      [ADMIN.ID] tried to connect Database ID : [DB_ID]
      Database Setting is :
      SORTS, HOSTS, NAME, PW, USER, DB_KEY, IS_CONNECTED, OBJECT, SERVER_KEY :
      Values;
    """
    strLogMsg = str(self.LocalAdmin.ID) + " tried to connect Database ID : " + str(self.LocalDatabase.DB_KEY) + "\n" + \
    "Database setting is : " + "\n" + \
    "SORTS, HOSTS, NAME, PW, USER, DB_KEY, IS_CONNECTED, OBJECT, SERVER_KEY : " + "\n" + \
    self.LocalDatabase.getInfo() + "\n" + \
    "Exception Msg : " + ExceptionMsg + "\n"
    Logger.SetOrigin('KNOWN_LOG')
    # Persist the report, then index it; note the log is keyed by the
    # admin's SERVER_KEY rather than a database id.
    RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', self.LocalAdmin.PATH, self.LocalAdmin.NAME, strLogMsg)
    Logger.push_log( self.EngineName, self.LocalAdmin.SERVER_KEY, RK, 'KNOWN_LOG', 'BAD', 'CheckerEngine.SendLog_ConnectionBad', 'SERVER')
def __str__(self) :
return "CHECKERENGINE"<file_sep>service postgresql start
python /usr/local/lib/python2.7/dist-packages/pgadmin4/pgAdmin4.py
<file_sep>psycopg2==2.7.4
SQLAlchemy==1.2.5
anytree==2.4.3
pandas==0.22.0
Fabric==1.14.0
pexpect==4.5.0
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys
sys.path.insert(0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import AdministratorClass
from ObjectInfo import Server
from CheckerEngine import CheckerEngine
import time, datetime
from System import Logger
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 18
# @Last at Aprl. 18
# @Music Sing a song at TV program - Shin Yong Jae.
# @Information This class is made for checking the database system. This system is a child class of Checker Engine.
# You must add some functions and values in this functions. that's all.
class DatabaseChecker( CheckerEngine) :
    """Health-checker specialised for the local database.

    Inherits the actual checks from CheckerEngine and only overrides the
    engine name / logger tag.
    """
    def __init__(self, LocalServer=None, LocalDatabase=None, LocalAdmin=None ) :
        CheckerEngine.__init__(self, LocalServer, LocalDatabase, LocalAdmin)
        self.Logger = Logger.Logger(self)
        self.EngineName = "DATABASECHECKER"

    def CheckerConditionCheck(self) :
        """Run the base-class condition check and normalise its result.

        BUG FIX: the original called self.CheckerConditionCheck(), i.e.
        itself, causing infinite recursion; it must delegate to the
        CheckerEngine implementation.
        """
        isOkay, msg = CheckerEngine.CheckerConditionCheck(self)
        if( isOkay ) :
            return True, "Good"
        else :
            return False, msg
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys
sys.path.insert(0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import AdministratorClass
from ObjectInfo import Server
from CheckerEngine import CheckerEngine
import time, datetime
from System import Logger
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.header import Header
from email import encoders
from pexpect import pxssh
import getpass
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 20
# @Last at Aprl. 20
# @Music Time paradox
# @Information This class will send the report from Serverchecker or DatabaseChecker to you.
# This class will supports user to check server conditions by watching mail from this class
# That's why this class name is mail checker.
#
class MailChecker( CheckerEngine ) :
    '''
    @Written by wonseok.
    @Date at 2018. 04. 20
    Dear source-code readers! Thank you for looking at my sources. If you have any issues,
    please mail me at <EMAIL> — I am glad to see all your issues and reports!
    The reason I write this here is that this class contains my company boss' mail ID and
    password. Please do not use that account to log in. If you do, I will be really sad and
    will have to lock my GitHub, and I would rather keep my sources open.
    +) That mail account is for testing only, holds no sensitive information, and I have
    his agreement — but I still do not want other people using it.
    Thanks!
    '''
    # Sends checker reports by mail; construction mirrors ServerChecker.
    def __init__(self, LocalServer=None, LocalDatabase= None, LocalAdmin=None ) :
        # For sending mail, you need host, port, mimebase.
        # If you want to use 'Microsoft mail server', you have to change
        # --> MailChecker.host = 'smtp.live.com' (maybe)
        # And mimebase is for mixed mail (text+image+content ... etc)
        '''
        About Plain.
        The mail plain is for the mail recipient. A mail that is just a raw log
        is not friendly, so a 'mail plain' preamble is prepended. The template
        lives at <LocalAdmin.PATH>/MailBase/MailPlainText.txt and its content
        is placed at the front of every mail body.
        '''
        CheckerEngine.__init__(self, LocalServer, LocalDatabase, LocalAdmin )
        self.Logger = Logger.Logger(self)
        self.EngineName = "MAILCHECKER"
        self.host = 'smtp.gmail.com' # This is a host of smtp - googel-Gmail service.
        self.port = '587' # This is a port of smtp at google-Gmail service..
        self.MIMEBASE = ['multipart', 'mixed']
        # Mail base setting starts
        # NOTE(review): this file handle is kept open for the object's lifetime
        # and never closed — consider a context manager.
        self.MailBaseFile = open(self.LocalAdmin.PATH+"MailBase/MailPlainText.txt", 'r') # check the log !
        self.Content = "" # define
        while True : # Here is the line for setting plain text.
            line = self.MailBaseFile.readline()
            if not line : break
            else : self.Content += (line + "\n")
        # Mail base setting ends
# Find the report by using name which you put at parameter.
def AddReportInContent_byName(self, Name) :
BaseFile = open(self.LocalAdmin.PATH+str(Name), "r")
while True :
line = BaseFile.readline()
if not line : break
else : self.Content += (line+"\n")
# This function is only for smtplib
def MailServer_Login(self, senderAddress, senderPassword ) :
# If you want to make your own mail server, call the smtplib.SMTP!
# like this!
self.mailServer = smtplib.SMTP(self.host, self.port) # you need host and port number.
self.mailServer.set_debuglevel(1) # This will print issues from google server.
self.mailServer.ehlo() # This is the protocol regulation of SMTP
self.mailServer.starttls() # TLS Service starts. if you don't want it, don't call this function.
self.mailServer.ehlo() # After call the tls service, check the server if I can call mail functions
self.senderAddress = senderAddress
self.senderPassword = <PASSWORD>
self.mailServer.login(senderAddress, senderPassword)
def MaillServer_CreateMail(self, recipient, Subject, Content) :
self.MailMsg = MIMEMultipart('alternative')
self.MailMsg['From'] = self.senderAddress
self.MailMsg['To'] = recipient
self.MailMsg['Subject'] = Header(Subject, 'utf-8')
self.MailMsg.attach(MIMEText(self.Content + Content, 'plain', 'utf-8'))
self.recipient = recipient
def MailServer_SendMail(self) :
try :
if( self.MailMsg['From'] != None and self.MailMsg['To'] != None and self.MailMsg['Subject'] != None ) :
self.mailServer.sendmail(self.senderAddress, self.recipient, self.MailMsg.as_string())
self.mailServer.close()
else :
print('Error!')
except Exception as e :
print(e)
# Manual smoke test: wire up a throw-away Server / DB / Admin, log in to the
# SMTP server and mail the most recent "LastCommand" report.
# NOTE(review): '<EMAIL>' / '<PASSWORD>' are scrubbed placeholders -- supply
# real values before running; this sends a real mail and needs a live DB.
if __name__ == "__main__" :
    S = Server.Server(1, 22, 'ssh', '192.168.127.12', '3@mHze=5K{1wj){}', 'root', 'Wonseok.J', 970403, 'ubuntu', 'wonseokbuntu', None, '2018-03-02', None, None)
    S.DB = DatabaseClass.DB("psql", "'localhost'", "'5432'", "'testdb'", "'1234'", "'test'")
    S.DB.Connect_DB()
    # Both casings are read by different collaborators (Logger uses .db).
    S.db = S.DB
    S.Admin = AdministratorClass.Administrator('Wonseok', '/root/바탕화면/ServerPlayer/Report/', 'root', 'Admin', 'root')
    # NOTE(review): this shadows the MailChecker class name -- fine for a
    # single-shot script, but rename the variable if the script grows.
    MailChecker = MailChecker(S, S.DB, S.Admin)
    MailChecker.MailServer_Login('<EMAIL>', '<PASSWORD>!')
    MailChecker.AddReportInContent_byName('1.wonseokbuntu.LastCommand.txt')
    msg = '\n\nTEST at, 2018-04-2001:37:35:378081 and the server host name is "wonseokbuntu" ' + '\n\n ATTENTION! THIS IS FOR TEST \n\n'
    MailChecker.MaillServer_CreateMail('<EMAIL>', 'Last log from server test [test]', msg)
    MailChecker.MailServer_SendMail()
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 10
# @Last at Aprl. 15
# @Music Pianoman - MAMAMOO
# @Information This class makes report, make and throw log to local DB.
# You have to make sure to connect your local DB with program.
import os, sys
import psycopg2
import time, datetime
sys.path.insert(0, os.getcwd() )
from ObjectInfo import DatabaseClass
def Generate_PrivateCode():
    """Return a unique, space-free key derived from the current timestamp.

    Shape: YEAR-MONTH-DAYHOUR:MIN:SEC.MICROSEC (the blank between the date
    and time parts is stripped), e.g. '2017-04-0308:40:00.010101'.
    """
    now = datetime.datetime.now()
    return str(now).replace(" ", "")
def Generate_Filename(className, ownerName) :
    """Build a log file name of the shape "(ClassName).(OwnerName).log".

    An empty ownerName falls back to "Logger".
    BUG FIX: the original assigned the fallback inside the if-branch but then
    fell off the end of the function, returning None for an empty ownerName.
    """
    if( ownerName == "" ) :
        ownerName = "Logger"
    return className+"."+ ownerName +".log"
def Generate_ReportKey(FileName) :
    """Append the current space-stripped timestamp to FileName.

    Shape: (FileName)(Time), e.g. 'Kernel.log.2018-04-0308:40:00.0101...'.
    NOTE(review): the original comment cites the Windows file-name rules and
    says ':' is replaced by '-', but the code only strips spaces -- confirm
    whether the ':' replacement is still wanted.
    """
    stamp = str(datetime.datetime.now()).replace(" ", "")
    return FileName + stamp
def Exit(code) :
    # Report a fatal condition, record it in the execute log, and abort the
    # whole program. `code` identifies the caller / error
    # (e.g. code 100: OS error -- unsupported operating system).
    warning = "WARNNING : You can't execute program. \n Error code : " + str(code)
    print(warning)
    SetExecuteLog('Engine initialize is failed', code)
    exit()
# Timestamp of the previous log entry, used to record per-entry deltas.
BeforeTime = datetime.datetime.now()
def SetExecuteLog(code, ErrorCode) :
    """Append an execution-log entry to ./UserConfig/LoggerLog.txt.

    code      -- free-form message describing what happened.
    ErrorCode -- optional error identifier; written only when truthy.
    Each entry also records the wall-clock time and the delta since the
    previous entry (kept in the module-global BeforeTime).
    BUG FIX: the original never closed the file handle; a context manager
    now flushes and releases it deterministically.
    """
    global BeforeTime
    now = datetime.datetime.now()
    with open(os.getcwd() + '/UserConfig/LoggerLog.txt', "a") as LogFile :
        LogFile.write('Code : ' + str(code) + '\n')
        if( ErrorCode ) :
            LogFile.write('ErCode : ' + str(ErrorCode) + '\n')
        LogFile.write('Written time : ' + str(now) + ' [' + str(now - BeforeTime) + ']'+ '\n')
    BeforeTime = now
class Logger(object) :
    '''
    Report/Log writer.

    Writes human-readable report files to disk and pushes structured rows
    into the local PostgreSQL "logdb" database (tables: report,
    execution_logs, origin, status, program, request_types).

    Construct it with an owner object exposing `.db` (with HOST/USER/PW
    fields); str(owner) becomes the class/program name used in file names
    and log rows.
    '''
    def MakeReport(self, ReportType, Path, Name, Content) :
        """Append `Content` to the report file for `Name` under `Path`,
        register the report in the `report` table, and return the report key.

        If `ReportType` is unknown to the DB the file is still written (with
        a warning appended) so no information is lost."""
        FileName = Generate_Filename(self.className, Name) # it is as same as file name
        ReportKey = Generate_ReportKey(FileName)
        isInServer, ErrorMsg = self.track_exists('report', 'report_type', ReportType)
        if( isInServer ) :
            # Make file for report
            ReportFile = open(Path+FileName+".txt", "a")
            ReportFile.write('\n'+Content)
            ReportFile.close()
            # Register the report row in the DB.
            vars = "'" + ReportKey + "', '" + ReportType + "', '" + str(datetime.datetime.now()).replace(" ","") + "', '" + Path +"', '" + FileName +"'"
            self.SQL_Insert_Into_values('report', vars)
            return ReportKey
        else :
            # Unknown report type: still write the file, but flag the problem
            # inside the report itself.
            UnknownMsg = "\nAnd unknown error occured in Logger.MakeReport. You have to check it.\n"
            ReportFile = open(Path+FileName+".txt", "a")
            ReportFile.write('\n'+Content + UnknownMsg)
            ReportFile.close()
            return ReportKey
    def SQL_Select_From_Where_In(self, column_names, table_name, column_name2, values) :
        # Plain "SELECT ... WHERE ... IN" helper.
        # NOTE(review): the result set is neither fetched nor returned --
        # confirm whether callers expect rows back.
        cur = self.conn.cursor()
        cur.execute("SELECT " + column_names + " FROM " + table_name + " WHERE " + column_name2 + " IN " + values)
    def SQL_Insert_Into_values(self, table_name, values) :
        # INSERT helper; commits immediately.
        cur = self.conn.cursor()
        cur.execute("INSERT INTO " + table_name + " VALUES ( " + values + " )" )
        self.conn.commit()
    def track_exists(self, table_name, column_name2, values):
        """Return (exists, error): whether a row with column_name2 == values
        exists in table_name.

        BUG FIX: the original returned True whenever the SELECT merely
        executed, even when no row matched; the result set is now inspected
        (matching the documented intent)."""
        try :
            cur = self.conn.cursor()
            cur.execute("SELECT * FROM " + table_name + " WHERE " + column_name2 + " = '" + values + "' ")
            row = cur.fetchone()
        except psycopg2.Error as e :
            print(e) # BUG FIX: psycopg2 errors have no .message attribute in Python 3
            print('error occur!')
            return False, e
        except Exception as e :
            print(e)
            return False, e
        return (row is not None), None
    def SQL_Update_Set_Where(self, table_name, column_name, value, whereCondition, whereValue ) :
        # BUG FIX: `self.conn.cursor` is a method and was referenced without
        # calling it, so execute() was looked up on the bound method object.
        # Obtain a real cursor and commit like the other write helpers.
        cur = self.conn.cursor()
        cur.execute("UPDATE " + table_name + " SET " + column_name + " = " + value + " WHERE " + whereCondition + " = " + whereValue )
        self.conn.commit()
    def SetOrigin(self, origin_k) :
        # Validate that origin_k is a known origin key; abort the program if
        # not. BUG FIX: track_exists returns a (bool, err) tuple and the
        # original compared the tuple itself to False (never true).
        exists, _ = self.track_exists( 'origin', 'origin_key', origin_k )
        if( exists == False ) :
            SetExecuteLog(str(origin_k) + ' is not in db! program exit!', 'LOGER.SETORIGIN')
            print( 'Please check the Logger.log' )
            Exit('Logger.setorigin')
        else :
            SetExecuteLog(str(origin_k) + ' is in DB', None)
    def Connect_LogDB(self) :
        # Open the connection to the local log DB; returns True on success.
        try :
            self.conn = psycopg2.connect(self.conn_string)
        except psycopg2.Error as e :
            # DB connection failed; report and carry on disconnected.
            print(e) # BUG FIX: .message does not exist in Python 3
            return False
        return True
    def __init__(self, object=None) :
        """object -- owner exposing .db (HOST/USER/PW fields); None builds a
        bare, disconnected logger."""
        if( object == None ) :
            print('Logger is made by nothing')
            self.className = ""
            # BUG FIX: conn_string was never set on this path, so
            # Connect_LogDB raised AttributeError; an empty DSN makes the
            # connect attempt fail gracefully instead.
            self.conn_string = ""
            self.Connect_LogDB()
        else:
            self.className = str(object)
            self.object = object
            # BUG FIX: the password part of the DSN contained a scrubbed /
            # invalid token; use the owner's DB password field.
            self.conn_string ="host="+object.db.HOST+" dbname=logdb "+ "user="+object.db.USER+" password="+object.db.PW
            self.DB_Connection = self.Connect_LogDB()
    def PushLog_UnknownError(self, Admin, location, ExceptionMsg=None) :
        """Write an UNKNOWN report plus a log row for an unexpected error.

        Log structure:
          [ADMIN.ID] makes unknown error in [owner], location is in
          [location] at [Date]; exception message, owner info and admin info
          follow."""
        print('Unknown error occur!')
        # BUG FIX: the original interpolated the *builtin* `object` type here
        # instead of the owning object.
        strLogMsg = str(Admin.ID) + " makes unknown error in " + str(self.object) + ", location is in " + str(location) + " at " + str(datetime.datetime.now()) + "\n" + \
        "Exception massage is " + str(ExceptionMsg) + ".\n" + \
        "--------------- Object info ---------------\n" + \
        str(self.object.getInfo()) + "\n" + \
        "--------------- Admin info ---------------\n" + \
        str(Admin.getInfo()) + "\n" + \
        "Log End."
        self.SetOrigin('UNKNOWN_LOG')
        RK = self.MakeReport('UNKNOWN', Admin.PATH, Admin.NAME, strLogMsg)
        self.push_log('DONT_KNOW', '404', RK, 'UNKNOWN_LOG', 'UNKNOWN', 'Logger.UnknownError', 'LOGGER')
    def push_log(self, request_key, server_key, Report_Key, origin_key, status_key, return_val, program_key, extraKey=None):
        """Insert (or, for IGNORE/DONE, finalize) a row in execution_logs.

        All *_key parameters should exist in their lookup tables; when one is
        missing, a WARNING report is written but the push continues.
        extraKey -- execution_id of the row to finalize for IGNORE/DONE."""
        # BUG FIX: track_exists returns (bool, err); the original compared the
        # tuple to None (always False), so the key check never fired. Also,
        # str() with multiple arguments is a TypeError -- pass one tuple.
        if( self.track_exists('request_types', 'request_key', request_key)[0] == False or \
            self.track_exists('report', 'report_key', Report_Key)[0] == False or \
            self.track_exists('origin', 'origin_key', origin_key)[0] == False or
            self.track_exists('status', 'status_key', status_key)[0] == False or
            self.track_exists('program', 'program_key', program_key)[0] == False) :
            # MakeReport(self, ReportType, Path, Name, Content)
            self.MakeReport('WARNING_SERVICE_REPORT', '/root/바탕화면/ServerPlayer/Report/', 'Logger', 'Please key check! : ' + \
            str((request_key, Report_Key, origin_key, status_key, program_key)) )
        print('Line 129 is completed!')
        execution_id = Generate_PrivateCode() # Generate by time
        program_key = self.className
        occur_timedetect = str( datetime.datetime.now() )
        if( status_key == "IGNORE" or status_key == "DONE" ) :
            # Close out an existing execution_logs row (set its end time).
            try :
                self.SQL_Update_Set_Where('"execution_logs"', '"occur_timeends"', occur_timedetect , '"execution_id"', extraKey)
            except psycopg2.Error as e :
                print(e) # BUG FIX: .message does not exist in Python 3
                print('Sorry. push log failed because of database connection')
            except Exception as e:
                # NOTE(review): recursing into push_log from here caused an
                # infinite loop before (see the original commented-out call);
                # just note the problem instead.
                RepContent = " This error is occured at Logger.py, You have to check if exceution_log is deleted! "
            print('line 144 is completed')
        else :
            # Fresh execution_logs row.
            values = "'" + str(execution_id) + "', '" + request_key + "', '" + str(server_key) + "', '" + str(Report_Key) + "', '" + origin_key +"', '" + status_key + "', '" + str(datetime.datetime.now()) + "', '" + ""+ "', '" + return_val + "', '" + program_key + "'"
            try :
                self.SQL_Insert_Into_values( 'execution_logs', values)
            except Exception as e :
                print(e) # BUG FIX: generic exceptions lack .message
                print('Sorry. push log failed because of database connection')
## FOR TEST */
## Listening Roller coaster : Aprl 12. */
## <NAME> */
# You don't need to check under this line.
# Test is completed at Aprl 17.
def ParseDataList_FromPath(FilePath) :
    """Return the list of lines of the file at FilePath (newlines kept).

    You can check your working dir with print(os.getcwd()).
    BUG FIX: the original leaked the file handle; a context manager now
    closes it deterministically.
    """
    with open(str(FilePath), "r") as File :
        return File.readlines()
def ParseSortCont_FromString(List_forParse):
    """Split one "SORT=CONTENT" settings line into its two pieces.

    e.g. 'NAME=Wonseok' -> ('NAME', 'Wonseok'). Only the content side is
    stripped of surrounding whitespace (trailing newline included); when the
    line holds several '=' signs, everything after the second is dropped.
    """
    pieces = List_forParse.split('=')
    return str(pieces[0]), str(pieces[1]).strip()
class testClass(object):
    """Minimal Logger owner used by the self-test below: loads database
    settings from ./ProgramSettings/DataBaseSettings.txt into a
    DatabaseClass.DB instance."""
    # The SORT tags from the settings file that map 1:1 onto DB attributes.
    _DB_FIELDS = ('SORTS', 'USER', 'HOST', 'PORT', 'NAME', 'PW')
    def __init__ (self) :
        print("DB class is made")
        self.db = DatabaseClass.DB()
        self.LoadDBFiles()
    def __str__(self) :
        # Logger uses this as the program/class name.
        return "CONNECTOR"
    def LoadDBFiles(self):
        # Read every "TAG=value" line of the settings file and copy the value
        # onto the matching self.db attribute; unknown tags are reported but
        # do not abort the load. Returns nothing.
        print("load is complete")
        for raw in ParseDataList_FromPath("./ProgramSettings/DataBaseSettings.txt") :
            Sort, Content = ParseSortCont_FromString( raw )
            if Sort in self._DB_FIELDS :
                setattr(self.db, Sort, Content)
            else : # For catch the error
                print (' INPUT ERROR AT DB SETTINGS.TXT ' )
                print (' (Input) Sort : ', Sort, ' Content : ', Content)
# Self-test: build a testClass owner (loads the DB settings from disk), then
# write one report and push one execution-log row through the Logger.
# Needs a reachable local "logdb" PostgreSQL instance and the settings file.
if __name__ == "__main__" :
    print("Logger tests ......... ")
    TestClass = testClass()
    print(Generate_PrivateCode())
    L = Logger(TestClass)
    L.SetOrigin('KNOWN_LOG')
    # MakeReport(self, ReportType, Path, Name, Content)
    RK = L.MakeReport( 'ALERT_SERVICE_REPORT', '/root/바탕화면/ServerPlayer/Report/', 'Wonseok', 'LogCheck!' )
    # push_log(self, request_key, server_key, Report_Key, origin_key, status_key, return_val, program_key)
    L.push_log( 'DONT_KNOW', 'localhost', RK, 'KNOWN_LOG', 'BAD', 'None', 'CONNECTOR')
<file_sep>from SystemLoader import SystemLoader
from Logger import Logger
import psycopg2
import pandas.io.sql as pdsql
import pandas
import sqlalchemy
from pexpect import pxssh
import time, datetime
import os
# Timestamp of the previous log entry, used to record per-entry deltas.
BeforeTime = datetime.datetime.now()
def SetExecuteLog(code, ErrorCode) :
    """Append an execution-log entry to ./UserConfig/ConnectorLog.txt.

    code      -- free-form message describing what happened.
    ErrorCode -- optional error identifier; written only when truthy.
    Each entry also records the wall-clock time and the delta since the
    previous entry (kept in the module-global BeforeTime).
    BUG FIX: the original never closed the file handle; a context manager
    now flushes and releases it deterministically.
    """
    global BeforeTime
    now = datetime.datetime.now()
    with open(os.getcwd() + '/UserConfig/ConnectorLog.txt', "a") as LogFile :
        LogFile.write('Code : ' + str(code) + '\n')
        if( ErrorCode ) :
            LogFile.write('ErCode : ' + str(ErrorCode) + '\n')
        LogFile.write('Written time : ' + str(now) + ' [' + str(now - BeforeTime) + ']'+ '\n')
    BeforeTime = now
def SendLog_ConnectionBad (Logger, Admin, ID, Role, Host, ExceptionE) :
    """Report a failed connection attempt: write a report file through the
    given Logger and push a 'BAD' CONNECT row into the log DB.

    Log structure:
      [ADMIN.ID] named [ADMIN.NAME] tried to connect [ID] by [Role]@[Host]
      at [time]; the pxssh exception text is appended at the end.
    """
    parts = [
        str(Admin.ID) + " named " + str(Admin.NAME) + " tried to connect " + str(ID) + " by " + str(Role)+"@" + str(Host) + " at " + str(datetime.datetime.now()),
        "Program tried to connect, but server connection is BAD.",
        "specific report which pssh says is here : " + str(ExceptionE),
    ]
    strLogMsg = "\n".join(parts)
    Logger.SetOrigin('KNOWN_LOG')
    reportKey = Logger.MakeReport( 'SERVICE_STATUS_CHECK', Admin.PATH, Admin.NAME, strLogMsg)
    Logger.push_log( 'CONNECT', Host, reportKey, 'KNOWN_LOG', 'BAD', 'Connector.SendLog_ConnectionBad', 'CONNECTOR')
def SendLog_ConnectionGood (Logger, Admin, ID, Role, Host) :
    """Report a successful connection attempt: write a report file through
    the given Logger and push a 'GOOD' CONNECT row into the log DB.

    Log structure:
      [ADMIN.ID] named [ADMIN.NAME] tried to connect [ID] by [Role]@[Host]
      at [time]; the success note follows.
    BUG FIX: this function was copy-pasted from SendLog_ConnectionBad and
    pushed status 'BAD' with return_val 'Connector.SendLog_ConnectionBad'
    even though the connection succeeded.
    """
    strLogMsg = str(Admin.ID) + " named " + str(Admin.NAME) + " tried to connect " + str(ID) + " by " + str(Role)+"@" + str(Host) + " at " + str(datetime.datetime.now()) + "\n" + \
                ".Program tried to connect, and it was successful.\n"
    Logger.SetOrigin('KNOWN_LOG')
    RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', Admin.PATH, Admin.NAME, strLogMsg)
    Logger.push_log( 'CONNECT', Host, RK, 'KNOWN_LOG', 'GOOD', 'Connector.SendLog_ConnectionGood', 'CONNECTOR')
def Shell_login(Shell, Hostname, Username, Password):
    # Log in to the remote host through the given pxssh shell, run one
    # `uname -a` and record the remote banner in the connector log.
    # pxssh exceptions propagate to the caller on failure.
    Shell.login( Hostname, Username, Password)
    Shell.sendline('uname -a')
    Shell.prompt()
    banner = str(Hostname) + " Msg : " + Shell.before
    SetExecuteLog(banner, None)
def Update_Success(cursor, conn, Id, isSuccess) :
    """Record the outcome of a connection attempt on the `servers` row `Id`.

    Sets LAST_LOGIN to now and IS_ERROR to '' on success / 'YES' on failure,
    commits, and closes the cursor. (The two branches of the original were
    identical apart from the IS_ERROR value, and both paths committed twice;
    they are collapsed into one commit here.)
    """
    error_flag = "" if isSuccess == True else "YES"
    cursor.execute("UPDATE servers SET \"LAST_LOGIN\"=\'"+str(datetime.datetime.now())+"\' WHERE \"ID\"="+str(Id))
    cursor.execute("UPDATE servers SET \"IS_ERROR\"=\'"+str(error_flag)+"\' WHERE \"ID\"="+str(Id))
    conn.commit()
    cursor.close()
def MakeListToMsg( listUsrInput ) :
    """Join the str() of every element, one per line (each line ends with
    '\n', including the last); an empty list yields an empty string."""
    return "".join(str(item) + '\n' for item in listUsrInput)
class Connector(object) :
    '''
    Connects to every server registered in the local PostgreSQL database and
    sorts them into GoodServerList / BadServerList, recording each outcome in
    the `servers` table and in the log DB via Logger.
    Construct with a SystemLoader that supplies the DB settings (.DB) and the
    administrator record (.Admin); connection work starts immediately.
    '''
    # def __init__(self) :
    #     self.SystemLoader = SystemLoader()
    def __init__(self, objects = None):
        # you must input 'SystemLoader in here'
        self.SystemLoader = objects
        self.db = self.SystemLoader.DB          # local DB settings (DatabaseClass.DB)
        self.admin = self.SystemLoader.Admin    # administrator record for reports
        self.ServerList = [[]]                  # rows of the `servers` table, as lists
        self.conn_string = ""                   # psycopg2 DSN, filled by Connect_DB
        self.logger = Logger(self)
        self.GoodServerList= []                 # servers that accepted an SSH login
        self.BadServerList = []                 # servers whose SSH login failed
        self.Connecting()
    def Connecting(self) :
        # First, you have to connect DataBase in your local computer.
        # PostgreSQL will be the best choice. But if you want to use other version,
        # please check program version.
        self.Connect_DB()
        # Second, program will get server data from your local database.
        self.Connect_getServerDB()
        # Third, program checks whether the servers are okay to connect.
        self.Connect_Servers()
    def Connect_Servers(self) :
        # Try an SSH login on every server row loaded from the local DB.
        # A successful login updates the row (LAST_LOGIN, IS_ERROR='') and
        # appends the server to GoodServerList; a pxssh failure marks
        # IS_ERROR='YES' and appends it to BadServerList. Either way a log
        # row is pushed through Logger.
        # i is each server list of serverlists
        for i in self.ServerList :
            # The rule of env.host = 'user@host:port'
            # NOTE(review): str_tmp is built but never used -- candidate for removal.
            str_tmp = str(i[5])+"@"+str(i[3])+":"+str(i[1])
            try :
                # shell and host setting
                # NOTE(review): assumes row layout [id, port, proto, ip, pw, user, ...]
                # (matches the Server.Server(...) fixture order) -- confirm.
                s = pxssh.pxssh()
                hostname = str(i[3])
                username = str(i[5])
                password = str(i[4])
                # Login to shell, if it has error, it may goes under 'except' lines.
                Shell_login(s, hostname, username, password)
                # If you want check what if server respond in pxssh, execute under lines.
                ####### s.sendline('whoami')
                ####### s.prompt()
                ####### print( "before:\n"+ s.before )
                s.logout()
                cursor = self.conn.cursor()
                Update_Success(cursor, self.conn, i[0], True)
                # Logger, Admin, ID, Role, Host, ExceptionE
                SendLog_ConnectionGood(self.logger, self.admin, i[0], username, hostname)
                # Added Aprl 12
                self.GoodServerList.append(i)
            except pxssh.ExceptionPxssh as e :
                cursor = self.conn.cursor()
                Update_Success(cursor, self.conn, i[0], False)
                SendLog_ConnectionBad(self.logger, self.admin, i[0], username, hostname, e)
                SetExecuteLog( str(hostname) + " pxssh failed on login.", str(e))
                # Added Aprl 12
                self.BadServerList.append(i)
    def Connect_getServerDB(self) :
        # Load all rows of the `servers` table into self.ServerList (via
        # pandas + sqlalchemy) and write the list to the connector log.
        SetExecuteLog('(getServerDB) ' + self.conn_string, None)
        self.conn = psycopg2.connect(self.conn_string)
        the_frame = pdsql.read_sql_table("servers", self.engine)
        self.ServerList = the_frame.values.tolist()
        ListMsg = MakeListToMsg(self.ServerList)
        SetExecuteLog('Server List : ' + ListMsg, None)
        # remain log
    def Connect_DB(self):
        # Build the psycopg2 DSN and the sqlalchemy engine from self.db.
        # Only PostgreSQL ('psql') is supported; other kinds just print a note.
        if self.db.SORTS == 'psql' :
            self.conn_string = "host="+self.db.HOST+" dbname="+self.db.NAME+" user="+self.db.USER+" password="+self.db.PW
            #print( self.conn_string ) # for log
            # dialect+driver://username:password@host:port/database
            # (the stored settings carry surrounding quotes, hence the replace)
            self.engine = sqlalchemy.create_engine("postgresql+psycopg2://" + self.db.USER.replace("'","") + ":" + self.db.PW.replace("'","")+"@" + self.db.HOST.replace("'","") + ":" + self.db.PORT.replace("'","")+ "/" + self.db.NAME.replace("'",""))
            self.conn = psycopg2.connect(self.conn_string)
            self.db.IS_CONNECTED = True
        else :
            print ( "Sorry, " + self.db.SORTS + " isn't supported yet.....")
    def __str__(self) :
        # Used by Logger as the program/class name.
        return "CONNECTOR"
#
# Develop Log ( Aprl 15 )
#
# Aprl 15
# <NAME>
#
# Some lines are deleted and added some '\n' characters.
# Because I changed IDE from Atom to VS Code.<file_sep># @Author Wonseok
# @Designer Wonseok
# @Start at June, 4
# @Last at June, 4
# @Music
# @Information
import os, sys
sys.path.insert( 0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import Server
from pexpect import pxssh
import psycopg2
class SCPCommand(object) :
    """Builds one `scp` shell command: FromDir -> connectId@IP:ToDir."""
    def __init__(self, _fromDir="", _connectId="", _iP="", _toDir="") :
        self.FromDir = _fromDir      # local source path (+ file name)
        self.connectId = _connectId  # remote user name
        self.IP = _iP                # remote host address
        self.ToDir = _toDir          # remote destination path (+ file name)
    def Set(self, _fromDir, _connectId, _ip, _toDir) :
        # Re-point every field of the command.
        # BUG FIX: the original wrote `self._toDir`, leaving self.ToDir (the
        # field MakeCommand reads) untouched, so the destination never changed.
        self.FromDir = _fromDir
        self.connectId = _connectId
        self.IP = _ip
        self.ToDir = _toDir
    def MakeCommand(self) :
        # Return the scp command string, or False when any field is None.
        # NOTE(review): fields default to "" (never None), so this guard only
        # fires when a caller sets a field to None explicitly -- confirm intent.
        if( self.FromDir == None or \
            self.connectId == None or \
            self.IP == None or \
            self.ToDir == None ) :
            return False
        else :
            tmpStr = " scp -o StrictHostKeyChecking=no " + str(self.FromDir) + " " + self.connectId + "@" + \
                str(self.IP) + ":" + str(self.ToDir)
            return tmpStr
    def __str__(self) :
        return self.MakeCommand()
class SCPManager(object) :
    '''
    Keeps one SCPCommand per slave server and fires them with sshpass + scp.
    NOTE(review): uses raw_input(), i.e. Python 2 -- port to input() for Python 3.
    '''
    def __init__(self) :
        # Maps a slave Server object -> the SCPCommand prepared for it.
        self.Slaves = {}
    def PrintTargetCommands(self) :
        # Dump "<server id>  <scp command>" for every registered slave.
        for i in (self.Slaves.keys()) :
            print("{}\t\t{}".format(str(i.ID), str(self.Slaves[i].MakeCommand())))
    def TryAddSlave(self,SlaveServer) :
        # Register a slave with an empty command; servers flagged as erroring
        # are refused. Returns True when the slave was added.
        if( SlaveServer.IS_ERROR == "YES" ) :
            return False
        _SCPCommand = SCPCommand("", "", "", "")
        self.Slaves[SlaveServer] = _SCPCommand
        return True
    def TargetSetById(self, id) :
        # Interactively (raw_input) fill in the scp command for the server
        # whose ID matches `id`, or for every server when id == 'all'.
        SlaveList = self.Slaves.keys()
        for Serv in SlaveList :
            print(type(Serv), Serv.getInfo())
            if( id == 'all') :
                _fromDir = str(raw_input("From Dir (+FileName) : "))
                _TryConID = Serv.CONNECTION_USERNAME
                _IP = Serv.CONNECTION_IPADDRESS
                _toDir = str(raw_input("Where to go? (+FileName) : "))
                Command = SCPCommand(_fromDir, _TryConID, _IP, _toDir)
                self.Slaves[Serv] = Command
            if( Serv.ID == id ) :
                _fromDir = str(raw_input("From Dir (+FileName) : "))
                _TryConID = Serv.CONNECTION_USERNAME
                _IP = Serv.CONNECTION_IPADDRESS
                _toDir = str(raw_input("Where to go? (+FileName) : "))
                Command = SCPCommand(_fromDir, _TryConID, _IP, _toDir)
                self.Slaves[Serv] = Command
    def SetCommandForAllTargets(self, Comm) :
        # Point every registered slave at the same (already-built) command.
        for Serv in self.Slaves :
            self.Slaves[Serv] = Comm
    def SendToAllTargets(self) :
        # Execute each slave's scp command through sshpass; slaves whose
        # command is incomplete (MakeCommand() is False) are skipped.
        # NOTE(review): the password is printed and passed on the command
        # line -- both leak credentials; consider key-based auth instead.
        for Serv in self.Slaves :
            Comm = self.Slaves[Serv].MakeCommand()
            if( Comm == False ) : continue
            print(Serv.CONNECTION_PASSWORD)
            os.system( "sshpass -p" + Serv.CONNECTION_PASSWORD + self.Slaves[Serv].MakeCommand() )
            print('done!')
            raw_input()
    def DeleteServerInTargets(self, ServerKey) :
        # Drop the slave (raises KeyError when it is not registered).
        self.Slaves.pop(ServerKey)<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 17
# @Last at Aprl. 17
# @Music ANTI - ZICO( feat.. )
# @Information This class is parent class for backup system. This will need some class.
# @details
# First, This class need server class. For example, if you want to back-up your
# postgreSQL data to server, you have to regist master server and slave server.
# (master = server which is backed up. slave = server which back up.)
# -> slave will take data from master to save in itself.
# (If you want to make database backup system) Second, you need DataBase.
# If you want to make copy-database, just copy it and put it in your database.
import os, sys
sys.path.insert(0, os.getcwd())
from ObjectInfo import Server
from ObjectInfo import AdministratorClass
from ObjectInfo import DatabaseClass
from System import Logger
import time, datetime
class BackupEngine(object) :
    """Parent class of the backup system: copies data from a Master server
    to a Slave server (the slave takes data from the master to save).

    Construction validates the three collaborators by their str() tags
    ("SERVER" / "ADMINISTRATORCLASS") and then performs a connection test
    against both servers; on failure the engine logs the problem and drops
    its server references.
    """
    def __init__(self, Master, Slave, Admin) :
        self.IS_INITIALIZE = False
        # BUG FIX: the original compared with `is not`, i.e. object identity
        # against a string literal, which only works by accident of CPython
        # string interning (and is a SyntaxWarning on modern Pythons).
        if( (str(Master) != "SERVER") or
            (str(Slave) != "SERVER") or
            (str(Admin) != "ADMINISTRATORCLASS")) :
            print(str(Master), str(Slave), str(Admin))
            print('Backup initialize fail!')
            return
        # Engine will back up from master to slave.
        self.Master = Master
        self.Slave = Slave
        self.Admin = Admin
        # Server connection test.
        if( self.ConnectionTest_toServer() ) :
            print('Test accepted!')
        else :
            print('BackupEngine tried to connect master & server. But it failed.')
    def ConnectionTest_toServer(self) :
        # Both servers must accept a connection; otherwise log the failure
        # and drop the references. Returns True when both are reachable.
        IS_OKAY_MASTER, Msg_MASTER = self.Master.isTryConnect()
        IS_OKAY_SLAVE, Msg_SLAVE = self.Slave.isTryConnect()
        if( (IS_OKAY_MASTER and IS_OKAY_SLAVE ) == False ) :
            self.SendLog_ConnectionWrong(IS_OKAY_MASTER, IS_OKAY_SLAVE, Msg_MASTER, Msg_SLAVE)
            self.Master = None
            self.Slave = None
            return False
        return True
    def SendLog_ConnectionWrong(self, ConnMaster, ConnSlave, Msg_MASTER, Msg_SLAVE ) :
        # LogStructure :
        ## [Admin.ID] tried to connect these at [Date.time] :
        ## Master : [Master.ID], [Master.Role]@[MAST.Host] / Slave : [Slave.ID], [Slave.Role]@[Slav.Host]
        ## But server Connection is BAD.
        ## Master connection status : [ConnMaster], Slave connection status : [ConnSlave]
        ## Here is the Msg from master : [Msg_Master]
        ## Here is the Msg from slave : [Msg_Slave]
        StrLogMsg = str(self.Admin.ID) + " tried to connect these at " + str(datetime.datetime.now()) + " : \n" + \
        "Master : " + str(self.Master.ID) + ", " + str(self.Master.ROLE) + "@" + str(self.Master.HOST) + \
        " / Slve : " + str(self.Slave.ID) + ", " + str(self.Slave.ROLE) + "@" + str(self.Slave.HOST) + "\n" + \
        "But server Connection is BAD." + \
        "Master connection status : " + str(ConnMaster) + ", Slave connection status : " + str(ConnSlave) + "\n" + \
        "Here is the Msg from master : " + str(Msg_MASTER) + "\n" + \
        "Here is the Msg from slave : " + str(Msg_SLAVE) + "\n"
        BackupLogger = Logger.Logger(self)
        BackupLogger.SetOrigin('KNOWN_LOG')
        RK = BackupLogger.MakeReport('SERVICE_STATUS_CHECK', self.Admin.PATH, self.Admin.NAME, StrLogMsg)
        # BUG FIX: the original mapping was inverted (a True, i.e. healthy,
        # connection was labelled "BAD" and vice versa).
        if( ConnMaster ) :
            ConnMaster = "GOOD"
        else :
            ConnMaster = "BAD"
        if( ConnSlave ) :
            ConnSlave = "GOOD"
        else :
            ConnSlave = "BAD"
        # Push master log and slave log.
        # BUG FIX: the slave row previously reused the master's status value.
        BackupLogger.push_log('CONNECT', self.Master.ID, RK, 'KNOWN_LOG', ConnMaster, 'BackupEngine.SendLog_ConnectionBad', 'BACKUPENGINE')
        BackupLogger.push_log('CONNECT', self.Slave.ID, RK, 'KNOWN_LOG', ConnSlave, 'BackupEngine.SendLog_ConnectionBad', 'BACKUPENGINE')
#
# Manual smoke test: verify that BackupEngine initializes and can reach the
# (single) server used here as both master and slave. Needs live SSH + DB.
#
if __name__ == "__main__" :
    # Throw-away fixture: server record (id, port, protocol, ip, password, user, ...).
    S = Server.Server(1, 22, 'ssh', '172.16.31.10', '3@mHze=5K{1wj){}', 'root', 'Wonseok.J', 970403, 'ubuntu', 'wonseokbuntu', None, '2018-03-02', None, None)
    S.db = DatabaseClass.DB("psql", "'localhost'", "'5432'", "'testdb'", "'12345'", "'test'")
    S.db.Connect_DB()
    # Both casings are read by different collaborators (Logger uses .db).
    S.DB=S.db
    S.Admin = AdministratorClass.Administrator('Wonseok', '/root/바탕화면/ServerPlayer/Report/', 'root', 'Admin', 'root')
    S.local_admin = S.GetServerOwner()
    BE = BackupEngine(S, S, S.Admin)<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys
sys.path.insert(0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import AdministratorClass
from ObjectInfo import Server
from CheckerEngine import CheckerEngine
import time, datetime
from System import Logger
from pexpect import pxssh
import getpass
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 18
# @Last at Aprl. 20
# @Music Sing a song at TV program - Shin Yong Jae.
# @Information This class is made for checking the server system (the class below is
# ServerChecker). It is a child class of CheckerEngine; add further checking
# functions and values to it as needed.
class ServerChecker( CheckerEngine) :
'''
@ Written by wonseok.
@ Date at 2018.04.20
You don't need to define ConditionChecker function or sending log function
which sends 'server connection bad' or 'database connection bad' status.
Server checker just needs to define some functions which can control servers,
check attacks from bad hackers.
And also, ServerChecker needs to make a report which can be seen easily.
Day 18 :
Define class and functions < Checker engine which are parent class of this class >
Day 19 :
Define this class and some functions which are initailizer, condition checker,
shell commands, SSH commanding functions, some functions for security check.
Day 20 :
I will make some functions about making report, and for sending this mail,
I will make a mail class named, 'mailchecker'.
'''
# Nothing special.
# But you have to notice that this class uses parent class's initializer.
def __init__(self, LocalServer=None, LocalDatabase=None, LocalAdmin=None ) :
CheckerEngine.__init__(self, LocalServer, LocalDatabase, LocalAdmin)
self.Logger = Logger.Logger(self)
self.EngineName = "SERVERCHECKER"
# This program will check condition of local Database and etc.
# If checking status is bad, parent class will remains log.
def ServerChecker_ConditionCheck(self) :
isOkay, msg = self.CheckerConditionCheck()
if( isOkay ) :
return True, "Good"
else :
return False, msg
# Pexpect defines wonderful functions for using shell script in remote server.
# It's really good to use, and don't need hard codes.
def MakeShell(self) :
# For using shell script at remote server, you have to define shell.
self.Shell = pxssh.pxssh()
def LoginShell(self) :
# After execute makeshell function, you have to login in server.
host = self.LocalServer.CONNECTION_IPADDRESS # local server is
user = self.LocalServer.CONNECTION_USERNAME # master server.
pw = self.LocalServer.CONNECTION_PASSWORD
self.Shell.login(host, user, pw) # This is the most important line
# You just execute this function and putting msg in this function's parameter,
# you can send message to remote server.
def ThrowMsg_Shell(self, msg) :
self.Shell.sendline(msg) # run a command
self.Shell.prompt() # match the prompt - pexpect github.
def LogoutShell(self) :
self.Shell.logout()
'''
From this line, program will check some security checks at remote server.
You don't need to execute these commands in every servers.
This commands will execute for you automatically.
What you just need to check is, execute this function, check the report. That's all!
'''
def LastCommand(self) :
# Last command tells you who visited and succeed to login in your server.
# It's a very basic command of checking security.
self.MakeShell()
self.LoginShell()
self.ThrowMsg_Shell("last")
data = self.Shell.before
self.LogoutShell()
return data
def SSHAttemptsCommand_Debian(self, num) :
# This commands will tell you who wanted to log in your remote server.
# This function is for debian( ubuntu is based at debian ) OS.
self.MakeShell()
self.LoginShell()
self.ThrowMsg_Shell("tail -n "+str(num)+" /var/log/auth.log | grep sshd")
data = self.Shell.before
self.LogoutShell()
return data
def SSHAttemptsCommand_CentRedHat(self, num) :
# CentOS and RedHat have to execute this function.
self.MakeShell()
self.LoginShell()
self.ThrowMsg_Shell("tail -n "+str(num)+" /var/log/secure | grep 'sshd'")
data = self.Shell.before
self.LogoutShell()
return data
def BashHistory(self, num) :
# This function tells you what hackers did in your server.
# But you have to know that, history includes commands that you did.
self.MakeShell()
self.LoginShell()
self.ThrowMsg_Shell("tail -n "+str(num)+" ~/.bash_history")
data = str(self.Shell.before)
print(self.Shell.before)
self.LogoutShell()
return data
'''
From this line, program will make the result from command to report.
You can check the reports by using below functions.
'''
#@param content is the msg from server. You have to make report this value.
# caller is the command which you used in server.
# - if you use 'last' command at server, caller will be 'last'
def MakeReport(self, content, caller) :
# LogStructure :
#
# File name : (ServerID).(ServerName).(Command).(Date) - new file
# + (ServerID).(ServerName).(Command) - based file.
FileName_new = str(self.LocalServer.ID)+"."+str(self.LocalServer.SERVER_NAME)+"."+str(caller)+"."+ (str(datetime.datetime.now()).replace(" ",""))
FileName_base = str(self.LocalServer.ID)+"."+str(self.LocalServer.SERVER_NAME)+"."+str(caller)
newFile = open(self.LocalAdmin.PATH + FileName_new + ".txt", "w")
baseFile = open(self.LocalAdmin.PATH + FileName_base + ".txt", "a")
newFile.write(content+'\n\n')
baseFile.write(content+'\n\n')
newFile.close()
baseFile.close()
    def __str__(self) :
        # Type tag other components use to identify this object.
        return "SERVERCHECKER"
# Manual test drive: builds a Server, DB and Administrator by hand and
# exercises a couple of checks.
# NOTE(review): real-looking credentials and IPs are hard-coded below; they
# should live in configuration and stay out of version control.
if __name__ == "__main__" :
    S = Server.Server(1, 22, 'ssh', '192.168.127.12', '3@mHze=5K{1wj){}', 'root', 'Wonseok.J', 970403, 'ubuntu', 'wonseokbuntu', None, '2018-03-02', None, None)
    S.DB = DatabaseClass.DB("psql", "'localhost'", "'5432'", "'testdb'", "'1234'", "'test'")
    S.DB.Connect_DB()
    S.db = S.DB
    S.Admin = AdministratorClass.Administrator('Wonseok', '/root/바탕화면/ServerPlayer/Report/', 'root', 'Admin', 'root')
    # NOTE(review): this rebinds the class name to an instance, so the class
    # cannot be constructed again afterwards in this module.
    ServerChecker = ServerChecker(S, S.DB, S.Admin)
    #S.ThrowCommand('last')
    print( ServerChecker.ServerChecker_ConditionCheck() )
    #ServerChecker.LastCommand()
    #ServerChecker.SSHAttemptsCommand_Debian()
    #ServerChecker.dont()
    data = ServerChecker.BashHistory(500)
    ServerChecker.MakeReport( data , 'BashHistory')
    data = ServerChecker.LastCommand()
    ServerChecker.MakeReport( data, 'LastCommand' )
# This class will have information between this program with program log & configure DB
# After load onnection, connector has to have connection with servers.
import psycopg2
from AdministratorClass import Administrator
import sqlalchemy
class DB(object) :
    """Connection settings and helpers for the program's configuration DB.

    SORTS selects the engine: 'psql' (PostgreSQL) is the only one
    implemented; 'mssql', 'mysql', 'orac' and 'sqlite' are reserved.
    """
    def __init__(self, Sorts=None, Host=None, Port=None, Name=None, Pw=None, User=None, DB_KEY=None, SERV_KEY=None):
        print('Log : Database initializer is loaded! ')
        self.SORTS = Sorts        # engine keyword (see class docstring)
        self.HOST = Host          # host ip xxx.xxx.xxx.xxx
        self.PORT = Port          # port (1000 ~ 9999)
        self.NAME = Name          # database name
        self.PW = Pw              # database password
        self.USER = User          # database user
        self.DB_KEY = DB_KEY      # primary key of this DB row
        self.IS_CONNECTED = False # True only after a successful Connect_DB()
        self.OBJECT = None
        self.SERVER_KEY = SERV_KEY
    def AdminToDatabaseConnect(self, Admin) :
        """Load the admin's DB profile and return a connected DB (or None)."""
        if( self.IS_CONNECTED == False ) :
            print("You should try connect Database first!")
            return
        else :
            cur = self.conn.cursor()
            # Security fix: parameterized queries instead of concatenating
            # values into the SQL string.
            cur.execute("SELECT * FROM db_profile WHERE owner_id = %s", (str(Admin.ID),))
            owner_info = cur.fetchall()
            if not owner_info :
                # Robustness: avoid IndexError when the profile is missing.
                print("No db_profile row found for owner " + str(Admin.ID))
                return
            owner_info = owner_info[0]
            DB_KEY = owner_info[0]
            HOST = owner_info[1]
            PW = owner_info[2]
            PORT = owner_info[4]
            SORTS = owner_info[5]
            cur.execute("SELECT * FROM database WHERE db_key = %s", (str(DB_KEY),))
            db_info = cur.fetchall()
            if not db_info :
                print("No database row found for key " + str(DB_KEY))
                return
            db_info = db_info[0]
            USER = db_info[5]
            NAME = db_info[4]
            TmpDB = DB(SORTS, HOST, PORT, NAME, PW, USER, DB_KEY, Admin.ID)
            TmpDB.Connect_DB()
            TmpDB.SERVER_KEY = Admin.ID
            # Bug fix: callers assign the result (S.local_db = ...), so the
            # connected DB must be returned (the original returned None).
            return TmpDB
    def getInfo(self) :
        """Return a one-line dump of every field (format kept as-is for logs)."""
        # NOTE(review): there is no space between NAME and PW in this format;
        # kept byte-identical because existing logs may depend on it.
        return ( str(self.SORTS) + " " + str(self.HOST) + " " + str(self.PORT) + " " + str(self.NAME) + \
        str(self.PW) + " " + str(self.USER) + " " + str(self.DB_KEY) + " " + str(self.IS_CONNECTED) + " " + \
        str(self.OBJECT) + " " + str(self.SERVER_KEY) + "\n" )
    def printInfo(self) :
        """Print a human-readable summary; the password is withheld."""
        print('Database Information ~')
        # Bug fix: wrap fields in str() so default (None) values do not
        # raise TypeError during concatenation.
        print('Name : ' + str(self.NAME))
        print('SORTS : ' + str(self.SORTS))
        print('PORT : ' + str(self.PORT))
        print('HOST : ' + str(self.HOST))
        print('USER : ' + str(self.USER))
        print('PW : ' + "Check the file (for security)")
        print('')
    def Connect_DB(self) :
        """Open a psycopg2 connection and update IS_CONNECTED.

        Returns False on failure or unsupported engine; leaves the open
        connection in self.conn on success.
        """
        print('connect db')
        if self.SORTS == 'psql' :
            try :
                self.conn_string = "host="+self.HOST+" dbname="+self.NAME+" user="+self.USER+" password="+self.PW
                print( self.conn_string )  # NOTE(review): this echoes the password; consider masking
                self.conn = psycopg2.connect(self.conn_string)
            except psycopg2.Error as e :
                print(e)
                # Bug fix: the original fell through and set IS_CONNECTED = True
                # even after a failed connect.
                self.IS_CONNECTED = False
                return False
            self.IS_CONNECTED = True
        else :
            print("Sorry, " + self.SORTS + " isn't supported yet.")
            return False
    def isTryConnect(self) :
        """Attempt a connection; returns (True, "GOOD") or (False, error)."""
        print('connect db')
        if self.SORTS == 'psql' :
            try :
                self.conn_string = "host="+self.HOST+" dbname="+self.NAME+" user="+self.USER+" password="+self.PW
                print( self.conn_string )  # NOTE(review): this echoes the password; consider masking
                self.conn = psycopg2.connect(self.conn_string)
            except psycopg2.Error as e :
                return False, e
            except Exception as e :
                return False, e
            return True, "GOOD"
        else :
            return False, "That DB is not supported."
    def __str__(self) :
        # Type tag other components use to identify this object.
        return "DATABASECLASS"
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 5
# @Last at Aprl. 15
# @Music Castle of magic by MC Sniper
# @Information This class is only for server each of being managed.
# Unfortunately, now, if your DB is not 'postgreSQl', you can't control it.
from pexpect import pxssh # Added at Aprl 15
import psycopg2
from DatabaseClass import DB
from AdministratorClass import Administrator
import time, datetime
import os, sys
sys.path.insert(0, os.getcwd())
from System import Logger
class Server (object) :
    """One managed remote server: connection details plus ssh and log helpers."""
    def __init__(self, i=None, p=None, s=None, ip=None, pa=None, u=None, n=None, id=None, os=None, Na=None, IE=None, LST_DATE=None, db_key=None, obj_key=None) :
        self.ID = i                          # primary key of this server row
        self.CONNECTION_PORT = p             # connection port ( ssh = 22 )
        self.CONNECTION_SORT = s             # 'ssh', 'ftp', ...
        self.CONNECTION_IPADDRESS = ip       # e.g. 192.168.10.1
        self.CONNECTION_PASSWORD = pa        # login password
        self.CONNECTION_USERNAME = u         # e.g. root
        self.OWNER_NAME = n                  # owner's display name
        self.OWNER_ID = id                   # FK: Administrator primary key
        self.SERVER_OS = os                  # e.g. 'ubuntu' (parameter shadows the os module only here)
        self.SERVER_NAME = Na                # e.g. WonseokServer
        self.IS_ERROR = IE                   # "YES" when the last connection attempt failed
        self.CONNECTION_LASTDATE = LST_DATE
        self.DB_KEY = db_key
        self.OBJECT_KEY = obj_key
        self.DB = None
        self.Admin = None
        # "local" objects describe this server's owner side.
        self.local_db = None
        self.local_admin = None
    def isTryConnect(self) :
        """Try an ssh login + `ls -al`; returns (ok, msg) and updates IS_ERROR."""
        try :
            shell = pxssh.pxssh()
            shell.login( self.CONNECTION_IPADDRESS, self.CONNECTION_USERNAME, self.CONNECTION_PASSWORD)
            shell.sendline('ls -al')
            shell.prompt()
            print( "before\n" + shell.before)
            shell.logout()
        except pxssh.ExceptionPxssh as e :
            self.IS_ERROR = "YES"  # record the failed attempt
            return False, e
        self.IS_ERROR = None       # connection is healthy
        return True, 'GOOD'
    def ThrowCommand(self, comd) :
        """Run one shell command on the server; returns (ok, message)."""
        try :
            shell = pxssh.pxssh()
            shell.login( self.CONNECTION_IPADDRESS, self.CONNECTION_USERNAME, self.CONNECTION_PASSWORD)
            shell.sendline(comd)
            shell.prompt()
            print("command : " + shell.before)
            shell.logout()
        except pxssh.ExceptionPxssh as e :
            # Bug fix: the logger must receive this *instance* (str(self) ==
            # "SERVER"); the original passed the Server class object.
            ServerLogger = Logger.Logger(self)
            self.SendLog_ThrowMsgError(ServerLogger, comd, e)
            return False, e
        return True, 'GOOD'
    def GetServerOwner(self) :
        """Load this server's owner row into self.local_admin.

        Requires self.DB to be connected; returns (ok, message).
        """
        if self.DB.IS_CONNECTED == False :
            print("This server doesn't have local database!\n \
You hae to execute db.Connect_DB first!")
            return False, "Not connected local_db"
        else :
            try :
                cur = self.DB.conn.cursor()
                # Security fix: parameterized query instead of string concat.
                cur.execute("SELECT * FROM administrator WHERE admin_key = %s", (self.OWNER_ID,))
                owner_info = cur.fetchall()
            except psycopg2.Error as e :
                # TODO(review): also push this to the log DB once available.
                print(e)
                return False, e
            except Exception as e :
                print('error:', e)
                ServerLogger = Logger.Logger(self)
                ServerLogger.PushLog_UnknownError(self.Admin, 'Server.GetServerOwner', e)
                return False, e
            if len(owner_info) != 1 :
                # admin_key is a primary key, so this should never happen.
                print('Owner is wrong! The system must have just one owner!')
                if not owner_info :
                    # Bug fix: bail out instead of indexing an empty result.
                    return False, "Owner not found"
            owner_info = owner_info[0]
            tmpAdmin = Administrator(owner_info[0], owner_info[1], owner_info[2], owner_info[3], owner_info[4])
            tmpAdmin.printInfo()
            self.local_admin = tmpAdmin
            return True, "Success"
    def SendLog_ThrowMsgError (self, Logger, command, ExceptionMsg) :
        """Write a report file and push a DB log entry for a failed command send.

        Log structure:
          [ADMIN.ID] tried to throw [command] to [ServerID] by [Role]@[Host] at [time]
        """
        strLogMsg = str(self.Admin.ID) + " tried to throw " + str(command) + " to " + str(self.ID) + " by " + str(self.CONNECTION_USERNAME)+"@" + str(self.CONNECTION_IPADDRESS) + " at " + str(datetime.datetime.now()) + "\n" + \
        "Server was " + str(self.IS_ERROR) + ". And program tried to connect, but server connection is BAD." + "\n" + \
        "specific report which pssh says is here : " + str(ExceptionMsg)
        Logger.SetOrigin('KNOWN_LOG')
        RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', self.Admin.PATH, self.Admin.NAME, strLogMsg)
        Logger.push_log('REQ_COMMAND', self.ID, RK, 'KNOWN_LOG', 'BAD', 'Server.SendLog_ThrowMsgError', 'SERVER')
    def SendLog_ConnectionBad (self, Logger, ExceptionMsg) :
        """Write a report file and push a DB log entry for a failed connection."""
        strLogMsg = str(self.Admin.ID) + " tried to connect " + str(self.ID) + " by " + str(self.CONNECTION_USERNAME)+"@" + str(self.CONNECTION_IPADDRESS) + " at " + str(datetime.datetime.now()) + "\n" + \
        "Server was " + str(self.IS_ERROR) + ". And program tried to connect, but server connection is BAD." + "\n" + \
        "specific report which pssh says is here : " + str(ExceptionMsg)
        Logger.SetOrigin('KNOWN_LOG')
        RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', self.Admin.PATH, self.Admin.NAME, strLogMsg)
        Logger.push_log( 'CONNECT', self.ID, RK, 'KNOWN_LOG', 'BAD', 'Server.SendLog_ConnectionBad', 'SERVER')
    def __str__ (self) :
        # Type tag other components use to identify this object.
        return "SERVER"
    def getInfo(self) :
        """Return every plain field as a multi-line string.

        CAUTION: never include other objects here — a mutual getInfo would
        recurse without bound.
        """
        strMsg = "ID = " + str(self.ID) + \
        "\nConnection_port = " + str(self.CONNECTION_PORT) + \
        "\nCONNECTION_SORT = " + str(self.CONNECTION_SORT) + \
        "\nCONNECTION_IPADDRESS = " + str(self.CONNECTION_IPADDRESS) + \
        "\nCONNECTION_PASSWORD = " + str(self.CONNECTION_PASSWORD) + \
        "\nCONNECTION_USERNAME = " + str(self.CONNECTION_USERNAME) + \
        "\nOWNER_NAME = " + str(self.OWNER_NAME) + \
        "\nOWNER_ID = " + str(self.OWNER_ID) + \
        "\nSERVER_OS = " + str(self.SERVER_OS) + \
        "\nSERVER_NAME = " + str(self.SERVER_NAME) + \
        "\nIS_ERROR = " + str(self.IS_ERROR) + \
        "\nCONNECTION_LASTDATE = " + str(self.CONNECTION_LASTDATE) + \
        "\nDB_KEY = " + str(self.DB_KEY) + \
        "\nOBJECT_KEY = " + str(self.OBJECT_KEY)
        return strMsg
#
# Manual test: check that a Server can load its Administrator and Database.
# NOTE(review): hard-coded credentials below should live in configuration.
#
if __name__ == "__main__" :
    S = Server(1, 22, 'ssh', '172.16.58.3', 'makeitpopwebuzzz!1', 'root', 'Wonseok.J', 1230, 'ubuntu', 'wonseokbuntu', None, '2018-03-02', None, None)
    S.DB = DB("psql", "'localhost'", "'5432'", "'testdb'", "'1234'", "'test'")
    S.DB.Connect_DB()
    S.Admin = Administrator('Wonseok', '/root/바탕화면/ServerPlayer/Report/', 'root', 'Admin', 'root')
    S.GetServerOwner()
    # NOTE(review): AdminToDatabaseConnect returns None in the original
    # DatabaseClass, so local_db ends up unset — verify against that class.
    S.local_db = S.DB.AdminToDatabaseConnect( S.local_admin )
#
# Develop Log ( Aprl 15 )
#
# Aprl 15
# <NAME>
#
# I think server has to have connector's function named 'Connect_Servers'.
# So I copied that function to Server.py
#
# Aprl 17
# Designer Wonseok.J
#
# I added some functions named GetServerOwner in this class. Just using this class,
# You can get server's owner. And once you call this function, automatically server makes admin
# class. It will initialize that with server.
# <file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 13
# @Last at Aprl. 15
# @Music Angel - Drunken Tiger with Yoon - mire, bizzy
# @Information This class supports you that you can do cron service easy.
# If you want to execute program at regular time, you can regist it easily.
# And also, cron service remains logs in log DB.
import os, sys
sys.path.insert(0, os.getcwd() )
from Logger import Logger
from ObjectInfo import Server
import time, datetime
# Scheduler class make schedule using crontab command for user in easy way.
# It will check if the server is okay to connect, and if that is okay then it will send command to server.
# You don't worry to make command and send it to server.
class Scheduler(object) :
    """Interactively builds a crontab entry and installs it on a remote Server.

    Flow: verify the server is reachable -> gather the five schedule fields
    and a command from the user -> append the entry to the remote crontab
    -> log any failure.
    """
    def __init__(self, object) :
        # Users enter fields in Korean order (Month -> Date -> Day -> Hour ->
        # Minute), while crontab expects Minute Hour Day Month Date.
        # KeyOfList maps each input slot onto its crontab position.
        if( str(object) != "SERVER" ) : # only Server instances may register cron jobs
            # NOTE(review): attributes stay unset on this path, so later calls
            # will raise AttributeError — consider raising here instead.
            print( str(object) + "'can't register any cron service! ")
        else :
            self.Server = object
            self.KeyOfList = {
                0 : 3,
                1 : 4,
                2 : 2,
                3 : 1,
                4 : 0
            }
            self.db = object.db  # NOTE(review): confirm callers set Server.db (Server defines DB/local_db)
            self.InputList = [None, None, None, None, None]   # raw values, input order
            self.OutputList = [None, None, None, None, None]  # values, crontab order
            self.CommandAtLst = ""  # the shell command to schedule
    def MakeCommand(self) :
        """Return the shell line that appends the new entry to the remote crontab."""
        strTmp = ""
        for i in range(0, 5) :
            strTmp += str(self.OutputList[i]) + " "
        strTmp += self.CommandAtLst
        return ('cat <(crontab -l) <(echo "' + strTmp + '") | crontab -')
    def PrintAndInput(self) :
        # Prompt for the five schedule fields, then show the finished table.
        for i in range(0, 5) :
            self.PrintValues(i)
            self.InputValue(i)
        self.PrintValues(5)
    def InputCommand(self) :
        """Ask for the command that should run on the schedule."""
        print('Please input command you want to execute : ')
        self.CommandAtLst = raw_input()
    def InputValue(self, num) :
        """Prompt for schedule field `num` (0=Month .. 4=Minute) and store it."""
        if( num == 0 ) :
            # Month
            print('What month are you going to do it? : ')
        elif( num == 1 ) :
            # Date (day of week)
            print('What date are you going to do it? \n')
            print('SUN MON TUE WED THU FRI SAT \n')
            print('0 1 2 3 4 5 6\n')
            print('-> ')
        elif( num == 2 ) :
            # Day
            print('What day are you going to do it? : ')
        elif( num == 3 ) :
            # Hour
            print('What hour are you going to do it? : ')
        elif( num == 4 ) :
            # Minute
            print('What minute are you going to do it? : ')
        # Store the raw answer and mirror it into the crontab-ordered list.
        self.InputList[num] = raw_input()
        self.OutputList[(self.KeyOfList[num])] = self.InputList[num]
    def PrintValues(self, num) :
        """Clear the screen and show the crontab fields entered so far ('.' = unset)."""
        strTable = "MIN HOUR Day MONTH DATE"
        strTmp = ""
        for i in range(0, 5) :
            if( self.OutputList[i] == None ) :
                strTmp += '.\t'
            else :
                strTmp += str(self.OutputList[i])+'\t'
        os.system('clear')
        print('Now (Every = *) : ' + '\n' + strTable)
        print(strTmp + '\n\n')
    def MakeSchedule(self) :
        """Loop until the user confirms a schedule; returns the crontab line."""
        while( True ) :
            self.PrintAndInput()
            self.InputCommand()
            strTmp = self.MakeCommand()
            flag = raw_input(strTmp + '\nCommand will be excuted! is it right? (y/n) ')
            if( flag == 'y' or flag == 'Y' ) :
                print('Sending...')
                break
            else :
                print('Do you want to exit? (y/n) ')
                # Bug fix: the answer was stored in a misspelled variable
                # ('falg'), so the exit check below always saw the old value.
                flag = raw_input()
                if( flag == 'y' or flag == 'Y') :
                    return
                print('Retry it!')
                self.InputList = [None, None, None, None, None]
                self.OutputList = [None, None, None, None, None]
                flag = raw_input()
        # reached via break: the user confirmed this command line
        return strTmp
    def MakeAndSendCommand(self):
        """Check server health, build a schedule, send it, log failures."""
        SchedulerLogger = Logger(self)  # used when connecting/sending fails
        if( self.Server.IS_ERROR == 'YES' ) :
            # The last report marked the server offline; offer a re-test.
            print('The report says server is not online. Do you want to test the server? (y/n)')
            isOkay = raw_input()
            # Bug fix: compare strings with != rather than 'is not'
            # (identity comparison on str objects is unreliable).
            if( (isOkay != 'y') and (isOkay != 'Y')) :
                # User declined the re-test.
                print('return to before menu!')
                flag = raw_input()
                return
            isOkayServer, ServerMsg = self.Server.isTryConnect()
            if( isOkayServer == False ) :
                # User wanted to connect, but the server is still bad.
                print('Sorry, the server connection is so bad.')
                self.SendLog_ConnectionBad(SchedulerLogger, ServerMsg)
                return
            print('Server connection is successful!')
            print('Press any key to continue!')
            flag = raw_input() # just UI pacing
        # Server is healthy (or just recovered): gather and send the schedule.
        Usr_Comd = self.MakeSchedule()
        isSuccess, ExMsg = self.Server.ThrowCommand(Usr_Comd)
        if( isSuccess == True ) :
            self.Server.ThrowCommand('crontab -l')  # show the resulting crontab
            print('I sent message successfully!')
        else :
            print("I coudln't send message to server!")
            self.SendLog_ThrowMsgError(SchedulerLogger, Usr_Comd, ExMsg)
    def SendLog_ThrowMsgError (self, Logger, command, ExceptionMsg) :
        """Report a failed command send to the report dir and the log DB.

        Log structure:
          [ADMIN.ID] tried to throw [command] to [ServerID] by [Role]@[Host] at [time]
        """
        # NOTE(review): Server elsewhere exposes 'Admin' (capital A); confirm
        # 'admin' is assigned on this Server before this runs.
        strLogMsg = str(self.Server.admin.ID) + " tried to throw " + str(command) + " to " + str(self.Server.ID) + " by " + str(self.Server.CONNECTION_USERNAME)+"@" + str(self.Server.CONNECTION_IPADDRESS) + " at " + str(datetime.datetime.now()) + "\n" + \
        "Server was " + str(self.Server.IS_ERROR) + ". And program tried to connect, but server connection is BAD." + "\n" + \
        "specific report which pssh says is here : " + str(ExceptionMsg)
        Logger.SetOrigin('KNOWN_LOG')
        RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', self.Server.admin.PATH, self.Server.admin.NAME, strLogMsg)
        Logger.push_log('REQ_COMMAND', self.Server.ID, RK, 'KNOWN_LOG', 'BAD', 'Scheduler.SendLog_ThrowMsgError', 'SCHEDULER')
    def SendLog_ConnectionBad (self, Logger, ExceptionMsg) :
        """Report a failed connection attempt to the report dir and the log DB."""
        strLogMsg = str(self.Server.admin.ID) + " tried to connect " + str(self.Server.ID) + " by " + str(self.Server.CONNECTION_USERNAME)+"@" + str(self.Server.CONNECTION_IPADDRESS) + " at " + str(datetime.datetime.now()) + "\n" + \
        "Server was " + str(self.Server.IS_ERROR) + ". And program tried to connect, but server connection is BAD." + "\n" + \
        "specific report which pssh says is here : " + str(ExceptionMsg)
        Logger.SetOrigin('KNOWN_LOG')
        RK = Logger.MakeReport( 'SERVICE_STATUS_CHECK', self.Server.admin.PATH, 'Wonseok', strLogMsg)
        Logger.push_log( 'CONNECT', self.Server.ID, RK, 'KNOWN_LOG', 'BAD', 'Scheduler.SendLog_ConnectionBad', 'SCHEDULER')
    def __str__(self) :
        # Type tag other components use to identify this object.
        return "SCHEDULER"
# note :
# lst update aprl 14 with 'Stronger than you - Sans and ... trio'
# Manual test: build a Server and drive the scheduler end-to-end.
if (__name__ == "__main__") :
    '''
    # You need server
    testServer = Server.Server()
    # You have to make scheduler
    Scheduler = Scheduler(testServer)
    # You have to execute function named 'MakeSchedule'
    Scheduler.MakeSchedule()
    # Test completed. it was successful. ( ~ Aprl 15 )
    '''
    # NOTE(review): credentials are hard-coded, and 'Scheduler = Scheduler(...)'
    # rebinds the class name so it cannot be constructed again afterwards.
    testServer = Server.Server(1, 22, 'ssh', '172.16.17.32', '3@mHze=5K{1wj){}', 'root', 'Wonseok', 970403, 'ubuntu', 'WonseokTestbuntu',None,'2018-01010101')
    Scheduler = Scheduler(testServer)
    Scheduler.MakeAndSendCommand()
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 4
# @Last at Aprl. 20
# @Music Parallax, U-Won-Jae
# @Information Engine class is place in System UI.
# In UI, user has to set the some settings.
# If not, user may have some issues to execute program.
from SystemLoader import SystemLoader
from Kernel import Kernel
from Logger import Logger
import os, sys
sys.path.insert(0, os.getcwd())
from ObjectInfo import AdministratorClass
from Scheduler import Scheduler
from UserInterface import UIManager
import fabric
import datetime
from anytree import Node, RenderTree
def Exit(code) :
    """Log a fatal startup failure and terminate the program.

    Error codes: 100 = unsupported OS, 101 = PostgreSQL missing,
    102 = wrong Python major version.
    """
    print("WARNNING : You can't execute program. \n Error code : " + str(code))
    SetExecuteLog('Engine initialize is failed', code)
    # Bug fix: propagate the failure code as the process exit status
    # (the bare exit() always exited with 0).
    sys.exit(code)
def clearScreen(sort) :
    """Clear the terminal: sort 1 = POSIX ('clear'), 2 = Windows ('cls').

    Any other value is treated as an unsupported OS and aborts via Exit(100).
    """
    commands = {1: 'clear', 2: 'cls'}
    if sort in commands :
        os.system(commands[sort])
    else :
        Exit(100)
BeforeTime = datetime.datetime.now()  # timestamp of the previous log entry
def SetExecuteLog(code, ErrorCode) :
    """Append a startup-log entry: message, optional error code, elapsed time.

    code      -- human-readable progress message.
    ErrorCode -- written only when truthy.
    """
    global BeforeTime
    now = datetime.datetime.now()
    # Bug fix: use a context manager so the log file is always closed
    # (the original handle was never closed).
    with open(os.getcwd() + '/UserConfig/EngineLog.txt', "a") as LogFile :
        LogFile.write('Code : ' + str(code) + '\n')
        if( ErrorCode ) :
            LogFile.write('ErCode : ' + str(ErrorCode) + '\n')
        LogFile.write('Written time : ' + str(now) + ' [' + str(now - BeforeTime) + ']'+ '\n')
    BeforeTime = now
def ParseSortCont_FromString(List_forParse):
    """Split one 'KEY=value' line into (Sort, Content); Content is stripped.

    EX) 'NAME=Wonseok' -> Sort = 'NAME', Content = 'Wonseok'.
    """
    # Bug fix: split on the first '=' only, so values that themselves
    # contain '=' (e.g. passwords) are not truncated.
    ParsedStr = List_forParse.split('=', 1)
    Sort = str(ParsedStr[0])
    Content = str(ParsedStr[1]).strip()
    return Sort, Content
def InstallRequirements() :
    # First-launch helper: install the Python dependencies listed in
    # requirements.txt via pip.
    os.system('pip install -r requirements.txt')
class Engine(object) :
    """Environment checks plus the top-level menu loop of the program.

    Construction verifies the OS choice, a PostgreSQL install and the
    Python major version; launch() then loads the SystemLoader and Kernel
    and drives the anytree-based user interface.
    """
    def __init__(self) :
        SetExecuteLog('Engine initialize is started', 0)
        self.CheckOS()      # OS check
        self.DBCheck()      # DB check
        self.PythonCheck()  # Python version check
        self.isLaunchFirst()
        SetExecuteLog('OS, DB, Python check is completed', 0)
        flag = raw_input('System check complete!')
    def CheckOS(self) :
        # Error code 100 = OS error. The answer selects 'clear' vs 'cls'.
        self.OS_SORT = int(raw_input('What is your OS? ( Linux = 1, Windows = 2, Others = 3 ) : '))
        clearScreen(self.OS_SORT)
        print('OS check ... [OK] ')
    def DBCheck(self) :
        # Error code 101 = DB error.
        if( self.isPostgreInstall() == False ) :
            print('System needs postgreSQL!')
            Exit(101)
        else :
            os.system('./UserConfig/ex.sh')  # local DB bootstrap script
            print('Database check ... [OK] ')
    def PythonCheck(self) :
        # Error code 102 = Python version error (this codebase is Python 2).
        if( self.isPythonVer2() == False ) :
            print('Your python version must be 2 !')
            Exit(102)
        else :
            print('Python check ... [OK] ')
    def isPythonVer2(self) :
        """Return True when the interpreter's major version is 2."""
        print('CHECK IF SYSTEM IS RUNNING PYTHON VERSION 2 .......... ')
        return sys.version_info[0] == 2
    def isPostgreInstall(self) :
        """Heuristic PostgreSQL detection: its data directory must exist."""
        print('CHECK IF SYSTEM HAS POSTGRES VERSION ................. ')
        return os.path.exists("/var/lib/postgresql/")
    def launch(self) :
        """Main entry point: load the subsystems, then run the UI loop."""
        clearScreen(self.OS_SORT)
        print('System will be loaded. Please wait!')
        self.load_SystemLoader()
        print('Kernel will be loaded. Please wait!')
        self.load_Kernel()
        L = raw_input('Press any key to continue....')
        # UI logic lives in UIManage below.
        self.UIManage()
    def load_SystemLoader(self) :
        """Create the SystemLoader and load DB + user configuration files."""
        self.SystemLoaderObject = SystemLoader()
        SetExecuteLog('System Loader initializer is successfully loaded',None)
        self.SystemLoaderObject.LoadDBFiles()
        SetExecuteLog('Database is successfully loaded.',None)
        self.SystemLoaderObject.LoadUserFiles()
        SetExecuteLog('User Files are successfully loaded.',None)
        SetExecuteLog('System Loadeer is loaded.',None)
    def load_Kernel(self) :
        """Create the Kernel from the loaded system configuration."""
        self.KernelObj = Kernel( self.SystemLoaderObject )
        SetExecuteLog('Kernel is loaded.', None)
    def isLaunchFirst(self) :
        """On first run (Execute=no in Configure.txt) install requirements
        and flip the flag to yes."""
        # Bug fix: context managers close both handles; the original leaked
        # the write handle, so 'Execute=yes' could stay unflushed.
        with open('Configure.txt') as userConfigure :
            ConfigureLines = userConfigure.readlines()
        for i in ConfigureLines :
            Sort, Content = ParseSortCont_FromString( i )
            if Sort == 'Execute' :
                if Content == 'no' :
                    InstallRequirements()
                    with open('Configure.txt', 'w') as updated :
                        updated.write('Execute=yes')
    def UpgradeOS(self, OSList) :
        """Upgrade every targeted server; only 'ubuntu' and 'cent' are supported.

        OSList maps OS name -> list of server descriptors understood by
        Kernel.serverToServer().
        """
        UbuntuServers = []
        CentServers = []
        for i in OSList['ubuntu'] :
            UbuntuServers.append( self.KernelObj.serverToServer(i) )
        for i in OSList['cent'] :
            CentServers.append( self.KernelObj.serverToServer(i) )
        # Ubuntu: install the release upgrader, then run it.
        for i in UbuntuServers :
            print(i, type(i))
            i.ThrowCommand('apt-get -y install update-manager-core')
            i.ThrowCommand('do-release-upgrade')
        # CentOS: plain yum update.
        for i in CentServers :
            # Bug fix: the original command was missing its closing quote.
            i.ThrowCommand("su -c 'yum update'")
        tmp = raw_input('Upgrade Done!')
    def UIManage(self) :
        """Walk the anytree menu tree until the user reaches the Null root."""
        '''
        @ Recent 2018 05 07 23:51 Wonseok
        [Null]
        |----[UserInterface] o
        |------[PrintServerManageMenu] o
        | |------[Target Manage] o
        | | |-------[Target Manage Menu] o
        | | |--------[AddtargetMenu] o
        | | |--------[DeltargetMenu] o
        | | |--------[System UpdateMenu]
        | | | |----------------[Operating System Upgrade ]
        | | | |----------------[Update & Upgrade ]
        | | | |----------------[ update at cron ]
        | | |--------[Throw command Menu] o
        | | |----------------[Throw File (scp)] o
        | | | |------------------[ Set the command for file ] o
        | | | |------------------[ Send command ] o
        | | |----------------[Throw Command]
        | |------[Install database]
        | |------[Go Backup Console]
        | |------[Firewall manage]
        |
        |------[DatabaseManage]
        |------[Configuration Mode]
        |------[Security Mode]
        |------[Power Off]
        '''
        self.UI = UIManager.UserInterface(self)
        # currentNode is a one-element list so menu handlers can rebind it.
        currentNode=[]
        currentNode.append(self.UI.nodUI)
        targets = [[]]  # servers currently selected as command targets
        while True :
            print( currentNode[0])
            if currentNode[0].name == "Null" :
                break
            else :
                if currentNode[0].name == "UserInterface" :
                    self.UI.PrintMainMenu(len(self.KernelObj.BadServerList), len(self.KernelObj.GoodServerList), currentNode)
                    continue
                elif currentNode[0].name == "PrintServerManageMenu" :
                    self.UI.PrintServerManageMenu(targets, currentNode)
                    continue
                elif currentNode[0].name == "DatabaseManage":
                    pass  # not developed yet
                elif currentNode[0].name == "Configuration Mode":
                    pass  # not developed yet
                elif currentNode[0].name == "Security Mode":
                    pass  # not developed yet
                elif currentNode[0].name == "Power Off" :
                    print("Good bye my kkammi ................ ")
                elif currentNode[0].name == "Target Manage" :
                    self.UI.PrintAllTargetsDetails(targets, self.KernelObj.BadServerList, self.KernelObj.GoodServerList, currentNode)
                    continue
                elif currentNode[0].name == "Install database" :
                    pass  # not developed yet
                elif currentNode[0].name == "Go Backusp Console" :
                    # NOTE(review): the tree diagram says "Go Backup Console";
                    # confirm which spelling the UI tree actually uses.
                    pass  # not developed yet
                elif currentNode[0].name == "Firewall manage" :
                    pass  # not developed yet
                elif currentNode[0].name == "Target Manage Menu" :
                    self.UI.TargetManageMenu(targets, currentNode)
                    continue
                elif currentNode[0].name == "AddtargetMenu" :
                    self.UI.AddtargetMenu(targets, self.KernelObj.BadServerList, self.KernelObj.GoodServerList, currentNode)
                    continue
                elif currentNode[0].name == "DeltargetMenu" :
                    self.UI.DeltargetMenu(targets, self.KernelObj.BadServerList, self.KernelObj.GoodServerList, currentNode)
                    continue
                elif currentNode[0].name == "System Update" :
                    self.UI.SystemUpdateMenu(targets, currentNode)
                    continue
                elif currentNode[0].name == "Operating System Upgrade" :
                    OSList = self.UI.OperatingSystemUpgrade(targets, currentNode)
                    self.UpgradeOS(OSList)
                elif currentNode[0].name == "Throw command menu" :
                    self.UI.func_ThrowCommandMenu(currentNode)
                    continue
                elif currentNode[0].name == "Throw File" :
                    self.UI.func_ThrowFilescp(currentNode)
                    continue
                elif currentNode[0].name == "Set the command for file" :
                    self.UI.func_SetCommandForFile(targets, currentNode)
                elif currentNode[0].name == "Send command" :
                    self.UI.func_SendCommand(currentNode)
                # No handler issued `continue`: climb one level up the tree.
                currentNode[0] = currentNode[0].parent
'''
Below line is for test.
You don't need to think of it.
'''
# Aprl 15
# May 2 added UI, and some class will be added.
# Program entry point: run the environment checks, then start the UI loop.
if __name__ == "__main__" :
    E = Engine()
    E.launch()
<file_sep># @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 13
# @Last at Aprl. 13
# @Music Paranoid - U Won Jae
# @Information This class represents a 'Human'. With more time, it could be redesigned using inheritance.
class Administrator(object) :
    """A human administrator: identity, credentials and report directory."""
    def __init__(self, Name=None, Path=None, Pw=None, Mode=None, Id=None):
        print('Log : Administrator initializer is loaded! ')
        self.NAME = Name  # display name
        self.PATH = Path  # directory where this admin's reports are written
        self.PW = Pw      # password (never printed by printInfo)
        self.MODE = Mode  # role, e.g. 'Admin'
        self.ID = Id      # primary key in the administrator table
    def printInfo(self) :
        """Print a human-readable summary; the password is withheld."""
        print('Administrator Information ~')
        print('Name : ' + str(self.NAME))
        # Bug fix: wrap PATH and MODE in str() so default (None) values do
        # not raise TypeError during concatenation.
        print('PATH : ' + str(self.PATH))
        print('ID : ' + str(self.ID))
        print('MODE : ' + str(self.MODE))
        print('PW : ' + "Check the file (for security)")
        print('')
    def getInfo(self) :
        """Return every field as one string (format kept as-is for logs)."""
        # NOTE(review): unlike Server.getInfo, fields are not newline-separated;
        # kept unchanged because existing consumers may rely on this format.
        strMsg = "NAME = " + str(self.NAME) + \
        "PATH = " + str(self.PATH) + \
        "PW = " + str(self.PW) + \
        "MODE = " + str(self.MODE) + \
        "ID = " + str(self.ID)
        return strMsg
    def __str__(self) :
        # Type tag other components use to identify this object.
        return "ADMINISTRATORCLASS"
<file_sep>import os
import sys
sys.path.insert(0, os.getcwd() )
# {value for value in variable}ys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
#
from ObjectInfo import AdministratorClass
from ObjectInfo import DatabaseClass
def ParseDataList_FromPath(FilePath) :
    """Return the list of lines of the file at FilePath.

    Uses a context manager so the handle is always closed (the original
    leaked the open file object).
    You can check your dir by using 'print(os.getcwd())'.
    """
    with open(str(FilePath), "r") as File:
        return File.readlines()
def ParseSortCont_FromString(List_forParse):
    """Split one "SORT=CONTENT" settings line into (Sort, Content).

    EX) NAME=Wonseok  ->  Sort = "NAME", Content = "Wonseok"

    Splits on the FIRST '=' only (maxsplit=1), so values that themselves
    contain '=' (e.g. passwords) are no longer truncated; the trailing
    newline is stripped from Content.
    """
    ParsedStr = List_forParse.split('=', 1)
    Sort = str(ParsedStr[0])
    Content = str(ParsedStr[1]).strip()
    return Sort, Content
class SystemLoader(object) :
    """Loads DB and administrator settings from ./ProgramSettings into objects.

    Settings files contain one "KEY=VALUE" pair per line; unknown keys are
    reported to stdout, matching the original error messages.
    """

    # Keys accepted per settings file; anything else is an input error.
    _DB_KEYS = ('SORTS', 'USER', 'HOST', 'PORT', 'NAME', 'PW')
    _USER_KEYS = ('NAME', 'PW', 'ID', 'MODE', 'PATH')

    def __init__ (self):
        # self.Admin = Administrator()
        self.Admin = AdministratorClass.Administrator()
        self.DB = DatabaseClass.DB()
        print('System Loader is Loaded! ... [OK] ')

    def EndProgram(self, Code):
        # Shutdown hook kept for API compatibility; currently a no-op.
        pass

    def _ApplySettings(self, target, allowed_keys, file_path, error_header):
        # Shared loader for both settings files: parse each "KEY=VALUE" line
        # and set it on `target`; print error_header for unrecognised keys.
        # Returns nothing.
        for line in ParseDataList_FromPath(file_path):
            Sort, Content = ParseSortCont_FromString(line)
            if Sort in allowed_keys:
                setattr(target, Sort, Content)
            else:  # For catch the error
                print (error_header)
                print (' (Input) Sort : ', Sort, ' Content : ', Content)

    def LoadDBFiles(self):
        # Populate self.DB from DataBaseSettings.txt. Returns nothing.
        self._ApplySettings(self.DB, self._DB_KEYS,
                            "./ProgramSettings/DataBaseSettings.txt",
                            ' INPUT ERROR AT DB SETTINGS.TXT ')
        # # for check
        # self.DB.printInfo()

    def LoadUserFiles(self):
        # Populate self.Admin from UserSetting.txt. Returns nothing.
        self._ApplySettings(self.Admin, self._USER_KEYS,
                            "./ProgramSettings/UserSetting.txt",
                            ' INPUT ERROR AT USER SETTINGS.TXT ')
        # # for check
        # self.Admin.printInfo()

    def printInfo(self) :
        self.Admin.printInfo()
        self.DB.printInfo()

    def __str__(self) :
        return "SYSTEMLOADER"
<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, sys
# @Author Wonseok
# @Designer Wonseok
# @Start at Aprl. 12
# @Last at Aprl. 13
# @Music Do you heare the people sing? (Les miserables), To X-Girlfriend (san-E)
# @Information This class is only for server each of being managed.
# Unfortunately, now, if your DB is not 'postgreSQl', you can't control it.
from Logger import Logger
from pexpect import pxssh
sys.path.insert(0, os.getcwd() )
from ObjectInfo import DatabaseClass
from ObjectInfo import AdministratorClass
import time, datetime
class Configurator(object) :
    # Configurator supports functions which are about 'connect', 'back-up scheduling' ..
    # You can write some special functions in here to expand your own servers

    def Shell_login(self, Shell, Hostname, Username, Password) :
        """Log `Shell` (a pxssh session) in and print the pre-prompt output."""
        Shell.login( Hostname, Username, Password )
        Shell.prompt()
        print("Log 24 at Configurator" + Shell.before)

    def Shell_logout(self, Shell) :
        Shell.logout()

    def SendLog_NotConnecting(self, ExceptionClass) :
        """Push a warning report/log entry about a server that could not be reached."""
        ServerLogger = Logger( self.Server )
        # ReportType, Path, Name, Content
        s = str(datetime.datetime.now())
        CONTENT = "The server host (" + str(self.Server.ID) + ") has some issue to connect. " \
                  "here is the log from server host." + str(ExceptionClass) + " This log " \
                  "is written at Configurator-SendLog_Notconnecting. Today date is " + \
                  s + " Log end. "
        RK = ServerLogger.MakeReport( 'WARNING_SERVICE_REPORT', self.Admin.PATH, self.Admin.NAME, CONTENT)
        ServerLogger.push_log('CONNECT', str(self.Server.ID), RK, 'KNOWN_LOG', 'BAD', 'PXSSHEXCEPTION', 'SERVERCLASS')

    def ConnectSSH(self) :
        # as similar as Connector's Connect_Servers
        # Read the address before the try-block so the except-branch can report
        # it even when pxssh.pxssh() itself raises (the original hit a
        # NameError on `hostname` in that case).
        hostname = self.Server.CONNECTION_IPADDRESS
        try :
            sh = pxssh.pxssh()
            username = self.Server.CONNECTION_USERNAME
            password = self.Server.CONNECTION_PASSWORD
            self.Shell_login(sh, hostname, username, password)
        except pxssh.ExceptionPxssh as e :
            self.SendLog_NotConnecting(e)
            print(' ' + str(hostname) + 'connection is bad! check the log!')

    def __init__(self, object, adminObject) :
        # Configurator must control server only < at Aprl 13 >
        # If configurator becomes bigger and has to support more functions,
        # code it on here.
        # NOTE(review): when str(object) != "SERVER" no attributes are set at
        # all — later method calls would raise AttributeError. Confirm intended.
        if( str(object) == "SERVER" ) :
            self.master = "SERVER"
            self.Server = object
            self.Admin = adminObject

    def __str__(self) :
        return 'Configurator'
def ParseDataList_FromPath(FilePath) :
    """Return the list of lines of the file at FilePath.

    Uses a context manager so the handle is always closed (the original
    leaked the open file object).
    You can check your dir by using 'print(os.getcwd())'.
    """
    with open(str(FilePath), "r") as File:
        return File.readlines()
#
# The codes below over here are for test.
# Last updated at Aprl 13. < Tough cookie - Zico (feat. Don.mills) >
# Test is completed at Aprl 15. < Dejavu ( Remastered ) - BeWhY >
def ParseSortCont_FromString(List_forParse):
    """Split one "SORT=CONTENT" settings line into (Sort, Content).

    EX) NAME=Wonseok  ->  Sort = "NAME", Content = "Wonseok"

    Splits on the FIRST '=' only (maxsplit=1), so values that themselves
    contain '=' are no longer truncated; the trailing newline is stripped.
    """
    ParsedStr = List_forParse.split('=', 1)
    Sort = str(ParsedStr[0])
    Content = str(ParsedStr[1]).strip()
    return Sort, Content
class testClass(object):
    """Minimal harness that loads DB settings into a DatabaseClass.DB instance.

    Exists only to exercise the settings loader; see the commented-out driver
    code below the class.
    """

    # Keys recognised in DataBaseSettings.txt; anything else is an input error.
    _DB_KEYS = ('SORTS', 'USER', 'HOST', 'PORT', 'NAME', 'PW')

    def __init__ (self) :
        # logger SETTINGS
        print("DB class is made")
        self.db = DatabaseClass.DB()
        self.LoadDBFiles()

    def __str__(self) :
        return "Server"

    def LoadDBFiles(self):
        # Populate self.db from DataBaseSettings.txt ("KEY=VALUE" per line).
        # Returns nothing.
        print("load is complete")
        DatabaseData = ParseDataList_FromPath("./ProgramSettings/DataBaseSettings.txt")
        for line in DatabaseData:
            Sort, Content = ParseSortCont_FromString(line)
            if Sort in self._DB_KEYS:
                setattr(self.db, Sort, Content)
            else : # For catch the error
                print (' INPUT ERROR AT DB SETTINGS.TXT ' )
                print (' (Input) Sort : ', Sort, ' Content : ', Content)
# if __name__ == "__main__" :
# TestClass = testClass()
# Conf1 = Configurator(TestClass)
| 02db9fc5eb9079b6bc0aa51cea21e3063d17b2ba | [
"Markdown",
"Python",
"Text",
"Shell"
] | 23 | Shell | wonseok0403/SOUL | ac1a0a8108628383b4afa5fd73e0d4439f455e73 | cae0e5492b8c9ee6e8bfc935941686ac594bc3e9 | |
refs/heads/master | <repo_name>paradigmdigitalapps/GAEStarterKit<file_sep>/util/SeaSurfForm.py
"""
Provides a base class for wtforms usage throughout application. Integrates SeaCurf for CSRF protection, such that the csrf token is automatically included
in each form.
"""
from flask import g
from flask.ext.wtf import Form
from flask.ext.wtf.form import _Auto
from wtforms import HiddenField
from app import app
class SeaSurfForm(Form):
    """Base WTForms form that carries a SeaSurf CSRF token field.

    A HiddenField is attached to the class before each request (add_csrf) and
    its value is filled in with the current token whenever an instance is
    constructed, so every subclass form renders the token automatically.
    """
    def __init__(self, formdata=_Auto, obj=None, prefix='', csrf_context=None, secret_key=None, csrf_enabled=None, *args, **kwargs):
        super(SeaSurfForm, self).__init__(formdata, obj, prefix, csrf_context, secret_key, csrf_enabled, *args, **kwargs)
        # The field name is configurable via app config, default 'csrf_token'.
        csrf_name = app.config.get('CSRF_COOKIE_NAME', 'csrf_token')
        # `csrf` is the SeaSurf extension imported at the bottom of this module
        # (circular-import workaround); _get_token() is a private SeaSurf API —
        # NOTE(review): confirm it is stable across SeaSurf versions.
        getattr(self, csrf_name).data = csrf._get_token()
    @staticmethod
    @app.before_request
    def add_csrf():
        # Runs before every request: (re)attach the hidden CSRF field to the
        # class under the configured name so forms built during the request
        # include it.
        csrf_name = app.config.get('CSRF_COOKIE_NAME', 'csrf_token')
        # token =
        #
        # if not token:
        #     raise ValueError('Expected CSRF token here')
        setattr(SeaSurfForm,
                csrf_name,
                HiddenField(default=''))
| b2088f98d09c915752453469c0cd8be7bfdfa7f3 | [
"Python"
] | 1 | Python | paradigmdigitalapps/GAEStarterKit | b6636ff5a1cffc3c2b1129e7fcdb78ddbf9ac0e1 | 8b696ee0ff65cf1b49fba49094bd3fd22f6ee427 | |
refs/heads/master | <file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
struct FOscDataElemStruct;
#ifdef OSC_OscReceiverActor_generated_h
#error "OscReceiverActor.generated.h already included, missing '#pragma once' in OscReceiverActor.h"
#endif
#define OSC_OscReceiverActor_generated_h
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_RPC_WRAPPERS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_RPC_WRAPPERS_NO_PURE_DECLS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_EVENT_PARMS \
struct OscReceiverActor_eventOnOscReceived_Parms \
{ \
FName Address; \
TArray<FOscDataElemStruct> Data; \
FString SenderIp; \
};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_CALLBACK_WRAPPERS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_INCLASS_NO_PURE_DECLS \
private: \
static void StaticRegisterNativesAOscReceiverActor(); \
friend OSC_API class UClass* Z_Construct_UClass_AOscReceiverActor(); \
public: \
DECLARE_CLASS(AOscReceiverActor, AActor, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(AOscReceiverActor) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_INCLASS \
private: \
static void StaticRegisterNativesAOscReceiverActor(); \
friend OSC_API class UClass* Z_Construct_UClass_AOscReceiverActor(); \
public: \
DECLARE_CLASS(AOscReceiverActor, AActor, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(AOscReceiverActor) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_STANDARD_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API AOscReceiverActor(const FObjectInitializer& ObjectInitializer); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(AOscReceiverActor) \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(AOscReceiverActor); \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API AOscReceiverActor(AOscReceiverActor&&); \
NO_API AOscReceiverActor(const AOscReceiverActor&); \
public:
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_ENHANCED_CONSTRUCTORS \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API AOscReceiverActor(AOscReceiverActor&&); \
NO_API AOscReceiverActor(const AOscReceiverActor&); \
public: \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(AOscReceiverActor); \
DEFINE_DEFAULT_CONSTRUCTOR_CALL(AOscReceiverActor)
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_PRIVATE_PROPERTY_OFFSET
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_9_PROLOG \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_EVENT_PARMS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_GENERATED_BODY_LEGACY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_RPC_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_CALLBACK_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_INCLASS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_STANDARD_CONSTRUCTORS \
public: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_GENERATED_BODY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_RPC_WRAPPERS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_CALLBACK_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_INCLASS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h_12_ENHANCED_CONSTRUCTORS \
private: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverActor_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
<file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "GeneratedCppIncludes.h"
#include "Private/OscPrivatePCH.h"
#include "Private/Common/OscFunctionLibrary.h"
#ifdef _MSC_VER
#pragma warning (push)
#pragma warning (disable : 4883)
#endif
PRAGMA_DISABLE_DEPRECATION_WARNINGS
void EmptyLinkFunctionForGeneratedCodeOscFunctionLibrary() {}
// Cross Module References
OSC_API UClass* Z_Construct_UClass_UOscFunctionLibrary_NoRegister();
OSC_API UClass* Z_Construct_UClass_UOscFunctionLibrary();
ENGINE_API UClass* Z_Construct_UClass_UBlueprintFunctionLibrary();
UPackage* Z_Construct_UPackage__Script_OSC();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AddSendOscTarget();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsBlob();
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscDataElemStruct();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsBool();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsFloat();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsInt();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsString();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromBlob();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromBool();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromFloat();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromInt();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromString();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopBlob();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopBool();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopFloat();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopInt();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopString();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushBlob();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushBool();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushFloat();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushInt();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushString();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_SendOsc();
OSC_API UFunction* Z_Construct_UFunction_UOscFunctionLibrary_SendOscBundle();
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscMessageStruct();
// End Cross Module References
void UOscFunctionLibrary::StaticRegisterNativesUOscFunctionLibrary()
{
UClass* Class = UOscFunctionLibrary::StaticClass();
static const FNameNativePtrPair Funcs[] = {
{ "AddSendOscTarget", &UOscFunctionLibrary::execAddSendOscTarget },
{ "AsBlob", &UOscFunctionLibrary::execAsBlob },
{ "AsBool", &UOscFunctionLibrary::execAsBool },
{ "AsFloat", &UOscFunctionLibrary::execAsFloat },
{ "AsInt", &UOscFunctionLibrary::execAsInt },
{ "AsString", &UOscFunctionLibrary::execAsString },
{ "FromBlob", &UOscFunctionLibrary::execFromBlob },
{ "FromBool", &UOscFunctionLibrary::execFromBool },
{ "FromFloat", &UOscFunctionLibrary::execFromFloat },
{ "FromInt", &UOscFunctionLibrary::execFromInt },
{ "FromString", &UOscFunctionLibrary::execFromString },
{ "PopBlob", &UOscFunctionLibrary::execPopBlob },
{ "PopBool", &UOscFunctionLibrary::execPopBool },
{ "PopFloat", &UOscFunctionLibrary::execPopFloat },
{ "PopInt", &UOscFunctionLibrary::execPopInt },
{ "PopString", &UOscFunctionLibrary::execPopString },
{ "PushBlob", &UOscFunctionLibrary::execPushBlob },
{ "PushBool", &UOscFunctionLibrary::execPushBool },
{ "PushFloat", &UOscFunctionLibrary::execPushFloat },
{ "PushInt", &UOscFunctionLibrary::execPushInt },
{ "PushString", &UOscFunctionLibrary::execPushString },
{ "SendOsc", &UOscFunctionLibrary::execSendOsc },
{ "SendOscBundle", &UOscFunctionLibrary::execSendOscBundle },
};
FNativeFunctionRegistrar::RegisterFunctions(Class, Funcs, ARRAY_COUNT(Funcs));
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AddSendOscTarget()
{
struct OscFunctionLibrary_eventAddSendOscTarget_Parms
{
FString IpPort;
int32 ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FIntPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Int, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAddSendOscTarget_Parms, ReturnValue), METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FStrPropertyParams NewProp_IpPort = { UE4CodeGen_Private::EPropertyClass::Str, "IpPort", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAddSendOscTarget_Parms, IpPort), METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_IpPort,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "@brief Add Ip:Port to the available OSC send targets.\n@param IpPort \"ip:port\". e.g. \"192.168.0.1:7777\"\n@return The created TargetIndex to pass to the SendOsc function.\n\nUse this function to add target at runtime. Generally, it is best\nto define your targets in the OSC plugin settings.\n\n@see SendOsc" },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "AddSendOscTarget", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x04022401, sizeof(OscFunctionLibrary_eventAddSendOscTarget_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsBlob()
{
struct OscFunctionLibrary_eventAsBlob_Parms
{
FOscDataElemStruct input;
TArray<uint8> ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Array, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsBlob_Parms, ReturnValue), METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FBytePropertyParams NewProp_ReturnValue_Inner = { UE4CodeGen_Private::EPropertyClass::Byte, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, nullptr, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsBlob_Parms, input), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Interpret an OSC argument as a blob." },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "AsBlob", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventAsBlob_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsBool()
{
struct OscFunctionLibrary_eventAsBool_Parms
{
FOscDataElemStruct input;
bool ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
auto NewProp_ReturnValue_SetBit = [](void* Obj){ ((OscFunctionLibrary_eventAsBool_Parms*)Obj)->ReturnValue = 1; };
static const UE4CodeGen_Private::FBoolPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Bool, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, sizeof(bool), UE4CodeGen_Private::ENativeBool::Native, sizeof(OscFunctionLibrary_eventAsBool_Parms), &UE4CodeGen_Private::TBoolSetBitWrapper<decltype(NewProp_ReturnValue_SetBit)>::SetBit, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsBool_Parms, input), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Interpret an OSC argument as a boolean" },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "AsBool", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventAsBool_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsFloat()
{
struct OscFunctionLibrary_eventAsFloat_Parms
{
FOscDataElemStruct input;
float ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FFloatPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Float, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsFloat_Parms, ReturnValue), METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsFloat_Parms, input), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Interpret an OSC argument as a floating point." },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "AsFloat", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventAsFloat_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsInt()
{
struct OscFunctionLibrary_eventAsInt_Parms
{
FOscDataElemStruct input;
int32 ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FIntPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Int, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsInt_Parms, ReturnValue), METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsInt_Parms, input), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Interpret an OSC argument as a integer." },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "AsInt", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventAsInt_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_AsString()
{
struct OscFunctionLibrary_eventAsString_Parms
{
FOscDataElemStruct input;
FName ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FNamePropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Name, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsString_Parms, ReturnValue), METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventAsString_Parms, input), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Interpret an OSC argument as a string." },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "AsString", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventAsString_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromBlob()
{
struct OscFunctionLibrary_eventFromBlob_Parms
{
TArray<uint8> input;
FOscDataElemStruct ReturnValue;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FStructPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Struct, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromBlob_Parms, ReturnValue), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromBlob_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FBytePropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Byte, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, nullptr, METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Create an OSC argument from a blob." },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "FromBlob", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventFromBlob_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::FromBool ("Create an OSC argument from a boolean").
// Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromBool()
{
	// Mirror of the event parameter frame; used only to compute property offsets/sizes.
	struct OscFunctionLibrary_eventFromBool_Parms
	{
		bool input;
		FOscDataElemStruct ReturnValue;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Struct, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromBool_Parms, ReturnValue), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		// Native bools are registered via a set-bit thunk instead of a raw offset.
		auto NewProp_input_SetBit = [](void* Obj){ ((OscFunctionLibrary_eventFromBool_Parms*)Obj)->input = 1; };
		static const UE4CodeGen_Private::FBoolPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Bool, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, sizeof(bool), UE4CodeGen_Private::ENativeBool::Native, sizeof(OscFunctionLibrary_eventFromBool_Parms), &UE4CodeGen_Private::TBoolSetBitWrapper<decltype(NewProp_input_SetBit)>::SetBit, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Create an OSC argument from a boolean" },
		};
#endif
		// Function flag value 0x14022401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "FromBool", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14022401, sizeof(OscFunctionLibrary_eventFromBool_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::FromFloat ("Create an OSC argument from a floating point.").
// Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromFloat()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventFromFloat_Parms
	{
		float input;
		FOscDataElemStruct ReturnValue;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Struct, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromFloat_Parms, ReturnValue), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FFloatPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Float, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromFloat_Parms, input), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Create an OSC argument from a floating point." },
		};
#endif
		// Function flag value 0x14022401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "FromFloat", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14022401, sizeof(OscFunctionLibrary_eventFromFloat_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::FromInt. Auto-generated code — do not hand-edit;
// regenerate from Private/Common/OscFunctionLibrary.h instead.
// NOTE(review): ToolTip grammar ("a integer") originates in the source header's
// doc comment; fix it there, not in this generated file.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromInt()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventFromInt_Parms
	{
		int32 input;
		FOscDataElemStruct ReturnValue;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Struct, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromInt_Parms, ReturnValue), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FIntPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Int, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromInt_Parms, input), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Create an OSC argument from a integer." },
		};
#endif
		// Function flag value 0x14022401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "FromInt", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14022401, sizeof(OscFunctionLibrary_eventFromInt_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::FromString ("Create an OSC argument from a string.").
// Note the parameter is an FName (not FString) — matches the header signature.
// Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_FromString()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventFromString_Parms
	{
		FName input;
		FOscDataElemStruct ReturnValue;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_ReturnValue = { UE4CodeGen_Private::EPropertyClass::Struct, "ReturnValue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000580, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromString_Parms, ReturnValue), Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FNamePropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Name, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventFromString_Parms, input), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReturnValue,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Create an OSC argument from a string." },
		};
#endif
		// Function flag value 0x14022401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "FromString", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14022401, sizeof(OscFunctionLibrary_eventFromString_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PopBlob ("Get the next value from an OSC message as a
// blob."). Dynamic-array parameters are registered as an FArrayPropertyParams plus
// a companion *_Inner entry describing the element type.
// Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopBlob()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventPopBlob_Parms
	{
		TArray<FOscDataElemStruct> input;
		TArray<FOscDataElemStruct> output;
		TArray<uint8> Value;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Array, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopBlob_Parms, Value), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FBytePropertyParams NewProp_Value_Inner = { UE4CodeGen_Private::EPropertyClass::Byte, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, nullptr, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopBlob_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopBlob_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Get the next value from an OSC message as a blob." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PopBlob", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPopBlob_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PopBool ("Get the next value from an OSC message as a
// boolean."). Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopBool()
{
	// Mirror of the event parameter frame; used only to compute property offsets/sizes.
	struct OscFunctionLibrary_eventPopBool_Parms
	{
		TArray<FOscDataElemStruct> input;
		TArray<FOscDataElemStruct> output;
		bool Value;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		// Native bools are registered via a set-bit thunk instead of a raw offset.
		auto NewProp_Value_SetBit = [](void* Obj){ ((OscFunctionLibrary_eventPopBool_Parms*)Obj)->Value = 1; };
		static const UE4CodeGen_Private::FBoolPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Bool, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, sizeof(bool), UE4CodeGen_Private::ENativeBool::Native, sizeof(OscFunctionLibrary_eventPopBool_Parms), &UE4CodeGen_Private::TBoolSetBitWrapper<decltype(NewProp_Value_SetBit)>::SetBit, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopBool_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopBool_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Get the next value from an OSC message as a boolean." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PopBool", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPopBool_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PopFloat ("Get the next value from an OSC message as a
// floating point."). Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopFloat()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventPopFloat_Parms
	{
		TArray<FOscDataElemStruct> input;
		TArray<FOscDataElemStruct> output;
		float Value;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FFloatPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Float, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopFloat_Parms, Value), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopFloat_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopFloat_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Get the next value from an OSC message as a floating point." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PopFloat", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPopFloat_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PopInt. Auto-generated code — do not hand-edit;
// regenerate from Private/Common/OscFunctionLibrary.h instead.
// NOTE(review): ToolTip grammar ("a integer") originates in the source header's
// doc comment; fix it there, not in this generated file.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopInt()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventPopInt_Parms
	{
		TArray<FOscDataElemStruct> input;
		TArray<FOscDataElemStruct> output;
		int32 Value;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FIntPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Int, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopInt_Parms, Value), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopInt_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopInt_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Get the next value from an OSC message as a integer." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PopInt", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPopInt_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PopString ("Get the next value from an OSC message as a
// string."). The out value is an FName — matches the header signature.
// Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PopString()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventPopString_Parms
	{
		TArray<FOscDataElemStruct> input;
		TArray<FOscDataElemStruct> output;
		FName Value;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FNamePropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Name, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopString_Parms, Value), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopString_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPopString_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Get the next value from an OSC message as a string." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PopString", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPopString_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PushBlob ("Add a blob to an OSC message."). Unlike the
// Pop* functions, this one carries "AutoCreateRefTerm" metadata for the "input"
// pin. Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushBlob()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventPushBlob_Parms
	{
		TArray<FOscDataElemStruct> input;
		TArray<uint8> Value;
		TArray<FOscDataElemStruct> output;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushBlob_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Value_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Array, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushBlob_Parms, Value), METADATA_PARAMS(NewProp_Value_MetaData, ARRAY_COUNT(NewProp_Value_MetaData)) };
		static const UE4CodeGen_Private::FBytePropertyParams NewProp_Value_Inner = { UE4CodeGen_Private::EPropertyClass::Byte, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, nullptr, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushBlob_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path,
		// auto-created ref term for the "input" pin).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "AutoCreateRefTerm", "input" },
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Add a blob to an OSC message." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PushBlob", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPushBlob_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PushBool ("Add a boolean value to an OSC message.").
// Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushBool()
{
	// Mirror of the event parameter frame; used only to compute property offsets/sizes.
	struct OscFunctionLibrary_eventPushBool_Parms
	{
		TArray<FOscDataElemStruct> input;
		bool Value;
		TArray<FOscDataElemStruct> output;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushBool_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		// Native bools are registered via a set-bit thunk instead of a raw offset.
		auto NewProp_Value_SetBit = [](void* Obj){ ((OscFunctionLibrary_eventPushBool_Parms*)Obj)->Value = 1; };
		static const UE4CodeGen_Private::FBoolPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Bool, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, sizeof(bool), UE4CodeGen_Private::ENativeBool::Native, sizeof(OscFunctionLibrary_eventPushBool_Parms), &UE4CodeGen_Private::TBoolSetBitWrapper<decltype(NewProp_Value_SetBit)>::SetBit, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushBool_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path,
		// auto-created ref term for the "input" pin).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "AutoCreateRefTerm", "input" },
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Add a boolean value to an OSC message." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PushBool", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPushBool_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// UHT-generated: lazily constructs and caches the UFunction reflection descriptor
// for UOscFunctionLibrary::PushFloat ("Add a floating point value to an OSC
// message."). Auto-generated code — do not hand-edit; regenerate from
// Private/Common/OscFunctionLibrary.h instead.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushFloat()
{
	// Mirror of the event parameter frame; used only to compute property offsets.
	struct OscFunctionLibrary_eventPushFloat_Parms
	{
		TArray<FOscDataElemStruct> input;
		float Value;
		TArray<FOscDataElemStruct> output;
	};
	// Built once on first call, then returned from cache.
	static UFunction* ReturnFunction = nullptr;
	if (!ReturnFunction)
	{
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushFloat_Parms, output), METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FFloatPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Float, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushFloat_Parms, Value), METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
			{ "NativeConst", "" },
		};
#endif
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushFloat_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
		};
#if WITH_METADATA
		// Editor-only metadata (Blueprint category, tooltip, source path,
		// auto-created ref term for the "input" pin).
		static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
			{ "AutoCreateRefTerm", "input" },
			{ "Category", "OSC" },
			{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
			{ "ToolTip", "Add a floating point value to an OSC message." },
		};
#endif
		// Function flag value 0x14422401 is emitted by UHT from the UFUNCTION specifiers.
		static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PushFloat", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPushFloat_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
		UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
	}
	return ReturnFunction;
}
// Lazily builds and caches the reflection UFunction object for UOscFunctionLibrary::PushInt.
// UnrealHeaderTool-generated: flag literals and STRUCT_OFFSETs mirror the declaration in
// Private/Common/OscFunctionLibrary.h — regenerate, do not hand-edit.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushInt()
{
// Parameter layout (input array, int32 value, output array) seen by the script VM.
struct OscFunctionLibrary_eventPushInt_Parms
{
TArray<FOscDataElemStruct> input;
int32 Value;
TArray<FOscDataElemStruct> output;
};
// Constructed once per process and cached.
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
// Array parameters pair an Array descriptor with an _Inner element descriptor.
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushInt_Parms, output), METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FIntPropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Int, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushInt_Parms, Value), METADATA_PARAMS(nullptr, 0) };
// "NativeConst" records that 'input' is const-qualified in the native signature.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushInt_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
// Registration order of the parameter descriptors above.
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
};
// Editor-facing metadata; compiled out when WITH_METADATA is 0.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "AutoCreateRefTerm", "input" },
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Add a integer value to an OSC message." },
};
#endif
// 0x14422401 is the UHT-packed EFunctionFlags value for this function — do not hand-edit.
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PushInt", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPushInt_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
// Lazily builds and caches the reflection UFunction object for UOscFunctionLibrary::PushString.
// Note the "string" parameter is exposed as FName (Name property), per the native signature.
// UnrealHeaderTool-generated — regenerate, do not hand-edit.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_PushString()
{
// Parameter layout (input array, FName value, output array) seen by the script VM.
struct OscFunctionLibrary_eventPushString_Parms
{
TArray<FOscDataElemStruct> input;
FName Value;
TArray<FOscDataElemStruct> output;
};
// Constructed once per process and cached.
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
// Array parameters pair an Array descriptor with an _Inner element descriptor.
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_output = { UE4CodeGen_Private::EPropertyClass::Array, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000180, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushString_Parms, output), METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_output_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "output", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FNamePropertyParams NewProp_Value = { UE4CodeGen_Private::EPropertyClass::Name, "Value", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushString_Parms, Value), METADATA_PARAMS(nullptr, 0) };
// "NativeConst" records that 'input' is const-qualified in the native signature.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_input_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_input = { UE4CodeGen_Private::EPropertyClass::Array, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventPushString_Parms, input), METADATA_PARAMS(NewProp_input_MetaData, ARRAY_COUNT(NewProp_input_MetaData)) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_input_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "input", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
// Registration order of the parameter descriptors above.
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_output_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Value,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_input_Inner,
};
// Editor-facing metadata; compiled out when WITH_METADATA is 0.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "AutoCreateRefTerm", "input" },
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "Add a string value to an OSC message." },
};
#endif
// 0x14422401 is the UHT-packed EFunctionFlags value for this function — do not hand-edit.
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "PushString", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x14422401, sizeof(OscFunctionLibrary_eventPushString_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
// Lazily builds and caches the reflection UFunction object for UOscFunctionLibrary::SendOsc.
// Parameters: OSC address (FName), data elements array, target index (-1 = all targets).
// UnrealHeaderTool-generated — regenerate, do not hand-edit.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_SendOsc()
{
// Parameter layout seen by the script VM.
struct OscFunctionLibrary_eventSendOsc_Parms
{
FName Address;
TArray<FOscDataElemStruct> Data;
int32 TargetIndex;
};
// Constructed once per process and cached.
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FIntPropertyParams NewProp_TargetIndex = { UE4CodeGen_Private::EPropertyClass::Int, "TargetIndex", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventSendOsc_Parms, TargetIndex), METADATA_PARAMS(nullptr, 0) };
// "NativeConst" records that 'Data' is const-qualified in the native signature.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Data_MetaData[] = {
{ "NativeConst", "" },
};
#endif
// Array parameter: Array descriptor plus an _Inner element descriptor (FOscDataElemStruct).
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Data = { UE4CodeGen_Private::EPropertyClass::Array, "Data", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventSendOsc_Parms, Data), METADATA_PARAMS(NewProp_Data_MetaData, ARRAY_COUNT(NewProp_Data_MetaData)) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_Data_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "Data", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
static const UE4CodeGen_Private::FNamePropertyParams NewProp_Address = { UE4CodeGen_Private::EPropertyClass::Name, "Address", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventSendOsc_Parms, Address), METADATA_PARAMS(nullptr, 0) };
// Registration order of the parameter descriptors above.
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_TargetIndex,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Data,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Data_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Address,
};
// Editor-facing metadata; compiled out when WITH_METADATA is 0.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "AutoCreateRefTerm", "Data" },
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "@brief Send an OSC message.\n@param Address OSC address.\n@param Data result of successive PushFloat/Int/String/etc.\n@param TargetIndex index of the destination, -1 for all destinations. (SendTarget list of the plugin settings)" },
};
#endif
// 0x04422401 is the UHT-packed EFunctionFlags value (differs from the Push* functions) — do not hand-edit.
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "SendOsc", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x04422401, sizeof(OscFunctionLibrary_eventSendOsc_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
// Lazily builds and caches the reflection UFunction object for UOscFunctionLibrary::SendOscBundle.
// Parameters: array of FOscMessageStruct messages, target index (-1 = all targets).
// UnrealHeaderTool-generated — regenerate, do not hand-edit.
UFunction* Z_Construct_UFunction_UOscFunctionLibrary_SendOscBundle()
{
// Parameter layout seen by the script VM.
struct OscFunctionLibrary_eventSendOscBundle_Parms
{
TArray<FOscMessageStruct> Messages;
int32 TargetIndex;
};
// Constructed once per process and cached.
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
static const UE4CodeGen_Private::FIntPropertyParams NewProp_TargetIndex = { UE4CodeGen_Private::EPropertyClass::Int, "TargetIndex", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventSendOscBundle_Parms, TargetIndex), METADATA_PARAMS(nullptr, 0) };
// "NativeConst" records that 'Messages' is const-qualified in the native signature.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Messages_MetaData[] = {
{ "NativeConst", "" },
};
#endif
// Array parameter: Array descriptor plus an _Inner element descriptor (FOscMessageStruct).
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Messages = { UE4CodeGen_Private::EPropertyClass::Array, "Messages", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(OscFunctionLibrary_eventSendOscBundle_Parms, Messages), METADATA_PARAMS(NewProp_Messages_MetaData, ARRAY_COUNT(NewProp_Messages_MetaData)) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_Messages_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "Messages", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscMessageStruct, METADATA_PARAMS(nullptr, 0) };
// Registration order of the parameter descriptors above.
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_TargetIndex,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Messages,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Messages_Inner,
};
// Editor-facing metadata; compiled out when WITH_METADATA is 0.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
{ "ToolTip", "@brief Send several OSC messages in an OSC bundle.\n@param Messages of the bundle.\n@param TargetIndex index of the destination, -1 for all destinations. (SendTarget list of the plugin settings)" },
};
#endif
// 0x04422401 is the UHT-packed EFunctionFlags value — do not hand-edit.
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UClass_UOscFunctionLibrary, "SendOscBundle", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x04422401, sizeof(OscFunctionLibrary_eventSendOscBundle_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
// Returns the UClass for UOscFunctionLibrary by forwarding to StaticClass().
// NOTE(review): the _NoRegister variant is the one referenced from cross-module
// declarations (see the "Cross Module References" list) — presumably it skips the
// deferred registration path; confirm against UE4CodeGen_Private docs.
UClass* Z_Construct_UClass_UOscFunctionLibrary_NoRegister()
{
return UOscFunctionLibrary::StaticClass();
}
// Lazily constructs the reflection UClass for UOscFunctionLibrary, linking in all of
// its generated UFunction singletons. UnrealHeaderTool-generated — do not hand-edit.
UClass* Z_Construct_UClass_UOscFunctionLibrary()
{
// Built once per process and cached.
static UClass* OuterClass = nullptr;
if (!OuterClass)
{
// Types that must be constructed before this class: its base class and owning package.
static UObject* (*const DependentSingletons[])() = {
(UObject* (*)())Z_Construct_UClass_UBlueprintFunctionLibrary,
(UObject* (*)())Z_Construct_UPackage__Script_OSC,
};
// One entry per UFUNCTION; the trailing numeric comments are UHT-emitted per-function
// hashes used for change detection — keep in sync by regenerating only.
static const FClassFunctionLinkInfo FuncInfo[] = {
{ &Z_Construct_UFunction_UOscFunctionLibrary_AddSendOscTarget, "AddSendOscTarget" }, // 970178559
{ &Z_Construct_UFunction_UOscFunctionLibrary_AsBlob, "AsBlob" }, // 655577081
{ &Z_Construct_UFunction_UOscFunctionLibrary_AsBool, "AsBool" }, // 3593537980
{ &Z_Construct_UFunction_UOscFunctionLibrary_AsFloat, "AsFloat" }, // 250294681
{ &Z_Construct_UFunction_UOscFunctionLibrary_AsInt, "AsInt" }, // 2884710270
{ &Z_Construct_UFunction_UOscFunctionLibrary_AsString, "AsString" }, // 630151514
{ &Z_Construct_UFunction_UOscFunctionLibrary_FromBlob, "FromBlob" }, // 3010108991
{ &Z_Construct_UFunction_UOscFunctionLibrary_FromBool, "FromBool" }, // 206838951
{ &Z_Construct_UFunction_UOscFunctionLibrary_FromFloat, "FromFloat" }, // 3823827983
{ &Z_Construct_UFunction_UOscFunctionLibrary_FromInt, "FromInt" }, // 504896184
{ &Z_Construct_UFunction_UOscFunctionLibrary_FromString, "FromString" }, // 1401488672
{ &Z_Construct_UFunction_UOscFunctionLibrary_PopBlob, "PopBlob" }, // 3996848714
{ &Z_Construct_UFunction_UOscFunctionLibrary_PopBool, "PopBool" }, // 2347298357
{ &Z_Construct_UFunction_UOscFunctionLibrary_PopFloat, "PopFloat" }, // 651975571
{ &Z_Construct_UFunction_UOscFunctionLibrary_PopInt, "PopInt" }, // 1957834556
{ &Z_Construct_UFunction_UOscFunctionLibrary_PopString, "PopString" }, // 750546308
{ &Z_Construct_UFunction_UOscFunctionLibrary_PushBlob, "PushBlob" }, // 2520132707
{ &Z_Construct_UFunction_UOscFunctionLibrary_PushBool, "PushBool" }, // 2262165770
{ &Z_Construct_UFunction_UOscFunctionLibrary_PushFloat, "PushFloat" }, // 1796110765
{ &Z_Construct_UFunction_UOscFunctionLibrary_PushInt, "PushInt" }, // 2052066224
{ &Z_Construct_UFunction_UOscFunctionLibrary_PushString, "PushString" }, // 2864894003
{ &Z_Construct_UFunction_UOscFunctionLibrary_SendOsc, "SendOsc" }, // 4256185357
{ &Z_Construct_UFunction_UOscFunctionLibrary_SendOscBundle, "SendOscBundle" }, // 3248990546
};
// Editor-facing class metadata; compiled out when WITH_METADATA is 0.
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Class_MetaDataParams[] = {
{ "IncludePath", "Common/OscFunctionLibrary.h" },
{ "ModuleRelativePath", "Private/Common/OscFunctionLibrary.h" },
};
#endif
static const FCppClassTypeInfoStatic StaticCppClassTypeInfo = {
TCppClassTypeTraits<UOscFunctionLibrary>::IsAbstract,
};
// Packed class construction parameters; 0x00000080u is the UHT-encoded class flags.
// No properties, no implemented interfaces, no config name for this class.
static const UE4CodeGen_Private::FClassParams ClassParams = {
&UOscFunctionLibrary::StaticClass,
DependentSingletons, ARRAY_COUNT(DependentSingletons),
0x00000080u,
FuncInfo, ARRAY_COUNT(FuncInfo),
nullptr, 0,
nullptr,
&StaticCppClassTypeInfo,
nullptr, 0,
METADATA_PARAMS(Class_MetaDataParams, ARRAY_COUNT(Class_MetaDataParams))
};
UE4CodeGen_Private::ConstructUClass(OuterClass, ClassParams);
}
return OuterClass;
}
// Registers UOscFunctionLibrary with the UObject type system.
// NOTE(review): the numeric literal is emitted by UnrealHeaderTool (presumably a class
// signature hash used for hot-reload/change detection) — regenerate, never hand-edit.
IMPLEMENT_CLASS(UOscFunctionLibrary, 2120360335);
// Defers actual class construction to engine startup, when the /Script/OSC package loads.
static FCompiledInDefer Z_CompiledInDefer_UClass_UOscFunctionLibrary(Z_Construct_UClass_UOscFunctionLibrary, &UOscFunctionLibrary::StaticClass, TEXT("/Script/OSC"), TEXT("UOscFunctionLibrary"), false, nullptr, nullptr, nullptr);
DEFINE_VTABLE_PTR_HELPER_CTOR(UOscFunctionLibrary);
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#ifdef _MSC_VER
#pragma warning (pop)
#endif
// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
// Multiple-inclusion guard: this generated header must be included exactly once per TU.
#ifdef OSC_OscMessageStruct_generated_h
#error "OscMessageStruct.generated.h already included, missing '#pragma once' in OscMessageStruct.h"
#endif
#define OSC_OscMessageStruct_generated_h
// Body injected into FOscMessageStruct by its GENERATED_BODY() macro:
// grants the reflection builder friend access and declares StaticStruct().
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscMessageStruct_h_10_GENERATED_BODY \
friend OSC_API class UScriptStruct* Z_Construct_UScriptStruct_FOscMessageStruct(); \
OSC_API static class UScriptStruct* StaticStruct();
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscMessageStruct_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
// Multiple-inclusion guard: this generated header must be included exactly once per TU.
#ifdef OSC_OscDataElemStruct_generated_h
#error "OscDataElemStruct.generated.h already included, missing '#pragma once' in OscDataElemStruct.h"
#endif
#define OSC_OscDataElemStruct_generated_h
// Body injected into FOscDataElemStruct by its GENERATED_BODY() macro:
// grants the reflection builder friend access and declares StaticStruct().
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscDataElemStruct_h_9_GENERATED_BODY \
friend OSC_API class UScriptStruct* Z_Construct_UScriptStruct_FOscDataElemStruct(); \
OSC_API static class UScriptStruct* StaticStruct();
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscDataElemStruct_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
struct FOscDataElemStruct;
// Multiple-inclusion guard: this generated header must be included exactly once per TU.
#ifdef OSC_OscReceiverComponent_generated_h
#error "OscReceiverComponent.generated.h already included, missing '#pragma once' in OscReceiverComponent.h"
#endif
#define OSC_OscReceiverComponent_generated_h
// Param struct + wrapper used to broadcast the component's OSC-received multicast
// delegate (Address, Data elements, sender IP string) through the script VM.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_10_DELEGATE \
struct _Script_OSC_eventComponentOscReceivedSignature_Parms \
{ \
FName Address; \
TArray<FOscDataElemStruct> Data; \
FString SenderIp; \
}; \
static inline void FComponentOscReceivedSignature_DelegateWrapper(const FMulticastScriptDelegate& ComponentOscReceivedSignature, FName const& Address, TArray<FOscDataElemStruct> const& Data, const FString& SenderIp) \
{ \
_Script_OSC_eventComponentOscReceivedSignature_Parms Parms; \
Parms.Address=Address; \
Parms.Data=Data; \
Parms.SenderIp=SenderIp; \
ComponentOscReceivedSignature.ProcessMulticastDelegate<UObject>(&Parms); \
}
// Empty: no RPC wrapper thunks are generated for this class.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_RPC_WRAPPERS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_RPC_WRAPPERS_NO_PURE_DECLS
// In-class boilerplate: native-registration hook, reflection friend, DECLARE_CLASS/SERIALIZER.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_INCLASS_NO_PURE_DECLS \
private: \
static void StaticRegisterNativesUOscReceiverComponent(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscReceiverComponent(); \
public: \
DECLARE_CLASS(UOscReceiverComponent, UActorComponent, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscReceiverComponent) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
// Legacy variant of the same in-class boilerplate (used by GENERATED_BODY_LEGACY).
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_INCLASS \
private: \
static void StaticRegisterNativesUOscReceiverComponent(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscReceiverComponent(); \
public: \
DECLARE_CLASS(UOscReceiverComponent, UActorComponent, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscReceiverComponent) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
// Constructor boilerplate; copy/move constructors are declared private and left undefined.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_STANDARD_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API UOscReceiverComponent(const FObjectInitializer& ObjectInitializer); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UOscReceiverComponent) \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscReceiverComponent); \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscReceiverComponent(UOscReceiverComponent&&); \
NO_API UOscReceiverComponent(const UOscReceiverComponent&); \
public:
// Enhanced-constructor variant (used by the modern GENERATED_BODY aggregate below).
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_ENHANCED_CONSTRUCTORS \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscReceiverComponent(UOscReceiverComponent&&); \
NO_API UOscReceiverComponent(const UOscReceiverComponent&); \
public: \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscReceiverComponent); \
DEFINE_DEFAULT_CONSTRUCTOR_CALL(UOscReceiverComponent)
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_PRIVATE_PROPERTY_OFFSET
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_13_PROLOG
// GENERATED_BODY_LEGACY: aggregate of the legacy pieces above.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_GENERATED_BODY_LEGACY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_RPC_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_INCLASS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_STANDARD_CONSTRUCTORS \
public: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
// GENERATED_BODY: modern aggregate of the *_NO_PURE_DECLS/enhanced pieces above.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_GENERATED_BODY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_RPC_WRAPPERS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_INCLASS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h_16_ENHANCED_CONSTRUCTORS \
private: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscReceiverComponent_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "GeneratedCppIncludes.h"
#include "Private/OscPrivatePCH.h"
#include "Private/OscSettings.h"
#ifdef _MSC_VER
#pragma warning (push)
#pragma warning (disable : 4883)
#endif
PRAGMA_DISABLE_DEPRECATION_WARNINGS
void EmptyLinkFunctionForGeneratedCodeOscSettings() {}
// Cross Module References
OSC_API UClass* Z_Construct_UClass_UOscSettings_NoRegister();
OSC_API UClass* Z_Construct_UClass_UOscSettings();
COREUOBJECT_API UClass* Z_Construct_UClass_UObject();
UPackage* Z_Construct_UPackage__Script_OSC();
// End Cross Module References
// UOscSettings declares no native UFUNCTIONs, so there is nothing to bind here;
// UnrealHeaderTool still emits the (empty) registration hook.
void UOscSettings::StaticRegisterNativesUOscSettings()
{
}
// Returns the UClass for UOscSettings by forwarding to StaticClass().
// NOTE(review): the _NoRegister variant is the form used by cross-module references
// (declared above) — presumably it bypasses deferred registration; confirm against
// UE4CodeGen_Private docs.
UClass* Z_Construct_UClass_UOscSettings_NoRegister()
{
return UOscSettings::StaticClass();
}
// Lazily constructs the reflection UClass for the UOscSettings config class, registering
// its reflected properties (MulticastLoopback, Inputs, SendTargets, ReceiveFrom).
// UnrealHeaderTool-generated — regenerate, do not hand-edit.
UClass* Z_Construct_UClass_UOscSettings()
{
// Built once per process and cached.
static UClass* OuterClass = nullptr;
if (!OuterClass)
{
// Types that must be constructed first: base class (UObject) and owning package.
static UObject* (*const DependentSingletons[])() = {
(UObject* (*)())Z_Construct_UClass_UObject,
(UObject* (*)())Z_Construct_UPackage__Script_OSC,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Class_MetaDataParams[] = {
{ "IncludePath", "OscSettings.h" },
{ "ModuleRelativePath", "Private/OscSettings.h" },
};
#endif
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_MulticastLoopback_MetaData[] = {
{ "Category", "Network" },
{ "ModuleRelativePath", "Private/OscSettings.h" },
};
#endif
// Native-bool property is set through a generated lambda wrapped by TBoolSetBitWrapper,
// since the reflection layer cannot address the bool member directly.
auto NewProp_MulticastLoopback_SetBit = [](void* Obj){ ((UOscSettings*)Obj)->MulticastLoopback = 1; };
static const UE4CodeGen_Private::FBoolPropertyParams NewProp_MulticastLoopback = { UE4CodeGen_Private::EPropertyClass::Bool, "MulticastLoopback", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000004001, 1, nullptr, sizeof(bool), UE4CodeGen_Private::ENativeBool::Native, sizeof(UOscSettings), &UE4CodeGen_Private::TBoolSetBitWrapper<decltype(NewProp_MulticastLoopback_SetBit)>::SetBit, METADATA_PARAMS(NewProp_MulticastLoopback_MetaData, ARRAY_COUNT(NewProp_MulticastLoopback_MetaData)) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Inputs_MetaData[] = {
{ "Category", "Input" },
{ "ModuleRelativePath", "Private/OscSettings.h" },
{ "ToolTip", "List of the messages treated as inputs.\n\nEvery entry adds a key in the input mapping project setting.\ne.g. \"/position/x\" -> \"OSC_position_x\"" },
};
#endif
// TArray<FString> properties: Array descriptor plus an _Inner Str element descriptor.
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Inputs = { UE4CodeGen_Private::EPropertyClass::Array, "Inputs", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000004001, 1, nullptr, STRUCT_OFFSET(UOscSettings, Inputs), METADATA_PARAMS(NewProp_Inputs_MetaData, ARRAY_COUNT(NewProp_Inputs_MetaData)) };
static const UE4CodeGen_Private::FStrPropertyParams NewProp_Inputs_Inner = { UE4CodeGen_Private::EPropertyClass::Str, "Inputs", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000004000, 1, nullptr, 0, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_SendTargets_MetaData[] = {
{ "Category", "Send" },
{ "ModuleRelativePath", "Private/OscSettings.h" },
};
#endif
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_SendTargets = { UE4CodeGen_Private::EPropertyClass::Array, "SendTargets", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000004001, 1, nullptr, STRUCT_OFFSET(UOscSettings, SendTargets), METADATA_PARAMS(NewProp_SendTargets_MetaData, ARRAY_COUNT(NewProp_SendTargets_MetaData)) };
static const UE4CodeGen_Private::FStrPropertyParams NewProp_SendTargets_Inner = { UE4CodeGen_Private::EPropertyClass::Str, "SendTargets", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000004000, 1, nullptr, 0, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_ReceiveFrom_MetaData[] = {
{ "Category", "Receive" },
{ "ModuleRelativePath", "Private/OscSettings.h" },
{ "ToolTip", "Specify the [address:]port to listen to.\n\ne.g.\n- \"8000\" listen to messages from any sender on port 8000. [default]\n- \"172.16.17.32:8000\" listen multi-cast messages of group 172.16.17.32 on port 8000.\n- \"192.168.0.1:8000\" listen messages addressed specifically to 192.168.0.1 on port 8000, useful if there are several addresses for this machine." },
};
#endif
static const UE4CodeGen_Private::FStrPropertyParams NewProp_ReceiveFrom = { UE4CodeGen_Private::EPropertyClass::Str, "ReceiveFrom", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000004001, 1, nullptr, STRUCT_OFFSET(UOscSettings, ReceiveFrom), METADATA_PARAMS(NewProp_ReceiveFrom_MetaData, ARRAY_COUNT(NewProp_ReceiveFrom_MetaData)) };
// Registration order of the property descriptors above.
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_MulticastLoopback,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Inputs,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Inputs_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_SendTargets,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_SendTargets_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_ReceiveFrom,
};
static const FCppClassTypeInfoStatic StaticCppClassTypeInfo = {
TCppClassTypeTraits<UOscSettings>::IsAbstract,
};
// Packed class parameters; 0x00000086u is the UHT-encoded class flags.
// NOTE(review): "Engine" here looks like the config (.ini) name for this settings
// class — confirm against the FClassParams field layout.
static const UE4CodeGen_Private::FClassParams ClassParams = {
&UOscSettings::StaticClass,
DependentSingletons, ARRAY_COUNT(DependentSingletons),
0x00000086u,
nullptr, 0,
PropPointers, ARRAY_COUNT(PropPointers),
"Engine",
&StaticCppClassTypeInfo,
nullptr, 0,
METADATA_PARAMS(Class_MetaDataParams, ARRAY_COUNT(Class_MetaDataParams))
};
UE4CodeGen_Private::ConstructUClass(OuterClass, ClassParams);
}
return OuterClass;
}
// Registers UOscSettings with the UObject type system.
// NOTE(review): the numeric literal is emitted by UnrealHeaderTool (presumably a class
// signature hash) — regenerate, never hand-edit.
IMPLEMENT_CLASS(UOscSettings, 2924652717);
// Defers actual class construction to engine startup, when the /Script/OSC package loads.
static FCompiledInDefer Z_CompiledInDefer_UClass_UOscSettings(Z_Construct_UClass_UOscSettings, &UOscSettings::StaticClass, TEXT("/Script/OSC"), TEXT("UOscSettings"), false, nullptr, nullptr, nullptr);
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#ifdef _MSC_VER
#pragma warning (pop)
#endif
// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
struct FOscMessageStruct;
struct FOscDataElemStruct;
#ifdef OSC_OscFunctionLibrary_generated_h
#error "OscFunctionLibrary.generated.h already included, missing '#pragma once' in OscFunctionLibrary.h"
#endif
#define OSC_OscFunctionLibrary_generated_h
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_RPC_WRAPPERS \
\
DECLARE_FUNCTION(execAddSendOscTarget) \
{ \
P_GET_PROPERTY(UStrProperty,Z_Param_IpPort); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(int32*)Z_Param__Result=UOscFunctionLibrary::AddSendOscTarget(Z_Param_IpPort); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execSendOscBundle) \
{ \
P_GET_TARRAY_REF(FOscMessageStruct,Z_Param_Out_Messages); \
P_GET_PROPERTY(UIntProperty,Z_Param_TargetIndex); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::SendOscBundle(Z_Param_Out_Messages,Z_Param_TargetIndex); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execSendOsc) \
{ \
P_GET_PROPERTY(UNameProperty,Z_Param_Address); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_Data); \
P_GET_PROPERTY(UIntProperty,Z_Param_TargetIndex); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::SendOsc(Z_Param_Address,Z_Param_Out_Data,Z_Param_TargetIndex); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromBlob) \
{ \
P_GET_TARRAY_REF(uint8,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromBlob(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromString) \
{ \
P_GET_PROPERTY(UNameProperty,Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromString(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromInt) \
{ \
P_GET_PROPERTY(UIntProperty,Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromInt(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromFloat) \
{ \
P_GET_PROPERTY(UFloatProperty,Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromFloat(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromBool) \
{ \
P_GET_UBOOL(Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromBool(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsBlob) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(TArray<uint8>*)Z_Param__Result=UOscFunctionLibrary::AsBlob(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsString) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FName*)Z_Param__Result=UOscFunctionLibrary::AsString(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsInt) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(int32*)Z_Param__Result=UOscFunctionLibrary::AsInt(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsFloat) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(float*)Z_Param__Result=UOscFunctionLibrary::AsFloat(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsBool) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(bool*)Z_Param__Result=UOscFunctionLibrary::AsBool(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushBlob) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(uint8,Z_Param_Out_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushBlob(Z_Param_Out_input,Z_Param_Out_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushString) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_PROPERTY(UNameProperty,Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushString(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushInt) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_PROPERTY(UIntProperty,Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushInt(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushFloat) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_PROPERTY(UFloatProperty,Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushFloat(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushBool) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_UBOOL(Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushBool(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopBlob) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_TARRAY_REF(uint8,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopBlob(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopString) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_PROPERTY_REF(UNameProperty,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopString(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopInt) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_PROPERTY_REF(UIntProperty,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopInt(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopFloat) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_PROPERTY_REF(UFloatProperty,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopFloat(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopBool) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_UBOOL_REF(Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopBool(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
}
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_RPC_WRAPPERS_NO_PURE_DECLS \
\
DECLARE_FUNCTION(execAddSendOscTarget) \
{ \
P_GET_PROPERTY(UStrProperty,Z_Param_IpPort); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(int32*)Z_Param__Result=UOscFunctionLibrary::AddSendOscTarget(Z_Param_IpPort); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execSendOscBundle) \
{ \
P_GET_TARRAY_REF(FOscMessageStruct,Z_Param_Out_Messages); \
P_GET_PROPERTY(UIntProperty,Z_Param_TargetIndex); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::SendOscBundle(Z_Param_Out_Messages,Z_Param_TargetIndex); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execSendOsc) \
{ \
P_GET_PROPERTY(UNameProperty,Z_Param_Address); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_Data); \
P_GET_PROPERTY(UIntProperty,Z_Param_TargetIndex); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::SendOsc(Z_Param_Address,Z_Param_Out_Data,Z_Param_TargetIndex); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromBlob) \
{ \
P_GET_TARRAY_REF(uint8,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromBlob(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromString) \
{ \
P_GET_PROPERTY(UNameProperty,Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromString(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromInt) \
{ \
P_GET_PROPERTY(UIntProperty,Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromInt(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromFloat) \
{ \
P_GET_PROPERTY(UFloatProperty,Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromFloat(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execFromBool) \
{ \
P_GET_UBOOL(Z_Param_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FOscDataElemStruct*)Z_Param__Result=UOscFunctionLibrary::FromBool(Z_Param_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsBlob) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(TArray<uint8>*)Z_Param__Result=UOscFunctionLibrary::AsBlob(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsString) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(FName*)Z_Param__Result=UOscFunctionLibrary::AsString(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsInt) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(int32*)Z_Param__Result=UOscFunctionLibrary::AsInt(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsFloat) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(float*)Z_Param__Result=UOscFunctionLibrary::AsFloat(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execAsBool) \
{ \
P_GET_STRUCT_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_FINISH; \
P_NATIVE_BEGIN; \
*(bool*)Z_Param__Result=UOscFunctionLibrary::AsBool(Z_Param_Out_input); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushBlob) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(uint8,Z_Param_Out_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushBlob(Z_Param_Out_input,Z_Param_Out_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushString) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_PROPERTY(UNameProperty,Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushString(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushInt) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_PROPERTY(UIntProperty,Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushInt(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushFloat) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_PROPERTY(UFloatProperty,Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushFloat(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPushBool) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_UBOOL(Z_Param_Value); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PushBool(Z_Param_Out_input,Z_Param_Value,Z_Param_Out_output); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopBlob) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_TARRAY_REF(uint8,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopBlob(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopString) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_PROPERTY_REF(UNameProperty,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopString(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopInt) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_PROPERTY_REF(UIntProperty,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopInt(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopFloat) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_PROPERTY_REF(UFloatProperty,Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopFloat(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
} \
\
DECLARE_FUNCTION(execPopBool) \
{ \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_input); \
P_GET_TARRAY_REF(FOscDataElemStruct,Z_Param_Out_output); \
P_GET_UBOOL_REF(Z_Param_Out_Value); \
P_FINISH; \
P_NATIVE_BEGIN; \
UOscFunctionLibrary::PopBool(Z_Param_Out_input,Z_Param_Out_output,Z_Param_Out_Value); \
P_NATIVE_END; \
}
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_INCLASS_NO_PURE_DECLS \
private: \
static void StaticRegisterNativesUOscFunctionLibrary(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscFunctionLibrary(); \
public: \
DECLARE_CLASS(UOscFunctionLibrary, UBlueprintFunctionLibrary, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscFunctionLibrary) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_INCLASS \
private: \
static void StaticRegisterNativesUOscFunctionLibrary(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscFunctionLibrary(); \
public: \
DECLARE_CLASS(UOscFunctionLibrary, UBlueprintFunctionLibrary, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscFunctionLibrary) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_STANDARD_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API UOscFunctionLibrary(const FObjectInitializer& ObjectInitializer = FObjectInitializer::Get()); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UOscFunctionLibrary) \
DECLARE_VTABLE_PTR_HELPER_CTOR(NO_API, UOscFunctionLibrary); \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscFunctionLibrary); \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscFunctionLibrary(UOscFunctionLibrary&&); \
NO_API UOscFunctionLibrary(const UOscFunctionLibrary&); \
public:
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_ENHANCED_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API UOscFunctionLibrary(const FObjectInitializer& ObjectInitializer = FObjectInitializer::Get()) : Super(ObjectInitializer) { }; \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscFunctionLibrary(UOscFunctionLibrary&&); \
NO_API UOscFunctionLibrary(const UOscFunctionLibrary&); \
public: \
DECLARE_VTABLE_PTR_HELPER_CTOR(NO_API, UOscFunctionLibrary); \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscFunctionLibrary); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UOscFunctionLibrary)
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_PRIVATE_PROPERTY_OFFSET
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_8_PROLOG
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_GENERATED_BODY_LEGACY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_RPC_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_INCLASS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_STANDARD_CONSTRUCTORS \
public: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_GENERATED_BODY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_RPC_WRAPPERS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_INCLASS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h_11_ENHANCED_CONSTRUCTORS \
private: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_Common_OscFunctionLibrary_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
<file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "GeneratedCppIncludes.h"
#include "Private/OscPrivatePCH.h"
#include "Private/Receive/OscReceiverComponent.h"
#ifdef _MSC_VER
#pragma warning (push)
#pragma warning (disable : 4883)
#endif
PRAGMA_DISABLE_DEPRECATION_WARNINGS
void EmptyLinkFunctionForGeneratedCodeOscReceiverComponent() {}
// Cross Module References
OSC_API UFunction* Z_Construct_UDelegateFunction_OSC_ComponentOscReceivedSignature__DelegateSignature();
UPackage* Z_Construct_UPackage__Script_OSC();
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscDataElemStruct();
OSC_API UClass* Z_Construct_UClass_UOscReceiverComponent_NoRegister();
OSC_API UClass* Z_Construct_UClass_UOscReceiverComponent();
ENGINE_API UClass* Z_Construct_UClass_UActorComponent();
// End Cross Module References
UFunction* Z_Construct_UDelegateFunction_OSC_ComponentOscReceivedSignature__DelegateSignature()
{
struct _Script_OSC_eventComponentOscReceivedSignature_Parms
{
FName Address;
TArray<FOscDataElemStruct> Data;
FString SenderIp;
};
static UFunction* ReturnFunction = nullptr;
if (!ReturnFunction)
{
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_SenderIp_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FStrPropertyParams NewProp_SenderIp = { UE4CodeGen_Private::EPropertyClass::Str, "SenderIp", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000080, 1, nullptr, STRUCT_OFFSET(_Script_OSC_eventComponentOscReceivedSignature_Parms, SenderIp), METADATA_PARAMS(NewProp_SenderIp_MetaData, ARRAY_COUNT(NewProp_SenderIp_MetaData)) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Data_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Data = { UE4CodeGen_Private::EPropertyClass::Array, "Data", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(_Script_OSC_eventComponentOscReceivedSignature_Parms, Data), METADATA_PARAMS(NewProp_Data_MetaData, ARRAY_COUNT(NewProp_Data_MetaData)) };
static const UE4CodeGen_Private::FStructPropertyParams NewProp_Data_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "Data", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Address_MetaData[] = {
{ "NativeConst", "" },
};
#endif
static const UE4CodeGen_Private::FNamePropertyParams NewProp_Address = { UE4CodeGen_Private::EPropertyClass::Name, "Address", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000008000182, 1, nullptr, STRUCT_OFFSET(_Script_OSC_eventComponentOscReceivedSignature_Parms, Address), METADATA_PARAMS(NewProp_Address_MetaData, ARRAY_COUNT(NewProp_Address_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_SenderIp,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Data,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Data_Inner,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Address,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Function_MetaDataParams[] = {
{ "ModuleRelativePath", "Private/Receive/OscReceiverComponent.h" },
{ "ToolTip", "declare the OnOscReceived event type" },
};
#endif
static const UE4CodeGen_Private::FFunctionParams FuncParams = { (UObject*(*)())Z_Construct_UPackage__Script_OSC, "ComponentOscReceivedSignature__DelegateSignature", RF_Public|RF_Transient|RF_MarkAsNative, nullptr, (EFunctionFlags)0x00130000, sizeof(_Script_OSC_eventComponentOscReceivedSignature_Parms), PropPointers, ARRAY_COUNT(PropPointers), 0, 0, METADATA_PARAMS(Function_MetaDataParams, ARRAY_COUNT(Function_MetaDataParams)) };
UE4CodeGen_Private::ConstructUFunction(ReturnFunction, FuncParams);
}
return ReturnFunction;
}
void UOscReceiverComponent::StaticRegisterNativesUOscReceiverComponent()
{
}
UClass* Z_Construct_UClass_UOscReceiverComponent_NoRegister()
{
return UOscReceiverComponent::StaticClass();
}
UClass* Z_Construct_UClass_UOscReceiverComponent()
{
static UClass* OuterClass = nullptr;
if (!OuterClass)
{
static UObject* (*const DependentSingletons[])() = {
(UObject* (*)())Z_Construct_UClass_UActorComponent,
(UObject* (*)())Z_Construct_UPackage__Script_OSC,
};
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam Class_MetaDataParams[] = {
{ "BlueprintSpawnableComponent", "" },
{ "ClassGroupNames", "OSC" },
{ "IncludePath", "Receive/OscReceiverComponent.h" },
{ "ModuleRelativePath", "Private/Receive/OscReceiverComponent.h" },
};
#endif
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_OnOscReceived_MetaData[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Receive/OscReceiverComponent.h" },
};
#endif
static const UE4CodeGen_Private::FMulticastDelegatePropertyParams NewProp_OnOscReceived = { UE4CodeGen_Private::EPropertyClass::MulticastDelegate, "OnOscReceived", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000010080000, 1, nullptr, STRUCT_OFFSET(UOscReceiverComponent, OnOscReceived), Z_Construct_UDelegateFunction_OSC_ComponentOscReceivedSignature__DelegateSignature, METADATA_PARAMS(NewProp_OnOscReceived_MetaData, ARRAY_COUNT(NewProp_OnOscReceived_MetaData)) };
#if WITH_METADATA
static const UE4CodeGen_Private::FMetaDataPairParam NewProp_AddressFilter_MetaData[] = {
{ "Category", "OSC" },
{ "ModuleRelativePath", "Private/Receive/OscReceiverComponent.h" },
};
#endif
static const UE4CodeGen_Private::FStrPropertyParams NewProp_AddressFilter = { UE4CodeGen_Private::EPropertyClass::Str, "AddressFilter", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000001, 1, nullptr, STRUCT_OFFSET(UOscReceiverComponent, AddressFilter), METADATA_PARAMS(NewProp_AddressFilter_MetaData, ARRAY_COUNT(NewProp_AddressFilter_MetaData)) };
static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_OnOscReceived,
(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_AddressFilter,
};
static const FCppClassTypeInfoStatic StaticCppClassTypeInfo = {
TCppClassTypeTraits<UOscReceiverComponent>::IsAbstract,
};
static const UE4CodeGen_Private::FClassParams ClassParams = {
&UOscReceiverComponent::StaticClass,
DependentSingletons, ARRAY_COUNT(DependentSingletons),
0x00A00080u,
nullptr, 0,
PropPointers, ARRAY_COUNT(PropPointers),
nullptr,
&StaticCppClassTypeInfo,
nullptr, 0,
METADATA_PARAMS(Class_MetaDataParams, ARRAY_COUNT(Class_MetaDataParams))
};
UE4CodeGen_Private::ConstructUClass(OuterClass, ClassParams);
}
return OuterClass;
}
IMPLEMENT_CLASS(UOscReceiverComponent, 4222399122);
static FCompiledInDefer Z_CompiledInDefer_UClass_UOscReceiverComponent(Z_Construct_UClass_UOscReceiverComponent, &UOscReceiverComponent::StaticClass, TEXT("/Script/OSC"), TEXT("UOscReceiverComponent"), false, nullptr, nullptr, nullptr);
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#ifdef _MSC_VER
#pragma warning (pop)
#endif
<file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
#ifdef OSC_OscDispatcher_generated_h
#error "OscDispatcher.generated.h already included, missing '#pragma once' in OscDispatcher.h"
#endif
#define OSC_OscDispatcher_generated_h
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_RPC_WRAPPERS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_RPC_WRAPPERS_NO_PURE_DECLS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_INCLASS_NO_PURE_DECLS \
private: \
static void StaticRegisterNativesUOscDispatcher(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscDispatcher(); \
public: \
DECLARE_CLASS(UOscDispatcher, UObject, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscDispatcher) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_INCLASS \
private: \
static void StaticRegisterNativesUOscDispatcher(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscDispatcher(); \
public: \
DECLARE_CLASS(UOscDispatcher, UObject, COMPILED_IN_FLAGS(0), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscDispatcher) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC};
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_STANDARD_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API UOscDispatcher(const FObjectInitializer& ObjectInitializer); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UOscDispatcher) \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscDispatcher); \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscDispatcher(UOscDispatcher&&); \
NO_API UOscDispatcher(const UOscDispatcher&); \
public:
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_ENHANCED_CONSTRUCTORS \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscDispatcher(UOscDispatcher&&); \
NO_API UOscDispatcher(const UOscDispatcher&); \
public: \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscDispatcher); \
DEFINE_DEFAULT_CONSTRUCTOR_CALL(UOscDispatcher)
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_PRIVATE_PROPERTY_OFFSET
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_12_PROLOG
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_GENERATED_BODY_LEGACY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_RPC_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_INCLASS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_STANDARD_CONSTRUCTORS \
public: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_GENERATED_BODY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_RPC_WRAPPERS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_INCLASS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h_15_ENHANCED_CONSTRUCTORS \
private: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_Receive_OscDispatcher_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
<file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "ObjectMacros.h"
#include "ScriptMacros.h"
PRAGMA_DISABLE_DEPRECATION_WARNINGS
#ifdef OSC_OscSettings_generated_h
#error "OscSettings.generated.h already included, missing '#pragma once' in OscSettings.h"
#endif
#define OSC_OscSettings_generated_h
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_RPC_WRAPPERS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_RPC_WRAPPERS_NO_PURE_DECLS
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_INCLASS_NO_PURE_DECLS \
private: \
static void StaticRegisterNativesUOscSettings(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscSettings(); \
public: \
DECLARE_CLASS(UOscSettings, UObject, COMPILED_IN_FLAGS(0 | CLASS_DefaultConfig | CLASS_Config), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscSettings) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC}; \
static const TCHAR* StaticConfigName() {return TEXT("Engine");} \
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_INCLASS \
private: \
static void StaticRegisterNativesUOscSettings(); \
friend OSC_API class UClass* Z_Construct_UClass_UOscSettings(); \
public: \
DECLARE_CLASS(UOscSettings, UObject, COMPILED_IN_FLAGS(0 | CLASS_DefaultConfig | CLASS_Config), 0, TEXT("/Script/OSC"), NO_API) \
DECLARE_SERIALIZER(UOscSettings) \
enum {IsIntrinsic=COMPILED_IN_INTRINSIC}; \
static const TCHAR* StaticConfigName() {return TEXT("Engine");} \
// ---- UnrealHeaderTool-generated macro sets for UOscSettings (OscSettings.h, line 14). ----
// DO NOT edit by hand beyond comments; the tool regenerates this file.

// Legacy ("standard") constructor set: declares the FObjectInitializer-based
// constructor and hides move/copy construction.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_STANDARD_CONSTRUCTORS \
/** Standard constructor, called after all reflected properties have been initialized */ \
NO_API UOscSettings(const FObjectInitializer& ObjectInitializer); \
DEFINE_DEFAULT_OBJECT_INITIALIZER_CONSTRUCTOR_CALL(UOscSettings) \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscSettings); \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscSettings(UOscSettings&&); \
NO_API UOscSettings(const UOscSettings&); \
public:
// "Enhanced" constructor set: default-constructor based variant used by GENERATED_BODY.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_ENHANCED_CONSTRUCTORS \
private: \
/** Private move- and copy-constructors, should never be used */ \
NO_API UOscSettings(UOscSettings&&); \
NO_API UOscSettings(const UOscSettings&); \
public: \
DEFINE_VTABLE_PTR_HELPER_CTOR_CALLER(UOscSettings); \
DEFINE_DEFAULT_CONSTRUCTOR_CALL(UOscSettings)
// No private-property offset helpers are generated for this class.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_PRIVATE_PROPERTY_OFFSET
// Nothing injected ahead of the generated body.
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_11_PROLOG
// Expansion used by the legacy GENERATED_UCLASS_BODY form (ends public).
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_GENERATED_BODY_LEGACY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_RPC_WRAPPERS \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_INCLASS \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_STANDARD_CONSTRUCTORS \
public: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
// Expansion used by the modern GENERATED_BODY form (ends private).
#define UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_GENERATED_BODY \
PRAGMA_DISABLE_DEPRECATION_WARNINGS \
public: \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_PRIVATE_PROPERTY_OFFSET \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_RPC_WRAPPERS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_INCLASS_NO_PURE_DECLS \
UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h_14_ENHANCED_CONSTRUCTORS \
private: \
PRAGMA_ENABLE_DEPRECATION_WARNINGS
// Unique id used by the generated-body machinery to address this header.
#undef CURRENT_FILE_ID
#define CURRENT_FILE_ID UE_OSC_Plugins_OSC_Source_OSC_Private_OscSettings_h
PRAGMA_ENABLE_DEPRECATION_WARNINGS
<file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "GeneratedCppIncludes.h"
#include "Private/OscPrivatePCH.h"
#include "Private/Common/OscDataStruct.h"
#ifdef _MSC_VER
#pragma warning (push)
#pragma warning (disable : 4883)
#endif
PRAGMA_DISABLE_DEPRECATION_WARNINGS
void EmptyLinkFunctionForGeneratedCodeOscDataStruct() {}
// Cross Module References
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscDataStruct();
UPackage* Z_Construct_UPackage__Script_OSC();
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscDataElemStruct();
// End Cross Module References
// UHT-generated accessor: lazily builds and caches the UScriptStruct
// reflection object for FOscDataStruct (one per process).
class UScriptStruct* FOscDataStruct::StaticStruct()
{
	static class UScriptStruct* Singleton = NULL;
	if (!Singleton)
	{
		extern OSC_API uint32 Get_Z_Construct_UScriptStruct_FOscDataStruct_CRC();
		// Construct (or locate) the struct inside the /Script/OSC package, keyed by name, size and layout CRC.
		Singleton = GetStaticStruct(Z_Construct_UScriptStruct_FOscDataStruct, Z_Construct_UPackage__Script_OSC(), TEXT("OscDataStruct"), sizeof(FOscDataStruct), Get_Z_Construct_UScriptStruct_FOscDataStruct_CRC());
	}
	return Singleton;
}
// UHT-generated static registration: defers the struct into the engine's
// compiled-in registry, and installs the C++ struct ops (ctor/dtor/copy)
// during static initialization.
static FCompiledInDeferStruct Z_CompiledInDeferStruct_UScriptStruct_FOscDataStruct(FOscDataStruct::StaticStruct, TEXT("/Script/OSC"), TEXT("OscDataStruct"), false, nullptr, nullptr);
static struct FScriptStruct_OSC_StaticRegisterNativesFOscDataStruct
{
	FScriptStruct_OSC_StaticRegisterNativesFOscDataStruct()
	{
		UScriptStruct::DeferCppStructOps(FName(TEXT("OscDataStruct")),new UScriptStruct::TCppStructOps<FOscDataStruct>);
	}
} ScriptStruct_OSC_StaticRegisterNativesFOscDataStruct;
// UHT-generated construction function: on first call, emits the reflection
// data for FOscDataStruct (properties Queue: TArray<FOscDataElemStruct> and
// Index: int32, with offsets/flags/metadata) and caches the resulting
// UScriptStruct in a function-local static.
UScriptStruct* Z_Construct_UScriptStruct_FOscDataStruct()
{
#if WITH_HOT_RELOAD
	// Under hot reload, reuse an already-registered struct whose CRC still matches.
	extern uint32 Get_Z_Construct_UScriptStruct_FOscDataStruct_CRC();
	UPackage* Outer = Z_Construct_UPackage__Script_OSC();
	static UScriptStruct* ReturnStruct = FindExistingStructIfHotReloadOrDynamic(Outer, TEXT("OscDataStruct"), sizeof(FOscDataStruct), Get_Z_Construct_UScriptStruct_FOscDataStruct_CRC(), false);
#else
	static UScriptStruct* ReturnStruct = nullptr;
#endif
	if (!ReturnStruct)
	{
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam Struct_MetaDataParams[] = {
			{ "ModuleRelativePath", "Private/Common/OscDataStruct.h" },
		};
#endif
		// Factory for the native struct ops used by the reflection system.
		auto NewStructOpsLambda = []() -> void* { return (UScriptStruct::ICppStructOps*)new UScriptStruct::TCppStructOps<FOscDataStruct>(); };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Queue_MetaData[] = {
			{ "ModuleRelativePath", "Private/Common/OscDataStruct.h" },
		};
#endif
		// Array property "Queue" and its inner struct element type (FOscDataElemStruct).
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Queue = { UE4CodeGen_Private::EPropertyClass::Array, "Queue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000000, 1, nullptr, STRUCT_OFFSET(FOscDataStruct, Queue), METADATA_PARAMS(NewProp_Queue_MetaData, ARRAY_COUNT(NewProp_Queue_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_Queue_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "Queue", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Index_MetaData[] = {
			{ "ModuleRelativePath", "Private/Common/OscDataStruct.h" },
		};
#endif
		// Integer property "Index".
		static const UE4CodeGen_Private::FIntPropertyParams NewProp_Index = { UE4CodeGen_Private::EPropertyClass::Int, "Index", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000000, 1, nullptr, STRUCT_OFFSET(FOscDataStruct, Index), METADATA_PARAMS(NewProp_Index_MetaData, ARRAY_COUNT(NewProp_Index_MetaData)) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Queue,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Queue_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Index,
		};
		static const UE4CodeGen_Private::FStructParams ReturnStructParams = {
			(UObject* (*)())Z_Construct_UPackage__Script_OSC,
			nullptr,
			&UE4CodeGen_Private::TNewCppStructOpsWrapper<decltype(NewStructOpsLambda)>::NewCppStructOps,
			"OscDataStruct",
			RF_Public|RF_Transient|RF_MarkAsNative,
			EStructFlags(0x00000001),
			sizeof(FOscDataStruct),
			alignof(FOscDataStruct),
			PropPointers, ARRAY_COUNT(PropPointers),
			METADATA_PARAMS(Struct_MetaDataParams, ARRAY_COUNT(Struct_MetaDataParams))
		};
		UE4CodeGen_Private::ConstructUScriptStruct(ReturnStruct, ReturnStructParams);
	}
	return ReturnStruct;
}
// Layout CRC for FOscDataStruct; used to detect stale data under hot reload.
uint32 Get_Z_Construct_UScriptStruct_FOscDataStruct_CRC() { return 1773877340U; }
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#ifdef _MSC_VER
#pragma warning (pop)
#endif
<file_sep>// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
/*===========================================================================
Generated code exported from UnrealHeaderTool.
DO NOT modify this manually! Edit the corresponding .h files instead!
===========================================================================*/
#include "GeneratedCppIncludes.h"
#include "Private/OscPrivatePCH.h"
#include "Private/Common/OscMessageStruct.h"
#ifdef _MSC_VER
#pragma warning (push)
#pragma warning (disable : 4883)
#endif
PRAGMA_DISABLE_DEPRECATION_WARNINGS
void EmptyLinkFunctionForGeneratedCodeOscMessageStruct() {}
// Cross Module References
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscMessageStruct();
UPackage* Z_Construct_UPackage__Script_OSC();
OSC_API UScriptStruct* Z_Construct_UScriptStruct_FOscDataElemStruct();
// End Cross Module References
// UHT-generated accessor: lazily builds and caches the UScriptStruct
// reflection object for FOscMessageStruct (one per process).
class UScriptStruct* FOscMessageStruct::StaticStruct()
{
	static class UScriptStruct* Singleton = NULL;
	if (!Singleton)
	{
		extern OSC_API uint32 Get_Z_Construct_UScriptStruct_FOscMessageStruct_CRC();
		// Construct (or locate) the struct inside the /Script/OSC package, keyed by name, size and layout CRC.
		Singleton = GetStaticStruct(Z_Construct_UScriptStruct_FOscMessageStruct, Z_Construct_UPackage__Script_OSC(), TEXT("OscMessageStruct"), sizeof(FOscMessageStruct), Get_Z_Construct_UScriptStruct_FOscMessageStruct_CRC());
	}
	return Singleton;
}
// UHT-generated static registration: defers the struct into the engine's
// compiled-in registry, and installs the C++ struct ops during static init.
static FCompiledInDeferStruct Z_CompiledInDeferStruct_UScriptStruct_FOscMessageStruct(FOscMessageStruct::StaticStruct, TEXT("/Script/OSC"), TEXT("OscMessageStruct"), false, nullptr, nullptr);
static struct FScriptStruct_OSC_StaticRegisterNativesFOscMessageStruct
{
	FScriptStruct_OSC_StaticRegisterNativesFOscMessageStruct()
	{
		UScriptStruct::DeferCppStructOps(FName(TEXT("OscMessageStruct")),new UScriptStruct::TCppStructOps<FOscMessageStruct>);
	}
} ScriptStruct_OSC_StaticRegisterNativesFOscMessageStruct;
// UHT-generated construction function: on first call, emits the reflection
// data for FOscMessageStruct (BlueprintType; properties Data:
// TArray<FOscDataElemStruct> and Address: FName) and caches the resulting
// UScriptStruct in a function-local static.
UScriptStruct* Z_Construct_UScriptStruct_FOscMessageStruct()
{
#if WITH_HOT_RELOAD
	// Under hot reload, reuse an already-registered struct whose CRC still matches.
	extern uint32 Get_Z_Construct_UScriptStruct_FOscMessageStruct_CRC();
	UPackage* Outer = Z_Construct_UPackage__Script_OSC();
	static UScriptStruct* ReturnStruct = FindExistingStructIfHotReloadOrDynamic(Outer, TEXT("OscMessageStruct"), sizeof(FOscMessageStruct), Get_Z_Construct_UScriptStruct_FOscMessageStruct_CRC(), false);
#else
	static UScriptStruct* ReturnStruct = nullptr;
#endif
	if (!ReturnStruct)
	{
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam Struct_MetaDataParams[] = {
			{ "BlueprintType", "true" },
			{ "ModuleRelativePath", "Private/Common/OscMessageStruct.h" },
		};
#endif
		// Factory for the native struct ops used by the reflection system.
		auto NewStructOpsLambda = []() -> void* { return (UScriptStruct::ICppStructOps*)new UScriptStruct::TCppStructOps<FOscMessageStruct>(); };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Data_MetaData[] = {
			{ "Category", "OscMessageStruct" },
			{ "ModuleRelativePath", "Private/Common/OscMessageStruct.h" },
		};
#endif
		// Array property "Data" and its inner struct element type (FOscDataElemStruct).
		static const UE4CodeGen_Private::FArrayPropertyParams NewProp_Data = { UE4CodeGen_Private::EPropertyClass::Array, "Data", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000005, 1, nullptr, STRUCT_OFFSET(FOscMessageStruct, Data), METADATA_PARAMS(NewProp_Data_MetaData, ARRAY_COUNT(NewProp_Data_MetaData)) };
		static const UE4CodeGen_Private::FStructPropertyParams NewProp_Data_Inner = { UE4CodeGen_Private::EPropertyClass::Struct, "Data", RF_Public|RF_Transient|RF_MarkAsNative, 0x0000000000000000, 1, nullptr, 0, Z_Construct_UScriptStruct_FOscDataElemStruct, METADATA_PARAMS(nullptr, 0) };
#if WITH_METADATA
		static const UE4CodeGen_Private::FMetaDataPairParam NewProp_Address_MetaData[] = {
			{ "Category", "OscMessageStruct" },
			{ "ModuleRelativePath", "Private/Common/OscMessageStruct.h" },
		};
#endif
		// FName property "Address" (the OSC address pattern of the message).
		static const UE4CodeGen_Private::FNamePropertyParams NewProp_Address = { UE4CodeGen_Private::EPropertyClass::Name, "Address", RF_Public|RF_Transient|RF_MarkAsNative, 0x0010000000000005, 1, nullptr, STRUCT_OFFSET(FOscMessageStruct, Address), METADATA_PARAMS(NewProp_Address_MetaData, ARRAY_COUNT(NewProp_Address_MetaData)) };
		static const UE4CodeGen_Private::FPropertyParamsBase* const PropPointers[] = {
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Data,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Data_Inner,
			(const UE4CodeGen_Private::FPropertyParamsBase*)&NewProp_Address,
		};
		static const UE4CodeGen_Private::FStructParams ReturnStructParams = {
			(UObject* (*)())Z_Construct_UPackage__Script_OSC,
			nullptr,
			&UE4CodeGen_Private::TNewCppStructOpsWrapper<decltype(NewStructOpsLambda)>::NewCppStructOps,
			"OscMessageStruct",
			RF_Public|RF_Transient|RF_MarkAsNative,
			EStructFlags(0x00000001),
			sizeof(FOscMessageStruct),
			alignof(FOscMessageStruct),
			PropPointers, ARRAY_COUNT(PropPointers),
			METADATA_PARAMS(Struct_MetaDataParams, ARRAY_COUNT(Struct_MetaDataParams))
		};
		UE4CodeGen_Private::ConstructUScriptStruct(ReturnStruct, ReturnStructParams);
	}
	return ReturnStruct;
}
// Layout CRC for FOscMessageStruct; used to detect stale data under hot reload.
uint32 Get_Z_Construct_UScriptStruct_FOscMessageStruct_CRC() { return 1911598487U; }
PRAGMA_ENABLE_DEPRECATION_WARNINGS
#ifdef _MSC_VER
#pragma warning (pop)
#endif
| 0299fda1fdcfdf5871c514c850e7fbb6642e1ea5 | [
"C",
"C++"
] | 12 | C | badeand/ue_ballworld | 0d7e327c0f35740362781dccfd1948baa6b5f250 | 62d270d3f748cf9964f92af4c87fece2a4cbda9f | |
refs/heads/master | <file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package iaquizapp;
import java.io.Serializable;
import java.sql.Date;
import java.util.List;
/**
 * Serializable model of an exam session ("dot thi"): identifier, display
 * name, date and description, plus the classes, subjects and exam papers
 * attached to the session.
 *
 * @author vunvd
 */
public class DotThi implements Serializable {

    // Pin the serialized form so instances stay readable across compatible
    // revisions of this class (Serializable best practice).
    private static final long serialVersionUID = 1L;

    private String dotthiID;          // session id
    private String dotthiTen;         // session display name
    private Date dotthiNgay;          // session date (java.sql.Date)
    // Description. The triple-t spelling is a historical typo kept because the
    // accessor names below are public API used by callers.
    private String dottthiDesc;
    private List<Lop> dotthiLop;      // classes taking part
    private List<MonHoc> dotthiMon;   // subjects examined
    private List<DeThi> dotthiDe;     // exam papers of the session

    /** No-arg constructor, required by serialization frameworks. */
    public DotThi() {
    }

    /** Creates a session without an id (e.g. before the server assigns one). */
    public DotThi(String dotthiTen, Date dotthiNgay, String dottthiDesc) {
        this.dotthiTen = dotthiTen;
        this.dotthiNgay = dotthiNgay;
        this.dottthiDesc = dottthiDesc;
    }

    /** Creates a session with its core fields but without the related lists. */
    public DotThi(String dotthiID, String dotthiTen, Date dotthiNgay, String dottthiDesc) {
        this.dotthiID = dotthiID;
        this.dotthiTen = dotthiTen;
        this.dotthiNgay = dotthiNgay;
        this.dottthiDesc = dottthiDesc;
    }

    /** Creates a fully populated session including classes, subjects and papers. */
    public DotThi(String dotthiID, String dotthiTen, Date dotthiNgay, String dottthiDesc, List<Lop> dotthiLop, List<MonHoc> dotthiMon, List<DeThi> dotthiDe) {
        this.dotthiID = dotthiID;
        this.dotthiTen = dotthiTen;
        this.dotthiNgay = dotthiNgay;
        this.dottthiDesc = dottthiDesc;
        this.dotthiLop = dotthiLop;
        this.dotthiMon = dotthiMon;
        this.dotthiDe = dotthiDe;
    }

    public String getDotthiID() {
        return dotthiID;
    }

    public void setDotthiID(String dotthiID) {
        this.dotthiID = dotthiID;
    }

    public String getDotthiTen() {
        return dotthiTen;
    }

    public void setDotthiTen(String dotthiTen) {
        this.dotthiTen = dotthiTen;
    }

    public String getDottthiDesc() {
        return dottthiDesc;
    }

    public void setDottthiDesc(String dottthiDesc) {
        this.dottthiDesc = dottthiDesc;
    }

    public Date getDotthiNgay() {
        return dotthiNgay;
    }

    public void setDotthiNgay(Date dotthiNgay) {
        this.dotthiNgay = dotthiNgay;
    }

    public List<Lop> getDotthiLop() {
        return dotthiLop;
    }

    public void setDotthiLop(List<Lop> dotthiLop) {
        this.dotthiLop = dotthiLop;
    }

    public List<MonHoc> getDotthiMon() {
        return dotthiMon;
    }

    public void setDotthiMon(List<MonHoc> dotthiMon) {
        this.dotthiMon = dotthiMon;
    }

    public List<DeThi> getDotthiDe() {
        return dotthiDe;
    }

    public void setDotthiDe(List<DeThi> dotthiDe) {
        this.dotthiDe = dotthiDe;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Data;
import iaquizapp.MonHoc;
import iaquizapp.SinhVien;
import java.util.List;
/**
 * Process-wide holder for the exam client's session state: the logged-in
 * student, the current exam-session id, the current paper code and the list
 * of subjects being examined. All state is static; instances carry no data.
 *
 * @author vunvd
 */
public class DataAccess {

    private static SinhVien sv;
    private static String dotthiID;
    private static String maDeThi;
    private static List<MonHoc> listMonThi;

    public DataAccess() {
    }

    /** @return the student currently logged in, or null before login. */
    public static SinhVien getSv() {
        return sv;
    }

    public static void setSv(SinhVien sv) {
        DataAccess.sv = sv;
    }

    /** @return the id of the exam session the client is attending. */
    public static String getDotthiID() {
        return dotthiID;
    }

    public static void setDotthiID(String dotthiID) {
        DataAccess.dotthiID = dotthiID;
    }

    /** @return the paper code entered for the current subject. */
    public static String getMaDeThi() {
        return maDeThi;
    }

    public static void setMaDeThi(String maDeThi) {
        DataAccess.maDeThi = maDeThi;
    }

    /** @return the subjects examined in the current session. */
    public static List<MonHoc> getListMonThi() {
        return listMonThi;
    }

    public static void setListMonThi(List<MonHoc> listMonThi) {
        DataAccess.listMonThi = listMonThi;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package iaquizapp;
import java.io.Serializable;
/**
 * Serializable model of an exam paper ("de thi"): its id, question count,
 * the encoded question-id list, the subject id and the exam-session id.
 *
 * @author vunvd
 */
public class DeThi implements Serializable {

    // Pin the serialized form across compatible class revisions.
    private static final long serialVersionUID = 1L;

    private String dethiID;       // paper code entered by students
    private int dethiSoCau;       // number of questions on the paper
    private String dethiDSCauHoi; // encoded question-id list (format defined by producers)
    private int dethiMon;         // subject id the paper belongs to
    private String dotthiID;      // exam-session id the paper belongs to

    /** No-arg constructor, required by serialization frameworks. */
    public DeThi() {
    }

    /** Creates a fully populated paper. */
    public DeThi(String dethiID, int dethiSoCau, String dethiDSCauHoi, int dethiMon, String dotthiID) {
        this.dethiID = dethiID;
        this.dethiSoCau = dethiSoCau;
        this.dethiDSCauHoi = dethiDSCauHoi;
        this.dethiMon = dethiMon;
        this.dotthiID = dotthiID;
    }

    public String getDotthiID() {
        return dotthiID;
    }

    public void setDotthiID(String dotthiID) {
        this.dotthiID = dotthiID;
    }

    public String getDethiID() {
        return dethiID;
    }

    public void setDethiID(String dethiID) {
        this.dethiID = dethiID;
    }

    public int getDethiSoCau() {
        return dethiSoCau;
    }

    public void setDethiSoCau(int dethiSoCau) {
        this.dethiSoCau = dethiSoCau;
    }

    public String getDethiDSCauHoi() {
        return dethiDSCauHoi;
    }

    public void setDethiDSCauHoi(String dethiDSCauHoi) {
        this.dethiDSCauHoi = dethiDSCauHoi;
    }

    public int getDethiMon() {
        return dethiMon;
    }

    public void setDethiMon(int dethiMon) {
        this.dethiMon = dethiMon;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package iaquizapp;
import java.io.Serializable;
/**
 * Serializable model of a student ("sinh vien"). Two constructors exist
 * because the server sometimes resolves the class as a numeric id
 * ({@code classID}) and sometimes as a display name ({@code classTen}).
 *
 * @author vunvd
 */
public class SinhVien implements Serializable {

    // Pin the serialized form across compatible class revisions.
    private static final long serialVersionUID = 1L;

    private String svID;      // student id, e.g. "D00001"
    private String svPass;    // login password
    private String svHoTen;   // full name
    private int svGioiTinh;   // gender code (0 = female, 1 = male, per UI usage)
    private String svEmail;
    private String svSDT;     // phone number
    private int classID;      // numeric class id (set by the id-based constructor)
    private String classTen;  // class display name (set by the name-based constructor)

    /** Creates a student whose class is identified by its numeric id. */
    public SinhVien(String svID, String svPass, String svHoTen, int svGioiTinh, String svEmail, String svSDT, int classID) {
        this.svID = svID;
        this.svPass = svPass;
        this.svHoTen = svHoTen;
        this.svGioiTinh = svGioiTinh;
        this.svEmail = svEmail;
        this.svSDT = svSDT;
        this.classID = classID;
    }

    /** Creates a student whose class is identified by its display name. */
    public SinhVien(String svID, String svPass, String svHoTen, int svGioiTinh, String svEmail, String svSDT, String classTen) {
        this.svID = svID;
        this.svPass = svPass;
        this.svHoTen = svHoTen;
        this.svGioiTinh = svGioiTinh;
        this.svEmail = svEmail;
        this.svSDT = svSDT;
        this.classTen = classTen;
    }

    public String getSvID() {
        return svID;
    }

    public void setSvID(String svID) {
        this.svID = svID;
    }

    public String getSvPass() {
        return svPass;
    }

    public void setSvPass(String svPass) {
        this.svPass = svPass;
    }

    public String getSvHoTen() {
        return svHoTen;
    }

    public void setSvHoTen(String svHoTen) {
        this.svHoTen = svHoTen;
    }

    public int getSvGioiTinh() {
        return svGioiTinh;
    }

    public void setSvGioiTinh(int svGioiTinh) {
        this.svGioiTinh = svGioiTinh;
    }

    public String getSvEmail() {
        return svEmail;
    }

    public void setSvEmail(String svEmail) {
        this.svEmail = svEmail;
    }

    public String getSvSDT() {
        return svSDT;
    }

    public void setSvSDT(String svSDT) {
        this.svSDT = svSDT;
    }

    public int getClassID() {
        return classID;
    }

    public void setClassID(int classID) {
        this.classID = classID;
    }

    public String getClassTen() {
        return classTen;
    }

    public void setClassTen(String classTen) {
        this.classTen = classTen;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Thi;
import com.jfoenix.controls.JFXButton;
import com.jfoenix.controls.JFXTextField;
import iaquizapp.DeThi;
import iaquizapp.MonHoc;
import iaquizapp.SinhVien;
import java.io.IOException;
import java.net.URL;
import java.rmi.RemoteException;
import java.sql.SQLException;
import java.util.List;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.text.Text;
import javafx.stage.Stage;
import quizapp.Login;
import quizapp.LoginController;
/**
 * Controller of the "paper code" (Ma De) screen: shows the subject currently
 * being examined and, when a valid paper code is entered, opens the exam
 * window for that paper.
 *
 * @author vunvd
 */
public class MaDeController implements Initializable {

    private static SinhVien sv;          // logged-in student, shared between screens
    private static Stage stage;          // stage hosting this screen
    private static int indexMon;         // index of the current subject in DataAccess.getListMonThi()
    private iaquizapp.IAQuizApp server;  // RMI service obtained from the login screen

    @FXML
    private JFXTextField tfMaDe;
    @FXML
    private JFXButton btnBatDauThi;
    @FXML
    private Text txtMonThi;

    /**
     * Displays the name of the subject at {@code indexMon}.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        server = LoginController.getServer();
        MonHoc mon = Data.DataAccess.getListMonThi().get(indexMon);
        txtMonThi.setText(mon.getMonName());
    }

    /** Advances to the next subject of the session. */
    public static void tangIndexMon() {
        MaDeController.indexMon++;
    }

    public static Stage getStage() {
        return stage;
    }

    public static void setStage(Stage stage) {
        MaDeController.stage = stage;
    }

    public static SinhVien getSv() {
        return sv;
    }

    public static void setSv(SinhVien sv) {
        MaDeController.sv = sv;
    }

    public static int getIndexMon() {
        return indexMon;
    }

    public static void setIndexMon(int indexMon) {
        MaDeController.indexMon = indexMon;
    }

    /**
     * Validates the entered paper code against the papers of the current
     * subject and exam session; on a match, opens the exam window and closes
     * this one.
     */
    @FXML
    private void clickedThi(ActionEvent event) {
        String maDe = tfMaDe.getText();
        // BUGFIX: JavaFX text fields return "" (not null) when empty, so the
        // original `== null` check never fired and blank input went straight
        // to the server lookup. Reject null, empty and whitespace-only input.
        if (maDe == null || maDe.trim().isEmpty()) {
            System.out.println("Sai ma De");
        } else {
            try {
                String dotthiID = Data.DataAccess.getDotthiID();
                MonHoc mon = Data.DataAccess.getListMonThi().get(indexMon);
                List<DeThi> dethi = server.getListDeThi(mon.getMonID(), dotthiID);
                if (dethi.stream().anyMatch(x -> x.getDethiID().equals(maDe))) {
                    TrangThiController.setMaDe(maDe);
                    Parent root = FXMLLoader.load(getClass().getResource("/Thi/TrangThi.fxml"));
                    Stage stage = new Stage();
                    Scene scene = new Scene(root);
                    stage.setResizable(false);
                    stage.setScene(scene);
                    TrangThiController.setStage(stage);
                    TrangThiController.setMonthi(mon);
                    stage.show();
                    getStage().close();
                } else {
                    System.out.println("Sai");
                }
            } catch (RemoteException ex) {
                Logger.getLogger(MaDeController.class.getName()).log(Level.SEVERE, null, ex);
            } catch (SQLException ex) {
                Logger.getLogger(MaDeController.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IOException ex) {
                Logger.getLogger(MaDeController.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package quizapp;
import Admin.AdminHomeController;
import Thi.MaDeController;
import com.jfoenix.controls.JFXPasswordField;
import com.jfoenix.controls.JFXTextField;
import iaquizapp.IAQuizApp;
import iaquizapp.SinhVien;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.sql.SQLException;
import java.util.List;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;
/**
 * Controller of the login screen: connects to the RMI quiz server at startup,
 * preloads the current exam-session data into {@link Data.DataAccess}, and
 * authenticates the student on button click.
 *
 * @author vunvd
 */
public class LoginController implements Initializable {

    // Shared RMI stub; other controllers fetch it via getServer().
    private static iaquizapp.IAQuizApp server;

    @FXML
    private JFXTextField tfID;
    @FXML
    private JFXPasswordField tfPass;

    /**
     * Looks up the RMI server and caches the current exam-session id and its
     * subject list in the DataAccess holder.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        try {
            // NOTE(review): host/port and service name are hard-coded;
            // consider moving them to configuration.
            server = (IAQuizApp) Naming.lookup("rmi://localhost:3000/server_quizapp");
            String dotthiID = server.getDotThiID();
            Data.DataAccess.setDotthiID(dotthiID);
            Data.DataAccess.setListMonThi(server.getDotThiMon(dotthiID));
        } catch (NotBoundException ex) {
            Logger.getLogger(AdminHomeController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (MalformedURLException ex) {
            Logger.getLogger(AdminHomeController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (RemoteException ex) {
            Logger.getLogger(AdminHomeController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SQLException ex) {
            Logger.getLogger(LoginController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    public static IAQuizApp getServer() {
        return server;
    }

    public static void setServer(IAQuizApp server) {
        LoginController.server = server;
    }

    /**
     * Attempts to authenticate with the id/password typed in and, on success,
     * opens the paper-code (MaDe) screen and closes the login window.
     */
    @FXML
    private void clickedDangNhap(ActionEvent event) {
        try {
            String id = tfID.getText();
            String pass = tfPass.getText();
            // NOTE(review): the student record is fetched and stored *before*
            // the credential check, so DataAccess.getSv() is populated even
            // when login fails. Also, the password travels in plain text over
            // RMI — confirm this is acceptable for the deployment.
            Data.DataAccess.setSv(server.getSinhVien(id));
            if (server.isSvLogin(id, pass)) {
                Parent root = FXMLLoader.load(getClass().getResource("/Thi/MaDe.fxml"));
                Stage stage = new Stage();
                Scene scene = new Scene(root);
                stage.setResizable(false);
                stage.setScene(scene);
                MaDeController.setStage(stage);
                MaDeController.setIndexMon(0);
                stage.show();
                Login.getStage().close();
            } else {
                // NOTE(review): failure is only reported on stdout, not to the user.
                System.out.println("sai");
            }
        } catch (RemoteException ex) {
            Logger.getLogger(LoginController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SQLException ex) {
            Logger.getLogger(LoginController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(LoginController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Components;
import Admin.AdminHomeController;
import com.jfoenix.controls.JFXButton;
import com.jfoenix.controls.JFXComboBox;
import com.jfoenix.controls.JFXListCell;
import com.jfoenix.controls.JFXTextField;
import iaquizapp.Lop;
import iaquizapp.MonHoc;
import iaquizapp.SinhVien;
import java.net.URL;
import java.rmi.RemoteException;
import java.security.SecureRandom;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.text.Text;
import javafx.stage.Stage;
import javafx.util.Callback;
import javafx.util.StringConverter;
/**
 * Controller of the add/edit student dialog. The same form serves both
 * modes: when {@code editSinhVien} is set the fields are pre-filled and
 * submit updates the existing record; otherwise submit creates a new student
 * with a generated id and a random initial password.
 *
 * @author vunvd
 */
public class AddSinhVienController implements Initializable {

    @FXML
    private JFXTextField tfSvHoTen;
    @FXML
    private JFXTextField tfSvEmail;
    @FXML
    private JFXTextField tfSvSdt;
    @FXML
    private JFXComboBox<GioiTinh> cbSvGioiTinh;
    @FXML
    private JFXComboBox<Lop> cbLopHoc;
    @FXML
    private JFXButton btnThemSV;
    @FXML
    private JFXButton btnHuy;
    // Dialog stage, edit-mode flag and the student being edited are shared
    // statically with the opening screen.
    private static Stage stage;
    private iaquizapp.IAQuizApp server;
    private static boolean editSinhVien;
    private static SinhVien sv;

    /**
     * Populates both combo boxes and, in edit mode, pre-fills the form.
     * Work is deferred with Platform.runLater so it runs on the FX thread
     * after the scene has been built.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        server = AdminHomeController.getServer();
        Platform.runLater(new Runnable() {
            @Override
            public void run() {
                setComboBoxLop();
                setComboBoxGioiTinh();
            }
        });
        Platform.runLater(new Runnable() {
            @Override
            public void run() {
                if (editSinhVien) {
                    // Edit mode: relabel submit and load the record's values.
                    btnThemSV.setText("Cập Nhật");
                    tfSvHoTen.setText(getSv().getSvHoTen());
                    tfSvEmail.setText(getSv().getSvEmail());
                    tfSvSdt.setText(getSv().getSvSDT());
                    // Pre-select the combo entries matching the student's class and gender.
                    cbLopHoc.getItems().stream().forEach(x -> {
                        if (x.getClassID() == getSv().getClassID()) {
                            cbLopHoc.getSelectionModel().select(x);
                        }
                    });
                    cbSvGioiTinh.getItems().stream().forEach(x -> {
                        if (x.getId() == sv.getSvGioiTinh()) {
                            cbSvGioiTinh.getSelectionModel().select(x);
                        }
                    });
                }
            }
        });
    }

    /**
     * Fills the gender combo box (0 = "Nữ", 1 = "Nam") and installs a cell
     * factory plus string converter so entries render by display name.
     */
    private void setComboBoxGioiTinh() {
        List<GioiTinh> list = new ArrayList<>();
        list.add(new GioiTinh(0, "Nữ"));
        list.add(new GioiTinh(1, "Nam"));
        ObservableList<GioiTinh> itemList = FXCollections.observableArrayList();
        itemList.addAll(list);
        cbSvGioiTinh.setItems(itemList);
        cbSvGioiTinh.setCellFactory(new Callback<ListView<GioiTinh>, ListCell<GioiTinh>>() {
            @Override
            public ListCell<GioiTinh> call(ListView<GioiTinh> param) {
                return new JFXListCell<GioiTinh>(){
                    @Override
                    public void updateItem(GioiTinh item, boolean empty) {
                        super.updateItem(item, empty); // keep default cell behaviour
                        if (item != null) {
                            Text text = new Text(item.getName());
                            getStyleClass().add("list-cell--gray");
                            setGraphic(text);
                        }
                    }
                };
            }
        });
        cbSvGioiTinh.setConverter(new StringConverter<GioiTinh>() {
            @Override
            public String toString(GioiTinh object) {
                if (object != null) {
                    return object.getName();
                }
                return null;
            }
            @Override
            public GioiTinh fromString(String string) {
                // Free-text entry is not supported for this combo box.
                return null;
            }
        });
    }

    /**
     * Loads the class list from the server into the class combo box and
     * installs the same render-by-name cell factory and converter.
     */
    private void setComboBoxLop() {
        try {
            List<Lop> list = server.getListLop();
            ObservableList<Lop> itemlist = FXCollections.observableArrayList();
            itemlist.addAll(list);
            cbLopHoc.setItems(itemlist);
            cbLopHoc.setCellFactory(new Callback<ListView<Lop>, ListCell<Lop>>() {
                @Override
                public ListCell<Lop> call(ListView<Lop> param) {
                    return new JFXListCell<Lop>(){
                        @Override
                        public void updateItem(Lop item, boolean empty) {
                            super.updateItem(item, empty); // keep default cell behaviour
                            if (item != null) {
                                Text text = new Text(item.getClassTen());
                                getStyleClass().add("list-cell--gray");
                                setGraphic(text);
                            }
                        }
                    };
                }
            });
            cbLopHoc.setConverter(new StringConverter<Lop>() {
                @Override
                public String toString(Lop object) {
                    if (object != null) {
                        return object.getClassTen();
                    }
                    return null;
                }
                @Override
                public Lop fromString(String string) {
                    // Free-text entry is not supported for this combo box.
                    return null;
                }
            });
        } catch (RemoteException ex) {
            Logger.getLogger(AddSinhVienController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SQLException ex) {
            Logger.getLogger(AddSinhVienController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Builds the next student id as "D" + zero-padded counter, derived from
     * the highest existing id returned by the server (null means no students
     * yet, so numbering starts at D00001).
     */
    private String getAutoSvID() {
        String newID = null;
        try {
            String svID = server.getLastSinhVien();
            int numberID;
            if (svID == null) {
                numberID = 0;
            } else {
                // Strip the leading "D" and parse the numeric suffix.
                numberID = Integer.parseInt(svID.substring(1));
            }
            newID = "D" + String.format("%05d", numberID + 1);
        } catch (RemoteException ex) {
            Logger.getLogger(AddSinhVienController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SQLException ex) {
            Logger.getLogger(AddSinhVienController.class.getName()).log(Level.SEVERE, null, ex);
        }
        return newID;
    }

    /**
     * Builds a random 8-character initial password using SecureRandom.
     * NOTE(review): the charset literal below is "&lt;KEY&gt;", which looks like a
     * redacted placeholder — as written, passwords only draw from the
     * characters '<', 'K', 'E', 'Y', '>'. Restore a real alphanumeric charset.
     */
    private String getRdPass() {
        String Ab = "<KEY>";
        SecureRandom rnd = new SecureRandom();
        StringBuilder sb = new StringBuilder(8);
        for( int i = 0; i < 8; i++ )
            sb.append( Ab.charAt( rnd.nextInt(Ab.length()) ) );
        return sb.toString();
    }

    /**
     * Submit handler: updates the current student in edit mode (id/password
     * left null so the server keeps them), otherwise creates a new student
     * with a generated id and random password. Closes the dialog on success.
     */
    @FXML
    private void clickSubmit(ActionEvent event) {
        try {
            if (editSinhVien) {
                SinhVien sv2 = new SinhVien(
                        null,
                        null,
                        tfSvHoTen.getText(),
                        cbSvGioiTinh.getSelectionModel().getSelectedItem().getId(),
                        tfSvEmail.getText(),
                        tfSvSdt.getText(),
                        cbLopHoc.getSelectionModel().getSelectedItem().getClassID());
                server.modifySinhVien(getSv().getSvID(), sv2);
                setEditSinhVien(false);
                getStage().close();
            } else {
                // NOTE(review): this local `sv` shadows the static field of the
                // same name on purpose — the static one holds the edit target.
                SinhVien sv = new SinhVien(
                        getAutoSvID(),
                        getRdPass(),
                        tfSvHoTen.getText(),
                        cbSvGioiTinh.getSelectionModel().getSelectedItem().getId(),
                        tfSvEmail.getText(),
                        tfSvSdt.getText(),
                        cbLopHoc.getSelectionModel().getSelectedItem().getClassID());
                server.addSinhVien(sv);
                getStage().close();
            }
        } catch (RemoteException ex) {
            Logger.getLogger(AddSinhVienController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SQLException ex) {
            Logger.getLogger(AddSinhVienController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Cancel handler: closes the dialog without saving. */
    @FXML
    private void clickedClose(ActionEvent event) {
        getStage().close();
    }

    public static Stage getStage() {
        return stage;
    }

    public static void setStage(Stage stage) {
        AddSinhVienController.stage = stage;
    }

    public static boolean isEditSinhVien() {
        return editSinhVien;
    }

    public static void setEditSinhVien(boolean editSinhVien) {
        AddSinhVienController.editSinhVien = editSinhVien;
    }

    public static SinhVien getSv() {
        return sv;
    }

    public static void setSv(SinhVien sv) {
        AddSinhVienController.sv = sv;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package iaquizapp;
import java.io.Serializable;
/**
 * Serializable model of a class/group ("lop"): numeric id and display name.
 *
 * @author vunvd
 */
public class Lop implements Serializable {

    // Pin the serialized form across compatible class revisions.
    private static final long serialVersionUID = 1L;

    private int classID;     // numeric class id
    private String classTen; // class display name

    /** No-arg constructor, required by serialization frameworks. */
    public Lop() {
    }

    /** Creates a class without an id (e.g. before the server assigns one). */
    public Lop(String classTen) {
        this.classTen = classTen;
    }

    /** Creates a fully populated class. */
    public Lop(int classID, String classTen) {
        this.classID = classID;
        this.classTen = classTen;
    }

    public int getClassID() {
        return classID;
    }

    public void setClassID(int classID) {
        this.classID = classID;
    }

    public String getClassTen() {
        return classTen;
    }

    public void setClassTen(String classTen) {
        this.classTen = classTen;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package iaquizapp;
import java.io.Serializable;
/**
 * Serializable model of an exam score ("diem thi"): the session, student and
 * subject it belongs to, the score value, and optionally the subject's
 * display name for presentation.
 *
 * @author vunvd
 */
public class DiemThi implements Serializable {

    // Pin the serialized form across compatible class revisions.
    private static final long serialVersionUID = 1L;

    private String dotthiID; // exam-session id
    private String svID;     // student id
    private int monID;       // subject id
    private float diemSo;    // score value
    private String monName;  // subject display name (only set by the 5-arg constructor)

    /** No-arg constructor, required by serialization frameworks. */
    public DiemThi() {
    }

    /** Creates a score record without the subject display name. */
    public DiemThi(String dotthiID, String svID, int monID, float diemSo) {
        this.dotthiID = dotthiID;
        this.svID = svID;
        this.monID = monID;
        this.diemSo = diemSo;
    }

    /** Creates a score record including the subject display name. */
    public DiemThi(String dotthiID, String svID, int monID, float diemSo, String monName) {
        this.dotthiID = dotthiID;
        this.svID = svID;
        this.monID = monID;
        this.diemSo = diemSo;
        this.monName = monName;
    }

    public String getDotthiID() {
        return dotthiID;
    }

    public void setDotthiID(String dotthiID) {
        this.dotthiID = dotthiID;
    }

    public String getSvID() {
        return svID;
    }

    public void setSvID(String svID) {
        this.svID = svID;
    }

    public int getMonID() {
        return monID;
    }

    public void setMonID(int monID) {
        this.monID = monID;
    }

    public float getDiemSo() {
        return diemSo;
    }

    public void setDiemSo(float diemSo) {
        this.diemSo = diemSo;
    }

    public String getMonName() {
        return monName;
    }

    public void setMonName(String monName) {
        this.monName = monName;
    }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package iaquizapp;
import java.io.Serializable;
/**
 * Serializable model of a multiple-choice question ("cau hoi"): the question
 * text, four answer options, the key of the correct answer, and the subject
 * it belongs to.
 *
 * @author vunvd
 */
public class CauHoi implements Serializable {

    // Pin the serialized form across compatible class revisions.
    private static final long serialVersionUID = 1L;

    private int cauhoiID;            // question id
    private String cauhoiNoiDung;    // question text
    private String cauhoiDapAnA;     // option A
    private String cauhoiDapAnB;     // option B
    private String cauhoiDapAnC;     // option C
    private String cauhoiDapAnD;     // option D
    private String cauhoiDapAnDung;  // key of the correct option
    private int monID;               // owning subject id

    /** No-arg constructor, required by serialization frameworks. */
    public CauHoi() {
    }

    /** Creates a question without an id (e.g. before the server assigns one). */
    public CauHoi(String cauhoiNoiDung, String cauhoiDapAnA, String cauhoiDapAnB, String cauhoiDapAnC, String cauhoiDapAnD, String cauhoiDapAnDung, int monID) {
        this.cauhoiNoiDung = cauhoiNoiDung;
        this.cauhoiDapAnA = cauhoiDapAnA;
        this.cauhoiDapAnB = cauhoiDapAnB;
        this.cauhoiDapAnC = cauhoiDapAnC;
        this.cauhoiDapAnD = cauhoiDapAnD;
        this.cauhoiDapAnDung = cauhoiDapAnDung;
        this.monID = monID;
    }

    /** Creates a fully populated question. */
    public CauHoi(int cauhoiID, String cauhoiNoiDung, String cauhoiDapAnA, String cauhoiDapAnB, String cauhoiDapAnC, String cauhoiDapAnD, String cauhoiDapAnDung, int monID) {
        this.cauhoiID = cauhoiID;
        this.cauhoiNoiDung = cauhoiNoiDung;
        this.cauhoiDapAnA = cauhoiDapAnA;
        this.cauhoiDapAnB = cauhoiDapAnB;
        this.cauhoiDapAnC = cauhoiDapAnC;
        this.cauhoiDapAnD = cauhoiDapAnD;
        this.cauhoiDapAnDung = cauhoiDapAnDung;
        this.monID = monID;
    }

    public int getCauhoiID() {
        return cauhoiID;
    }

    public void setCauhoiID(int cauhoiID) {
        this.cauhoiID = cauhoiID;
    }

    public String getCauhoiNoiDung() {
        return cauhoiNoiDung;
    }

    public void setCauhoiNoiDung(String cauhoiNoiDung) {
        this.cauhoiNoiDung = cauhoiNoiDung;
    }

    public String getCauhoiDapAnA() {
        return cauhoiDapAnA;
    }

    public void setCauhoiDapAnA(String cauhoiDapAnA) {
        this.cauhoiDapAnA = cauhoiDapAnA;
    }

    public String getCauhoiDapAnB() {
        return cauhoiDapAnB;
    }

    public void setCauhoiDapAnB(String cauhoiDapAnB) {
        this.cauhoiDapAnB = cauhoiDapAnB;
    }

    public String getCauhoiDapAnC() {
        return cauhoiDapAnC;
    }

    public void setCauhoiDapAnC(String cauhoiDapAnC) {
        this.cauhoiDapAnC = cauhoiDapAnC;
    }

    public String getCauhoiDapAnD() {
        return cauhoiDapAnD;
    }

    public void setCauhoiDapAnD(String cauhoiDapAnD) {
        this.cauhoiDapAnD = cauhoiDapAnD;
    }

    public String getCauhoiDapAnDung() {
        return cauhoiDapAnDung;
    }

    public void setCauhoiDapAnDung(String cauhoiDapAnDung) {
        this.cauhoiDapAnDung = cauhoiDapAnDung;
    }

    public int getMonID() {
        return monID;
    }

    public void setMonID(int monID) {
        this.monID = monID;
    }
}
| 82ec898337e97a509e071cabeef4be603a0385d8 | [
"Java"
] | 10 | Java | vunguyen9404/quizapp | 765e3ad60777fedba6ed7bdcc6be720eb3d47e55 | 00d14e466490650afbc716aaf1a508e4ecebc672 | |
refs/heads/master | <file_sep>import React, { Component } from "react";
import { connect } from "react-redux";
import { Carousel } from "react-responsive-carousel";
import { Grid, Input, Card, Icon } from "semantic-ui-react";
import axios from 'axios';
class Actor extends Component {
constructor() {
super();
this.state = {
dataFilm: [],
dataAktor: []
}
}
getDataFilm = () => {
axios.get(`http://api.tvmaze.com/search/shows?q=indo`).then((res) => {
this.setState({
dataFilm: res.data
})
})
}
getDataActor = () => {
axios.get(`http://api.tvmaze.com/search/people?q=lauren`).then((res) => {
this.setState({
dataAktor: res.data
})
})
}
pencarianAktor = (e) => {
if (e.target.value === "") {
this.getDataActor();
} else {
axios.get(`http://api.tvmaze.com/search/people?q=${e.target.value}`).then((res) => {
this.setState({
dataAktor: res.data
})
})
}
}
componentDidMount() {
this.getDataFilm();
this.getDataActor();
}
render() {
return (
<div>
<Carousel autoPlay centerMode centerSlidePercentage={20} showStatus={false}>
{this.state.dataFilm.map((data, key) => {
var images = { ...data.show.image }
if (data.show.image === null) {
images = "https://cdn.pixabay.com/photo/2015/05/15/09/13/demonstration-767982__480.jpg";
}
else {
images = images.original
}
return (
<div key={key}>
<img alt="" src={images} />
</div>
)
})}
</Carousel>
<Grid style={{ marginTop: 20 }}>
<Grid.Column width={4}>
<Carousel autoPlay centerMode centerSlidePercentage={40} showStatus={false}>
{this.state.dataAktor.map((data, key) => {
var images = { ...data.person.image }
if (data.person.image === null) {
images = "https://cdn.pixabay.com/photo/2015/05/15/09/13/demonstration-767982__480.jpg";
}
else {
images = images.original
}
return (
<div key="key">
<img alt="" src={images} />
</div>
)
})}
</Carousel>
</Grid.Column>
<Grid.Column width={12}>
<Input icon='search' placeholder='Search...' style={{ marginBottom: 20 }} onChange={(e) => { this.pencarianAktor(e) }} />
<Grid >
{this.state.dataAktor.map((data, key) => {
var images = { ...data.person.image }
if (data.person.image === null) {
images = "https://cdn.pixabay.com/photo/2015/05/15/09/13/demonstration-767982__480.jpg";
}
else {
images = images.original
}
return (
<Grid.Column width={4}>
<Card
image={images}
header={data.person.name}
meta={data.person.gender}
extra={<Icon name='start' ><p>{data.score}</p></Icon>}
/>
</Grid.Column>
)
})}
</Grid>
</Grid.Column>
</Grid>
</div>
)
}
}
// NOTE(review): this does not follow the usual mapDispatchToProps contract —
// it dispatches ACTIVE_ITEM immediately when connect() invokes it (i.e. as a
// side effect of wiring the component) and returns the dispatched action
// object itself as props, instead of returning a map of callback functions.
// Presumably the intent is to mark "Actor" as the active nav item when the
// page is connected; confirm before refactoring.
const mapDispatchToProps = (dispatch) => {
    return dispatch({
        type: "ACTIVE_ITEM",
        activeItem: "Actor"
    })
}
// This page takes nothing from the Redux store: expose no state-derived props.
const mapStateToProps = () => ({})
export default connect(mapStateToProps, mapDispatchToProps)(Actor) | 110e575d04df60174294de149892542da6c8197f | [
"JavaScript"
] | 1 | JavaScript | MFatoni/React_Film | d6a16e7529250c22dc265607b5e45c10f6af28c7 | b76a485c366192ea650d65970169bc1c2a8bd409 | |
refs/heads/master | <file_sep>#include <iostream>
#include "lamp.h"
//---------------------------------------------
/*
This program works on its own, but it still does not work under googletest.
I suspect this is because the class was implemented without a functor —
to be confirmed.
*/
//---------------------------------------------
/*
This comment exists only to create a change for Git.
*/
using namespace std;
int main()
{
Lamp R;
unsigned long long int reshet = R.input_data ();
cout<< reshet<<endl;
return 0;
}
<file_sep>#ifndef LAMP_H
#define LAMP_H
/*
This comment exists only to create a change for Git.
*/
// Lamp: reads a problem description from "init.txt", computes a result and
// writes it to "rez.txt" (see Lamp::input_data in lamp.cpp).
class Lamp
{
// Grid dimensions: input_data() uses m_xn * m_xm as the total cell count.
unsigned long long int m_xn;
unsigned long long int m_xm;
// Number of interval rows to read from the input file.
int m_trw;
// Bounds of the interval currently being processed (both inclusive,
// judging by the (m_end - m_bgn + 1) length computation in input_data).
int m_bgn;
int m_end;
public:
Lamp(unsigned long long int i_xn, unsigned long long int i_xm, int i_trw, int i_bgn, int i_end);
Lamp();
// Reads init.txt, computes (m_xn * m_xm) minus the length of every listed
// interval, writes the result to rez.txt and returns it.
unsigned long long int input_data(void);
};
#endif // LAMP_H
<file_sep>#include <iostream>
#include <fstream>
#include <string>
#include "lamp.h"
/*
This comment exists only to create a change for Git.
*/
using namespace std;
// Construct a Lamp with explicit dimensions and interval bounds.
// Bug fix: the initializer list previously assigned 1 to every parameter
// (m_xn(i_xn=1), m_xm(i_xm=1), ...), silently discarding the caller's
// arguments so every member always started at 1. Members are now
// initialized from the values actually passed in.
Lamp::Lamp(unsigned long long int i_xn, unsigned long long int i_xm, int i_trw, int i_bgn, int i_end): m_xn(i_xn), m_xm(i_xm), m_trw(i_trw), m_bgn(i_bgn), m_end(i_end)
{
}
// Default-construct a Lamp. Members are left uninitialized and are expected
// to be populated by input_data() before use — TODO confirm this is
// intentional (main.cpp uses exactly this pattern).
Lamp::Lamp()
{
}
// Reads "init.txt": the first line holds three values (grid width, grid
// height, number of interval rows); each following row holds one interval
// [m_bgn, m_end]. The result is the total cell count (width * height) minus
// the length of every interval; it is written to "rez.txt" and returned.
// Returns 0 if the output file cannot be opened.
//
// NOTE(review): parsing relies on fst.erase(0,2) to skip past each previous
// token, which assumes every token before the last one is a single character
// followed by one separator — multi-digit leading values would break this.
// TODO confirm the expected input format.
unsigned long long int Lamp::input_data(void)
{
    string fst;                          // current input line (shrunk as tokens are consumed)
    unsigned long long int s_mtx=0;      // total cell count of the grid
    unsigned long long int rez=0;        // running result
    int cnt=1;                           // 1-based index of the line being parsed
    //-----------------
    //open file init.txt
    //-----------------
    ifstream f_read("init.txt", ios::ios_base::in);
    if (f_read.is_open ())
    {
        while(!f_read.eof ())
        {
            getline(f_read, fst, '\n');
            if (cnt==1)
            {
                // Header line: width, height, number of interval rows.
                m_xn = static_cast<unsigned long long int>(stoi(fst));
                m_xm = static_cast<unsigned long long int>(stoi(fst.erase (0,2)));
                m_trw = stoi(fst.erase (0,2));
                s_mtx = (m_xn*m_xm);
                rez = s_mtx;
                ++cnt;
            }
            else if (cnt>1 && cnt<=m_trw+1)
            {
                // Interval row: subtract the inclusive interval length.
                m_bgn = stoi(fst.erase (0,2));
                m_end = stoi (fst.erase (0,2));
                rez -= static_cast<unsigned long long int>(m_end-m_bgn+1);
                ++cnt;
                // Stop once all declared interval rows have been consumed.
                if (cnt>m_trw+1) break;
            }
            else
            {
                // More lines than declared in the header.
                cout<<"error...";
                break;
            }
        }
    }
    else
    {
        cout << "File not open to read!" << endl;
    }
    f_read.close ();
    //-----------------
    //write file rez.txt
    //-----------------
    std::ofstream f_rez("rez.txt", ios::trunc);
    if(f_rez.is_open ())
    {
        f_rez<<rez;
        f_rez.close ();
    }
    else
    {
        cout << "File is not open!" << endl;
        return 0;
    }
    return rez;
}
| a981d37f1cf03bef5c1097d617069755815ae73c | [
"C++"
] | 3 | C++ | yonivol/VOLO | 16b6b06e7a6f57ae0a1ad93d7ef99b21e2e99118 | cf2d75db62b3c82a1e2c7026b049d1715947207b | |
refs/heads/master | <file_sep><?php
/**
* v1模块配置文件
* @author Gene <https://github.com/Talkyunyun>
*/
return [
'params' => [
'v1' => '版本1'
],
'components' => [
'log' => [
'class' => 'yii\log\Dispatcher',
'traceLevel' => YII_DEBUG ? 3 : 0,
'targets' => [
[
'class' => 'yii\log\FileTarget',
'levels' => ['error', 'warning', 'trace' ,'info'],
'logVars' => [],
'logFile' => '@api/modules/v1/runtime/logs/run_'.date('Y-m-d').'.log'
]
]
]
]
];<file_sep><?php
return [
'api' => '我是API总的'
];<file_sep><?php
namespace api\modules\v1\controllers;
use api\controllers\BaseController;
class SiteController extends BaseController {
    /**
     * Default entry action of the v1 site controller.
     * NOTE(review): currently just echoes the literal 33 — this looks like
     * placeholder/scaffolding output rather than a real response.
     */
    public function actionIndex() {
        echo 33;
    }
}<file_sep># yii2-api-admin-seed<file_sep><?php
namespace api\modules\v1;
/**
* 版本模块入口文件
* @author Gene <https://github.com/Talkyunyun>
*/
class Module extends \yii\base\Module {
// 指定模板布局文件
public $layout = 'main';
// 初始化
    /**
     * Module bootstrap: merges this module's private configuration into the
     * already-running application. Module URL rules are merged in ahead of
     * the application's existing rules, module components are combined with
     * the application-level ones, and application params are merged over the
     * module's own (array_merge: later entries win on key clashes).
     */
    public function init() {
        parent::init();
        // Load this module's configuration file
        $config = require_once __DIR__ . '/config/config.php';
        $components = \Yii::$app->getComponents();
        $routes = require_once __DIR__ . '/config/routes.php';
        // Module routes go first so they are merged ahead of existing rules
        $components['urlManager']['rules'] = array_merge($routes, $components['urlManager']['rules']);
        $config['components'] = array_merge($components, $config['components']);
        $config['params'] = array_merge($config['params'], \Yii::$app->params);
        \Yii::configure(\Yii::$app, $config);
    }
}<file_sep>#
# SQL Export
# Created by Querious (201009)
# Created: 2017年11月3日 GMT+8 下午3:38:09
# Encoding: Unicode (UTF-8)
#
SET @PREVIOUS_FOREIGN_KEY_CHECKS = @@FOREIGN_KEY_CHECKS;
SET FOREIGN_KEY_CHECKS = 0;
DROP TABLE IF EXISTS `sys_role_user`;
DROP TABLE IF EXISTS `sys_role`;
DROP TABLE IF EXISTS `sys_node`;
DROP TABLE IF EXISTS `sys_admin_user_login_log`;
DROP TABLE IF EXISTS `sys_admin_user`;
DROP TABLE IF EXISTS `sys_access`;
CREATE TABLE `sys_access` (
`role_id` int(11) unsigned NOT NULL COMMENT '角色ID',
`node_id` int(11) unsigned NOT NULL COMMENT '节点ID,sys_node中的ID',
KEY `role_id` (`role_id`),
KEY `node_id` (`node_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='权限表';
CREATE TABLE `sys_admin_user` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户UID',
`username` varchar(50) NOT NULL COMMENT '用户名',
`password` char(60) NOT NULL COMMENT '登录密码',
`real_name` varchar(20) NOT NULL DEFAULT '' COMMENT '真实姓名',
`phone` varchar(18) NOT NULL DEFAULT '' COMMENT '联系号码',
`email` varchar(30) NOT NULL DEFAULT '' COMMENT '电子邮箱',
`auth_key` varchar(60) NOT NULL DEFAULT '' COMMENT 'auth_key',
`access_token` varchar(60) NOT NULL DEFAULT '' COMMENT 'access_token',
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态,0:禁用,1:正常',
`birth_date` date DEFAULT NULL COMMENT '生日日期',
`created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
PRIMARY KEY (`id`),
UNIQUE KEY `username` (`username`),
KEY `status` (`status`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COMMENT='系统用户表';
CREATE TABLE `sys_admin_user_login_log` (
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
`uid` int(11) unsigned NOT NULL COMMENT '登录UID',
`ip` varchar(15) NOT NULL DEFAULT '' COMMENT '登录IP',
`data` text COMMENT '请求参数,json格式',
`url` varchar(255) NOT NULL DEFAULT '' COMMENT '请求Url地址',
`client_name` varchar(60) NOT NULL DEFAULT '' COMMENT '客户端名称',
`client_version` varchar(60) NOT NULL DEFAULT '' COMMENT '客户端版本',
`platform` varchar(60) NOT NULL DEFAULT '' COMMENT '客户端系统',
`created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '登录时间',
PRIMARY KEY (`id`),
KEY `uid` (`uid`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 COMMENT='系统用户登录日志表';
CREATE TABLE `sys_node` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`pid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '父级ID,0:顶级节点',
`name` varchar(100) NOT NULL COMMENT '操作名称,或菜单名',
`url` varchar(255) NOT NULL DEFAULT '' COMMENT 'url地址',
`status` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态,1正常 0禁用',
`is_menu` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否是菜单,0:否,1:是',
`level` tinyint(1) unsigned NOT NULL DEFAULT '1' COMMENT '等级',
`can_del` tinyint(1) unsigned NOT NULL DEFAULT '1' COMMENT '是否可以删除,0:不可以,1:可以',
`sort` int(11) unsigned DEFAULT '0' COMMENT '排序',
`font_icon` varchar(100) DEFAULT '' COMMENT '菜单字体图片',
PRIMARY KEY (`id`),
KEY `level` (`level`),
KEY `pid` (`pid`),
KEY `status` (`status`),
KEY `is_menu` (`is_menu`)
) ENGINE=MyISAM AUTO_INCREMENT=13 DEFAULT CHARSET=utf8 COMMENT='操作节点表';
CREATE TABLE `sys_role` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(100) NOT NULL DEFAULT '' COMMENT '角色名字',
`status` tinyint(1) unsigned NOT NULL DEFAULT '1' COMMENT '状态,1正常 0禁用',
`remark` varchar(255) NOT NULL DEFAULT '' COMMENT '备注',
PRIMARY KEY (`id`),
KEY `status` (`status`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 COMMENT='角色表';
CREATE TABLE `sys_role_user` (
`role_id` int(11) unsigned DEFAULT '0' COMMENT '角色ID,对应sys_role表主键',
`user_id` int(11) DEFAULT '0' COMMENT '用户ID',
KEY `role_id` (`role_id`),
KEY `user_id` (`user_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='用户角色表';
SET FOREIGN_KEY_CHECKS = @PREVIOUS_FOREIGN_KEY_CHECKS;
SET @PREVIOUS_FOREIGN_KEY_CHECKS = @@FOREIGN_KEY_CHECKS;
SET FOREIGN_KEY_CHECKS = 0;
LOCK TABLES `sys_access` WRITE;
ALTER TABLE `sys_access` DISABLE KEYS;
INSERT INTO `sys_access` (`role_id`, `node_id`) VALUES
(4,2),
(1,4),
(1,3),
(1,2),
(1,1);
ALTER TABLE `sys_access` ENABLE KEYS;
UNLOCK TABLES;
LOCK TABLES `sys_admin_user` WRITE;
ALTER TABLE `sys_admin_user` DISABLE KEYS;
INSERT INTO `sys_admin_user` (`id`, `username`, `password`, `real_name`, `phone`, `email`, `auth_key`, `access_token`, `status`, `birth_date`, `created`, `updated`) VALUES
(1,'admin','$2y$13$2TY3rdo.Y3jUoZ6O3STC4OAWDFux1Q3h5yzRqDpLYJQSjmTxt6qxK','admin','','<EMAIL>','','',1,'2017-09-30','2017-09-15 15:09:18','2017-09-15 20:09:42');
ALTER TABLE `sys_admin_user` ENABLE KEYS;
UNLOCK TABLES;
LOCK TABLES `sys_node` WRITE;
ALTER TABLE `sys_node` DISABLE KEYS;
INSERT INTO `sys_node` (`id`, `pid`, `name`, `url`, `status`, `is_menu`, `level`, `can_del`, `sort`, `font_icon`) VALUES
(1,0,'系统管理','#',1,1,1,0,0,'cog'),
(2,1,'菜单管理','/system/node/index',1,1,1,0,0,''),
(3,1,'角色管理','/system/role/index',1,1,1,0,0,''),
(4,1,'系统用户','/system/user/index',1,1,1,0,0,'');
ALTER TABLE `sys_node` ENABLE KEYS;
UNLOCK TABLES;
LOCK TABLES `sys_role` WRITE;
ALTER TABLE `sys_role` DISABLE KEYS;
INSERT INTO `sys_role` (`id`, `name`, `status`, `remark`) VALUES
(1,'系统管理员',1,'系统管理员'),
(2,'sdfds',1,'sfs'),
(4,'aaa',1,'ddd');
ALTER TABLE `sys_role` ENABLE KEYS;
UNLOCK TABLES;
LOCK TABLES `sys_role_user` WRITE;
ALTER TABLE `sys_role_user` DISABLE KEYS;
INSERT INTO `sys_role_user` (`role_id`, `user_id`) VALUES
(2,2),
(2,4),
(4,4),
(2,3),
(4,3),
(2,6),
(4,6);
ALTER TABLE `sys_role_user` ENABLE KEYS;
UNLOCK TABLES;
SET FOREIGN_KEY_CHECKS = @PREVIOUS_FOREIGN_KEY_CHECKS;
<file_sep><?php
return [
'GET /get' => 'v1/test/index'
];<file_sep><?php
$routes = [
'/gene' => 'v1/test/test'
];
return $routes;<file_sep><?php
namespace api\modules\v1\controllers;
use yii\web\Controller;
class TestController extends Controller {
    /**
     * Scratch/debug action.
     * NOTE(review): dd() conventionally dumps its argument and halts, which
     * would make the echo below unreachable — confirm and remove the debug
     * call before shipping.
     */
    public function actionIndex() {
        dd(\Yii::$app->getComponents());
        echo "aaaaa";
        // return $this->render('index');
    }
    /** Scratch action that emits a fixed placeholder string. */
    public function actionTest() {
        echo "sdfdsf";
    }
} | 878ac1a9b8b5409efb679591ef605cc46b6891e9 | [
"Markdown",
"SQL",
"PHP"
] | 9 | PHP | zhangkg/yii2-api-admin-seed | 7dc33e10e8e59719420f9057c47928fa3e0915f1 | d05213f7b505b899b0ed65541ea3c02d31028c76 | |
refs/heads/master | <repo_name>seoulpm/PerlKr-Web<file_sep>/README.md
PerlKr-Web
==========
Perl.Kr Web<file_sep>/run
#!/bin/sh
# Serve the Perl.Kr web application with the Starman PSGI server:
# 5 worker processes listening on port 5000 (reverse-proxied by Apache,
# see INSTALL.md in this repository).
starman --port=5000 --workers=5 perlkr-web.pl
<file_sep>/public/gallery/about.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Account Setting</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="">
<meta name="author" content="">
<!-- Le styles -->
<link href="css/bootstrap.css" rel="stylesheet">
<link href="css/style.css" rel="stylesheet">
<link href="css/icon-style.css" rel="stylesheet">
<link href="css/bootstrap-responsive.css" rel="stylesheet">
<link href="source/jquery.fancybox.css" rel="stylesheet">
<!-- Le HTML5 shim, for IE6-8 support of HTML5 elements -->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<!-- Le fav and touch icons -->
<link rel="shortcut icon" href="img/favicon.ico" type="image/x-icon">
<link rel="icon" href="img/favicon.ico" type="image/x-icon">
</head>
<body>
<div class="navbar navbar-fixed-top">
<div class="navbar-inner">
<div class="container-fluid">
<a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</a>
<a class="brand" href="index.html"><img src="img/logo.png"/></a>
<div class="nav-collapse">
<ul class="nav">
<li class="divider-vertical"></li>
<li class="active"><a href="about.html">About</a></li>
<li class="divider-vertical"></li>
<form class="navbar-search pull-left" action="">
<input type="text" class="search-query span2" placeholder="Search" required>
</form>
<li class="divider-vertical"></li>
</ul>
<ul class="nav pull-right">
<li class="divider-vertical"></li>
<li class="gravatar"><a href="profile.html"></a></li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><NAME> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="account_setting.html"><i class="icon-user"></i> Account Setting</a></li>
<li class="divider"></li>
<li><a href="signin.html"><i class="icon-off"></i> Sign out</a></li>
</ul>
</li>
<li class="divider-vertical"></li>
</ul>
</div><!--/.nav-collapse -->
</div>
</div>
</div>
<div class="container">
<div class="row">
<div class="span12 about-wrapper">
<div class="row">
<div class="span3 about-title">
<h2>About</h2>
</div>
<div class="span9 about-content">
<div class="row">
<div class="span3">
<h3>Lorem ipsum dolor sit amet.</h3>
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vitae nunc lectus, sit amet viverra ligula.
Integer dignissim aliquam laoreet. Sed eleifend tempor ante, id fermentum enim vulputate vitae.
Donec ac ultricies ante. Ut lacus lorem, lacinia et pretium non, posuere suscipit dui.
Nulla eget mauris sapien, in laoreet nibh.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam tempor felis et orci facilisis sodales.
Nunc dignissim turpis sed sapien euismod tincidunt. Integer pretium neque.</p>
<p><a href="#" class="btn btn-large">Join Now →</a></p>
</div>
<div class="span6 img-thumbnail marg10-left">
<ul class="thumbnails">
<li class="span4">
<a href="#" class="thumbnail">
<img src="http://placehold.it/360x268" alt="">
</a>
</li>
<li class="span2">
<a href="#" class="thumbnail">
<img src="http://placehold.it/160x120" alt="">
</a>
</li>
<li class="span2">
<a href="#" class="thumbnail">
<img src="http://placehold.it/160x120" alt="">
</a>
</li>
<li class="span2">
<a href="#" class="thumbnail">
<img src="http://placehold.it/160x120" alt="">
</a>
</li>
<li class="span2">
<a href="#" class="thumbnail">
<img src="http://placehold.it/160x120" alt="">
</a>
</li>
<li class="span2">
<a href="#" class="thumbnail">
<img src="http://placehold.it/160x120" alt="">
</a>
</li>
</ul>
</div>
<div class="span4">
<h3>Lorem ipsum dolor sit amet.</h3>
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vitae nunc lectus, sit amet viverra ligula.
Integer dignissim aliquam laoreet. Sed eleifend tempor ante, id fermentum enim vulputate vitae.
Donec ac ultricies ante. Ut lacus lorem, lacinia et pretium non, posuere suscipit dui.
Nulla eget mauris sapien, in laoreet nibh.</p>
</div>
<div class="span5">
<h3>Lorem ipsum dolor sit amet.</h3>
<p class="marg20-right">Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vitae nunc lectus, sit amet viverra ligula.
Integer dignissim aliquam laoreet. Sed eleifend tempor ante, id fermentum enim vulputate vitae.
Donec ac ultricies ante. Ut lacus lorem, lacinia et pretium non, posuere suscipit dui.
Nulla eget mauris sapien, in laoreet nibh.</p>
</div>
</div>
</div>
<div class="span3 about-title">
<h2>Partners</h2>
</div>
<div class="span9 about-content">
<div class="row">
<div class="span3">
<h3>Lorem ipsum dolor sit amet.</h3>
<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce vitae nunc lectus, sit amet viverra ligula.
Integer dignissim aliquam laoreet. Sed eleifend tempor ante, id fermentum enim vulputate vitae.
Donec ac ultricies ante. Ut lacus lorem, lacinia et pretium non, posuere suscipit dui.
Nulla eget mauris sapien, in laoreet nibh.</p>
</div>
<div class="span6 img-thumbnail marg10-left">
<ul class="thumbnails">
<li class="span3">
<div class="thumbnail">
<img src="http://placehold.it/260x180" alt="">
<div class="caption">
<h5><NAME></h5>
<p>Cras justo odio, dapibus ac facilisis in, egestas eget quam. Donec id elit non mi porta gravida at eget metus. Nullam id dolor id nibh ultricies vehicula ut id elit.</p>
</div>
</div>
</li>
<li class="span3">
<div class="thumbnail">
<img src="http://placehold.it/260x180" alt="">
<div class="caption">
<h5><NAME></h5>
<p>Cras justo odio, dapibus ac facilisis in, egestas eget quam. Donec id elit non mi porta gravida at eget metus. Nullam id dolor id nibh ultricies vehicula ut id elit.</p>
</div>
</div>
</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="container footer">
<hr class="alt" />
<div class="row">
<div class="span6">© 2012 Fransisca Gallery. All right reserved.</div>
<div class="span6">
<ul>
<li><a href="#">Terms Of Service</a></li>
<li><a href="#">Privacy</a></li>
</ul>
</div>
</div>
</div>
<!-- Le javascript
================================================== -->
<!-- Placed at the end of the document so the pages load faster -->
<script src="js/jquery-1.7.2.min.js"></script>
<script src="js/bootstrap.js"></script>
<script src="js/image-gallery.js"></script>
<script src="js/jquery.mousewheel-3.0.6.pack.js"></script>
<script src="source/jquery.fancybox.js"></script>
</body>
</html>
<file_sep>/INSTALL.md
설치
=====
Perl 의존 모듈
---------------
- Const::Fast
- Mojolicious
- Plack
- Plack::Middleware::Expires
- Starman
- Text::MultiMarkdown
apache 설정
------------
5000번 포트를 perl.kr로 연결하기 위한 아파치
리버스 프록시 설정은 다음과 같습니다.
LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_balancer_module modules/mod_proxy_balancer.so
LoadModule proxy_http_module modules/mod_proxy_http.so
LoadModule proxy_connect_module modules/mod_proxy_connect.so
<IfModule mod_proxy.c>
ProxyRequests Off
<Proxy *>
Order deny,allow
Allow from all
</Proxy>
<VirtualHost *:80>
ServerName perl.kr
ProxyPass / http://127.0.0.1:5000/
ProxyPassReverse / http://127.0.0.1:5000/
ErrorLog /var/log/apache2/perl.kr/error_log
CustomLog /var/log/apache2/perl.kr/access_log combined
</VirtualHost>
또한 아파치는 모르는 확장자에 대해서는 `text/plain`으로
보내므로 캐싱을 위해 폰트 관련 타입을 추가로 설정 합니다.
AddType application/vnd.bw-fontobject .eot
AddType application/x-font-ttf .ttf
AddType application/x-woff .woff
| 454d50417e001dc451cb8ca63910f1f4379cbf38 | [
"Markdown",
"HTML",
"Shell"
] | 4 | Markdown | seoulpm/PerlKr-Web | 08f1f6750ef1a69b385d0515d1ec4cb8c0db4bb9 | 17eec2f6c78fa4caade43bf46cf9ba14d90695f9 | |
refs/heads/master | <repo_name>pouyaman/python-hit-blow<file_sep>/python-hit-blow.py
import random
import os
# Workaround for Python2
try:
input = raw_input
except NameError:
pass
# Let's set the colours
# Define different colour commands
win_colour_dict = {
'cmd' : 'color ',
'colours': {
'pass' : 'A0',
'fail' : '4F',
'start' : '30',
'last' : '60',
'reset' : '0F'
}
}
ansi_colour_dict = {
'cmd' : 'echo -e ',
'colours': {
'pass' : r'"\e[38;2;0;255;0m"',
'fail' : r'"\e[38;2;255;0;0m"',
'start' : r'"\e[38;2;0;255;255m"',
'last' : r'"\e[38;2;255;255;0m"',
'reset' : r'"\e[0m"'
}
}
# Choose the colour command set based on OS
if os.name == 'nt':
temp_dict = win_colour_dict
else:
temp_dict = ansi_colour_dict
# Generate the command table
colour_dict = {}
for colour, command in temp_dict['colours'].items():
colour_dict[colour] = temp_dict['cmd'] + command
# Some settings for the game
TOTAL_TURNS = 8
TOTAL_COLOURS = 6
TOTAL_CODE = 4
# Location fo the code to crack
CODE = []
# Valid numbers
VALID_RANGE = range(1,TOTAL_COLOURS+1)
# Generate random Code to be broken
# Rejection sampling: keep drawing random digits in [1, TOTAL_COLOURS] and
# append only unseen ones, so the secret ends up with TOTAL_CODE *distinct*
# digits (terminates because TOTAL_COLOURS >= TOTAL_CODE).
while(1):
    digit = random.randint(1, TOTAL_COLOURS)
    if digit not in CODE:
        CODE.append(digit)
    if len(CODE) == TOTAL_CODE:
        break
# Retrieve input from user and basic checks
def get_input():
    """Prompt for a guess and return it as a list of ints, or None if invalid.

    A valid guess has exactly TOTAL_CODE characters, each a digit inside
    VALID_RANGE. Invalid input prints an explanation and returns None so the
    caller can re-prompt without charging a turn.

    Bug fix: the previous version called int(c) before validating the
    character, so a non-numeric character (e.g. "a") raised an uncaught
    ValueError and crashed the game instead of re-prompting.
    """
    number = input("Give number:\n")
    if len(number) != TOTAL_CODE:
        print("Code should be %i digits" %TOTAL_CODE)
        return None
    result = []
    for c in number:
        # Reject non-numeric characters up front instead of crashing in int().
        if not c.isdigit() or int(c) not in VALID_RANGE:
            print("%s not valid..." %c)
            print("Each digit of code should a number between 1 and %i" %TOTAL_COLOURS)
            return None
        result.append(int(c))
    return result
def print_exit_message():
    # Final-screen handshake: wait for the player to press enter so the
    # outcome stays visible, restore the default console colours, then
    # terminate the whole program via exit().
    input("Press enter to exit...")
    set_colour('reset')
    exit()
# Helper functions
def big_divider():
    """Return the heavy '=' rule printed between major output sections."""
    rule = "=========================================="
    return rule
def small_divider():
    """Return the light '-' rule printed between turns."""
    rule = "------------------------------------------"
    return rule
def set_colour(colour):
    # Emit the platform-specific console command (Windows `color ...` or an
    # ANSI-escape echo) precomputed in colour_dict for this colour key.
    os.system(colour_dict[colour])
# Print greetings
set_colour('start')
print("Welcome to Hit & Blow!")
print(big_divider())
print("Guess a %i digit number." %TOTAL_CODE)
print("Each digit ranging from [1 to %i]." %TOTAL_COLOURS)
print("You have %i turns." %TOTAL_TURNS)
print("")
print("Hits : Correct colour and spot")
print("Blows: Correct colour only")
# Start Loop
# Main game loop: the player gets TOTAL_TURNS attempts to guess CODE.
turn = 0
while(turn < TOTAL_TURNS):
    turn+=1
    # Per-digit scoreboards. Index 0 stays None so the list index matches
    # the digit value, which starts at 1.
    hits = [None] # make index 0 = None, so the list index matches the digit starting from 1
    blows = [None]
    for i in VALID_RANGE:
        hits.append( None)
        blows.append(None)
    # Print info
    print(small_divider())
    if(turn == TOTAL_TURNS):
        set_colour('last')
        print("!!!LAST TURN!!!")
    else:
        print("Turn: %i" %turn)
    # Retrieve input from user
    numbers = get_input()
    # Malformed guesses do not consume a turn.
    if numbers is None:
        print("Turn not lost.")
        print("Try again!")
        turn-=1
        continue
    # Exact match wins immediately (print_exit_message terminates the process).
    if numbers == CODE:
        set_colour('pass')
        print(big_divider())
        print("Code:")
        print(CODE)
        print("WON!!!")
        print_exit_message()
    # Score the guess per digit: a hit (right digit, right spot) permanently
    # cancels any blow for that digit; a blow (right digit, wrong spot) is
    # only recorded while the digit has no hit yet.
    for i, digit in enumerate(numbers):
        if digit == CODE[i]: # find hit and cancel prior blow/prevent future blow for that number
            hits[ digit] = 1
            blows[digit] = 0
        elif blows[digit] != 0 and digit in CODE: # mark blow if that number hasn't had a hit yet
            blows[digit] = 1
    # Let user know what the total hit and blows are
    print("Hits : %i" %hits.count(1))
    print("Blows: %i" %blows.count(1) )
# Loop exhausted without a win: reveal the code before exiting.
set_colour('fail')
print(big_divider())
print("Code:")
print(CODE)
print("LOST!!!")
print_exit_message()<file_sep>/README.md
# python-hit-blow
A simple command line implementation of guess 4 digits game, also known as Mastermind or Hit & Blow.
| 00094ed3b8c65f55d6cb4f4a345f7b9b6f6374c5 | [
"Markdown",
"Python"
] | 2 | Python | pouyaman/python-hit-blow | 6ed14bb145d821532e0c1766e30994d388e0b202 | 9ffbb06beff155dee2db7f0af7fecf5ca934dfab | |
refs/heads/master | <repo_name>kazak-by/HelloGIt42<file_sep>/HelloGit.py
print 'Hello Git'
# first step
# step 2
# step 3
# step 4
# step 5
# step 6
# step 7
# step 8
| 4f55b72641253a476da841512799e3ef6f64203f | [
"Python"
] | 1 | Python | kazak-by/HelloGIt42 | f8f99619c4ede1eab41c28ea657d3309020c2ab2 | 8f7a46ef8b21f6ba076651b19526b55b816d19b2 | |
refs/heads/master | <repo_name>SporkyMcForkinspoon/CSCE156<file_sep>/src/com/datacontainers/Value.java
package com.datacontainers;
import java.util.ArrayList;
/**
 * Flat data-transfer container used for JSON output. One class covers every
 * record type; which subset of fields is populated depends on the
 * constructor used (customer, movie, parking, person with/without email,
 * refreshment, season pass). Unused fields stay null.
 */
public class Value {
    //Variable names are not ordered arbitrarily. Changes will cause the JSON file to output in the wrong order.
    private String customerIdNumber;
    private String productIdNumber;
    private String productType;
    private String customerType;
    private String personIdNumber;
    private String lastName;
    private String firstName;
    private String movieDate;
    private String name;
    private Address address;
    private ArrayList<String> emails;
    private String startDate;
    private String endDate;
    private String screenNumber;
    private String price;
    //Constructors
    //Customer Constructor
    public Value(String customerIdNumber, String customerType, String personIdNumber, String name, Address address) {
        super();
        this.customerIdNumber = customerIdNumber;
        this.customerType = customerType;
        this.personIdNumber = personIdNumber;
        this.name = name;
        this.address = address;
    }
    //Movie constructor
    public Value(String productIdNumber, String productType, String movieDate, String name, Address address, String screenNumber, String price) {
        super();
        this.productIdNumber = productIdNumber;
        this.productType = productType;
        this.movieDate = movieDate;
        this.name = name;
        this.address = address;
        this.screenNumber = screenNumber;
        this.price = price;
    }
    //Parking constructor
    public Value(String productIdNumber, String productType, String price) {
        this.productIdNumber = productIdNumber;
        this.productType = productType;
        this.price = price;
    }
    //Person Constructor (W/ Email)
    public Value(String personIdNumber, String firstName, String lastName, Address address, ArrayList<String> emails) {
        super();
        this.personIdNumber = personIdNumber;
        this.firstName = firstName;
        this.lastName = lastName;
        this.address = address;
        this.emails = emails;
    }
    //Person Constructor (No Email)
    public Value(String personIdNumber, String firstName, String lastName, Address address) {
        super();
        this.personIdNumber = personIdNumber;
        this.firstName = firstName;
        this.lastName = lastName;
        this.address = address;
    }
    //Refreshment constructor
    public Value(String productIdNumber, String productType, String name, String price) {
        super();
        this.productIdNumber = productIdNumber;
        this.productType = productType;
        this.name = name;
        this.price = price;
    }
    //Season Pass constructor
    public Value(String productIdNumber, String productType, String name, String startDate, String endDate, String price) {
        super();
        this.productIdNumber = productIdNumber;
        this.productType = productType;
        this.name = name;
        this.startDate = startDate;
        this.endDate = endDate;
        this.price = price;
    }
//Getter and Setter methods
public Address getAddress() {
return this.address;
}
public String getCustomerIdNumber () {
return this.customerIdNumber;
}
public void setCustomerIdNumber (String customerIdNumber) {
this.customerIdNumber = customerIdNumber;
}
public String getCustomerType() {
return this.customerType;
}
public void setCustomerType(String customerType) {
this.customerType = customerType;
}
public String getEndDate() {
return this.endDate;
}
public void setEndDate(String endDate) {
this.endDate = endDate;
}
public ArrayList<String> getEmails () {
return this.emails;
}
public void setEmails (ArrayList<String> emails) {
this.emails = emails;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getMovieDate() {
return this.movieDate;
}
public void setMovieDate(String movieDate) {
this.movieDate = movieDate;
}
public String getName () {
return this.name;
}
public void setName (String name){
this.name = name;
}
public String getPersonIdNumber () {
return this.personIdNumber;
}
public void setPersonIdNumber (String personIdNumber) {
this.personIdNumber = personIdNumber;
}
public String getPrice() {
return this.price;
}
public void setPrice(String price) {
this.price = price;
}
public String getProductIdNumber () {
return this.productIdNumber;
}
public void setProductIdNumber (String productIdNumber) {
this.productIdNumber = productIdNumber;
}
public String getProductType() {
return this.productType;
}
public void setProductType(String productType) {
this.productType = productType;
}
public String getScreenNumber() {
return this.screenNumber;
}
public void setScreenNumber(String screenNumber) {
this.screenNumber = screenNumber;
}
public String getStartDate() {
return this.startDate;
}
public void setStartDate(String startDate) {
this.startDate = startDate;
}
}<file_sep>/src/com/datacontainers/Person.java
package com.datacontainers;
import java.util.ArrayList;
/**
 * Data container for one person: id, name, postal address and a list of
 * e-mail addresses.
 *
 * NOTE(review): the address has no setter (constructor-only), and
 * getAddress() hands back the internal Address instance directly — if
 * Address is mutable, callers can change shared state. Confirm whether
 * that is intended.
 */
public class Person {
    private String idNumber;
    private String firstName;
    private String lastName;
    private Address address; // Person class owns Address class as a field
    private ArrayList<String> emails;
    // Constructor: populates every field; no validation is performed.
    public Person(String idNumber, String firstName, String lastName, Address address, ArrayList<String> emails) {
        super();
        this.idNumber = idNumber;
        this.firstName = firstName;
        this.lastName = lastName;
        this.address = address;
        this.emails = emails;
    }
    // Getter and Setter methods
    public String getIdNumber () {
        return this.idNumber;
    }
    public void setIdNumber (String idNumber) {
        this.idNumber = idNumber;
    }
    public String getFirstName() {
        return firstName;
    }
    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }
    public String getLastName() {
        return lastName;
    }
    public void setLastName(String lastName) {
        this.lastName = lastName;
    }
    public Address getAddress() {
        return this.address;
    }
    public ArrayList<String> getEmails () {
        return this.emails;
    }
    public void setEmails (ArrayList<String> emails) {
        this.emails = emails;
    }
}
<file_sep>/src/com/fileReader/FlatFileReader.java
package com.fileReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Scanner;
import com.datacontainers.*;
public class FlatFileReader {
public ArrayList<Value> readValues(String fileName) {
Scanner sc = null;
try {
sc = new Scanner(new File(fileName));
sc.nextLine(); // reads the number of records from the first line
// This Value ArrayList stores the Value objects
ArrayList<Value> valueList = new ArrayList<Value>();
while(sc.hasNext()) {
String line = sc.nextLine(); // reads each line starting from 2nd line
String data[] = line.split(",|;"); // tokenizes the line and stores in a string array
Value value = null;
if (data[1].equals("M")) { //If the data[1] is "M", prepares a movie ticket product Value.
String productIdNumber = data[0].replaceAll("^\\s+", "").trim(); //Removes all starting and ending white space from the token before passing to the variable.
String productType = data[1].replaceAll("^\\s+", "").trim();
String movieDate = data[2].replaceAll("^\\s+", "").trim();
String name = data[3].replaceAll("^\\s+", "").trim();
String street = data[4].replaceAll("^\\s+", "").trim();
String city = data[5].replaceAll("^\\s+", "").trim();
String state = data[6].replaceAll("^\\s+", "").trim();
String postalCode = data[7].replaceAll("^\\s+", "").trim();
String country = data[8].replaceAll("^\\s+", "").trim();
String screenNumber = data[9].replaceAll("^\\s+", "").trim();
String price = data[10].replaceAll("^\\s+", "").trim();
// Creates an Address object
Address address = new Address(street, city, state, postalCode, country);
// Creates a movie product type Value object
value = new Value(productIdNumber, productType, movieDate, name, address, screenNumber, price);
}
else if (data[1].equals("P")) { // "P" is for parking pass
String productIdNumber = data[0].replaceAll("^\\s+", "").trim();
String productType = data[1].replaceAll("^\\s+", "").trim();
String price = data[2].replaceAll("^\\s+", "").trim();
//Creates a parking product Value object
value = new Value(productIdNumber, productType, price);
}
else if (data[1].equals("R")) { //"R" is for refreshment
String productIdNumber = data[0].replaceAll("^\\s+", "").trim();
String productType = data[1].replaceAll("^\\s+", "").trim();
String name = data[2].replaceAll("^\\s+", "").trim();
String price = data[3].replaceAll("^\\s+", "").trim();
//Creates a refreshment product Value object
value = new Value(productIdNumber, productType, name, price);
}
else if (data[1].equals("S") && data.length == 6){ //There are two "S" product types. This deals with season passes, which always have an array length of 6.
String productIdNumber = data[0].replaceAll("^\\s+", "").trim();
String productType = data[1].replaceAll("^\\s+", "").trim();
String name = data[2].replaceAll("^\\s+", "").trim();
String startDate = data[3].replaceAll("^\\s+", "").trim();
String endDate = data[4].replaceAll("^\\s+", "").trim();
String price = data[5].replaceAll("^\\s+", "").trim();
//Creates a season pass product Value object
value = new Value(productIdNumber, productType, name, startDate, endDate, price);
}
// "G" and "S" are both customer types, but "S" can also be season pass. Students always have an array length of 9.
else if (data[1].equals("G") || (data[1].equals("S") & data.length == 9)) {
String customerIdNumber = data[0].replaceAll("^\\s+", "").trim();
String customerType = data[1].replaceAll("^\\s+", "").trim();
String personIdNumber = data[2].replaceAll("^\\s+", "").trim();
String name = data[3].replaceAll("^\\s+", "").trim();
String street = data[4].replaceAll("^\\s+", "").trim();
String city = data[5].replaceAll("^\\s+", "").trim();
String state = data[6].replaceAll("^\\s+", "").trim();
String postalCode = data[7].replaceAll("^\\s+", "").trim();
String country = data[8].replaceAll("^\\s+", "").trim();
// Creates an Address object
Address address = new Address(street, city, state, postalCode, country);
// Creates a customer type Value object
value = new Value(customerIdNumber, customerType, personIdNumber, name, address);
}
else {
//Any other value for data[1] will always be a person.
//Persons can have an arbitrary number of email addresses, including none.
String personIdNumber = data[0].replaceAll("^\\s+", "").trim();
String lastName = data[1].replaceAll("^\\s+", "").trim();
String firstName = data[2].replaceAll("^\\s+", "").trim();
String street = data[3].replaceAll("^\\s+", "").trim();
String city = data[4].replaceAll("^\\s+", "").trim();
String state = data[5].replaceAll("^\\s+", "").trim();
String postalCode = data[6].replaceAll("^\\s+", "").trim();
String country = data[7].replaceAll("^\\s+", "").trim();
if (data.length >= 9) {
ArrayList<String> emails = new ArrayList<String>();
for (int i = 8; i < data.length; i++) {
emails.add(data[i].replaceAll("^\\s+", ""));
}
// Creates an Address object
Address address = new Address(street, city, state, postalCode, country);
// Creates a person type Value object (w/ email)
value = new Value(personIdNumber, lastName, firstName, address, emails);
}
else {
// Creates an Address object
Address address = new Address(street, city, state, postalCode, country);
//Creates a person type Value object (w/o email)
value = new Value (personIdNumber, lastName, firstName, address);
}
}
valueList.add(value); //Used at the end of the while loop, rather than at the end of each if statement.
}
sc.close();
return valueList;
}
catch (FileNotFoundException e) {
e.printStackTrace();
return null;
}
}
} | 9b40f2a6e9bd534da0b6e91a93f0c03ca1d3a22a | [
"Java"
] | 3 | Java | SporkyMcForkinspoon/CSCE156 | bd8e3879c5150b57b98d4f8a15966e6b250c42bd | fd9b7cb0fade6f0dcba458e4fb322abe7449f72c | |
refs/heads/master | <repo_name>somsom292/digital_signal_pocessing<file_sep>/image_bright.cpp
#pragma warning(disable:4996)
#include <stdio.h>
#include <stdlib.h>// 위에 두 부분은 파일 입출력을 하기 위한 부분
#include <Windows.h>//구조체 정보를 가지고 있는 헤더 파일
// Identity transform: copies the source image into Out pixel-for-pixel.
// W and H are the image dimensions; buffers must hold W*H bytes.
void original(BYTE *img, BYTE *Out, int W, int H)
{
	const int pixelCount = W * H;
	int i = 0;
	while (i < pixelCount)
	{
		Out[i] = img[i];
		++i;
	}
}
// Photographic negative: every 8-bit pixel p becomes 255 - p.
// Elements are independent, so traversal order does not matter.
void change(BYTE *img, BYTE *Out, int W, int H)
{
	const int pixelCount = W * H;
	for (int i = pixelCount - 1; i >= 0; --i)
	{
		Out[i] = 255 - img[i];
	}
}
// Brightens the image by adding 100 to every pixel.
// BUG FIX: the original stored 100 + img[i] straight into a BYTE, so any
// source value above 155 wrapped past 255 and came out dark. Saturate at
// 255 instead, which is the conventional behavior for a brightness shift.
void bright_change(BYTE *img, BYTE *Out, int W, int H)
{
	int ImgSize = W * H;
	for (int i = 0; i < ImgSize; i++)
	{
		int v = 100 + img[i];
		Out[i] = (BYTE)(v > 255 ? 255 : v);
	}
}
// Entry point: loads the 8-bit palettized BMP "lenna.bmp", computes a
// negative (Output), an unmodified copy (Output2) and a brightened copy
// (Output3), then writes the negative to "output.bmp".
// NOTE(review): only Output is saved; Output2/Output3 are computed but
// never written -- presumably leftovers from the exercise.
// FIX: the original never checked the second fopen(), so an unwritable
// output path made every fwrite/fclose operate on NULL; skip the write
// (but still free the buffers) in that case.
void main()
{
	BITMAPFILEHEADER hf; // BMP file header, 14 bytes
	BITMAPINFOHEADER hInfo; // BMP info header, 40 bytes
	RGBQUAD hRGB[256]; // color palette (256 * 4 bytes)
	FILE *fp; // stream used for reading the input and writing the output
	fp = fopen("lenna.bmp", "rb"); // open in binary read mode
	if (fp == NULL) return;
	// Read the headers and palette into the in-memory structures.
	fread(&hf, sizeof(BITMAPFILEHEADER), 1, fp);
	fread(&hInfo, sizeof(BITMAPINFOHEADER), 1, fp);
	fread(hRGB, sizeof(RGBQUAD), 256, fp);
	int ImgSize = hInfo.biWidth * hInfo.biHeight; // pixel count of the image
	BYTE * Image = (BYTE *)malloc(ImgSize); // source pixel data
	BYTE * Output = (BYTE *)malloc(ImgSize); // processed results (same size)
	BYTE * Output2 = (BYTE *)malloc(ImgSize);
	BYTE * Output3 = (BYTE *)malloc(ImgSize);
	fread(Image, sizeof(BYTE), ImgSize, fp);
	fclose(fp);
	/* image processing */
	change(Image, Output, hInfo.biWidth, hInfo.biHeight);
	original(Image, Output2, hInfo.biWidth, hInfo.biHeight);
	bright_change(Image, Output3, hInfo.biWidth, hInfo.biHeight);
	fp = fopen("output.bmp", "wb");
	if (fp != NULL)
	{
		fwrite(&hf, sizeof(BYTE), sizeof(BITMAPFILEHEADER), fp);
		fwrite(&hInfo, sizeof(BYTE), sizeof(BITMAPINFOHEADER), fp);
		fwrite(hRGB, sizeof(RGBQUAD), 256, fp);
		fwrite(Output, sizeof(BYTE), ImgSize, fp);
		fclose(fp);
	}
	// Release the buffers to avoid leaking memory.
	free(Image);
	free(Output);
	free(Output2);
	free(Output3);
}
| 6aa9e70bd27bf89f2eb1e2c42b1634074b035f69 | [
"C++"
] | 1 | C++ | somsom292/digital_signal_pocessing | f5d70bc46032e656d1cfa3ea9e1f6fb5146ab654 | 46eebe77b29125132de14f66026cc201a1a4054a | |
refs/heads/main | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.WorldSpace.ChunkSpace;
using OpenTK;
namespace isometricgame.GameEngine.WorldSpace.Generators
{
/// <summary>
/// Default terrain generator: samples interpolated Perlin noise per tile and
/// builds a single ground render structure for each chunk.
/// </summary>
public class GenericGenerator : Generator
{
    public GenericGenerator(int seed)
        : base(seed)
    {
    }

    /// <summary>
    /// Builds a chunk at the given chunk-space position. Each tile's elevation
    /// z comes from the noise map; tiles with z above 3 use sprite id 1,
    /// everything else uses sprite id 0.
    /// </summary>
    public override Chunk CreateChunk(Vector2 pos)
    {
        Chunk c = new Chunk(pos);
        float[,] noiseMap = Perlin.InterprolateNoise(pos);
        RenderStructure ground = new RenderStructure(Chunk.CHUNK_TILE_WIDTH, Chunk.CHUNK_TILE_HEIGHT);
        for (int x = 0; x < Chunk.CHUNK_TILE_WIDTH; x++)
        {
            // FIX: the inner loop previously ran to CHUNK_TILE_WIDTH; use
            // CHUNK_TILE_HEIGHT so the bound matches the structure's Y size.
            // Both constants are 16 today, so behavior is unchanged, but the
            // loop no longer breaks if the dimensions ever diverge.
            for (int y = 0; y < Chunk.CHUNK_TILE_HEIGHT; y++)
            {
                int z = (int)noiseMap[x, y];
                int id = (z > 3) ? 1 : 0; // elevated terrain gets sprite 1
                ground.structuralUnits[x][y] = new RenderUnit(id, 0, new Vector3(x, y, z));
            }
        }
        c.AddStructure(ground);
        return c;
    }

    /// <summary>
    /// Post-generation pass: orients the ground-layer tiles against their
    /// neighbors via the chunk directory.
    /// </summary>
    public override void FinalizeChunk(ChunkDirectory chunkDirectory, ref Chunk c)
    {
        chunkDirectory.PerformTileOrientation(c.TileSpaceLocation, ref c.ChunkStructures[0]);
    }
}
}
<file_sep>using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.WorldSpace;
using isometricgame.GameEngine.WorldSpace.ChunkSpace;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine
{
/// <summary>
/// 2D isometric camera: tracks a world-space (cartesian) position, optionally
/// chasing a focus object, converts it to isometric coordinates and builds
/// the view matrix used for rendering.
/// </summary>
public class Camera
{
    // Near/far planes for the orthographic projection.
    private float zNear = 0.01f, zFar = 10f;
    private float zoom = 0.2f;
    private Vector3 position = new Vector3(0,0,0);
    // Pan speed multiplier used by Pan_Linear.
    private float velocity = 3;
    private Scene scene;
    private GameObject focusObject;
    public float Velocity { get => velocity; set => velocity = value; }
    public Vector3 Position { get => position; set => position = value; }
    public GameObject FocusObject { get => focusObject; set => focusObject = value; }
    // Where the camera wants to be: the focus object if set, else itself.
    public Vector3 TargetPosition => (focusObject != null) ? focusObject.Position : position;
    public float Zoom { get => zoom; set => zoom = value; }
    // Isometric screen coordinates of the camera's cartesian position.
    public float Iso_X => Chunk.CartesianToIsometric_X(position.X, position.Y);
    public float Iso_Y => Chunk.CartesianToIsometric_Y(position.X, position.Y, position.Z);
    public Camera(Scene scene)
    {
        this.scene = scene;
    }
    /// <summary>
    /// Moves the camera a velocity-scaled fraction of the remaining distance
    /// toward the focus object each frame (an ease-out when called repeatedly).
    /// NOTE(review): with no focus object this eases toward the origin
    /// (Vector3.Zero), not toward the current position -- confirm intended.
    /// </summary>
    public void Pan_Linear(float deltaT)
    {
        Vector3 pos = (focusObject != null) ? focusObject.Position : Vector3.Zero;
        Vector3 distanceVector = pos - position;
        position += (distanceVector * velocity) * deltaT;
    }
    // Unimplemented easing variants (intentional stubs).
    public void Pan_Pow2(float deltaT)
    {
    }
    public void Pan_Pow3(float deltaT)
    {
    }
    /// <summary>
    /// Builds the view-projection matrix: translate to the camera's isometric
    /// position, scale by zoom, then an orthographic projection sized to the
    /// window. The multiplication order matters; do not reorder the factors.
    /// </summary>
    public Matrix4 GetView()
    {
        return Matrix4.CreateTranslation(-Iso_X, -Iso_Y, -1f)
            * Matrix4.CreateScale(zoom)
            * Matrix4.CreateOrthographic(scene.Game.Width, scene.Game.Height, zNear, zFar);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using isometricgame.GameEngine.Events.Arguments;
using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.Systems.Rendering;
using isometricgame.GameEngine.WorldSpace;
using OpenTK;
using OpenTK.Graphics.OpenGL;
namespace isometricgame.GameEngine.Systems.Rendering
{
/// <summary>
/// Central drawing system: owns the shared shader and the pixel-space
/// projection, caches the active scene's matrix each frame, and draws
/// sprites out of the SpriteLibrary.
/// </summary>
public class RenderService : GameSystem
{
    private SpriteLibrary SpriteLibrary;
    // Pixel-space orthographic projection; origin at the top-left.
    private Matrix4 projection;
    // Scene matrix captured in RenderScene and folded into every draw call
    // issued during that frame.
    private Matrix4 cachedWorldMatrix;
    private Shader shader;
    private string shaderSource_Vert, shaderSource_Frag;
    /// <summary>
    /// Builds the projection for the initial window size and compiles the
    /// shared vertex/fragment shader from the game's shader directory.
    /// </summary>
    public RenderService(Game game, int windowWidth, int windowHeight)
        : base(game)
    {
        AdjustProjection(windowWidth, windowHeight);
        cachedWorldMatrix = Matrix4.CreateTranslation(new Vector3(0,0,0));
        shaderSource_Vert = Path.Combine(game.GAME_DIRECTORY_SHADERS, "shader.vert");
        shaderSource_Frag = Path.Combine(game.GAME_DIRECTORY_SHADERS, "shader.frag");
        shader = new Shader(shaderSource_Vert, shaderSource_Frag);
    }
    /// <summary>Resolves the sprite library once all systems are registered.</summary>
    public override void Load()
    {
        SpriteLibrary = Game.GetSystem<SpriteLibrary>();
    }
    /// <summary>Releases the GL shader program.</summary>
    public override void Unload()
    {
        shader.Dispose();
    }
    /// <summary>Rebuilds the pixel-space projection after a window resize.</summary>
    public void AdjustProjection(int width, int height)
    {
        projection = Matrix4.CreateOrthographicOffCenter(0, width, height, 0, 0, 1);
    }
    // Clears the frame and sets up blending/texturing. Uses the legacy GL
    // matrix stack, so the call order here is significant.
    internal void BeginRender()
    {
        GL.ClearColor(Color.FromArgb(5, 5, 25));
        GL.Clear(ClearBufferMask.ColorBufferBit);
        GL.MatrixMode(MatrixMode.Projection);
        GL.LoadMatrix(ref projection);
        GL.Enable(EnableCap.Blend);
        GL.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);
        GL.Enable(EnableCap.Texture2D);
        GL.MatrixMode(MatrixMode.Modelview);
    }
    /// <summary>
    /// Caches the scene's matrix for this frame, then lets the scene draw
    /// itself back through this service.
    /// </summary>
    public void RenderScene(Scene scene, FrameArgument e)
    {
        cachedWorldMatrix = scene.SceneMatrix;
        scene.RenderFrame(this, e);
    }
    internal void EndRender()
    {
        GL.Flush();
    }
    /// <summary>Binds a sprite's texture and selects its sub-quad (VAO index).</summary>
    public void UseSprite(int spriteId, int vaoIndex = 0)
    {
        SpriteLibrary.sprites[spriteId].VAO_Index = vaoIndex;
        SpriteLibrary.sprites[spriteId].Use();
    }
    /// <summary>Draws a render unit, applying the sprite's own pixel offsets.</summary>
    public void DrawSprite(ref RenderUnit renderUnit, float x, float y, float z = 0)
    {
        UseSprite(renderUnit.Id, renderUnit.VAO_Index);
        DrawSprite(x + SpriteLibrary.sprites[renderUnit.Id].OffsetX, y + SpriteLibrary.sprites[renderUnit.Id].OffsetY, z);
    }
    /// <summary>Draws a sprite by id at the given position.</summary>
    public void DrawSprite(int spriteId, float x, float y, int vaoIndex = 0, float z = 0)
    {
        UseSprite(spriteId, vaoIndex);
        DrawSprite(x, y, z);
    }
    // Uploads translation * cachedWorldMatrix to the shader's "transform"
    // uniform and issues the quad draw for the currently bound sprite.
    private void DrawSprite(float x, float y, float z = 0)
    {
        shader.Use();
        int transform = GL.GetUniformLocation(shader.Handle, "transform");
        Matrix4 translation = Matrix4.CreateTranslation(new Vector3(x, y, z)) * cachedWorldMatrix;
        GL.UniformMatrix4(transform, true, ref translation);
        GL.DrawArrays(PrimitiveType.Quads, 0, VertexArray.VERTEXARRAY_INDEX_COUNT);
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Exceptions.Services
{
/// <summary>
/// Thrown when a GameService of a certain type is already registered.
/// FIX: added the standard exception constructors (message, message +
/// inner exception) per .NET exception design guidelines (CA1032); the
/// original only exposed the implicit parameterless constructor, so the
/// throw site could never describe which service collided.
/// </summary>
public class ExistingServiceException : Exception
{
    /// <summary>Creates the exception with the default message.</summary>
    public ExistingServiceException()
    {
    }

    /// <summary>Creates the exception with a descriptive message.</summary>
    public ExistingServiceException(string message)
        : base(message)
    {
    }

    /// <summary>Creates the exception wrapping an underlying cause.</summary>
    public ExistingServiceException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Exceptions.Attributes
{
/// <summary>
/// Thrown whenever an Attribute is added to an object that already possessed
/// an attribute of that type. Attributes should be determined during object
/// construction (only the needed attributes added); attributes that should
/// be inactive should be disabled via Attribute.Toggle() instead.
/// FIX: added the standard exception constructors (message, message +
/// inner exception) per .NET exception design guidelines (CA1032).
/// </summary>
public class ExistingAttributeException : Exception
{
    /// <summary>Creates the exception with the default message.</summary>
    public ExistingAttributeException()
    {
    }

    /// <summary>Creates the exception with a descriptive message.</summary>
    public ExistingAttributeException(string message)
        : base(message)
    {
    }

    /// <summary>Creates the exception wrapping an underlying cause.</summary>
    public ExistingAttributeException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
}
<file_sep>using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Systems
{
/// <summary>
/// Static math utilities: strict bounding-box tests, integer mapping/pairing
/// functions (used for deterministic per-coordinate values), angle helpers
/// and degree/radian conversions.
/// </summary>
public class MathHelper
{
    /// <summary>True when subjectVector lies strictly inside the box on all three axes.</summary>
    public static bool IsBounded_XYZ(Vector3 subjectVector, Vector3 lowerBound, Vector3 upperBound)
    {
        return (
            subjectVector.X > lowerBound.X &&
            subjectVector.Y > lowerBound.Y &&
            subjectVector.Z > lowerBound.Z &&
            subjectVector.X < upperBound.X &&
            subjectVector.Y < upperBound.Y &&
            subjectVector.Z < upperBound.Z
            );
    }
    /// <summary>True when subjectVector lies strictly inside the box on X and Y (Z ignored).</summary>
    public static bool IsBounded_XY(Vector3 subjectVector, Vector3 lowerBound, Vector3 upperBound)
    {
        return (
            subjectVector.X > lowerBound.X &&
            subjectVector.Y > lowerBound.Y &&
            subjectVector.X < upperBound.X &&
            subjectVector.Y < upperBound.Y
            );
    }
    // Maps n to the n-th even integer.
    public static int MapEven(int n)
    {
        return 2 * n;
    }
    // Maps n to the n-th odd integer.
    public static int MapOdd(int n)
    {
        return MapEven(n) + 1;
    }
    /// <summary>
    /// Maps the signed integers onto the non-negative integers:
    /// 0 -> 0, n &gt; 0 -> 2n + 1, n &lt; 0 -> 2|n|
    /// (the (n + |n|) / (2n) term is 1 for positive n and 0 for negative n).
    /// </summary>
    public static int MapPositive(int n)
    {
        if (n == 0)
            return 0;
        return (2 * Math.Abs(n)) + ((n + Math.Abs(n)) / (2 * n));
    }
    /// <summary>
    /// Combines a 2D integer coordinate into a single integer via
    /// MapPositive plus a quadrant-style post-mapping on index % 4.
    /// NOTE(review): despite the name, global uniqueness across all (x, y)
    /// is not verified here -- confirm before relying on injectivity.
    /// </summary>
    public static int MapCoordsToUniqueInteger(int x, int y)
    {
        int index = 4 * MapPositive(x) + MapPositive(y);
        int quadSel = index % 4;
        if (quadSel == 0)
            return (4 * index) + 1;
        else if (quadSel == 1)
            return 2 * ((2 * index) + 1);
        else if (quadSel == 2)
            return (4 * index) + 3;
        else
            return 4 * index;
    }
    // Deterministic pseudo-random hash of a coordinate pair (noise-style).
    // This is simply genius. Props to wiki.
    public static float MapCoordsToUniqueFloat(int x, int y)
    {
        return 2920 * (float)Math.Sin(x * 21942 + y * 171324 + 8912) * (float)Math.Cos(x * 23157 * y * 217832 + 9758);
    }
    // Converts a step index n into a flat index using the given stride.
    public static int StrideToIndex(int n, int stride)
    {
        return ((stride - 1) * (n + 1)) + n;
    }
    /// <summary>
    /// Angle in degrees normalized to [0, 360), plus an optional offset.
    /// NOTE(review): Atan2 is called as Atan2(x, y), i.e. with the arguments
    /// swapped relative to the conventional Atan2(y, x), so this measures the
    /// angle clockwise from the +Y (or +Z) axis -- not "0 rad on the x axis"
    /// as the original comment claimed. Confirm against callers before changing.
    /// </summary>
    public static float GetAngle(float x, float y_or_z, float offset=0f)
    {
        float angle = (float)(Math.Atan2(x,y_or_z) / Math.PI * 180f) + offset;
        if (angle < 0)
            angle += 360f;
        return angle;
    }
    /// <summary>Vector overload of GetAngle; uses the vector's X and Y.</summary>
    public static float GetAngle(Vector2 position, float offset = 0f)
    {
        return GetAngle(position.X, position.Y, offset);
    }
    /// <summary>Strict triangle inequality on all three side lengths.</summary>
    public static bool FormsTriangle(float side1, float side2, float side3)
    {
        return
            (
            side1 + side2 > side3 &&
            side1 + side3 > side2 &&
            side2 + side3 > side1
            );
    }
    // Degrees -> radians (this codebase calls degrees "Euler").
    public static float Euler_To_Radian(float thetaEuler)
    {
        return (float)(thetaEuler * Math.PI / 180f);
    }
    // Radians -> degrees.
    public static float Radian_To_Euler(float thetaRadian)
    {
        return (float)(thetaRadian / Math.PI * 180f);
    }
}
}
<file_sep>using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.Systems;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.WorldSpace.ChunkSpace
{
/// <summary>
/// Layer index of a render structure inside a chunk. The numeric values
/// presumably double as the draw/stacking order (ground below liquid below
/// flora) -- TODO confirm against the renderer.
/// </summary>
public enum ChunkStructureLayerType
{
    Ground = 0,
    Liquid = 1,
    Flora = 2
}
/// <summary>
/// A fixed-size (16x16 tile) piece of the world: holds up to three render
/// structure layers (see ChunkStructureLayerType) plus cached elevation
/// extremes, and hosts the static cartesian/isometric coordinate conversions.
/// </summary>
public struct Chunk
{
    // Chunk dimensions, in tiles.
    public static readonly int CHUNK_TILE_WIDTH = 16;
    public static readonly int CHUNK_TILE_HEIGHT = 16;
    // Pixel footprint of a chunk. The per-tile factors 35 and 18 look like
    // the tile sprite's pixel cell size -- TODO confirm they track
    // Tile.TILE_WIDTH / Tile.TILE_HEIGHT.
    public static int CHUNK_PIXEL_WIDTH => CHUNK_TILE_WIDTH * 35;
    public static int CHUNK_PIXEL_HEIGHT => CHUNK_TILE_HEIGHT * 18;
    public static int CHUNK_MAX_STRUCTURE_COUNT = 3;
    /// <summary>Tile-space position -> chunk-space index (division by chunk size).</summary>
    public static IntegerPosition WorldSpace_To_ChunkSpace(Vector2 position)
    {
        return new Vector2(position.X / Chunk.CHUNK_TILE_WIDTH, position.Y / Chunk.CHUNK_TILE_HEIGHT);
    }
    // Cartesian tile coordinates -> isometric screen X.
    public static float CartesianToIsometric_X(float x, float y)
    {
        return Tile.TILE_WIDTH * 0.5f * (x + y);
    }
    // Inverse of CartesianToIsometric_X given the cartesian y.
    public static float IsometricToCartesian_X(float iso_x, float cart_y)
    {
        return ((2 * iso_x) / Tile.TILE_WIDTH) - cart_y;
    }
    // Cartesian tile coordinates plus elevation z -> isometric screen Y.
    // The -7 and the 6-pixels-per-z step are presumably tuned to the tile
    // art's vertical overlap -- verify before changing either constant.
    public static float CartesianToIsometric_Y(float x, float y, float z)
    {
        return (Tile.TILE_HEIGHT-7) * 0.5f * (y - x) + (-z * 6);
    }
    // Inverse of CartesianToIsometric_Y given cartesian x and elevation z.
    public static float IsometricToCartesian_Y(float iso_y, float cart_x, float z)
    {
        return ((2 * iso_y - (12 * z)) / (Tile.TILE_HEIGHT - 7)) + cart_x;
    }
    // Wraps a tile coordinate into [0, CHUNK_TILE_WIDTH), handling negatives.
    // NOTE(review): the additive bias is CHUNK_PIXEL_WIDTH (560); any multiple
    // of 16 yields the same result, but CHUNK_TILE_WIDTH reads like the
    // intended constant.
    public static int Localize(int x_or_y) => (CHUNK_PIXEL_WIDTH + (x_or_y % CHUNK_TILE_WIDTH)) % CHUNK_TILE_WIDTH;
    public static IntegerPosition Localize(IntegerPosition pos) => new IntegerPosition(Localize(pos.X), Localize(pos.Y));
    private IntegerPosition chunkIndexPosition;
    private bool isFinalized;
    private bool isValid;
    // Render layers, filled through AddStructure.
    private RenderStructure[] chunkStructures;
    private int structureCount;
    // Elevation extremes cached by AssertZValues.
    private float minimumZ, maximumZ;
    /// <summary>
    /// Base Location is used for positioning on the chunk level.
    /// </summary>
    public IntegerPosition ChunkIndexPosition { get => chunkIndexPosition; set => chunkIndexPosition = value; }
    /// <summary>
    /// TileSpace Location is used for positioning on the tile level.
    /// NOTE(review): the Y component multiplies by CHUNK_TILE_WIDTH; harmless
    /// while width == height (both 16) but likely meant CHUNK_TILE_HEIGHT.
    /// </summary>
    public IntegerPosition TileSpaceLocation => new IntegerPosition(
        ChunkIndexPosition.X * Chunk.CHUNK_TILE_WIDTH,
        ChunkIndexPosition.Y * Chunk.CHUNK_TILE_WIDTH
        );
    /// <summary>
    /// This is the edge of the chunk in terms of Tile Space (exclusive bound).
    /// </summary>
    public IntegerPosition TileSpaceEdgeLocation => TileSpaceLocation + new IntegerPosition(16, 16);
    public bool IsFinalized { get => isFinalized; }
    public float MinimumZ { get => minimumZ; set => minimumZ = value; }
    public float MaximumZ { get => maximumZ; set => maximumZ = value; }
    public RenderStructure[] ChunkStructures { get => chunkStructures; private set => chunkStructures = value; }
    public bool IsValid { get => isValid; set => isValid = value; }
    /// <summary>Creates an empty, valid, un-finalized chunk at the given chunk index.</summary>
    public Chunk(Vector2 baseLocation)
    {
        this.chunkIndexPosition = baseLocation;
        isFinalized = false;
        minimumZ = 0;
        maximumZ = 0;
        isValid = true;
        chunkStructures = new RenderStructure[CHUNK_MAX_STRUCTURE_COUNT];
        structureCount = 0;
    }
    /// <summary>
    /// Appends a render layer. No bounds check: adding more than
    /// CHUNK_MAX_STRUCTURE_COUNT structures throws IndexOutOfRangeException.
    /// </summary>
    public void AddStructure(RenderStructure structure)
    {
        chunkStructures[structureCount] = structure;
        structureCount++;
    }
    /// <summary>Folds every structure's elevation extremes into MinimumZ/MaximumZ.</summary>
    public void AssertZValues()
    {
        for (int i = 0; i < chunkStructures.Length; i++)
        {
            if (chunkStructures[i].minimumZ < minimumZ)
                minimumZ = chunkStructures[i].minimumZ;
            if (chunkStructures[i].maximumZ > maximumZ)
                maximumZ = chunkStructures[i].maximumZ;
        }
    }
    /// <summary>True when the tile-space position falls inside this chunk (inclusive lower, exclusive upper bound).</summary>
    public bool WithinPosition(Vector2 basePos)
    {
        return TileSpaceLocation.X <= basePos.X &&
            TileSpaceEdgeLocation.X > basePos.X &&
            TileSpaceLocation.Y <= basePos.Y &&
            TileSpaceEdgeLocation.Y > basePos.Y;
    }
}
}
<file_sep>using isometricgame.GameEngine.Rendering;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Imaging;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTK.Graphics.OpenGL;
using OpenTK;
using isometricgame.GameEngine.Systems.Rendering;
namespace isometricgame.GameEngine.Systems.Rendering
{
public class TextDisplayer : GameSystem
{
public static readonly string CHARS = ",gjpqyABCDEFGHIJKLMNOPQRSTUVWXYZabcdefhiklmnorstuvwxz1234567890.?!/-+@#$%^&*()_=[]\\{}|:;\"'<>`~";
private SpriteLibrary SpriteLibrary;
private Dictionary<string, int> fonts = new Dictionary<string, int>();
public TextDisplayer(Game game)
: base(game)
{
}
public override void Load()
{
SpriteLibrary = Game.GetSystem<SpriteLibrary>();
}
public void LoadFont(string fontName, int fontSpriteId)
{
fonts.Add(fontName, fontSpriteId);
}
public void DrawText(RenderService renderService, string text, string fontName, float x, float y)
{
float fontWidth = SpriteLibrary.sprites[fonts[fontName]].SubWidth;
float fontHeight = SpriteLibrary.sprites[fonts[fontName]].SubHeight;
float descentGap = fontHeight / 2 + 1;
float commaDescent = -3;
float descentCharacter = -5;
int font = fonts[fontName];
float yWrite = y, xWrite = x, yOffset = 0;
for (int i = 0; i < text.Length; i++)
{
if (text[i] == ' ')
{
xWrite += fontWidth;
continue;
}
if (text[i] == '\n')
{
yWrite-= fontHeight + descentGap;
xWrite = x;
continue;
}
int index = CHARS.IndexOf(text[i]);
yOffset = (index == 0) ? commaDescent : (index < 6) ? descentCharacter : 0;
renderService.DrawSprite(font, xWrite, yWrite + yOffset, index);
xWrite += fontWidth;
}
}
}
}
<file_sep>using OpenTK;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTK.Graphics.OpenGL;
namespace isometricgame.GameEngine.Rendering
{
/// <summary>
/// Thin wrapper around an OpenGL 2D texture created from a System.Drawing
/// bitmap. Stores the GL texture id and the pixel dimensions.
/// </summary>
public struct Texture2D
{
    private int id;
    private Vector2 size;

    /// <summary>
    /// Uploads the bitmap (as BGRA bytes) into a new GL texture.
    /// </summary>
    /// <param name="bitmap">Source image; its bits are locked only for the upload.</param>
    /// <param name="pixelated">True for nearest-neighbor filtering (crisp pixel art), false for linear.</param>
    public Texture2D(Bitmap bitmap, bool pixelated = true)
    {
        BitmapData bmpd = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        size = new Vector2(bitmap.Width, bitmap.Height);
        id = GL.GenTexture();
        GL.BindTexture(TextureTarget.Texture2D, id);
        GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, bitmap.Width, bitmap.Height, 0, OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, bmpd.Scan0);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, pixelated ? (int)TextureMinFilter.Nearest : (int)TextureMinFilter.Linear);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, pixelated ? (int)TextureMagFilter.Nearest : (int)TextureMagFilter.Linear);
        // BUG FIX: the original called
        //   GL.TexParameter(..., TextureParameterName.ClampToEdge, <filter value>)
        // which is not a valid pname/value pair for glTexParameter. Set the
        // wrap mode on both axes to GL_CLAMP_TO_EDGE instead, which is what
        // the code was evidently trying to do (prevents edge bleeding).
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
        bitmap.UnlockBits(bmpd);
    }

    public int ID => id;
    public Vector2 Size => size;
    public int Width => (int)size.X;
    public int Height => (int)size.Y;
    public int Area => Width * Height;
}
}
<file_sep>using isometricgame.GameEngine;
using isometricgame.GameEngine.WorldSpace;
using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Systems;
using OpenTK;
using OpenTK.Graphics.OpenGL;
using OpenTK.Input;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using isometricgame.GameEngine.Exceptions.Services;
using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.Events.Arguments;
using isometricgame.GameEngine.Systems.Rendering;
using isometricgame.GameEngine.Systems.Serialization;
using OpenTK.Graphics;
using isometricgame.GameEngine.Systems.Input;
namespace isometricgame.GameEngine
{
/// <summary>
/// Top-level game window. Owns the engine systems, the on-disk content
/// directory layout and the active scene, and drives the update/render loop.
/// Construction order matters: RegisterSystems() runs before LoadContent()
/// so subclasses can load assets through the systems.
/// </summary>
public class Game : GameWindow
{
    //PATHS
    public readonly string GAME_DIRECTORY_BASE;
    public readonly string GAME_DIRECTORY_WORLDS;
    public readonly string GAME_DIRECTORY_ASSETS;
    public readonly string GAME_DIRECTORY_SHADERS;
    #region Systems
    //SERVICES
    private AssetProvider contentPipe;
    private SpriteLibrary spriteLibrary;
    private RenderService renderService;
    private TextDisplayer textDisplayer;
    private InputSystem inputSystem;
    private AnimationSchematicLibrary animationSchematicLibrary;
    // Registry searched by GetSystem<T>(); at most one system per type.
    private List<GameSystem> systems = new List<GameSystem>();
    /// <summary>
    /// Responsible for loading and unloading textures.
    /// </summary>
    public AssetProvider AssetProvider { get => contentPipe; private set => contentPipe = value; }
    /// <summary>
    /// Responsible for recording and retrieving loaded sprites.
    /// </summary>
    public SpriteLibrary SpriteLibrary { get => spriteLibrary; private set => spriteLibrary = value; }
    public RenderService RenderService { get => renderService; private set => renderService = value; }
    public TextDisplayer TextDisplayer { get => textDisplayer; private set => textDisplayer = value; }
    public InputSystem InputSystem { get => inputSystem; private set => inputSystem = value; }
    public AnimationSchematicLibrary AnimationSchematicLibrary { get => animationSchematicLibrary; private set => animationSchematicLibrary = value; }
    #endregion
    #region Time
    // Running totals of frame deltas since startup.
    private double renderTime, updateTime;
    public double RenderTime => renderTime;
    public double UpdateTime => updateTime;
    #endregion
    // The single active scene; swapped via SetScene.
    private Scene scene;
    /// <summary>
    /// Creates the window, resolves the content directories (falling back to
    /// defaults under the executable's directory when arguments are empty)
    /// and registers + loads all engine systems before asking the subclass
    /// to load its content.
    /// </summary>
    public Game(int width, int height, string title, string GAME_DIR = "", string GAME_DIR_ASSETS = "", string GAME_DIR_WORLDS = "")
        : base(width, height, GraphicsMode.Default, title)
    {
        GAME_DIRECTORY_BASE = (GAME_DIR == String.Empty) ? AppDomain.CurrentDomain.BaseDirectory : GAME_DIR;
        GAME_DIRECTORY_ASSETS = (GAME_DIR_ASSETS == String.Empty) ? Path.Combine(GAME_DIRECTORY_BASE, "Assets\\") : GAME_DIR_ASSETS;
        GAME_DIRECTORY_SHADERS = Path.Combine(GAME_DIRECTORY_ASSETS, "Shaders\\");
        GAME_DIRECTORY_WORLDS = (GAME_DIR_WORLDS == String.Empty) ? Path.Combine(GAME_DIRECTORY_BASE, "Worlds\\") : GAME_DIR_WORLDS;
        RegisterSystems();
        //END SERVICES
        LoadContent();
    }
    /// <summary>Keeps the GL viewport and projection in sync with the window size.</summary>
    protected override void OnResize(EventArgs e)
    {
        GL.Viewport(ClientRectangle);
        RenderService.AdjustProjection(Width, Height);
    }
    // Intentionally empty: no shutdown-veto logic yet.
    protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
    {
    }
    /// <summary>Accumulates update time and ticks the active scene.
    /// NOTE(review): dereferences scene without a null check -- a scene must
    /// be set before the loop runs.</summary>
    protected override void OnUpdateFrame(FrameEventArgs e)
    {
        updateTime += e.Time;
        scene.UpdateFrame(new FrameArgument(UpdateTime, e.Time));
    }
    /// <summary>Renders the active scene between BeginRender/EndRender and presents the frame.</summary>
    protected override void OnRenderFrame(FrameEventArgs e)
    {
        renderTime += e.Time;
        RenderService.BeginRender();
        RenderService.RenderScene(scene, new FrameArgument(RenderTime, e.Time));
        RenderService.EndRender();
        SwapBuffers();
    }
    // Intentionally empty: systems are loaded during construction instead.
    protected override void OnLoad(EventArgs e)
    {
    }
    /// <summary>Unloads every registered system when the window shuts down.</summary>
    protected override void OnUnload(EventArgs e)
    {
        foreach (GameSystem gamesys in systems)
            gamesys.Unload();
    }
    /// <summary>
    /// Linear search for the registered system of type T; throws
    /// ServiceNotFoundException when no such system exists.
    /// </summary>
    public T GetSystem<T>() where T : GameSystem
    {
        foreach (GameSystem system in systems)
            if (system is T)
                return system as T;
        throw new ServiceNotFoundException();
    }
    /// <summary>
    /// Adds a system; throws ExistingServiceException if a system of the
    /// same type is already registered.
    /// </summary>
    protected void RegisterSystem<T>(T gameService) where T : GameSystem
    {
        if (systems.Exists((s) => s is T))
            throw new ExistingServiceException();
        systems.Add(gameService);
    }
    /// <summary>
    /// Creates and registers the built-in systems, then calls Load() on each
    /// (after all are registered, so they may resolve each other).
    /// Subclasses may override to add their own systems.
    /// </summary>
    protected virtual void RegisterSystems()
    {
        AssetProvider = new AssetProvider(this);
        SpriteLibrary = new SpriteLibrary(this);
        RenderService = new RenderService(this, Width, Height);
        TextDisplayer = new TextDisplayer(this);
        InputSystem = new InputSystem(this);
        AnimationSchematicLibrary = new AnimationSchematicLibrary(this);
        RegisterSystem(AssetProvider);
        RegisterSystem(SpriteLibrary);
        RegisterSystem(RenderService);
        RegisterSystem(TextDisplayer);
        RegisterSystem(InputSystem);
        RegisterSystem(AnimationSchematicLibrary);
        foreach (GameSystem system in systems)
            system.Load();
    }
    // Hook for subclasses to load game content after systems are ready.
    protected virtual void LoadContent()
    {
    }
    /// <summary>Swaps the active scene used by the update/render loop.</summary>
    public void SetScene(Scene scene)
    {
        this.scene = scene;
    }
}
}
<file_sep>using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.WorldSpace.Generators;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.WorldSpace.ChunkSpace
{
/// <summary>
/// Responsible for generating new chunks, and serializing and deserializing them.
/// </summary>
public class ChunkDirectory
{
//Chunks[0] is the minimal position. It is often times removed or inserted.
private List<Chunk> activeChunks = new List<Chunk>();
private List<Chunk> skirtChunks = new List<Chunk>();
private Generator chunkGenerator;
private int renderDistance, newRenderDistance;
private Chunk[,] _chunks;
private IntegerPosition center = new IntegerPosition(0, 0);
private bool firstRender = true;
#region locationals
public float MinimalX_ByBaseLocation => Chunks[0,0].ChunkIndexPosition.X;
public float MinimalY_ByBaseLocation => Chunks[0,0].ChunkIndexPosition.Y;
public float MaximalX_ByBaseLocation => Chunks[DoubleDist-1, DoubleDist-1].ChunkIndexPosition.X;
public float MaximalY_ByBaseLocation => Chunks[DoubleDist-1, DoubleDist-1].ChunkIndexPosition.Y;
public float MinimalX_ByTileLocation => MinimalX_ByBaseLocation * Chunk.CHUNK_TILE_WIDTH;
public float MinimalY_ByTileLocation => MinimalY_ByBaseLocation * Chunk.CHUNK_TILE_HEIGHT;
public float MaximalX_ByTileLocation => (MaximalX_ByBaseLocation+1) * Chunk.CHUNK_TILE_WIDTH-1;
public float MaximalY_ByTileLocation => (MaximalY_ByBaseLocation+1) * Chunk.CHUNK_TILE_HEIGHT-1;
public float VisibleWidth => (MaximalX_ByTileLocation - MinimalX_ByTileLocation) * Tile.TILE_WIDTH;
public float VisibleHeight => (MaximalY_ByTileLocation - MinimalY_ByTileLocation) * Tile.TILE_HEIGHT;
//public float MinimalX_ByGameLocation => MinimalX_ByBaseLocation * Chunk.CHUNK_PIXEL_WIDTH;
//public float MinimalY_ByGameLocation => MinimalY_ByBaseLocation * Chunk.CHUNK_PIXEL_HEIGHT;
//public float MaximalX_ByGameLocation => MaximalX_ByBaseLocation * Chunk.CHUNK_PIXEL_WIDTH;
//public float MaximalY_ByGameLocation => MaximalY_ByBaseLocation * Chunk.CHUNK_PIXEL_HEIGHT;
#endregion
private int DoubleDist => renderDistance * 2 + 1;
public Generator ChunkGenerator { get => chunkGenerator; set => chunkGenerator = value; }
public int RenderDistance { get => renderDistance; set => newRenderDistance = value; }
public Chunk[,] Chunks { get => _chunks; private set => _chunks = value; }
public ChunkDirectory(int renderDistance, Generator chunkGenerator)
{
this.chunkGenerator = chunkGenerator;
this.renderDistance = renderDistance;
this.newRenderDistance = renderDistance;
Chunks = new Chunk[DoubleDist, DoubleDist];
}
/// <summary>
/// Sorts the chunks from 0 -> n by the sum of X*renderDist and Y.
/// </summary>
/// <returns></returns>
// NOTE(review): with a multiplier of renderDistance rather than the full grid
// side length, distinct (X, Y) pairs can map to the same sort key, leaving the
// order among such collisions unspecified — confirm this is acceptable.
public List<Chunk> GetSortedChunks()
{
return activeChunks.OrderBy((c) => (c.ChunkIndexPosition.X * renderDistance) + c.ChunkIndexPosition.Y).ToList();
}
/// <summary>
/// Re-centers the directory on the chunk containing the given world-space
/// position (note: ChunkCleanup recomputes the center from its own argument).
/// </summary>
public void SetCenter(Vector2 position)
{
center = Chunk.WorldSpace_To_ChunkSpace(position);
}
/// <summary>
/// Converts a world-space position into an index into the Chunks grid:
/// the position's chunk-space coordinate, taken relative to the current
/// center and shifted so the corner chunk maps to (0, 0).
/// </summary>
public IntegerPosition DeliminateChunkIndex(IntegerPosition position)
{
    IntegerPosition gridShift = new IntegerPosition(renderDistance, renderDistance);
    return Chunk.WorldSpace_To_ChunkSpace(position) - center + gridShift;
}
/// <summary>
/// Deliminates a tile using gamespace coordinates: translates a world tile
/// position into coordinates local to the chunk at grid index cpos.
/// </summary>
/// <param name="position">Tile position in world space.</param>
/// <param name="cpos">Index of the chunk within the Chunks grid.</param>
/// <returns>The position relative to that chunk's tile-space origin.</returns>
public IntegerPosition Localize(IntegerPosition position, IntegerPosition cpos)
{
    IntegerPosition chunkOrigin = Chunks[cpos.X, cpos.Y].TileSpaceLocation;
    return position - chunkOrigin;
}
/// <summary>
/// Resolves the render-unit position stored for a world tile: locates the
/// owning chunk in the grid, localizes the tile to it, and reads the unit
/// from the requested structure layer (layer 0 by default).
/// </summary>
private Vector3 DeliminateRenderUnit_Position(IntegerPosition position, int structureIndex = 0)
{
    //contemplate using unsafe {}.
    IntegerPosition gridIndex = DeliminateChunkIndex(position);
    IntegerPosition local = Localize(position, gridIndex);
    return Chunks[gridIndex.X, gridIndex.Y].ChunkStructures[structureIndex].structuralUnits[local.X][local.Y].Position;
}
/// <summary>
/// Builds the orientation bitmask for the tile at localPos: for each of the 8
/// neighbours whose height (Position.Z) is lower than this tile's, the matching
/// bit from Tile.ORIENTATIONS is OR'd into the result.
/// </summary>
/// <param name="localPos">Tile position local to the structure's chunk.</param>
/// <param name="chunkTilePos">The chunk's origin in world tile space, used to resolve neighbours outside this chunk.</param>
/// <param name="structure">The render structure holding the tile units.</param>
/// <returns>Bitmask of lower neighbours.</returns>
public byte GetTileOrientation(IntegerPosition localPos, IntegerPosition chunkTilePos, ref RenderStructure structure)
{
IntegerPosition lookupPos;
byte orientation = 0;
byte oriVal = 0;
Vector3 target = structure.structuralUnits[localPos.X][localPos.Y].Position;
for (int ly = -1; ly < 2; ly++)
{
for (int lx = -1; lx < 2; lx++)
{
// Skip the tile itself.
if (lx == 0 && ly == 0)
continue;
oriVal = Tile.ORIENTATIONS[lx + 1, ly + 1];
lookupPos = new IntegerPosition(lx, ly) + localPos;
Vector3 t;
// Neighbours outside this chunk are resolved through the directory
// (adjacent chunk lookup); in-chunk neighbours are read directly.
if (lookupPos.X < 0 || lookupPos.Y < 0 || lookupPos.X >= Chunk.CHUNK_TILE_WIDTH || lookupPos.Y >= Chunk.CHUNK_TILE_HEIGHT)
{
t = DeliminateRenderUnit_Position(lookupPos + chunkTilePos);
}
else
{
t = structure.structuralUnits[lookupPos.X][lookupPos.Y].Position;
}
// Set the neighbour's bit when it sits lower than this tile.
if (t.Z < target.Z)
{
orientation = (byte)(orientation | oriVal);
}
}
}
return orientation;
}
//we will need a means to check live to add/drop chunks
/// <summary>
/// Re-centers the chunk grid on the given world position: applies any pending
/// render-distance change, carries over chunks still inside the new bounds,
/// generates the missing ones, then finalizes all interior (non-skirt) chunks.
/// No-op when the center chunk is unchanged (except on the very first call or
/// when the render distance changed).
/// </summary>
public void ChunkCleanup(Vector2 centerPosition)
{
IntegerPosition newCenter = Chunk.WorldSpace_To_ChunkSpace(centerPosition);
// Apply a deferred render-distance change; otherwise bail out early when the
// center chunk has not moved (the first pass always runs the full rebuild).
if (renderDistance != newRenderDistance)
renderDistance = newRenderDistance;
else if (newCenter == center && !firstRender)
return;
else if (firstRender)
firstRender = false;
IntegerPosition offsetFromOriginal = center - newCenter; // NOTE(review): computed but never used below.
Chunk[,] newChunkSet = new Chunk[DoubleDist, DoubleDist];
IntegerPosition render_offset = new IntegerPosition(renderDistance, renderDistance);
IntegerPosition minPos = newCenter - render_offset;
IntegerPosition maxPos = newCenter + render_offset;
Chunk c;
//move chunks
for (int x = 0 ; x < DoubleDist; x++)
{
for (int y = 0; y < DoubleDist; y++)
{
// Guard against reading outside the old grid when renderDistance grew.
c = (x < Chunks.GetLength(0) && y < Chunks.GetLength(1)) ? Chunks[x, y] : default(Chunk);
if (c.IsValid)
{
// NOTE(review): the new grid spans [minPos, maxPos] inclusive, but this
// test uses strict < on the max edge, so chunks sitting exactly at maxPos
// are regenerated instead of reused — confirm whether <= was intended.
if (
c.ChunkIndexPosition.X >= minPos.X &&
c.ChunkIndexPosition.Y >= minPos.Y &&
c.ChunkIndexPosition.X < maxPos.X &&
c.ChunkIndexPosition.Y < maxPos.Y
)
{
// Still visible: carry the chunk over to its new grid slot.
IntegerPosition index = c.ChunkIndexPosition + render_offset - newCenter;
newChunkSet[index.X, index.Y] = c;
}
else
{
}
}
// Fill any slot not populated by the carry-over above with a fresh chunk.
if (newChunkSet[x, y].IsValid)
continue;
IntegerPosition newChunkPos = new IntegerPosition(x, y) - render_offset + newCenter;
Chunk new_c = ChunkGenerator.CreateChunk(newChunkPos);
newChunkSet[x, y] = new_c;
}
}
Chunks = newChunkSet;
center = newCenter;
//verify z values and orientations
for (int x = 1; x < DoubleDist-1; x++) //do not verify skirt chunks, only: [1, n-1]
{
for (int y = 1; y < DoubleDist-1; y++)
{
if (!Chunks[x, y].IsFinalized)
{
ChunkGenerator.FinalizeChunk(this, ref Chunks[x, y]);
}
}
}
}
/// <summary>
/// Recomputes the orientation byte for every tile in the structure and stores
/// it in each unit's VAO_Index. Tiles whose mask reaches 15 or more are sunk
/// one unit in Z and reset to orientation 0 — presumably collapsing tiles
/// surrounded by lower neighbours; confirm intent against the sprite atlas.
/// </summary>
/// <param name="chunkTileSpace">The chunk's origin in world tile space.</param>
/// <param name="structure">The render structure whose tiles are oriented.</param>
public void PerformTileOrientation(IntegerPosition chunkTileSpace, ref RenderStructure structure)
{
IntegerPosition localPos = new IntegerPosition(0,0);
for(localPos.Y = 0; localPos.Y < Chunk.CHUNK_TILE_HEIGHT; localPos.Y++)
{
for (localPos.X = 0; localPos.X < Chunk.CHUNK_TILE_WIDTH; localPos.X++)
{
byte orientation = GetTileOrientation(localPos, chunkTileSpace, ref structure);
if (orientation >= 15)
{
orientation = 0;
structure.structuralUnits[localPos.X][localPos.Y].Position -= new Vector3(0,0,1);
}
structure.structuralUnits[localPos.X][localPos.Y].VAO_Index = orientation;
}
}
}
}
}
<file_sep>using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.WorldSpace.ChunkSpace
{
/// <summary>
/// An integer 2D coordinate with value equality, component-wise arithmetic,
/// and implicit conversions to/from OpenTK's Vector2 (flooring on the way in).
/// </summary>
public struct IntegerPosition : IEquatable<IntegerPosition>
{
    int x, y;

    public int X { get => x; set => x = value; }
    public int Y { get => y; set => y = value; }

    public IntegerPosition(int x, int y)
    {
        this.x = x;
        this.y = y;
    }

    /// <summary>Component-wise equality.</summary>
    public bool Equals(IntegerPosition position) => position.x == x && position.y == y;

    // BUGFIX: the == / != operators were defined without overriding
    // object.Equals and GetHashCode (compiler warnings CS0660/CS0661), which
    // also breaks use as a Dictionary/HashSet key.
    public override bool Equals(object obj) => obj is IntegerPosition other && Equals(other);

    public override int GetHashCode() => unchecked((x * 397) ^ y);

    public override string ToString()
    {
        return String.Format("{0}, {1}", x, y);
    }

    public static bool operator ==(IntegerPosition pos1, IntegerPosition pos2)
    {
        return pos1.Equals(pos2);
    }

    public static bool operator !=(IntegerPosition pos1, IntegerPosition pos2)
    {
        return !pos1.Equals(pos2);
    }

    public static IntegerPosition operator -(IntegerPosition pos1, IntegerPosition pos2)
    {
        return new IntegerPosition(pos1.x - pos2.x, pos1.y - pos2.y);
    }

    public static IntegerPosition operator +(IntegerPosition pos1, IntegerPosition pos2)
    {
        return new IntegerPosition(pos1.x + pos2.x, pos1.y + pos2.y);
    }

    public static IntegerPosition operator *(IntegerPosition pos1, int scalar)
    {
        return new IntegerPosition(pos1.x * scalar, pos1.y * scalar);
    }

    // Vector2 conversions floor toward negative infinity so world-space
    // positions map to the containing integer cell.
    public static implicit operator IntegerPosition(Vector2 pos) => new IntegerPosition((int)Math.Floor(pos.X), (int)Math.Floor(pos.Y));
    public static implicit operator Vector2(IntegerPosition pos) => new Vector2(pos.X, pos.Y);
}
}
<file_sep>using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Scenes;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Components.Rendering
{
/// <summary>
/// Component that writes sprite/render-unit data onto its owning GameObject.
/// </summary>
public class SpriteComponent : GameComponent
{
    public SpriteComponent()
        : base()
    {
    }

    /// <summary>
    /// Replaces the parent object's render unit wholesale.
    /// might need to make this thread safe.
    /// </summary>
    /// <param name="ru">The render unit to install.</param>
    public virtual void SetSprite(RenderUnit ru)
    {
        if (ParentObject == null)
            return;
        ParentObject.renderUnit = ru;
    }

    /// <summary>
    /// Points the parent object's render unit at a sprite id / VAO index and
    /// flags it as initialized. No-op when the component is unattached.
    /// </summary>
    public virtual void SetSprite(int spriteId, int vao_Index = 0)
    {
        var owner = ParentObject;
        if (owner == null)
            return;
        owner.renderUnit.Id = spriteId;
        owner.renderUnit.VAO_Index = vao_Index;
        owner.renderUnit.IsInitialized = true;
    }
}
}
<file_sep>using OpenTK;
using OpenTK.Input;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Systems.Input
{
/// <summary>
/// Routes GameWindow input events to registered InputHandlers. Handlers are
/// grouped under an integer ID and subscribed per InputType bit.
/// </summary>
public class InputSystem : GameSystem
{
    // Handlers subscribed to each concrete input event kind.
    private Dictionary<InputType, List<InputHandler>> inputDirectory = new Dictionary<InputType, List<InputHandler>>()
    {
        { InputType.Keyboard_Press, new List<InputHandler>() },
        { InputType.Keyboard_UpDown, new List<InputHandler>() },
        { InputType.Mouse_Button, new List<InputHandler>() },
        { InputType.Mouse_Move, new List<InputHandler>() },
        { InputType.Mouse_Wheel, new List<InputHandler>() }
    };

    // Handlers grouped by their registration ID.
    private Dictionary<int, List<InputHandler>> handlerGroups = new Dictionary<int, List<InputHandler>>();

    public InputSystem(Game game)
        : base(game)
    {
        // Subscribe once to every window input event; fan-out happens below.
        game.KeyDown += GameWindow_KeyDown;
        game.KeyPress += GameWindow_KeyPress;
        game.KeyUp += GameWindow_KeyUp;
        game.MouseDown += GameWindow_MouseDown;
        game.MouseUp += GameWindow_MouseUp;
        game.MouseWheel += GameWindow_MouseWheel;
        game.MouseMove += GameWindow_MouseMove;
    }

    /// <summary>
    /// Creates and registers a handler for the given input type bitmask.
    /// </summary>
    /// <param name="inputType">Bitwise OR of the InputType events to receive.</param>
    /// <param name="enabled">Whether the handler starts enabled.</param>
    /// <param name="handlerID">Group to join; -1 (default) auto-assigns the next group ID.</param>
    /// <returns>The newly registered handler.</returns>
    public InputHandler RegisterHandler(InputType inputType, bool enabled = true, int handlerID = -1)
    {
        // BUGFIX: was (handlerID > 0), which made it impossible to target
        // group 0 explicitly even though -1 is the "auto-assign" sentinel.
        int newID = (handlerID >= 0) ? handlerID : handlerGroups.Keys.Count;
        InputHandler handler = new InputHandler(newID, inputType, enabled);
        if (handlerGroups.ContainsKey(newID))
            handlerGroups[newID].Add(handler);
        else
        {
            handlerGroups.Add(newID, new List<InputHandler> { handler });
        }
        // Subscribe the handler to every event kind present in its bitmask
        // (InputType values are the powers of two from 1 through 16).
        for (int key = 1; key <= 16; key *= 2)
            if (0 < ((int)inputType & key))
                inputDirectory[(InputType)key].Add(handler);
        return handler;
    }

    // Each GameWindow_* method fans the event out to all enabled handlers
    // subscribed to the corresponding InputType.
    private void GameWindow_MouseMove(object sender, MouseMoveEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Mouse_Move])
            if (handle.Enabled)
                handle.Handle_Mouse_Move(sender, e);
    }

    private void GameWindow_MouseWheel(object sender, MouseWheelEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Mouse_Wheel])
            if (handle.Enabled)
                handle.Handle_Mouse_Wheel(sender, e);
    }

    private void GameWindow_MouseUp(object sender, MouseButtonEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Mouse_Button])
            if (handle.Enabled)
                handle.Handle_Mouse_Button(sender, e);
    }

    private void GameWindow_MouseDown(object sender, MouseButtonEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Mouse_Button])
            if (handle.Enabled)
                handle.Handle_Mouse_Button(sender, e);
    }

    // KeyUp and KeyDown both feed the Keyboard_UpDown subscribers; the
    // handler distinguishes the edge via the event args.
    private void GameWindow_KeyUp(object sender, KeyboardKeyEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Keyboard_UpDown])
            if (handle.Enabled)
                handle.Handle_Keyboard_UpDown(sender, e);
    }

    private void GameWindow_KeyPress(object sender, KeyPressEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Keyboard_Press])
            if (handle.Enabled)
                handle.Handle_Keyboard_Press(sender, e);
    }

    private void GameWindow_KeyDown(object sender, KeyboardKeyEventArgs e)
    {
        foreach (InputHandler handle in inputDirectory[InputType.Keyboard_UpDown])
            if (handle.Enabled)
                handle.Handle_Keyboard_UpDown(sender, e);
    }
}
}
<file_sep>using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.WorldSpace.ChunkSpace
{
/// <summary>
/// A single map tile: height (Z), an arbitrary data payload, and a bitmask
/// recording which neighbours sit lower than this tile (used when picking
/// edge sprites).
/// </summary>
public struct Tile
{
    // Pixel footprint of a tile sprite and derived layout constants.
    public static readonly int TILE_WIDTH = 36;
    public static readonly int TILE_HEIGHT = 25;
    public static readonly int TILE_AREA = TILE_WIDTH * TILE_HEIGHT;
    public static readonly int TILE_Y_INC = TILE_HEIGHT / 2 - 3;

    // Bit value contributed by each neighbour direction, indexed [dx+1, dy+1].
    public static byte[,] ORIENTATIONS = new byte[,]
    {
        { 2, 6, 4 },
        { 3, 0, 12 },
        { 1, 9, 8 }
    };

    private int _data;
    private int _z;
    private byte _orientation;

    /// <summary>Tile height.</summary>
    public int Z { get => _z; set => _z = value; }
    /// <summary>Neighbour bitmask (see ORIENTATIONS).</summary>
    public byte Orientation { get => _orientation; set => _orientation = value; }
    /// <summary>User-defined payload.</summary>
    public int Data { get => _data; set => _data = value; }

    public Tile(int _z, int _data = 0, byte _orientation = 0)
    {
        this._z = _z;
        this._data = _data;
        this._orientation = _orientation;
    }
}
}
<file_sep>using isometricgame.GameEngine.WorldSpace.ChunkSpace;
using isometricgame.GameEngine.WorldSpace.Generators.PerlinNoise;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
namespace isometricgame.GameEngine.WorldSpace.Generators
{
/// <summary>
/// Base class for terrain generators: owns the seed and a Perlin noise source
/// derived from it; subclasses create chunks and finalize them against the
/// directory.
/// </summary>
public abstract class Generator
{
    private readonly int seed;
    private readonly Perlin perlin;

    /// <summary>The seed this generator (and its noise source) was built from.</summary>
    public int Seed => seed;

    /// <summary>Noise source available to subclasses.</summary>
    protected Perlin Perlin => perlin;

    public Generator(int seed)
    {
        this.seed = seed;
        perlin = new Perlin(seed);
    }

    /// <summary>Produces the chunk at the given chunk-space position.</summary>
    public abstract Chunk CreateChunk(Vector2 pos);

    /// <summary>Post-processes a chunk once its neighbours are available.</summary>
    public abstract void FinalizeChunk(ChunkDirectory chunkDirectory, ref Chunk c);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTK.Graphics.OpenGL;
namespace isometricgame.GameEngine.Rendering
{
/// <summary>
/// Use this for sprites that have multiple versions.
/// </summary>
/// <summary>
/// Use this for sprites that have multiple versions.
/// Slices a texture atlas into subWidth x subHeight cells; each cell gets its
/// own vertex array and GL vertex-array object, selectable via VAO_Index.
/// </summary>
public class Sprite
{
private string name;
private Texture2D texture;
// One GL VAO handle per atlas cell.
private int[] vertexArrayObjects;
private int subWidth;
private int subHeight;
// Current cell selection: column index plus a row offset (see VAO_Index).
private int vaoIndex = 0, vaoRow = 0;
// Draw-position offsets exposed to consumers via OffsetX/OffsetY.
private float offsetX, offsetY;
private int columnCount, rowCount;
private int count;
private VertexArray[] vertexArrays;
public int[] VertexArrayObjects { get => vertexArrayObjects; set => vertexArrayObjects = value; }
public string Name => name;
public int SubWidth { get => subWidth; private set => subWidth = value; }
public int SubHeight { get => subHeight; private set => subHeight = value; }
public Texture2D Texture { get => texture; private set => texture = value; }
// Effective cell index: the stored column index plus the selected row's offset.
// Note the setter writes only the column component (vaoIndex).
public int VAO_Index { get => vaoIndex + (VAO_Row * columnCount); set => vaoIndex = value; }
public int VAO_Row { get => vaoRow; set => vaoRow = value; }
public float OffsetX { get => offsetX; protected set => offsetX = value; }
public float OffsetY { get => offsetY; protected set => offsetY = value; }
/// <summary>
/// Copy constructor. Copies metadata and shares the GL handles (texture and
/// VAO ids) with the source — no new GL objects are created here.
/// </summary>
/// <param name="s">Sprite to copy.</param>
/// <param name="vboIndex">Cell index override; a negative value keeps the source's index.</param>
public Sprite(Sprite s, int vboIndex = -1)
{
offsetX = s.offsetX;
offsetY = s.offsetY;
name = s.name;
texture = s.texture;
subWidth = s.subWidth;
subHeight = s.subHeight;
this.vaoIndex = (vboIndex < 0) ? s.vaoIndex : vboIndex;
columnCount = s.columnCount;
rowCount = s.rowCount;
count = s.count;
vertexArrays = new VertexArray[s.vertexArrays.Length];
vertexArrayObjects = new int[s.vertexArrayObjects.Length];
for (int i = 0; i < s.vertexArrayObjects.Length; i++)
vertexArrayObjects[i] = s.vertexArrayObjects[i];
for (int i = 0; i < count; i++)
vertexArrays[i] = s.vertexArrays[i];
}
/// <summary>
/// Builds a sprite from a texture atlas: computes the cell grid, generates
/// one vertex array and VAO per cell, and uploads the attribute layout.
/// </summary>
public Sprite(
Texture2D texture,
int subWidth,
int subHeight,
string name="",
float offsetX = 0,
float offsetY = 0,
int vboIndex = 0,
float r = 0,
float g = 0,
float b = 0,
float a = 0)
{
this.offsetX = offsetX;
this.offsetY = offsetY;
this.name = name;
this.texture = texture;
this.subWidth = subWidth;
this.subHeight = subHeight;
this.vaoIndex = vboIndex;
// Cell grid dimensions derived from the atlas and cell sizes.
columnCount = texture.Width / subWidth;
rowCount = texture.Height / subHeight;
count = columnCount * rowCount;
vertexArrays = new VertexArray[count];
vertexArrayObjects = new int[count];
for (int i = 0; i < vertexArrayObjects.Length; i++)
vertexArrayObjects[i] = GL.GenVertexArray();
Vertex[] vertices;
// Build one quad's worth of vertices per atlas cell, row-major.
for(int y = 0; y < rowCount; y++)
{
for (int x = 0; x < columnCount; x++)
{
vertices = VertexArray.VerticesFromDimensions(texture.Width, texture.Height, subWidth, subHeight, x, y, r, g, b, a);
vertexArrays[x + (y * columnCount)] = new VertexArray(texture, vertices);
}
}
BindVertexArray();
}
/// <summary>Binds the currently selected cell's buffers for drawing.</summary>
public void Use()
{
vertexArrays[VAO_Index].Use();
GL.BindVertexArray(vertexArrayObjects[VAO_Index]);
}
// Uploads the interleaved attribute layout for every cell's VAO:
// vec2 position, vec2 texcoord, vec4 color — 8 floats per vertex.
private void BindVertexArray()
{
for (int i = 0; i < vertexArrays.Length; i++)
{
GL.BindVertexArray(vertexArrayObjects[i]);
vertexArrays[i].BindVertexBuffer();
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 8 * sizeof(float), 0);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, 8 * sizeof(float), 2 * sizeof(float));
GL.EnableVertexAttribArray(1);
GL.VertexAttribPointer(2, 4, VertexAttribPointerType.Float, false, 8 * sizeof(float), 4 * sizeof(float));
GL.EnableVertexAttribArray(2);
}
// Unbind to avoid leaking VAO state into later GL calls.
GL.BindVertexArray(0);
}
}
}
<file_sep>using isometricgame.GameEngine;
using isometricgame.GameEngine.WorldSpace;
using isometricgame.GameEngine.Systems;
using OpenTK;
using OpenTK.Graphics.OpenGL;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using isometricgame.GameEngine.Components.Rendering;
using isometricgame.GameEngine.Scenes;
using isometricgame.GameEngine.WorldSpace.Generators;
using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Events.Arguments;
using isometricgame.GameEngine.Systems.Rendering;
using isometricgame.GameEngine.WorldSpace.ChunkSpace;
namespace isometricgame.GameEngine.WorldSpace
{
/// <summary>
/// Scene that renders an isometric chunked world: keeps the chunk directory
/// centered on the camera and draws visible tiles in a diagonal (isometric
/// back-to-front) traversal.
/// </summary>
public class WorldScene : Scene
{
private ChunkDirectory chunkDirectory;
private Camera camera;
// Ranges recomputed each frame from camera zoom (see UpdateFrame).
public int renderTileRange, renderDistance, tileRange;
private SpriteLibrary spriteLibrary;
public Camera Camera { get => camera; private set => camera = value; }
public ChunkDirectory ChunkDirectory { get => chunkDirectory; set => chunkDirectory = value; }
// Debug toggle between the two tileRange formulas below.
public bool test_flop_REMOVE = true;
public WorldScene(Game game, Generator worldGenerator, int renderDistance=0)
: base(game)
{
this.ChunkDirectory = new ChunkDirectory(renderDistance, worldGenerator);
this.Camera = new Camera(this);
this.renderDistance = renderDistance;
spriteLibrary = game.GetSystem<SpriteLibrary>();
}
/// <summary>
/// Pans the camera, refreshes the chunk grid around it, and recomputes the
/// zoom-dependent tile/chunk ranges. The log-based formulas appear to shrink
/// the range as zoom grows — TODO confirm the intended falloff and constants.
/// </summary>
public override void UpdateFrame(FrameArgument e)
{
Camera.Pan_Linear((float)e.DeltaTime);
SceneMatrix = Camera.GetView();
ChunkDirectory.ChunkCleanup(Camera.Position.Xy);
renderTileRange = (int)((2 / Math.Log(camera.Zoom + 1)) * 16);
if (test_flop_REMOVE)
tileRange = (int)((2 / Math.Log(camera.Zoom * 1.5f + 1)) * 16);
else
tileRange = (int)((2 / Math.Log(camera.Zoom + 1)) * 16);
renderDistance = (renderTileRange / Chunk.CHUNK_TILE_WIDTH) + 2;
chunkDirectory.RenderDistance = renderDistance;
base.UpdateFrame(e);
}
/// <summary>
/// Draws visible tiles in a zig-zag diagonal sweep (stepping along the
/// (-1,-1) diagonal, widening then narrowing row by row) so tiles are
/// emitted roughly back-to-front for the isometric projection.
/// </summary>
public override void RenderFrame(RenderService renderService, FrameArgument e)
{
// RenderDistance is applied lazily by ChunkCleanup; skip the frame until
// the directory has caught up with the requested distance.
if (chunkDirectory.RenderDistance != renderDistance)
return; //prevent race condition
int flooredX = (int)camera.TargetPosition.X;
int flooredY = (int)camera.TargetPosition.Y;
IntegerPosition cpos;
IntegerPosition spos;
RenderUnit[] renderUnits;
float tx, ty;
int spriteId;
// Diagonal step and per-row pivot used by the zig-zag traversal.
IntegerPosition rowOffset = new IntegerPosition(1, 0);
IntegerPosition colOffset = new IntegerPosition(-1, -1);
int flip = 1, flipadd = 0;
int test_n = tileRange;
// Approximate tile count of the diamond-shaped visible region.
int range = (int)(2.2f * test_n * test_n) + ((test_n - 1) * (test_n - 1));
int test_n_calc = test_n;
IntegerPosition basePos = new IntegerPosition(flooredX - test_n / 3, flooredY + (4 * test_n / 3)), yPrimeDescent = basePos;
int flop = test_n_calc;
for (int range_prime = 0; range_prime < range; range_prime++)
{
yPrimeDescent += colOffset;
// Locate the chunk and the tile within it for this world position.
cpos = ChunkDirectory.DeliminateChunkIndex(yPrimeDescent);
spos = ChunkDirectory.Localize(yPrimeDescent, cpos);
for (int structureIndex = 0; structureIndex < Chunk.CHUNK_MAX_STRUCTURE_COUNT; structureIndex++)
{
if (ChunkDirectory.Chunks[cpos.X, cpos.Y].ChunkStructures[structureIndex].IsValid)
{
renderUnits = ChunkDirectory.Chunks[cpos.X, cpos.Y].ChunkStructures[structureIndex].StructuralUnits[spos.X];
if (renderUnits[spos.Y].IsInitialized)
{
// Project cartesian tile coords (plus height) to screen space.
tx = Chunk.CartesianToIsometric_X(yPrimeDescent.X, yPrimeDescent.Y);
ty = Chunk.CartesianToIsometric_Y(yPrimeDescent.X, yPrimeDescent.Y, renderUnits[spos.Y].Position.Z);
spriteId = renderUnits[spos.Y].Id; // NOTE(review): assigned but never used.
renderService.DrawSprite(renderUnits[spos.Y].Id, tx, ty, renderUnits[spos.Y].VAO_Index);
}
}
}
// End of the current diagonal run: pivot to the next row, alternating
// the widen/narrow direction via flip.
flop--;
if (flop <= 0)
{
rowOffset += colOffset * flip;
basePos += rowOffset;
yPrimeDescent = basePos;
flip = flip * -1;
flipadd += flip;
flop = test_n_calc + flipadd;
}
}
base.RenderFrame(renderService, e);
}
/// <summary>
/// Draws a free-standing scene object, converting its cartesian position to
/// the isometric projection first.
/// </summary>
protected override void DrawSprite(RenderService renderService, ref RenderUnit renderUnit)
{
float cx = Chunk.CartesianToIsometric_X(renderUnit.X, renderUnit.Y);
float cy = Chunk.CartesianToIsometric_Y(renderUnit.X, renderUnit.Y, renderUnit.Z);
renderService.DrawSprite(ref renderUnit, cx, cy);
}
}
}
<file_sep>using isometricgame.GameEngine.Rendering.Animation;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Systems.Rendering
{
/// <summary>
/// Stores named AnimationSchematics and provides lookup by name or insertion
/// index.
/// </summary>
public class AnimationSchematicLibrary : GameSystem
{
    private Dictionary<string, AnimationSchematic> animationSchematics = new Dictionary<string, AnimationSchematic>();

    public AnimationSchematicLibrary(Game gameRef)
        : base(gameRef)
    {
    }

    /// <summary>
    /// Not finished yet: reads the schematic file line by line but does not
    /// yet parse it into a schematic.
    /// </summary>
    /// <param name="path">Path of the schematic file.</param>
    /// <param name="name">Registration name; defaults to the file name without extension.</param>
    public void LoadSchematic(string path, string name = "")
    {
        if (!File.Exists(path))
            throw new FileNotFoundException();
        if (name == String.Empty)
            name = Path.GetFileNameWithoutExtension(path);
        // Placeholders for the eventual parsed schematic data.
        int[][] nodes;
        int nodeCount = -1;
        float animSpeed = 0.1f;
        using (StreamReader reader = File.OpenText(path))
        {
            string line;
            // BUGFIX: the loop previously tested EndOfStream without ever
            // reading from the stream, so it spun forever on any non-empty
            // file. Consume the stream line by line instead.
            while ((line = reader.ReadLine()) != null)
            {
                // TODO: parse node definitions into nodes/nodeCount/animSpeed.
            }
        }
    }

    /// <summary>
    /// Remove later.
    /// Registers a pre-built schematic; unnamed ones get "anim_{count}".
    /// </summary>
    public void AddSchematic(AnimationSchematic schem, string name = "")
    {
        if (name == "")
            name = "anim_" + animationSchematics.Count;
        animationSchematics.Add(name, schem);
    }

    /// <summary>Looks up a schematic by registration name.</summary>
    public AnimationSchematic GetSchematic(string name) => animationSchematics[name];

    /// <summary>Looks up a schematic by insertion index.</summary>
    public AnimationSchematic GetSchematic(int id) => animationSchematics.Values.ElementAt(id);
}
}
<file_sep>using isometricgame.GameEngine.Events.Arguments;
using isometricgame.GameEngine.Scenes;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine
{
/// <summary>
/// Attributes are added to GameObjects to give additional functionalities. Such as hitboxes, physics, and more.
/// </summary>
/// <summary>
/// Attributes are added to GameObjects to give additional functionalities. Such as hitboxes, physics, and more.
/// </summary>
public class GameComponent
{
    private GameObject parentObject;
    private bool enabled = true;

    /// <summary>The GameObject this component is attached to.</summary>
    public GameObject ParentObject { get => parentObject; internal set => parentObject = value; }

    public GameComponent()
    {
    }

    // Engine-facing entry point; forwards to the overridable hook below.
    internal void _initalize()
    {
        Initalize();
    }

    /// <summary>
    /// Initialization hook for subclasses.
    /// BUGFIX: made virtual — as a non-virtual empty method it could never be
    /// overridden, so the _initalize() call had no effect for any subclass.
    /// This mirrors the Update/OnUpdate virtual-hook pattern below.
    /// </summary>
    protected virtual void Initalize()
    {
    }

    /// <summary>
    /// Logical update frame.
    /// </summary>
    internal void Update(FrameArgument args)
    {
        if (enabled)
        {
            OnUpdate(args);
        }
    }

    /// <summary>Per-frame hook for subclasses; only invoked while enabled.</summary>
    protected virtual void OnUpdate(FrameArgument args)
    {
    }

    /// <summary>Flips the enabled state.</summary>
    public void Toggle()
    {
        enabled = !enabled;
    }

    /// <summary>Sets the enabled state explicitly.</summary>
    public void Toggle(bool b)
    {
        enabled = b;
    }

    /// <summary>
    /// Creates a detached copy (ParentObject is not copied); subclasses
    /// should override to clone their own state.
    /// </summary>
    public virtual GameComponent Clone()
    {
        GameComponent newComp = new GameComponent();
        newComp.enabled = enabled;
        return newComp;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Rendering.Animation
{
/// <summary>
/// Maps (time, node) to a sprite VBO index by cycling through each node's
/// frame list at a fixed speed; supports pausing and resuming without a
/// visible frame jump.
/// </summary>
public class AnimationSchematic
{
    private int[][] nodes;
    private double frameDuration;
    private int currentFrame = 0;
    private int frameOffset;
    private double pausedAt;
    private int lastIndex = 0;

    /// <summary>The VBO index most recently returned by GetVBO_Index.</summary>
    public int LastNode => lastIndex;

    public AnimationSchematic(int nodeCount, double speed = 1, int startFrame = 0)
    {
        nodes = new int[nodeCount][];
        frameDuration = speed;
        frameOffset = startFrame;
    }

    /// <summary>Assigns the cycle of sprite indices played for a node.</summary>
    public void DefineNode(int node, int[] spriteIndices) => nodes[node] = spriteIndices;

    /// <summary>
    /// Returns the sprite index for the given node at the given time. While
    /// paused, the frame stays frozen at its last computed value.
    /// </summary>
    public int GetVBO_Index(double time, int node)
    {
        bool running = pausedAt == 0;
        if (running)
            currentFrame = ComputeFrame(node, time, frameOffset);
        lastIndex = nodes[node][currentFrame];
        return lastIndex;
    }

    /// <summary>Freezes the animation, remembering when the pause began.</summary>
    public void Pause(double pauseTime) => pausedAt = pauseTime;

    /// <summary>
    /// Resumes the animation, shifting the frame offset so playback continues
    /// from the frame that was showing when the pause began.
    /// </summary>
    public void Unpause(double unpauseTime, int node)
    {
        int cycleLength = nodes[node].Length;
        // Find the time skip, then offset it so we continue on the same frame.
        int resumeFrame = ComputeFrame(node, unpauseTime, 0) + cycleLength - currentFrame;
        frameOffset = resumeFrame % cycleLength;
        pausedAt = 0;
    }

    // Frame index: elapsed frame count plus offset, wrapped to the node's cycle.
    private int ComputeFrame(int node, double time, int givenOffset)
    {
        return (int)((time / frameDuration) + givenOffset) % nodes[node].Length;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.WorldSpace.Generators.PerlinNoise2
{
// Empty placeholder for a future Perlin-noise region abstraction; no
// implementation yet.
class PerlinRegion
{
}
}
<file_sep>using isometricgame.GameEngine.Components.Rendering;
using isometricgame.GameEngine.Events.Arguments;
using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Systems;
using isometricgame.GameEngine.Systems.Rendering;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Scenes
{
/// <summary>
/// Base container for a renderable, updatable scene: holds static and dynamic
/// object/structure lists plus the projection matrix used to draw them.
/// </summary>
public class Scene
{
    private Game game;
    private List<RenderStructure> staticSceneStructures = new List<RenderStructure>();
    private List<RenderStructure> dynamicSceneStructures = new List<RenderStructure>();
    private List<GameObject> staticSceneObjects = new List<GameObject>();
    private List<GameObject> dynamicSceneObjects = new List<GameObject>();
    private Matrix4 sceneMatrix;

    public Game Game => game;
    public Matrix4 SceneMatrix { get => sceneMatrix; protected set => sceneMatrix = value; }
    public List<RenderStructure> StaticSceneStructures { get => staticSceneStructures; protected set => staticSceneStructures = value; }
    public List<RenderStructure> DynamicSceneStructures { get => dynamicSceneStructures; protected set => dynamicSceneStructures = value; }
    public List<GameObject> StaticSceneObjects { get => staticSceneObjects; protected set => staticSceneObjects = value; }
    public List<GameObject> DynamicSceneObjects { get => dynamicSceneObjects; protected set => dynamicSceneObjects = value; }

    public Scene(Game game)
    {
        this.game = game;
        // Default orthographic projection, pushed back one unit along Z.
        sceneMatrix = Matrix4.CreateOrthographic(1200, 900, 0.01f, 30000f) * Matrix4.CreateTranslation(0, 0, 1);
    }

    /// <summary>Draws static objects first, then dynamic ones.</summary>
    public virtual void RenderFrame(RenderService renderService, FrameArgument e)
    {
        RenderSceneObjects(renderService, e, StaticSceneObjects.ToArray());
        RenderSceneObjects(renderService, e, DynamicSceneObjects.ToArray());
    }

    /// <summary>Updates static objects first, then dynamic ones.</summary>
    public virtual void UpdateFrame(FrameArgument e)
    {
        UpdateObjects(e, StaticSceneObjects.ToArray());
        UpdateObjects(e, DynamicSceneObjects.ToArray());
    }

    // Receives a snapshot array (presumably so the backing lists can change
    // during iteration — confirm); draws every initialized render unit.
    protected virtual void RenderSceneObjects(RenderService renderService, FrameArgument e, GameObject[] sceneObjects)
    {
        for (int i = 0; i < sceneObjects.Length; i++)
        {
            GameObject so = sceneObjects[i];
            if (!so.renderUnit.IsInitialized)
                continue;
            DrawSprite(renderService, ref so.renderUnit);
        }
    }

    protected virtual void UpdateObjects(FrameArgument e, GameObject[] gameObjects)
    {
        for (int i = 0; i < gameObjects.Length; i++)
            gameObjects[i].OnUpdate(e);
    }

    /// <summary>Default draw: pass the unit's stored position straight through.</summary>
    protected virtual void DrawSprite(RenderService renderService, ref RenderUnit renderUnit)
    {
        renderService.DrawSprite(ref renderUnit, renderUnit.Position.X, renderUnit.Position.Y, renderUnit.Position.Z);
    }
}
}
<file_sep>using OpenTK;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Rendering
{
/// <summary>
/// Interleaved vertex: 2D position, 2D texture coordinate, and an RGBA color
/// stored as normalized floats (X=R, Y=G, Z=B, W=A).
/// </summary>
public struct Vertex
{
    private Vector2 position;
    private Vector2 textcoord;
    private Vector4 color;

    public Vector2 Position => position;
    public Vector2 TextCoord => textcoord;

    /// <summary>
    /// Color as System.Drawing.Color, converting between byte channels and
    /// the normalized float vector.
    /// </summary>
    public Color Color
    {
        get
        {
            int a = (int)(color.W * 255);
            int r = (int)(color.X * 255);
            int g = (int)(color.Y * 255);
            int b = (int)(color.Z * 255);
            return Color.FromArgb(a, r, g, b);
        }
        set => color = new Vector4(value.R / 255f, value.G / 255f, value.B / 255f, value.A / 255f);
    }

    /// <summary>Byte size of one vertex as laid out for the GPU.</summary>
    public static int SizeInBytes => Vector2.SizeInBytes * 2 + Vector4.SizeInBytes;

    public Vertex(Vector2 position, Vector2 textcoord, float r = 0, float g = 0, float b = 0, float a = 1)
    {
        this.position = position;
        this.textcoord = textcoord;
        color = new Vector4(r, g, b, a);
    }
}
}
<file_sep>using OpenTK;
using OpenTK.Input;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Systems.Input
{
/// <summary>
/// Receives input events fanned out by InputSystem. Stores the latest event
/// args per input kind (null while disabled) and tracks per-key/button switch
/// state via a small up/down state machine.
/// </summary>
public class InputHandler
{
private bool enabled;
private int handlerID;
private InputType inputType;
// Switch state per declared key/button (see DeclareKeySwitch/DeclareMouseToggle).
private Dictionary<Key, InputSwitchType> keyboard_UpDownSwitch = new Dictionary<Key, InputSwitchType>();
private Dictionary<MouseButton, InputSwitchType> mouse_UpDownToggle = new Dictionary<MouseButton, InputSwitchType>();
public bool Enabled { get => enabled; set => enabled = value; }
public int HandlerID { get => handlerID; private set => handlerID = value; }
public InputType InputType { get => inputType; set => inputType = value; }
// Latest event args received for each input kind; null when disabled at
// the time the event arrived.
private KeyboardKeyEventArgs keyboard_UpDown;
private KeyPressEventArgs keyboard_Press;
private MouseButtonEventArgs mouse_Button;
private MouseMoveEventArgs mouse_Move;
private MouseWheelEventArgs mouse_Wheel;
public KeyboardKeyEventArgs Keyboard_UpDown { get => keyboard_UpDown; private set => keyboard_UpDown = value; }
public KeyPressEventArgs Keyboard_Press { get => keyboard_Press; private set => keyboard_Press = value; }
public MouseButtonEventArgs Mouse_Button { get => mouse_Button; private set => mouse_Button = value; }
public MouseMoveEventArgs Mouse_Move { get => mouse_Move; private set => mouse_Move = value; }
public MouseWheelEventArgs Mouse_Wheel { get => mouse_Wheel; private set => mouse_Wheel = value; }
internal InputHandler(int handlerID, InputType inputType, bool enabled = true)
{
this.enabled = enabled;
this.handlerID = handlerID;
this.inputType = inputType;
}
/// <summary>Starts tracking switch state for a key (initial state: RepeatUp).</summary>
public void DeclareKeySwitch(Key key)
{
keyboard_UpDownSwitch.Add(key, InputSwitchType.RepeatUp);
}
/// <summary>Raw switch state for a declared key.</summary>
public InputSwitchType Keyboard_SwitchState(Key key) => keyboard_UpDownSwitch[key];
// True only in the "Inital" states, i.e. right after a transition.
public bool Keyboard_SwitchState_Bool(Key key) => (keyboard_UpDownSwitch[key] == InputSwitchType.InitalDown || keyboard_UpDownSwitch[key] == InputSwitchType.InitalUp) ? true : false;
/// <summary>Reads the switch and, if set, latches it to RepeatDown.</summary>
public bool Keyboard_SwitchState_BoolReset(Key key)
{
bool ret = Keyboard_SwitchState_Bool(key);
if (ret)
keyboard_UpDownSwitch[key] = InputSwitchType.RepeatDown;
return ret;
}
/// <summary>Reads the switch and, if set, releases it back to RepeatUp.</summary>
public bool Keyboard_SwitchState_BoolResetFree(Key key)
{
bool ret = Keyboard_SwitchState_Bool(key);
if (ret)
keyboard_UpDownSwitch[key] = InputSwitchType.RepeatUp;
return ret;
}
public void RemoveKeySwitch(Key key)
{
keyboard_UpDownSwitch.Remove(key);
}
/// <summary>Starts tracking toggle state for a mouse button.</summary>
public void DeclareMouseToggle(MouseButton button)
{
mouse_UpDownToggle.Add(button, InputSwitchType.RepeatUp);
}
public void RemoveMouseToggle(MouseButton button)
{
mouse_UpDownToggle.Remove(button);
}
// Advances the switch state machine for tracked keys, then stores the args.
internal virtual void Handle_Keyboard_UpDown(object sender, KeyboardKeyEventArgs e)
{
if (keyboard_UpDownSwitch.ContainsKey(e.Key))
{
InputSwitchType state = handleSwitch(e.Keyboard.IsKeyDown(e.Key), keyboard_UpDownSwitch[e.Key]);
if (state != InputSwitchType.NoRead)
keyboard_UpDownSwitch[e.Key] = state;
}
keyboard_UpDown = (enabled) ? e : null;
}
internal virtual void Handle_Keyboard_Press(object sender, KeyPressEventArgs e) => keyboard_Press = (enabled) ? e : null;
// NOTE(review): mouse toggle state is declared but never advanced here —
// the ContainsKey branch is empty; presumably unfinished.
internal virtual void Handle_Mouse_Button(object sender, MouseButtonEventArgs e)
{
if (mouse_UpDownToggle.ContainsKey(e.Button))
{
}
mouse_Button = (enabled) ? e : null;
}
internal virtual void Handle_Mouse_Move(object sender, MouseMoveEventArgs e) => mouse_Move = (enabled) ? e : null;
internal virtual void Handle_Mouse_Wheel(object sender, MouseWheelEventArgs e) => mouse_Wheel = (enabled) ? e : null;
// Switch transition table: Inital* states mark the edge of a transition,
// Repeat* the steady state; NoRead means "no state change".
private InputSwitchType handleSwitch(bool isDown, InputSwitchType state)
{
if (isDown)
{
if (state == InputSwitchType.InitalUp)
return InputSwitchType.RepeatDown;
else if (state == InputSwitchType.RepeatUp)
return InputSwitchType.InitalDown;
}
else
{
if (state == InputSwitchType.InitalDown)
return InputSwitchType.InitalUp;
else if (state == InputSwitchType.RepeatDown)
return InputSwitchType.RepeatUp;
}
return InputSwitchType.NoRead;
}
}
// Bitmask of input event kinds a handler can subscribe to. Values are powers
// of two; InputSystem.RegisterHandler tests each bit individually.
public enum InputType
{
Keyboard_UpDown = 1,
Keyboard_Press = 2,
Mouse_Button = 4,
Mouse_Move = 8,
Mouse_Wheel = 16
};
// Switch states used by InputHandler's up/down state machine; the "Inital"
// states appear to mark the edge of a transition and "Repeat" the steady
// state, with NoRead meaning "no state change".
public enum InputSwitchType
{
NoRead = 0,
InitalDown = 1,
InitalUp = 2,
RepeatDown = 3,
RepeatUp = 4
}
}
<file_sep>using isometricgame.GameEngine.WorldSpace.ChunkSpace;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.WorldSpace.Generators.PerlinNoise
{
/// <summary>
/// Deterministic value-noise generator: for each chunk it bilinearly
/// interpolates per-coordinate pseudo-random heights over successively
/// smaller stride regions ("frequency" octaves) and sums the results.
/// </summary>
public class Perlin
{
private int seed;
// Number of octaves summed per chunk; stride halves each octave.
private int frequency = 2;
// NOTE(review): populated in the constructor but never read afterwards.
private int[,] seedMap = new int[16, 16];
// NOTE(review): declared but unused in the methods below.
private static readonly Vector2[] OFFSETS = new Vector2[]
{
new Vector2(1,0),
new Vector2(0,1),
new Vector2(1,1)
};
public Perlin(int seed)
{
this.seed = seed;
Random rand = new Random(seed);
for (int i = 0; i < 16; i++)
{
for (int j = 0; j < 16; j++)
{
seedMap[i,j] = rand.Next();
}
}
}
/// <summary>
/// Produces a CHUNK_TILE_WIDTH-square grid of summed, interpolated noise
/// values for the chunk at the given chunk-space position.
/// </summary>
public float[,] InterprolateNoise(Vector2 chunkPosition)
{
float[,] ret = new float[Chunk.CHUNK_TILE_WIDTH, Chunk.CHUNK_TILE_WIDTH];
// First octave spans the whole chunk; stride halves per octave below.
int stride = Chunk.CHUNK_TILE_WIDTH;
Vector2 chunkTilePos = chunkPosition * Chunk.CHUNK_TILE_WIDTH;
Vector2 strideOffset;
//interprolation values for stride regions.
float z0, z1, z2, z3;
//Currently we end up recalucating these A LOT... will work to fix that later on.
for (int period = 0; period < frequency; period++)
{
for (int x_stride = 0; x_stride * stride < Chunk.CHUNK_TILE_WIDTH; x_stride++)
{
for (int y_stride = 0; y_stride * stride < Chunk.CHUNK_TILE_WIDTH; y_stride++)
{
strideOffset = chunkTilePos + new Vector2(x_stride * stride, y_stride * stride);
// get region z corners.
z0 = GetZ((int)strideOffset.X, (int)strideOffset.Y);
z1 = GetZ((int)strideOffset.X + stride, (int)strideOffset.Y);
z2 = GetZ((int)strideOffset.X, (int)strideOffset.Y + stride);
z3 = GetZ((int)strideOffset.X + stride, (int)strideOffset.Y + stride);
//Console.WriteLine("[{4}]: {0}, {1}, {2}, {3} --- strideXY {5}/{6}", z0, z1, z2, z3, chunkPosition, x_stride, y_stride);
// Accumulate the bilinear weight of every tile in this region.
for (int x = stride * x_stride; x < (x_stride + 1) * stride; x++)
{
for (int y = y_stride * stride; y < (y_stride + 1) * stride; y++)
{
ret[x,y] += 5 * (GetWeight(x % stride, y % stride, stride, z0, z1, z2, z3));
//Console.Write("[{0}] ", ret[x,y]);
}
//Console.WriteLine();
}
}
}
stride /= 2;
}
return ret;
}
// Bilinear interpolation of the four corner heights at (x, y) within a
// stride-sized region.
// NOTE(review): divides by (stride - 1); if frequency were raised enough
// for stride to reach 1 this divides by zero (float Infinity/NaN) — confirm
// frequency is kept small enough to avoid that.
private float GetWeight(float x, float y, int stride, float z0, float z1, float z2, float z3)
{
/*
z0 *= 0.5f;
z1 *= 0.5f;
z2 *= 0.5f;
z3 *= 0.5f;
*/
float xw = (x) / (stride - 1);
float yw = (y) / (stride - 1);
return ((1f - yw) * (z0 + (xw * (z1 - z0)))) + (yw * (z2 + (xw * (z3 - z2))));
}
// Deterministic pseudo-random height in [0, 0.99] for a world coordinate,
// derived from the coordinate pair and the generator seed.
private float GetZ(int x, int y)
{
int zSeed = (int)Systems.MathHelper.MapCoordsToUniqueFloat(x,y);
Random rand = new Random(zSeed + seed);
return rand.Next(100) / 100f;
}
}
}
<file_sep>using isometricgame.GameEngine.Events.Arguments;
using isometricgame.GameEngine.Rendering;
using isometricgame.GameEngine.Rendering.Animation;
using isometricgame.GameEngine.Scenes;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Components.Rendering
{
/// <summary>
/// Sprite component that drives which VAO index is rendered each frame,
/// delegating the timing logic to an AnimationSchematic.
/// </summary>
public class AnimationComponent : SpriteComponent
{
    private AnimationSchematic animSchematic;
    private int currentNode;

    public AnimationComponent(AnimationSchematic schematic = null)
        : base()
    {
        animSchematic = schematic;
    }

    public void SetSchematic(AnimationSchematic schematic)
    {
        animSchematic = schematic;
    }

    public void SetNode(int node) => currentNode = node;

    public void DefineNode(int node, int[] subNodes) => animSchematic.DefineNode(node, subNodes);

    public void Pause(double time)
    {
        animSchematic.Pause(time);
    }

    public void Unpause(double time)
    {
        animSchematic.Unpause(time, currentNode);
    }

    // Each frame, ask the schematic which VAO index the current node shows.
    protected override void OnUpdate(FrameArgument args)
    {
        ParentObject.renderUnit.VAO_Index = animSchematic.GetVBO_Index(args.Time, currentNode);
    }
}
}
<file_sep>using isometricgame.GameEngine.Rendering;
using OpenTK;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Systems.Rendering
{
/// <summary>
/// Registry of loaded sprites, addressable either by numeric id (insertion
/// order) or by unique sprite name.
/// </summary>
public class SpriteLibrary : GameSystem
{
    internal List<Sprite> sprites = new List<Sprite>();
    private Dictionary<string, int> nameToIndex = new Dictionary<string, int>();

    public SpriteLibrary(Game gameRef)
        : base(gameRef)
    {
    }

    public override void Unload()
    {
        base.Unload();
    }

    /// <summary>Stores the sprite and returns its id; the name must be unique.</summary>
    public int RecordSprite(Sprite s)
    {
        int id = sprites.Count;
        nameToIndex.Add(s.Name, id);
        sprites.Add(s);
        return id;
    }

    public int GetSprite(string name)
    {
        return nameToIndex[name];
    }

    public void SetVAO(int id, int vao)
    {
        sprites[id].VAO_Index = vao;
    }

    public void SetVAO_Row(int id, int row)
    {
        sprites[id].VAO_Row = row;
    }

    public RenderUnit ExtractRenderUnit(string name) => ExtractRenderUnit(GetSprite(name));

    public RenderUnit ExtractRenderUnit(int id) => new RenderUnit(id, 0, Vector3.Zero);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTK;
using OpenTK.Graphics.OpenGL;
namespace isometricgame.GameEngine.Rendering
{
/// <summary>
/// A GPU-backed textured quad: four vertices plus the OpenGL buffer that
/// holds them.  Dispose releases the GL buffer.
/// </summary>
public class VertexArray : IDisposable
{
    public static readonly int VERTEXARRAY_INDEX_COUNT = 4;

    private Vertex[] vertices;
    private Texture2D texture;
    private int vertexBufferObject;
    private bool flippedX, flippedY;

    public Texture2D Texture => texture;

    /// <summary>
    /// Wraps a texture in a quad.  When no explicit vertices are supplied, a
    /// full-texture quad is generated (see VerticesFromDimensions).
    /// </summary>
    public VertexArray(Texture2D texture, Vertex[] vertices = null,
        float r = 0,
        float g = 0,
        float b = 0,
        float a = 0)
    {
        vertexBufferObject = GL.GenBuffer();

        if (vertices == null)
        {
            // -1/-1 sub-dimensions mean "use the whole texture".
            this.vertices = VerticesFromDimensions(texture.Width, texture.Height, -1, -1, 0, 0, r, g, b, a);
        }
        else
        {
            this.vertices = vertices;
        }

        flippedX = false;
        flippedY = false;
        this.texture = texture;
    }

    public void Use()
    {
        GL.BindTexture(TextureTarget.Texture2D, texture.ID);
    }

    //potentially remove?
    // NOTE(review): flipping rewrites texture coordinates assuming a
    // full-texture quad and drops any per-vertex color (the Vertex overload
    // used here takes no color) — it does not respect sub-region quads.
    // Confirm before relying on it.
    public void Flip(bool x=true, bool y=true)
    {
        if (x)
        {
            vertices[0] = new Vertex(new Vector2(0, 0), flippedX ? new Vector2(1, 0) : new Vector2(0, 0));
            vertices[2] = new Vertex(new Vector2(texture.Width, texture.Height), flippedX ? new Vector2(0, 1) : new Vector2(1, 1));
            flippedX = !flippedX;
        }
        if (y)
        {
            vertices[1] = new Vertex(new Vector2(0, texture.Height), flippedY ? new Vector2(1, 1) : new Vector2(0, 1));
            vertices[3] = new Vertex(new Vector2(texture.Width, 0), flippedY ? new Vector2(0, 0) : new Vector2(1, 0));
            flippedY = !flippedY;
        }
    }

    /// <summary>Binds the VBO and uploads the current vertex data.</summary>
    public void BindVertexBuffer()
    {
        GL.BindBuffer(BufferTarget.ArrayBuffer, vertexBufferObject);
        GL.BufferData(BufferTarget.ArrayBuffer, VERTEXARRAY_INDEX_COUNT * Vertex.SizeInBytes, vertices, BufferUsageHint.StaticDraw);
    }

    //remove
    public void UnbindVertexBuffer()
    {
        GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
    }

    public void Dispose()
    {
        GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
        GL.DeleteBuffer(vertexBufferObject);
    }

    /// <summary>
    /// Builds the four vertices of a textured quad.  subWidth/subHeight select
    /// one cell of a sprite sheet (indexX/indexY pick which cell); values
    /// <= 0 mean "use the full texture".
    /// BUG FIX: the quad's positions previously used subWidth/subHeight
    /// directly, so the constructor's default (-1, -1) call produced a
    /// degenerate quad; positions now fall back to the full width/height,
    /// mirroring how the texture coordinates already handled that case.
    /// </summary>
    /// <param name="width">Full texture width in pixels.</param>
    /// <param name="height">Full texture height in pixels.</param>
    /// <returns>Four vertices in the order LL, UL, UR, LR.</returns>
    public static Vertex[] VerticesFromDimensions(
        float width,
        float height,
        float subWidth=-1,
        float subHeight=-1,
        int indexX=0,
        int indexY=0,
        float r = 0,
        float g = 0,
        float b = 0,
        float a = 0)
    {
        // Effective size of the quad in position space.
        float quadWidth = (subWidth > 0) ? subWidth : width;
        float quadHeight = (subHeight > 0) ? subHeight : height;

        // Normalized size of one sheet cell in texture-coordinate space.
        float vertX = (subWidth > 0) ? subWidth / width : 1;
        float vertY = (subHeight > 0) ? subHeight / height : 1;

        float vertXi1 = vertX * indexX;
        float vertXi2 = vertX * (indexX + 1);
        float vertYi1 = vertY * indexY;
        float vertYi2 = vertY * (indexY + 1);

        return new Vertex[]
        {
            new Vertex(new Vector2(0, 0), new Vector2(vertXi1, vertYi2), r, g, b, a),
            new Vertex(new Vector2(0, quadHeight), new Vector2(vertXi1, vertYi1), r, g, b, a),
            new Vertex(new Vector2(quadWidth, quadHeight), new Vector2(vertXi2, vertYi1), r, g, b, a),
            new Vertex(new Vector2(quadWidth, 0), new Vector2(vertXi2, vertYi2), r, g, b, a),
        };
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Exceptions.Services
{
/// <summary>
/// Thrown when a requested game service/system has not been registered.
/// Provides the standard exception constructor set (parameterless, message,
/// message + inner) per .NET exception design guidelines.
/// </summary>
public class ServiceNotFoundException : Exception
{
    public ServiceNotFoundException()
    {
    }

    public ServiceNotFoundException(string message)
        : base(message)
    {
    }

    public ServiceNotFoundException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace isometricgame.GameEngine.Events.Arguments
{
/// <summary>
/// Immutable per-frame timing data passed through the update pipeline.
/// </summary>
public class FrameArgument
{
    /// <summary>
    /// Total elapsed time since game launch.
    /// </summary>
    public readonly double Time;

    /// <summary>
    /// Time since last loop.
    /// </summary>
    public readonly double DeltaTime;

    public FrameArgument(double time, double deltaTime)
    {
        Time = time;
        DeltaTime = deltaTime;
    }
}
}
<file_sep>using isometricgame.GameEngine.Rendering;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTK.Graphics.OpenGL;
using OpenTK;
using isometricgame.GameEngine.WorldSpace;
namespace isometricgame.GameEngine.Systems.Serialization
{
/// <summary>
/// Loads textures and sprites from disk for the engine.
/// </summary>
public class AssetProvider : GameSystem
{
    public AssetProvider(Game game)
        : base(game)
    {
    }

    #region Texture2Ds
    /// <summary>
    /// Loads a bitmap from disk and wraps it in a Texture2D.
    /// </summary>
    /// <param name="filePath">Path of the image file to load.</param>
    /// <param name="pixelated">Forwarded to Texture2D (nearest-neighbor sampling).</param>
    /// <exception cref="FileNotFoundException">
    /// The file does not exist; the exception now carries the offending path
    /// instead of being thrown bare.
    /// </exception>
    public Texture2D LoadTexture(string filePath, bool pixelated = true)
    {
        if (!File.Exists(filePath))
            throw new FileNotFoundException("Texture file not found.", filePath);

        // NOTE(review): the Bitmap is handed to Texture2D without being
        // disposed here — confirm Texture2D takes ownership of it.
        Bitmap bmp = new Bitmap(filePath);
        Texture2D texture = new Texture2D(bmp, pixelated);
        return texture;
    }
    #endregion

    #region Sprites
    /// <summary>
    /// Loads a texture and builds a Sprite over it.  Width/height default to
    /// the full texture when not positive; the name defaults to the file
    /// path; pixel offsets are normalized against the texture dimensions.
    /// </summary>
    public Sprite ExtractSpriteSheet(
        string filePath,
        string name = null,
        int width=-1,
        int height=-1,
        float offsetX = 0,
        float offsetY = 0,
        float r = 0,
        float g = 0,
        float b = 0,
        float a = 0)
    {
        Texture2D texture = LoadTexture(filePath);
        return new Sprite(
            texture,
            (width > 0)
                ? width
                : texture.Width,
            (height > 0)
                ? height
                : texture.Height,
            (name == null)
                ? filePath
                : name,
            offsetX / texture.Width,
            offsetY / texture.Height,
            0,
            r,
            g,
            b,
            a);
    }
    #endregion

    // The large block of commented-out legacy sprite-sheet loaders that used
    // to live here was dead code and has been removed; recover it from
    // version-control history if it is ever needed again.
}
}
| 0f9348e0e997863ac15acff29e6b22e8a7123592 | [
"C#"
] | 32 | C# | Hengle/isometricgame | 80306da6096ae82fd6be4d587cefecd1d251ae36 | 6d7571ae0ec1b6f702d31b09cff6b2fc93146a71 | |
refs/heads/master | <repo_name>mv123453715/CCU_2019_Data_Structure<file_sep>/HW1/Linux版本/Minimize_the_table_size.c
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define bool int
#define false 0
#define true 1
/*
 * HW1: classify (point1, point2) queries against a wildcard rule table read
 * from input.txt; write results to output.txt and echo the parsed input to
 * stdout.  Fixes over the original:
 *  - initializers used the multi-character constant '/0' (not '\0');
 *    {{0}} zero-fills the arrays properly,
 *  - fscanf was given &array (char (*)[1000]) for %s instead of the decayed
 *    char*, and reads are now width-bounded (%999s),
 *  - fopen results are checked,
 *  - main has an explicit int return type and return value.
 */
int main(void)
{
    /* Rule table: up to 100 rules of (point1, point2) -> port. */
    int rule_number = 0;
    char rule_point1[100][1000] = {{0}};
    char rule_point2[100][1000] = {{0}};
    char rule_port[100][1000] = {{0}};

    /* Queries to classify. */
    int input_number = 0;
    char input_point1[100][1000] = {{0}};
    char input_point2[100][1000] = {{0}};

    bool isfind_port = true;
    int i = 0;
    int j = 0;

    FILE *fin = fopen("input.txt", "r");
    if (fin == NULL) {
        printf("cannot open input.txt\n");
        return 1;
    }
    fscanf(fin, "%d\n", &rule_number);
    for (i = 0; i < rule_number; i++) {
        /* %999s bounds each token; arrays decay to char*, no & needed. */
        fscanf(fin, "%999s %999s %999s\n", rule_point1[i], rule_point2[i], rule_port[i]);
    }

    fscanf(fin, "%d\n", &input_number);
    for (i = 0; i < input_number; i++) {
        fscanf(fin, "%999s %999s\n", input_point1[i], input_point2[i]);
    }
    fclose(fin);

    FILE *fout = fopen("output.txt", "w");
    if (fout == NULL) {
        printf("cannot open output.txt\n");
        return 1;
    }
    fprintf(fout, "%d\n", input_number);
    for (i = 0; i < input_number; i++) {
        fprintf(fout, "%s ", input_point1[i]);
        fprintf(fout, "%s ", input_point2[i]);
        /*
         * First matching rule wins.  A rule field of "*" is a wildcard, so a
         * rule matches when each field is either "*" or equal to the query
         * field (this folds the original's four explicit cases).  If no rule
         * matches, the packet is dropped.
         */
        for (j = 0; j < rule_number; j++) {
            bool p1_match = (strcmp(rule_point1[j], "*") == 0) ||
                            (strcmp(input_point1[i], rule_point1[j]) == 0);
            bool p2_match = (strcmp(rule_point2[j], "*") == 0) ||
                            (strcmp(input_point2[i], rule_point2[j]) == 0);
            if (p1_match && p2_match) {
                fprintf(fout, "%s", rule_port[j]);
                isfind_port = true;
                break;
            }
            isfind_port = false;
        }
        if (isfind_port == false) {
            fprintf(fout, "drop");
            isfind_port = true;
        }
        fprintf(fout, "\n");
    }
    fclose(fout);

    /* Debug echo of the parsed input (same output bytes as the original). */
    printf("%d\n", rule_number);
    for (i = 0; i < rule_number; i++) {
        printf("%s ", rule_point1[i]);
        printf("%s ", rule_point2[i]);
        printf("%s\n", rule_port[i]);
    }
    printf("%d\n", input_number);
    for (i = 0; i < input_number; i++) {
        printf("%s ", input_point1[i]);
        printf("%s\n", input_point2[i]);
    }
    return 0;
}
<file_sep>/HW1/Minimize_the_table_size_opt.c
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define MAX_LINE 1024
#define bool int
#define false 0
#define true 1
/*
 * HW1 (single-character variant): classify (point1, point2) queries against a
 * wildcard rule table from input.txt and write results to output.txt.
 * Fixes over the original:
 *  - the output header wrote rule_number, but it must be the number of
 *    queries (input_number) — the full-string sibling version writes
 *    input_number here,
 *  - fopen results are checked,
 *  - main has an explicit int return type and return value,
 *  - the char star[1] = "*" helper (which silently dropped the NUL) is
 *    replaced by the character literal '*'.
 */
int main(void)
{
    /* Rules: single-character fields (point1, point2) -> port. */
    int rule_number = 0;
    char rule_point1[1024];
    char rule_point2[1024];
    char rule_port[1024];

    /* Queries. */
    int input_number = 0;
    char input_point1[1024];
    char input_point2[1024];

    bool isfind_port = true;

    FILE *fin = fopen("input.txt", "r");
    if (fin == NULL) {
        printf("cannot open input.txt\n");
        return 1;
    }
    fscanf(fin, "%d\n", &rule_number);
    for (int i = 0; i < rule_number; i++) {
        fscanf(fin, "%c ", &rule_point1[i]);
        fscanf(fin, "%c ", &rule_point2[i]);
        fscanf(fin, "%c\n", &rule_port[i]);
    }
    fscanf(fin, "%d\n", &input_number);
    for (int i = 0; i < input_number; i++) {
        fscanf(fin, "%c ", &input_point1[i]);
        fscanf(fin, "%c\n", &input_point2[i]);
    }
    fclose(fin);

    FILE *fout = fopen("output.txt", "w");
    if (fout == NULL) {
        printf("cannot open output.txt\n");
        return 1;
    }
    /* BUG FIX: header is the query count, not the rule count. */
    fprintf(fout, "%d\n", input_number);
    for (int i = 0; i < input_number; i++) {
        fprintf(fout, "%c ", input_point1[i]);
        fprintf(fout, "%c ", input_point2[i]);
        /* First matching rule wins; '*' is a wildcard for either field. */
        for (int j = 0; j < rule_number; j++) {
            bool p1_match = (rule_point1[j] == '*') || (input_point1[i] == rule_point1[j]);
            bool p2_match = (rule_point2[j] == '*') || (input_point2[i] == rule_point2[j]);
            if (p1_match && p2_match) {
                fprintf(fout, "%c", rule_port[j]);
                isfind_port = true;
                break;
            }
            isfind_port = false;
        }
        if (isfind_port == false) {
            fprintf(fout, "drop");
            isfind_port = true;
        }
        fprintf(fout, "\n");
    }
    fclose(fout);
    return 0;
}
<file_sep>/README.md
# CCU_2019_Data_Structure
## HW1 : Minimize the table size (network)
## HW2 : Loop-Free Route Updates for Software-Defined Networks (TON, 2018)
## HW3 : Accept or reject a flow with 2-segment routing “Optimized Network Traffic Engineering using Segment Routing,” in IEEE INFOCOM 2015
## HW4 :“Constant query time (1+𝜖)-approximate distance oracle for planar graphs,” in TCS 2019
<file_sep>/HW4/DS_HW4_406410114_Exact_Distance_Query.c
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdbool.h>
#include <limits.h>
#include <math.h>
/* Hash-table bucket count and maximum vertex count for the distance oracle. */
#define SIZE 100
#define V 100
/* One weighted edge of the input graph, as read from the input. */
typedef struct link_input{
int linkID;
int firstNodeID;
int secondNodeID; // sorting
double distance;
} link_input;
/* Adjacency-list node: neighbor id, edge weight, next neighbor. */
typedef struct node{
int point;
double weight;
struct node *next;
} node;
/* One (source, destination) distance query. */
typedef struct flow_input{
int flowID;
int sourceID;
int destinationID; // sorting
} flow_input;
/* Hash-table record: the distance between node1 and node2; collisions are
 * resolved by chaining through next (see insert). */
struct DataItem {
int node1;
int node2;
double distance;
int key;
struct DataItem *next;
};
/* Per-node oracle bookkeeping: whether the node is a sample, its nearest
 * sample (distance, id, and column index), its distance to every sample, and
 * the list of nodes strictly closer than its nearest sample. */
typedef struct node_dis{
char type;// 's':sample 'n' :not sample
double shortest_dis;
int shortest_dis_sample;
int shortest_dis_sample_index;
double distance[100];
int sml_than_sht_num;
int sml_than_sht_node[100];
double sml_than_sht_dis[100];
} node_dis;
/* NOTE(review): dummyItem and item appear unused by the code visible here
 * (insert declares its own local item) — confirm before removing. */
struct DataItem* dummyItem;
struct DataItem* item;
/* Global problem sizes and oracle parameters (filled by ReadInput and main). */
int node_num = 0,link_num =0,flow_num =0,n,hash_prime,sample_node_num,nd_dis_smp_i = 0,hash_pair_num = 0,hash_tb_size;
int sample_node[100];
node_dis node_dis_sample[100];
double wei_graph[100][100];
link_input link_info[10000];
flow_input flow_path[10000];
node node_info[1000] = { 0,0,NULL };
int hashCode(int key );
//struct DataItem* hashArray = malloc(sizeof(struct DataItem) * hash_tb_size);
/* Bucket heads for the chained hash table. */
struct DataItem* hashArray[SIZE];
struct DataItem *search(int key ) ;
void insert(int key,int node1,int node2,double distance);
int compute_key(int node1,int node2 );
void display();
void ReadInput();
void create_wei_graph( node node_info[] );
int minDistance(int dist[], bool sptSet[]);
int printSolution(int dist[]);
void dijkstra(double graph[V][V], int src,char mode[]) ;
int seclect_next_sample();
void Debug_node_dis_sample();
double pair_distance_search( int src,int des );
void store_to_hash_tb();
/*
 * Driver for the approximate distance oracle:
 *  1) read the graph and queries,
 *  2) pick ~sqrt(n) sample nodes by farthest-point sampling,
 *  3) precompute distances into the hash table,
 *  4) answer each (src, dst) query.
 * Cleanup over the original: unused locals j, k removed; local renamed
 * pari_distance -> pair_dist.
 */
int main() {
    int i;
    double pair_dist;

    ReadInput();

    /* Oracle sizing: ~n^1.5 table slots, ~sqrt(n) sample nodes. */
    hash_tb_size = ceil(pow((float)node_num, 1.5));
    sample_node_num = ceil(sqrt((float)node_num));

    /* Initial count of (node, sample) pairs that will be stored. */
    for (i = node_num - sample_node_num; i < node_num; i++)
        hash_pair_num += i;

    create_wei_graph(node_info);

    /* Initially no node is a sample and no short-pair lists exist. */
    for (i = 0; i < node_num; i++) {
        node_dis_sample[i].type = 'n';
        node_dis_sample[i].sml_than_sht_num = 0;
    }

    /* Node 0 seeds the sample set; each Dijkstra pass records distances to
     * the current sample, then the farthest node becomes the next sample. */
    sample_node[0] = 0;
    node_dis_sample[0].type = 's';
    for (i = 0; i < sample_node_num; i++) {
        dijkstra(wei_graph, sample_node[i], "sample_node");
        sample_node[nd_dis_smp_i] = seclect_next_sample();
    }

    /* Record every pair closer than each node's nearest sample. */
    for (i = 0; i < node_num; i++) {
        dijkstra(wei_graph, i, "smaller_than_shortest_dis");
    }

    store_to_hash_tb();
    printf("%d\n", hash_pair_num);

    /* Answer the distance queries. */
    for (i = 0; i < flow_num; i++) {
        pair_dist = pair_distance_search(flow_path[i].sourceID, flow_path[i].destinationID);
        printf("%d %d %d %.0lf\n", i, flow_path[i].sourceID, flow_path[i].destinationID, pair_dist);
    }
    //Debug_node_dis_sample();
    return 0;
}
/* Single-source shortest paths (Dijkstra, O(V^2)) from src over the global
 * node_num x node_num weight matrix.  The mode string selects a side effect:
 *   "sample_node"               - append this source's distance column to
 *                                 node_dis_sample[i].distance[] for every i
 *                                 and bump nd_dis_smp_i (src is a sample);
 *   "smaller_than_shortest_dis" - for a non-sample src, record every
 *                                 non-sample node whose distance is <= src's
 *                                 nearest-sample distance (and count it in
 *                                 hash_pair_num).
 * NOTE(review): dist[] is int while graph[][] holds doubles, so fractional
 * edge weights are truncated on every relaxation — confirm weights are
 * integral, or change dist to double (printSolution/minDistance take int[]
 * and would need the same change, so it is not fixed in this block alone). */
void dijkstra(double graph[V][V], int src,char mode[]) // shortest path
{
// NOTE(review): path/path_i/sou_des_trace/sdt_i only feed commented-out trace
// code below; apart from the -1 fill they are dead.
int path[V],path_i=0,sou_des_trace[V][V],sdt_i = 0;
int dist[V];
// distance from src to i
for ( int i = 0 ;i<node_num;i++){
for(int j= 0 ;j<node_num ;j++)
sou_des_trace[i][j] = -1;
}//for
bool sptSet[V]; // sptSet[i] will be true if vertex i is included in shortest
// path tree or shortest distance from src to i is finalized
// Initialize all distances as INFINITE and stpSet[] as false
for (int i = 0; i < node_num; i++)
dist[i] = INT_MAX, sptSet[i] = false; // all distances start maximal; nothing picked yet
// Distance of source vertex from itself is always 0
dist[src] = 0; // source vertex
// Find shortest path for all vertices
for (int count = 0; count < node_num - 1; count++) {
// Pick the minimum distance vertex from the set of vertices not
// yet processed. u is always equal to src in the first iteration.
int u = minDistance(dist, sptSet);
// Mark the picked vertex as processed
sptSet[u] = true; // mark as visited
//path[path_i] = u; // (dead) record traversal order
//path_i++;
// Update dist value of the adjacent vertices of the picked vertex.
for (int v = 0; v < node_num; v++){
//sdt_i=0;
// Update dist[v] only if is not in sptSet, there is an edge from
// u to v, and total weight of path from src to v through u is
// smaller than current value of dist[v]
if (!sptSet[v] && graph[u][v] && dist[u] != INT_MAX
&& dist[u] + graph[u][v] < dist[v]){ // v unvisited, edge exists, u reachable, relaxed path shorter
dist[v] = dist[u] + graph[u][v]; //
//sou_des_trace[v][sdt_i] = u;
//printf( "sou_des_trace[%d][sdt_i]:%d\n",v,sou_des_trace[v][sdt_i] );
//sdt_i++;
}//if
}//for
}//for
// print the constructed distance array
//printf( "%s\n",mode );
// append node to sample node distance
if ( strcmp(mode,"sample_node")==0 ){
//printf( "%c\n",mode );
for ( int i = 0; i<node_num;i++ )
node_dis_sample[i].distance[nd_dis_smp_i] =dist[i];
nd_dis_smp_i++;
}//if
bool record = false;
bool sample = false;
//smaller_than_shortest_distance
if ( strcmp(mode,"smaller_than_shortest_dis")==0 ){
for ( int i=0;i<node_num;i++ ){
record = false;
sample = false;
// Candidate i must be reachable within src's nearest-sample radius and not src itself.
if ((dist[i] <= node_dis_sample[src].shortest_dis) && ( dist[i]!=0 )){
// Skip sample nodes — their distances are already stored.
for ( int j = 0 ; j<sample_node_num;j++){
if ( sample_node[j] == i )
sample =true;
}//for
// Skip the pair if i already recorded (i, src) — presumably a
// symmetric-pair dedup; confirm intent.
for ( int j = 0;j<node_dis_sample[i].sml_than_sht_num;j++ ){
//printf( "node_dis_sample[%d].sml_than_sht_node[%d]:%d\n",i,j,node_dis_sample[i].sml_than_sht_node[j] );
//printf( "src:%d\n",src );
if ( node_dis_sample[i].sml_than_sht_node[j] == src )
record = true;
}//for
//it is not sample node and not record
if ( record == false && sample ==false && node_dis_sample[src].type == 'n' ){
node_dis_sample[src].sml_than_sht_node[node_dis_sample[src].sml_than_sht_num] = i;
node_dis_sample[src].sml_than_sht_dis[node_dis_sample[src].sml_than_sht_num] = dist[i];
node_dis_sample[src].sml_than_sht_num++;
hash_pair_num++;
}//false
}//if
}//for
}//if
printSolution(dist);
}//dijkstra
/*
 * Farthest-point sampling step: refresh every node's nearest-sample record
 * from the distance columns gathered so far, then return the node whose
 * nearest-sample distance is largest (marking it as the next sample while
 * more samples are still needed).
 * BUG FIX: longest_dis was read before ever being written (uninitialized ->
 * undefined behavior).  Distances are non-negative, so -1 is a safe sentinel
 * that any real distance beats.
 */
int seclect_next_sample(){
    int i, j;
    int max_dis_node = 0, sht_sample_node = 0;
    double shortest_dis = 0.0;
    double longest_dis = -1.0;

    /* Pass 1: for every node, find its nearest sample among the nd_dis_smp_i
     * distance columns recorded so far. */
    for (i = 0; i < node_num; i++) {
        for (j = 0; j < nd_dis_smp_i; j++) {
            if (j == 0 || node_dis_sample[i].distance[j] < shortest_dis) {
                shortest_dis = node_dis_sample[i].distance[j];
                sht_sample_node = j;
            }
        }
        node_dis_sample[i].shortest_dis = shortest_dis;
        node_dis_sample[i].shortest_dis_sample = sample_node[sht_sample_node];
        node_dis_sample[i].shortest_dis_sample_index = sht_sample_node;
    }

    /* Pass 2: pick the node farthest from its nearest sample. */
    for (i = 0; i < node_num; i++) {
        if (node_dis_sample[i].shortest_dis > longest_dis) {
            longest_dis = node_dis_sample[i].shortest_dis;
            max_dis_node = i;
        }
    }

    /* Only promote it to a sample while the sample set is still growing. */
    if (nd_dis_smp_i < sample_node_num)
        node_dis_sample[max_dis_node].type = 's';
    return max_dis_node;
}
/*
 * Materializes the distance oracle into the hash table:
 *  (1) the distance from every node to every sample node (skipping the
 *      degenerate self pair), and
 *  (2) every recorded shorter-than-nearest-sample pair.
 * Cleanup over the original: unused local k removed.
 */
void store_to_hash_tb(){
    int i, j, key;

    /* (node, sample) distances. */
    for (i = 0; i < node_num; i++) {
        for (j = 0; j < sample_node_num; j++) {
            if (i != sample_node[j]) {  /* a node never stores itself */
                key = compute_key(i, sample_node[j]);
                insert(key, i, sample_node[j], node_dis_sample[i].distance[j]);
            }
        }
    }

    /* Exact distances for pairs closer than the node's nearest sample. */
    for (i = 0; i < node_num; i++) {
        for (j = 0; j < node_dis_sample[i].sml_than_sht_num; j++) {
            key = compute_key(i, node_dis_sample[i].sml_than_sht_node[j]);
            insert(key, i, node_dis_sample[i].sml_than_sht_node[j], node_dis_sample[i].sml_than_sht_dis[j]);
        }
    }
}
/* Symmetric hash key for an unordered node pair: the product of the shifted
 * ids, reduced first by the prime and then by the table size (both globals). */
int compute_key(int node1, int node2) {
    int product = (node1 + 1) * (node2 + 1);
    return (product % hash_prime) % hash_tb_size;
}
/* Maps a key to its bucket index in [0, SIZE). */
int hashCode(int key) {
    int bucket = key % SIZE;
    return bucket;
}
/*
 * Appends a new (node1, node2, distance) record to the chain in the bucket
 * hashCode(key); collisions are resolved by separate chaining.
 * BUG FIX: the original malloc'd a scratch DataItem for the traversal cursor
 * and immediately overwrote the pointer, leaking one allocation on every
 * call.  A plain cursor pointer is all that is needed.
 */
void insert(int key, int node1, int node2, double distance) {
    struct DataItem *item = (struct DataItem*) malloc(sizeof(struct DataItem));
    item->node1 = node1;
    item->node2 = node2;
    item->distance = distance;
    item->key = key;
    item->next = NULL;

    int hashIndex = hashCode(key);

    if (hashArray[hashIndex] == NULL) {
        /* Empty bucket: the new record becomes the chain head. */
        hashArray[hashIndex] = item;
    } else {
        /* Walk to the end of the chain and append. */
        struct DataItem *cur = hashArray[hashIndex];
        while (cur->next != NULL)
            cur = cur->next;
        cur->next = item;
    }
}
struct DataItem *search(int key) {
//get the hash
int hashIndex = hashCode(key);
//move in array until an empty
while(hashArray[hashIndex] != NULL) {
if(hashArray[hashIndex]->key == key)
return hashArray[hashIndex];
//go to next cell
++hashIndex;
//wrap around the table
hashIndex %= SIZE;
}
return NULL;
}
/* Answers one distance query (src, des) from the precomputed oracle data.
 * Exact per-sample distances are used when either endpoint is a sample;
 * otherwise the estimate routes through each endpoint's nearest sample and
 * the smaller of the two directions is returned, except when the pair was
 * recorded as closer than a sample (then the exact stored value is used).
 * NOTE(review): locals j and k are unused; `index` is read uninitialized if
 * no entry of sample_node[] matches (should be impossible while type flags
 * stay consistent with sample_node[] — confirm); control falls off the end
 * without a return if a type byte is neither 's' nor 'n'. */
double pair_distance_search( int src,int des ){
//printf( "src:%d des:%d\n",src,des );
int i,j,k;
int index;
double distance;
double src_to_des_dis ,des_to_src_dis;
//case 1: src == des -> distance 0
//case 2: both endpoints are samples -> read des's column in src's table
//case 3: src is a sample, des is not -> read src's column in des's table
//case 4: src is not a sample, des is -> read des's column in src's table
//case 5: neither is a sample -> compare the two sample-routed estimates
if ( src == des )
return 0.0;
else if ( node_dis_sample[src].type == 's' && node_dis_sample[des].type == 's' ){
for ( i = 0 ;i<sample_node_num;i++ ){
if ( sample_node[i] == des )
index = i;
}//for
distance = node_dis_sample[src].distance[index] ;
return distance;
}//if
else if ( node_dis_sample[src].type == 's' && node_dis_sample[des].type == 'n' ){
//printf( "s,n\n" );
for ( i = 0 ;i<sample_node_num;i++ ){
if ( sample_node[i] == src )
index = i;
}//for
//printf( "index;%d\n",index );
distance = node_dis_sample[des].distance[index] ;
return distance;
}//else if
else if ( node_dis_sample[src].type == 'n' && node_dis_sample[des].type == 's' ){
for ( i = 0 ;i<sample_node_num;i++ ){
if ( sample_node[i] == des )
index = i;
}//for
distance = node_dis_sample[src].distance[index] ;
return distance;
}//else if
else if ( node_dis_sample[src].type == 'n' && node_dis_sample[des].type == 'n' ){
//compute source to destination: exact value if the pair was recorded as
//closer than src's nearest sample, otherwise route through that sample.
//printf( "src:%d des:%d\n",src,des );
//printf( "=========================compute source to destination=====================\n" );
bool is_smaller_than_shortest_dis = false;
for ( i = 0 ;i<node_dis_sample[src].sml_than_sht_num;i++ ){
if ( des == node_dis_sample[src].sml_than_sht_node[i] ){
//printf( "is_smaller_than_shortest_dis is true\n" );
is_smaller_than_shortest_dis = true;
src_to_des_dis = node_dis_sample[src].sml_than_sht_dis[i];
}//if
}//for
if ( is_smaller_than_shortest_dis == false ){
//printf( "is_smaller_than_shortest_dis is false\n" );
src_to_des_dis = node_dis_sample[src].shortest_dis
+ node_dis_sample[des].distance[node_dis_sample[src].shortest_dis_sample_index];
//printf( "node_dis_sample[src].shortest_dis:%lf\n",node_dis_sample[src].shortest_dis );
//printf( "node_dis_sample[des].distance[node_dis_sample[src].shortest_dis_sample_index]:%lf\n",node_dis_sample[des].distance[node_dis_sample[src].shortest_dis_sample_index] );
//printf ( "src_to_des_dis:%lf\n",src_to_des_dis );
}//if
//compute destination to source: same logic mirrored for des's records.
//printf( "=========================compute destination to source=====================\n" );
is_smaller_than_shortest_dis = false;
for ( i = 0 ;i<node_dis_sample[des].sml_than_sht_num;i++ ){
if ( src == node_dis_sample[des].sml_than_sht_node[i] ){
//printf( "is_smaller_than_shortest_dis is true\n" );
is_smaller_than_shortest_dis = true;
//printf( "node_dis_sample[des].sml_than_sht_dis[%d]:%lf\n",i,node_dis_sample[des].sml_than_sht_dis[i] );
//printf( "src:%d,i:%d\n",src,i );
des_to_src_dis = node_dis_sample[des].sml_than_sht_dis[i];
//printf ( "des_to_src_dis:%lf\n",des_to_src_dis );
}//if
}//for
if ( is_smaller_than_shortest_dis == false ){
//printf( "is_smaller_than_shortest_dis is false\n" );
des_to_src_dis = node_dis_sample[des].shortest_dis
+ node_dis_sample[src].distance[node_dis_sample[des].shortest_dis_sample_index];
//printf( "node_dis_sample[des].shortest_dis:%lf\n",node_dis_sample[des].shortest_dis );
//printf( "node_dis_sample[src].distance[node_dis_sample[des].shortest_dis_sample_index]:%lf\n",node_dis_sample[src].distance[node_dis_sample[des].shortest_dis_sample_index] );
//printf ( "des_to_src_dis:%lf\n",des_to_src_dis );
}//if
//take smaller distance
//printf ( "des_to_src_dis:%lf\n",des_to_src_dis );
if ( src_to_des_dis<des_to_src_dis )
distance = src_to_des_dis;
else
distance = des_to_src_dis;
return distance;
}//else if
}//pair_distance_search
void display() {
    /* Debug helper: print every (key, node1, node2, distance) entry stored
     * in the hash table's collision chains, then the total entry count.
     * Read-only; no allocation is needed for traversal (the original
     * malloc'ed a DataItem and immediately overwrote the pointer — a leak
     * on every non-empty bucket). */
    int i = 0, num = 0;
    printf( "key , node1, node2,distance\n" );
    for (i = 0; i < SIZE; i++) {
        if (hashArray[i] != NULL) {
            struct DataItem *now_ptr = hashArray[i];
            while (now_ptr != NULL) {
                printf(" (%d,%d,%d,%lf)\n", now_ptr->key, now_ptr->node1, now_ptr->node2, now_ptr->distance);
                now_ptr = now_ptr->next;
                num++;
            }//while
        }//if
    }
    printf("num:%d\n", num);
}
void ReadInput(){
//*****************************************
//******************************************
//int input_number = 0;
int input = 0;
double d_input ;
//determine rule
int i=0;
//file input rule
//FILE *fin;
//fin=fopen(filename,"r");
scanf("%d",&node_num);
//printf( "node_num:%d\n",node_num );
scanf("%d",&link_num);
scanf("%d",&hash_prime);
//printf( "node_num:%d\n",node_num );
for (i=0;i<link_num;i++)
{
scanf("%d",&input);
link_info[i].linkID = input ;
scanf("%d",&input);
link_info[i].firstNodeID = input ;
scanf("%d",&input);
link_info[i].secondNodeID = input ;
scanf("%lf",&d_input);
link_info[i].distance = d_input ;
}//for
scanf("%d",&flow_num);
//printf( "flow_num:%d\n",flow_num );
for (i=0;i<flow_num;i++)
{
scanf("%d",&input);
flow_path[i].flowID = input ;
scanf("%d",&input);
flow_path[i].sourceID = input ;
scanf("%d",&input);
flow_path[i].destinationID = input ;
}//for
//int node_index = -1 ;
//int max_finode_id = link_info[node_num-1].firstNodeID ;
node *now_ptr = (node *) malloc(sizeof(node));
for ( i=0;i<node_num;i++ ){
now_ptr = &node_info[node_num];
now_ptr->next = NULL;
}//for
//create Adjacency Lists use pointer array
for ( i=0;i<link_num;i++ ){
//printf( "-----------------------round--------------------------\n" );
//printf( "link_info[i].firstNodeID :%d\n",link_info[i].firstNodeID );
//printf( "node_index :%d\n ",node_index );
//first node
now_ptr = &node_info[link_info[i].firstNodeID];
node *new_ptr = (node *) malloc(sizeof(node));
while ( now_ptr->next != NULL )
now_ptr = now_ptr->next;
new_ptr->point = link_info[i].secondNodeID;
//printf( "new_ptr->point:%d\n",new_ptr->point );
new_ptr->weight = link_info[i].distance;
//printf( "new_ptr->linkCapacity:%d\n",new_ptr->linkCapacity );
now_ptr->next = new_ptr;
now_ptr = now_ptr->next;
now_ptr->next = NULL;
//second node
now_ptr = &node_info[link_info[i].secondNodeID];
node *new_ptr2 = (node *) malloc(sizeof(node));
while ( now_ptr->next != NULL )
now_ptr = now_ptr->next;
new_ptr2->point = link_info[i].firstNodeID;
//printf( "new_ptr2->point:%d\n",new_ptr2->point );
new_ptr2->weight = link_info[i].distance;
//printf( "new_ptr2->linkCapacity:%d\n",new_ptr2->linkCapacity );
now_ptr->next = new_ptr2;
now_ptr = now_ptr->next;
now_ptr->next = NULL;
}//for
}//ReadInput
void create_wei_graph( node node_info[] ){
    /* Rebuild the node_num x node_num weight matrix wei_graph from the
     * adjacency lists in node_info[].  Entries with no edge stay 0. */
    int i,j;
    for ( i=0;i<node_num;i++ ){
        for ( j=0;j<node_num;j++ ){
            wei_graph[i][j]= 0;
        }//for
    }//for
    for ( i=0;i<node_num;i++ ){
        /* One walk per row suffices.  The original repeated the identical
         * walk node_num times (redundant O(n) factor) and leaked a
         * malloc'ed node on every inner iteration. */
        node *now_ptr = &node_info[i];
        while ( now_ptr->next != NULL ){
            wei_graph[i][now_ptr->next->point] = now_ptr->next->weight;
            now_ptr = now_ptr->next;
        }//while
    }//for
}//create_wei_graph
int minDistance(int dist[], bool sptSet[])
{
    /* Return the index of the unvisited vertex (sptSet[v] == false) with
     * the smallest tentative distance among vertices 0..node_num-1.
     * Returns -1 when every vertex is already visited; the original left
     * min_index uninitialized in that case (undefined behavior). */
    int min = INT_MAX, min_index = -1;
    for (int v = 0; v < node_num; v++)
        if (sptSet[v] == false && dist[v] <= min){
            min = dist[v];      // track the smallest distance and its index
            min_index = v;
        }//if
    return min_index;
}
int printSolution(int dist[])
{
    /* Debug helper: print the distance of every vertex from the source.
     * Returns 0; the original was declared int but fell off the end
     * without a return statement. */
    printf("Vertex \t\t Distance from Source\n");
    for (int i = 0; i < node_num; i++)
        printf("%d \t\t %d\n", i, dist[i]);
    return 0;
}
void Debug_node_dis_sample(){
    /* Debug dump: list the sampled landmark nodes, then every node's
     * cached distance data (type, shortest distance to a sample, the
     * per-sample distance table, and the shorter-than-shortest lists). */
    int a, b;
    printf( "================sample node====================\n" );
    for ( a = 0; a < nd_dis_smp_i; a++ )
        printf( "sample_node[%d]:%d\n", a, sample_node[a] );
    printf( "================Debug_node_dis_sample====================\n" );
    for ( a = 0; a < node_num; a++ ){
        printf("node_dis_sample[%d].type:%c\n", a, node_dis_sample[a].type);
        printf("node_dis_sample[%d].shortest_dis:%lf\n", a, node_dis_sample[a].shortest_dis);
        printf("node_dis_sample[%d].sml_than_sht_num:%d\n", a, node_dis_sample[a].sml_than_sht_num);
        printf("node_dis_sample[%d].shortest_dis_sample:%d\n", a, node_dis_sample[a].shortest_dis_sample);
        printf("node_dis_sample[%d].shortest_dis_sample_index:%d\n", a, node_dis_sample[a].shortest_dis_sample_index);
        for ( b = 0; b < nd_dis_smp_i; b++ )
            printf("node_dis_sample[%d].distance[%d]:%lf\n", a, b, node_dis_sample[a].distance[b]);
        for ( b = 0; b < node_dis_sample[a].sml_than_sht_num; b++ ){
            printf( "node_dis_sample[%d].sml_than_sht_node[%d]:%d\n", a, b, node_dis_sample[a].sml_than_sht_node[b] );
            printf( "node_dis_sample[%d].sml_than_sht_dis[%d]:%lf\n", a, b, node_dis_sample[a].sml_than_sht_dis[b] );
        }//for
    }//for
}//Debug_node_dis_sample
<file_sep>/HW2/CODE/DS_HW2_406410114.c
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdbool.h>
//node_struct
// Node of the pointer-doubling list: solid line (original_path) evolves
// toward the answer (solution_path); dotted line (next_path) is the proposal.
typedef struct node{
    int point;                     // node id (index into node_point[])
    char type[10] ;                // "red", "black" or "white"
    int white_index; // sorting
    int heap;
    struct node *original_path;    // current solid-line successor
    struct node *next_path;        // dotted-line (proposed) successor
    struct node *solution_path;    // final answer successor
} node;
//update_buffer
// One accepted update produced by the odd phase and consumed by the even phase.
typedef struct update_infor{
    int update_point ;             // node that was shortcut
    int distance_max ;             // white_index distance of that shortcut
    int white_index;               // white_index of update_point at accept time
    int heap_same;                 // 1 if both endpoints were on the same heap
} update_infor;
// number: node count read from input.txt; turn: presumably a round counter — unused in this chunk, confirm.
int number = 0,turn = 1;
void Debug_Print_Node_Point( node node_point[],int number );
node * ReadInput( node node_point[], char *filename );
void * PointRedConnectNextpath( node node_point[]);
void Print_Node_Point( node node_point[],int number );
void * AddPointType( node node_point[]);
//void * Odd_Shortcut_Phase( node node_point[],int up_info[]);
void * Odd_Shortcut_Phase_Mnay( node node_point[],update_infor up_info[]);
//void * Even_Prune_Phase( node node_point[],int update_point,int distance_max);
void * Even_Prune_Phase_Many( node node_point[],update_infor up_info[]);
// NOTE(review): the IsFinish* functions are declared int* but return plain
// 0/1 flags that callers assign to int — the type should probably be int.
int * IsFinishOddEven( node node_point[]);
void * PointBlackDisconnectNextpath( node node_point[]);
int * IsFinishRed( node node_point[]);
int * IsFinishBlack( node node_point[]);
// Shared odd-phase -> even-phase update buffer, reset by main() each round.
update_infor up_info_many[1000] = {0,0};
int main()
{
    //*****************************************
    // Driver: read the list from input.txt, classify nodes, then run the
    // odd (shortcut) / even (prune) phases until every node's solid line
    // matches the answer, recording the solid-line state after each round
    // in result[round][node] and printing all rounds at the end.
    //1.ReadInput
    //2.add type
    //3.PointRedConnectNextpath
    //4.odd and even
    //5.PointBlackDisconnectNextpath
    //6.print_result
    //******************************************
    node node_point[1000];
    int round = 0,i = 0,isfinoddeven = 0,j = 0,isfinred = 0,isfinblack=0;
    int debug_i =0;
    int result[1000][100];
    //1.ReadInput
    ReadInput( node_point, "input.txt" );
    //2.add type
    //if no point return 0
    if ( number == 0 )
        return 0;
    AddPointType( node_point);
    //store result (snapshot of every node's solid-line target, -1 for none)
    for ( i = 0 ; i < number ; i++ ){
        if ( node_point[i].original_path == NULL )
            result[round][i] = -1;
        else
            result[round][i] = node_point[i].original_path->point ;
    }//for
    round++;
    isfinred = IsFinishRed( node_point);
    //3.PointRedConnectNextpath — one extra round only if a red node exists
    for ( j =0 ; j < number ; j++ ){
        if ( isfinred == 0 )
            break;
        if((strcmp(node_point[j].type,"red") == 0)){
            PointRedConnectNextpath( node_point);
            //store result
            for ( i = 0 ; i < number ; i++ ){
                if ( node_point[i].original_path == NULL )
                    result[round][i] = -1;
                else
                    result[round][i] = node_point[i].original_path->point ;
            }//for
            round++;
            break;
        }//if
    }//for
    //determine IsFinishOddEven
    isfinoddeven = IsFinishOddEven( node_point);
    //4.odd and even — alternate until the solid lines match the answer
    for ( round ; isfinoddeven == 0 ;debug_i++ ){
        // reset the shared update buffer before each odd phase
        for ( j = 0 ; j <1000;j++ ){
            up_info_many[j].update_point = 0;
            up_info_many[j].distance_max = 0;
            up_info_many[j].white_index = 0;
        }//for
        //odd
        Odd_Shortcut_Phase_Mnay( node_point,up_info_many);
        //store result
        for ( i = 0 ; i < number ; i++ ){
            if ( node_point[i].original_path == NULL )
                result[round][i] = -1;
            else
                result[round][i] = node_point[i].original_path->point ;
        }//for
        round++;
        //check is_finish
        if ( IsFinishOddEven( node_point)==true )
            break;
        //even
        Even_Prune_Phase_Many( node_point,up_info_many);
        //store result
        for ( i = 0 ; i < number ; i++ ){
            if ( node_point[i].original_path == NULL )
                result[round][i] = -1;
            else
                result[round][i] = node_point[i].original_path->point ;
        }//for
        round++;
        isfinoddeven = IsFinishOddEven( node_point);
    }//for
    //5.PointBlackDisconnectNextpath — one extra round only if a black node exists
    isfinblack =IsFinishBlack( node_point);
    for ( j =0 ; j < number && isfinblack!=0 ; j++ ){
        if((strcmp(node_point[j].type,"black") == 0)){
            PointBlackDisconnectNextpath( node_point);
            //store result
            for ( i = 0 ; i < number ; i++ ){
                if ( node_point[i].original_path == NULL )
                    result[round][i] = -1;
                else
                    result[round][i] = node_point[i].original_path->point ;
            }//for
            round++;
            break;
        }//if
    }//for
    //6.print_result: round count, then one space-separated line per round
    printf( "%d\n",round );
    for ( i = 0 ; i < round ; i++ ){
        for ( j = 0; j <number ; j++ ){
            printf( "%d",result[i][j] );
            if ( j < number-1 )
                printf( " " );
        }//for
        printf( "\n" );
    }//for
    return 0 ;
}//main
//1.ReadInput
node * ReadInput( node node_point[], char *filename ){
    //*****************************************
    // Read the node list from the given file:
    //   number, then `number` solid-line targets, then `number` dotted-line
    //   targets (-1 means no target; dotted targets also seed solution_path).
    // Every node starts with heap = 1.  Returns node_point.
    //point: point number
    //type : red or black or white
    // white_index : white_point index
    // node *original_path:solid line
    // node *next_path ; dotted line
    // node *solution_path : answer
    //******************************************
    int node_number = 0 ;
    int i=0;
    //file input rule
    FILE *fin;
    fin=fopen(filename,"r");
    /* BUGFIX: the original dereferenced fin without checking fopen's
     * result.  On a missing file, leave the global `number` at 0 so
     * main() exits cleanly. */
    if ( fin == NULL )
        return node_point;
    fscanf(fin,"%d\n",&number);
    for (i=0;i<number;i++)
    {
        node_point[i].point = i ;
    }//for
    for (i=0;i<number;i++)
    {
        fscanf(fin,"%d",&node_number);
        if (node_number == -1)
            node_point[i].original_path = NULL ;
        else
            node_point[i].original_path = &node_point[node_number] ;
        if ( i < number-1 )
            fscanf(fin," ");
    }//for
    fscanf(fin,"\n");
    for (i=0;i<number;i++)
    {
        fscanf(fin,"%d ",&node_number);
        if (node_number == -1) {
            node_point[i].next_path = NULL ;
            node_point[i].solution_path = NULL ;
        }//if
        else{
            node_point[i].next_path = &node_point[node_number] ;
            node_point[i].solution_path = &node_point[node_number] ;
        }//else
        if ( i < number-1 )
            fscanf(fin," ");
        node_point[i].heap = 1;
    }//for
    fclose(fin);
    return node_point;
}//ReadInput
void * AddPointType( node node_point[]){
    //*****************************************
    // Classify every node and number the white nodes:
    //red : original_path == NULL
    //black : next_path == NULL
    //white: the last node, and everything else
    // white_index values increase along the solid-line chain starting at
    // node 0.  Returns NULL (callers ignore the value; the original
    // leaked a pointer-sized malloc and fell off the end of a void*
    // function).
    //******************************************
    int i,white_index;
    white_index = 0;
    node *now_point;
    for ( i = 0 ; i < number ; i++ ){
        if ( i == number-1 ){
            // the last node is always white and claims the first index
            strcpy(node_point[i].type,"white");
            node_point[i].white_index = white_index;
            white_index++;
        }//if
        else if(node_point[i].original_path == NULL)
            strcpy(node_point[i].type,"red");
        else if ( node_point[i].next_path == NULL )
            strcpy(node_point[i].type,"black");
        else{
            strcpy(node_point[i].type,"white");
        }//else
    }//for
    if ( strcmp( node_point[0].type,"white" ) == 0 ){
        node_point[0].white_index = white_index;
        white_index++;
    }//if
    // walk the solid-line chain from node 0, numbering white nodes in order
    now_point = node_point[0].original_path;
    while ( now_point != NULL ){
        if ( strcmp( now_point->type,"white" ) == 0 ){
            now_point->white_index = white_index;
            white_index++;
        }//if
        now_point = now_point->original_path;
    }//while
    return NULL;
}//AddPointType
void * PointRedConnectNextpath( node node_point[]){
    //*****************************************
    //if red connect dotted line , disconnect solid line
    // Returns NULL; callers ignore the value (the original fell off the
    // end of a void* function without returning).
    //******************************************
    int i;
    for ( i = 0 ; i < number ; i++ ){
        if((strcmp(node_point[i].type,"red") == 0)){
            node_point[i].original_path = node_point[i].next_path;
            node_point[i].next_path = NULL;
        }//if
    }//for
    return NULL;
}//PointRedConnectNextpath
int * IsFinishOddEven( node node_point[]){
    //*****************************************
    //determine IsFinishOddEven ,if Finish return 1 else return 0
    // "Finished" means every node's solid line already equals its answer.
    // NOTE(review): declared int* but returns plain 0/1 flags that main()
    // assigns straight into an int; the return type (and the prototype at
    // the top of the file) should probably be int — confirm and change both.
    //******************************************
    int i;
    for ( i = 0 ; i < number ; i++ ){
        // nodes with no answer target are always considered done
        if ( node_point[i].solution_path == NULL )
            continue;
        if(node_point[i].original_path != node_point[i].solution_path)
            return 0;
    }//for
    return 1;//true
}//IsFinishOddEven
int * IsFinishRed( node node_point[]){
    //*****************************************
    // Return 1 if any red node still exists, else 0.
    // NOTE(review): declared int* but used as a plain int flag by main();
    // the return type (and its prototype) should probably be int — confirm.
    //******************************************
    int i;
    for ( i = 0 ; i < number ; i++ ){
        if ( (strcmp( node_point[i].type,"red" ) == 0))
            return 1; //true
    }//for
    return 0; //false
}//IsFinishRed
int * IsFinishBlack( node node_point[]){
    //*****************************************
    // Return 1 if any black node still exists, else 0.
    // NOTE(review): declared int* but used as a plain int flag by main();
    // the return type (and its prototype) should probably be int — confirm.
    //******************************************
    int i;
    for ( i = 0 ; i < number ; i++ ){
        if ( (strcmp( node_point[i].type,"black" ) == 0))
            return 1; //true
    }//for
    return 0; //false
}//IsFinishBlack
void * Odd_Shortcut_Phase_Mnay( node node_point[],update_infor up_info[]){
    //*****************************************
    // Odd (shortcut) phase: find white nodes whose dotted line skips ahead,
    // accept the farthest-reaching one, then accept every further candidate
    // whose white_index span does not conflict with an already-accepted
    // update.  Accepted updates are recorded in up_info[] for the even
    // phase, and each accepted node's solid line is snapped to its answer.
    //1.find white and record the point number and distance
    //2.check update_point
    // (1)select farthest distance white_point , if there are more than one , the smallest_point_number priority.
    // (2)check no_conflict_white_point update together
    // (3)return the update numbers
    //******************************************
    //single update
    node *now_point;
    now_point = malloc(sizeof(now_point));
    int i,j,k;
    int array_i = 0,distance_max = 0,update_point = 0,proposol_point_white_index_index = 0;
    int up_i = 0 ;
    int distance[1000] = {0};
    int start_point[1000] = {0};
    int ori_pheap[1000] = {0};
    int next_pheap[1000] = {0};
    int white_in[1000] = {0};
    int proposol_point_white_index[1000] = {0};
    //1.find white and record the point number and distance
    for ( i = 0 ; i < number ; i++ ){
        if ( strcmp( node_point[i].type,"white" ) == 0 ){
            //next_path pass: the last node never proposes
            if ( i == number-1 )
                continue;
            // already pointing at the answer: nothing to do
            if ( node_point[i].original_path->point == node_point[i].solution_path->point )
                continue;
            now_point = node_point[i].next_path;
            if ( now_point == NULL )
                continue;
            // follow the dotted line, skipping non-white nodes, until the
            // next white node is found; record the white_index gap
            for ( j = 0 ; j < number ; j++ ){
                if ( strcmp(now_point->type,"white" ) == 0 ){
                    start_point[array_i] = i;
                    distance[array_i] = now_point->white_index - node_point[i].white_index;
                    white_in[array_i] = node_point[i].white_index;
                    ori_pheap[array_i] = node_point[i].heap;
                    next_pheap[array_i] = now_point->heap;
                    array_i++;
                    break;
                }//if
                else{
                    now_point = now_point -> original_path;
                }//else
            }//for
        }//if
    }//for
    //2.check update_point
    // (1)select farthest distance white_point , if there are more than one , the smallest_point_number priority.
    for ( i = 0 ; i < array_i ; i++ ){
        if ( i == 0 )
            distance_max = distance[0];
        if ( distance[i] > distance_max )
            distance_max = distance[i] ;
    }//for
    //2.check update_point
    // (2)check no_conflict_white_point update together
    for ( i = 0 ; i < array_i ; i++ ){
        if ( distance[i] == distance_max ){
            update_point = start_point[i];
            proposol_point_white_index_index = i;
            up_info[up_i].update_point = update_point; // insert first update point
            up_info[up_i].distance_max = distance_max;
            up_info[up_i].white_index= node_point[update_point].white_index;
            //if heap is same = 1 , else = 0
            if ( ori_pheap[up_i] == next_pheap[up_i] )
                up_info[up_i].heap_same = 1;
            else
                up_info[up_i].heap_same = 0;
            up_i++;
            break;
        }//if
    }//for
    bool confi = false ;
    //2.check update_point
    // (3)return the update numbers
    //isconflict
    //another point >= update point + distance && another point + distance > update point + distance
    //another point < update point && another point + distance <= update point
    // if equal first pass
    for ( k = distance_max; k > 0 ; k-- ){ // scan candidates from the longest distance down
        for ( i = 0 ; i < array_i ; i++ ){ // examine every recorded candidate
            if ( distance[i] != k )
                continue;
            for ( j = 0;j < up_i ;j++ ){ // check conflicts with already-accepted updates
                if ( start_point[i] == up_info[j].update_point ){ // already accepted: skip it
                    confi = true;
                    break ;
                }//if
                if ( ori_pheap[i] != next_pheap[i] )
                    confi = false;
                else if ( (distance[i] < 0) // negative distance
                    || (((node_point[start_point[i]].white_index < node_point[up_info[j].update_point].white_index))
                    &&(node_point[start_point[i]].white_index + distance[i]) > node_point[up_info[j].update_point].white_index) // left side reaches into the accepted span
                    || ((node_point[start_point[i]].white_index >= node_point[up_info[j].update_point].white_index) // inside the span, excluding the rightmost point
                    && (node_point[start_point[i]].white_index < node_point[up_info[j].update_point].white_index +up_info[j].distance_max ))){
                    confi = true;
                }//if
            }//for
            if (confi == true){
                confi = false;
                continue ;
            }//if
            else{//confi == false: accept this candidate too
                up_info[up_i].update_point = start_point[i];
                up_info[up_i].distance_max = distance[i];
                up_info[up_i].white_index = white_in[i];
                if ( ori_pheap[up_i] == next_pheap[up_i] ){
                    up_info[up_i].heap_same = 1;
                }//if
                else
                    up_info[up_i].heap_same = 0;
                up_i++;
                confi = true;
            }//else
        }//for
    }//for
    //update accepted points:
    //1. point merge
    //2.original_path = next_path
    //3.next_path = NULL
    int max_heap = 0;
    for ( i = 0 ; i < up_i ; i++ ){
        now_point = node_point[up_info[i].update_point].original_path;
        // same-heap long shortcut: mark skipped nodes as heap 2 and inherit
        // the heap of the node at the far end of the span
        if ( (up_info[i].heap_same == 1) &&(up_info[i].distance_max > 1) ) {
            for ( ; now_point->white_index > 0 && now_point->white_index <= node_point[up_info[i].update_point].white_index + up_info[i].distance_max ; ){
                if ( now_point->white_index < node_point[up_info[i].update_point].white_index + up_info[i].distance_max ){
                    now_point -> heap = 2;
                    now_point = now_point -> original_path;
                }//if
                else{
                    max_heap = now_point -> heap;
                    break;
                }//else
            }//for
        }//if
        node_point[up_info[i].update_point].heap = max_heap;
        node_point[up_info[i].update_point].white_index = node_point[up_info[i].update_point].white_index + up_info[i].distance_max;
        node_point[up_info[i].update_point].original_path = node_point[up_info[i].update_point].solution_path;
        node_point[up_info[i].update_point].next_path = NULL;
    }//for
}//Odd_Shortcut_phase
void * Even_Prune_Phase_Many( node node_point[],update_infor up_info[]){
    //*****************************************
    //1.read each accepted update (update_point, distance) from up_info
    //  (the list is terminated by distance_max == 0)
    //2.update all white nodes whose white_index lies strictly inside an
    //  accepted span: connect their dotted line and propagate
    //  white_index/heap between the node and its next white node.
    // Returns NULL; callers ignore the value (the original fell off the
    // end of a void* function and leaked a pointer-sized malloc).
    //******************************************
    int i,k;
    int array_i = 0;
    int ori_white_index = 0 ;
    node *now_point;
    //1.read update_point and distance
    //2.update all nodes in the middle
    for ( array_i = 0 ;array_i <number ;array_i++ ){
        // distance_max == 0 marks the end of the odd phase's update list
        if ( up_info[array_i].distance_max == 0 )
            break;
        for ( i = 0 ; i < number-1 ;i++ ){
            if ( node_point[i].original_path == node_point[i].solution_path )
                continue;
            // find the next white node along the dotted line, skipping
            // red nodes on the way
            if ( (strcmp( node_point[i].type,"white" ) != 0) )
                continue;
            now_point = node_point[i].next_path;
            while ( (strcmp( now_point -> type,"white" ) != 0 )){
                now_point = now_point->original_path;
            }//while
            if ( (strcmp( node_point[i].type,"white" ) == 0)
                &&( node_point[i].white_index > up_info[array_i].white_index)
                && ( node_point[i].white_index < up_info[array_i].white_index + up_info[array_i].distance_max)){
                // the node being updated has the larger index: push forward
                if ( node_point[i].white_index > now_point->white_index ){
                    now_point->white_index = node_point[i].white_index ;
                    now_point->heap = node_point[i].heap;
                }//if
                else{
                    ori_white_index = node_point[i].white_index;
                    node_point[i].white_index = now_point->white_index ;
                    node_point[i].heap = now_point->heap;
                    /* BUGFIX: the original loop iterated k but indexed
                     * node_point[j] with an uninitialized j — undefined
                     * behavior.  The loop variable k is clearly intended. */
                    for ( k = 0 ; k < number-1 ;k++ ) {
                        if ( node_point[k].white_index == ori_white_index )
                            node_point[k].white_index = node_point[i].white_index;
                    }//for
                }//else
                node_point[i].original_path = node_point[i].next_path;
                node_point[i].next_path = NULL;
            }//if
        }//for
    }//for
    return NULL;
}//Even_Prune_Phase_Many
void * PointBlackDisconnectNextpath( node node_point[]){
    //*****************************************
    //final round : if black disconnect solid line
    // Returns NULL; callers ignore the value (the original fell off the
    // end of a void* function without returning).
    //******************************************
    int i;
    for ( i = 0 ; i < number ; i++ ){
        if(strcmp(node_point[i].type,"black") == 0){
            node_point[i].original_path = NULL;
        }//if
    }//for
    return NULL;
}//PointBlackDisconnectNextpath
//*****************************************
//below is Debug Code
//******************************************
void Print_Node_Point( node node_point[],int number ){
    /* Print each node's current solid-line (original_path) target on one
     * line, space separated, -1 for a missing target, newline at the end. */
    for (int idx = 0; idx < number; idx++)
    {
        if ( node_point[idx].original_path != NULL )
            printf( "%d",node_point[idx].original_path->point );
        else
            printf( "-1" );
        // separator between values, newline after the last one
        printf( idx < number-1 ? " " : "\n" );
    }//for
}//Print_Node_Point
void Debug_Print_Node_Point( node node_point[],int number ){
    /* Debug dump: one line per node with its id, type, white_index, heap,
     * and the targets of its three links (-1 when a link is NULL). */
    for (int idx = 0; idx < number; idx++)
    {
        node *p = &node_point[idx];
        printf( "point:%d ",p->point );
        printf( "type:%s ",p->type );
        printf( "white_index:%d ",p->white_index );
        printf( "heap:%d ",p->heap );
        if ( p->original_path != NULL )
            printf( "original_path:%d ",p->original_path->point );
        else
            printf( "original_path:-1 " );
        if ( p->solution_path != NULL )
            printf( "solution_path:%d ",p->solution_path->point );
        else
            printf( "solution_path:-1 " );
        if ( p->next_path != NULL )
            printf( "next_path:%d\n",p->next_path->point );
        else
            printf( "next_path:-1\n" );
    }//for
}//Debug_Print_Node_Point
<file_sep>/HW3/CODE/406410114_DS_HW3_segment_rounting.c
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdbool.h>
#include <limits.h>
// Maximum number of vertices supported by the fixed-size tables below.
#define V 100
// One undirected link read from the input.
typedef struct link_input{
    int linkID;
    int firstNodeID;
    int secondNodeID; // sorting
    double linkCapacity;
} link_input;
// One traffic demand read from the input.
typedef struct flow_input{
    int flowID;
    int sourceID;
    int destinationID; // sorting
    double flowSize;
} flow_input;
// Adjacency-list element: the head node_info[i] is a sentinel whose chain
// lists i's neighbours with per-link weight, load and capacity.
typedef struct node{
    int point;
    double weight;
    double load; // sorting
    double linkCapacity;
    struct node *next;
} node;
void dijkstra(double graph[V][V], int src);
void ReadInput();
void Debug_node_printf();
void Debug_flow_printf();
// NOTE(review): duplicate declaration of Debug_node_printf (harmless).
void Debug_node_printf();
void create_wei_graph( node node_info[] );
int minDistance(int dist[], bool sptSet[]);
int printSolution(int dist[]);
bool capacity_determine( node node_info[], double capacity );
void update_weight( node node_info[],double graph[V][V] );
void seg_select_path( int src,int des );
void segment_rounting();
// Globals: counts, accepted-path table (sou_des_path) and its fill index
// (sdp_i), the segment table, and the working weight matrix.
int node_num = 0,link_num =0,flow_num =0,n,sou_des_path[V][V],sdp_i=0;
int seg_tb[100][100][100];
double wei_graph[100][100];
link_input link_info[10000];
flow_input flow_path[10000];
node node_info[1000] = { 0,0,0,0,NULL };
//===========================================Algorithm overview=====================================//
//1. ReadInput builds node_info
//2. create_wei_graph builds the weighted graph
//3. dijkstra computes the shortest paths
//4. dijkstra calls segment_rounting to build segment_table[source][destination][path]
//5. for each input flow, seg_select_path chooses the best path
//6. capacity_determine checks whether the chosen path still has capacity, returning true or false
//6.1 if false, continue
//6.2 if true, add the path, count the flow toward the throughput, and update the weights
//7. repeat 4~6 until all input flows are processed
int main() {
    //*****************************************
    // Segment-routing driver: build the graph, precompute segment tables
    // via Dijkstra from every source, then admit flows greedily and print
    // "<accepted> <throughput>" followed by every accepted path.
    // NOTE(review): throughput is an int while flowSize is a double, so
    // fractional flow sizes are truncated on accumulation — confirm intended.
    //*****************************************
    int i,j,k;
    int throughput=0;
    int accept_flow=0;
    bool is_cap;
    ReadInput();
    // initialise the path and segment tables to the "no entry" marker -1
    for ( i = 0;i<node_num;i++ ){
        for ( j = 0;j<node_num;j++ ){
            sou_des_path[i][j] = -1;
            for ( k =0 ; k <node_num;k++ ){
                seg_tb[i][j][k] =-1 ;
            }//for
        }//for
    }//for
    //generate graph
    create_wei_graph( node_info );
    //create segment table
    for (i =0 ; i < node_num ; i++){
        dijkstra(wei_graph, i);
    }//for
    // admit each flow whose chosen path still has spare capacity
    for (i =0 ; i < flow_num ; i++){
        create_wei_graph( node_info );
        if ( flow_path[i].sourceID < node_num &&flow_path[i].destinationID < node_num ){
            seg_select_path(flow_path[i].sourceID,flow_path[i].destinationID );
            is_cap = capacity_determine( node_info, flow_path[i].flowSize );
            if (is_cap == true){
                accept_flow++;
                throughput = throughput + flow_path[i].flowSize;
            }//if
        }//if
    }//for
    printf( "%d %d\n",accept_flow,throughput );
    // print every accepted path: "<path index> <node> <node> ..."
    for ( i = 0; i<sdp_i;i++ ){
        printf( "%d ",i );
        for( j= 0 ;j<node_num ;j++){
            if ( sou_des_path[i][j] == -1 )
                continue;
            else{
                printf( "%d ",sou_des_path[i][j] );
            }//else
        }//for
        printf( "\n" );
    }//for
    return 0;
}//main
void ReadInput(){
//*****************************************
//******************************************
//int input_number = 0;
int input = 0;
double d_input ;
//determine rule
int i=0;
//file input rule
//FILE *fin;
//fin=fopen(filename,"r");
scanf("%d",&node_num);
//printf( "node_num:%d\n",node_num );
scanf("%d",&link_num);
//printf( "node_num:%d\n",node_num );
for (i=0;i<link_num;i++)
{
scanf("%d",&input);
link_info[i].linkID = input ;
scanf("%d",&input);
link_info[i].firstNodeID = input ;
scanf("%d",&input);
link_info[i].secondNodeID = input ;
scanf("%lf",&d_input);
link_info[i].linkCapacity = d_input ;
}//for
scanf("%d",&flow_num);
//printf( "flow_num:%d\n",flow_num );
for (i=0;i<flow_num;i++)
{
scanf("%d",&input);
flow_path[i].flowID = input ;
scanf("%d",&input);
flow_path[i].sourceID = input ;
scanf("%d",&input);
flow_path[i].destinationID = input ;
scanf("%lf",&d_input);
flow_path[i].flowSize = d_input ;
}//for
//int node_index = -1 ;
//int max_finode_id = link_info[node_num-1].firstNodeID ;
node *now_ptr = (node *) malloc(sizeof(node));
for ( i=0;i<node_num;i++ ){
now_ptr = &node_info[node_num];
now_ptr->next = NULL;
}//for
//create Adjacency Lists use pointer array
for ( i=0;i<link_num;i++ ){
//printf( "-----------------------round--------------------------\n" );
//printf( "link_info[i].firstNodeID :%d\n",link_info[i].firstNodeID );
//printf( "node_index :%d\n ",node_index );
//first node
now_ptr = &node_info[link_info[i].firstNodeID];
node *new_ptr = (node *) malloc(sizeof(node));
while ( now_ptr->next != NULL )
now_ptr = now_ptr->next;
new_ptr->point = link_info[i].secondNodeID;
//printf( "new_ptr->point:%d\n",new_ptr->point );
new_ptr->linkCapacity = link_info[i].linkCapacity;
new_ptr->load = 0;
new_ptr->weight = (new_ptr->load)/(new_ptr->linkCapacity - new_ptr->load);
//printf( "new_ptr->linkCapacity:%d\n",new_ptr->linkCapacity );
now_ptr->next = new_ptr;
now_ptr = now_ptr->next;
now_ptr->next = NULL;
//second node
now_ptr = &node_info[link_info[i].secondNodeID];
node *new_ptr2 = (node *) malloc(sizeof(node));
while ( now_ptr->next != NULL )
now_ptr = now_ptr->next;
new_ptr2->point = link_info[i].firstNodeID;
//printf( "new_ptr2->point:%d\n",new_ptr2->point );
new_ptr2->linkCapacity = link_info[i].linkCapacity;
new_ptr2->load = 0;
new_ptr2->weight = (new_ptr2->load)/(new_ptr2->linkCapacity - new_ptr2->load);
//printf( "new_ptr2->linkCapacity:%d\n",new_ptr2->linkCapacity );
now_ptr->next = new_ptr2;
now_ptr = now_ptr->next;
now_ptr->next = NULL;
}//for
}//ReadInput
/*
 * create_wei_graph - rebuild the global weight matrix wei_graph from the
 * adjacency lists in node_info[].
 *
 * Every cell is first cleared to 0 (0 == "no edge"), then each adjacency
 * entry (i -> p) is written as 1 + its load-based weight, so a hop always
 * costs at least 1 even when the link is unloaded.
 *
 * Fixes vs. previous version:
 *  - removed a redundant inner loop over j that repeated the identical
 *    adjacency-list walk node_num times (j was never used in its body)
 *  - removed a malloc() whose result was immediately overwritten with
 *    &node_info[i] and therefore leaked on every iteration
 */
void create_wei_graph( node node_info[] ){
    int i,j;
    /* clear the matrix: 0 means "no direct link" */
    for ( i=0;i<node_num;i++ ){
        for ( j=0;j<node_num;j++ ){
            wei_graph[i][j]= 0;
        }//for
    }//for
    /* walk each node's adjacency list exactly once */
    for ( i=0;i<node_num;i++ ){
        node *now_ptr = &node_info[i];   /* traversal only: no allocation needed */
        while ( now_ptr->next != NULL ){
            /* +1 base cost so shortest path == hop count on idle links */
            wei_graph[i][now_ptr->next->point] = 1 + now_ptr->next->weight;
            now_ptr = now_ptr->next;
        }//while
    }//for
}//create_wei_graph
/*
 * minDistance - the classic Dijkstra selection step: pick the unvisited
 * vertex with the smallest tentative distance.
 *
 * dist[]   tentative distances from the source
 * sptSet[] true for vertices whose distance is already finalized
 *
 * Returns the index of the chosen vertex, or -1 if every vertex is
 * already finalized.  (The previous version returned an uninitialized
 * value in that case, which is undefined behavior.)
 */
int minDistance(int dist[], bool sptSet[])
{
    int min = INT_MAX, min_index = -1;
    for (int v = 0; v < node_num; v++)
        if (sptSet[v] == false && dist[v] <= min){
            /* "<=" keeps the last candidate on ties, matching old behavior */
            min = dist[v], min_index = v;
        }//if
    return min_index;
}
/*
 * printSolution - debugging helper: dump every vertex's distance from
 * the source to stdout.
 *
 * Returns 0 on completion.  (The function was declared int but
 * previously returned nothing, which is undefined behavior if a caller
 * ever reads the result; the signature is kept for compatibility.)
 */
int printSolution(int dist[])
{
    printf("Vertex \t\t Distance from Source\n");
    for (int i = 0; i < node_num; i++)
        printf("%d \t\t %d\n", i, dist[i]);
    return 0;
}
/*
 * dijkstra - single-source shortest paths over the weighted matrix
 * graph[][], then hand the resulting distance table to
 * segment_rounting() to build the segment-routing table for src.
 *
 * graph[u][v] == 0 is treated as "no edge".  dist[] is int while the
 * weights are double, so the fractional load component of each edge
 * weight is truncated on store; since every edge weight is >= 1 (see
 * create_wei_graph), dist[i] effectively counts hops.
 *
 * Removed vs. previous version: the unused locals path[], path_i,
 * sou_des_trace[][] and sdt_i, plus the O(V^2) loop that initialized
 * them (all of their uses were commented out).
 */
void dijkstra(double graph[V][V], int src)
{
    int dist[V];        /* dist[i] = shortest distance from src to i      */
    bool sptSet[V];     /* sptSet[i] = true once i's distance is final    */
    for (int i = 0; i < node_num; i++)
        dist[i] = INT_MAX, sptSet[i] = false;
    dist[src] = 0;      /* the source reaches itself for free */
    /* finalize one vertex per iteration */
    for (int count = 0; count < node_num - 1; count++) {
        /* closest not-yet-finalized vertex */
        int u = minDistance(dist, sptSet);
        sptSet[u] = true;
        /* relax every edge leaving u */
        for (int v = 0; v < node_num; v++){
            if (!sptSet[v] && graph[u][v] && dist[u] != INT_MAX
                && dist[u] + graph[u][v] < dist[v]){
                dist[v] = dist[u] + graph[u][v];   /* truncated to int */
            }//if
        }//for
    }//for
    segment_rounting( dist,graph,src );
}//dijkstra
//===================================segment_rounting_table=======================================//
/*
 * segment_rounting - build the segment-routing path table
 * seg_tb[src][i][] for every destination i, using the hop counts in
 * dist[] produced by dijkstra().
 *
 * Destinations are processed in order of increasing hop count, so a
 * node at hop h extends the already-final path of some node at hop h-1
 * that has a direct link to it.  Among candidates, the lexicographically
 * smallest node-index sequence is kept.
 *
 * The graph[][] parameter is currently unused but kept so the existing
 * call sites keep compiling unchanged.
 *
 * Fixed vs. previous version: a node was malloc'd and immediately
 * overwritten with &node_info[j], leaking memory on every candidate
 * check; the unused local now_hop and a dead commented-out debug dump
 * were removed.
 */
void segment_rounting( int dist[],double graph[V][V],int src ){
    int i,j,k,m,l;
    int max_hop=0;
    bool min_idx = true;   /* "candidate j's path is smaller than i's current one" */
    /* longest shortest-path distance bounds the sweep below */
    for ( i =0 ; i < node_num;i++ ){
        if ( dist[i]>max_hop )
            max_hop = dist[i];
    }//for
    /* sweep hop levels from 0 to max_hop */
    for ( l = 0; l <= max_hop;l++ ){
        for ( i = 0;i<node_num;i++ ){
            if ( dist[i] != l )
                continue;
            if ( dist[i] == 0 )                 /* the source itself */
                seg_tb[src][i][0] = src;
            else if ( dist[i] == 1 ){           /* direct neighbor of src */
                seg_tb[src][i][0] = src;
                seg_tb[src][i][1] = i;
            }//else if
            else {
                /* hop >= 2: try to extend each hop-(h-1) node's path */
                for ( j = 0;j<node_num;j++ ){
                    if ( dist[j] == dist[i]-1 ){
                        /* lexicographic compare: adopt j's path only if it is
                         * smaller than i's current one (or i has none yet) */
                        min_idx = false;
                        for ( m = 0;m<dist[i]-1 ;m++ ){
                            if ( seg_tb[src][i][m] > seg_tb[src][j][m] ) {
                                min_idx = true;
                                break;
                            }//if
                            else if ( seg_tb[src][i][m] < seg_tb[src][j][m] ){
                                min_idx = false;
                                break;
                            }//else if
                            else
                                continue;
                        }//for
                        if ( min_idx == true || seg_tb[src][i][0] == -1 ){
                            int temp_arr[100];
                            /* tentatively copy j's path, keeping a backup so it
                             * can be restored if j turns out to have no link to i */
                            for ( k = 0;k<dist[i];k++ ){
                                temp_arr[k] = seg_tb[src][i][k];
                                seg_tb[src][i][k] = seg_tb[src][j][k];
                            }//for
                            /* is there a direct link j -> i ? */
                            node *now_ptr = &node_info[j];   /* leak fix: no malloc */
                            while ( now_ptr->next != NULL && now_ptr->next->point != i )
                                now_ptr = now_ptr->next;
                            if ( now_ptr->next == NULL ){
                                /* no link: restore the previous path */
                                for ( k = 0;k<dist[i];k++ )
                                    seg_tb[src][i][k] = temp_arr[k];
                                continue;
                            }//if
                            else if ( now_ptr->next->point == i ){
                                /* link found: terminate the path at i */
                                seg_tb[src][i][l] = i;
                                continue;
                            }//else if
                        }//if
                    }//if
                }//for
            }//else
        }//for
    }//for
}
//===================================seg_select_path=======================================//
/*
 * seg_select_path - choose, for the flow src -> des, the intermediate
 * node "mid" that minimizes the total path weight of src->mid plus
 * mid->des as recorded in seg_tb (ties broken by smaller hop count,
 * and implicitly by smaller node index since mid is scanned in
 * ascending order), then append the concatenated node sequence to
 * sou_des_path[sdp_i] and advance sdp_i.
 *
 * The per-hop weight is wei_graph[a][b] - 1 because create_wei_graph()
 * stores each edge as 1 + load-fraction.
 *
 * NOTE(review): both scans read seg_tb[...][j+1] with j up to
 * node_num-1, which assumes the table's third dimension exceeds the
 * longest stored path -- confirm against the seg_tb declaration.
 *
 * Removed vs. previous version: unused locals k, m, l.
 */
void seg_select_path( int src,int des ){
    int i,j;
    int hop_num =-1,mid = -1,hop_temp = 0;   /* mid = chosen midpoint */
    double wei_num = -1,wei_temp = 0;
    for ( i = 0; i < node_num ; i++ ){
        wei_temp = 0;
        hop_temp = 0;
        /* accumulate cost of src -> i */
        for ( j = 0; j<node_num;j++ ){
            if ( seg_tb[src][i][j+1] == -1 ){   /* reached end of recorded path */
                hop_temp++;
                break;
            }//if
            wei_temp = wei_temp + wei_graph[seg_tb[src][i][j]][seg_tb[src][i][j+1]]-1;
            hop_temp++;
        }//for
        /* accumulate cost of i -> des */
        for ( j = 0; j<node_num;j++ ){
            if ( seg_tb[i][des][j+1] == -1 ){
                hop_temp++;
                break;
            }//if
            wei_temp = wei_temp + wei_graph[ seg_tb[i][des][j]][ seg_tb[i][des][j+1]]-1;
            hop_temp++;
        }//for
        /* keep the best midpoint: first candidate wins outright, then
         * smaller weight, then (on equal weight) smaller hop count */
        if ( wei_num == -1 ){
            wei_num = wei_temp;
            hop_num = hop_temp;
            mid = i;
        }//if
        else if ( wei_num > wei_temp ){
            wei_num = wei_temp;
            hop_num = hop_temp;
            mid = i;
        }//else if
        else if ( wei_num == wei_temp ){
            if ( hop_num > hop_temp ){
                hop_num = hop_temp;
                mid = i;
            }//if
        }//else if
    }//for
    /* record the concatenated path src -> mid -> des */
    int mid_index = 0;
    for ( i = 0; i < node_num ; i++ ){
        if ( seg_tb[src][mid][i] != -1 ){
            sou_des_path[sdp_i][mid_index++] = seg_tb[src][mid][i];
        }//if
    }//for
    mid_index--;   /* mid ends the first half and starts the second:
                    * drop the duplicate */
    for ( i = 0; i < node_num ; i++ ){
        if ( seg_tb[mid][des][i] != -1 ){
            sou_des_path[sdp_i][mid_index++] = seg_tb[mid][des][i];
        }//if
    }//for
    sdp_i++;
}//seg_select_path
/*
 * capacity_determine - admission control for the path most recently
 * appended to sou_des_path (index sdp_i-1) carrying `capacity` units.
 *
 * Rejects (returns false, wipes the recorded path, and rolls sdp_i
 * back) when:
 *   - the path has fewer than two nodes, or
 *   - any hop lacks the remaining link capacity.
 * On success it subtracts `capacity` from each traversed link in
 * link_info[], adds the load to both adjacency-list directions, and
 * refreshes each direction's congestion weight load/(cap-load),
 * using INT_MAX when the link becomes saturated.  Returns true.
 *
 * Fixed vs. previous version: two malloc()s whose results were
 * immediately overwritten with &node_info[...] (leaked on every call);
 * inner loop variables that shadowed the outer i were renamed to k.
 */
bool capacity_determine( node node_info[],double capacity ){
    bool rule = true;
    int i,j;
    /* a path with no second node is unusable: reject it outright */
    if ( sou_des_path[sdp_i-1][1] == -1 ){
        rule = false;
        for(int k= 0 ;k<node_num ;k++)
            sou_des_path[sdp_i-1][k] = -1;
        sdp_i--;
        return rule;
    }//if
    /* first pass: verify every hop has room for this flow */
    for(i= 0 ;i < node_num ;i++){
        if ( sou_des_path[sdp_i-1][i] == -1 )
            continue;
        else{
            node *now_ptr = &node_info[sou_des_path[sdp_i-1][i]];   /* leak fix */
            /* locate the adjacency entry for the next hop */
            while ( now_ptr->next != NULL && now_ptr->next->point != sou_des_path[sdp_i-1][i+1] )
                now_ptr= now_ptr->next;
            /* over capacity: reject and erase the path */
            if ( now_ptr->next != NULL && capacity > now_ptr->next->linkCapacity ){
                rule = false;
                for(int k= 0 ;k<node_num ;k++)
                    sou_des_path[sdp_i-1][k] = -1;
                sdp_i--;
                return rule;
            }//if
        }//else
    }//for
    int update_link[100],upli_i=0;   /* indices of links charged below */
    if ( rule == true ){
        /* second pass: commit -- charge link_info capacities hop by hop */
        for(i= 0 ;i < node_num ;i++){
            if ( sou_des_path[sdp_i-1][i] == -1 || sou_des_path[sdp_i-1][i+1] == -1 ){
                continue;
            }//if
            for( j = 0 ; j< link_num ;j++ ){
                /* links are undirected: match either endpoint order */
                if ( (link_info[j].firstNodeID == sou_des_path[sdp_i-1][i+1] && link_info[j].secondNodeID == sou_des_path[sdp_i-1][i])
                    ||(link_info[j].secondNodeID == sou_des_path[sdp_i-1][i+1] && link_info[j].firstNodeID == sou_des_path[sdp_i-1][i]) ){
                    if ( link_info[j].linkCapacity - capacity >= 0.0 ){
                        link_info[j].linkCapacity = link_info[j].linkCapacity - capacity;
                        update_link[upli_i] = j;
                        upli_i++;
                    }//if
                    else{
                        /* ran out of capacity mid-commit: reject.
                         * NOTE(review): links already charged in this loop
                         * are not refunded here -- confirm this is intended. */
                        rule = false;
                        for(int k= 0 ;k<node_num ;k++)
                            sou_des_path[sdp_i-1][k] = -1;
                        sdp_i--;
                        return rule;
                    }//else
                }//if
            }//for
        }//for
        /* third pass: update load + weight on both adjacency directions
         * of every charged link.
         * NOTE(review): assumes the matching adjacency entry always
         * exists; a missing entry would dereference NULL -- confirm. */
        for ( i = 0;i <upli_i;i++ ){
            node *now_ptr = &node_info[link_info[update_link[i]].firstNodeID];   /* leak fix */
            now_ptr = now_ptr->next;
            while (now_ptr->point != link_info[update_link[i]].secondNodeID )
                now_ptr = now_ptr->next;
            now_ptr->load = now_ptr->load + capacity;
            if ( (now_ptr->linkCapacity - now_ptr->load) ==0 )
                now_ptr->weight = INT_MAX;   /* saturated: effectively infinite */
            else
                now_ptr->weight = (now_ptr->load)/(now_ptr->linkCapacity - now_ptr->load);
            now_ptr = &node_info[link_info[update_link[i]].secondNodeID];
            now_ptr = now_ptr->next;
            while (now_ptr->point != link_info[update_link[i]].firstNodeID ){
                now_ptr = now_ptr->next;
            }//while
            now_ptr->load = now_ptr->load + capacity;
            if ( (now_ptr->linkCapacity - now_ptr->load) ==0 )
                now_ptr->weight = INT_MAX;
            else
                now_ptr->weight = (now_ptr->load)/(now_ptr->linkCapacity - now_ptr->load);
        }//for
    }//if
    return rule;
}//capacity_determine
/*
 * Debug_link_printf - dump the global link table (link_info) to stdout
 * for debugging: one header block, then one row per link.
 * Output bytes are identical to the original implementation.
 */
void Debug_link_printf(){
    printf( "node_num:%d\n",node_num );
    printf( "link_num:%d\n",link_num );
    printf( "Debug_node_printf:\n" );
    printf( "linkID  firstNodeID  secondNodeID  linkCapacity\n" );
    int row = 0;
    while (row < link_num) {
        printf("%d  ", link_info[row].linkID);
        printf("%d  ", link_info[row].firstNodeID);
        printf("%d  ", link_info[row].secondNodeID);
        printf("%lf\n", link_info[row].linkCapacity);
        row++;
    }//while
}//Debug_link_printf
/*
 * Debug_flow_printf - dump the global flow table (flow_path) to stdout
 * for debugging: one header block, then one row per flow.
 * Output bytes are identical to the original implementation.
 */
void Debug_flow_printf(){
    printf( "flow_num:%d\n",flow_num );
    printf( "Debug_flow_printf:\n" );
    printf( "flowID  sourceID  destinationID  flowSize\n" );
    int row = 0;
    while (row < flow_num) {
        printf("%d  ", flow_path[row].flowID);
        printf("%d  ", flow_path[row].sourceID);
        printf("%d  ", flow_path[row].destinationID);
        printf("%lf\n", flow_path[row].flowSize);
        row++;
    }//while
}//Debug_flow_printf
/*
 * Debug_node_printf - walk every node's adjacency list and print each
 * neighbor's id, capacity, load and weight to stdout.
 *
 * Fixed vs. previous version: removed a malloc() whose result was
 * immediately overwritten inside the loop (leaked), and the unused
 * local max_finode_id.
 */
void Debug_node_printf(){
    printf( "node_num:%d\n",node_num );
    printf( "Debug_node_printf:\n" );
    for (int i=0;i< node_num;i++){
        node *now_ptr = &node_info[i];   /* traversal only: no allocation needed */
        printf( "it is node [%d]\n",i);
        while ( now_ptr->next != NULL ){
            now_ptr = now_ptr->next;
            printf("point:%d  ",now_ptr->point);
            printf("linkCapacity:%lf  ",now_ptr->linkCapacity);
            printf("load:%lf  ",now_ptr->load);
            printf("weight:%lf \n",now_ptr->weight);
        }//while
    }//for
}//Debug_node_printf
<file_sep>/HW1/Minimize_the_table_size_1.c
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define MAX_LINE 1024
#define bool int
#define false 0
#define true 1
/*
 * Packet-classification exercise: read forwarding rules and query
 * points from input.txt, match each point against the rules in order
 * ('*' acts as a wildcard in either rule field, first match wins), and
 * write the matched port -- or "drop" when nothing matches -- to
 * output.txt.
 *
 * Input format:
 *   <rule_number>
 *   <p1> <p2> <port>      (rule_number lines)
 *   <point_number>
 *   <p1> <p2>             (point_number lines)
 *
 * Fixed vs. previous version:
 *   - main() now has an explicit int return type and returns 0
 *     (the old K&R-style declaration is invalid in C99 and later)
 *   - star[] is sized 2 so the "*" literal keeps its NUL terminator
 *     (only star[0] is read, so behavior is unchanged)
 *
 * NOTE(review): the rule/point arrays are fixed at 1024 entries and
 * the counts read from the file are not range-checked -- confirm the
 * inputs are bounded.
 */
int main()
{
    int rule_number = 0;
    int point_number = 0;
    char rule_point1[ 1024 ];
    char rule_point2[ 1024 ];
    char rule_port[ 1024 ];
    char input_point1[ 1024 ];
    char input_point2[ 1024 ];
    char star[2] = "*";          /* wildcard marker */
    bool isfind_port = true;
    /* ---- read rules and query points ---- */
    FILE *fpr;
    fpr=fopen("input.txt","r");
    fscanf(fpr,"%d\n",&rule_number);
    for (int i=0;i<rule_number;i++)
    {
        fscanf(fpr,"%c ",&rule_point1[i]);
        fscanf(fpr,"%c ",&rule_point2[i]);
        fscanf(fpr,"%c\n",&rule_port[i]);
    }//for
    fscanf(fpr,"%d\n",&point_number);
    for (int i=0;i<point_number;i++)
    {
        fscanf(fpr,"%c ",&input_point1[i]);
        fscanf(fpr,"%c\n",&input_point2[i]);
    }//for
    fclose(fpr);
    /* ---- match every point against the rule list, first hit wins ---- */
    FILE *fout;
    fout = fopen( "output.txt","w" );
    for (int i=0;i<point_number;i++){
        fprintf( fout,"%c ",input_point1[i] );
        fprintf( fout, "%c ",input_point2[i] );
        for (int j=0;j<rule_number;j++){
            if ( (input_point1[i] == rule_point1[j]) && (input_point2[i] == rule_point2[j] ) ) {
                fprintf( fout, "%c",rule_port[j] );     /* exact match */
                isfind_port = true;
                break;
            }//if
            else if( (rule_point1[j] == star[0]) &&(input_point2[i] == rule_point2[j] ) ){
                fprintf( fout, "%c",rule_port[j] );     /* "* x" wildcard rule */
                isfind_port = true;
                break;
            }//else if
            else if( (input_point1[i] == rule_point1[j] ) && (rule_point2[j] == star[0]) ){
                fprintf( fout, "%c",rule_port[j] );     /* "x *" wildcard rule */
                isfind_port = true;
                break;
            }//else if
            else{
                isfind_port = false;
            }//else
        }//for
        if ( isfind_port == false ){
            fprintf( fout, "drop" );    /* no rule matched this point */
            isfind_port = true ;
        }//if
        fprintf( fout, "\n" );
    }//for
    fclose(fout);
    return 0;
}
| b940fa6b847cca861d408d22b7632939a9c58eaf | [
"Markdown",
"C"
] | 7 | C | mv123453715/CCU_2019_Data_Structure | 598bfd4fa8bd5eddbd551960d528b7b75f2f520d | 30f8ffd2a41eb1e4030e021014df87fb4dfd3d68 | |
refs/heads/main | <file_sep>import "./styles/index.scss";
import { throttle } from 'throttle-debounce';
import { hoverTimeout, automaticTimeout, animations} from './constants';
var eventRunning = false;
var eventId = -1;
var body = document.getElementsByTagName('body')[0];
var container = document.getElementsByClassName('mouse-container')[0];
window.addEventListener('load', function() {
body.classList.remove('preload');
init();
})
function init() {
body.addEventListener('mousemove', throttle(hoverTimeout, handleEvent, false));
body.addEventListener('click', handleEvent);
setDate();
}
function handleEvent(e) {
var id = e.target.id;
if (!id || eventRunning) return;
var classNameBox = animations[id].forwards;
handleMouseOver(false, classNameBox);
setEvent();
container.addEventListener('mouseleave', function() {
console.log('leave')
setTimeout(() => {
if(eventRunning) {
handleMouseOver(true);
resetEvent();
}
}, hoverTimeout);
});
setTimeout(
automaticReset, automaticTimeout, eventId);
}
function automaticReset(evId) {
// reset event only if there is one
// and if matches the event
if (evId !== -1 && evId === eventId) {
handleMouseOver(true);
resetEvent();
}
}
function handleMouseOver(remove, classToAdd) {
if(!remove) {
removeAllClasses();
body.classList.add(classToAdd);
}
else {
removeAllClasses();
}
}
function removeAllClasses() {
body.classList.remove(animations.e.forwards, animations.a.forwards,animations.h.forwards);
}
function setEvent() {
eventRunning = true;
eventId = Math.floor(Math.random() * 100000);
}
function resetEvent() {
eventRunning = false;
eventId = -1;
}
function setDate(){
var d = new Date();
var n = d.getFullYear();
document.getElementById("date").innerHTML = n;
}
<file_sep>
export var hoverTimeout = 150;
export var automaticTimeout = 3000;
export var animations = {
a: {
forwards: 'rotate-f-a'
},
h: {
forwards: 'rotate-f-h'
},
e: {
forwards: 'rotate-f-e'
},
}<file_sep># Install dependencies with: npm i
# To start dev server run: npm start
# To make production build run: npm run build
| 55b30c1c211deed81a26f96ccbaf02806cfea76e | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | bikilone/Cube | 65c070a0947be6a3fda2e474b92a8319749b7bc3 | 5eff8817e3a96736da599c711d7a29311dc99461 | |
refs/heads/master | <file_sep>base_64_symbol = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
def asc2bin(plain_text):
ascii_list = []
total_bin = ""
binary_txt = ""
for i in range(len(plain_text)):
ascii_list.append(ord(plain_text[i]))
for i in ascii_list:
while True:
div = i // 2
mod = i % 2
i = div
binary_txt += str(mod)
if div == 0:
break
total_bin += ("0" + binary_txt[::-1])
binary_txt = ""
return total_bin
def base64_encrypt(plain_text):
l1 = []
total = ""
return_value = asc2bin(plain_text)
while len(return_value) % 6 != 0:
return_value += "0"
index_bin = [return_value[i:i+6] for i in range(0, len(return_value), 6)]
for i in index_bin:
dec = 0
pow_num = 0
i = list(str(i))
i = i[::-1]
for bin_num in i:
if bin_num == "1":
dec += 2**pow_num
pow_num += 1
l1.append(dec)
for num in l1:
total += base_64_symbol[num]
if len(total) % 4 == 2:
total += "=="
elif len(total) % 4 == 3:
total += "="
return total<file_sep>####### Declaration #######
eng_alphabet_small = "abcdefghijklmnopqrstuvwxyz"
eng_alphabet_big = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
plain_text = ""
####### Definition #######
def caesar_decrypt(cipher_text, key):
global plain_text
for index_num in range(len(cipher_text)):
index_str = cipher_text[index_num]
if index_str.islower() == True:
position_num = eng_alphabet_small.index(index_str)
replace_position_num = (position_num - key) % 26
plain_text += eng_alphabet_small[replace_position_num]
elif index_str.isupper() == True:
position_num = eng_alphabet_big.index(index_str)
replace_position_num = (position_num - key) % 26
plain_text += eng_alphabet_big[replace_position_num]
elif index_str.isalpha() == False:
plain_text += index_str
return plain_text<file_sep>####### Declaration #######
eng_alphabet_small = "abcdefghijklmnopqrstuvwxyz"
eng_alphabet_big = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
cipher_text = ""
####### Definition #######
def caesar_encrypt(plain_text, shift_num):
global cipher_text
for index_num in range(len(plain_text)):
index_str = plain_text[index_num]
if index_str.islower() == True:
position_num = eng_alphabet_small.index(index_str)
replace_position_num = (position_num + shift_num) % 26
cipher_text += eng_alphabet_small[replace_position_num]
elif index_str.isupper() == True:
position_num = eng_alphabet_big.index(index_str)
replace_position_num = (position_num + shift_num) % 26
cipher_text += eng_alphabet_big[replace_position_num]
elif index_str.isalpha() == False:
cipher_text += index_str
return cipher_text<file_sep>####### Declaration #######
eng_alphabet_small = "abcdefghijklmnopqrstuvwxyz"
eng_alphabet_big = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
plain_text = ""
out_come_dic = {}
####### Definition #######
def caesar_decrypt(cipher_text):
global plain_text
for shift_num in range(26):
for index_num in range(len(cipher_text)):
index_str = cipher_text[index_num]
if index_str.islower() == True:
position_num = eng_alphabet_small.index(index_str)
replace_position_num = (position_num - shift_num) % 26
plain_text += eng_alphabet_small[replace_position_num]
elif index_str.isupper() == True:
position_num = eng_alphabet_big.index(index_str)
replace_position_num = (position_num - shift_num) % 26
plain_text += eng_alphabet_big[replace_position_num]
elif index_str.isalpha() == False:
plain_text += index_str
out_come_dic[shift_num] = plain_text
plain_text = ""
return out_come_dic<file_sep>eng_alphabet_small = "abcdefghijklmnopqrstuvwxyz"
eng_alphabet_big = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
plain_text = ""
index_key_l1 = []
list_index = 0
def vignere_decrypt(cipher_text, key):
global index_key_l1
global plain_text
global list_index
for index_key_num in range(len(key)):
index_key = key[index_key_num]
if index_key.isupper() == True:
index_key_num = eng_alphabet_big.index(index_key)
index_key_l1.append(index_key_num)
elif index_key.islower() == True:
index_key_num = eng_alphabet_small.index(index_key)
index_key_l1.append(index_key_num)
elif index_key.isalpha() == False:
index_key_l1.append(index_key)
for index_num in range(len(cipher_text)):
index_str = cipher_text[index_num]
if list_index == len(key):
list_index = 0
if index_str.islower() == True:
position_num = eng_alphabet_small.index(index_str)
replace_position_num = (position_num - index_key_l1[list_index]) % 26
plain_text += eng_alphabet_small[replace_position_num]
list_index += 1
elif index_str.isupper() == True:
position_num = eng_alphabet_big.index(index_str)
replace_position_num = (position_num - index_key_l1[list_index]) % 26
plain_text += eng_alphabet_big[replace_position_num]
list_index += 1
elif index_str.isalpha() == False:
plain_text += index_str
return plain_text | 583797a5692451baba9d5318ce8f31a1373c8968 | [
"Python"
] | 5 | Python | hayanso/cipher | f4668b0bfe79d315cc92e7b02cdbcfcaa7b3f5da | 8a3f5b5633cdd6218d99b4bb4a151dddffa873ff | |
refs/heads/master | <repo_name>XthemeCore/testing<file_sep>/Adder.java
import javax.swing.*;
public class Adder{
public static void main(String[] args){
String firstNumber,secondNumber,result;
int sum;
JTextField firstField = new JTextField(4);
JTextField secondField = new JTextField(4);
JPanel adderPanel = new JPanel();
adderPanel.add(new JLabel("First Number:"));
adderPanel.add(firstField);
adderPanel.add(new JLabel("Second Number:"));
adderPanel.add(secondField);
Object[] choices = {"Calculate"};
int calc = JOptionPane.showOptionDialog(null,
adderPanel,"Adder",
JOptionPane.PLAIN_MESSAGE,
JOptionPane.PLAIN_MESSAGE,
null,
choices,
null);
if (calc == 0) {
firstNumber = firstField.getText();
secondNumber = secondField.getText();
sum = Integer.parseInt(firstNumber) + Integer.parseInt(secondNumber);
result = Integer.toString(sum);
JOptionPane.showMessageDialog(adderPanel,"Sum: " + result,"Adder",JOptionPane.PLAIN_MESSAGE);
}
}
}<file_sep>/README.md
#5610110364
Identification Number
#Testing Project
##Developer
XthemeCore
## License
This project is licensed under th MIT license.
See [LICENSE.md](LICENSE.md) for license rights and limitations. | 35b98c329563399daf878d3d679d03d1c523138e | [
"Markdown",
"Java"
] | 2 | Java | XthemeCore/testing | 39cad25184b209cdec587a999974a190bab6fae9 | 2902769c76b0e41a759691f4ef0f2ee36c83ea25 | |
refs/heads/master | <repo_name>inventorjs/inventor-cli-legacy<file_sep>/skeletons/default/server/app/http/Kernel.js
/**
* http 应用核心
*
* @author : sunkeysun
*/
import HttpKernel from 'inventor'
export default class Kernel extends HttpKernel {
}
<file_sep>/skeletons/default/bin/start-pm2.sh
#!/bin/bash
project_name="project"
base_path="/data/release/$project_name"
target_file="$base_path/package-lock.json"
md5_file="$base_path/.package-lock.json.md5"
cd "$base_path"
if [ ! -f "$md5_file" ]; then
touch "$md5_file"
fi
pre_md5=`cat "$md5_file"`
cur_md5=`md5sum "$target_file" | awk '{print $1}'`
if [ "$pre_md5" != "$cur_md5" ]; then
npm install --production
fi
if [ $? -eq 0 ]; then
printf "$cur_md5" > "$md5_file"
pm2 startOrGracefulReload --only $project_name
fi
<file_sep>/skeletons/default/shared/common/App.jsx
/**
* 通用模块入口
*
* @author : sunkeysun
*/
import React, { Component, Fragment } from 'react'
import { hot } from 'react-hot-loader'
import { renderRoutes } from 'react-router-config'
@hot(module)
export default class Com extends Component {
componentDidMount() {
console.log('mount')
}
handleClick() {
alert('xxxo')
}
render() {
console.log(this.props)
return (
<Fragment>
<div onClick={ this.handleClick.bind(this) }>co3xx</div>
{ renderRoutes(this.props.route.routes) }
</Fragment>
)
}
}
<file_sep>/skeletons/default/ecosystem.config.js
/**
* pm2 配置文件
*
* @author: sunkeysun
*/
const DEPLOY_ENV = 'production'
module.exports = {
apps: [
{
name: 'project',
cwd: '/data/release/project',
script: 'build/server/startup/app.js',
interpreter: 'node',
exec_mode: 'cluster',
instances: -1,
autorestart: true,
watch: false,
max_memory_restart: '4G',
merge_logs: true,
output: 'logs/pm2-project-info.log',
error: 'logs/pm2-project-error.log',
env: {
NODE_ENV: DEPLOY_ENV,
SERVER_PORT: 9199,
},
},
{
name: 'project-local',
script: 'server/startup/app.js',
interpreter: 'babel-node',
autorestart: true,
watch: ['server/'],
max_memory_restart: '4G',
env: {
NODE_ENV: 'local',
BABEL_ENV: 'server',
SERVER_PORT: 9199,
LOCALHOST: '127.0.0.1',
WEB_PORT: 9099,
},
},
],
}
<file_sep>/skeletons/default/bin/build-web.sh
#! /bin/bash
export NODE_ENV=production
export BABEL_ENV=web
BUILD_MODULES=''
if [ $# != 0 ]; then
for MODULE in $*
do
if [ ! $BUILD_MODULES ]; then
BUILD_MODULES="$MODULE"
else
BUILD_MODULES="$BUILD_MODULES&${MODULE}"
fi
done
fi
export BUILD_MODULES
webpack-cli --config webpack/webpack.config.js
export BABEL_ENV=server
if [ $# != 0 ]; then
for MODULE in $*
do
babel "shared/${MODULE}" -d "build/shared/${MODULE}" -D
done
else
babel shared -d build/shared -D
fi
<file_sep>/skeletons/default/docker-compose.yml
version: '3'
services:
project:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
working_dir: ${PROJECT_VOLUME}
network_mode: host
restart: on-failure
volumes:
- ${PWD}:${PROJECT_VOLUME}
- ${LOG_PATH}:${LOG_VOLUME}
- ${TMP_PATH}:${TMP_VOLUME}
environment:
- NODE_ENV=${DEPLOY_ENV}
- SERVER_PORT=${SERVER_PORT}
command: sh -c "node build/server/startup/app.js >> ${LOG_VOLUME}/<-project->-error.log 2>&1"
node:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
working_dir: ${PROJECT_VOLUME}
network_mode: host
build:
context: .
dockerfile: Dockerfile
volumes:
- ${PWD}:${PROJECT_VOLUME}
local-server:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
build:
context: .
dockerfile: Dockerfile
working_dir: ${PROJECT_VOLUME}
network_mode: bridge
volumes:
- ${PWD}:${PROJECT_VOLUME}
ports:
- ${SERVER_PORT}:${SERVER_PORT}
environment:
- NODE_ENV=local
- BABEL_DISABLE_CACHE=1
- BABEL_ENV=server
- SERVER_PORT=${SERVER_PORT}
command: sh -c "nodemon -e js,jsx,scss --delay 300ms --exec babel-node server/startup/app.js"
local-web:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
build:
context: .
dockerfile: Dockerfile
working_dir: ${PROJECT_VOLUME}
network_mode: bridge
volumes:
- ${PWD}:${PROJECT_VOLUME}
ports:
- ${WEB_PORT}:${WEB_PORT}
environment:
- NODE_ENV=local
- BABEL_ENV=web
- SERVER_PORT=${SERVER_PORT}
- WEB_PORT=${WEB_PORT}
command: sh -c "babel-node webpack/devServer.js"
local-node:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
working_dir: ${PROJECT_VOLUME}
network_mode: bridge
build:
context: .
dockerfile: Dockerfile
volumes:
- ${PWD}:${PROJECT_VOLUME}
build-web:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
working_dir: ${PROJECT_VOLUME}
build:
context: .
dockerfile: Dockerfile
volumes:
- ${PWD}:${PROJECT_VOLUME}
environment:
- NODE_ENV=production
- BABEL_ENV=web
command: sh -c "webpack --config webpack/webpack.config.babel.js && BABEL_ENV=server babel shared -d build/shared -D"
build-server:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
working_dir: ${PROJECT_VOLUME}
build:
context: .
dockerfile: Dockerfile
volumes:
- ${PWD}:${PROJECT_VOLUME}
environment:
- NODE_ENV=production
- BABEL_ENV=server
command: sh -c "babel server -d build/server -D"
build-webpack:
image: "${IMAGE_NAME}:${IMAGE_TAG}"
working_dir: ${PROJECT_VOLUME}
build:
context: .
dockerfile: Dockerfile
volumes:
- ${PWD}:${PROJECT_VOLUME}
environment:
- NODE_ENV=production
- BABEL_ENV=server
command: sh -c "babel webpack -d build/webpack -D"
<file_sep>/skeletons/default/build/shared/app/index/text.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _redux = require('./redux');
var _redux2 = _interopRequireDefault(_redux);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = _redux2.default;<file_sep>/skeletons/default/shared/app/index/redux/index.js
/**
* 模块入口
*
* @author sunkeysun
*/
export default {
a: 1
}
<file_sep>/skeletons/default/webpack/config/module.js
/**
* webpack 打包配置
*
* @author : sunkeysun
*/
import path from 'path'
export const app = {
common: {
name: '通用应用模块',
},
index: {
name: '首页应用模块',
},
}
export const common = {
name: '内部公用代码模块',
expose: {
Index: {
name: '#shared/common',
entry: path.resolve(__dirname, '../../shared/common'),
},
},
}
export const vendor = {
name: '第三方库模块',
expose: {
React: 'react',
ReactDOM: 'react-dom',
_: 'lodash',
InventorWeb: 'inventor/web',
InventorShared: 'inventor/shared',
},
}
<file_sep>/skeletons/default/server/config/redis.js
/**
* redis配置文件
*
* @author : sunkeysun
*/
export default {
mode: null, // cluster | single
servers: [
{
host: '127.0.0.1',
port: 6379,
db: 2,
},
],
}
<file_sep>/skeletons/default/server/config/log.js
/**
* 日志配置文件
*
* @author : sunkeysun
*/
import path from 'path'
const logPath = path.resolve(__dirname, '../../logs')
export default {
mode: 'console', // console | single | dateFile | levelDateFile | levelDirDateFile
logPath: logPath,
pattern: 'yyyyMMdd.log'
}
<file_sep>/skeletons/default/build/shared/app/common/components/Error.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');
var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);
var _inherits2 = require('babel-runtime/helpers/inherits');
var _inherits3 = _interopRequireDefault(_inherits2);
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* 错误展示组件
*
* $author : sunkeysun
*/
var styles = {
'errorWrapper': 'index__errorWrapper',
'errorDetail': 'index__errorDetail'
};
var _default = function (_Component) {
(0, _inherits3.default)(_default, _Component);
function _default() {
(0, _classCallCheck3.default)(this, _default);
return (0, _possibleConstructorReturn3.default)(this, (_default.__proto__ || Object.getPrototypeOf(_default)).apply(this, arguments));
}
(0, _createClass3.default)(_default, [{
key: 'render',
value: function render() {
var _props = this.props,
code = _props.code,
_props$msg = _props.msg,
msg = _props$msg === undefined ? '' : _props$msg,
_props$detail = _props.detail,
detail = _props$detail === undefined ? '' : _props$detail;
return _react2.default.createElement(
'div',
{ className: styles.errorWrapper },
_react2.default.createElement(
'p',
null,
code
),
_react2.default.createElement(
'p',
null,
msg
),
_react2.default.createElement('p', { className: styles.errorDetail, dangerouslySetInnerHTML: { __html: detail } })
);
}
}]);
return _default;
}(_react.Component);
exports.default = _default;<file_sep>/skeletons/default/build/shared/apps/index/App.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');
var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);
var _inherits2 = require('babel-runtime/helpers/inherits');
var _inherits3 = _interopRequireDefault(_inherits2);
var _dec, _class; /**
* 应用入口
*
* @author : sunkeysun
*/
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _reactHotLoader = require('react-hot-loader');
var _text = require('./text');
var _text2 = _interopRequireDefault(_text);
var _common = require('../../common');
var common = _interopRequireWildcard(_common);
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Compiled (Babel ES5) application entry component, wrapped with
// react-hot-loader's `hot(module)` decorator for HMR in development.
// NOTE(review): generated artifact — edit the JSX source, not this file.
var App = (_dec = (0, _reactHotLoader.hot)(module), _dec(_class = function (_Component) {
(0, _inherits3.default)(App, _Component);
// Pass-through constructor generated by Babel for `class extends Component`.
function App() {
(0, _classCallCheck3.default)(this, App);
return (0, _possibleConstructorReturn3.default)(this, (App.__proto__ || Object.getPrototypeOf(App)).apply(this, arguments));
}
(0, _createClass3.default)(App, [{
key: 'handleClick',
value: function handleClick() {
// Demo click handler: dumps the common module and pops an alert.
console.log(common);
alert(1);
}
}, {
key: 'render',
value: function render() {
// Inline style: 500x200 box absolutely centered in the viewport.
var style = {
width: 500,
height: 200,
lineHeight: '200px',
color: '#666',
background: '#eee',
textAlign: 'center',
fontSize: 40,
border: '1px solid #ccc',
position: 'absolute',
top: '50%',
left: '50%',
marginLeft: -250,
marginTop: -100
};
return _react2.default.createElement(
'div',
{ style: style, onClick: this.handleClick.bind(this) },
'Welcome, Inventor!'
);
}
}]);
return App;
}(_react.Component)) || _class);
exports.default = App;<file_sep>/skeletons/default/server/config/app.js
/**
* 应用配置
*
* @author : sunkeysun
*/
import webpackConfig from '#webpack/config/common'
const localWeb = webpackConfig.debug.localWeb
/**
 * Application configuration.
 *
 * Fix: this object previously read the port with
 * `_.get(process.env, 'SERVER_PORT', 9199)` but the file never imported
 * lodash, so evaluating the module threw a ReferenceError. Read the
 * environment variable directly instead (no extra dependency needed).
 */
export default {
    // Enable server-side rendering.
    ssr: true,
    keys: [ 'example' ],
    server: {
        host: '0.0.0.0',
        // Overridable via the SERVER_PORT env var (see .env.example).
        port: process.env.SERVER_PORT || 9199,
    },
    // Request timeout in milliseconds.
    timeout: 60000,
    noHash: true,
    // Where the webpack dev server serves assets during development.
    webHost: `http://${localWeb.host}:${localWeb.port}`,
}
<file_sep>/skeletons/default/build/shared/app/index/store/index.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _Common = require('./Common');
var _Common2 = _interopRequireDefault(_Common);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Compiled root store: builds the `common` sub-store from an optional
// initialState bag (generated from shared/app/index/store/index.js).
var Store = function Store() {
var initialState = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
(0, _classCallCheck3.default)(this, Store);
this.common = new _Common2.default(initialState.common);
};
exports.default = Store;<file_sep>/skeletons/default/shared/app/index/store/Common.js
import { observable } from 'mobx'
export default class Common {
@observable staffname = ''
constructor(initialState={}) {
this.staffname = initialState.staffname
}
addName(char) {
this.staffname += char
}
}
<file_sep>/skeletons/default/server/app/http/controllers/index/Index.js
/**
* 站点入口控制器
*
* @author : sunkeysun
*/
import Controller from '../Controller'
/**
 * Controller for the site index route.
 */
export default class IndexController extends Controller {
// Renders the 'index' app, seeding the store's `common` state slice.
// NOTE(review): `renderApp` comes from the framework base Controller —
// its contract is not visible here; confirm against the inventor docs.
async index() {
this.response.renderApp('index', { common: { staffname: 'sunkeysun' } })
}
}
<file_sep>/skeletons/default/server/config/request.js
/**
* 请求全局配置
*
* @author: sunkeysun
*/
export default {
// Log every outgoing request.
log: true,
config: {
// Per-request timeout in milliseconds (10 s).
timeout: 10 * 1000,
},
}
<file_sep>/skeletons/default/shared/app/index/store/index.js
import Common from './Common'
/**
 * Root store: wires each sub-store up from an optional initial-state bag.
 */
export default class Store {
    constructor(initialState = {}) {
        const { common } = initialState
        this.common = new Common(common)
    }
}
<file_sep>/skeletons/default/shared/app/common/containers/Error.jsx
/**
* 错误容器
*
* $author : sunkeysun
*/
import React, { Component, Fragment } from 'react'
import ErrorComponent from '../components/Error'
export default class extends Component {
_getMsg(code) {
switch(code) {
case 403:
return 'Forbidden'
case 404:
return 'Not Found'
case 500:
return 'Internal Server Error'
}
}
render() {
const { code, detail='' } = this.props
const msg = this._getMsg(code)
return (
<Fragment>
<ErrorComponent code={ code } msg={ msg } detail={ detail } />
</Fragment>
)
}
}
<file_sep>/skeletons/default/shared/app/common/store/states/Common.js
/**
 * Plain (non-observable) common state slice.
 */
export default class Common {
    staffname = ''

    constructor(initialState = {}) {
        // Fix: keep the declared '' default instead of clobbering it with
        // `undefined` when no initial staffname is provided.
        this.staffname = initialState.staffname || ''
    }
}
<file_sep>/skeletons/default/build/shared/apps/common/containers/Error.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');
var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);
var _inherits2 = require('babel-runtime/helpers/inherits');
var _inherits3 = _interopRequireDefault(_inherits2);
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _Error = require('../components/Error');
var _Error2 = _interopRequireDefault(_Error);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* 错误容器
*
* $author : sunkeysun
*/
/**
 * Compiled (Babel ES5) output of the Error container: resolves the reason
 * phrase for a status code and renders the Error presentation component.
 * NOTE(review): generated artifact — edit the JSX source, not this file.
 */
var _default = function (_Component) {
(0, _inherits3.default)(_default, _Component);
// Pass-through constructor generated by Babel for `class extends Component`.
function _default() {
(0, _classCallCheck3.default)(this, _default);
return (0, _possibleConstructorReturn3.default)(this, (_default.__proto__ || Object.getPrototypeOf(_default)).apply(this, arguments));
}
(0, _createClass3.default)(_default, [{
key: '_getMsg',
value: function _getMsg(code) {
// No default case: unknown codes return undefined.
switch (code) {
case 403:
return 'Forbidden';
case 404:
return 'Not Found';
case 500:
return 'Internal Server Error';
}
}
}, {
key: 'render',
value: function render() {
var _props = this.props,
code = _props.code,
_props$detail = _props.detail,
detail = _props$detail === undefined ? '' : _props$detail;
var msg = this._getMsg(code);
return _react2.default.createElement(
_react.Fragment,
null,
_react2.default.createElement(_Error2.default, { code: code, msg: msg, detail: detail })
);
}
}]);
return _default;
}(_react.Component);
exports.default = _default;<file_sep>/skeletons/default/web/vendor/__vendor.js
/**
* vendor 入口
*
* @author : sunkeysun
*/
// Maps bundle-global names to the modules packed into the shared vendor
// bundle (consumed by the webpack vendor build — see web/vendor/__vendor.js
// usage in the build config; verify against the webpack setup).
module.exports={
'React': require('react'),
'ReactDOM': require('react-dom')
}
<file_sep>/README.md
# Inventor - Inventor 命令行工具包
### 安装
```
npm install -g inventor-cli
```
### 创建项目骨架
```
inventor-cli new project_name
```
### Editor settings example (Sublime Text)

```json
{
    "auto_complete": false,
    "color_scheme": "Packages/Theme - Flatland/Flatland Dark.tmTheme",
    "font_face": "monaco",
    "font_size": 16,
    "ignored_packages":
    [
    ],
    "show_encoding": true,
    "theme": "Flatland Dark.sublime-theme",
    "translate_tabs_to_spaces": true,
    "trim_trailing_white_space_on_save": true
}
```
<file_sep>/bin/inventor.js
#! /usr/bin/env node
/**
* inventor 命令行入口
*
* @author: sunkeysun
*/
const path = require('path')
const fs = require('fs')
const fse = require('fs-extra')
const program = require('commander')
const packageJson = require('../package.json')
// Register the `new <project>` command: copies a skeleton template into a
// fresh project directory under the current working directory.
program
    .version(packageJson.version)
    .command('new <project>')
    .description('create a inventor [project]')
    .option('-t, --template <template>', 'project skeleton template[default]', 'default')
    .action(function (project, options) {
        const template = options.template
        const templateDir = path.resolve(__dirname, `../templates/${template}`)
        const projectDir = `${process.cwd()}/${project}`
        try {
            // lstatSync would throw for a missing path, but the short-circuit
            // on pathExistsSync protects it.
            if (fse.pathExistsSync(projectDir) && fs.lstatSync(projectDir).isDirectory()) {
                // Refuse to overwrite an existing project directory.
                console.error(`Project directory ${project} already exists, can't create project`)
            } else {
                fse.copySync(templateDir, projectDir)
                // Fix: only report success when the skeleton was actually
                // copied; previously this message was printed even after the
                // "directory exists" error above.
                console.log(`Project ${project} has been created successfully.`)
            }
        } catch (e) {
            console.error(e)
        }
    })

program.parse(process.argv)
<file_sep>/skeletons/default/server/app/http/controllers/Controller.js
/**
* 基础控制器
*
* @author : sunkeysun
*/
import { Controller } from 'inventor'
/**
 * Project-wide base HTTP controller; currently a thin alias of the
 * framework Controller — extend here to share cross-cutting behaviour.
 */
export default class extends Controller {
}
<file_sep>/skeletons/default/Dockerfile
# Minimal runtime image for the project skeleton.
FROM node:8.11.1-alpine

# MAINTAINER is deprecated since Docker 1.13 — use a LABEL instead.
LABEL maintainer="sunkeysun <EMAIL>"

USER root
<file_sep>/skeletons/default/shared/app/common/store/index.js
import CommonState from './states/Common'
/**
 * Root store for the common app.
 */
export default class Store {
constructor(initialState={}) {
this.common = new CommonState(initialState.common)
}
// NOTE(review): stub — the guarded branch is empty, so init() is currently
// a no-op. Presumably intended to (re)initialize the named state slice;
// confirm the intent before relying on it.
init(stateName) {
if (!!stateName && this[stateName]) {
}
}
}
<file_sep>/skeletons/default/.env.example
# Deployment environment name.
DEPLOY_ENV=production
# Port the Node server listens on (read by server/config/app.js and
# webpack/config/common.js).
SERVER_PORT=9199
# Docker image name/tag and host volume paths — presumably consumed by the
# docker-compose setup used in bin/start-docker.sh; verify against it.
IMAGE_NAME=project-env
IMAGE_TAG=0.0.1
PROJECT_VOLUME=/data/project
LOG_VOLUME=/data/project_logs
TMP_VOLUME=/data/project_tmp
# Port the webpack dev server listens on (read by webpack/config/common.js).
WEB_PORT=9099
<file_sep>/skeletons/default/server/startup/app.js
/**
* 启动脚本
*
* @author : sunkeysun
*/
import path from 'path'
import Kernel from '#server/app/http/Kernel'
// Fail fast on unexpected synchronous errors.
process.on('uncaughtException', (error) => {
    console.error(error)
    process.exit(1)
})

// Boot the HTTP kernel rooted at the project directory (two levels up).
const projectRoot = path.resolve(__dirname, '../..')
new Kernel(projectRoot).run()
<file_sep>/skeletons/default/shared/app/index/addon/index.jsx
export { cssList, jsList } from './__build'
<file_sep>/skeletons/default/build/shared/app/index/store/Common.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _desc, _value, _class, _descriptor;
var _mobx = require('mobx');
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _initDefineProp(target, property, descriptor, context) {
if (!descriptor) return;
Object.defineProperty(target, property, {
enumerable: descriptor.enumerable,
configurable: descriptor.configurable,
writable: descriptor.writable,
value: descriptor.initializer ? descriptor.initializer.call(context) : void 0
});
}
function _applyDecoratedDescriptor(target, property, decorators, descriptor, context) {
var desc = {};
Object['ke' + 'ys'](descriptor).forEach(function (key) {
desc[key] = descriptor[key];
});
desc.enumerable = !!desc.enumerable;
desc.configurable = !!desc.configurable;
if ('value' in desc || desc.initializer) {
desc.writable = true;
}
desc = decorators.slice().reverse().reduce(function (desc, decorator) {
return decorator(target, property, desc) || desc;
}, desc);
if (context && desc.initializer !== void 0) {
desc.value = desc.initializer ? desc.initializer.call(context) : void 0;
desc.initializer = undefined;
}
if (desc.initializer === void 0) {
Object['define' + 'Property'](target, property, desc);
desc = null;
}
return desc;
}
function _initializerWarningHelper(descriptor, context) {
throw new Error('Decorating class property failed. Please ensure that transform-class-properties is enabled.');
}
var Common = (_class = function () {
function Common() {
var initialState = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
(0, _classCallCheck3.default)(this, Common);
_initDefineProp(this, 'staffname', _descriptor, this);
this.staffname = initialState.staffname;
}
(0, _createClass3.default)(Common, [{
key: 'addName',
value: function addName(char) {
this.staffname += char;
}
}]);
return Common;
}(), (_descriptor = _applyDecoratedDescriptor(_class.prototype, 'staffname', [_mobx.observable], {
enumerable: true,
initializer: function initializer() {
return '';
}
})), _class);
exports.default = Common;<file_sep>/skeletons/default/shared/app/index/addon/__build.jsx
// Generated asset manifest for the index app — file names carry the
// content hash produced by the build; do not edit by hand.
export const jsList = [
'/static/app/index/index.85d65a0d0b3ed569fbc9.js',
]
export const cssList = [
]
<file_sep>/skeletons/default/bin/start-docker.sh
#!/bin/bash
# Re-deploys the app container, reinstalling production dependencies only
# when package-lock.json changed since the last run (change detection via an
# md5 marker file kept next to the lock file).
project_name="project"
base_path="/data/release/$project_name"
target_file="$base_path/package-lock.json"
md5_file="$base_path/.package-lock.json.md5"

# Fix: abort if the release directory is missing instead of running
# docker-compose from the wrong working directory.
cd "$base_path" || exit 1

if [ ! -f "$md5_file" ]; then
    touch "$md5_file"
fi

pre_md5=$(cat "$md5_file")
cur_md5=$(md5sum "$target_file" | awk '{print $1}')

if [ "$pre_md5" != "$cur_md5" ]; then
    # Lock file changed: refresh production dependencies. Fail loudly instead
    # of silently skipping the restart (the old version keyed the restart off
    # a fragile `$?` check placed after the enclosing `fi`).
    docker-compose run --rm node npm install --production || exit 1
    # '%s' avoids treating the checksum as a printf format string.
    printf '%s' "$cur_md5" > "$md5_file"
fi

sleep 1s
docker-compose stop ${project_name}
sleep 1s
docker-compose up -d ${project_name}
<file_sep>/skeletons/default/build/shared/app/index/addon/__build.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
// Compiled asset manifest — mirrors shared/app/index/addon/__build.jsx.
var jsList = exports.jsList = ['/static/app/index/index.85d65a0d0b3ed569fbc9.js'];
var cssList = exports.cssList = [];
/**
* 应用入口
*
* @author : sunkeysun
*/
// NOTE(review): the absolute /Volumes/... paths below are machine-specific
// build output from the original author's checkout, and they reference a
// `templates/.../shared/apps/...` layout that does not match this repo's
// `skeletons/.../shared/app/...` tree. This entry will not resolve anywhere
// else — regenerate it from the build tooling rather than hand-editing.
import Kernel from 'inventor/web'
import App from '/Volumes/Projects/Github/inventor-cli/templates/default/shared/apps/common/App'
import reducers from '/Volumes/Projects/Github/inventor-cli/templates/default/shared/apps/common/redux'
import webpackConfig from '/Volumes/Projects/Github/inventor-cli/templates/default/webpack/config'
import appConfig from '/Volumes/Projects/Github/inventor-cli/templates/default/shared/common/config/app'
// Boot the client-side kernel with the app component and its config.
const kernel = new Kernel({ webpackConfig, appConfig, App, reducers })
kernel.run()
<file_sep>/skeletons/default/webpack/config/common.js
/**
* webpack 通用配置文件
*
* $author : sunkeysun
*/
import _ from 'lodash'
// Ports and host are overridable via environment variables (see
// .env.example); the defaults suit local development.
const WEB_PORT = _.get(process.env, 'WEB_PORT', 9099)
const SERVER_PORT = _.get(process.env, 'SERVER_PORT', 9199)
const LOCALHOST = _.get(process.env, 'LOCALHOST', '127.0.0.1')
export default {
// Settings for local development builds.
debug: {
localWeb: {
host: LOCALHOST,
port: WEB_PORT,
},
localServer: {
host: LOCALHOST,
port: SERVER_PORT,
},
// Dev assets are served by the webpack dev server.
publicPath: `http://${LOCALHOST}:${WEB_PORT}/static`,
},
release: {
// Production assets are served by the app itself under /static.
publicPath: '/static',
},
}
<file_sep>/skeletons/default/README.md
# Inventor 初始化项目
<file_sep>/skeletons/default/webpack/devServer.js
/**
* react 开发服务器
*
* $author : sunkeysun
*/
import path from 'path'
import { WebpackDevServer } from 'inventor-dev'
import config from './config/common'
// The dev server always runs against the debug build profile.
const buildMode = 'debug'
const { localServer, localWeb, publicPath } = config[buildMode]
const basePath = path.resolve(__dirname, '..')

new WebpackDevServer({ basePath, publicPath, localServer, localWeb, buildMode }).run()
<file_sep>/skeletons/default/shared/vendor/addon/__build.jsx
// Generated vendor-bundle manifest — file name carries the build hash;
// do not edit by hand.
export const jsList = [
'/static/vendor/vendor.434d32d4e62c08a9e5be.js',
]
export const cssList = [
]
<file_sep>/skeletons/default/build/shared/app/common/store/states/Common.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Compiled common state slice. NOTE(review): faithfully mirrors the source
// file's behaviour — the '' default is immediately overwritten, so
// constructing with no initialState leaves staffname undefined.
var Common = function Common() {
var initialState = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
(0, _classCallCheck3.default)(this, Common);
this.staffname = '';
this.staffname = initialState.staffname;
};
exports.default = Common;<file_sep>/skeletons/default/shared/common/index.js
/**
* 通用模块入口
*
* @author : sunkeysun
*/
import App from './App'
export default App
| 2e0518cc63d0b9763fb9bf687423a960b8ad6982 | [
"YAML",
"JavaScript",
"Markdown",
"Dockerfile",
"Shell"
] | 42 | JavaScript | inventorjs/inventor-cli-legacy | 0dea11d17952059c608c3f1ad8757ed1f07c7ea9 | fa4ddbd71d6053fb152cdc575b2c69b0c9f46a48 | |
refs/heads/master | <repo_name>dorntrevor7/Burger-Bucketlist<file_sep>/README.md
# Burger Bucketlist
In this project I created a Burger Bucketlist that allows you to put new burgers that you want to try and be able to delete the burger or move it to the eaten section of the page.
### User Story:
```
As a burger enthusiast
I want to generate a webpage that displays burgers that I would like to try, so that I can try new burgers and get creative.
```
<br>
The following image displays the example:

<br>
Here's the link: [DIG IN!!](https://frozen-shore-70892.herokuapp.com/)
<file_sep>/db/seeds.sql
-- Seed data: one burger still on the bucket list (eaten defaults), two
-- already marked as eaten.
INSERT INTO burgers (name) VALUES ('PB&J Burger');
INSERT INTO burgers (name, eaten) VALUES ('Cheese Burger', true);
INSERT INTO burgers (name, eaten) VALUES ('Hamburger', true);
| 0a6482a90c733aef3087aea5ca1ccd9db5eb5a27 | [
"Markdown",
"SQL"
] | 2 | Markdown | dorntrevor7/Burger-Bucketlist | 0420d6d04c7dd12693bba97d3dbbc2f897dec0f6 | 0b8ee39510df97dd90ef9519b52ab3aefbad93d6 | |
refs/heads/master | <repo_name>alu0100774054/TiroDeBolas<file_sep>/src/es/esit/ull/PAI/TiroDeBolas/Bola.java
package es.esit.ull.PAI.TiroDeBolas;
import java.awt.Color;
public class Bola {
private int centroX;
private int centroY;
private final int RADIO;
private Color fondo;
public Bola(int centroX, int centroY, int radio, Color fondo) {
this.centroX = centroX;
this.centroY = centroY;
this.RADIO = radio;
this.fondo = fondo;
}
public int getCentroX() {
return centroX;
}
public void setCentroX(int centroX) {
this.centroX = centroX;
}
public int getCentroY() {
return centroY;
}
public void setCentroY(int centroY) {
this.centroY = centroY;
}
public Color getFondo() {
return fondo;
}
public void setFondo(Color fondo) {
this.fondo = fondo;
}
public int getRADIO() {
return RADIO;
}
}
<file_sep>/src/es/esit/ull/PAI/TiroDeBolas/TiroBolasGrafico.java
package es.esit.ull.PAI.TiroDeBolas;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.util.ArrayList;
import java.util.Random;
import javax.swing.JPanel;
import javax.swing.Timer;
public class TiroBolasGrafico extends JPanel {
private Bola bolaTiro;
private ArrayList<Bola> bolasJuego;
private ArrayList<Point> trazaTiro;
private Timer timer;
private final int DELAY = 100;
private boolean iniciado = false;
private final int NUM_BOLAS = 10;
private Random generadorAleatorios;
private ArrayList<Color> coloresJuego;
private Point raton;
private Point coordenadasBolaTiro;
private boolean cambiaRaton = false;
private boolean disparado = false;
private boolean mostrarTraza = false;
private Point inicio;
private int desplazamientoBola = 12;
private int actual = 0;
private boolean acabado = false;
/**
 * Sets up initial game state: empty collections, the repaint timer
 * (created but not started), the RNG, panel styling, the colour palette
 * and the mouse listeners.
 */
public TiroBolasGrafico() {
bolaTiro = null;
bolasJuego = new ArrayList<Bola>();
trazaTiro = new ArrayList<Point>();
coloresJuego = new ArrayList<Color>();
timer = new Timer(getDELAY(), new TimerListener());
generadorAleatorios = new Random();
raton = new Point();
coordenadasBolaTiro = new Point();
inicio = new Point();
establecerEstilo();
generarColores();
iniciarOyentes();
// The timer is started on the first shot (see LanzarListener), not here.
//timer.start();
}
private void iniciarOyentes() {
addMouseMotionListener(new RatonListener());
addMouseListener(new LanzarListener());
}
private void establecerEstilo() {
setBackground(new Color(153, 204, 255));
}
private void generarColores() {
getColoresJuego().add(Color.BLUE);
getColoresJuego().add(Color.CYAN);
getColoresJuego().add(Color.GREEN);
getColoresJuego().add(Color.MAGENTA);
getColoresJuego().add(Color.ORANGE);
getColoresJuego().add(Color.PINK);
getColoresJuego().add(Color.RED);
getColoresJuego().add(Color.YELLOW);
}
/**
 * Repaints the scene: the first call lays out and draws the game balls;
 * subsequent calls redraw the balls plus the aim arrow, and — once a shot
 * has been fired — the animated trace of the launched ball.
 */
@Override
protected void paintComponent(Graphics g) {
super.paintComponent(g);
if (!isIniciado()) {
dibujarBolas(g);
setIniciado(true);
} else {
// NOTE(review): both branches of this conditional are identical, so the
// isCambiaRaton() test currently has no effect — confirm intent.
if (isCambiaRaton()) {
dibujarBolas(g);
dibujarFlecha(g);
} else {
dibujarBolas(g);
dibujarFlecha(g);
}
if (isDisparado()) {
dibujarBolas(g);
dibujarFlecha(g);
dibujarTraza(g);
}
}
}
private void dibujarTraza(Graphics g) {
if (isMostrarTraza()) {
for (int i = 0; i < getTrazaTiro().size(); i++) {
g.setColor(getBolaTiro().getFondo());
g.fillOval((int)getTrazaTiro().get(getActual()).getX() - getBolaTiro().getRADIO() / 2,
(int) getTrazaTiro().get(getActual()).getY() - getBolaTiro().getRADIO() / 2,
(int) getBolaTiro().getRADIO(),
(int) getBolaTiro().getRADIO());
}
} else {
if (getActual() < getTrazaTiro().size() - 1) {
g.setColor(getBolaTiro().getFondo());
g.fillOval((int)getTrazaTiro().get(getActual()).getX() - getBolaTiro().getRADIO() / 2,
(int) getTrazaTiro().get(getActual()).getY() - getBolaTiro().getRADIO() / 2,
(int) getBolaTiro().getRADIO(),
(int) getBolaTiro().getRADIO());
setActual(getActual() + 1);
} else {
g.setColor(getBolaTiro().getFondo());
g.fillOval((int)getTrazaTiro().get(getActual()).getX() - getBolaTiro().getRADIO() / 2,
(int) getTrazaTiro().get(getActual()).getY() - getBolaTiro().getRADIO() / 2,
(int) getBolaTiro().getRADIO(),
(int) getBolaTiro().getRADIO());
}
}
}
private void dibujarFlecha(Graphics g) {
Graphics2D g2d = (Graphics2D)g;
g.setColor(Color.BLACK);
g2d.setStroke(new BasicStroke(5.0F));
g2d.drawLine((int) this.getWidth() / 2,
(int) this.getHeight(),
(int) getRaton().getX(),
(int) getRaton().getY());
}
private void dibujarBolas(Graphics g) {
if(!isIniciado()) {
generarBolas();
}
// Bolas del juego.
for (int i = 0; i < getNUM_BOLAS(); i++) {
g.setColor(getBolasJuego().get(i).getFondo());
g.fillOval((int) (getBolasJuego().get(i).getCentroX() - getBolasJuego().get(i).getRADIO() / 2),
0,
getBolasJuego().get(i).getRADIO(),
getBolasJuego().get(i).getRADIO());
}
// Bolas del usuario.
if(!isDisparado()) {
g.setColor(getBolaTiro().getFondo());
g.fillOval(getBolaTiro().getCentroX() - getBolaTiro().getRADIO() / 2,
getBolaTiro().getCentroY() - getBolaTiro().getRADIO() / 2,
getBolaTiro().getRADIO(),
getBolaTiro().getRADIO());
}
}
/**
 * Creates the game state: a top row of NUM_BOLAS randomly coloured balls
 * sized to span the panel width, plus the player's shot ball centred at
 * the bottom edge.
 */
private void generarBolas() {
// Bolas del juego.
int radio = this.getWidth() / getNUM_BOLAS();
int inicioX = 0;
int inicioY = 0;
for (int i = 0; i < getNUM_BOLAS(); i++) {
// NOTE(review): nextInt(size - 1) can never pick the last palette
// colour — possibly an off-by-one; confirm intent.
int colorAleatorio = getGeneradorAleatorios().nextInt(getColoresJuego().size() - 1);
getBolasJuego().add(new Bola(inicioX + radio / 2, inicioY, radio, getColoresJuego().get(colorAleatorio)));
inicioX += radio;
}
// Bola del usuario.
setInicio(new Point(this.getWidth() / 2, this.getHeight()));
int colorAleatorio = getGeneradorAleatorios().nextInt(getColoresJuego().size() - 1);
setCoordenadasBolaTiro(new Point((int) getInicio().getX(), (int) getInicio().getY()));
setBolaTiro(new Bola((int) getCoordenadasBolaTiro().getX(),(int) getCoordenadasBolaTiro().getY(), radio, getColoresJuego().get(colorAleatorio)));
}
/**
 * Computes the shot ball's path towards the clicked point, bouncing the
 * slope off the side walls, and records every intermediate centre point
 * in trazaTiro; the ball's centre is moved along as the points are added
 * and ends near the top row.
 */
private void lanzar(int ratonX, int ratonY) {
int correcion = getBolaTiro().getRADIO() / 2;
System.out.println("punto origen " + getBolaTiro().getCentroX() + ", " + getBolaTiro().getCentroY() + " raton:" + ratonX + ", " + ratonY);
// Slope of the line from the ball centre to the click point.
double pendiente = (float) (ratonY - getBolaTiro().getCentroY()) / (ratonX - getBolaTiro().getCentroX());
System.out.println("pendiente: " + pendiente);
// Step upwards by desplazamientoBola per iteration until the top is reached.
while (getBolaTiro().getCentroY() >= getBolaTiro().getRADIO() + correcion ) {
Point nuevoPunto = null;
double nuevaY = getBolaTiro().getCentroY() - getDesplazamientoBola();
double nuevaX = ((nuevaY - getBolaTiro().getCentroY()) / pendiente) + getBolaTiro().getCentroX();
System.out.println(nuevaX + ", " + nuevaY);
// Wall bounce: flip the slope when X leaves the playing area.
// NOTE(review): the right-hand bound compares X against getHeight();
// getWidth() looks intended — confirm against the panel geometry.
if (nuevaX <= getBolaTiro().getRADIO() / 2 || nuevaX >= (this.getHeight())) {
pendiente = -pendiente;
nuevaX = ((nuevaY - getBolaTiro().getCentroY()) / pendiente) + getBolaTiro().getCentroX();
}
nuevoPunto = new Point((int) nuevaX, (int) nuevaY);
getTrazaTiro().add(nuevoPunto);
getBolaTiro().setCentroX((int) nuevoPunto.getX());
getBolaTiro().setCentroY((int) nuevoPunto.getY());
}
System.out.println(getBolaTiro().getCentroX() + ", " + getBolaTiro().getCentroY());
}
class TimerListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
repaint();
}
}
class RatonListener implements MouseMotionListener {
@Override
public void mouseDragged(MouseEvent e) {
// TODO Auto-generated method stub
}
@Override
public void mouseMoved(MouseEvent e) {
if (!disparado) {
setRaton(new Point(e.getX(), e.getY()));
cambiaRaton = true;
repaint();
cambiaRaton = false;
}
}
}
class LanzarListener implements MouseListener {
@Override
public void mouseClicked(MouseEvent e) {
lanzar(e.getX(), e.getY());
setDisparado(true);
getTimer().start();
//repaint();
}
@Override
public void mousePressed(MouseEvent e) {
// TODO Auto-generated method stub
}
@Override
public void mouseReleased(MouseEvent e) {
// TODO Auto-generated method stub
}
@Override
public void mouseEntered(MouseEvent e) {
// TODO Auto-generated method stub
}
@Override
public void mouseExited(MouseEvent e) {
// TODO Auto-generated method stub
}
}
public boolean isAcabado() {
return acabado;
}
public void setAcabado(boolean acabado) {
this.acabado = acabado;
}
public int getActual() {
return actual;
}
public void setActual(int actual) {
this.actual = actual;
}
public int getDesplazamientoBola() {
return desplazamientoBola;
}
public void setDesplazamientoBola(int desplazamientoBola) {
this.desplazamientoBola = desplazamientoBola;
}
public Point getInicio() {
return inicio;
}
public void setInicio(Point inicio) {
this.inicio = inicio;
}
public boolean isMostrarTraza() {
return mostrarTraza;
}
public void setMostrarTraza(boolean mostrarTraza) {
this.mostrarTraza = mostrarTraza;
}
public boolean isDisparado() {
return disparado;
}
public void setDisparado(boolean disparado) {
this.disparado = disparado;
}
public boolean isCambiaRaton() {
return cambiaRaton;
}
public void setCambiaRaton(boolean cambiaRaton) {
this.cambiaRaton = cambiaRaton;
}
public Point getCoordenadasBolaTiro() {
return coordenadasBolaTiro;
}
public void setCoordenadasBolaTiro(Point coordenadasBolaTiro) {
this.coordenadasBolaTiro = coordenadasBolaTiro;
}
public Point getRaton() {
return raton;
}
public void setRaton(Point raton) {
this.raton = raton;
}
public ArrayList<Color> getColoresJuego() {
return coloresJuego;
}
public void setColoresJuego(ArrayList<Color> coloresJuego) {
this.coloresJuego = coloresJuego;
}
public Random getGeneradorAleatorios() {
return generadorAleatorios;
}
public void setGeneradorAleatorios(Random generadorAleatorios) {
this.generadorAleatorios = generadorAleatorios;
}
public int getNUM_BOLAS() {
return NUM_BOLAS;
}
public boolean isIniciado() {
return iniciado;
}
public void setIniciado(boolean iniciado) {
this.iniciado = iniciado;
}
public Bola getBolaTiro() {
return bolaTiro;
}
public void setBolaTiro(Bola bolaTiro) {
this.bolaTiro = bolaTiro;
}
public ArrayList<Bola> getBolasJuego() {
return bolasJuego;
}
public void setBolasJuego(ArrayList<Bola> bolasJuego) {
this.bolasJuego = bolasJuego;
}
public ArrayList<Point> getTrazaTiro() {
return trazaTiro;
}
public void setTrazaTiro(ArrayList<Point> trazaTiro) {
this.trazaTiro = trazaTiro;
}
public Timer getTimer() {
return timer;
}
public void setTimer(Timer timer) {
this.timer = timer;
}
public int getDELAY() {
return DELAY;
}
}
| 013efea89605ffb8bfd9a4ce7a2f54521e06b186 | [
"Java"
] | 2 | Java | alu0100774054/TiroDeBolas | 005ce6536f4944f9484883b728719fdd27a5b4ee | d1f5805db874d7dd753d8717c7a6d39bbea52ffd | |
refs/heads/master | <file_sep>/*
GEODIFF - MIT License
Copyright (C) 2019 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "geodiffutils.hpp"
/**
 * End-to-end round-trip check for a single commit:
 *  1. diff base -> modified, verify the expected change count;
 *  2. apply the changeset to a copy of base, verify it equals modified;
 *  3. invert and re-apply, verify the copy is back to base;
 *  4. rebase a copy of modified onto base (trivial rebase), verify no
 *     conflicts and an unchanged result;
 *  5. dump the changeset as JSON (full + summary) for inspection.
 * Returns false (after logging the failing step) on any mismatch.
 */
bool _test(
const std::string &testname,
const std::string &basename,
const std::string &modifiedname,
int expected_changes,
bool ignore_timestamp_change = false
)
{
std::cout << testname << std::endl;
makedir( pathjoin( tmpdir(), testname ) );
// All outputs go under a per-test temp directory.
std::string base = pathjoin( testdir(), basename );
std::string modified = pathjoin( testdir(), modifiedname );
std::string changeset = pathjoin( tmpdir(), testname, "changeset.bin" );
std::string changeset_inv = pathjoin( tmpdir(), testname, "changeset_inv.bin" );
std::string patched = pathjoin( tmpdir(), testname, "patched.gpkg" );
std::string patched2 = pathjoin( tmpdir(), testname, "patched2.gpkg" );
std::string conflict = pathjoin( tmpdir(), testname, "conflict.json" );
std::string json = pathjoin( tmpdir(), testname, testname + ".json" );
std::string json_summary = pathjoin( tmpdir(), testname, testname + "_summary.json" );
if ( GEODIFF_createChangeset( testContext(), base.c_str(), modified.c_str(), changeset.c_str() ) != GEODIFF_SUCCESS )
{
std::cout << "err GEODIFF_createChangeset" << std::endl;
return false;
}
int nchanges = GEODIFF_changesCount( testContext(), changeset.c_str() );
if ( nchanges != expected_changes )
{
std::cout << "err GEODIFF_listChanges " << nchanges << " vs " << expected_changes << std::endl;
return false;
}
filecopy( patched, base );
if ( GEODIFF_applyChangeset( testContext(), patched.c_str(), changeset.c_str() ) != GEODIFF_SUCCESS )
{
std::cout << "err GEODIFF_applyChangeset" << std::endl;
return false;
}
// check that now it is same file with modified
if ( !equals( patched, modified, ignore_timestamp_change ) )
{
std::cout << "err equals" << std::endl;
return false;
}
// create inversed changeset
if ( GEODIFF_invertChangeset( testContext(), changeset.c_str(), changeset_inv.c_str() ) != GEODIFF_SUCCESS )
{
std::cout << "err GEODIFF_invertChangeset" << std::endl;
return false;
}
// apply inversed changeset
if ( GEODIFF_applyChangeset( testContext(), patched.c_str(), changeset_inv.c_str() ) != GEODIFF_SUCCESS )
{
std::cout << "err GEODIFF_applyChangeset inversed" << std::endl;
return false;
}
// check that now it is same file with base
if ( !equals( patched, base, ignore_timestamp_change ) )
{
std::cout << "err equals" << std::endl;
return false;
}
// check that direct rebase works
filecopy( patched2, modified );
if ( GEODIFF_rebase( testContext(), base.c_str(), base.c_str(), patched2.c_str(), conflict.c_str() ) != GEODIFF_SUCCESS )
{
std::cout << "err GEODIFF_rebase inversed" << std::endl;
return false;
}
// Rebasing onto an unchanged base must be conflict-free and a no-op.
int nConflicts = countConflicts( conflict );
if ( nConflicts != 0 )
{
std::cout << "conflicts found" << std::endl;
return false;
}
if ( !equals( patched2, modified, ignore_timestamp_change ) )
{
std::cout << "err equals" << std::endl;
return false;
}
printJSON( changeset, json, json_summary );
return true;
}
TEST( SingleCommitSqlite3Test, test_sqlite_no_gis )
{
std::cout << "sqlite 2 updated 1 added 1 deleted" << std::endl;
bool ret = _test( "pure_sqlite",
"base.sqlite",
pathjoin( "pure_sqlite", "modified_base.sqlite" ),
4
);
ASSERT_TRUE( ret );
}
TEST( SingleCommitSqlite3Test, geopackage )
{
std::cout << "geopackage 1 updated geometry" << std::endl;
bool ret = _test( "1_geopackage",
"base.gpkg",
pathjoin( "1_geopackage", "modified_1_geom.gpkg" ),
1
);
ASSERT_TRUE( ret );
}
TEST( SingleCommitSqlite3Test, geopackage_complex )
{
std::cout << "geopackage 2 new, 1 move, 1 changed attr, 1 delete" << std::endl;
bool ret = _test( "complex",
"base.gpkg",
pathjoin( "complex", "complex1.gpkg" ),
6
);
ASSERT_TRUE( ret );
}
TEST( SingleCommitSqlite3Test, retype_attribute )
{
std::cout << "geopackage attribute count is same, have same name, but different type" << std::endl;
bool ret = _test( "retype_attribute",
pathjoin( "modified_scheme", "added_attribute.gpkg" ),
pathjoin( "modified_scheme", "added_attribute_different_type.gpkg" ),
4
);
ASSERT_FALSE( ret );
}
TEST( SingleCommitSqlite3Test, reprojected )
{
std::cout << "geopackage change of crs" << std::endl;
bool ret = _test( "reprojected",
pathjoin( "modified_scheme", "reprojected.gpkg" ),
pathjoin( "modified_scheme", "reprojected2.gpkg" ),
6
);
ASSERT_FALSE( ret );
}
TEST( SingleCommitSqlite3Test, SingleCommitFkTest )
{
std::cout << "database with foreign keys" << std::endl;
bool ret = _test( "fk_1_update",
"base_fk.gpkg",
pathjoin( "fk_1_update", "modified_fk.gpkg" ),
3
);
ASSERT_TRUE( ret );
}
// GeoPackage databases carry many gpkg_* triggers; verify that both plain
// diffing and rebase work on such a database.
TEST( SingleCommitSqlite3Test, GpkgTriggersTest )
{
  std::cout << "geopackage with many gpkg_ triggers" << std::endl;
  // Expected change count is read from a pre-computed changeset file shipped
  // with the test data, so the number is not hard-coded here.
  bool ret1 = _test( "gpkg_triggers",
                     pathjoin( "gpkg_triggers", "db-base.gpkg" ),
                     pathjoin( "gpkg_triggers", "db-modified.gpkg" ),
                     GEODIFF_changesCount( testContext(),
                                           pathjoin( testdir(), "gpkg_triggers", "modified-changeset.diff" ).c_str() )
                   );
  // Rebase must also succeed on the trigger-laden database.
  bool ret2 = GEODIFF_createRebasedChangeset(
                testContext(),
                pathjoin( testdir(), "gpkg_triggers", "db-base.gpkg" ).c_str(),
                pathjoin( testdir(), "gpkg_triggers", "db-modified.gpkg" ).c_str(),
                pathjoin( testdir(), "gpkg_triggers", "modified-changeset.diff" ).c_str(),
                pathjoin( testdir(), "gpkg_triggers", "res.diff" ).c_str(),
                pathjoin( testdir(), "gpkg_triggers", "res.conflict" ).c_str()
              ) == GEODIFF_SUCCESS;
  ASSERT_TRUE( ret1 && ret2 );
  // remove created diff and conflict files
  // NOTE(review): if the assertion above fails, ASSERT_* returns from the test
  // body and this cleanup is skipped — leftover files may remain in testdir.
  remove( pathjoin( testdir(), "gpkg_triggers", "res.diff" ).c_str() );
  remove( pathjoin( testdir(), "gpkg_triggers", "res.conflict" ).c_str() );
}
// Non-ASCII (UTF-8) characters in file and directory names must be handled.
// The escape sequences below are raw UTF-8 bytes:
//   \xc5\xa1 = š, \xc4\x8d = č, \xc3\xa9 = é, \xc3\xa1 = á
TEST( SingleCommitSqlite3Test, NonAsciiCharactersTest )
{
  std::cout << "non ascii characters in path test" << std::endl;
  bool ret = _test(
                "non_ascii_\xc5\xa1", // add special sign also here, because changeset file is created from it
                pathjoin( "utf_test_\xc5\xa1\xc4\x8d\xc3\xa9", "test\xc3\xa1\xc3\xa1.gpkg" ), // testaa
                pathjoin( "utf_test_\xc5\xa1\xc4\x8d\xc3\xa9", "test\xc4\x8d\xc4\x8d.gpkg" ), // testcc
                2
              );
  ASSERT_TRUE( ret );
}
// Single-quote characters in directory names must not break diffing
// (they would if paths were ever interpolated into SQL unescaped).
TEST( SingleCommitSqlite3Test, QuoteCharacterGpkgName )
{
  std::cout << "path with quote character" << std::endl;
  const std::string base = pathjoin( "dir_with_quote's's", "base.gpkg" );
  const std::string modified = pathjoin( "dir_with_quote's's", "recreated.gpkg" );
  ASSERT_TRUE( _test( "quote's test", base, modified, 8 ) );
}
// Standard gtest entry point with project-level setup/teardown hooks.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();
  const int rc = RUN_ALL_TESTS();
  finalize_test();
  return rc;
}
<file_sep>
-- Test fixture: schema gd_tz_base with a point table carrying a
-- timezone-less timestamp column.
DROP SCHEMA IF EXISTS gd_tz_base CASCADE;
CREATE SCHEMA gd_tz_base;
CREATE TABLE gd_tz_base.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "created" TIMESTAMP WITHOUT TIME ZONE);
INSERT INTO gd_tz_base.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), '2021-10-28 18:34:19.474');
INSERT INTO gd_tz_base.simple VALUES (2, ST_GeomFromText('Point (-0.36388508891928861 0.56224350205198359)', 4326), '2021-10-28 18:34:19.476');
-- fid 3 deliberately omits "created"; the trailing column gets NULL
INSERT INTO gd_tz_base.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326));
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef POSTGRESDRIVER_H
#define POSTGRESDRIVER_H
#include "driver.h"
extern "C"
{
#include <libpq-fe.h>
}
/**
 * Driver implementation for PostgreSQL databases.
 *
 * Implements the same Driver interface as SqliteDriver; the "base" and
 * "modified" datasets are PostgreSQL schemas (see mBaseSchema /
 * mModifiedSchema below).
 *
 * NOTE(review): the exact connection parameter keys accepted by open()/create()
 * are defined in postgresdriver.cpp — confirm there before relying on them.
 */
class PostgresDriver : public Driver
{
  public:
    explicit PostgresDriver( const Context *context );
    ~PostgresDriver() override;
    void open( const DriverParametersMap &conn ) override;
    void create( const DriverParametersMap &conn, bool overwrite = false ) override;
    std::vector<std::string> listTables( bool useModified = false ) override;
    TableSchema tableSchema( const std::string &tableName, bool useModified = false ) override;
    void createChangeset( ChangesetWriter &writer ) override;
    void applyChangeset( ChangesetReader &reader ) override;
    void createTables( const std::vector<TableSchema> &tables ) override;
    void dumpData( ChangesetWriter &writer, bool useModified = false ) override;
    void checkCompatibleForRebase( bool useModified = false ) override;
  private:
    //! Logs details about a conflicting entry hit while applying a changeset
    void logApplyConflict( const std::string &type, const ChangesetEntry &entry ) const;
    //! Internal open helper shared by open() and create()
    void openPrivate( const DriverParametersMap &conn );
    //! Closes the libpq connection (mConn)
    void close();
    //! Returns the name of the sequence object behind the table's
    //! auto-increment pkey column; the column index is returned via
    //! autoIncrementPkeyIndex (presumably -1 when there is none — confirm
    //! in postgresdriver.cpp)
    std::string getSequenceObjectName( const TableSchema &tbl, int &autoIncrementPkeyIndex );
    //! Updates the named sequence so it accounts for maxValue (presumably
    //! called after inserts so new rows do not collide — confirm in .cpp)
    void updateSequenceObject( const std::string &seqName, int64_t maxValue );
    PGconn *mConn = nullptr;        //!< libpq connection handle
    std::string mBaseSchema;        //!< schema holding the "base" dataset
    std::string mModifiedSchema;    //!< schema holding the "modified" dataset
};
#endif // POSTGRESDRIVER_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "geodiff.h"
#include "geodiffcontext.hpp"
#include "geodifflogger.hpp"
#include "geodiffutils.hpp"
#include <iostream>
#include <functional>
#include <algorithm>
Context::Context() = default;
//! Returns the logger owned by this context (mutable access).
Logger &Context::logger()
{
  return mLogger;
}
//! Returns the logger owned by this context (read-only access).
const Logger &Context::logger() const
{
  return mLogger;
}
//! Sets the list of table names that geodiff operations should ignore.
//! An empty vector clears the skip list (see isTableSkipped()).
void Context::setTablesToSkip( const std::vector<std::string> &tablesToSkip )
{
  mTablesToSkip = tablesToSkip;
}
/**
 * Returns true if the given table is on the skip list set by
 * setTablesToSkip(), i.e. the table should be ignored by geodiff operations.
 * Returns false when the skip list is empty.
 */
bool Context::isTableSkipped( const std::string &tableName ) const
{
  if ( mTablesToSkip.empty() )
  {
    return false;
  }
  // std::find is clearer than the original
  // std::bind( std::equal_to<std::string>(), _1, tableName ) construction
  // passed to std::any_of; behavior is identical.
  return std::find( mTablesToSkip.begin(), mTablesToSkip.end(), tableName ) != mTablesToSkip.end();
}
<file_sep># GEODIFF (MIT License)
# Copyright (C) 2019 <NAME>
CMAKE_MINIMUM_REQUIRED(VERSION 3.10)
PROJECT(geodiffproject)
# Hide symbols by default; only what is explicitly exported becomes public API.
SET(CMAKE_CXX_VISIBILITY_PRESET hidden)
SET(CMAKE_VISIBILITY_INLINES_HIDDEN 1)
# if changed, change also run_cppcheck.sh
SET(CMAKE_CXX_STANDARD 11)
# set path to additional CMake modules
SET(CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH})
# User-configurable build options.
SET(ENABLE_TESTS TRUE CACHE BOOL "Build tests?")
SET(ENABLE_COVERAGE FALSE CACHE BOOL "Enable GCOV code coverage?")
SET(BUILD_TOOLS TRUE CACHE BOOL "Build tool executables?")
SET(BUILD_STATIC FALSE CACHE BOOL "Build static libraries?")
SET(BUILD_SHARED TRUE CACHE BOOL "Build shared libraries?")
SET(WITH_POSTGRESQL FALSE CACHE BOOL "Whether to build with PostgreSQL driver")
SET(PEDANTIC TRUE CACHE BOOL "Determines if we should compile in pedantic mode.")
# Guard against a removed legacy option so stale caches fail loudly.
IF(WITH_INTERNAL_SQLITE3)
  MESSAGE(FATAL_ERROR "The WITH_INTERNAL_SQLITE3 option has been removed: geodiff now always uses external SQLite3 library")
ENDIF()
# When built via scikit-build (for pygeodiff wheels), the library name embeds
# the pygeodiff version and Python suffix; otherwise it is plain "geodiff".
IF(SKBUILD)
  MESSAGE(STATUS "The geodiff is built using scikit-build for pygeodiff Python package")
  FIND_PACKAGE(PythonExtensions REQUIRED)
  SET(GEODIFF_NAME "pygeodiff-${PYGEODIFFVERSION}-python${GEODIFF_NAME_SUFFIX}")
  IF (CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
    SET(GEODIFF_NAME "${GEODIFF_NAME}-win32")
  ENDIF (CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
ELSE(SKBUILD)
  SET(GEODIFF_NAME geodiff)
ENDIF(SKBUILD)
#############################################################
# Setup code coverage
IF(ENABLE_COVERAGE)
  IF (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
    MESSAGE(FATAL_ERROR "gcov coverage must be run in debug build type")
  ENDIF(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
  IF(MSVC)
    MESSAGE(FATAL_ERROR "gcov coverage is not implemented for Windows")
  ENDIF(MSVC)
  SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
ENDIF(ENABLE_COVERAGE)
# CMP0074: honor <Package>_ROOT variables in find_package (CMake >= 3.12).
IF(NOT "${CMAKE_VERSION}" VERSION_LESS "3.12")
  CMAKE_POLICY(SET CMP0074 NEW)
ENDIF(NOT "${CMAKE_VERSION}" VERSION_LESS "3.12")
# External SQLite3 is mandatory (the internal copy was removed, see above).
FIND_PACKAGE(SQLite3 REQUIRED)
MESSAGE(STATUS "SQLite3 version: ${SQLite3_VERSION}")
MESSAGE(STATUS "SQLite3 include dirs: ${SQLite3_INCLUDE_DIRS}")
MESSAGE(STATUS "SQLite3 library: ${SQLite3_LIBRARIES}")
IF (WITH_POSTGRESQL)
  FIND_PACKAGE(Postgres REQUIRED)
  IF (POSTGRES_FOUND)
    SET(HAVE_POSTGRES TRUE) # used in geodiff_config.hpp
  ENDIF()
ENDIF()
# Get libgpkg dependency
# NOTE(review): the download below has no EXPECTED_HASH and the extraction
# result ("rv") is never checked — consider verifying both; TODO confirm
# whether this is intentional (the commit hash pins the content by URL).
IF(EXISTS "${CMAKE_BINARY_DIR}/libgpkg.tar.gz")
  MESSAGE("libgpkg already downloaded")
ELSE()
  FILE(DOWNLOAD "https://github.com/benstadin/libgpkg/archive/0822c5cba7e1ac2c2806e445e5f5dd2f0d0a18b4.tar.gz" ${CMAKE_BINARY_DIR}/libgpkg.tar.gz )
  FILE(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/external")
  EXECUTE_PROCESS(COMMAND ${CMAKE_COMMAND} -E tar xfz ${CMAKE_BINARY_DIR}/libgpkg.tar.gz
                  WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/external
                  RESULT_VARIABLE rv)
ENDIF()
# libgpkg sources are compiled directly into the geodiff targets (no separate
# library); the directory name embeds the pinned upstream commit hash.
SET(libgpkg_dir ${CMAKE_BINARY_DIR}/external/libgpkg-0822c5cba7e1ac2c2806e445e5f5dd2f0d0a18b4)
SET(libgpkg_src
  ${libgpkg_dir}/gpkg/binstream.c
  ${libgpkg_dir}/gpkg/blobio.c
  ${libgpkg_dir}/gpkg/error.c
  ${libgpkg_dir}/gpkg/fp.c
  ${libgpkg_dir}/gpkg/geomio.c
  ${libgpkg_dir}/gpkg/gpkg.c
  ${libgpkg_dir}/gpkg/gpkg_db.c
  ${libgpkg_dir}/gpkg/gpkg_geom.c
  ${libgpkg_dir}/gpkg/i18n.c
  ${libgpkg_dir}/gpkg/sql.c
  ${libgpkg_dir}/gpkg/spatialdb.c
  ${libgpkg_dir}/gpkg/spl_db.c
  ${libgpkg_dir}/gpkg/spl_geom.c
  ${libgpkg_dir}/gpkg/strbuf.c
  ${libgpkg_dir}/gpkg/wkb.c
  ${libgpkg_dir}/gpkg/wkt.c
  ${libgpkg_dir}/gpkg/atomic_ops.h
)
# Third-party code: silence deprecation warnings we cannot fix upstream.
IF (NOT WIN32)
  SET_SOURCE_FILES_PROPERTIES(${libgpkg_src} PROPERTIES COMPILE_FLAGS "-Wno-deprecated-declarations")
ENDIF (NOT WIN32)
INCLUDE(CheckIncludeFile)
# libgpkg's own platform checks (thread-local storage, locale support).
INCLUDE( ${libgpkg_dir}/gpkg/cmake/UseTLS.cmake )
CHECK_TLS()
INCLUDE( ${libgpkg_dir}/gpkg/cmake/UseLocale.cmake )
CHECK_LOCALE()
# check the version in the libgpkg main CMakeLists.txt
SET(gpkg_VERSION_MAJOR 0)
SET(gpkg_VERSION_MINOR 10)
SET(gpkg_VERSION_PATCH 0)
ADD_DEFINITIONS( -DGPKG_HAVE_CONFIG_H )
CONFIGURE_FILE( "${libgpkg_dir}/gpkg/config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/config.h" )
INCLUDE_DIRECTORIES(
  ${CMAKE_CURRENT_SOURCE_DIR}/src
  ${CMAKE_CURRENT_SOURCE_DIR}/src/3rdparty
  ${CMAKE_CURRENT_SOURCE_DIR}/src/drivers
  ${CMAKE_CURRENT_BINARY_DIR}
  ${SQLite3_INCLUDE_DIRS}
)
# create geodiff_config.h
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/cmake_templates/geodiff_config.hpp.in ${CMAKE_BINARY_DIR}/geodiff_config.hpp)
# build
# Core library sources (headers listed so IDEs show them in the target).
SET(geodiff_src
  ${geodiff_src}
  src/geodiff.cpp
  src/geodiff.h
  src/geodiffutils.cpp
  src/geodiffutils.hpp
  src/geodiffrebase.cpp
  src/geodiffrebase.hpp
  src/geodifflogger.cpp
  src/geodifflogger.hpp
  src/geodiffcontext.cpp
  src/geodiffcontext.hpp
  src/changeset.h
  src/changesetconcat.cpp
  src/changesetreader.cpp
  src/changesetreader.h
  src/changesetutils.cpp
  src/changesetutils.h
  src/changesetwriter.cpp
  src/changesetwriter.h
  src/driver.cpp
  src/driver.h
  src/tableschema.cpp
  src/tableschema.h
  src/drivers/sqlitedriver.cpp
  src/drivers/sqlitedriver.h
  src/drivers/sqliteutils.cpp
  src/drivers/sqliteutils.h
)
# Vendored third-party helpers kept separate from geodiff_src so the pedantic
# warning flags below are not applied to them.
SET(external_3rd_party_src
  src/3rdparty/base64utils.cpp
  src/3rdparty/base64utils.h
  src/3rdparty/portableendian.h
  src/3rdparty/changesetgetvarint.h
  src/3rdparty/changesetputvarint.h
  src/3rdparty/json.hpp
)
# PostgreSQL driver is compiled in only when the dependency was found.
IF (POSTGRES_FOUND)
  SET(geodiff_src
    ${geodiff_src}
    src/drivers/postgresdriver.cpp
    src/drivers/postgresdriver.h
  )
ENDIF()
IF (ENABLE_TESTS OR BUILD_TOOLS)
  # tests need statically built library in order to use symbols that are not exported
  SET ( BUILD_STATIC TRUE )
ENDIF ()
# Static library target: same sources, "_a" suffix on the target name only
# (the output file keeps the plain name).
IF ( BUILD_STATIC )
  ADD_LIBRARY(${GEODIFF_NAME}_a STATIC ${geodiff_src} ${libgpkg_src} ${external_3rd_party_src} )
  SET_TARGET_PROPERTIES(${GEODIFF_NAME}_a PROPERTIES OUTPUT_NAME ${GEODIFF_NAME})
  TARGET_COMPILE_DEFINITIONS(${GEODIFF_NAME}_a PUBLIC -DGEODIFF_STATIC)
  TARGET_INCLUDE_DIRECTORIES(${GEODIFF_NAME}_a PRIVATE ${libgpkg_dir}/gpkg)
  IF (POSTGRES_FOUND)
    TARGET_INCLUDE_DIRECTORIES(${GEODIFF_NAME}_a PRIVATE ${POSTGRES_INCLUDE_DIR})
  ENDIF ()
  # win32 libs
  IF ( WIN32 )
    TARGET_LINK_LIBRARIES( ${GEODIFF_NAME}_a PUBLIC shlwapi )
  ENDIF ()
ENDIF ()
# Shared library target (default build output; also the pygeodiff extension).
IF ( BUILD_SHARED )
  ADD_LIBRARY(${GEODIFF_NAME} SHARED ${geodiff_src} ${libgpkg_src} ${external_3rd_party_src} )
  IF (SKBUILD AND WIN32)
    # looks like delvewheel does not support dll extension at the moment
    SET_TARGET_PROPERTIES(${GEODIFF_NAME} PROPERTIES SUFFIX .pyd)
  ENDIF (SKBUILD AND WIN32)
  IF (NOT WIN32 AND NOT ANDROID AND NOT IOS)
    TARGET_LINK_LIBRARIES(${GEODIFF_NAME} PUBLIC dl pthread)
  ENDIF (NOT WIN32 AND NOT ANDROID AND NOT IOS)
  TARGET_LINK_LIBRARIES(${GEODIFF_NAME} PUBLIC ${SQLite3_LIBRARIES})
  TARGET_INCLUDE_DIRECTORIES(${GEODIFF_NAME} PRIVATE ${libgpkg_dir}/gpkg)
  IF (POSTGRES_FOUND)
    TARGET_INCLUDE_DIRECTORIES(${GEODIFF_NAME} PRIVATE ${POSTGRES_INCLUDE_DIR})
    TARGET_LINK_LIBRARIES(${GEODIFF_NAME} PUBLIC ${POSTGRES_LIBRARY})
  ENDIF ()
  # win32 libs
  IF ( WIN32 )
    TARGET_LINK_LIBRARIES( ${GEODIFF_NAME} PUBLIC shlwapi )
  ENDIF ()
ENDIF ()
# command line tool
# Links against the static library so it can use non-exported symbols; the
# executable is named "geodiff" regardless of the target name.
IF (BUILD_TOOLS)
  SET(geodiff_cli_src src/geodiff-cli.cpp)
  ADD_EXECUTABLE(geodiff-cli ${geodiff_cli_src})
  TARGET_LINK_LIBRARIES(geodiff-cli PUBLIC ${GEODIFF_NAME}_a )
  SET_TARGET_PROPERTIES(geodiff-cli PROPERTIES OUTPUT_NAME geodiff)
  TARGET_LINK_LIBRARIES(geodiff-cli PUBLIC ${SQLite3_LIBRARIES} )
  IF (POSTGRES_FOUND)
    TARGET_LINK_LIBRARIES(geodiff-cli PUBLIC ${POSTGRES_LIBRARY})
  ENDIF ()
ENDIF (BUILD_TOOLS)
ADD_DEFINITIONS( -DSQLITE_CORE )
# tests
IF (ENABLE_TESTS)
  INCLUDE (CTest)
  ADD_DEFINITIONS(-DENABLE_TESTS)
  ENABLE_TESTING()
  ADD_SUBDIRECTORY(tests)
ENDIF(ENABLE_TESTS)
#############################################################
# Setup pedantic warnings
# Applied only to our own sources (geodiff_src / geodiff_cli_src), not to the
# vendored libgpkg and 3rdparty code.
IF (PEDANTIC)
  IF (MSVC)
    # warning level 4 and all warnings as errors.
    set_source_files_properties(${geodiff_src} ${geodiff_cli_src} PROPERTIES COMPILE_OPTIONS "/W4;/WX")
  ELSE(MSVC)
    # lots of warnings and all warnings as errors
    set_source_files_properties(${geodiff_src} ${geodiff_cli_src} PROPERTIES COMPILE_OPTIONS "-Wall;-Wextra;-pedantic;-Werror")
  ENDIF(MSVC)
ENDIF(PEDANTIC)
# install
# scikit-build installs the module into the pygeodiff package dir; a regular
# build installs into bin/lib/include as usual.
IF(SKBUILD)
  IF(WIN32)
    INSTALL(TARGETS ${GEODIFF_NAME} RUNTIME DESTINATION pygeodiff)
  ELSE(WIN32)
    INSTALL(TARGETS ${GEODIFF_NAME} LIBRARY DESTINATION pygeodiff)
  ENDIF(WIN32)
ELSE(SKBUILD)
  IF (BUILD_SHARED)
    IF(WIN32)
      INSTALL(TARGETS ${GEODIFF_NAME}
        RUNTIME DESTINATION bin
        ARCHIVE DESTINATION lib
      )
    ELSE(WIN32)
      INSTALL(TARGETS ${GEODIFF_NAME}
        LIBRARY DESTINATION lib
        ARCHIVE DESTINATION lib
      )
    ENDIF(WIN32)
  ENDIF()
  IF (BUILD_STATIC)
    IF(WIN32)
      INSTALL(TARGETS ${GEODIFF_NAME}_a
        RUNTIME DESTINATION bin
        ARCHIVE DESTINATION lib
      )
    ELSE(WIN32)
      INSTALL(TARGETS ${GEODIFF_NAME}_a
        LIBRARY DESTINATION lib
        ARCHIVE DESTINATION lib
      )
    ENDIF(WIN32)
  ENDIF()
  INSTALL(FILES src/geodiff.h DESTINATION include)
ENDIF(SKBUILD)
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef CHANGESETUTILS_H
#define CHANGESETUTILS_H
#include "geodiff.h"
#include <string>
#include <vector>
#include "json.hpp"
class ConflictFeature;
class ChangesetReader;
class ChangesetWriter;
struct ChangesetEntry;
struct ChangesetTable;
struct TableSchema;
struct Value;
class Context;
//! Builds changeset table metadata (table name + per-column pkey flags)
//! from a full table schema
ChangesetTable schemaToChangesetTable( const std::string &tableName, const TableSchema &tbl );
//! Writes the inverse of the changeset read from \a reader into \a writer
void invertChangeset( ChangesetReader &reader, ChangesetWriter &writer );
//! Concatenates the given changeset files into a single output changeset
void concatChangesets( const Context *context, const std::vector<std::string> &filenames, const std::string &outputChangeset );
//! JSON representation of a single changeset entry
nlohmann::json changesetEntryToJSON( const ChangesetEntry &entry );
//! Full JSON listing of all entries in a changeset
nlohmann::json changesetToJSON( ChangesetReader &reader );
//! Condensed JSON summary of a changeset (see changesetutils.cpp for format)
nlohmann::json changesetToJSONSummary( ChangesetReader &reader );
//! JSON representation of rebase conflicts
nlohmann::json conflictsToJSON( const std::vector<ConflictFeature> &conflicts );
//! JSON representation of a single column value
nlohmann::json valueToJSON( const Value &value );
//! Decodes a hex string into raw bytes
std::string hex2bin( const std::string &str );
//! Encodes raw bytes as a hex string
std::string bin2hex( const std::string &str );
#endif // CHANGESETUTILS_H
<file_sep>
-- Test fixture: schema gd_deleted_a where fid 2 is absent
-- (presumably representing a deleted feature relative to a base fixture).
DROP SCHEMA IF EXISTS gd_deleted_a CASCADE;
CREATE SCHEMA gd_deleted_a;
CREATE TABLE gd_deleted_a.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "name" TEXT, "rating" INTEGER);
INSERT INTO gd_deleted_a.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), 'feature1', 1);
INSERT INTO gd_deleted_a.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326), 'feature3', 3);
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef SQLITEDRIVER_H
#define SQLITEDRIVER_H
#include "driver.h"
#include "sqliteutils.h"
/**
 * Support for diffs between Sqlite-based files (including GeoPackage)
 *
 * Connection configuration:
 *
 * - for a single database use (not possible to call createChangeset())
 *    - "base" = path to the database
 *
 * - for use with two databases (possible to call createChangeset())
 *    - "base" = path to the 'base' database
 *    - "modified" = path to the 'modified' database
 */
class SqliteDriver : public Driver
{
  public:
    explicit SqliteDriver( const Context *context );
    void open( const DriverParametersMap &conn ) override;
    void create( const DriverParametersMap &conn, bool overwrite = false ) override;
    std::vector<std::string> listTables( bool useModified = false ) override;
    TableSchema tableSchema( const std::string &tableName, bool useModified = false ) override;
    void createChangeset( ChangesetWriter &writer ) override;
    void applyChangeset( ChangesetReader &reader ) override;
    void createTables( const std::vector<TableSchema> &tables ) override;
    void dumpData( ChangesetWriter &writer, bool useModified = false ) override;
    void checkCompatibleForRebase( bool useModified = false ) override;
  private:
    //! Logs details about a conflicting entry hit while applying a changeset
    void logApplyConflict( const std::string &type, const ChangesetEntry &entry ) const;
    //! Returns the sqlite database name to query — base or modified,
    //! depending on \a useModified (exact naming is in sqlitedriver.cpp)
    std::string databaseName( bool useModified = false );
    //! Handle to the open sqlite database
    std::shared_ptr<Sqlite3Db> mDb;
    bool mHasModified = false;  // whether there is also a second file attached
};
#endif // SQLITEDRIVER_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2021 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "geodiffutils.hpp"
// Paths containing non-ASCII characters must work end-to-end — both in the
// geodiff API and in our own test helper functions.
TEST( UtilsTest, test_unicode )
{
  const std::string diffPath = pathjoin( testdir(), "utf_test_ščé", "changes.diff" );
  const std::string jsonPath = pathjoin( tmpdir(), "čúčo.json" );
  // exporting the changeset listing must succeed despite unicode in both paths
  EXPECT_EQ( GEODIFF_listChanges( testContext(), diffPath.c_str(), jsonPath.c_str() ), GEODIFF_SUCCESS );
  // the test utilities must also cope with the unicode output path
  EXPECT_TRUE( fileexists( jsonPath ) );
  EXPECT_FALSE( isFileEmpty( jsonPath ) );
  EXPECT_TRUE( fileContains( jsonPath, "geodiff" ) );
  printFileToStdout( "CUCO.JSON", jsonPath );
}
// Standard gtest entry point with project-level setup/teardown hooks.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();
  const int exitCode = RUN_ALL_TESTS();
  finalize_test();
  return exitCode;
}
<file_sep># -*- coding: utf-8 -*-
"""
pygeodiff
-----------
    This module provides tools for creating diffs of geospatial data formats
:copyright: (c) 2019-2022 Lutra Consulting Ltd.
:license: MIT, see LICENSE for more details.
"""
from .main import GeoDiff
from .geodifflib import (
GeoDiffLibError,
GeoDiffLibConflictError,
GeoDiffLibUnsupportedChangeError,
GeoDiffLibVersionError,
ChangesetEntry,
ChangesetReader,
UndefinedValue,
)
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2022 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "geodiffutils.hpp"
// Changeset creation must honor the context's skip-table list: with "lines"
// skipped, only the changes made to the points layer are recorded.
TEST( SkipTablesSqlite3Test, test_skip_create )
{
  const std::string testname( "test_skip_create" );
  const std::string base = pathjoin( testdir(), "skip_tables", "base.gpkg" );
  const std::string modified_all = pathjoin( testdir(), "skip_tables", "modified_all.gpkg" );
  const std::string modified_points = pathjoin( testdir(), "skip_tables", "modified_points.gpkg" );
  const std::string changeset_points = pathjoin( tmpdir(), testname, "changeset_points.bin" );
  const std::string patched_points = pathjoin( tmpdir(), testname, "patched_points.gpkg" );
  makedir( pathjoin( tmpdir(), testname ) );
  // ignore lines table when creating changeset
  Context *context = static_cast<Context *>( testContext() );
  context->setTablesToSkip( { "lines" } );
  EXPECT_EQ( GEODIFF_createChangeset( testContext(), base.c_str(), modified_all.c_str(), changeset_points.c_str() ), GEODIFF_SUCCESS );
  EXPECT_EQ( GEODIFF_changesCount( testContext(), changeset_points.c_str() ), 4 );
  // replay the recorded changes on a copy of the base file
  filecopy( patched_points, base );
  EXPECT_EQ( GEODIFF_applyChangeset( testContext(), patched_points.c_str(), changeset_points.c_str() ), GEODIFF_SUCCESS );
  // the result must match the database where only points were modified
  EXPECT_TRUE( equals( patched_points, modified_points, false ) );
  // reset the skip list so later tests are unaffected
  context->setTablesToSkip( {} );
}
// Changeset application must honor the context's skip-table list: the full
// changeset holds 6 changes, but with "lines" skipped only the points-layer
// changes are applied.
TEST( SkipTablesSqlite3Test, test_skip_apply )
{
  const std::string testname( "test_skip_apply" );
  const std::string base = pathjoin( testdir(), "skip_tables", "base.gpkg" );
  const std::string modified_all = pathjoin( testdir(), "skip_tables", "modified_all.gpkg" );
  const std::string modified_points = pathjoin( testdir(), "skip_tables", "modified_points.gpkg" );
  const std::string changeset = pathjoin( tmpdir(), testname, "changeset.bin" );
  const std::string patched_points = pathjoin( tmpdir(), testname, "patched_points.gpkg" );
  makedir( pathjoin( tmpdir(), testname ) );
  EXPECT_EQ( GEODIFF_createChangeset( testContext(), base.c_str(), modified_all.c_str(), changeset.c_str() ), GEODIFF_SUCCESS );
  EXPECT_EQ( GEODIFF_changesCount( testContext(), changeset.c_str() ), 6 );
  // ignore lines table when applying changeset
  Context *context = static_cast<Context *>( testContext() );
  context->setTablesToSkip( { "lines" } );
  // replay the recorded changes on a copy of the base file
  filecopy( patched_points, base );
  EXPECT_EQ( GEODIFF_applyChangeset( testContext(), patched_points.c_str(), changeset.c_str() ), GEODIFF_SUCCESS );
  // the result must match the database where only points were modified
  EXPECT_TRUE( equals( patched_points, modified_points, false ) );
  // reset the skip list so later tests are unaffected
  context->setTablesToSkip( {} );
}
// Standard gtest entry point with project-level setup/teardown hooks.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();
  const int result = RUN_ALL_TESTS();
  finalize_test();
  return result;
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef TABLESCHEMA_H
#define TABLESCHEMA_H
#include <string>
#include <vector>
#include <algorithm>
#include "geodiffutils.hpp"
/* Information about column type, converted to base type */
/* Information about column type, converted to base type */
struct TableColumnType
{
  //! Driver-independent classification of a column type
  enum BaseType
  {
    TEXT = 0,
    INTEGER,
    DOUBLE,
    BOOLEAN,
    BLOB,
    GEOMETRY,
    DATE,
    DATETIME
  };
  BaseType baseType = TEXT;   //!< converted, driver-independent type
  std::string dbType;         //!< raw type string as reported by the database
  //! Returns string representation of the base type enum value
  static std::string baseTypeToString( BaseType t );
  //! Unified conversion of GeoPackage and PostgreSQL types to base types
  void convertToBaseType();
  // NOTE: comparison against a string or another TableColumnType compares the
  // raw dbType string, while comparison against BaseType compares baseType.
  bool operator==( const std::string &other ) const
  {
    return ( dbType == other );
  }
  bool operator!=( const std::string &other ) const
  {
    return !( *this == other );
  }
  bool operator==( const TableColumnType &other ) const
  {
    return ( dbType == other.dbType );
  }
  bool operator!=( const TableColumnType &other ) const
  {
    return !( *this == other );
  }
  bool operator==( const TableColumnType::BaseType &other ) const
  {
    return ( baseType == other );
  }
  bool operator!=( const TableColumnType::BaseType &other ) const
  {
    return !( *this == other );
  }
};
/** Information about a single column of a database table */
struct TableColumnInfo
{
  //! Unique name of the column
  std::string name;
  //! Type of the column as reported by the database
  TableColumnType type;
  //! Whether this column is a part of the table's primary key
  bool isPrimaryKey = false;
  //! Whether the column is defined as "NOT NULL" - i.e. null values are not allowed
  bool isNotNull = false;
  //! Whether the column has a default value assigned as an auto-incrementing
  bool isAutoIncrement = false;
  //! Whether the column encodes geometry data
  bool isGeometry = false;
  //! In case of geometry column - contains geometry type (e.g. POINT / LINESTRING / POLYGON / ...)
  //! Only "flat" types are allowed - without Z / M - these are stored separately in geomHasZ, geomHasM
  std::string geomType;
  //! In case of geometry column - contains ID of the spatial ref. system
  int geomSrsId = -1;
  //! Whether the geometry column includes Z coordinates
  bool geomHasZ = false;
  //! Whether the geometry column includes M coordinates
  bool geomHasM = false;
  //! Like operator==, but compares types via their converted base type
  //! (type == other.type.baseType) instead of the raw db type string.
  //! Takes the other column by const reference — the original passed by
  //! value, needlessly copying the string members on every comparison.
  bool compareWithBaseTypes( const TableColumnInfo &other ) const
  {
    return name == other.name && type == other.type.baseType && isPrimaryKey == other.isPrimaryKey &&
           isNotNull == other.isNotNull && isAutoIncrement == other.isAutoIncrement &&
           isGeometry == other.isGeometry && geomType == other.geomType && geomSrsId == other.geomSrsId &&
           geomHasZ == other.geomHasZ && geomHasM == other.geomHasM;
  }
  //! Returns a single-line human-readable description of the column
  std::string dump() const
  {
    std::string output = name + " | " + type.dbType + " | ";
    if ( isPrimaryKey )
      output += "pkey ";
    if ( isNotNull )
      output += "notnull ";
    if ( isAutoIncrement )
      output += "autoincrement";
    if ( isGeometry )
    {
      output += "geometry:" + geomType + ":" + std::to_string( geomSrsId );
      if ( geomHasZ )
        output += "hasZ";
      if ( geomHasM )
        output += "hasM";
    }
    return output;
  }
  //! Marks the column as a geometry column and sets all geometry attributes
  //! (also forces the base type to GEOMETRY)
  void setGeometry( const std::string &geomTypeName, int srsId, bool hasM, bool hasZ )
  {
    type.baseType = TableColumnType::GEOMETRY;
    isGeometry = true;
    geomType = geomTypeName;
    geomSrsId = srsId;
    geomHasM = hasM;
    geomHasZ = hasZ;
  }
  bool operator==( const TableColumnInfo &other ) const
  {
    return name == other.name && type == other.type && isPrimaryKey == other.isPrimaryKey &&
           isNotNull == other.isNotNull && isAutoIncrement == other.isAutoIncrement &&
           isGeometry == other.isGeometry && geomType == other.geomType && geomSrsId == other.geomSrsId &&
           geomHasZ == other.geomHasZ && geomHasM == other.geomHasM;
  }
  bool operator!=( const TableColumnInfo &other ) const
  {
    return !( *this == other );
  }
};
/** Definition of a coordinate reference system - may be needed when creating tables */
struct CrsDefinition
{
  int srsId = 0;         //!< Identifier of the CRS within the database
  std::string authName;  //!< Name of the authority (usually "EPSG")
  int authCode = 0;      //!< Code of the CRS within authority
  std::string wkt;       //!< Definition in form of WKT string (not compared, see below)
  bool operator==( const CrsDefinition &other ) const
  {
    // TODO: We should also compare crs in future, however, the format may vary even for the same object
    // and that makes it hard to compare just with string compare
    return srsId == other.srsId && authName == other.authName && authCode == other.authCode;
  }
  bool operator!=( const CrsDefinition &other ) const
  {
    return !( *this == other );
  }
};
/** Information about table's spatial extent (axis-aligned bounding box) */
struct Extent
{
  // All bounds default to 0 — both via the constructor defaults and the
  // member initializers below.
  Extent( double _minX = 0, double _minY = 0, double _maxX = 0, double _maxY = 0 )
    : minX( _minX ), minY( _minY ), maxX( _maxX ), maxY( _maxY ) {}
  double minX = 0, minY = 0, maxX = 0, maxY = 0;
};
/** Information about table schema of a database table */
struct TableSchema
{
  std::string name;                      //!< table name
  std::vector<TableColumnInfo> columns;  //!< ordered column definitions
  CrsDefinition crs;                     //!< CRS used by geometry columns
  //! Returns true if at least one column is a part of table's primary key
  bool hasPrimaryKey() const;
  //! Returns column index for the given column name (returns SIZE_MAX if column not is not found)
  size_t columnFromName( const std::string &columnName ) const;
  //! Returns index of the first encountered geometry column (returns SIZE_MAX if no geometry column is found)
  size_t geometryColumn() const;
  //! Returns a multi-line human-readable description of the table
  std::string dump() const
  {
    std::string output = "TABLE " + name + "\n";
    for ( const TableColumnInfo &col : columns )
    {
      // cppcheck-suppress useStlAlgorithm
      output += "   " + col.dump() + "\n";
    }
    return output;
  }
  //! Like operator==, but compares column types via their converted base
  //! types (useful when comparing schemas coming from different drivers)
  bool compareWithBaseTypes( const TableSchema &other ) const
  {
    return name == other.name &&
           crs == other.crs && columns.size() == other.columns.size() &&
           std::equal( columns.begin(), columns.end(), other.columns.begin(),
                       []( const TableColumnInfo & me, const TableColumnInfo & other ) { return me.compareWithBaseTypes( other ); } );
  }
  bool operator==( const TableSchema &other ) const
  {
    return name == other.name && columns == other.columns && crs == other.crs;
  }
  bool operator!=( const TableSchema &other ) const
  {
    return !( *this == other );
  }
};
//! Converts a driver-specific column type string to its base type; returns
//! a struct holding both the raw db type string and the converted base type
TableColumnType columnType( const Context *context, const std::string &columnType, const std::string &driverName, bool isGeometry = false );
//! Converts table schema from base to destination driver, raises GeoDiffException if that is not supported
void tableSchemaConvert( const std::string &driverDstName, TableSchema &tbl );
#endif // TABLESCHEMA_H
<file_sep>#ifndef BASE64UTILS_H
#define BASE64UTILS_H
#include <string>
//! Encodes in_len bytes starting at bytes_to_encode as a base64 string
std::string base64_encode( unsigned char const *bytes_to_encode, unsigned int in_len );
//! Decodes a base64 string back to raw bytes (returned as std::string)
std::string base64_decode( std::string const &encoded_string );
#endif // BASE64UTILS_H
<file_sep>
-- Test fixture: schema gd_inserted_1_b with three base features plus one
-- extra inserted feature (fid 4, "my new point B").
DROP SCHEMA IF EXISTS gd_inserted_1_b CASCADE;
CREATE SCHEMA gd_inserted_1_b;
CREATE TABLE gd_inserted_1_b.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "name" TEXT, "rating" INTEGER);
INSERT INTO gd_inserted_1_b.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), 'feature1', 1);
INSERT INTO gd_inserted_1_b.simple VALUES (2, ST_GeomFromText('Point (-0.36388508891928861 0.56224350205198359)', 4326), 'feature2', 2);
INSERT INTO gd_inserted_1_b.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326), 'feature3', 3);
INSERT INTO gd_inserted_1_b.simple VALUES (4, ST_GeomFromText('Point (-0.83257081666079680 0.16881887690991337)', 4326), 'my new point B', 2);
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef CHANGESET_H
#define CHANGESET_H
#include <assert.h>
#include <memory>
#include <string>
#include <vector>
/**
* Representation of a single value stored in a column.
* It can be one of types:
* - NULL
* - integer
* - double
* - string
* - binary data (blob)
*
* There is also a special "undefined" value type which is different
* from "null". The "undefined" value means that the particular value
* has not changed, for example in UPDATE change if a column's value
* is unchanged, its value will have this type.
*/
struct Value
{
  Value() {}
  ~Value() { reset(); }
  //! Copy construction delegates to the deep-copying assignment operator
  Value( const Value &other )
  {
    *this = other;
  }
  //! Deep-copies the other value; text/blob payloads get their own heap copy
  Value &operator=( const Value &other )
  {
    if ( &other != this )
    {
      reset();
      mType = other.mType;
      mVal = other.mVal;
      if ( mType == TypeText || mType == TypeBlob )
      {
        mVal.str = new std::string( *mVal.str );  // make a deep copy
      }
    }
    return *this;
  }
  //! Values are equal when both type and payload match; all undefined (and
  //! all null) values compare equal to each other
  bool operator==( const Value &other ) const
  {
    if ( mType != other.mType )
      return false;
    if ( mType == TypeUndefined || mType == TypeNull )
      return true;
    if ( mType == TypeInt )
      return getInt() == other.getInt();
    if ( mType == TypeDouble )
      return getDouble() == other.getDouble();
    if ( mType == TypeText || mType == TypeBlob )
      return getString() == other.getString();
    assert( false );
    return false;
  }
  bool operator!=( const Value &other ) const
  {
    return !( *this == other );
  }
  //! Possible value types
  enum Type
  {
    TypeUndefined = 0,   //!< equal to "undefined" value type in sqlite3 session extension
    TypeInt       = 1,   //!< equal to SQLITE_INTEGER
    TypeDouble    = 2,   //!< equal to SQLITE_FLOAT
    TypeText      = 3,   //!< equal to SQLITE_TEXT
    TypeBlob      = 4,   //!< equal to SQLITE_BLOB
    TypeNull      = 5,   //!< equal to SQLITE_NULL
  };
  Type type() const { return mType; }
  //! Returns the integer payload; asserts the value actually holds an int
  int64_t getInt() const
  {
    assert( mType == TypeInt );
    return mVal.num_i;
  }
  //! Returns the double payload; asserts the value actually holds a double
  double getDouble() const
  {
    assert( mType == TypeDouble );
    return mVal.num_f;
  }
  //! Returns the text/blob payload; asserts the value holds text or blob
  const std::string &getString() const
  {
    assert( mType == TypeText || mType == TypeBlob );
    return *mVal.str;
  }
  void setInt( int64_t n )
  {
    reset();
    mType = TypeInt;
    mVal.num_i = n;
  }
  void setDouble( double n )
  {
    reset();
    mType = TypeDouble;
    mVal.num_f = n;
  }
  //! Sets text or blob payload; copies `size` bytes from `ptr` to the heap
  void setString( Type t, const char *ptr, size_t size )
  {
    reset();
    assert( t == TypeText || t == TypeBlob );
    mType = t;
    mVal.str = new std::string( ptr, size );
  }
  void setUndefined()
  {
    reset();
  }
  void setNull()
  {
    reset();
    mType = TypeNull;
  }
  // Convenience factories (used mainly by tests)
  static Value makeInt( int64_t n ) { Value v; v.setInt( n ); return v; }
  static Value makeDouble( double n ) { Value v; v.setDouble( n ); return v; }
  static Value makeText( const std::string &s ) { Value v; v.setString( TypeText, s.data(), s.size() ); return v; }
  static Value makeNull() { Value v; v.setNull(); return v; }
protected:
  //! Frees the owned heap payload (text/blob) and resets type to undefined
  void reset()
  {
    if ( mType == TypeText || mType == TypeBlob )
    {
      delete mVal.str;
    }
    mType = TypeUndefined;
  }
protected:
  Type mType = TypeUndefined;
  //! Payload; str is an owned heap allocation when mType is text/blob
  union
  {
    int64_t num_i;
    double num_f;
    std::string *str;
  } mVal = {0};
};
//! std::hash<Value> implementation (allows using Value in unordered containers)
namespace std
{
  template<> struct hash<Value>
  {
    std::size_t operator()( const Value &v ) const
    {
      switch ( v.type() )
      {
        case Value::TypeUndefined:
          return 0xcccccccc;   // arbitrary sentinel: all undefined values hash alike
        case Value::TypeInt:
          return std::hash<int64_t> {}( v.getInt() );
        case Value::TypeDouble:
          return std::hash<double> {}( v.getDouble() );
        case Value::TypeText:
        case Value::TypeBlob:
          return std::hash<std::string> {}( v.getString() );
        case Value::TypeNull:
          return 0xdddddddd;   // arbitrary sentinel: all null values hash alike
      }
      assert( false );
      return 0;
    }
  };
}
/**
 * Table metadata stored in changeset file
 */
struct ChangesetTable
{
  //! Name of the table
  std::string name;
  //! Array of true/false values (one for each column) - indicating whether particular column is a part of primary key
  std::vector<bool> primaryKeys;
  //! Returns number of columns (derived from the size of primaryKeys)
  size_t columnCount() const { return primaryKeys.size(); }
};
/**
* Details of a single change within a changeset
*
* Contents of old/new values array based on operation type:
* - INSERT - new values contain data of the row to be inserted, old values array is invalid
* - DELETE - old values contain data of the row to be deleted, new values array is invalid
* - UPDATE - both old and new values arrays are valid, if a column has not changed, both
* old and new value have "undefined" value type. In addition to that, primary key
* columns of old value are always present (but new value of pkey columns is undefined
* if the primary key is not being changed).
*/
struct ChangesetEntry
{
  enum OperationType
  {
    OpInsert = 18, //!< equal to SQLITE_INSERT
    OpUpdate = 23, //!< equal to SQLITE_UPDATE
    OpDelete = 9, //!< equal to SQLITE_DELETE
  };
  //! Type of the operation in this entry
  OperationType op;
  //! Column values for "old" record - only valid for UPDATE and DELETE
  std::vector<Value> oldValues;
  //! Column values for "new" record - only valid for UPDATE and INSERT
  std::vector<Value> newValues;
  /**
   * Optional pointer to the source table information as stored in changeset.
   *
   * When the changeset entry has been read by ChangesetReader, the table always will be set to a valid
   * instance. Do not delete the instance - it is owned by ChangesetReader.
   *
   * When the changeset entry is being passed to ChangesetWriter, the table pointer is ignored
   * and it does not need to be set (writer has an explicit beginTable() call to set table).
   */
  ChangesetTable *table = nullptr;
  //! a quick way for tests to create a changeset entry
  //! The value vectors are copied; the table pointer is stored as-is
  //! (ownership stays with the caller).
  static ChangesetEntry make( ChangesetTable *t, OperationType o, const std::vector<Value> &oldV, const std::vector<Value> &newV )
  {
    ChangesetEntry e;
    e.op = o;
    e.oldValues = oldV;
    e.newValues = newV;
    e.table = t;
    return e;
  }
};
#endif // CHANGESET_H
<file_sep>DROP SCHEMA IF EXISTS gd_pg_diff CASCADE;
CREATE SCHEMA gd_pg_diff;

-- Minimal table used by the pg diff tests
CREATE TABLE gd_pg_diff.simple (
  "fid" SERIAL PRIMARY KEY,
  "geometry" GEOMETRY(POINT, 4326),
  "name" text
);

-- Seed two point features
INSERT INTO gd_pg_diff.simple ("fid", "geometry", "name")
VALUES
  (1, ST_GeomFromText('Point (0 1)', 4326), 'feature 1'),
  (2, ST_GeomFromText('Point (1 2)', 4326), 'feature 2');
<file_sep># -*- coding: utf-8 -*-
"""
:copyright: (c) 2023 <NAME>
:license: MIT, see LICENSE for more details.
"""
from .testutils import *
import os
import shutil
class UnitTestsCliCalls(GeoDiffCliTests):
    """Smoke tests of the geodiff CLI: each command is exercised with both
    invalid argument combinations (which must fail) and valid ones.

    Fix: a duplicated ``apply ... extra_arg`` invocation (copy-paste) was
    removed; repeated test-data paths are now named locals.
    """

    def test_cli_calls(self):
        """Run every CLI command with bad and good arguments in sequence."""
        print("********************************************************")
        print("PYTHON: test API calls")

        outdir = create_dir("cli-calls")
        # Shorthands for paths used throughout the test.
        tdir = geodiff_test_dir()
        base = tdir + "/base.gpkg"
        modified = tdir + "/1_geopackage/modified_1_geom.gpkg"
        inserted_diff = tdir + "/2_inserts/base-inserted_1_A.diff"
        updated_diff = tdir + "/2_updates/base-updated_A.diff"

        print("-- invalid")
        self.run_command([], expect_fail=True)
        self.run_command(["badcommand"], expect_fail=True)

        print("-- dump")
        self.run_command(["dump"], expect_fail=True)
        self.run_command(["dump", "--driver"], expect_fail=True)
        self.run_command(["dump", "--driver", "sqlite"], expect_fail=True)
        self.run_command(["dump", base], expect_fail=True)
        self.run_command(
            ["dump", base, outdir + "/dump2.diff", "extra_arg"], expect_fail=True
        )
        self.run_command(["dump", base, outdir + "/dump.diff"])

        print("-- as-json")
        self.run_command(["as-json"], expect_fail=True)
        self.run_command(["as-json", "arg1", "extra_arg"], expect_fail=True)
        self.run_command(["as-json", outdir + "/dump.diff"], check_in_output="feature2")
        self.run_command(["as-json", outdir + "/dump.diff", outdir + "/dump.json"])
        self.run_command(
            ["as-json", outdir + "/dump.diff", outdir + "/dump.json", "extra_arg"],
            expect_fail=True,
        )
        file_contains(outdir + "/dump.json", "feature3")

        print("-- as-summary")
        self.run_command(["as-summary"], expect_fail=True)
        self.run_command(["as-summary", "arg1", "extra_arg"], expect_fail=True)
        self.run_command(
            ["as-summary", outdir + "/dump.diff"], check_in_output="geodiff_summary"
        )
        self.run_command(["as-summary", outdir + "/dump.diff", outdir + "/dump.json"])
        file_contains(outdir + "/dump.json", "geodiff_summary")

        print("-- diff")
        self.run_command(["diff"], expect_fail=True)
        self.run_command(["diff", "--driver1"], expect_fail=True)
        self.run_command(["diff", "--driver1", "sqlite"], expect_fail=True)
        self.run_command(["diff", "--skip-tables"], expect_fail=True)
        self.run_command(["diff", tdir + "/non-existent.gpkg"], expect_fail=True)
        self.run_command(
            ["diff", tdir + "/non-existent.gpkg", modified], expect_fail=True
        )
        self.run_command(
            ["diff", base, modified, outdir + "/diff.diff", "extra_arg"],
            expect_fail=True,
        )
        # --json and --summary are mutually exclusive
        self.run_command(
            ["diff", "--json", "--summary", base, modified], expect_fail=True
        )
        self.run_command(
            [
                "diff",
                "--driver1",
                "sqlite",
                "''",
                "--driver2",
                "sqlite",
                "''",
                "--summary",
                base,
                modified,
            ],
            check_in_output="geodiff_summary",
            expect_fail=True,
        )
        self.run_command(
            [
                "diff",
                "--driver1",
                "sqlite",
                "--driver2",
                "sqlite",
                "''",
                "--summary",
                base,
                modified,
            ],
            check_in_output="geodiff_summary",
            expect_fail=True,
        )
        self.run_command(
            ["diff", "--json", "--skip-tables", base, modified],
            check_in_output="geodiff",
            expect_fail=True,
        )
        self.run_command(["diff", base, modified])
        self.run_command(["diff", "--json", base, modified], check_in_output="update")
        self.run_command(
            ["diff", "--json", "--skip-tables", "simple", base, modified],
            check_in_output="geodiff",
        )  # empty diff
        self.run_command(
            ["diff", "--summary", base, modified], check_in_output="geodiff_summary"
        )
        self.run_command(
            ["diff", "--driver", "sqlite", "''", "--summary", base, modified],
            check_in_output="geodiff_summary",
        )
        self.run_command(
            [
                "diff",
                "--driver-1",
                "sqlite",
                "''",
                "--driver-2",
                "sqlite",
                "''",
                "--summary",
                base,
                modified,
            ],
            check_in_output="geodiff_summary",
        )
        self.run_command(
            ["diff", base, tdir + "/2_inserts/inserted_1_A.gpkg", outdir + "/diff.diff"]
        )

        print("-- copy")
        self.run_command(["copy"], expect_fail=True)
        self.run_command(
            ["copy", tdir + "/non-existent.gpkg", outdir + "/copy.gpkg"],
            expect_fail=True,
        )
        self.run_command(
            ["copy", base, outdir + "/copy.gpkg", "extra_arg"], expect_fail=True
        )
        self.run_command(
            ["copy", "--skip-tables", base, outdir + "/copy.gpkg"], expect_fail=True
        )
        self.run_command(
            ["copy", "--driver-1", base, outdir + "/copy.gpkg"], expect_fail=True
        )
        self.run_command(
            ["copy", "--driver-2", base, outdir + "/copy.gpkg"], expect_fail=True
        )
        self.run_command(
            [
                "copy",
                "--driver",
                "sqlite",
                "''",
                "--skip-tables",
                "unknown",
                base,
                outdir + "/copy2.gpkg",
            ],
            expect_fail=True,
        )
        self.run_command(["copy", base, outdir + "/copy.gpkg"])

        print("-- apply")
        self.run_command(["copy", base, outdir + "/copyA.gpkg"])
        self.run_command(["copy", base, outdir + "/copyB.gpkg"])
        self.run_command(["apply"], expect_fail=True)
        self.run_command(["apply", "--driver"], expect_fail=True)
        self.run_command(["apply", "--skip-tables"], expect_fail=True)
        self.run_command(["apply", outdir + "/copyA.gpkg"], expect_fail=True)
        self.run_command(
            ["apply", outdir + "/copyA.gpkg", modified], expect_fail=True
        )  # second arg is diff
        self.run_command(
            ["apply", outdir + "/copyA.gpkg", outdir + "/diff.diff", "extra_arg"],
            expect_fail=True,
        )
        self.run_command(
            ["apply", "--driver", outdir + "/copyA.gpkg", inserted_diff],
            expect_fail=True,
        )
        self.run_command(
            ["apply", "--driver", "sqlite", outdir + "/copyA.gpkg", inserted_diff],
            expect_fail=True,
        )
        self.run_command(
            ["apply", "--skip-tables", outdir + "/copyA.gpkg", inserted_diff],
            expect_fail=True,
        )
        self.run_command(
            ["apply", "--invalid-flag", outdir + "/copyA.gpkg", inserted_diff],
            expect_fail=True,
        )
        self.run_command(
            [
                "apply",
                "--driver",
                "sqlite",
                "''",
                "--skip-tables",
                "''",
                outdir + "/copyA.gpkg",
                inserted_diff,
            ]
        )
        self.run_command(["apply", outdir + "/copyB.gpkg", inserted_diff])

        print("-- rebase-diff")
        self.run_command(["copy", base, outdir + "/copyF.gpkg"])
        self.run_command(["rebase-diff"], expect_fail=True)
        self.run_command(["rebase-diff", "--driver"], expect_fail=True)
        self.run_command(["rebase-diff", "--driver", "sqlite"], expect_fail=True)
        self.run_command(["rebase-diff", outdir + "/copyF.gpkg"], expect_fail=True)
        self.run_command(
            ["rebase-diff", outdir + "/copyF.gpkg", modified], expect_fail=True
        )  # second arg is diff
        self.run_command(
            [
                "rebase-diff",
                outdir + "/copyF.gpkg",
                inserted_diff,
                updated_diff,
                outdir + "/rebase-diff.diff",
                outdir + "/confF.confict",
                "extra_arg",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-diff",
                outdir + "/copyF.gpkg",
                inserted_diff,
                tdir + "/bad.diff",
                outdir + "/rebase-diff.diff",
                outdir + "/confF.confict",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-diff",
                outdir + "/copyF.gpkg",
                tdir + "/bad.diff",
                updated_diff,
                outdir + "/rebase-diff.diff",
                outdir + "/confF.confict",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-diff",
                outdir + "/copyF.gpkg",
                inserted_diff,
                updated_diff,
                outdir + "/rebase-diff.diff",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-diff",
                outdir + "/copyF.gpkg",
                inserted_diff,
                updated_diff,
                outdir + "/rebase-diff.diff",
                outdir + "/confF.confict",
            ]
        )

        print("-- rebase-db")
        self.run_command(["copy", base, outdir + "/copyD.gpkg"])
        self.run_command(["rebase-db"], expect_fail=True)
        self.run_command(["rebase-db", "--driver"], expect_fail=True)
        self.run_command(["rebase-db", "--driver", "sqlite"], expect_fail=True)
        self.run_command(["rebase-db", "--bad_flag"], expect_fail=True)
        self.run_command(
            ["rebase-db", base, outdir + "/copyD.gpkg", inserted_diff],
            expect_fail=True,
        )  # missing arg
        self.run_command(
            [
                "rebase-db",
                tdir + "/bad.gpkg",
                outdir + "/copyD.gpkg",
                tdir + "/bad.diff",
                outdir + "/rebasedb.conflicts.json",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-db",
                base,
                outdir + "/copyD.gpkg",
                tdir + "/bad.diff",
                outdir + "/rebasedb.conflicts.json",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-db",
                base,
                outdir + "/bad.gpkg",
                inserted_diff,
                outdir + "/rebasedb.conflicts.json",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "rebase-db",
                base,
                outdir + "/copyD.gpkg",
                inserted_diff,
                outdir + "/rebasedb.conflicts.json",
            ]
        )

        print("-- invert")
        self.run_command(["invert"], expect_fail=True)
        self.run_command(["invert", "--bad_flag"], expect_fail=True)
        self.run_command(
            ["invert", tdir + "/concat/bar-insert.diff"], expect_fail=True
        )
        self.run_command(
            [
                "invert",
                tdir + "/concat/bar-insert.diff",
                outdir + "/invert.diff",
                "extra_arg",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "invert",
                tdir + "/concat/non-existent-file.diff",
                outdir + "/invert.diff",
            ],
            expect_fail=True,
        )
        self.run_command(
            ["invert", tdir + "/concat/bar-insert.diff", outdir + "/invert.diff"]
        )
        self.run_command(["as-json", outdir + "/invert.diff"], check_in_output="points")

        print("-- concat")
        self.run_command(["concat"], expect_fail=True)
        self.run_command(
            [
                "concat",
                tdir + "/concat/non-existent-file.diff",
                tdir + "/concat/bar-update.diff",
                outdir + "/concat-fail.diff",
            ],
            expect_fail=True,
        )
        self.run_command(
            [
                "concat",
                tdir + "/concat/bar-insert.diff",
                tdir + "/concat/bar-update.diff",
                outdir + "/concat2.diff",
            ]
        )
        self.run_command(
            ["as-json", outdir + "/concat2.diff"], check_in_output="MODIFIED"
        )
        self.run_command(
            [
                "concat",
                tdir + "/concat/bar-insert.diff",
                tdir + "/concat/bar-update.diff",
                tdir + "/concat/bar-delete.diff",
                outdir + "/concat3.diff",
            ]
        )
        self.run_command(
            ["as-json", outdir + "/concat3.diff"], check_in_output="geodiff"
        )  # empty file

        print("-- schema")
        self.run_command(["schema"], expect_fail=True)
        self.run_command(["schema", tdir + "/non-existent.gpkg"], expect_fail=True)
        self.run_command(["schema", base], check_in_output="MEDIUMINT")
        self.run_command(["schema", base, outdir + "/schema.txt"])
        file_contains(outdir + "/schema.txt", "MEDIUMINT")
        self.run_command(
            ["schema", base, outdir + "/schema-fail.txt", "extra_arg"],
            expect_fail=True,
        )

        print("-- drivers")
        self.run_command(["drivers"], check_in_output="sqlite")
        self.run_command(["drivers", "extra_arg"], expect_fail=True)

        print("-- version")
        self.run_command(["version"], check_in_output=".")
        self.run_command(["version", "extra_arg"], expect_fail=True)

        print("-- help")
        self.run_command(["help"], check_in_output="Lutra Consulting")
        self.run_command(["help", "extra_arg"], expect_fail=True)
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef CHANGESETWRITER_H
#define CHANGESETWRITER_H
#include "geodiff.h"
#include "changeset.h"
#include <fstream>
/**
* Class for writing binary changeset files.
* First use open() to create a new changeset file and then for each modified table:
* - call beginTable() once
* - then call writeEntry() for each change within that table
*
* See changeset-format.md for the documentation of the format.
*/
class ChangesetWriter
{
  public:
    /**
     * opens a file for writing changeset (will overwrite if it exists already)
     * throws GeoDiffException on error
     */
    void open( const std::string &filename );
    //! writes table information, all subsequent writes will be related to this table until next call to beginTable()
    void beginTable( const ChangesetTable &table );
    //! writes table change entry
    void writeEntry( const ChangesetEntry &entry );
  private:
    //! writes a single raw byte to the output stream
    void writeByte( char c );
    //! writes a number in the variable-length integer encoding of the changeset format
    void writeVarint( int n );
    //! writes the string's bytes followed by a terminating zero byte
    void writeNullTerminatedString( const std::string &str );
    //! writes one row's column values (used for old/new records of an entry)
    void writeRowValues( const std::vector<Value> &values );
    std::ofstream mFile;
    ChangesetTable mCurrentTable; // currently processed table
};
#endif // CHANGESETWRITER_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "changesetreader.h"
// Opening a non-existent changeset file must fail gracefully (return false).
TEST( ChangesetReaderTest, test_open )
{
  std::string changeset = "invalid_file";
  ChangesetReader reader;
  EXPECT_FALSE( reader.open( changeset ) );
}
// Reads a changeset with a single INSERT and verifies the table metadata
// and the new row's values; once exhausted, nextEntry() keeps returning false.
TEST( ChangesetReaderTest, test_read_insert )
{
  std::string changeset = pathjoin( testdir(), "2_inserts", "base-inserted_1_A.diff" );
  ChangesetReader reader;
  EXPECT_TRUE( reader.open( changeset ) );
  ChangesetEntry entry;
  EXPECT_TRUE( reader.nextEntry( entry ) );
  EXPECT_EQ( entry.op, ChangesetEntry::OpInsert );
  // table metadata: 4 columns, only the first one is a primary key
  EXPECT_EQ( entry.table->name, "simple" );
  EXPECT_EQ( entry.table->primaryKeys.size(), 4 );
  EXPECT_EQ( entry.table->primaryKeys[0], true );
  EXPECT_EQ( entry.table->primaryKeys[1], false );
  // inserted row: fid, geometry blob, name, (4th column unchecked)
  EXPECT_EQ( entry.newValues.size(), 4 );
  EXPECT_EQ( entry.newValues[0].type(), Value::TypeInt );
  EXPECT_EQ( entry.newValues[0].getInt(), 4 );
  EXPECT_EQ( entry.newValues[1].type(), Value::TypeBlob );
  EXPECT_EQ( entry.newValues[2].type(), Value::TypeText );
  EXPECT_EQ( entry.newValues[2].getString(), "my new point A" );
  // only one entry in this changeset; repeated calls stay false
  EXPECT_FALSE( reader.nextEntry( entry ) );
  EXPECT_FALSE( reader.nextEntry( entry ) );
}
// Reads a changeset with a single UPDATE: unchanged columns carry
// TypeUndefined in both old and new values, except the primary key which is
// always present in the old values.
TEST( ChangesetReaderTest, test_read_update )
{
  std::string changeset = pathjoin( testdir(), "2_updates", "base-updated_A.diff" );
  ChangesetReader reader;
  EXPECT_TRUE( reader.open( changeset ) );
  ChangesetEntry entry;
  EXPECT_TRUE( reader.nextEntry( entry ) );
  EXPECT_EQ( entry.op, ChangesetEntry::OpUpdate );
  EXPECT_EQ( entry.table->name, "simple" );
  EXPECT_EQ( entry.oldValues.size(), 4 );
  EXPECT_EQ( entry.newValues.size(), 4 );
  // pkey - unchanged: old value present, new value undefined
  EXPECT_EQ( entry.oldValues[0].type(), Value::TypeInt );
  EXPECT_EQ( entry.oldValues[0].getInt(), 2 );
  EXPECT_EQ( entry.newValues[0].type(), Value::TypeUndefined );
  // geometry - changed: both old and new blobs present
  EXPECT_EQ( entry.oldValues[1].type(), Value::TypeBlob );
  EXPECT_EQ( entry.newValues[1].type(), Value::TypeBlob );
  // unchanged non-pkey column: undefined on both sides
  EXPECT_EQ( entry.oldValues[2].type(), Value::TypeUndefined );
  EXPECT_EQ( entry.newValues[2].type(), Value::TypeUndefined );
  // changed int column: 2 -> 9999
  EXPECT_EQ( entry.oldValues[3].type(), Value::TypeInt );
  EXPECT_EQ( entry.oldValues[3].getInt(), 2 );
  EXPECT_EQ( entry.newValues[3].type(), Value::TypeInt );
  EXPECT_EQ( entry.newValues[3].getInt(), 9999 );
  EXPECT_FALSE( reader.nextEntry( entry ) );
}
// Reads a changeset with a single DELETE: old values carry the full deleted
// row; once exhausted, nextEntry() keeps returning false.
TEST( ChangesetReaderTest, test_read_delete )
{
  std::string changeset = pathjoin( testdir(), "2_deletes", "base-deleted_A.diff" );
  ChangesetReader reader;
  EXPECT_TRUE( reader.open( changeset ) );
  ChangesetEntry entry;
  EXPECT_TRUE( reader.nextEntry( entry ) );
  EXPECT_EQ( entry.op, ChangesetEntry::OpDelete );
  EXPECT_EQ( entry.table->name, "simple" );
  // deleted row: fid=2, geometry blob, name "feature2", int 2
  EXPECT_EQ( entry.oldValues.size(), 4 );
  EXPECT_EQ( entry.oldValues[0].type(), Value::TypeInt );
  EXPECT_EQ( entry.oldValues[0].getInt(), 2 );
  EXPECT_EQ( entry.oldValues[1].type(), Value::TypeBlob );
  EXPECT_EQ( entry.oldValues[2].type(), Value::TypeText );
  EXPECT_EQ( entry.oldValues[2].getString(), "feature2" );
  EXPECT_EQ( entry.oldValues[3].type(), Value::TypeInt );
  EXPECT_EQ( entry.oldValues[3].getInt(), 2 );
  EXPECT_FALSE( reader.nextEntry( entry ) );
  EXPECT_FALSE( reader.nextEntry( entry ) );
}
// Test entry point: project-wide setup/teardown helpers come from
// geodiff_testutils.hpp and wrap the gtest run.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();
  int ret = RUN_ALL_TESTS();
  finalize_test();
  return ret;
}
<file_sep>
-- Recreate the gd_test_apply schema and move the 'simple' table into it.
-- NOTE(review): assumes gd_base.simple exists — presumably created by the
-- base fixture; verify against the test setup order.
DROP SCHEMA IF EXISTS gd_test_apply CASCADE;
CREATE SCHEMA gd_test_apply;
ALTER TABLE gd_base.simple SET SCHEMA gd_test_apply;
<file_sep>
-- Schemas for the data-type tests: dropped and recreated from scratch.
DROP SCHEMA IF EXISTS gd_datatypes CASCADE;
DROP SCHEMA IF EXISTS gd_datatypes_copy CASCADE;
CREATE SCHEMA gd_datatypes;

-- One table exercising text/varchar/char, uuid and fixed-precision numerics.
CREATE TABLE gd_datatypes.simple (
  "fid" SERIAL PRIMARY KEY,
  "geometry" GEOMETRY(POINT, 4326),
  "name_text" text,
  "name_varchar" character varying,
  "name_varchar_len" character varying(50),
  "name_char_len" character(100),
  "feature_id" uuid DEFAULT uuid_generate_v4(),
  "col_numeric" numeric(10,3),
  "col_decimal" decimal(10,3)
);

-- Single seed row; "feature_id" is left to its uuid_generate_v4() default.
INSERT INTO gd_datatypes.simple
  ("fid", "geometry", "name_text", "name_varchar", "name_varchar_len",
   "name_char_len", "col_numeric", "col_decimal")
VALUES
  (1,
   ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326),
   'feature1',
   'feature1 varchar',
   'feature1 varchar(50)',
   'feature1 char(100)',
   31.203,
   13.302);
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef GEODIFFLOGGER_H
#define GEODIFFLOGGER_H
#include <string>
#include <memory>
#include <vector>
#include <stdio.h>
#include "geodiff.h"
class GeoDiffException;
class Logger
{
  public:
    Logger();
    //! Sets the callback that receives log messages (nullptr to clear it)
    void setCallback( GEODIFF_LoggerCallback loggerCallback );
    //! Messages above this level are suppressed
    void setMaxLogLevel( GEODIFF_LoggerLevel level ) { mMaxLogLevel = level; }
    //! Returns the currently configured maximum log level
    GEODIFF_LoggerLevel maxLogLevel() const { return mMaxLogLevel; }
    // non-copyable
    Logger( Logger const & ) = delete;
    void operator=( Logger const & ) = delete;
    //! Convenience wrappers that log a message at the corresponding level
    void debug( const std::string &msg ) const;
    void warn( const std::string &msg ) const;
    void error( const std::string &msg ) const;
    void info( const std::string &msg ) const;
    //! Prints error message
    void error( const GeoDiffException &exp ) const;
  private:
    GEODIFF_LoggerCallback mLoggerCallback = nullptr;
    GEODIFF_LoggerLevel mMaxLogLevel = GEODIFF_LoggerLevel::LevelError;
    //! Dispatches the message to the configured callback
    void log( GEODIFF_LoggerLevel level, const std::string &msg ) const;
};
#endif // GEODIFFLOGGER_H
<file_sep>#!/bin/sh
set -eu

# Resolve the directory this script lives in to an absolute path.
SCRIPT_DIR=$(dirname "$0")
case "$SCRIPT_DIR" in
    "/"*)
        ;;
    ".")
        SCRIPT_DIR=$(pwd)
        ;;
    *)
        SCRIPT_DIR=$(pwd)/$(dirname "$0")
        ;;
esac

LOG_FILE=/tmp/cppcheck_geodiff.txt
rm -f "${LOG_FILE}"

echo "cppcheck for ${SCRIPT_DIR}/../geodiff"

# Run cppcheck in the background with all output appended to the log file;
# print a dot per second so CI does not look stalled.
# (Fix: variable/command expansions are quoted so paths with spaces work.)
cppcheck --inline-suppr \
    --template='{file}:{line},{severity},{id},{message}' \
    --enable=all --inconclusive --std=c++11 \
    -j "$(nproc)" \
    -igeodiff/src/3rdparty \
    "${SCRIPT_DIR}/../geodiff/src" \
    >>"${LOG_FILE}" 2>&1 &
PID=$!
while kill -0 "$PID" 2>/dev/null; do
    printf "."
    sleep 1
done
echo " done"
# wait retrieves the background job's exit status.
if ! wait "$PID"; then
    echo "cppcheck could not be started"
    exit 1
fi

# Fail if the log contains findings in any category we treat as an error.
ret_code=0
for category in "error" "style" "performance" "warning" "clarifyCalculation" "portability"; do
    if grep "${category}," "${LOG_FILE}" >/dev/null; then
        echo "ERROR: Issues in '${category}' category found:"
        grep "${category}," "${LOG_FILE}"
        echo ""
        echo "${category} check failed !"
        ret_code=1
    fi
done
if [ "${ret_code}" = 0 ]; then
    echo "cppcheck succeeded"
fi
exit "${ret_code}"
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "postgresutils.h"
#include "geodiffutils.hpp"
/**
 * Executes the given SQL on the connection and returns the result object
 * (caller takes ownership and must PQclear it).
 *
 * Throws GeoDiffException when the statement fails, when the connection is
 * broken, or when libpq could not allocate a result ("out of memory").
 */
PGresult *execSql( PGconn *c, const std::string &sql )
{
  PGresult *res = ::PQexec( c, sql.c_str() );
  // happy path: we have a result object and the connection is still alive
  if ( res && ::PQstatus( c ) == CONNECTION_OK )
  {
    int errorStatus = PQresultStatus( res );
    if ( errorStatus != PGRES_COMMAND_OK && errorStatus != PGRES_TUPLES_OK )
    {
      // statement-level failure: capture the message before freeing the result
      std::string err( PQresultErrorMessage( res ) );
      PQclear( res );
      throw GeoDiffException( "postgres cmd error: " + err + "\n\nSQL:\n" + sql );
    }
    return res;
  }
  // connection-level failure takes precedence over a missing result
  if ( PQstatus( c ) != CONNECTION_OK )
  {
    if ( res )
      PQclear( res );
    throw GeoDiffException( "postgres conn error: " + std::string( PQerrorMessage( c ) ) );
  }
  else
  {
    // res is null but the connection is fine: libpq ran out of memory
    throw GeoDiffException( "postgres error: out of memory" );
  }
  return nullptr;
}
//! Returns the identifier quoted for use in PostgreSQL SQL:
//! embedded double quotes are doubled, then the whole name is wrapped in "".
std::string quotedIdentifier( const std::string &ident )
{
  return "\"" + replace( ident, "\"", "\"\"" ) + "\"";
}
//! Returns the value quoted as a PostgreSQL string literal.
//! Single quotes are doubled; if the value contains backslashes, the
//! escape-string form E'...' is used with backslashes doubled as well.
std::string quotedString( const std::string &value )
{
  std::string escaped = replace( value, "'", "''" );
  if ( escaped.find( '\\' ) == std::string::npos )
    return "'" + escaped + "'";
  return "E'" + replace( escaped, "\\", "\\\\" ) + "'";
}
<file_sep>
-- Fixture for the timestamp ("tz") tests: TIMESTAMP WITHOUT TIME ZONE values
-- with varying fractional-second precision (.472, none, .53).
DROP SCHEMA IF EXISTS gd_tz_updated CASCADE;
CREATE SCHEMA gd_tz_updated;
CREATE TABLE gd_tz_updated.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "created" TIMESTAMP WITHOUT TIME ZONE);
INSERT INTO gd_tz_updated.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), '2021-10-28 18:34:19.472');
INSERT INTO gd_tz_updated.simple VALUES (2, ST_GeomFromText('Point (-0.36388508891928861 0.56224350205198359)', 4326), '2021-10-28 18:34:19');
INSERT INTO gd_tz_updated.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326), '2021-10-28 18:34:19.53');
<file_sep># -*- coding: utf-8 -*-
"""
:copyright: (c) 2022 <NAME>
:license: MIT, see LICENSE for more details.
"""
from .testutils import *
import os
import shutil
import pygeodiff
class UnitTestsPythonSingleCommit(GeoDiffTests):
    """Tests that the 'tables to skip' setting is honoured when creating and
    applying changesets (the 'lines' table must be ignored).

    Fixes: removed unused ``modified_points`` locals; the skip-table list is
    now reset in a ``finally`` block so a failing assertion cannot leak the
    setting into other tests.
    """

    def test_skip_create(self):
        """Skipped tables must not contribute entries when a changeset is created."""
        base = geodiff_test_dir() + "/" + "skip_tables" + "/" + "base.gpkg"
        modified = geodiff_test_dir() + "/" + "skip_tables" + "/" + "modified_all.gpkg"
        changeset = tmpdir() + "/py" + "test_skip_create" + "/" + "changeset_points.bin"
        changeset2 = (
            tmpdir() + "/py" + "test_skip_create" + "/" + "changeset_points2.bin"
        )
        changeset_inv = (
            tmpdir() + "/py" + "test_skip_create" + "/" + "changeset_inv.bin"
        )
        patched = tmpdir() + "/py" + "test_skip_create" + "/" + "patched_points.gpkg"
        create_dir("test_skip_create")

        # ignore lines table when creating changeset; always reset afterwards
        self.geodiff.set_tables_to_skip(["lines"])
        try:
            # create changeset - "lines" changes must be excluded (4, not 6)
            self.geodiff.create_changeset(base, modified, changeset)
            check_nchanges(self.geodiff, changeset, 4)
            # apply changeset
            shutil.copyfile(base, patched)
            self.geodiff.apply_changeset(patched, changeset)
            # check that now it is same file
            self.geodiff.create_changeset(patched, modified, changeset2)
            check_nchanges(self.geodiff, changeset2, 0)
            # check we can create inverted changeset
            os.remove(changeset2)
            self.geodiff.invert_changeset(changeset, changeset_inv)
            self.geodiff.apply_changeset(patched, changeset_inv)
            self.geodiff.create_changeset_dr(
                "sqlite", "", patched, "sqlite", "", base, changeset2
            )
            check_nchanges(self.geodiff, changeset2, 0)
        finally:
            self.geodiff.set_tables_to_skip([])

    def test_skip_apply(self):
        """Skipped tables must not be touched when a changeset is applied."""
        base = geodiff_test_dir() + "/" + "skip_tables" + "/" + "base.gpkg"
        modified = geodiff_test_dir() + "/" + "skip_tables" + "/" + "modified_all.gpkg"
        changeset = tmpdir() + "/py" + "test_skip_apply" + "/" + "changeset_points.bin"
        changeset2 = (
            tmpdir() + "/py" + "test_skip_apply" + "/" + "changeset_points2.bin"
        )
        changeset_inv = tmpdir() + "/py" + "test_skip_apply" + "/" + "changeset_inv.bin"
        patched = tmpdir() + "/py" + "test_skip_apply" + "/" + "patched_points.gpkg"
        create_dir("test_skip_apply")

        # create changeset with no skip list: all 6 changes are recorded
        self.geodiff.create_changeset(base, modified, changeset)
        check_nchanges(self.geodiff, changeset, 6)

        # ignore lines table when applying changeset; always reset afterwards
        self.geodiff.set_tables_to_skip(["lines"])
        try:
            # apply changeset
            shutil.copyfile(base, patched)
            self.geodiff.apply_changeset(patched, changeset)
            # check that now it is same file
            self.geodiff.create_changeset(patched, modified, changeset2)
            check_nchanges(self.geodiff, changeset2, 0)
            # check we can create inverted changeset
            os.remove(changeset2)
            self.geodiff.invert_changeset(changeset, changeset_inv)
            self.geodiff.apply_changeset(patched, changeset_inv)
            self.geodiff.create_changeset_dr(
                "sqlite", "", patched, "sqlite", "", base, changeset2
            )
            check_nchanges(self.geodiff, changeset2, 0)
        finally:
            self.geodiff.set_tables_to_skip([])
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2023 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "geodiffutils.hpp"
#include <string>
// Every C API entry point must reject a null context and/or null arguments
// with GEODIFF_ERROR (or -1 / nullptr for the query-style functions)
// instead of crashing.
TEST( CAPITest, invalid_calls )
{
  GEODIFF_ContextH nullContext = nullptr;
  GEODIFF_ContextH context = GEODIFF_createContext();
  char driverName[200];

  // driver queries
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_driverNameFromIndex( nullContext, 0, driverName ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_driverIsRegistered( nullContext, "sqlite" ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_driverIsRegistered( context, nullptr ) );

  // context configuration
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_CX_setLoggerCallback( nullContext, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_CX_setMaximumLoggerLevel( nullContext, GEODIFF_LoggerLevel::LevelWarning ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_CX_setTablesToSkip( nullContext, 0, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_CX_setTablesToSkip( context, 1, nullptr ) );

  // changeset creation
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createChangesetEx( nullContext, "sqlite", nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createChangesetEx( context, "sqlite", nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createChangesetEx( context, "invalid driver", " ", " ", " ", " " ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createChangesetDr( nullContext, "sqlite", nullptr, nullptr, "sqlite", nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createChangesetDr( context, "sqlite", nullptr, nullptr, "sqlite", nullptr, nullptr, nullptr ) );

  // changeset application
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_applyChangesetEx( nullContext, "sqlite", nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_applyChangesetEx( context, "sqlite", nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_applyChangesetEx( context, "invalid driver", " ", " ", " " ) );

  // rebased changeset creation
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createRebasedChangeset( nullContext, nullptr, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createRebasedChangeset( context, nullptr, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createRebasedChangesetEx( nullContext, "sqlite", nullptr, nullptr, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createRebasedChangesetEx( context, "sqlite", nullptr, nullptr, nullptr, nullptr, nullptr, nullptr ) );

  // change inspection (these report failure via -1)
  ASSERT_EQ( -1, GEODIFF_hasChanges( nullContext, nullptr ) );
  ASSERT_EQ( -1, GEODIFF_hasChanges( context, nullptr ) );
  ASSERT_EQ( -1, GEODIFF_changesCount( nullContext, nullptr ) );
  ASSERT_EQ( -1, GEODIFF_changesCount( context, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_listChanges( nullContext, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_listChanges( context, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_listChangesSummary( nullContext, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_listChangesSummary( context, nullptr, nullptr ) );

  // invert / concat
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_invertChangeset( nullContext, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_invertChangeset( context, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_concatChanges( nullContext, 1, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_concatChanges( context, 1, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_concatChanges( context, 2, nullptr, nullptr ) );

  // rebase with progressively "less wrong" arguments - all must still fail
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase( nullContext, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase( context, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase( context, "bad file", "bad file", "bad file", "bad file" ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase( context, pathjoin( testdir(), "base.gpkg" ).c_str(), "bad file", "bad file", "bad file" ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase( context, pathjoin( testdir(), "base.gpkg" ).c_str(), pathjoin( testdir(), "base.gpkg" ).c_str(), "bad file", "bad file" ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebaseEx( nullContext, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebaseEx( context, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr ) );

  // copy / dump / schema
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_makeCopy( nullContext, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_makeCopy( context, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_makeCopy( context, "invalid driver", " ", " ", " ", " ", " " ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_makeCopy( context, "sqlite", " ", " ", "invalid driver", " ", " " ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_makeCopySqlite( nullContext, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_makeCopySqlite( context, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_dumpData( nullContext, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_dumpData( context, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_dumpData( context, "invalid driver", " ", " ", " " ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_schema( nullContext, nullptr, nullptr, nullptr, nullptr ) );
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_schema( context, nullptr, nullptr, nullptr, nullptr ) );

  // changeset reader (these report failure via nullptr)
  ASSERT_EQ( nullptr, GEODIFF_readChangeset( nullContext, nullptr ) );
  ASSERT_EQ( nullptr, GEODIFF_readChangeset( context, nullptr ) );
  ASSERT_EQ( nullptr, GEODIFF_CR_nextEntry( nullContext, nullptr, nullptr ) );
  bool entryOk;
  ASSERT_EQ( nullptr, GEODIFF_CR_nextEntry( nullContext, nullptr, &entryOk ) );
  ASSERT_EQ( nullptr, GEODIFF_CR_nextEntry( context, nullptr, &entryOk ) );

  // wkb conversion
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createWkbFromGpkgHeader( nullContext, nullptr, size_t( 1 ), nullptr, nullptr ) );
  size_t wkbSize;
  const char *wkbData;
  ASSERT_EQ( GEODIFF_ERROR, GEODIFF_createWkbFromGpkgHeader( nullContext, " ", size_t( 0 ), &wkbData, &wkbSize ) );

  GEODIFF_CX_destroy( context );
}
// Verifies both copy APIs on a database that contains custom triggers:
// the driver-based GEODIFF_makeCopy does not produce a byte-identical file
// (triggers are dropped), while GEODIFF_makeCopySqlite succeeds.
TEST( CAPITest, test_copy )
{
  GEODIFF_ContextH ctx = GEODIFF_createContext();
  makedir( pathjoin( tmpdir(), "test_copy" ) );

  // database "db-base.gpkg"
  // - table AUDIT has no PK
  // - custom trigger that adds entry to AUDIT table on each update of "simple" table
  {
    std::string copyViaDriver = pathjoin( tmpdir(), "test_copy", "db-makeCopy.gpkg" );
    ASSERT_EQ( GEODIFF_SUCCESS, GEODIFF_makeCopy(
                 ctx,
                 "sqlite",
                 "",
                 pathjoin( testdir(), "gpkg_custom_triggers", "db-base.gpkg" ).c_str(),
                 "sqlite",
                 "",
                 copyViaDriver.c_str() ) );
    // THIS DROPS ALL TRIGGERS!!
    ASSERT_FALSE( fileContentEquals( copyViaDriver, pathjoin( testdir(), "gpkg_custom_triggers", "db-base.gpkg" ) ) );
  }

  {
    std::string copyViaSqlite = pathjoin( tmpdir(), "test_copy", "db-makeCopySqlite.gpkg" );
    ASSERT_EQ( GEODIFF_SUCCESS, GEODIFF_makeCopySqlite(
                 ctx,
                 pathjoin( testdir(), "gpkg_custom_triggers", "db-base.gpkg" ).c_str(),
                 copyViaSqlite.c_str() ) );
  }

  GEODIFF_CX_destroy( ctx );
}
// Exercises GEODIFF_rebase on three data sets. The first two are expected
// to fail (text primary key, composite primary key -- presumably rebase
// only supports plain integer pks; TODO confirm), the third (geopackage
// with custom triggers) is expected to succeed.
TEST( CAPITest, test_rebases )
{
  GEODIFF_ContextH context = GEODIFF_createContext();
  makedir( pathjoin( tmpdir(), "test_rebases" ) );
  {
    // rebase over a table with a TEXT primary key -> GEODIFF_ERROR
    std::string fileOutput = pathjoin( tmpdir(), "test_rebases", "text_pk_A.sqlite" );
    std::string fileConflict = pathjoin( tmpdir(), "test_rebases", "output_text_pk.log" );
    ASSERT_EQ( GEODIFF_SUCCESS, GEODIFF_makeCopySqlite(
                 context,
                 pathjoin( testdir(), "sqlite_pks", "text_pk_A.sqlite" ).c_str(),
                 fileOutput.c_str() ) );
    ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase(
                 context,
                 pathjoin( testdir(), "sqlite_pks", "text_pk.sqlite" ).c_str(),
                 pathjoin( testdir(), "sqlite_pks", "text_pk_B.sqlite" ).c_str(),
                 fileOutput.c_str(),
                 fileConflict.c_str() )
             );
  }
  {
    // rebase over a table with a composite (multi-column) primary key -> GEODIFF_ERROR
    std::string fileOutput = pathjoin( tmpdir(), "test_rebases", "output_compose_pk.sqlite" );
    std::string fileConflict = pathjoin( tmpdir(), "test_rebases", "output_compose_pk.log" );
    ASSERT_EQ( GEODIFF_SUCCESS, GEODIFF_makeCopySqlite(
                 context,
                 pathjoin( testdir(), "sqlite_pks", "multi_primary_key_A.sqlite" ).c_str(),
                 fileOutput.c_str() ) );
    ASSERT_EQ( GEODIFF_ERROR, GEODIFF_rebase(
                 context,
                 pathjoin( testdir(), "sqlite_pks", "multi_primary_key.sqlite" ).c_str(),
                 pathjoin( testdir(), "sqlite_pks", "multi_primary_key_B.sqlite" ).c_str(),
                 fileOutput.c_str(),
                 fileConflict.c_str() )
             );
  }
  {
    // rebase of a geopackage with custom triggers -> GEODIFF_SUCCESS
    std::string fileOutput = pathjoin( tmpdir(), "test_rebases", "output_custom_triggers.gpkg" );
    std::string fileConflict = pathjoin( tmpdir(), "test_rebases", "output_custom_triggers.log" );
    ASSERT_EQ( GEODIFF_SUCCESS, GEODIFF_makeCopySqlite(
                 context,
                 pathjoin( testdir(), "gpkg_custom_triggers", "db-modified_A.gpkg" ).c_str(),
                 fileOutput.c_str() ) );
    ASSERT_EQ( GEODIFF_SUCCESS, GEODIFF_rebase(
                 context,
                 pathjoin( testdir(), "gpkg_custom_triggers", "db-base.gpkg" ).c_str(),
                 pathjoin( testdir(), "gpkg_custom_triggers", "db-modified_B.gpkg" ).c_str(),
                 fileOutput.c_str(),
                 fileConflict.c_str() )
             );
  }
  GEODIFF_CX_destroy( context );
}
// Test runner entry point: initialise gtest and the geodiff test
// environment, run all registered tests, clean up, and propagate the
// gtest result code.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();
  const int result = RUN_ALL_TESTS();
  finalize_test();
  return result;
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2019 <NAME>
*/
#ifndef GEODIFFREBASE_H
#define GEODIFFREBASE_H
#include <string>
#include <vector>
#include "geodiffutils.hpp"
class Logger;
//! Runs the rebase algorithm: given the changeset files marked "in" below,
//! writes the rebased changeset to changeset_THEIRS_MODIFIED and appends
//! any detected conflicts to the conflicts vector.
//! throws GeoDiffException on error
void rebase( const Context *context,
             const std::string &changeset_BASE_THEIRS, //in
             const std::string &changeset_THEIRS_MODIFIED, // out
             const std::string &changeset_BASE_MODIFIED, //in
             std::vector<ConflictFeature> &conflicts// out
           );
#endif // GEODIFFREBASE_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "changesetreader.h"
#include "geodiffutils.hpp"
#include "changesetgetvarint.h"
#include "portableendian.h"
#include <assert.h>
#include <memory.h>
#include <sstream>
ChangesetReader::ChangesetReader() = default;
ChangesetReader::~ChangesetReader() = default;

// Loads the whole changeset file into an in-memory buffer.
// Returns false when the file cannot be read (the GeoDiffException thrown
// by Buffer::read is swallowed), true on success.
bool ChangesetReader::open( const std::string &filename )
{
  try
  {
    mBuffer.reset( new Buffer );
    mBuffer->read( filename );
  }
  catch ( const GeoDiffException & )
  {
    return false;
  }
  return true;
}
// Reads the next INSERT/UPDATE/DELETE entry from the buffer into `entry`.
// Table header records ('T') encountered on the way update mCurrentTable
// and reading continues. Returns false at end of buffer; throws (via
// throwReaderError) on an unknown record type.
bool ChangesetReader::nextEntry( ChangesetEntry &entry )
{
  while ( 1 )
  {
    if ( mOffset >= mBuffer->size() )
      break; // EOF
    int type = readByte();
    if ( type == 'T' )
    {
      readTableRecord();
      // and now continue reading, we want an entry
    }
    else if ( type == ChangesetEntry::OpInsert || type == ChangesetEntry::OpUpdate || type == ChangesetEntry::OpDelete )
    {
      // skip the one-byte "indirect-change" flag (see changeset format doc)
      readByte();
      // old values are present for UPDATE and DELETE only
      if ( type != ChangesetEntry::OpInsert )
        readRowValues( entry.oldValues );
      else
        entry.oldValues.erase( entry.oldValues.begin(), entry.oldValues.end() );
      // new values are present for UPDATE and INSERT only
      if ( type != ChangesetEntry::OpDelete )
        readRowValues( entry.newValues );
      else
        entry.newValues.erase( entry.newValues.begin(), entry.newValues.end() );
      entry.op = static_cast<ChangesetEntry::OperationType>( type );
      // points at mCurrentTable -- valid only until the next table record / rewind
      entry.table = &mCurrentTable;
      return true; // we're done!
    }
    else
    {
      throwReaderError( "Unknown entry type " + std::to_string( type ) );
    }
  }
  return false;
}
// True when the loaded changeset buffer has zero length (no entries at all).
bool ChangesetReader::isEmpty() const
{
  return mBuffer->size() == 0;
}
// Resets reading back to the start of the buffer and clears the current
// table context, so nextEntry() starts again from the first entry.
void ChangesetReader::rewind()
{
  mOffset = 0;
  mCurrentTable = ChangesetTable();
}
// Reads one byte from the buffer and advances the read offset.
// Throws (via throwReaderError) when the buffer is exhausted.
char ChangesetReader::readByte()
{
  if ( mOffset >= mBuffer->size() )
    throwReaderError( "readByte: at the end of buffer" );
  char value = mBuffer->c_buf()[mOffset];
  ++mOffset;
  return value;
}
// Reads a SQLite-style varint (1-9 bytes) from the buffer and advances
// the offset by the number of bytes consumed.
// NOTE(review): unlike readByte(), there is no end-of-buffer check here --
// a truncated changeset could read past the buffer; confirm getVarint32
// is safe in that case.
int ChangesetReader::readVarint()
{
  u32 value;
  const unsigned char *ptr = ( const unsigned char * )mBuffer->c_buf() + mOffset;
  int nBytes = getVarint32( ptr, value );
  mOffset += nBytes;
  return value;
}
// Reads a nul-terminated string starting at the current offset.
// Throws when no terminator is found before the end of the buffer.
// The returned string excludes the terminator; the offset is advanced
// past it.
std::string ChangesetReader::readNullTerminatedString()
{
  const char *ptr = mBuffer->c_buf() + mOffset;
  int count = 0;
  // scan for the nul byte without running past the end of the buffer
  while ( mOffset + count < mBuffer->size() && ptr[count] )
    ++count;
  if ( mOffset + count >= mBuffer->size() )
    throwReaderError( "readNullTerminatedString: at the end of buffer" );
  mOffset += count + 1;
  return std::string( ptr, count );
}
// Reads one row record (one value per column of the current table) into
// `values`. Each value starts with a type byte; ints and doubles are
// 8-byte big-endian payloads, text/blob is a varint length plus raw
// bytes, NULL and "undefined" carry no payload. Throws on an unknown
// type byte or a truncated text/blob payload.
void ChangesetReader::readRowValues( std::vector<Value> &values )
{
  // let's ensure we have the right size of array
  if ( values.size() != mCurrentTable.columnCount() )
  {
    values.resize( mCurrentTable.columnCount() );
  }
  for ( size_t i = 0; i < mCurrentTable.columnCount(); ++i )
  {
    int type = readByte();
    if ( type == Value::TypeInt ) // 0x01
    {
      // 64-bit int (big endian)
      int64_t v;
      uint64_t x;
      memcpy( &x, mBuffer->c_buf() + mOffset, 8 );
      mOffset += 8;
      x = be64toh( x ); // convert big endian to host
      memcpy( &v, &x, 8 );
      values[i].setInt( v );
    }
    else if ( type == Value::TypeDouble ) // 0x02
    {
      // 64-bit double (big endian)
      double v;
      uint64_t x;
      memcpy( &x, mBuffer->c_buf() + mOffset, 8 );
      mOffset += 8;
      x = be64toh( x ); // convert big endian to host
      memcpy( &v, &x, 8 );
      values[i].setDouble( v );
    }
    else if ( type == Value::TypeText || type == Value::TypeBlob ) // 0x03 or 0x04
    {
      int len = readVarint();
      if ( mOffset + len > mBuffer->size() )
        throwReaderError( "readRowValues: text/blob: at the end of buffer" );
      values[i].setString( type == Value::TypeText ? Value::TypeText : Value::TypeBlob, mBuffer->c_buf() + mOffset, len );
      mOffset += len;
    }
    else if ( type == Value::TypeNull ) // 0x05
    {
      values[i].setNull();
    }
    else if ( type == Value::TypeUndefined ) // undefined value (different from NULL)
    {
      values[i].setUndefined();
    }
    else
    {
      throwReaderError( "readRowValues: unexpected entry type" );
    }
  }
}
/* A 'table' record consists of:
**
** * A constant 'T' character,
** * Number of columns in said table (a varint),
** * An array of nCol bytes (sPK),
** * A nul-terminated table name.
*/
// Parses the table record (the 'T' byte has already been consumed by the
// caller) and makes it the "current table" for subsequent row entries.
// Throws when the column count is implausible (corrupted changeset).
void ChangesetReader::readTableRecord()
{
  int nCol = readVarint();
  if ( nCol < 0 || nCol > 65536 )
    // fixed: the error message used to say "readByte:", which pointed at
    // the wrong function when diagnosing a broken changeset
    throwReaderError( "readTableRecord: unexpected number of columns" );

  mCurrentTable.primaryKeys.clear();
  for ( int i = 0; i < nCol; ++i )
  {
    mCurrentTable.primaryKeys.push_back( readByte() );
  }
  mCurrentTable.name = readNullTerminatedString();
}
void ChangesetReader::throwReaderError( const std::string &message ) const
{
std::ostringstream stringStream;
stringStream << "Reader error at offset " << mOffset << ":\n" << message;
std::string str = stringStream.str();
throw GeoDiffException( str );
}
<file_sep>
# Changeset Format
The format for changesets is borrowed from SQLite3 session extension's internal format
and it is currently 100% compatible with it. Below are details of the format, extracted
from SQLite3 source code.
## Summary
A changeset is a collection of DELETE, UPDATE and INSERT operations on
one or more tables. Operations on a single table are grouped together,
but may occur in any order (i.e. deletes, updates and inserts are all
mixed together).
Each group of changes begins with a table header:
- 1 byte: Constant 0x54 (capital 'T')
- Varint: Number of columns in the table.
- nCol bytes: 0x01 for PK columns, 0x00 otherwise.
- N bytes: Unqualified table name (encoded using UTF-8). Nul-terminated.
Followed by one or more changes to the table.
- 1 byte: Either SQLITE_INSERT (0x12), UPDATE (0x17) or DELETE (0x09).
- 1 byte: The "indirect-change" flag.
- old.* record: (delete and update only)
- new.* record: (insert and update only)
The "old.*" and "new.*" records, if present, are N field records in the
format described below under "Record Format", where N is the number of
columns in the table. The i'th field of each record is associated with
the i'th column of the table, counting from left to right in the order
in which columns were declared in the CREATE TABLE statement.
The new.* record that is part of each INSERT change contains the values
that make up the new row. Similarly, the old.* record that is part of each
DELETE change contains the values that made up the row that was deleted
from the database. In the changeset format, the records that are part
of INSERT or DELETE changes never contain any undefined (type byte 0x00)
fields.
Within the old.* record associated with an UPDATE change, all fields
associated with table columns that are not PRIMARY KEY columns and are
not modified by the UPDATE change are set to "undefined". Other fields
are set to the values that made up the row before the UPDATE that the
change records took place. Within the new.* record, fields associated
with table columns modified by the UPDATE change contain the new
values. Fields associated with table columns that are not modified
are set to "undefined".
# Record Format
Unlike the SQLite database record format, each field is self-contained -
there is no separation of header and data. Each field begins with a
single byte describing its type, as follows:
0x00: Undefined value.
0x01: Integer value.
0x02: Real value.
0x03: Text value.
0x04: Blob value.
0x05: SQL NULL value.
Note that the above match the definitions of SQLITE_INTEGER, SQLITE_TEXT
and so on in sqlite3.h. For undefined and NULL values, the field consists
only of the single type byte. For other types of values, the type byte
is followed by:
- Text values:
A varint containing the number of bytes in the value (encoded using
UTF-8). Followed by a buffer containing the UTF-8 representation
of the text value. There is no nul terminator.
- Blob values:
A varint containing the number of bytes in the value, followed by
a buffer containing the value itself.
- Integer values:
An 8-byte big-endian integer value.
- Real values:
An 8-byte big-endian IEEE 754-2008 real value.
# Varint Format
Varint values are encoded in the same way as varints in the SQLite
record format.
The variable-length integer encoding is as follows:
```
KEY:
A = 0xxxxxxx 7 bits of data and one flag bit
B = 1xxxxxxx 7 bits of data and one flag bit
C = xxxxxxxx 8 bits of data
7 bits - A
14 bits - BA
21 bits - BBA
28 bits - BBBA
35 bits - BBBBA
42 bits - BBBBBA
49 bits - BBBBBBA
56 bits - BBBBBBBA
64 bits - BBBBBBBBC
```
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "changesetwriter.h"
#include "geodiffutils.hpp"
#include "changesetputvarint.h"
#include "portableendian.h"
#include <assert.h>
#include <memory.h>
#include <sstream>
// Opens the target file for writing the changeset in binary mode.
// On Windows the filename is converted to a wide string so paths with
// non-ASCII characters work. Throws GeoDiffException when the file
// cannot be opened.
void ChangesetWriter::open( const std::string &filename )
{
#ifdef WIN32
  mFile.open( stringToWString( filename ), std::ios::out | std::ios::binary );
#else
  mFile.open( filename, std::ios::out | std::ios::binary );
#endif
  if ( !mFile.is_open() )
    throw GeoDiffException( "Unable to open changeset file for writing: " + filename );
}
// Writes a table header record: the 'T' byte, the column count (varint),
// one primary-key flag byte per column, then the nul-terminated table
// name. The table becomes "current" for subsequent writeEntry() calls.
void ChangesetWriter::beginTable( const ChangesetTable &table )
{
  mCurrentTable = table;
  writeByte( 'T' );
  writeVarint( ( int ) table.columnCount() );
  for ( size_t i = 0; i < table.columnCount(); ++i )
    writeByte( table.primaryKeys[i] );
  writeNullTerminatedString( table.name );
}
// Serialises one INSERT/UPDATE/DELETE entry for the current table; any
// other operation code is rejected with a GeoDiffException.
void ChangesetWriter::writeEntry( const ChangesetEntry &entry )
{
  const bool isInsert = entry.op == ChangesetEntry::OpInsert;
  const bool isUpdate = entry.op == ChangesetEntry::OpUpdate;
  const bool isDelete = entry.op == ChangesetEntry::OpDelete;
  if ( !isInsert && !isUpdate && !isDelete )
    throw GeoDiffException( "wrong op for changeset entry" );

  writeByte( ( char ) entry.op );
  writeByte( 0 ); // "indirect" always false

  if ( !isInsert )
    writeRowValues( entry.oldValues );  // DELETE and UPDATE carry old values
  if ( !isDelete )
    writeRowValues( entry.newValues );  // INSERT and UPDATE carry new values
}
// Appends a single raw byte to the output file.
void ChangesetWriter::writeByte( char c )
{
  mFile.put( c );
}
// Writes an integer as a SQLite-style varint; the encoded form occupies
// between 1 and 9 bytes.
void ChangesetWriter::writeVarint( int n )
{
  unsigned char output[9]; // 1-9 bytes
  int numBytes = putVarint32( output, n );
  mFile.write( ( char * )output, numBytes );
}
void ChangesetWriter::writeNullTerminatedString( const std::string &str )
{
mFile.write( str.c_str(), str.size() + 1 );
}
// Writes one row record (one value per column of the current table):
// a type byte per value, then the payload -- ints/doubles as 8-byte
// big-endian, text/blob as varint length plus raw bytes, NULL and
// "undefined" with no payload. Throws when the value count does not
// match the table's column count or on an unexpected value type.
void ChangesetWriter::writeRowValues( const std::vector<Value> &values )
{
  if ( values.size() != mCurrentTable.columnCount() )
    throw GeoDiffException( "wrong number of rows in the entry" );

  for ( size_t i = 0; i < mCurrentTable.columnCount(); ++i )
  {
    Value::Type type = values[i].type();
    writeByte( ( char ) type );

    if ( type == Value::TypeInt ) // 0x01
    {
      // 64-bit int (big endian)
      uint64_t x;
      int64_t v = values[i].getInt();
      memcpy( &x, &v, 8 );
      x = htobe64( x ); // convert host to big endian
      mFile.write( ( char * )&x, 8 );
    }
    else if ( type == Value::TypeDouble ) // 0x02
    {
      // 64-bit double (big endian)
      // fixed: use uint64_t for the raw bits, consistent with the int
      // branch above and with ChangesetReader (htobe64 works on uint64_t)
      uint64_t x;
      double v = values[i].getDouble();
      memcpy( &x, &v, 8 );
      x = htobe64( x ); // convert host to big endian
      mFile.write( ( char * )&x, 8 );
    }
    else if ( type == Value::TypeText || type == Value::TypeBlob ) // 0x03 or 0x04
    {
      const std::string &str = values[i].getString();
      writeVarint( ( int ) str.size() );
      mFile.write( str.c_str(), str.size() );
    }
    else if ( type == Value::TypeNull ) // 0x05
    {
      // nothing extra to write
    }
    else if ( type == Value::TypeUndefined ) // undefined value (different from NULL)
    {
      // nothing extra to write
    }
    else
    {
      throw GeoDiffException( "unexpected entry type" );
    }
  }
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2019 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "geodiffutils.hpp"
// Creating a changeset must fail when the modified database has an extra
// column -- schema changes are not supported.
TEST( ModifiedSchemeSqlite3Test, add_attribute )
{
  std::cout << "geopackage add attribute to table" << std::endl;

  std::string testname = "added_attribute";
  makedir( pathjoin( tmpdir(), testname ) );

  std::string basePath = pathjoin( testdir(), "base.gpkg" );
  std::string modifiedPath = pathjoin( testdir(), "modified_scheme", "added_attribute.gpkg" );
  std::string changesetPath = pathjoin( tmpdir(), testname, "changeset.bin" );

  ASSERT_EQ( GEODIFF_createChangeset( testContext(), basePath.c_str(), modifiedPath.c_str(), changesetPath.c_str() ), GEODIFF_ERROR );
}
// Creating a changeset must fail when the modified database contains an
// extra table -- schema changes are not supported.
TEST( ModifiedSchemeSqlite3Test, add_table )
{
  std::cout << "geopackage add table to table" << std::endl;

  std::string testname = "add_table";
  makedir( pathjoin( tmpdir(), testname ) );

  std::string basePath = pathjoin( testdir(), "base.gpkg" );
  std::string modifiedPath = pathjoin( testdir(), "modified_scheme", "added_table.gpkg" );
  std::string changesetPath = pathjoin( tmpdir(), testname, "changeset.bin" );

  ASSERT_EQ( GEODIFF_createChangeset( testContext(), basePath.c_str(), modifiedPath.c_str(), changesetPath.c_str() ), GEODIFF_ERROR );
}
// Creating a changeset must fail when the modified database is missing a
// column present in the base -- schema changes are not supported.
TEST( ModifiedSchemeSqlite3Test, delete_attribute )
{
  // fixed: the log message was copy-pasted from the add_attribute test
  std::cout << "geopackage delete attribute from table" << std::endl;
  std::string testname = "delete_attribute";
  makedir( pathjoin( tmpdir(), testname ) );
  std::string base = pathjoin( testdir(), "modified_scheme", "added_attribute.gpkg" );
  std::string modified = pathjoin( testdir(), "base.gpkg" );
  std::string changeset = pathjoin( tmpdir(), testname, "changeset.bin" );
  ASSERT_EQ( GEODIFF_createChangeset( testContext(), base.c_str(), modified.c_str(), changeset.c_str() ), GEODIFF_ERROR );
}
// Creating a changeset must fail when the modified database is missing a
// table present in the base -- schema changes are not supported.
TEST( ModifiedSchemeSqlite3Test, delete_table )
{
  std::cout << "geopackage delete table" << std::endl;

  std::string testname = "delete_table";
  makedir( pathjoin( tmpdir(), testname ) );

  std::string basePath = pathjoin( testdir(), "modified_scheme", "added_table.gpkg" );
  std::string modifiedPath = pathjoin( testdir(), "base.gpkg" );
  std::string changesetPath = pathjoin( tmpdir(), testname, "changeset.bin" );

  ASSERT_EQ( GEODIFF_createChangeset( testContext(), basePath.c_str(), modifiedPath.c_str(), changesetPath.c_str() ), GEODIFF_ERROR );
}
// Creating a changeset must fail when the two databases have the same
// table count but differently named tables.
TEST( ModifiedSchemeSqlite3Test, rename_table )
{
  std::cout << "geopackage table count is same, but tables have different name" << std::endl;
  // fixed: testname was copy-pasted as "delete_table", which made this test
  // share (and potentially clobber) the delete_table test's tmp directory
  std::string testname = "rename_table";
  makedir( pathjoin( tmpdir(), testname ) );
  std::string base = pathjoin( testdir(), "modified_scheme", "added_table.gpkg" );
  std::string modified = pathjoin( testdir(), "modified_scheme", "added_table2.gpkg" );
  std::string changeset = pathjoin( tmpdir(), testname, "changeset.bin" );
  ASSERT_EQ( GEODIFF_createChangeset( testContext(), base.c_str(), modified.c_str(), changeset.c_str() ), GEODIFF_ERROR );
}
// Creating a changeset must fail when a table has the same column count
// but differently named columns.
TEST( ModifiedSchemeSqlite3Test, rename_attribute )
{
  std::cout << "geopackage attribute count is same, but have different name" << std::endl;

  std::string testname = "rename_attribute";
  makedir( pathjoin( tmpdir(), testname ) );

  std::string basePath = pathjoin( testdir(), "modified_scheme", "added_attribute.gpkg" );
  std::string modifiedPath = pathjoin( testdir(), "modified_scheme", "added_attribute2.gpkg" );
  std::string changesetPath = pathjoin( tmpdir(), testname, "changeset.bin" );

  ASSERT_EQ( GEODIFF_createChangeset( testContext(), basePath.c_str(), modifiedPath.c_str(), changesetPath.c_str() ), GEODIFF_ERROR );
}
// Test runner entry point: initialise gtest and the geodiff test
// environment, run all registered tests, clean up, and propagate the
// gtest result code.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();
  const int result = RUN_ALL_TESTS();
  finalize_test();
  return result;
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "changesetutils.h"
#include "base64utils.h"
#include "geodiffutils.hpp"
#include "changesetreader.h"
#include "changesetwriter.h"
#include "tableschema.h"
// Builds a changeset-style table descriptor (table name plus one
// primary-key flag per column) from a full table schema.
ChangesetTable schemaToChangesetTable( const std::string &tableName, const TableSchema &tbl )
{
  ChangesetTable result;
  result.name = tableName;
  for ( size_t i = 0; i < tbl.columns.size(); ++i )
    result.primaryKeys.push_back( tbl.columns[i].isPrimaryKey );
  return result;
}
// Writes the inverse of the changeset read from `reader` into `writer`:
// INSERT becomes DELETE, DELETE becomes INSERT, and UPDATE swaps its
// old/new values (with special care for unchanged primary-key columns).
// Throws GeoDiffException on an unknown operation.
void invertChangeset( ChangesetReader &reader, ChangesetWriter &writer )
{
  std::string currentTableName;
  std::vector<bool> currentPkeys;
  ChangesetEntry entry;
  while ( reader.nextEntry( entry ) )
  {
    assert( entry.table );
    // emit a table header whenever the entry belongs to a new table
    if ( entry.table->name != currentTableName )
    {
      writer.beginTable( *entry.table );
      currentTableName = entry.table->name;
      currentPkeys = entry.table->primaryKeys;
    }
    if ( entry.op == ChangesetEntry::OpInsert )
    {
      // inverse of INSERT is DELETE of the inserted row
      ChangesetEntry out;
      out.op = ChangesetEntry::OpDelete;
      out.oldValues = entry.newValues;
      writer.writeEntry( out );
    }
    else if ( entry.op == ChangesetEntry::OpDelete )
    {
      // inverse of DELETE is INSERT of the deleted row
      ChangesetEntry out;
      out.op = ChangesetEntry::OpInsert;
      out.newValues = entry.oldValues;
      writer.writeEntry( out );
    }
    else if ( entry.op == ChangesetEntry::OpUpdate )
    {
      // inverse of UPDATE swaps old and new values
      ChangesetEntry out;
      out.op = ChangesetEntry::OpUpdate;
      out.newValues = entry.oldValues;
      out.oldValues = entry.newValues;
      // if a column is a part of pkey and has not been changed,
      // the original entry has "old" value the pkey value and "new"
      // value is undefined - let's reverse "old" and "new" in that case.
      for ( size_t i = 0; i < currentPkeys.size(); ++i )
      {
        if ( currentPkeys[i] && out.oldValues[i].type() == Value::TypeUndefined )
        {
          out.oldValues[i] = out.newValues[i];
          out.newValues[i].setUndefined();
        }
      }
      writer.writeEntry( out );
    }
    else
    {
      throw GeoDiffException( "Unknown entry operation!" );
    }
  }
}
// Converts a single changeset Value to JSON:
// - undefined  -> empty json (callers omit the field entirely)
// - int/double -> JSON number
// - text       -> JSON string
// - blob       -> base64-encoded JSON string
// - SQL NULL   -> the sentinel string "null" (callers translate it to a
//                 real JSON null; see changesetEntryToJSON/conflictToJSON)
nlohmann::json valueToJSON( const Value &value )
{
  nlohmann::json j;
  switch ( value.type() )
  {
    case Value::TypeUndefined:
      break; // actually this not get printed - undefined value should be omitted completely
    case Value::TypeInt:
      j = value.getInt();
      break;
    case Value::TypeDouble:
      j = value.getDouble();
      break;
    case Value::TypeText:
      j = value.getString();
      break;
    case Value::TypeBlob:
    {
      // this used to either show "blob N bytes" or would be converted to WKT
      // but this is better - it preserves content of any type + can be decoded back
      std::string base64 = base64_encode( ( const unsigned char * ) value.getString().data(), ( unsigned int ) value.getString().size() );
      j = base64;
      break;
    }
    case Value::TypeNull:
      j = "null";
      break;
    default:
      j = "(unknown)"; // should never happen
  }
  return j;
}
// Converts one changeset entry to JSON of the shape
// { "table": ..., "type": "insert"|"update"|"delete", "changes": [ ... ] }
// where each change lists the column index and its old and/or new value.
// Columns where both values are undefined are skipped; the "null" sentinel
// produced by valueToJSON is translated to a real JSON null here.
nlohmann::json changesetEntryToJSON( const ChangesetEntry &entry )
{
  std::string status;
  if ( entry.op == ChangesetEntry::OpUpdate )
    status = "update";
  else if ( entry.op == ChangesetEntry::OpInsert )
    status = "insert";
  else if ( entry.op == ChangesetEntry::OpDelete )
    status = "delete";
  nlohmann::json res;
  res[ "table" ] = entry.table->name;
  res[ "type" ] = status;
  auto entries = nlohmann::json::array();
  Value valueOld, valueNew;
  for ( size_t i = 0; i < entry.table->columnCount(); ++i )
  {
    // old values exist for update/delete, new values for update/insert;
    // a default Value() (undefined) stands in otherwise
    valueNew = ( entry.op == ChangesetEntry::OpUpdate || entry.op == ChangesetEntry::OpInsert ) ? entry.newValues[i] : Value();
    valueOld = ( entry.op == ChangesetEntry::OpUpdate || entry.op == ChangesetEntry::OpDelete ) ? entry.oldValues[i] : Value();
    nlohmann::json change;
    if ( valueNew.type() != Value::TypeUndefined || valueOld.type() != Value::TypeUndefined )
    {
      change[ "column" ] = i;
      nlohmann::json jsonValueOld = valueToJSON( valueOld );
      nlohmann::json jsonValueNew = valueToJSON( valueNew );
      if ( !jsonValueOld.empty() )
      {
        if ( jsonValueOld == "null" )
          change[ "old" ] = nullptr;
        else
          change[ "old" ] = jsonValueOld;
      }
      if ( !jsonValueNew.empty() )
      {
        if ( jsonValueNew == "null" )
          change[ "new" ] = nullptr;
        else
          change[ "new" ] = jsonValueNew;
      }
      entries.push_back( change );
    }
  }
  res[ "changes" ] = entries;
  return res;
}
// Converts an entire changeset to JSON: { "geodiff": [ <entry>, ... ] }.
// Empty per-entry JSON objects are skipped.
nlohmann::json changesetToJSON( ChangesetReader &reader )
{
  auto changes = nlohmann::json::array();
  ChangesetEntry entry;
  while ( reader.nextEntry( entry ) )
  {
    nlohmann::json entryJson = changesetEntryToJSON( entry );
    if ( !entryJson.empty() )
      changes.push_back( entryJson );
  }
  nlohmann::json result;
  result[ "geodiff" ] = changes;
  return result;
}
//! auxiliary table used to create table changes summary
struct TableSummary
{
  // per-table operation counters, all starting at zero
  int inserts = 0;
  int updates = 0;
  int deletes = 0;
};
// Produces a per-table summary of a changeset as JSON:
// { "geodiff_summary": [ { "table": ..., "insert": n, "update": n,
//   "delete": n }, ... ] } -- one object per table, counting operations.
nlohmann::json changesetToJSONSummary( ChangesetReader &reader )
{
  // first pass: count operations per table name
  std::map< std::string, TableSummary > summary;
  ChangesetEntry entry;
  while ( reader.nextEntry( entry ) )
  {
    std::string tableName = entry.table->name;
    TableSummary &tableSummary = summary[tableName];
    if ( entry.op == ChangesetEntry::OpUpdate )
      ++tableSummary.updates;
    else if ( entry.op == ChangesetEntry::OpInsert )
      ++tableSummary.inserts;
    else if ( entry.op == ChangesetEntry::OpDelete )
      ++tableSummary.deletes;
  }
  // write JSON
  auto entries = nlohmann::json::array();
  for ( const auto &kv : summary )
  {
    nlohmann::json tableJson;
    tableJson[ "table" ] = kv.first;
    tableJson[ "insert" ] = kv.second.inserts;
    tableJson[ "update" ] = kv.second.updates;
    tableJson[ "delete" ] = kv.second.deletes;
    entries.push_back( tableJson );
  }
  nlohmann::json res;
  res[ "geodiff_summary" ] = entries;
  return res;
}
// Converts one rebase conflict to JSON of the shape
// { "table": ..., "type": "conflict", "fid": ..., "changes": [ ... ] }
// where each change lists a column with its "base", "old" (theirs) and
// "new" (ours) values. Like in changesetEntryToJSON, the "null" sentinel
// from valueToJSON becomes a real JSON null and empty values are omitted.
nlohmann::json conflictToJSON( const ConflictFeature &conflict )
{
  nlohmann::json res;
  res[ "table" ] = std::string( conflict.tableName() );
  res[ "type" ] = "conflict";
  res[ "fid" ] = std::to_string( conflict.pk() );
  auto entries = nlohmann::json::array();
  const std::vector<ConflictItem> items = conflict.items();
  for ( const ConflictItem &item : items )
  {
    nlohmann::json change;
    change[ "column" ] = item.column();
    nlohmann::json valueBase = valueToJSON( item.base() );
    nlohmann::json valueOld = valueToJSON( item.theirs() );
    nlohmann::json valueNew = valueToJSON( item.ours() );
    if ( !valueBase.empty() )
    {
      if ( valueBase == "null" )
        change[ "base" ] = nullptr;
      else
        change[ "base" ] = valueBase;
    }
    if ( !valueOld.empty() )
    {
      if ( valueOld == "null" )
        change[ "old" ] = nullptr;
      else
        change[ "old" ] = valueOld;
    }
    if ( !valueNew.empty() )
    {
      if ( valueNew == "null" )
        change[ "new" ] = nullptr;
      else
        change[ "new" ] = valueNew;
    }
    entries.push_back( change );
  }
  res[ "changes" ] = entries;
  return res;
}
// Converts a list of rebase conflicts to JSON: { "geodiff": [ ... ] }.
// Empty conflict objects are skipped.
nlohmann::json conflictsToJSON( const std::vector<ConflictFeature> &conflicts )
{
  auto conflictArray = nlohmann::json::array();
  for ( const ConflictFeature &conflict : conflicts )
  {
    nlohmann::json conflictJson = conflictToJSON( conflict );
    if ( !conflictJson.empty() )
      conflictArray.push_back( conflictJson );
  }
  nlohmann::json result;
  result[ "geodiff" ] = conflictArray;
  return result;
}
// Converts a single hex digit ('0'-'9', 'a'-'f', 'A'-'F') to its numeric
// value 0-15. Any other input is a programming error (asserted).
inline int hex2num( unsigned char i )
{
  if ( i >= '0' && i <= '9' )
    return i - '0';
  if ( i >= 'a' && i <= 'f' )
    return i - 'a' + 10;
  if ( i >= 'A' && i <= 'F' )
    return i - 'A' + 10;
  assert( false );
  return 0; // should never happen
}
// Converts a number 0-15 to its uppercase hex digit. Out-of-range input
// is a programming error (asserted); returns '?' in release builds.
inline char num2hex( int n )
{
  assert( n >= 0 && n < 16 );
  static const char digits[] = "0123456789ABCDEF";
  if ( n >= 0 && n < 16 )
    return digits[n];
  return '?'; // should never happen
}
// Decodes a hex string into raw bytes (e.g. "41" -> "A").
// The input length must be even (asserted).
std::string hex2bin( const std::string &str )
{
  assert( str.size() % 2 == 0 );
  std::string output( str.size() / 2, 0 );
  for ( size_t out = 0; out < output.size(); ++out )
  {
    const int hi = hex2num( str[2 * out] );
    const int lo = hex2num( str[2 * out + 1] );
    output[out] = char( hi * 16 + lo );
  }
  return output;
}
// Encodes raw bytes as an uppercase hex string (e.g. "A" -> "41").
std::string bin2hex( const std::string &str )
{
  std::string output;
  output.reserve( str.size() * 2 );
  for ( size_t i = 0; i < str.size(); ++i )
  {
    const unsigned char ch = str[i];
    output += num2hex( ch >> 4 );   // high nibble (== ch / 16)
    output += num2hex( ch & 0x0F ); // low nibble  (== ch % 16)
  }
  return output;
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef DRIVER_H
#define DRIVER_H
#include <map>
#include <memory>
#include <vector>
#include <string>
#include "geodiff.h"
#include "tableschema.h"
class Context;
class ChangesetReader;
class ChangesetWriter;
typedef std::map<std::string, std::string> DriverParametersMap;
/**
* Abstracts all backend-specific work.
*
* A driver is normally opened with a reference to two data sources - the "base" ("old") source and
* the "modified" ("new") data source. By comparing the two sources, it can create changesets
* using createChangeset() method.
*
* When applying an existing changeset using applyChangeset() method, we only need one source which
* will be modified. In this case, a driver may be opened with a single source only, but it will
* not be possible to call createChangeset() because of missing second data source.
*
* Supported driver names:
*
* - "sqlite" - compares two sqlite database files. GeoPackages are supported as well.
* Use sqliteParameters() or sqliteParametersSingleSource() to get parameters to open the driver.
* - "postgres" - TODO:add docs
*
* Use createDriver() to create instance of a driver.
*/
class Driver
{
public:
/**
* Returns list of supported driver names
*/
static std::vector<std::string> drivers();
/**
* Returns whether driver is supported
*/
static bool driverIsRegistered( const std::string &driverName );
/**
* Returns a new instance of a driver given its name. Returns nullptr if such driver does not exist.
*/
static std::unique_ptr<Driver> createDriver( const Context *context, const std::string &driverName );
/**
* Returns driver parameters for Sqlite driver - it needs filenames of two sqlite databases.
*/
static DriverParametersMap sqliteParameters( const std::string &filenameBase, const std::string &filenameModified );
/**
* Returns driver parameters for Sqlite driver, but only using a single database.
*/
static DriverParametersMap sqliteParametersSingleSource( const std::string &filename );
//
explicit Driver( const Context *context );
virtual ~Driver();
/**
* Opens a geodiff session using a set of key-value pairs with connection configuration.
* The expected keys and values depend on the driver being used.
*
* On error the function throws GeoDiffException with the cause.
*/
virtual void open( const DriverParametersMap &conn ) = 0;
/**
* Opens a new geodiff session that creates data source. For example, for Sqlite this means creating
* a new database file, for Postgres this is creation of the specified database schema (namespace).
* \note This method only uses 'base' database ('modified' does not need to be specified)
*/
virtual void create( const DriverParametersMap &conn, bool overwrite = false ) = 0;
/**
* Returns a list of tables in the current connection. The useModified argument
* decides whether the list should be created for the base file/schema or for the locally
* modified file/schema.
*/
virtual std::vector<std::string> listTables( bool useModified = false ) = 0;
/**
* Returns table schema information for a given table. This is used to check compatibility
* between different tables.
*/
virtual TableSchema tableSchema( const std::string &tableName, bool useModified = false ) = 0;
/**
* Writes changes between base and modified tables to the given writer
* \note This method requires that both 'base' and 'modified' databases have been specified
* when opening the driver.
*/
virtual void createChangeset( ChangesetWriter &writer ) = 0;
/**
* Reads changes from the given reader and tries to apply them to the tables.
* \note This method only uses 'base' database ('modified' does not need to be specified when opening)
*/
virtual void applyChangeset( ChangesetReader &reader ) = 0;
/**
* Creates empty tables based on the definition given by 'tables' argument.
* \note This method only uses 'base' database ('modified' does not need to be specified when opening)
*/
virtual void createTables( const std::vector<TableSchema> &tables ) = 0;
/**
* Writes all rows of the specified table to a changeset (it will output only INSERT operations)
*/
virtual void dumpData( ChangesetWriter &writer, bool useModified = false ) = 0;
/**
* Tests whether the table schemas are compatible with our rebase algorithm, i.e. no unsupported
* database features are used. Currently, for example, geodiff rebase does not deal with foreign
* keys or with user-defined triggers.
*
* If the check fails, GeoDiffException is thrown.
*/
virtual void checkCompatibleForRebase( bool useModified = false ) = 0;
static const std::string SQLITEDRIVERNAME;
static const std::string POSTGRESDRIVERNAME;
const Context *context() const;
private:
const Context *mContext; // never null
};
#endif // DRIVER_H
<file_sep>import os
import shutil
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--version', help='version to fetch')
parser.add_argument('--python_version', nargs='?', default=37, help='python version to fetch')
args = parser.parse_args()
VERSION = args.version
PYTHON_VERSION = str(args.python_version)
print("using version " + VERSION)
print("python version " + PYTHON_VERSION)
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
RESULT_DIR = os.path.join(THIS_DIR, os.pardir, "build-platforms")
TMPDIR = os.path.join(RESULT_DIR, "tmp")
FINALDIR = os.path.join(RESULT_DIR, "pygeodiff")
PWD = os.curdir
if os.path.exists(RESULT_DIR):
shutil.rmtree(RESULT_DIR)
os.makedirs(RESULT_DIR)
os.makedirs(TMPDIR)
source = "pygeodiff-" + VERSION + ".tar.gz"
plats = ["win32",
"win_amd64",
"macosx_10_9_x86_64",
"manylinux_2_24_x86_64"
]
print("Download")
os.chdir(TMPDIR)
for plat in plats:
print("Fetching " + plat)
os.system("pip3 download --only-binary=:all: --no-deps --platform "+plat+" --python-version "+PYTHON_VERSION+" --implementation cp --abi cp"+PYTHON_VERSION+"m pygeodiff==" + VERSION )
print("Extract & Combine")
for plat in plats:
platdir = "pygeodiff-" + VERSION + "-" + plat
os.system("unzip pygeodiff-" + VERSION + "-cp"+PYTHON_VERSION+"-cp"+PYTHON_VERSION+"m-" + plat + ".whl -d " + platdir )
if not os.path.exists(FINALDIR):
os.mkdir(FINALDIR)
os.system("cp "+platdir+"/pygeodiff/* "+FINALDIR+"/")
if ((not os.path.exists(FINALDIR)) or
(not os.path.exists(FINALDIR + "/pygeodiff-" + VERSION + "-python.pyd")) or
(not os.path.exists(FINALDIR + "/pygeodiff-" + VERSION + "-python-win32.pyd")) or
(not os.path.exists(FINALDIR + "/libpygeodiff-" + VERSION + "-python.dylib")) or
(not os.path.exists(FINALDIR + "/libpygeodiff-" + VERSION + "-python.so"))
):
print ("ERROR")
exit(1)
else:
print("Done")
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef POSTGRESUTILS_H
#define POSTGRESUTILS_H
#include <assert.h>
#include <string>
extern "C"
{
#include <libpq-fe.h>
}
/**
 * RAII wrapper around a libpq PGresult handle.
 *
 * Takes ownership of the PGresult passed to the constructor and clears it
 * with PQclear on destruction. Copying is disabled: two owners of the same
 * handle would lead to a double PQclear.
 */
class PostgresResult
{
  public:
    //! Takes ownership of result (may be null)
    explicit PostgresResult( PGresult *result ) : mResult( result ) {}

    ~PostgresResult()
    {
      if ( mResult )
        ::PQclear( mResult );
      mResult = nullptr;
    }

    //! Owning wrapper - copying would cause a double free
    PostgresResult( const PostgresResult & ) = delete;
    PostgresResult &operator=( const PostgresResult & ) = delete;

    //! Returns the result status, or PGRES_FATAL_ERROR when no result is attached
    ExecStatusType status() const
    {
      return mResult ? ::PQresultStatus( mResult ) : PGRES_FATAL_ERROR;
    }

    //! Returns the error message associated with the result (result must be attached)
    std::string statusErrorMessage() const
    {
      assert( mResult );
      return ::PQresultErrorMessage( mResult );
    }

    //! Number of rows (tuples) in the result
    int rowCount() const
    {
      assert( mResult );
      return ::PQntuples( mResult );
    }

    //! Number of rows affected by the command, as a string (PQcmdTuples semantics)
    std::string affectedRows() const
    {
      assert( mResult );
      return ::PQcmdTuples( mResult );
    }

    //! Returns the cell value as text; empty string when the cell is SQL NULL
    std::string value( int row, int col ) const
    {
      assert( mResult );
      return isNull( row, col )
             ? std::string()
             : std::string( ::PQgetvalue( mResult, row, col ) );
    }

    //! Returns true when the cell is SQL NULL
    bool isNull( int row, int col ) const
    {
      assert( mResult );
      return ::PQgetisnull( mResult, row, col );
    }

  private:
    PGresult *mResult = nullptr;
};
//! Executes the given SQL on connection c and returns the raw libpq result
//! (see PostgresResult above for an owning wrapper)
PGresult *execSql( PGconn *c, const std::string &sql );
//! Returns the identifier quoted for safe use in SQL statements
std::string quotedIdentifier( const std::string &ident );
//! Returns the value quoted as a SQL string literal
std::string quotedString( const std::string &value );
#endif // POSTGRESUTILS_H
<file_sep>#!/bin/bash
# Resolve the absolute directory of this script so it can be run from anywhere.
SCRIPT_DIR=$(dirname "$0")
case "$SCRIPT_DIR" in
    "/"*)
        ;;
    ".")
        SCRIPT_DIR=$(pwd)
        ;;
    *)
        SCRIPT_DIR=$(pwd)/$(dirname "$0")
        ;;
esac

# Locate the black formatter; fail early when it is not installed.
BLACK=$(command -v black)
if [ -z "$BLACK" ]; then
    echo "[!] black not installed." >&2
    exit 1
fi

"$BLACK" --version

echo "running run_black for $SCRIPT_DIR/../pygeodiff"

# --check --quiet exits non-zero when files would be reformatted;
# remember that code so it becomes this script's exit status.
"$BLACK" "$SCRIPT_DIR/../pygeodiff" --check --quiet
RETURN=$?

# this does not return 0 or 1 exit code
"$BLACK" "$SCRIPT_DIR/../pygeodiff" --verbose
exit $RETURN
<file_sep>
-- Schema "gd_inserted_1_a": the gd_base dataset plus one inserted feature
-- (fid 4, 'my new point A') - i.e. base data with a single insert applied.
DROP SCHEMA IF EXISTS gd_inserted_1_a CASCADE;
CREATE SCHEMA gd_inserted_1_a;
-- "simple": point layer (EPSG:4326) with "name" and "rating" attributes
CREATE TABLE gd_inserted_1_a.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "name" TEXT, "rating" INTEGER);
INSERT INTO gd_inserted_1_a.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), 'feature1', 1);
INSERT INTO gd_inserted_1_a.simple VALUES (2, ST_GeomFromText('Point (-0.36388508891928861 0.56224350205198359)', 4326), 'feature2', 2);
INSERT INTO gd_inserted_1_a.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326), 'feature3', 3);
-- the extra feature compared to gd_base
INSERT INTO gd_inserted_1_a.simple VALUES (4, ST_GeomFromText('Point (-0.80989507554245277 0.35087659877358479)', 4326), 'my new point A', 1);
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef CHANGESETPUTVARINT_H
#define CHANGESETPUTVARINT_H
#include <assert.h>
#include <stdint.h>
// Contents of this file is entirely based on code from sqlite3
//
// the following macro should be used for varint writing:
// - putVarint32
typedef uint8_t u8;
typedef uint32_t u32;
typedef uint64_t u64;
#define putVarint32(A,B) \
(u8)(((u32)(B)<(u32)0x80)?(*(A)=(unsigned char)(B)),1:\
sqlite3PutVarint((A),(B)))
/*
** Write a 64-bit variable-length integer to memory starting at p[0].
** The length of data write will be between 1 and 9 bytes. The number
** of bytes written is returned.
**
** A variable-length integer consists of the lower 7 bits of each byte
** for all bytes that have the 8th bit set and one byte with the 8th
** bit clear. Except, if we get to the 9th byte, it stores the full
** 8 bits and is the last byte.
*/
static int putVarint64( unsigned char *p, u64 v )
{
  int i, j, n;
  u8 buf[10];
  if ( v & ( ( ( u64 )0xff000000 ) << 32 ) )
  {
    /* Top 8 bits (56..63) are non-zero: the value needs the full 9 bytes.
    ** The 9th byte stores the low 8 bits verbatim; the remaining 56 bits
    ** are written as eight 7-bit groups with the continuation bit set. */
    p[8] = ( u8 )v;
    v >>= 8;
    for ( i = 7; i >= 0; i-- )
    {
      p[i] = ( u8 )( ( v & 0x7f ) | 0x80 );
      v >>= 7;
    }
    return 9;
  }
  /* General case: emit 7-bit groups least-significant first into buf,
  ** then copy them to the output most-significant first. */
  n = 0;
  do
  {
    buf[n++] = ( u8 )( ( v & 0x7f ) | 0x80 );
    v >>= 7;
  }
  while ( v != 0 );
  /* clear the continuation bit on the final (least significant) group */
  buf[0] &= 0x7f;
  assert( n <= 9 );
  for ( i = 0, j = n - 1; j >= 0; j--, i++ )
  {
    p[i] = buf[j];
  }
  return n;
}
/*
** Writes v to p[0..] as a variable-length integer and returns the number
** of bytes written (1..9). The one- and two-byte encodings are handled
** inline; anything larger is delegated to the general 64-bit encoder.
*/
static int sqlite3PutVarint( unsigned char *p, u64 v )
{
  if ( v > 0x3fff )
  {
    /* three or more bytes - let the general encoder deal with it */
    return putVarint64( p, v );
  }
  if ( v > 0x7f )
  {
    /* two bytes: high 7-bit group first, with the continuation bit set */
    p[0] = ( ( v >> 7 ) & 0x7f ) | 0x80;
    p[1] = v & 0x7f;
    return 2;
  }
  /* single byte: values 0..127 are stored as-is */
  p[0] = v & 0x7f;
  return 1;
}
#endif // CHANGESETPUTVARINT_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef CHANGESETREADER_H
#define CHANGESETREADER_H
#include "geodiff.h"
#include "changeset.h"
class Buffer;
/**
* Class for reading of binary changeset files.
* First use open() to initialize it, followed by a series of nextEntry() calls.
*
* See changeset-format.md for the documentation of the format.
*/
class ChangesetReader
{
  public:
    ChangesetReader();
    ~ChangesetReader();

    //! Starts reading of changeset from a file
    bool open( const std::string &filename );

    //! Reads next changeset entry to the passed object
    bool nextEntry( ChangesetEntry &entry );

    //! Returns whether the changeset being read is completely empty
    bool isEmpty() const;

    //! Resets the reader position back to the start of the changeset
    void rewind();

  private:
    // Low-level decoding helpers - implemented in the corresponding .cpp;
    // presumably each consumes bytes from mBuffer at mOffset (confirm there).
    char readByte();
    int readVarint();
    std::string readNullTerminatedString();
    void readRowValues( std::vector<Value> &values );
    void readTableRecord();
    void throwReaderError( const std::string &message ) const;

    int mOffset = 0; // where are we in the buffer
    std::unique_ptr<Buffer> mBuffer;     // raw changeset data being decoded
    ChangesetTable mCurrentTable; // currently processed table
};
#endif // CHANGESETREADER_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include "geodiff.h"
#include "geodifflogger.hpp"
#include "geodiffutils.hpp"
#include <iostream>
/**
 * Default logger callback: errors go to stderr, all other levels to stdout,
 * each prefixed with its severity.
 */
void StdoutLogger( GEODIFF_LoggerLevel level, const char *msg )
{
  switch ( level )
  {
    case LevelError:
      std::cerr << "Error: " << msg << std::endl;
      break;
    case LevelWarning:
      std::cout << "Warn: " << msg << std::endl;
      break;
    case LevelInfo:
      // previously missing: info-level messages were silently dropped
      std::cout << "Info: " << msg << std::endl;
      break;
    case LevelDebug:
      std::cout << "Debug: " << msg << std::endl;
      break;
    default: break;
  }
}
Logger::Logger()
{
  // Read the desired maximum level from GEODIFF_LOGGER_LEVEL (0 when the
  // variable is unset) and apply it when it is within the valid range.
  int envLevel = getEnvVarInt( "GEODIFF_LOGGER_LEVEL", 0 );
  if ( envLevel >= 0 && envLevel <= GEODIFF_LoggerLevel::LevelDebug )
  {
    setMaxLogLevel( static_cast<GEODIFF_LoggerLevel>( envLevel ) );
  }
  // log to stdout/stderr unless a client installs its own callback
  setCallback( &StdoutLogger );
}
void Logger::setCallback( GEODIFF_LoggerCallback loggerCallback )
{
  // null is allowed and disables all output (see log())
  mLoggerCallback = loggerCallback;
}

// Convenience wrappers that forward to log() with a fixed severity.
void Logger::debug( const std::string &msg ) const
{
  log( GEODIFF_LoggerLevel::LevelDebug, msg );
}

void Logger::warn( const std::string &msg ) const
{
  log( GEODIFF_LoggerLevel::LevelWarning, msg );
}

void Logger::error( const std::string &msg ) const
{
  log( GEODIFF_LoggerLevel::LevelError, msg );
}

// Logs the exception's message at error level.
void Logger::error( const GeoDiffException &exp ) const
{
  log( GEODIFF_LoggerLevel::LevelError, exp.what() );
}

void Logger::info( const std::string &msg ) const
{
  log( GEODIFF_LoggerLevel::LevelInfo, msg );
}
/**
 * Dispatches a message to the installed callback, applying the level filter.
 * Messages are dropped when no callback is set or when the level is more
 * verbose than maxLogLevel().
 */
void Logger::log( GEODIFF_LoggerLevel level, const std::string &msg ) const
{
  if ( !mLoggerCallback )
    return;

  // skip messages more verbose than the configured maximum
  if ( static_cast<int>( level ) > static_cast<int>( maxLogLevel() ) )
    return;

  mLoggerCallback( level, msg.c_str() );
}
<file_sep>
-- Schema "gd_base": reference dataset with three point features.
-- The other gd_* schemas in this test suite are this dataset with edits applied.
DROP SCHEMA IF EXISTS gd_base CASCADE;
CREATE SCHEMA gd_base;
-- "simple": point layer (EPSG:4326) with "name" and "rating" attributes
CREATE TABLE gd_base.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "name" TEXT, "rating" INTEGER);
INSERT INTO gd_base.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), 'feature1', 1);
INSERT INTO gd_base.simple VALUES (2, ST_GeomFromText('Point (-0.36388508891928861 0.56224350205198359)', 4326), 'feature2', 2);
INSERT INTO gd_base.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326), 'feature3', 3);
<file_sep># -*- coding: utf-8 -*-
"""
:copyright: (c) 2019 <NAME>
:license: MIT, see LICENSE for more details.
"""
from .testutils import *
import os
import shutil
class UnitTestsPythonApiCalls(GeoDiffTests):
    """Some quick tests of various API calls just to make sure they are not broken"""

    def test_global_settigs(self):
        """Checks the logger callback can be cleared (None) and restored."""
        # NOTE(review): method name has a typo ("settigs" -> "settings");
        # kept as-is since runners discover tests by the "test_" prefix and
        # renaming would change the reported test id.
        print("********************************************************")
        print("PYTHON: test setting logger to None and Back")
        self.geodiff.set_logger_callback(None)
        self.geodiff.set_logger_callback(logger)

    def test_api_calls(self):
        """Smoke-tests the high-level geodiff API calls in sequence.

        Each step only checks the call completes without raising; outputs
        are written into a per-test working directory.
        """
        print("********************************************************")
        print("PYTHON: test API calls")
        outdir = create_dir("api-calls")
        # driver registry must contain at least the built-in sqlite driver
        print("-- driver_api")
        if len(self.geodiff.drivers()) < 1:
            raise TestError("no drivers registered")
        if not self.geodiff.driver_is_registered("sqlite"):
            raise TestError("sqlite driver not registered")
        # concatenate consecutive diffs into a single changeset file
        print("-- concat_changes")
        self.geodiff.concat_changes(
            [
                geodiff_test_dir() + "/concat/foo-insert-update-1.diff",
                geodiff_test_dir() + "/concat/foo-insert-update-2.diff",
            ],
            outdir + "/concat.diff",
        )
        self.geodiff.concat_changes(
            [
                geodiff_test_dir() + "/concat/bar-insert.diff",
                geodiff_test_dir() + "/concat/bar-update.diff",
                geodiff_test_dir() + "/concat/bar-delete.diff",
            ],
            outdir + "/concat.diff",
        )
        # This is not a valid concat - you delete feature and then update (deleted feature) and then insert it
        # But it should not crash. Ideally update is ignored (invalid step) and insert is applied
        # https://github.com/MerginMaps/geodiff/issues/174
        self.geodiff.concat_changes(
            [
                geodiff_test_dir() + "/concat/bar-delete.diff",
                geodiff_test_dir() + "/concat/bar-update.diff",
                geodiff_test_dir() + "/concat/bar-insert.diff",
            ],
            outdir + "/concat.diff",
        )
        # copy a dataset between drivers (here: sqlite -> sqlite)
        print("-- make_copy")
        self.geodiff.make_copy(
            "sqlite",
            "",
            geodiff_test_dir() + "/base.gpkg",
            "sqlite",
            "",
            outdir + "/make-copy.gpkg",
        )
        print("-- make_copy_sqlite")
        self.geodiff.make_copy_sqlite(
            geodiff_test_dir() + "/base.gpkg", outdir + "/make-copy-sqlite.gpkg"
        )
        # diff between two datasets using the explicit-driver API
        print("-- create_changeset_ex")
        self.geodiff.create_changeset_ex(
            "sqlite",
            "",
            geodiff_test_dir() + "/base.gpkg",
            geodiff_test_dir() + "/1_geopackage/modified_1_geom.gpkg",
            outdir + "/create-ex.diff",
        )
        # apply a previously created diff to a working copy of the base
        print("-- apply_changeset_ex")
        self.geodiff.make_copy_sqlite(
            geodiff_test_dir() + "/base.gpkg", outdir + "/apply-ex.gpkg"
        )
        self.geodiff.apply_changeset_ex(
            "sqlite",
            "",
            outdir + "/apply-ex.gpkg",
            geodiff_test_dir() + "/1_geopackage/base-modified_1_geom.diff",
        )
        # rebase "their" changes on top of "our" diff, collecting conflicts
        print("-- create_rebased_changeset_ex")
        self.geodiff.create_changeset_ex(
            "sqlite",
            "",
            geodiff_test_dir() + "/base.gpkg",
            geodiff_test_dir() + "/2_inserts/inserted_1_B.gpkg",
            outdir + "/rebased-ex-base2their.diff",
        )
        self.geodiff.create_rebased_changeset_ex(
            "sqlite",
            "",
            geodiff_test_dir() + "/base.gpkg",
            geodiff_test_dir() + "/2_inserts/base-inserted_1_A.diff",
            outdir + "/rebased-ex-base2their.diff",
            outdir + "/rebased-ex.diff",
            outdir + "/rebased-ex-conflicts.json",
        )
        # one-shot rebase of a modified copy against a diff from base
        print("-- rebase_ex")
        self.geodiff.make_copy_sqlite(
            geodiff_test_dir() + "/2_inserts/inserted_1_B.gpkg",
            outdir + "/rebase-ex.gpkg",
        )
        self.geodiff.rebase_ex(
            "sqlite",
            "",
            geodiff_test_dir() + "/base.gpkg",
            outdir + "/rebase-ex.gpkg",
            geodiff_test_dir() + "/2_inserts/base-inserted_1_A.diff",
            outdir + "/rebase-ex-conflicts.json",
        )
        # export dataset content / schema descriptions
        print("-- dump_data")
        self.geodiff.dump_data(
            "sqlite", "", geodiff_test_dir() + "/base.gpkg", outdir + "/dump-data.diff"
        )
        print("-- schema")
        self.geodiff.schema(
            "sqlite", "", geodiff_test_dir() + "/base.gpkg", outdir + "/schema.json"
        )
<file_sep>
-- Schema "gd_updated_a": the gd_base dataset with feature 2 modified
-- (geometry moved and rating changed to 9999).
DROP SCHEMA IF EXISTS gd_updated_a CASCADE;
CREATE SCHEMA gd_updated_a;
CREATE TABLE gd_updated_a.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), "name" TEXT, "rating" INTEGER);
INSERT INTO gd_updated_a.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), 'feature1', 1);
-- the updated feature (differs from gd_base)
INSERT INTO gd_updated_a.simple VALUES (2, ST_GeomFromText('Point (-0.92836978235933398 0.23787002896191123)', 4326), 'feature2', 9999);
INSERT INTO gd_updated_a.simple VALUES (3, ST_GeomFromText('Point (-0.73050615595075241 0.04240766073871405)', 4326), 'feature3', 3);
<file_sep>
-- Schema "gd_floats": point layer with a DOUBLE PRECISION attribute.
-- Includes a value with many significant digits - presumably to exercise
-- float precision round-tripping (confirm against the tests using it).
DROP SCHEMA IF EXISTS gd_floats CASCADE;
CREATE SCHEMA gd_floats;
CREATE TABLE gd_floats.simple ( "fid" SERIAL PRIMARY KEY, "geometry" GEOMETRY(POINT, 4326), num DOUBLE PRECISION);
INSERT INTO gd_floats.simple VALUES (1, ST_GeomFromText('Point (-1.08891928864569065 0.46101231190150482)', 4326), 0.1);
INSERT INTO gd_floats.simple VALUES (2, ST_GeomFromText('Point (-0.36388508891928861 0.56224350205198359)', 4326), 0.0038970831662960364);
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef GEODIFFCONTEXT_H
#define GEODIFFCONTEXT_H
#include <string>
#include <vector>
#include "geodiff.h"
#include "geodifflogger.hpp"
/**
 * Shared state passed around geodiff operations: owns the logger and the
 * list of table names that should be skipped.
 */
class Context
{
  public:
    Context();

    //! Returns the logger owned by this context (mutable access)
    Logger &logger();
    //! Returns the logger owned by this context (read-only access)
    const Logger &logger() const;

    //! Replaces the list of table names to be skipped
    void setTablesToSkip( const std::vector<std::string> &tablesToSkip );
    //! Returns true when tableName is in the skip list
    bool isTableSkipped( const std::string &tableName ) const;

  private:
    Logger mLogger;
    std::vector<std::string> mTablesToSkip;
};
#endif // GEODIFFCONTEXT_H
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2022 <NAME>
*/
#include "gtest/gtest.h"
#include "geodiff_testutils.hpp"
#include "geodiff.h"
#include "changesetreader.h"
#include "sqliteutils.h"
TEST( GeometryUtilsTest, test_wkb_from_geometry )
{
  // The reference diff contains an update of table "simple" whose old
  // geometry value is stored as a GPKG-encoded blob.
  std::string changeset = pathjoin( testdir(), "1_geopackage", "base-modified_1_geom.diff" );
  ChangesetReader reader;
  EXPECT_TRUE( reader.open( changeset ) );
  ChangesetEntry entry;
  // first entry belongs to the gpkg_contents metadata table
  EXPECT_TRUE( reader.nextEntry( entry ) );
  EXPECT_EQ( entry.table->name, "gpkg_contents" );
  // second entry: the actual feature update in "simple"
  EXPECT_TRUE( reader.nextEntry( entry ) );
  EXPECT_EQ( entry.table->name, "simple" );
  EXPECT_EQ( entry.oldValues[1].type(), Value::TypeBlob );
  std::string gpkgWkb = entry.oldValues[1].getString();
  const char *c_gpkgWkb = gpkgWkb.c_str();
  size_t length = gpkgWkb.length();
  const char *c_wkb = nullptr;
  size_t wkbLength;
  // strip the GPKG header, leaving plain WKB
  int result = GEODIFF_createWkbFromGpkgHeader( testContext(), c_gpkgWkb, length, &c_wkb, &wkbLength );
  EXPECT_EQ( result, GEODIFF_SUCCESS );
  std::string wkb( c_wkb, wkbLength );
  // re-create GPKG envelope
  TableColumnInfo col;
  col.geomSrsId = 4326;
  col.geomType = "POINT";
  std::string binHead = createGpkgHeader( wkb, col );
  // fill envelope with geometry
  std::string gpb( binHead.size() + wkb.size(), 0 );
  memcpy( &gpb[0], binHead.data(), binHead.size() );
  memcpy( &gpb[binHead.size()], wkb.data(), wkb.size() );
  // header + wkb must round-trip back to the original GPKG blob
  EXPECT_EQ( gpkgWkb, gpb );
  // no further entries expected in this diff
  EXPECT_FALSE( reader.nextEntry( entry ) );
}
TEST( GeometryUtilsTest, test_wkb_from_geometry_errors )
{
  // GEODIFF_createWkbFromGpkgHeader must return GEODIFF_ERROR whenever a
  // required argument is null. A zero-initialized stack buffer replaces the
  // original uninitialized heap allocation (no manual delete[] needed, and
  // the dummy content is well-defined).
  char gpkgData[10] = {};
  const char *res = nullptr;
  size_t wkbLength;

  // null context
  int result = GEODIFF_createWkbFromGpkgHeader( nullptr, gpkgData, 1, &res, &wkbLength );
  EXPECT_EQ( result, GEODIFF_ERROR );

  // null input buffer
  result = GEODIFF_createWkbFromGpkgHeader( testContext(), nullptr, 1, &res, &wkbLength );
  EXPECT_EQ( result, GEODIFF_ERROR );

  // null output pointer
  result = GEODIFF_createWkbFromGpkgHeader( testContext(), gpkgData, 1, nullptr, &wkbLength );
  EXPECT_EQ( result, GEODIFF_ERROR );

  // null output length
  result = GEODIFF_createWkbFromGpkgHeader( testContext(), gpkgData, 1, &res, nullptr );
  EXPECT_EQ( result, GEODIFF_ERROR );
}
//! Test entry point: runs all registered gtest cases between the
//! test-environment init/finalize helpers.
int main( int argc, char **argv )
{
  testing::InitGoogleTest( &argc, argv );
  init_test();

  const int result = RUN_ALL_TESTS();

  finalize_test();
  return result;
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#include <algorithm>
#include <cassert>
#include "driver.h"
#include "sqlitedriver.h"
#include "geodiff_config.hpp"
#ifdef HAVE_POSTGRES
#include "postgresdriver.h"
#endif
// define driver names
const std::string Driver::SQLITEDRIVERNAME = "sqlite";
const std::string Driver::POSTGRESDRIVERNAME = "postgres";
//! Constructs a driver bound to the given context. The context is stored as
//! a raw pointer, so it must outlive the driver and must not be null.
Driver::Driver( const Context *context )
  : mContext( context )
{
  assert( mContext );
}

Driver::~Driver() = default;

//! Returns the context this driver was created with (never null)
const Context *Driver::context() const
{
  return mContext;
}
std::vector<std::string> Driver::drivers()
{
std::vector<std::string> names;
names.push_back( SQLITEDRIVERNAME );
#ifdef HAVE_POSTGRES
names.push_back( POSTGRESDRIVERNAME );
#endif
return names;
}
//! Returns true when driverName is one of the compiled-in drivers.
bool Driver::driverIsRegistered( const std::string &driverName )
{
  for ( const std::string &registeredName : Driver::drivers() )
  {
    if ( registeredName == driverName )
      return true;
  }
  return false;
}
std::unique_ptr<Driver> Driver::createDriver( const Context *context, const std::string &driverName )
{
if ( driverName == SQLITEDRIVERNAME )
{
return std::unique_ptr<Driver>( new SqliteDriver( context ) );
}
#ifdef HAVE_POSTGRES
if ( driverName == POSTGRESDRIVERNAME )
{
return std::unique_ptr<Driver>( new PostgresDriver( context ) );
}
#endif
return std::unique_ptr<Driver>();
}
//! Builds connection parameters for a sqlite driver comparing two files.
DriverParametersMap Driver::sqliteParameters( const std::string &filenameBase, const std::string &filenameModified )
{
  DriverParametersMap params;
  params["base"] = filenameBase;
  params["modified"] = filenameModified;
  return params;
}

//! Builds connection parameters for a sqlite driver with a single file.
DriverParametersMap Driver::sqliteParametersSingleSource( const std::string &filename )
{
  DriverParametersMap params;
  params["base"] = filename;
  return params;
}
<file_sep>/*
GEODIFF - MIT License
Copyright (C) 2020 <NAME>
*/
#ifndef CHANGESETGETVARINT_H
#define CHANGESETGETVARINT_H
#include <assert.h>
#include <stdint.h>
// Contents of this file is entirely based on code from sqlite3
//
// the following two macro should be used for varint reading:
// - getVarint32
typedef uint8_t u8;
typedef uint32_t u32;
typedef uint64_t u64;
#define getVarint32(A,B) \
(u8)((*(A)<(u8)0x80)?((B)=(u32)*(A)),1:sqlite3GetVarint32((A),(u32 *)&(B)))
/*
** Bitmasks used by sqlite3GetVarint(). These precomputed constants
** are defined here rather than simply putting the constant expressions
** inline in order to work around bugs in the RVT compiler.
**
** SLOT_2_0 A mask for (0x7f<<14) | 0x7f
**
** SLOT_4_2_0 A mask for (0x7f<<28) | SLOT_2_0
*/
#define SLOT_2_0 0x001fc07f
#define SLOT_4_2_0 0xf01fc07f
/*
** SQLITE_MAX_U32 is a u64 constant that is the maximum u64 value
** that can be stored in a u32 without loss of data. The value
** is 0x00000000ffffffff. But because of quirks of some compilers, we
** have to specify the value in the less intuitive manner shown:
*/
#define SQLITE_MAX_U32 ((((u64)1)<<32)-1)
/*
** Read a 64-bit variable-length integer from memory starting at p[0].
** Return the number of bytes read. The value is stored in *v.
*/
static u8 sqlite3GetVarint( const unsigned char *p, u64 *v )
{
  u32 a, b, s;
  /* 1-byte varint: high bit of p[0] clear, value is 0..127 */
  if ( ( ( signed char * )p )[0] >= 0 )
  {
    *v = *p;
    return 1;
  }
  /* 2-byte varint: high bit of p[1] clear */
  if ( ( ( signed char * )p )[1] >= 0 )
  {
    *v = ( ( u32 )( p[0] & 0x7f ) << 7 ) | p[1];
    return 2;
  }
  /* Verify that constants are precomputed correctly */
  assert( SLOT_2_0 == ( ( 0x7f << 14 ) | ( 0x7f ) ) );
  assert( SLOT_4_2_0 == ( ( 0xfU << 28 ) | ( 0x7f << 14 ) | ( 0x7f ) ) );
  a = ( ( u32 )p[0] ) << 14;
  b = p[1];
  p += 2;
  a |= *p;
  /* a: p0<<14 | p2 (unmasked) */
  if ( !( a & 0x80 ) )
  {
    /* 3-byte result */
    a &= SLOT_2_0;
    b &= 0x7f;
    b = b << 7;
    a |= b;
    *v = a;
    return 3;
  }
  /* CSE1 from below */
  a &= SLOT_2_0;
  p++;
  b = b << 14;
  b |= *p;
  /* b: p1<<14 | p3 (unmasked) */
  if ( !( b & 0x80 ) )
  {
    /* 4-byte result */
    b &= SLOT_2_0;
    /* moved CSE1 up */
    /* a &= (0x7f<<14)|(0x7f); */
    a = a << 7;
    a |= b;
    *v = a;
    return 4;
  }
  /* a: p0<<14 | p2 (masked) */
  /* b: p1<<14 | p3 (unmasked) */
  /* 1:save off p0<<21 | p1<<14 | p2<<7 | p3 (masked) */
  /* moved CSE1 up */
  /* a &= (0x7f<<14)|(0x7f); */
  b &= SLOT_2_0;
  s = a;
  /* s: p0<<14 | p2 (masked) */
  p++;
  a = a << 14;
  a |= *p;
  /* a: p0<<28 | p2<<14 | p4 (unmasked) */
  if ( !( a & 0x80 ) )
  {
    /* 5-byte result: s carries the bits that spill into the high 32 */
    /* we can skip these cause they were (effectively) done above
    ** while calculating s */
    /* a &= (0x7f<<28)|(0x7f<<14)|(0x7f); */
    /* b &= (0x7f<<14)|(0x7f); */
    b = b << 7;
    a |= b;
    s = s >> 18;
    *v = ( ( u64 )s ) << 32 | a;
    return 5;
  }
  /* 2:save off p0<<21 | p1<<14 | p2<<7 | p3 (masked) */
  s = s << 7;
  s |= b;
  /* s: p0<<21 | p1<<14 | p2<<7 | p3 (masked) */
  p++;
  b = b << 14;
  b |= *p;
  /* b: p1<<28 | p3<<14 | p5 (unmasked) */
  if ( !( b & 0x80 ) )
  {
    /* 6-byte result */
    /* we can skip this cause it was (effectively) done above in calc'ing s */
    /* b &= (0x7f<<28)|(0x7f<<14)|(0x7f); */
    a &= SLOT_2_0;
    a = a << 7;
    a |= b;
    s = s >> 18;
    *v = ( ( u64 )s ) << 32 | a;
    return 6;
  }
  p++;
  a = a << 14;
  a |= *p;
  /* a: p2<<28 | p4<<14 | p6 (unmasked) */
  if ( !( a & 0x80 ) )
  {
    /* 7-byte result */
    a &= SLOT_4_2_0;
    b &= SLOT_2_0;
    b = b << 7;
    a |= b;
    s = s >> 11;
    *v = ( ( u64 )s ) << 32 | a;
    return 7;
  }
  /* CSE2 from below */
  a &= SLOT_2_0;
  p++;
  b = b << 14;
  b |= *p;
  /* b: p3<<28 | p5<<14 | p7 (unmasked) */
  if ( !( b & 0x80 ) )
  {
    /* 8-byte result */
    b &= SLOT_4_2_0;
    /* moved CSE2 up */
    /* a &= (0x7f<<14)|(0x7f); */
    a = a << 7;
    a |= b;
    s = s >> 4;
    *v = ( ( u64 )s ) << 32 | a;
    return 8;
  }
  /* 9-byte result: the final byte carries 8 raw bits (shift by 15/8 below) */
  p++;
  a = a << 15;
  a |= *p;
  /* a: p4<<29 | p6<<15 | p8 (unmasked) */
  /* moved CSE2 up */
  /* a &= (0x7f<<29)|(0x7f<<15)|(0xff); */
  b &= SLOT_2_0;
  b = b << 8;
  a |= b;
  s = s << 4;
  b = p[-4];
  b &= 0x7f;
  b = b >> 3;
  s |= b;
  *v = ( ( u64 )s ) << 32 | a;
  return 9;
}
/*
** Read a 32-bit variable-length integer from memory starting at p[0].
** Return the number of bytes read. The value is stored in *v.
**
** If the varint stored in p[0] is larger than can fit in a 32-bit unsigned
** integer, then set *v to 0xffffffff.
**
** A MACRO version, getVarint32, is provided which inlines the
** single-byte case. All code should use the MACRO version as
** this function assumes the single-byte case has already been handled.
*/
static u8 sqlite3GetVarint32( const unsigned char *p, u32 *v )
{
  u32 a, b;
  /* The 1-byte case.  Overwhelmingly the most common.  Handled inline
  ** by the getVarin32() macro */
  a = *p;
  /* a: p0 (unmasked) */
#ifndef getVarint32
  if ( !( a & 0x80 ) )
  {
    /* Values between 0 and 127 */
    *v = a;
    return 1;
  }
#endif
  /* The 2-byte case */
  p++;
  b = *p;
  /* b: p1 (unmasked) */
  if ( !( b & 0x80 ) )
  {
    /* Values between 128 and 16383 */
    a &= 0x7f;
    a = a << 7;
    *v = a | b;
    return 2;
  }
  /* The 3-byte case */
  p++;
  a = a << 14;
  a |= *p;
  /* a: p0<<14 | p2 (unmasked) */
  if ( !( a & 0x80 ) )
  {
    /* Values between 16384 and 2097151 */
    a &= ( 0x7f << 14 ) | ( 0x7f );
    b &= 0x7f;
    b = b << 7;
    *v = a | b;
    return 3;
  }
  {
    /* 4 bytes or more: fall back to the full 64-bit decoder and clamp
    ** values that do not fit into 32 bits to 0xffffffff. */
    u64 v64;
    u8 n;
    p -= 2;
    n = sqlite3GetVarint( p, &v64 );
    assert( n > 3 && n <= 9 );
    if ( ( v64 & SQLITE_MAX_U32 ) != v64 )
    {
      *v = 0xffffffff;
    }
    else
    {
      *v = ( u32 )v64;
    }
    return n;
  }
}
#endif // CHANGESETGETVARINT_H
| f72765c835026736427c7dac19837b76a3f20c50 | [
"SQL",
"CMake",
"Markdown",
"Python",
"C",
"C++",
"Shell"
] | 49 | C++ | timlinux/geodiff | 15df0de3d88439cb381fb9f6ae486f84c5c9def4 | c30aaa803237c27f49c96046cdba69535f26c508 | |
refs/heads/main | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace AKUtil
{
    /// <summary>
    /// MonoBehaviour implemented with the singleton pattern.
    /// If no instance exists when accessed, one is created automatically by
    /// adding the component to a freshly created GameObject.
    /// Does NOT call DontDestroyOnLoad by default.
    /// </summary>
    public class SingletonMonoBehaviour<T> : MonoBehaviour where T : SingletonMonoBehaviour<T>
    {
        protected static T instance;

        // Accessing Instance lazily finds or creates the singleton.
        public static T Instance {get { return CreateInstance(); } }

        /// <summary>
        /// Returns the existing instance, finds one in the scene, or creates
        /// a new GameObject with the component attached.
        /// </summary>
        public static T CreateInstance()
        {
            if (instance != null) return instance;

            // look for an instance already placed in the scene
            instance = (T)FindObjectOfType(typeof(T));
            if (instance != null) return instance;

            // none found - create a host GameObject and attach the component
            var name = typeof(T).Name;
            Debug.LogFormat("Create singleton object: {0}", name);
            GameObject obj = new GameObject(name);
            instance = obj.AddComponent<T>();
            if (instance != null) return instance;

            Debug.LogWarning("Can't find singleton object: " + typeof(T).Name);
            Debug.LogError("Can't create singleton object: " + typeof(T).Name);
            return null;
        }

        /// <summary>Returns true when the singleton instance already exists.</summary>
        public static bool IsInstantiated()
        {
            // cast to MonoBehaviour so Unity's overloaded null comparison is used
            return (instance != (MonoBehaviour)null);
        }

        private void Awake()
        {
            // only the first instance survives; duplicates are destroyed in CheckInstance()
            if (CheckInstance())AwakeValidly();
        }

        /// <summary>
        /// Called instead of Awake() on the surviving instance only;
        /// override in subclasses for initialization.
        /// </summary>
        protected virtual void AwakeValidly()
        {
            // do nothing
        }

        // Claims the singleton slot; destroys this component when another
        // instance already owns it.
        private bool CheckInstance()
        {
            if (instance == null)
            {
                instance = (T)this;
                return true;
            }
            if (Instance == this) return true;
            Destroy(this);
            return false;
        }

        // Opt-in helper for subclasses that want to survive scene loads.
        protected void DontDestroyOnLoad()
        {
            GameObject.DontDestroyOnLoad(this.gameObject);
        }
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx;
/// <summary>
/// Base class for movement-direction input sources
/// (e.g. AIController feeds movement through this same interface).
/// </summary>
public class IFTOInput : MonoBehaviour
{
    // fires when this input wants to interact with the given target
    protected Subject<IInteractive> interactionSubject = new Subject<IInteractive>();
    public IObservable<IInteractive> OnInteract { get { return interactionSubject; } }

    Vector3 direction;

    bool isLocked = false;
    // While locked, the direction is forced to zero so movement stops immediately.
    public bool IsLocked { get { return isLocked; } set { if(value)Direction = Vector3.zero; isLocked = value; } }

    // Clears the current movement direction.
    public void Reset()
    {
        direction = Vector3.zero;
    }

    // Current movement direction; the setter clamps the magnitude to at most 1.
    public Vector3 Direction
    {
        get { return direction; }
        protected set
        {
            direction = value;
            if (direction.magnitude > 1f) direction = direction.normalized;
        }
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx;
/// <summary>
/// Something that can be interacted with (enemies, usable props, ...).
/// </summary>
public interface IInteractive
{
    // Performs the given interaction on this object.
    void Interact(InteractionType interactionType);
    // Returns the interaction currently offered (Locked = none available).
    InteractionType GetInteractionType();
    Vector3 GetPosition();
    // Outfit obtained when disguising via this object; null when not applicable.
    OutfitData GetDisguiseOutfit();
}

// Kinds of interaction an IInteractive can offer.
public enum InteractionType
{
    Kill,
    Pickup,
    Use,
    Disguise,
    Locked,  // interaction currently unavailable
}
}<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// ScriptableObject describing one outfit: display name, prefab and category.
[CreateAssetMenu(fileName = "OutfitData", menuName = "ScriptableObjects/OutfitData")]
public class OutfitData : ScriptableObject
{
    public string Name;
    public Outfit Outfit;  // prefab instantiated by OutfitHundler
    public OutfitType Type;
}
}<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx;
/// <summary>
/// Changes a character's outfit (visual appearance) and keeps the outline
/// color in sync with the character's relationship to the player.
/// </summary>
public class OutfitHundler : MonoBehaviour
{
    [SerializeField] Transform outfitParent;

    private ReactiveProperty<OutfitData> rpCurrentOutfit = new ReactiveProperty<OutfitData>();
    public OutfitData CurrentOutfit { private set { rpCurrentOutfit.Value = value; } get { return rpCurrentOutfit.Value; } }
    public IObservable<OutfitData> OnOutfitChanged { get { return rpCurrentOutfit; } }

    Outfit outfit;  // currently instantiated outfit prefab instance
    RelationshipHundler relationshipHundler;
    bool isPlayer = false;  // the player always gets a white outline

    public Animator Animator { get { return outfit.Animator; } }

    public bool IsNaked
    {
        get { return CurrentOutfit.Type == OutfitType.Naked; }
    }

    // Sets the initial outfit and starts tracking relationship changes.
    public void Initialize(OutfitData outfit, RelationshipHundler relationshipHundler)
    {
        SetOutfit(outfit);
        this.relationshipHundler = relationshipHundler;
        ChangeColorImmediately();
        // NOTE(review): subscription is never disposed (no AddTo(this)) -
        // confirm the publisher does not outlive this component.
        relationshipHundler.OnRelationshipChanged
            .Subscribe(_ => ChangeColorImmediately());
    }

    // Applies the outline color matching the current relationship.
    void ChangeColorImmediately()
    {
        var color = GetColor(relationshipHundler.CurrentRelationshop);
        SetOutline(color);
    }

    // Writes the outline color into every material of the current outfit.
    void SetOutline(Color color)
    {
        foreach (var renderer in outfit.Renderers)
        {
            foreach (var mat in renderer.materials)
            {
                mat.SetColor("_OutlineColor", color);
            }
        }
    }

    void SetOutlineWhite()
    {
        SetOutline(Color.white);
    }

    // Maps a relationship to an outline color (hostile = red, otherwise green).
    Color GetColor(RelationshipType type)
    {
        switch (type)
        {
            case RelationshipType.Hostile:
                return Color.red;
            case RelationshipType.Friendly:
                return Color.green;
            default:
                return Color.green;
        }
    }

    // Destroys the current outfit instance and instantiates the new one.
    public void SetOutfit(OutfitData next, bool isPlayer = false)
    {
        if(isPlayer)this.isPlayer = true;
        CurrentOutfit = next;
        if(outfitParent.childCount >= 1)
        {
            // DestroyImmediate so the child slot is free within this frame
            DestroyImmediate(outfitParent.GetChild(0).gameObject);
        }
        outfit = Instantiate(CurrentOutfit.Outfit, outfitParent);
        outfit.transform.localPosition = Vector3.zero;
        outfit.transform.localRotation = Quaternion.identity;
        if (this.isPlayer) SetOutlineWhite();
    }

    // Switches to the shared "naked" outfit (e.g. after the outfit is taken).
    public void BeNaked()
    {
        SetOutfit(DatasetLocator.Instance.NakedOutfitData);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// End-of-game overlay with "continue" (reload the Main scene) and exit buttons.
public class GameClearView : MonoBehaviour
{
    [SerializeField] CanvasGroup canvasGroup;
    [SerializeField] Button continueButton;
    [SerializeField] Button exitButton;

    private void Start()
    {
        // wire up both buttons once at startup
        continueButton.onClick.AddListener(OnContinueClicked);
        exitButton.onClick.AddListener(OnExitClicked);
    }

    void OnContinueClicked()
    {
        SceneManager.LoadScene("Main");
    }

    void OnExitClicked()
    {
        Application.Quit();
    }

    // Makes the (initially invisible) overlay visible and clickable.
    public void Show()
    {
        canvasGroup.blocksRaycasts = true;
        canvasGroup.interactable = true;
        canvasGroup.alpha = 1;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Marks the mission target: the game is cleared as soon as this human dies.
[RequireComponent(typeof(HumanController))]
public class TargetEnemy : MonoBehaviour
{
    void Start()
    {
        var human = GetComponent<HumanController>();
        human.OnDead += NotifyGameClear;
    }

    void NotifyGameClear()
    {
        GameRuleManager.Instance.GameClear();
    }
}
<file_sep>using System;
using UnityEngine;
// Patrol route for AI characters: an ordered list of waypoints.
public class AIPath : MonoBehaviour
{
    [SerializeField] private Point[] path;
    public Point[] Path { get { return path; } }

    // One waypoint: a transform (position + facing) plus a dwell time.
    [Serializable]
    public class Point
    {
        [SerializeField] Transform point;
        public Vector3 Position { get { return point.position; } }
        public Vector3 Forward { get { return point.forward; } }
        // Inspector header (Japanese): "wait time after reaching the point"
        [Header("pointに到達後の待機時間")]
        public float WaitTime;
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Interactable that can only be used while the player wears a specific outfit.
public class OutfitLimitedInteraction : MonoBehaviour, IInteractive
{
    [SerializeField] OutfitType targetOutfit;

    // raised whenever the interaction is executed
    public Action OnInteracted;

    public InteractionType GetInteractionType()
    {
        // usable only when the player's current outfit matches the required one
        var playerOutfit = GameRuleManager.Instance.Player.CurrentOutfitType;
        return playerOutfit == targetOutfit ? InteractionType.Use : InteractionType.Locked;
    }

    public Vector3 GetPosition()
    {
        return transform.position;
    }

    public OutfitData GetDisguiseOutfit()
    {
        // this object never provides a disguise
        return null;
    }

    public void Interact(InteractionType interactionType)
    {
        OnInteracted?.Invoke();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Visual part of an outfit: renderers (used for outline tinting by
// OutfitHundler) and the animator driving the model.
public class Outfit : MonoBehaviour
{
    [SerializeField] Renderer[] renderers;
    [SerializeField] Animator animator;
    public Renderer[] Renderers { get { return renderers; } }
    public Animator Animator { get { return animator; } }
}

// Outfit categories used for disguises and outfit-gated interactions.
public enum OutfitType
{
    Police,
    Bandit,
    Chef,
    Naked,
    Vip,
    Guard,
    Worker
}
}<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using TMPro;
// UI button reflecting the player's current interaction target:
// label text and enabled state follow the available InteractionType.
public class InteractButton : MonoBehaviour
{
    [SerializeField] Button button;
    [SerializeField] TextMeshProUGUI buttonText;
    [SerializeField] Player player;

    // raised with the player's current target when the button is pressed
    public Action<IInteractive> OnClick;

    public IInteractive TargetInteraction { get { return player.TargetInteraction; } }

    private void Start()
    {
        player.OnInteractionExit += SetDisable;
        player.OnInteractionUpdate += UpdateButtonView;
        UpdateButtonView(InteractionType.Kill);
        button.interactable = false;
        button.onClick.AddListener(() => OnClick?.Invoke(player.TargetInteraction));
    }

    void SetDisable()
    {
        button.interactable = false;
    }

    // Updates the label (Japanese UI strings) and the enabled state.
    void UpdateButtonView(InteractionType type)
    {
        switch (type)
        {
            case InteractionType.Disguise:
                buttonText.text = "変装";      // "disguise"
                button.interactable = true;
                break;
            case InteractionType.Kill:
                buttonText.text = "倒す";      // "take down"
                button.interactable = true;
                break;
            case InteractionType.Use:
                buttonText.text = "毒を盛る";  // "poison"
                button.interactable = true;
                break;
            case InteractionType.Locked:
                // keep the previous label, just disable the button
                button.interactable = false;
                return;
            default:
                // Pickup (and any future type) currently leaves the view unchanged
                break;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Materials for AI vision visuals - field names suggest safe / danger /
// detected states; confirm usage at the consuming component.
[CreateAssetMenu(fileName = "VisionMaterialData",menuName = "ScriptableObjects/CreateVisionMaterialData")]
public class VisionMaterialData : ScriptableObject
{
    public Material dangerMaterial;
    public Material safeMaterial;
    public Material detectedMaterial;
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UniRx;
using AKUtil;
// Central game-state singleton: drives the clear / finisher / gameover flows.
public class GameRuleManager : SingletonMonoBehaviour<GameRuleManager>
{
    [SerializeField] Player player;
    [SerializeField] GameClearView gameClearView;
    [SerializeField] GameClearView gameoverView;

    internal Player Player { get { return player; } }

    // NOTE(review): never invoked in this class - confirm external publishers exist.
    public static Action<Player> OnPlayerAppearanceChanged;

    bool isClear = false;  // once set, Gameover() becomes a no-op

    public void GameClear()
    {
        isClear = true;
        StartCoroutine(GameClearCoroutine());
    }

    IEnumerator GameClearCoroutine()
    {
        // short beat before showing the clear screen
        yield return new WaitForSeconds(1);
        gameClearView.Show();
    }

    // Plays the special poison-kill ending by loading the "Finisher" scene.
    public void PlayPoisonFinisher()
    {
        isClear = true;
        StartCoroutine(PlayFinisherCoroutine());
    }

    IEnumerator PlayFinisherCoroutine()
    {
        yield return new WaitForSeconds(2f);
        SceneManager.LoadScene("Finisher");
    }

    public void Gameover()
    {
        // ignore gameover triggers after the game has already been cleared
        if (isClear) return;
        StartCoroutine(GameoverCoroutine());
    }

    IEnumerator GameoverCoroutine()
    {
        yield return new WaitForSeconds(0.5f);
        gameoverView.Show();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Finisher scene controller: shows the clear screen after a fixed delay.
public class FinisherView : MonoBehaviour
{
    [SerializeField] GameClearView clearView;

    // Unity coroutine Start: wait out the finisher, then show the clear view.
    IEnumerator Start()
    {
        yield return new WaitForSeconds(10);
        clearView.Show();
    }
}
<file_sep>using System;
using UniRx;
using UnityEngine;
// Enemy-side interaction state: derives the interaction currently offered
// to the player from AI state, vision and outfit.
public class Enemy : MonoBehaviour, IInteractive
{
    [SerializeField] AIController ai;
    [SerializeField] CharacterDetector characterDetector;
    [SerializeField] OutfitHundler outfitHundler;
    [SerializeField] HumanController humanController;

    public InteractionType CurrentInteraction { private set; get; }

    void Start()
    {
        // re-evaluate whenever vision, AI state or outfit changes
        characterDetector.OnLost.Subscribe(_ => UpdateInteractionType());
        characterDetector.OnDetect.Subscribe(_ => UpdateInteractionType());
        ai.OnStateChanged.Subscribe(_ => UpdateInteractionType());
        outfitHundler.OnOutfitChanged.Subscribe(_ => UpdateInteractionType());
    }

    void UpdateInteractionType()
    {
        // dead AND still dressed -> the player can take the outfit (disguise)
        bool canDisguise = ai.IsDead() && !outfitHundler.IsNaked;
        if (canDisguise)
        {
            CurrentInteraction = InteractionType.Disguise;
            return;
        }
        // dead and already naked -> nothing to do
        if (ai.IsDead())
        {
            CurrentInteraction = InteractionType.Locked;
            return;
        }
        // alive and not watching the player -> can be killed
        if (!characterDetector.IsInvision)
        {
            CurrentInteraction = InteractionType.Kill;
            return;
        }
        // alive and watching -> nothing can be done
        CurrentInteraction = InteractionType.Locked;
        return;
    }

    public void Interact(InteractionType interactionType)
    {
        // forward to the human controller, which executes the actual reaction
        humanController.Interacted(interactionType);
    }

    public InteractionType GetInteractionType()
    {
        return CurrentInteraction;
    }

    public Vector3 GetPosition()
    {
        return transform.position;
    }

    public OutfitData GetDisguiseOutfit()
    {
        return outfitHundler.CurrentOutfit;
    }
}
}<file_sep>using System;
using System.Linq;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.AI;
using UnityEngine.Assertions;
using UniRx;
using AKUtil;
/// <summary>
/// State-based enemy AI.
/// Patrols a route, chases the player once detected, and returns to its
/// post when the target is lost.
/// Path-finding is delegated to the NavMesh; the resulting route corners
/// are converted into movement input (IFTOInput.Direction) that the
/// shared HumanController consumes.
/// </summary>
[RequireComponent(typeof(NavMeshAgent))]
[RequireComponent(typeof(OutfitHundler))]
[RequireComponent(typeof(CharacterDetector))]
[RequireComponent(typeof(HumanController))]
public class AIController : IFTOInput
{
    [SerializeField] AIPath aiPath;       // patrol route (waypoints with wait times)
    [SerializeField] EnemyData enemyData; // outfit + hostility configuration
    OutfitHundler outfitHundler;
    CharacterDetector characterDetector;
    HumanController humanController;
    NavMeshAgent navMeshAgent;
    private StateMachine stateMachine = new StateMachine();
    private RelationshipHundler relationshipHundler;
    // Current chase target, set when the detector reports the player.
    private IInteractive chaseTarget;
    // Squared-distance threshold for "arrived at waypoint".
    private static readonly float arriveDiffThreshold = 0.1f;
    // Squared-distance threshold for "close enough to kill".
    private static readonly float killDiffThreshold = 1.05f;
    public State Patrol = new State("Patrol");
    public State Chase = new State("Chase");
    public State Dead = new State("Dead");
    public bool IsDead()
    {
        return IsCurrentState(Dead);
    }
    // Expose subscription only; the subject itself stays private.
    // NOTE(review): nothing in this file publishes to this subject.
    private Subject<RelationshipType> relationshipChangeSubject = new Subject<RelationshipType>();
    public IObservable<RelationshipType> OnRelationshipChanged { get { return relationshipChangeSubject; } }
    public IObservable<State> OnStateChanged { get { return stateMachine.OnStateChanged; } }
    private bool isHostile = false; // NOTE(review): assigned here only, never read.
    public bool IsCurrentState(State target)
    {
        return target == stateMachine.CurrentState;
    }
    private void Awake()
    {
        Setup();
    }
    void Setup()
    {
        navMeshAgent = GetComponent<NavMeshAgent>();
        humanController = GetComponent<HumanController>();
        outfitHundler = GetComponent<OutfitHundler>();
        characterDetector = GetComponent<CharacterDetector>();
        bool isConflict = false;
        // Register allowed transitions; State.operator> returns true when
        // the transition was already registered (duplicate check).
        isConflict |= Patrol > Chase;
        isConflict |= Chase > Patrol;
        isConflict |= Patrol > Dead;
        isConflict |= Chase > Dead;
        Assert.IsFalse(isConflict, "遷移が重複しています");
        relationshipHundler = new RelationshipHundler(enemyData);
        outfitHundler.Initialize(enemyData.OutfitData, relationshipHundler);
    }
    // Editor helper: apply the configured outfit from the context menu.
    [ContextMenu("Setoutfit")]
    void Setoutfit()
    {
        var oh = GetComponent<OutfitHundler>();
        oh.SetOutfit(enemyData.OutfitData);
    }
    private void Start()
    {
        // Wire state lifecycle hooks and detector/death events.
        Patrol.OnStart.Subscribe(_ => OnStartPatrol());
        Patrol.OnUpdate.Subscribe(_ => UpdatePatrol());
        Chase.OnStart.Subscribe(_ => OnStartChase());
        Chase.OnUpdate.Subscribe(_ => OnUpdateChase());
        Chase.OnEnd.Subscribe(_ => OnEndChase());
        Dead.OnStart.Subscribe(_ => OnStartDead());
        stateMachine.SetInitialState(Patrol);
        characterDetector.SetRelationship(relationshipHundler);
        characterDetector.OnDetect.Subscribe(DetectCallback);
        characterDetector.OnLost.Subscribe(_ => LostCollback());
        humanController.OnDead += () => stateMachine.PushState(Dead);
    }
    // --- patrol bookkeeping ---
    int pathIndex = 0;               // index of the waypoint currently targeted
    int cornerIndex = 0;             // NOTE(review): unused
    AIPath.Point currentTargetPoint; // waypoint currently targeted
    Vector3 currentCorner;           // NOTE(review): unused
    Vector3 moveStartPoint;          // position when the current leg started
    Vector3 positionBuffer;          // scratch copy of transform.position
    Vector3 lookDirection;           // facing to hold while waiting at a waypoint
    float interval = 0;              // wait time at the current waypoint
    float timer = 0;                 // time spent waiting so far
    // Set the first patrol destination.
    void OnStartPatrol()
    {
        Debug.Log("pat");
        moveStartPoint = transform.position;
        // No patrol path configured.
        if (aiPath == null || aiPath.Path == null) return;
        currentTargetPoint = aiPath.Path[pathIndex];
        lookDirection = currentTargetPoint.Forward;
        navMeshAgent.SetDestination(currentTargetPoint.Position);
        moveStartPoint = transform.position;
        // Large value so the first wait check is skipped immediately.
        timer = 10000;
        Direction = Vector3.zero;
    }
    // Walk along the configured patrol path.
    void UpdatePatrol()
    {
        navMeshAgent.SetDestination(currentTargetPoint.Position);
        // Still waiting at a waypoint: hold position, keeping a tiny input
        // in the waypoint's facing direction (presumably to keep facing).
        if (timer < interval)
        {
            Direction = lookDirection * 0.001f;
            timer += Time.deltaTime;
            return;
        }
        // No patrol path configured.
        if (aiPath == null || aiPath.Path == null) return;
        var navPath = navMeshAgent.path.corners;
        if (navPath.Length <= 0) return;
        // Arrived when no route remains or we are within the threshold.
        bool hasArrived = navPath.Length <= 1 || (transform.position - currentTargetPoint.Position).sqrMagnitude <= arriveDiffThreshold;
        if (hasArrived)
        {
            Direction = Vector3.zero;
            interval = aiPath.Path[pathIndex].WaitTime;
            lookDirection = currentTargetPoint.Forward;
            pathIndex++;
            pathIndex %= aiPath.Path.Length; // loop the route
            moveStartPoint = transform.position;
            currentTargetPoint = aiPath.Path[pathIndex];
            navMeshAgent.SetDestination(currentTargetPoint.Position);
            // Start waiting.
            timer = 0;
            return;
        }
        // Feed the movement input toward the next route corner,
        // halved: patrol moves at walking pace.
        positionBuffer = transform.position;
        var posDiff = (navPath[1] - positionBuffer).normalized / 2;
        Direction = posDiff;
    }
    // Target lost: go back to patrolling.
    void LostCollback()
    {
        if (IsCurrentState(Patrol)) return;
        stateMachine.PushState(Patrol);
    }
    // Target detected: start chasing (ignored while already chasing).
    void DetectCallback(IInteractive target)
    {
        if (IsCurrentState(Chase)) return;
        chaseTarget = target;
        stateMachine.PushState(Chase);
    }
    void OnStartChase()
    {
        navMeshAgent.SetDestination(chaseTarget.GetPosition());
    }
    void OnUpdateChase()
    {
        navMeshAgent.SetDestination(chaseTarget.GetPosition());
        var navPath = navMeshAgent.path.corners;
        if (navPath.Length <= 0) return;
        // Arrived when no route remains or the target is within kill range.
        bool hasArrived = navPath.Length <= 1 || (transform.position - chaseTarget.GetPosition()).sqrMagnitude <= killDiffThreshold;
        if (hasArrived)
        {
            // In range: emit the interaction (routed to HumanController via
            // IFTOInput.OnInteract) and resume patrol.
            interactionSubject.OnNext(chaseTarget);
            stateMachine.PushState(Patrol);
            Direction = Vector3.zero;
            Debug.Log("Player killed");
            return;
        }
        // Feed the movement input (full speed while chasing).
        positionBuffer = transform.position;
        var posDiff = (navPath[1] - positionBuffer).normalized;
        Direction = posDiff;
    }
    void OnEndChase()
    {
        Direction = Vector3.zero;
    }
    void OnStartDead()
    {
        // Stop seeing and moving once dead.
        characterDetector.Sleep();
        Direction = Vector3.zero;
    }
    private void Update()
    {
        stateMachine.Update();
    }
    private void FixedUpdate()
    {
        stateMachine.FixedUpdate();
    }
    private void LateUpdate()
    {
        stateMachine.LateUpdate();
    }
    /// <summary>
    /// Draw the current NavMesh route as gizmos.
    /// </summary>
    void OnDrawGizmos()
    {
        if (navMeshAgent && navMeshAgent.enabled)
        {
            Gizmos.color = Color.red;
            var prefPos = transform.position;
            foreach (var pos in navMeshAgent.path.corners)
            {
                Gizmos.DrawLine(prefPos, pos);
                prefPos = pos;
            }
        }
    }
}
/// <summary>
/// Tracks whether an enemy is hostile or friendly toward the player,
/// re-evaluated from the player's current outfit.
/// </summary>
public class RelationshipHundler
{
    /// <summary>
    /// Backing reactive property; only reading and observing are public.
    /// </summary>
    private ReactiveProperty<RelationshipType> rpCurrentRelationship = new ReactiveProperty<RelationshipType>();
    public IObservable<RelationshipType> OnRelationshipChanged { get { return rpCurrentRelationship; } }
    public RelationshipType CurrentRelationshop { get { return rpCurrentRelationship.Value; } }
    private EnemyData enemyData;
    public RelationshipHundler(EnemyData enemyData)
    {
        this.enemyData = enemyData;
        var player = GameRuleManager.Instance.Player;
        ChangeRelationship(player);
        // Re-evaluate whenever the player's appearance changes.
        // NOTE(review): this static subscription is never removed, so this
        // object stays reachable (and keeps firing) after the owning enemy
        // is destroyed — consider adding an unsubscribe path.
        GameRuleManager.OnPlayerAppearanceChanged += ChangeRelationship;
    }
    // Hostile when the player's current outfit is on this enemy's hostile list.
    private void ChangeRelationship(Player player)
    {
        if (enemyData.Hostiles.Contains(player.CurrentOutfitType))
        {
            ChangeIntoHostile();
            return;
        }
        ChangeIntoFriendly();
    }
    void ChangeIntoHostile()
    {
        rpCurrentRelationship.Value = RelationshipType.Hostile;
    }
    void ChangeIntoFriendly()
    {
        rpCurrentRelationship.Value = RelationshipType.Friendly;
    }
}
/// <summary>Relationship of an enemy toward the player.</summary>
public enum RelationshipType
{
    Hostile,
    Friendly
}
<file_sep>using System.Linq;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
/// <summary>
/// The player character: forwards interaction input, broadcasts
/// appearance changes, and scans the surroundings every frame for the
/// nearest interactive object.
/// </summary>
[DefaultExecutionOrder(-1)]
[RequireComponent(typeof(HumanController))]
public class Player : MonoBehaviour, IInteractive
{
    [SerializeField] OutfitData outfitData;
    [SerializeField] OutfitHundler outfitHundler;
    HumanController humanController;
    static readonly float rayRadius = 1.8f;

    /// <summary>Type of the outfit the player currently wears.</summary>
    public OutfitType CurrentOutfitType => outfitHundler.CurrentOutfit.Type;

    /// <summary>Fired each frame an interactive object is in range.</summary>
    public Action<InteractionType> OnInteractionUpdate;
    /// <summary>Fired once when the tracked interactive object is lost.</summary>
    public Action OnInteractionExit;

    public bool IsDead => humanController.IsDead;

    /// <summary>Nearest interactive object in range, or null.</summary>
    public IInteractive TargetInteraction { private set; get; }

    private void Awake()
    {
        outfitHundler.SetOutfit(outfitData, true);
        humanController = GetComponent<HumanController>();
        humanController.OnDisguise += () => GameRuleManager.OnPlayerAppearanceChanged?.Invoke(this);
        humanController.OnDead += () => GameRuleManager.Instance.Gameover();
    }

    // Sphere-cast around the player and track the closest interactive.
    private void Update()
    {
        int interactiveMask = 1 << (int)Layer.Interactive;
        var hits = Physics.SphereCastAll(transform.position, rayRadius, Vector3.up, 0.01f, interactiveMask);

        if (hits.Length == 0)
        {
            // Nothing in range any more: notify exit once.
            if (TargetInteraction != null)
            {
                TargetInteraction = null;
                OnInteractionExit?.Invoke();
            }
            return;
        }

        // Pick the closest hit.
        var nearest = hits
            .OrderBy(h => (h.transform.position - transform.position).sqrMagnitude)
            .First();
        var interactive = nearest.collider.gameObject.GetComponent<IInteractive>();
        if (interactive == null) return;

        TargetInteraction = interactive;
        OnInteractionUpdate?.Invoke(TargetInteraction.GetInteractionType());
    }

    public Vector3 GetPosition() => transform.position;

    // From an enemy's point of view the player is always a kill target.
    public InteractionType GetInteractionType() => InteractionType.Kill;

    public void Interact(InteractionType interactionType) => humanController.Interacted(interactionType);

    public OutfitData GetDisguiseOutfit() => outfitHundler.CurrentOutfit;
}
<file_sep>using System.Collections;
using System;
using System.Linq;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
namespace AKUtil
{
    /// <summary>
    /// Helper for re-targeting a SkinnedMeshRenderer onto an existing rig.
    /// Useful when adding a new skin (with an identical bone hierarchy)
    /// to an already-built character prefab.
    /// </summary>
    public static class SkinnedMeshRendererExtension
    {
        /// <summary>
        /// Copies the mesh of <paramref name="newMeshRenderer"/> onto
        /// <paramref name="before"/> and rebinds its bone array to the
        /// transforms found under <paramref name="root"/>, matched by name.
        /// Bones with no matching transform are left null (same as before).
        /// </summary>
        public static void CopyBones(Transform root, SkinnedMeshRenderer before, SkinnedMeshRenderer newMeshRenderer)
        {
            // update mesh
            before.sharedMesh = newMeshRenderer.sharedMesh;
            Transform[] childrens = root.GetComponentsInChildren<Transform>(true);
            // Index the rig once by name instead of scanning the child array
            // per bone (was O(bones * children) via Array.Find). First
            // occurrence wins, matching Array.Find on duplicate names.
            var childByName = new Dictionary<string, Transform>(childrens.Length);
            foreach (var child in childrens)
            {
                if (!childByName.ContainsKey(child.name)) childByName.Add(child.name, child);
            }
            // sort bones.
            var sourceBones = newMeshRenderer.bones;
            Transform[] bones = new Transform[sourceBones.Length];
            for (int boneOrder = 0; boneOrder < sourceBones.Length; boneOrder++)
            {
                // Leaves null when the name is absent, as Array.Find did.
                childByName.TryGetValue(sourceBones[boneOrder].name, out bones[boneOrder]);
            }
            before.bones = bones;
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx.Toolkit;
namespace AKUtil{
    /// <summary>
    /// UniRx object pool that instantiates copies of a Component prefab
    /// and parents them under a fixed transform.
    /// </summary>
    public class ComponentPool<T> : ObjectPool<T>
        where T : Component
    {
        private readonly T prefab;                  // template to clone
        private readonly Transform parentTransform; // parent for pooled instances
        public ComponentPool(Transform parentTransform, T prefab)
        {
            this.parentTransform = parentTransform;
            this.prefab = prefab;
        }
        /// <summary>
        /// Called by the pool whenever it needs a brand-new instance.
        /// </summary>
        protected override T CreateInstance()
        {
            var e = GameObject.Instantiate(prefab);
            e.transform.SetParent(parentTransform);
            return e;
        }
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Assertions;
using AKUtil;
using UniRx;
/// <summary>
/// Movement half of HumanController (partial class): turns the current
/// IFTOInput.Direction into facing, rigidbody velocity, and the
/// Idle/Walk/Run animation states.
/// </summary>
public partial class HumanController : MonoBehaviour
{
    static readonly float directionThreshold = 0.1f; // NOTE(review): unused
    [SerializeField] float maxVelocity = 5f;     // top speed at full input
    [SerializeField] float curveVelocity = 0.5f; // NOTE(review): not referenced here
    [SerializeField] float moveDrug = 10;        // deceleration ("drag") applied when input is released
    [SerializeField] [Range(0, 1)] float angularSpeed = 1f; // turn lerp factor (1 = instant)
    float prevVelocity;   // scalar speed from the previous frame
    Vector3 nextVelocity; // velocity applied on the next FixedUpdate
    StateMachine stateMachine = new StateMachine();
    public State Idle = new State("Idle");
    public State Run = new State("Run");
    public State Walk = new State("Walk");
    // Called from the main partial's Start().
    partial void OnStart()
    {
        bool isConflict = false;
        // Register transitions; State.operator> flags duplicates.
        isConflict |= Idle > Walk;
        isConflict |= Idle > Run;
        isConflict |= Walk > Run;
        isConflict |= Walk > Idle;
        isConflict |= Run > Idle;
        isConflict |= Run > Walk;
        stateMachine.SetInitialState(Idle);
        //Assert.IsFalse(isConflict, "遷移が重複しています");
        Idle.OnStart.Subscribe(_ => OnStartIdle());
        Walk.OnStart.Subscribe(_ => OnStartWalk());
        Run.OnStart.Subscribe(_ => OnStartRun());
    }
    void OnStartIdle()
    {
        outfitHundler.Animator.CrossFade("Idle", 0.1f, 0, 0);
    }
    void OnStartRun()
    {
        outfitHundler.Animator.CrossFade("Run", 0.1f, 0, 0);
    }
    void OnStartWalk()
    {
        outfitHundler.Animator.CrossFade("Walk", 0.1f, 0, 0);
    }
    // Compute nextVelocity (plus facing and locomotion state) from input.
    void UpdateVelocity()
    {
        if (input.IsLocked || IsDead)
        {
            nextVelocity = Vector3.zero;
            return;
        }
        var direction = input.Direction;
        var magnitude = direction.magnitude;
        // Turning: lerp the forward vector toward the input direction.
        var newForward = new Vector3(direction.x, 0, direction.z).normalized;
        newForward = Vector3.Lerp(transform.forward, newForward, angularSpeed);
        transform.forward = newForward;
        if (magnitude <= 0.02f)
        {
            // Input released: decay the speed instead of stopping dead.
            prevVelocity = Mathf.Max(0, prevVelocity - moveDrug * Time.deltaTime);
            nextVelocity = transform.forward * prevVelocity;
            if (!stateMachine.NowStateIs(Idle)) stateMachine.PushState(Idle);
            return;
        }
        /*
        //回転の制御
        var angleY = Mathf.Atan2(direction.z, direction.x) * Mathf.Rad2Deg;
        angleY -= 90f;
        var newAngle = Quaternion.Euler(0f, -angleY, 0f);
        var rotation = Quaternion.Lerp(transform.rotation, newAngle, angularSpeed);
        var velocity = Mathf.Lerp(0,maxVelocity, direction.magnitude);
        rigidbody.MoveRotation(rotation);
        */
        // Speed scales with input magnitude, accelerating by at most 0.5
        // per frame; vertical velocity (gravity) is preserved.
        var velocity = Mathf.Lerp(0, maxVelocity, direction.magnitude);
        velocity = Mathf.Min(prevVelocity + 0.5f, velocity);
        var velocityVector = transform.forward * velocity;
        velocityVector.y = rigidbody.velocity.y;
        nextVelocity = velocityVector;
        prevVelocity = velocity;
        // Partial input -> Walk, near-full input -> Run.
        if (magnitude <= 0.8f)
        {
            if (!stateMachine.NowStateIs(Walk)) stateMachine.PushState(Walk);
            return;
        }
        if (!stateMachine.NowStateIs(Run)) stateMachine.PushState(Run);
    }
    void UpdateRigidbody()
    {
        rigidbody.velocity = nextVelocity;
    }
    // Input is sampled per frame; velocity is applied in FixedUpdate.
    void Update()
    {
        UpdateVelocity();
        //stateMachine.Update();
    }
    private void FixedUpdate()
    {
        UpdateRigidbody();
        //stateMachine.FixedUpdate();
    }
    private void LateUpdate()
    {
        //stateMachine.LateUpdate();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Per-enemy-type settings: the outfit it wears and which player outfits
// it treats as hostile (consumed by RelationshipHundler).
[CreateAssetMenu(fileName = "EnemyData", menuName = "ScriptableObjects/EnemyData")]
public class EnemyData : ScriptableObject
{
    // Outfit this enemy spawns with (also what the player can loot).
    public OutfitData OutfitData;
    // Player outfit types that make this enemy hostile.
    public OutfitType[] Hostiles;
}<file_sep>using System;
using System.Collections.Generic;
using UniRx;
namespace AKUtil
{
public class State
{
public string Name { get; private set; }
private Subject<Unit> StartSubject = new Subject<Unit>();
private Subject<Unit> EndSubject = new Subject<Unit>();
private Subject<Unit> UpdateSubject = new Subject<Unit>();
private Subject<Unit> FixedUpdateSubject = new Subject<Unit>();
private Subject<Unit> LateUpdateSubject = new Subject<Unit>();
public IObservable<Unit> OnStart { get { return StartSubject; } }
public IObservable<Unit> OnEnd { get { return EndSubject; } }
public IObservable<Unit> OnUpdate { get { return UpdateSubject; } }
public IObservable<Unit> OnFixedUpdate { get { return FixedUpdateSubject; } }
public IObservable<Unit> OnLateUpdate { get { return LateUpdateSubject; } }
HashSet<State> nextStates = new HashSet<State>();
public State(string name)
{
Name = name;
}
public static bool operator >(State current, State next)
{
return !current.nextStates.Add(next);
}
public static bool operator <(State next, State prev)
{
return prev > next;
}
public bool CanShiftTo(State next)
{
return (this.nextStates.Contains(next));
}
public void Update()
{
UpdateSubject.OnNext(Unit.Default);
}
public void FixedUdate()
{
FixedUpdateSubject.OnNext(Unit.Default);
}
public void LateUpdate()
{
LateUpdateSubject.OnNext(Unit.Default);
}
public void Enter()
{
StartSubject.OnNext(Unit.Default);
}
public void Exit()
{
EndSubject.OnNext(Unit.Default);
}
}
}<file_sep># fake-them-all
暗殺がテーマの3Dゲームです。 <br>
変装して標的に近づき、誰にも気付かれずに抹殺することが目的です。 <br>
# プレイ動画
以下のサムネイルからプレイ動画に飛びます。<br>
[](https://www.youtube.com/watch?v=dnfj7rQEJig)
# 内容について
3Dモデル等はアセットを使用しているので、<br>
著作権の観点からスクリプトのみ公開しています。<br><br>
Unityの使い方だけでなく、オブジェクト指向やデザインパターンについての理解度を示すために公開しています。<br>
以下、コードの概要です。
- ステートマシンやオブジェクトプール、シングルトンの実装。
- 敵の種類について、「同じ見た目だけど違う挙動をする」等の設定を、ScriptableObjectの組み合わせだけで実現。
- 敵AIをステートマシンで実装。NavMeshの経路だけ取り出し、移動は自前で制御。
- 敵の視界を扇型に飛ぶレイで実装。レイの本数、扇の角度や半径を変更することで精度や視界の大きさを調節可能。
- プレイヤーと敵で共通する処理(移動やパンチ等)は別のクラスに書き出し、プレイヤーのInputクラスや敵AIの入力を利用して動かす。
# 使用技術
- Unity v2019.3.10f1
- C# 7.3
- .NET 4.X
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Thin wrapper holding a character Animator reference.
// NOTE(review): the field is not used by any code visible in this file set.
public class HumanAnimator : MonoBehaviour
{
    [SerializeField] Animator animator;
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx;
/// <summary>
/// Player input source: keyboard axes or a mouse drag drive movement;
/// Space or the on-screen interact button triggers interactions.
/// </summary>
public class SwipeController : IFTOInput
{
    [SerializeField] InteractButton button;
    // Scale from screen-space drag pixels to movement input magnitude.
    static readonly float sensitivity = 0.0062f;
    Vector3 prevPos;    // mouse position where the drag started
    Vector3 currentPos; // latest mouse position while dragging
    private void Start()
    {
        button.OnClick += Interact;
        Direction = Vector3.zero;
    }
    void Update ()
    {
        // No input while an interaction animation has locked the character.
        if (IsLocked) return;
        UpdateMousePosition();
    }
    bool isHoldingAnyKey; // true while the keyboard axes are being held
    void UpdateMousePosition()
    {
        // --- keyboard ---
        var x = Input.GetAxis("Horizontal");
        var y = Input.GetAxis("Vertical");
        var space = Input.GetKeyDown(KeyCode.Space);
        var input = new Vector3(x, 0, y);
        if (input.sqrMagnitude >= 0.01f)
        {
            Direction = new Vector3(x, 0, y).normalized;
            isHoldingAnyKey = true;
        }
        // Zero the direction exactly once when the keys are released.
        if (input.sqrMagnitude < 0.01f && isHoldingAnyKey)
        {
            Direction = Vector3.zero;
            isHoldingAnyKey = false;
        }
        if (space)
        {
            Interact();
        }
        // --- mouse drag ---
        var onDown = Input.GetMouseButtonDown(0);
        var down = Input.GetMouseButton(0);
        var onUp = Input.GetMouseButtonUp(0);
        if (onDown)
        {
            prevPos = currentPos = Input.mousePosition;
            return;
        }
        if (down)
        {
            // prevPos is NOT advanced while dragging (note the commented-out
            // line), so the delta is measured from the press point —
            // virtual-joystick style rather than per-frame swipe.
            //prevPos = currentPos;
            currentPos = Input.mousePosition;
            UpdateController();
            return;
        }
        if (onUp)
        {
            prevPos = currentPos;
            Direction = Vector3.zero;
            return;
        }
    }
    // Interact with whatever the interact button currently targets.
    void Interact()
    {
        if (button.TargetInteraction == null) return;
        interactionSubject.OnNext(button.TargetInteraction);
    }
    // Overload used when the caller supplies the target directly.
    void Interact(IInteractive target)
    {
        if (target == null) return;
        interactionSubject.OnNext(target);
    }
    // Convert the screen-space drag delta into a ground-plane direction.
    void UpdateController()
    {
        var delta = currentPos - prevPos;
        delta.z = delta.y; // screen Y drives world Z
        delta.y = 0;
        Direction = delta * sensitivity;
    }
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx;
/// <summary>
/// Behaviour shared by every human character (player and enemies):
/// performing interactions (kill / disguise / poison) and reacting to
/// being the target of one. Movement lives in the other partial file.
/// </summary>
[RequireComponent(typeof(OutfitHundler))]
[RequireComponent(typeof(Collider))]
public partial class HumanController : MonoBehaviour
{
    [SerializeField] IFTOInput input;     // player input or AI-driven input
    [SerializeField] Rigidbody rigidbody; // NOTE(review): hides the inherited Component.rigidbody member
    OutfitHundler outfitHundler;
    public Action OnDead;     // raised when this character is killed
    public Action OnDisguise; // raised after a disguise completes
    public bool IsDead { private set; get; } = false;
    // Implemented by the movement partial.
    partial void OnStart();
    void Start()
    {
        input.OnInteract.Subscribe(Interact);
        outfitHundler = GetComponent<OutfitHundler>();
        OnDead += () => IsDead = true;
        OnStart();
    }
    // This character ACTS on a target: dispatch on the interaction the
    // target currently offers, then notify the target.
    void Interact(IInteractive interactive)
    {
        var type = interactive.GetInteractionType();
        switch (type)
        {
            case InteractionType.Kill:
                Kill(interactive);
                break;
            case InteractionType.Disguise:
                Disguise(interactive);
                break;
            case InteractionType.Use:
                Poison();
                break;
            default:
                break;
        }
        interactive.Interact(type);
    }
    // This character IS the target of someone else's interaction.
    public void Interacted(InteractionType interactionType)
    {
        switch (interactionType)
        {
            case InteractionType.Kill:
                Killed();
                break;
            case InteractionType.Disguise:
                BeNaked(); // someone is looting our outfit
                break;
            default:
                return;
        }
    }
    Coroutine coroutine; // currently running action coroutine, if any
    // Die: cancel any running action, lock input, play the death animation.
    void Killed()
    {
        // Collider becomes a trigger so the body stops colliding — presumably
        // so others can walk through the corpse; confirm intent.
        GetComponent<Collider>().isTrigger = true;
        StopCoroutine();
        if (IsDead) return;
        IsDead = true;
        OnDead?.Invoke(); // the Start subscription sets IsDead again (harmless)
        input.IsLocked = true;
        outfitHundler.Animator.CrossFade("Death", 0, 0, 0);
    }
    // Cancels the running action coroutine and releases the input lock.
    void StopCoroutine()
    {
        if (coroutine != null)
        {
            input.IsLocked = false;
            StopCoroutine(coroutine);
        }
    }
    // Kill the target: face it, then play the punch sequence.
    void Kill(IInteractive target)
    {
        Debug.Log("kill");
        transform.LookAt(target.GetPosition());
        StopCoroutine();
        coroutine = StartCoroutine(KillCoroutine());
    }
    IEnumerator KillCoroutine()
    {
        input.IsLocked = true;
        outfitHundler.Animator.CrossFade("Punch", 0, 0, 0);
        // 0.48 s presumably matches the punch animation length — TODO confirm.
        yield return new WaitForSeconds(0.48f);
        outfitHundler.Animator.CrossFade("Idle", 0.1f, 0, 0);
        yield return new WaitForSeconds(0.1f);
        input.IsLocked = false;
    }
    void Disguise(IInteractive target)
    {
        StopCoroutine();
        coroutine = StartCoroutine(DisguiseCoroutine(target));
    }
    // Play the disguise animation, then actually swap the outfit.
    IEnumerator DisguiseCoroutine(IInteractive target)
    {
        var outfit = target.GetDisguiseOutfit();
        if (outfit == null) yield break;
        input.IsLocked = true;
        outfitHundler.Animator.CrossFade("Disguise", 0, 0, 0);
        // 4/3 s lead before the outfit visibly changes — TODO confirm timing.
        yield return new WaitForSeconds(4f/3);
        outfitHundler.SetOutfit(outfit);
        outfitHundler.Animator.CrossFade("Disguise", 0, 0, 0);
        outfitHundler.Animator.CrossFade("Idle", 0.1f, 0, 0);
        yield return new WaitForSeconds(0.1f);
        OnDisguise?.Invoke();
        input.IsLocked = false;
    }
    // "Use" interaction (poisoning): lock input and play the stir animation.
    // NOTE(review): the lock is never released here — presumably the finisher
    // scene change takes over (see PoisonFinisher); confirm.
    void Poison()
    {
        input.IsLocked = true;
        outfitHundler.Animator.CrossFade("Stir", 0, 0, 0);
    }
    // Our outfit is being looted: strip after the looter's animation lead time.
    void BeNaked()
    {
        StopCoroutine();
        coroutine = StartCoroutine(BeNakedCoroutine());
    }
    IEnumerator BeNakedCoroutine()
    {
        yield return new WaitForSeconds(4f / 3);
        outfitHundler.BeNaked();
        outfitHundler.Animator.CrossFade("Idle_Death", 0, 0, 0);
    }
}<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using AKUtil;
// Service-locator singleton exposing shared ScriptableObject datasets:
// the vision-cone material palette and the "naked" outfit.
public class DatasetLocator : SingletonMonoBehaviour<DatasetLocator>
{
    [SerializeField] VisionMaterialData visionMaterialData;
    [SerializeField] OutfitData nakedOutfitData;
    public VisionMaterialData VisionMaterialData { get { return visionMaterialData; } }
    public OutfitData NakedOutfitData { get { return nakedOutfitData; } }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Win-condition object: when the player performs the outfit-gated
// interaction on it (poisoning), the finisher sequence starts.
[RequireComponent(typeof(OutfitLimitedInteraction))]
public class PoisonFinisher : MonoBehaviour
{
    private void Start()
    {
        GetComponent<OutfitLimitedInteraction>().OnInteracted += GameClear;
    }
    // Despite the name, this kicks off the poison-finisher cutscene;
    // the actual clear screen follows from that scene.
    void GameClear()
    {
        GameRuleManager.Instance.PlayPoisonFinisher();
    }
}
<file_sep>using UnityEditor;
namespace AKUtil
{
    // Editor shortcut that toggles activeSelf on the selected GameObjects,
    // with Undo support.
    public static class ToggleGameObjectActiveKeyboardShortcut
    {
        // NOTE(review): looks intended as the MenuItem validation method,
        // but no [MenuItem(..., true)] attribute references it — confirm.
        public static bool IsAvailable()
        {
            return Selection.activeGameObject != null;
        }
        // An arbitrary capital letter is used for escaping; the actual
        // trigger is the underscore/backslash key (original note, translated).
#if UNITY_EDITOR_OSX
        [MenuItem("AKUtil/Toggle GameObject ActiveSelf _A_",priority = 610)]
#else
        [MenuItem("AKUtil/Toggle GameObject ActiveSelf _¥¥",priority = 410)]
#endif
        public static void Execute()
        {
            foreach (var go in Selection.gameObjects) {
                // Record for Undo before flipping the flag.
                Undo.RecordObject(go, go.name + ".activeSelf");
                go.SetActive(!go.activeSelf);
            }
        }
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Assertions;
using UniRx;
namespace AKUtil
{
/// <summary>
/// プッシュダウンステートマシン
/// 過去の状態を保存しておくことができる。
/// </summary>
public class StateMachine
{
//変更の監視と値の読み取りだけ公開する
private ReactiveProperty<State> rpCurrentState = new ReactiveProperty<State>();
public State CurrentState { private set{ rpCurrentState.Value = value; } get { return rpCurrentState.Value; } }
public IObservable<State> OnStateChanged { get { return rpCurrentState; } }
Stack<State> stateStack;
public StateMachine()
{
stateStack = new Stack<State>();
}
public void SetInitialState(State next)
{
stateStack.Push(next);
CurrentState = next;
CurrentState.Enter();
}
public void Update()
{
CurrentState.Update();
}
public void FixedUpdate()
{
CurrentState.FixedUdate();
}
public void LateUpdate()
{
CurrentState.LateUpdate();
}
public void PushState(State next)
{
#if UNITY_EDITOR
//Debug.Log(CurrentState.Name + " -> " + next.Name);
#endif
var canShift = CurrentState.CanShiftTo(next);
Assert.IsTrue(canShift, "その遷移は設定されていません");
if (!canShift) return;
CurrentState.Exit();
stateStack.Push(next);
CurrentState = next;
CurrentState.Enter();
}
public bool PopState()
{
var canPop = stateStack.Count <= 1;
Assert.IsTrue(canPop, "ステートがスタックされていません");
if (!canPop) return false;
stateStack.Pop();
CurrentState.Exit();
CurrentState = stateStack.Peek();
CurrentState.Enter();
return true;
}
public bool NowStateIs(State dest)
{
return CurrentState == dest;
}
}
}<file_sep>namespace AKUtil
{
    public static class GameLayer
    {
        /// <summary>
        /// Builds a physics layer mask that collides with every one of the
        /// given layers.
        /// </summary>
        /// <returns>The combined collision mask.</returns>
        /// <param name="targetLayers">Target layers.</param>
        public static int GetAllCollisionLayerMask(this Layer[] targetLayers)
        {
            int layerMask = 0;
            foreach (var layer in targetLayers)
            {
                layerMask |= 1 << (int)layer;
            }
            return layerMask;
        }
    }
}
// Unity physics layers used by the game; the numeric values must match
// the project's layer settings.
public enum Layer
{
    Player = 8,
    Interactive = 9
}<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEditor;
using UnityEngine;
using UniRx;
using AKUtil;
/// <summary>
/// Models an enemy's field of view with a fan of raycasts and notifies
/// when a hostile player enters it (OnDetect) or leaves it (OnLost).
/// Also drives the vision-cone color via the relationship state.
/// </summary>
public class CharacterDetector : MonoBehaviour
{
    [SerializeField] CircularSectorMeshRenderer circleRenderer;
    [SerializeField] float searchDistance = 0.5f;   // vision radius
    [SerializeField] float angle = 90;              // fan angle in degrees
    [SerializeField] float rayIntervalAngle = 1f;   // degrees between consecutive rays
    [SerializeField] Layer[] targetLayers;          // raycast targets (currently unused; see RaycastAll)
    Vector3[] rayDirections;        // precomputed local-space ray directions
    VisionMaterialData materialData;
    RelationshipHundler relationshipHundler;
    Coroutine colorChangeCoroutine;
    private Subject<Player> DetectSubject = new Subject<Player>();
    private Subject<Unit> LostSubject = new Subject<Unit>();
    // Note: OnDetect fires every frame while the player stays visible.
    public IObservable<Player> OnDetect { get { return DetectSubject; } }
    public IObservable<Unit> OnLost { get { return LostSubject; } }
    public bool IsActive { private set; get; } = true;
    bool isInVision = false;
    internal bool IsInvision { get { return isInVision; } }

    // Recolor the cone now and whenever the relationship changes.
    public void SetRelationship(RelationshipHundler relationshipHundler)
    {
        this.relationshipHundler = relationshipHundler;
        ChangeColorImmediately();
        relationshipHundler.OnRelationshipChanged
            .Subscribe(_ => ChangeColor());
    }

    /// <summary>Disables detection and hides the cone (used on death).</summary>
    public void Sleep()
    {
        IsActive = false;
        circleRenderer.gameObject.SetActive(false);
    }

    void Awake()
    {
        SetupCircleRenderer();
        SetupRay();
        OnLost.Subscribe(_ => ChangeColorImmediately());
    }

    void ChangeColorImmediately()
    {
        var mat = GetVisionMaterial(relationshipHundler.CurrentRelationshop);
        circleRenderer.SetMaterial(mat);
    }

    void ChangeColor()
    {
        if (colorChangeCoroutine != null) StopCoroutine(colorChangeCoroutine);
        colorChangeCoroutine = StartCoroutine(ChangeVisionColorCoroutine());
    }

    // Defer the recolor while the player is in view, so changing outfit
    // mid-chase does not repaint the cone until sight is lost.
    IEnumerator ChangeVisionColorCoroutine()
    {
        while (isInVision)
        {
            yield return null;
        }
        ChangeColorImmediately();
    }

    private Material GetVisionMaterial(RelationshipType relation)
    {
        bool isHostileAndDetect = relation == RelationshipType.Hostile && IsInvision;
        bool isHostileAndNotDetect = relation == RelationshipType.Hostile && !IsInvision;
        bool isFriendly = relation == RelationshipType.Friendly;
        if (isHostileAndDetect) return materialData.detectedMaterial;
        if (isHostileAndNotDetect) return materialData.dangerMaterial;
        if (isFriendly) return materialData.safeMaterial;
        return materialData.safeMaterial;
    }

    [ContextMenu("SetupVision")]
    void SetupCircleRenderer()
    {
        materialData = DatasetLocator.Instance.VisionMaterialData;
        circleRenderer.degree = angle;
        circleRenderer.radius = searchDistance;
        // Rotate the sector so the fan is centered straight ahead.
        float baseAngleOffset = 90f - angle / 2;
        circleRenderer.beginOffsetDegree = baseAngleOffset;
    }

    // Precompute the fan of ray directions (local space).
    void SetupRay()
    {
        int rayCount = Mathf.FloorToInt(angle / rayIntervalAngle) + 1;
        rayDirections = new Vector3[rayCount];
        float baseAngleOffset = 90f - angle / 2;
        for (int i = 0; i < rayCount; i++)
        {
            var offset = baseAngleOffset + i * rayIntervalAngle;
            var vector = new Vector3(1, 0, 0) * searchDistance;
            vector = Quaternion.Euler(0, offset + 180, 0) * vector;
            rayDirections[i] = vector;
        }
    }

    void Update()
    {
        if (!IsActive) return;
        var hits = RaycastAll();

        // BUGFIX: when the rays hit nothing at all, the old code returned
        // before the "lost sight" handling, so isInVision stayed true and
        // OnLost never fired once the player left ray range entirely.
        if (hits.Count == 0)
        {
            NotifyLostIfNeeded();
            return;
        }

        // Something was hit: look for the player among the hits.
        foreach (var hit in hits)
        {
            var player = hit.collider.gameObject.GetComponent<Player>();
            if (player == null) continue;
            // Dead or friendly players are ignored (original early-return
            // behavior preserved: no lost-notification in these cases).
            if (player.IsDead) return;
            if (relationshipHundler.CurrentRelationshop == RelationshipType.Friendly) return;
            DetectSubject.OnNext(player);
            isInVision = true;
            ChangeColorImmediately();
            return;
        }

        // Hits existed but none was the player: we lost sight.
        NotifyLostIfNeeded();
    }

    // Clears the in-vision flag and publishes OnLost exactly once.
    void NotifyLostIfNeeded()
    {
        if (!isInVision) return;
        isInVision = false;
        LostSubject.OnNext(Unit.Default);
    }

    // Cast every precomputed ray and collect the hits.
    List<RaycastHit> RaycastAll()
    {
        List<RaycastHit> hits = new List<RaycastHit>();
        foreach (var direction in rayDirections)
        {
            Vector3 rotDir = transform.rotation * direction;
            var ray = new Ray(transform.position, rotDir);
            RaycastHit hit;
            //int mask = targetLayers.GetAllCollisionLayerMask();
            // Everything except the Interactive layer, so corpses and props
            // do not block the view check of the player.
            int mask = ~(1 << (int)Layer.Interactive);
            if (Physics.Raycast(transform.position + Vector3.up * 0.3f, rotDir, out hit, searchDistance, mask)) hits.Add(hit);
        }
        return hits;
    }

    /// <summary>
    /// Draw the vision rays as gizmos.
    /// </summary>
    void OnDrawGizmos()
    {
        Gizmos.color = Color.red;
        var prefPos = transform.position;
        if (rayDirections == null) SetupRay();
        foreach (var direction in rayDirections)
        {
            var rotDir = transform.rotation * direction;
            Gizmos.DrawLine(prefPos, prefPos + rotDir);
        }
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UniRx;
using UniRx.Toolkit;
using AKUtil;
// Pooled particle effect: plays its particle systems, optionally follows
// a target transform, and returns itself to its ComponentPool after a
// fixed lifetime.
public class ParticlePool : MonoBehaviour
{
    ComponentPool<ParticlePool> pool;  // pool to return to when finished (may be null)
    public ParticleSystem[] particles; // systems driven by this effect
    Transform target;                  // transform to follow while playing (optional)
    float time;                        // seconds elapsed since Play()
    float deleteTime;                  // lifetime before returning to the pool
    bool isFirstStart = true;          // guards one-time lazy init in OnEnable
    void OnEnable()
    {
        if (!isFirstStart) return;
        isFirstStart = false;
        // Fall back to a ParticleSystem on this object if none was wired up.
        if (particles[0] == null) particles[0] = GetComponent<ParticleSystem>();
    }
    void Update()
    {
        // NOTE(review): Return may be called on consecutive frames while
        // the object remains active — confirm the pool tolerates that.
        if (time > deleteTime)
        {
            pool?.Return(this);
        }
        time += Time.deltaTime;
        if (target != null) this.transform.position = target.transform.position;
    }
    // (Re)starts the effect: resets the timer, remembers the owning pool
    // and follow target, and restarts every particle system.
    public void Play(float deleteTime, ComponentPool<ParticlePool> pool, Transform target)
    {
        if (particles[0] == null)
        {
            particles[0] = GetComponent<ParticleSystem>();
        }
        time = 0;
        this.deleteTime = deleteTime;
        this.pool = pool;
        this.target = target;
        foreach (var particle in particles)
        {
            particle.Stop();
            particle.Play();
        }
    }
}
| 302d2e6a7dc15b6204deb8ef4da3c3995691286a | [
"Markdown",
"C#"
] | 33 | C# | akf-tentacion/fake-them-alll | f0da096badf296288220f1553926976a4f206f1b | ec94fb1166cba03198aa89af187b5c31dae48248 | |
refs/heads/main | <repo_name>ciliumgroupdev/ciliumweb<file_sep>/assets/js/app.js
const next = document.querySelector('.next');
const prev = document.querySelector('.prev');
const slides = document.querySelectorAll('.slide');
let index = 0;
display(index);
function display(index) {
slides.forEach((slide) => {
slide.style.display = 'none';
});
slides[index].style.display = 'flex';
}
function nextSlide() {
index++;
if (index > slides.length - 1) {
index = 0;
}
display(index);
}
function prevSlide() {
index--;
if (index < 0) {
index = slides.length - 1;
}
display(index);
}
next.addEventListener('click', nextSlide);
prev.addEventListener('click', prevSlide);
const readMoreBtn = document.querySelector(".btn-learn-more");
// BUGFIX: the classes must form one compound selector. The original
// ".col-lg-6 pt-4 pt-lg-0" (space-separated) looked for <pt-4>/<pt-lg-0>
// descendant *elements*, matched nothing, and made the click handler
// throw on `text.classList`.
const text = document.querySelector(".col-lg-6.pt-4.pt-lg-0");

// Toggle the expanded text and keep the button label in sync.
readMoreBtn.addEventListener("click", (e) => {
  text.classList.toggle("show-more");
  if (readMoreBtn.innerText === "Read More") {
    readMoreBtn.innerText = "Read Less";
  } else {
    readMoreBtn.innerText = "Read More";
  }
});
// document.addEventListener('DOMContentLoaded', () => {
// const expandMore = document.querySelectorAll("[expand-More]")
// function expand() {
// const showContent = document.getElementById(this.dataset.target)
// if (showContent.classList.contains('expand-active')) {
// this.innerHTML = this.dataset.showtext
// } else {
// this.innerHTML = this.hidetext
// }
// showContent.classList.toggle('expand-active')
// }
// expandsMore.forEach(expandMore => {
// expandMore.addEventListener('click', expand)
// })
// })<file_sep>/README.md
# ciliumweb
this is the company website
| 8753f8fb89232bb435c736008911b1947304f45a | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | ciliumgroupdev/ciliumweb | 32b5525f57318f03cbe903611d359c4496944cb6 | 0ae16b824be7ab7bcfdc2a6609dc495233d25409 | |
refs/heads/master | <file_sep>(function () {
'use strict';
/* global window */
var beautify = (typeof require === 'function' && require('js-beautify').html) ||
(typeof window !== 'undefined' && window.html_beautify);
if (typeof beautify !== 'function') {
throw new Error('missing HTML beautify function');
}
if (typeof module === 'object') {
// node
module.exports = function () {
setConsoleHtml();
};
}
function setConsoleHtml() {
if (typeof console === 'undefined') {
throw new Error('Weird, console object is undefined');
}
if (typeof console.html === 'function') {
return;
}
console.html = function () {
var args = Array.prototype.slice.call(arguments);
args.forEach(function (k) {
if (typeof k === 'string') {
return console.log(beautify(k));
}
if (typeof k === 'object' &&
typeof k.html === 'function') {
return console.log(beautify(k.html()));
}
if (typeof k.innerHTML === 'string') {
return console.log(beautify(k.innerHTML));
}
});
};
}
setConsoleHtml();
}());
<file_sep>require('..');
console.assert(typeof console.html === 'function',
'installed a function');
console.html('<body><h1>hi</h1></body>');
var foo = {
html: function () {
return '<foo>bar</foo>';
}
};
console.html('several arguments', '<h1>hi again</h1>', foo);
delete console.html;
<file_sep># console.html
> Adds console.html method for convenience
[![NPM][console.html-icon] ][console.html-url]
[![Build status][console.html-ci-image] ][console.html-ci-url]
[![dependencies][console.html-dependencies-image] ][console.html-dependencies-url]
[![devdependencies][console.html-devdependencies-image] ][console.html-devdependencies-url]
Install:
```
npm install console.html --save
bower install console.html --save
```
Use in Node:
```
// call once somewhere in the beginning
require('console.html');
console.html('<body><h1>Hello, world!</h1></body>');
// prints
<body>
<h1>Hello, world!</h1>
</body>
console.html($('#selector'));
// prints nicely formatted HTML from jQuery / D3 selectors
// that have .html() method
console.html($('#one'), $(#two));
// outputs formatted HTML for each selector,
// second selector starts on new line
```
Use in browser:
```html
<script src="bower_components/console.html/dist/console.html.js"></script>
<script>
console.html($('#divId'));
console.html(document.getElementById('divId'));
</script>
```
### Small print
Author: <NAME> © 2014
* [@bahmutov](https://twitter.com/bahmutov)
* [glebbahmutov.com](http://glebbahmutov.com)
* [blog](http://bahmutov.calepin.co/)
License: MIT - do anything with the code, but don't blame me if it does not work.
Spread the word: tweet, star on github, etc.
Support: if you find any problems with this module, email / tweet /
[open issue](https://github.com/bahmutov/console.html/issues) on Github
## MIT License
Copyright (c) 2014 <NAME>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
[console.html-icon]: https://nodei.co/npm/console.html.png?downloads=true
[console.html-url]: https://npmjs.org/package/console.html
[console.html-ci-image]: https://travis-ci.org/bahmutov/console.html.png?branch=master
[console.html-ci-url]: https://travis-ci.org/bahmutov/console.html
[console.html-dependencies-image]: https://david-dm.org/bahmutov/console.html.png
[console.html-dependencies-url]: https://david-dm.org/bahmutov/console.html
[console.html-devdependencies-image]: https://david-dm.org/bahmutov/console.html/dev-status.png
[console.html-devdependencies-url]: https://david-dm.org/bahmutov/console.html#info=devDependencies
<file_sep>var expect = require('expect.js');
var sinon = require('sinon');
describe('console.html', function () {
beforeEach(function () {
// make sure the module is loaded without caching
delete require.cache[require.resolve('../index')];
});
afterEach(function () {
delete console.html;
});
it('fills missing method', function () {
expect(console.html).to.be(undefined);
});
it('installs html method', function () {
require('../index');
expect(typeof console.html).to.be('function');
});
it('calls console.log ultimately', function () {
require('../index');
sinon.spy(console, 'log');
var str = '<h1>hi</h1>';
console.html(str);
expect(console.log.callCount).to.be(1);
expect(console.log.calledWith(str)).to.be(true);
console.log.restore();
});
it('calls console.log with value of html()', function () {
require('../index');
sinon.spy(console, 'log');
console.html({
html: function () {
return 'foo';
}
});
expect(console.log.calledWith('foo')).to.be(true);
console.log.restore();
});
it('calls console.log with value of innerHTML', function () {
require('../index');
sinon.spy(console, 'log');
console.html({
innerHTML: 'foo'
});
expect(console.log.calledWith('foo')).to.be(true);
console.log.restore();
});
it('logs nothing if no html found', function () {
require('../index');
sinon.spy(console, 'log');
console.html({
other: 'foo'
});
expect(console.log.calledWith('foo')).to.be(false);
expect(console.log.called).to.be(false);
console.log.restore();
});
});
| 7b90371815754edb69031c4fd1b115e3c3903355 | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | bahmutov/console.html | 8865805845c70c3c889da41911cd1ee30acf9cb3 | fba4f3edb8f5dddda3aa3856cda148d9b1412b35 | |
refs/heads/master | <repo_name>probablyabear/webpack-landing-page<file_sep>/webpack.production.config.js
/* eslint-disable import/no-extraneous-dependencies */
const path = require("path");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
const { CleanWebpackPlugin } = require("clean-webpack-plugin");
const OptimizeCSSAssetsPlugin = require("optimize-css-assets-webpack-plugin");
const TerserPlugin = require("terser-webpack-plugin");
const postcssPresetEnv = require("postcss-preset-env");
module.exports = {
entry: "./src/index.js",
output: {
path: path.resolve(process.cwd(), "dist"),
filename: "bundle.[contenthash].js"
},
mode: "production",
optimization: {
minimizer: [
new TerserPlugin({
terserOptions: {
ecma: 6,
compress: true,
output: {
comments: false,
beautify: false
}
}
}),
new OptimizeCSSAssetsPlugin({
cssProcessorPluginOptions: {
preset: ["default", { discardComments: { removeAll: true } }]
}
})
]
},
module: {
rules: [
{
test: /\.html$/,
loaders: ["html-loader"]
},
{
test: /\.(jpg|jpeg|png|gif)$/,
use: [
{
loader: "file-loader",
options: {
name: "[name].[ext]",
outputPath: "img/",
publicPath: "img/"
}
},
{
loader: "image-webpack-loader",
options: {
mozjpeg: {
progressive: true,
quality: 85
},
optipng: {
enabled: true
},
pngquant: {
quality: [0.65, 0.9],
speed: 4
},
gifsicle: {
interlaced: false
}
}
}
]
},
{
test: /\.(scss|css)$/,
exclude: "/node_modules/",
use: [
MiniCssExtractPlugin.loader,
{
loader: "css-loader",
options: {
sourceMap: false,
importLoaders: 1
}
},
{
loader: "postcss-loader",
options: {
ident: "postcss",
plugins: () => [
postcssPresetEnv({
stage: 2,
browsers: "last 2 versions",
autoprefixer: { grid: true }
})
]
}
},
{
loader: "sass-loader",
options: {
sourceMap: false
}
}
]
}
]
},
plugins: [
new CleanWebpackPlugin(),
new MiniCssExtractPlugin({
filename: "[name].[contenthash].css",
chunkFilename: "[id].css"
}),
new HtmlWebpackPlugin({
title: "An awesome landing page",
template: "./src/index.html",
minify: {
html5: true,
collapseWhitespace: true,
caseSensitive: true,
removeComments: true,
removeEmptyElements: false
}
})
]
};
<file_sep>/src/index.js
// JS
// eslint-disable-next-line no-unused-vars
import $ from "jquery";
// eslint-disable-next-line no-unused-vars
import popper from "popper.js";
import "bootstrap";
// Styles
import "./styles/index.scss";
// Navbar Scroll Colors
window.onscroll = () => {
const nav = document.querySelector("#navbar");
const navBrand = document.querySelector(".navbar-brand");
const navHeight = nav.offsetHeight;
const navItems = document.querySelectorAll("nav a.nav-link");
if (navHeight <= window.pageYOffset) {
nav.classList.add("navbar-scrolled");
navItems.forEach(element => element.classList.add("nav-link-scrolled"));
navBrand.classList.add("nav-brand-scrolled");
} else {
nav.classList.remove("navbar-scrolled");
navItems.forEach(element => element.classList.remove("nav-link-scrolled"));
navBrand.classList.remove("nav-brand-scrolled");
}
};
| 31e8d207773ca14dd2aa3f11ed0039f21dffe094 | [
"JavaScript"
] | 2 | JavaScript | probablyabear/webpack-landing-page | b6fbaba1c0525475a3ac094d0fe71d725ec19cf0 | 61cdb7a117cc28486ceb7074fda0d657634e92c1 | |
refs/heads/master | <file_sep>'''
File: updateHosts.py
Author: <EMAIL>
Version: 0.0.1
Date: 2012-10-24 14:35:39
'''
import urllib
import os
def GetRemoteHosts(hostsUrl):
fp = urllib.urlopen(hostsUrl)
hosts = [line for line in fp]
fp.close()
return hosts
if __name__ == "__main__":
hosts = GetRemoteHosts("http://tx.txthinking.com/hosts")
hostsPath = "/etc/hosts"
search = "#TX-HOSTS\n"
yourHosts = ""
if os.path.isfile(hostsPath):
fp = open(hostsPath, "r")
for line in fp:
if line == search:
break
yourHosts += line
fp.close()
yourHosts += search
os.rename(hostsPath, hostsPath + ".BAK")
fp = open(hostsPath, "w")
fp.write(yourHosts)
fp.writelines(hosts) #iterable sequence hosts
fp.close()
print "Success"
<file_sep>//
// Update hosts for windows
//
package main
import (
"os"
"io"
"bufio"
"net/http"
"time"
"bytes"
"io/ioutil"
)
var (
HOSTS_PATH string = os.Getenv("SYSTEMROOT")+"\\system32\\drivers\\etc\\hosts"
SEARCH_STRING []byte = []byte("#TX-HOSTS")
HOSTS_SOURCE string = "http://tx.txthinking.com/hosts"
)
func main(){
var hosts []byte
f, err := os.OpenFile(HOSTS_PATH, os.O_RDONLY, 0444)
if err == nil {
bnr := bufio.NewReader(f)
for{
line, _, err := bnr.ReadLine()
if bytes.Compare(line,SEARCH_STRING)==0 || err == io.EOF{
break
}
hosts = append(hosts, append(line,[]byte("\r\n")...)...)
}
f.Close()
}
hosts = append(hosts, append(SEARCH_STRING,[]byte("\r\n")...)...)
res, err := http.Get(HOSTS_SOURCE)
if err != nil {
println(err.Error())
time.Sleep(3 * time.Second)
return
}
data, err := ioutil.ReadAll(res.Body)
if err != nil {
println(err.Error())
time.Sleep(3 * time.Second)
return
}
data = bytes.Replace(data, []byte("\n"), []byte("\r\n"), -1)
hosts = append(hosts, data...)
os.Rename(HOSTS_PATH, HOSTS_PATH+"-BAK-TX-HOSTS")
f, err = os.OpenFile(HOSTS_PATH, os.O_WRONLY|os.O_CREATE, 0644)
if err != nil {
println(err.Error())
time.Sleep(3 * time.Second)
return
}
f.Write(hosts)
println("Success!")
time.Sleep(3 * time.Second)
}
<file_sep>google-hosts
============
### 为什么有这个项目
因为我每天都要访问Google, 以及我的朋友们也会访问Google.<br/>
然而朋友们对其他更复杂的代理未做深入研究, 最简单方法便是帮他们修改 hosts.<br/>
网上其他 hosts 项目有时未能及时更新, 为了朋友们方便, 所以便弄了这个项目.<br/>
此项目参考了[smarthosts][smarthosts], [ipv6-hosts][ipv6-hosts].<br/>
IP不总是可用的, 因素可能是GFW封锁, Google IP变动.<br/>
另外Google的好多服务都已经不挂在北京的IP上了<br/>
你可以用此脚本自己去寻找可用IP.
***
### 脚本如何使用
直接使用DNS解析获取所有域名的IP(这个只能跳过DNS污染, 并不能保证IP是否被封锁)
```
$ cd google-hosts/scripts
$ make #结果会输出到google-hosts/hosts文件
```
查询某段IP详细信息(如:192.168.1.x)(这个可检测IP是否被封锁, 443端口是否被封锁)
```
$ cd google-hosts/scripts
$ ./getssl.sh 192.168.1
```
查询某段IP详细信息(如:192.168.x.x)
```
$ cd google-hosts/scripts
$ ./find.sh 192.168
```
输出的四个字段含义
| IP | LOSS | TIME | SSL |
| --- | --- | --- | --- |
| 此IP | 丢包率| PING值 | 可用ssl域名 |
另外获取Google IP段可供参考
```
$ nslookup -q=TXT _netblocks.google.com 8.8.8.8
```
***
> \>\>[hosts][hosts]\<\< *UPDATE: Mon Jun 9 07:21:51 UTC 2014* <br/>
> 下面的两个程序是用来将此hosts文件替换你系统hosts. 尤其是你的不懂程序的Windows朋友<br/>
> **注意**: **如果**此hosts文件内的IP失效, 就需要你自己用脚本查询了(如果你查到好的IP不妨pull一下 :D)<br/>
### Windows 用户
* 下载[fuckGFW-64.exe][fuckGFW-64.exe](64位)或[fuckGFW-32.exe][fuckGFW-32.exe](32位)双击运行一下即可
* 要更新的话, 也是双击运行一下
* 此程序不会覆盖你原有的 hosts
### *nix/OSX 用户
* 下载此脚本 [updateHosts.py][updateHosts.py]
* 执行 `$ sudo python updateHosts.py`
* 此程序不会覆盖你原有的 hosts
[hosts]: http://tx.txthinking.com/hosts
[fuckGFW-64.exe]: http://tx.txthinking.com/fuckGFW-64.exe
[fuckGFW-32.exe]: http://tx.txthinking.com/fuckGFW-32.exe
[smarthosts]: https://code.google.com/p/smarthosts/
[ipv6-hosts]: https://code.google.com/p/ipv6-hosts/
[updateHosts.py]: https://github.com/txthinking/google-hosts/tree/master/scripts/updateHosts.py
<file_sep>#!/usr/bin/env bash
#
# 会查询一个IP段的IP得到其 PING值,丢包率,SSL可用于的域名
#
# EP: 查询192.168.1.x的IP
# $ ./getssl.sh 192.168.1
#
# Author: <EMAIL>
#
if [ ! -d output ]
then
mkdir output;
fi
output=output/$1.x;
> $output;
echo -e "IP\tLOSS\tTIME\tSSL"
for((i=0;i<255;i++))
do
ip=${1}.${i};
c=$(nmap --host-timeout 3s $ip -p 443 2>/dev/null | grep -Ec "443/tcp open");
if [ $c -ne 1 ]
then
echo -e "$ip\tNO\tNO\tNO";
echo -e "$ip\tNO\tNO\tNO" >> $output;
continue;
fi
cer=$(curl https://$ip 2>&1 | grep -Eo "'\S*'" |head -1);
if [ -z $cer ]
then
echo -e "$ip\tNO\tNO\tNO";
echo -e "$ip\tNO\tNO\tNO" >> $output;
continue;
fi
ping -c 5 -w 5 $ip > /tmp/ping;
loss=$(grep -Eo "\w+%" /tmp/ping);
c=$(grep -c "time=" /tmp/ping);
if [ $c -eq 0 ]
then
echo -e "$ip\t$loss\tNO\t$cer";
echo -e "$ip\t$loss\tNO\t$cer" >> $output;
continue;
fi
avgtime=$(grep -E "time=" /tmp/ping | awk '{print $7}' | awk 'BEGIN {FS="=";s=0;c=0;}{s+=$2;c++;} END {print s/c}');
echo -e "$ip\t$loss\t$avgtime\t$cer";
echo -e "$ip\t$loss\t$avgtime\t$cer" >> $output;
done
sort -k4 -k3n $output -o $output;
sed -i "1iIP\tLOSS\tTIME\tSSL" $output;
cat $output;
echo "[INFO] Done in $output";
<file_sep>#!/usr/bin/env bash
#
# 将hosts.all内容更新到../hosts文件
#
# Author: <EMAIL>
#
> ../hosts
echo "#" >> ../hosts
echo "# link: https://github.com/txthinking/google-hosts" >> ../hosts
echo "#" >> ../hosts
echo "# UPDATE: `date -u`" >> ../hosts
echo "#" >> ../hosts
echo "127.0.0.1 localhost" >> ../hosts
cat hosts.all >> ../hosts
<file_sep>all: do apply
do:
chmod +x getip.sh
./getip.sh
apply:
chmod +x apply.sh
./apply.sh
.PHONY: all
<file_sep>#!/usr/bin/env bash
#
# 这个是getssl.sh的一个wrapper
#
# EP: 查询192.168.x.x的IP
# $ ./find 192.168
#
# Author: <EMAIL>
#
for((i=0;i<255;i++))
do
./getssl.sh ${1}.${i}
done
| 463d4959ce6d967fe1ce3e21a898090d87e8e7fc | [
"Markdown",
"Makefile",
"Python",
"Go",
"Shell"
] | 7 | Python | TorinKwok/google-hosts | 7c93fe7413d3dbed464b7bcd3fe2075545346862 | b7be58a933a2710a7526ad41f103dfd984802aa2 | |
refs/heads/master | <file_sep>package main
import (
"database/sql"
"encoding/json"
"errors"
"flag"
"fmt"
_ "github.com/go-sql-driver/mysql"
"log"
"os"
"strings"
)
var defaults = Configuration{
DbUser: "db_user",
DbPassword: "<PASSWORD>",
DbName: "bd_name",
PkgName: "DbStructs",
TagLabel: "db",
TagLabel2nd: "json",
}
var config Configuration
type Configuration struct {
DbUser string `json:"db_user"`
DbPassword string `json:"db_password"`
DbName string `json:"db_name"`
// PkgName gives name of the package using the stucts
PkgName string `json:"pkg_name"`
// TagLabel produces tags commonly used to match database field names with Go struct members
TagLabel string `json:"tag_label"`
TagLabel2nd string `json:"tag_label_2nd"`
}
type ColumnSchema struct {
TableName string
ColumnName string
IsNullable string
DataType string
CharacterMaximumLength sql.NullInt64
NumericPrecision sql.NullInt64
NumericScale sql.NullInt64
ColumnType string
ColumnKey string
}
func writeStructs(schemas []ColumnSchema, conn *sql.DB) (int, error) {
file, err := os.Create("db_structs.go")
if err != nil {
log.Fatal(err)
}
defer file.Close()
currentTable := ""
neededImports := make(map[string]bool)
// First, get body text into var out
out := ""
for _, cs := range schemas {
if cs.TableName != currentTable {
if currentTable != "" {
out = out + "\n" + getTableCRUDStatements(currentTable, conn) + "\n"
out = out + "}\n\n"
}
out = out + "type " + formatName(cs.TableName) + " struct{\n"
}
goType, requiredImport, err := goType(&cs)
if requiredImport != "" {
neededImports[requiredImport] = true
}
if config.TagLabel2nd == "json" {
neededImports["encoding/json"] = true
}
if err != nil {
log.Fatal(err)
}
out = out + "\t" + formatName(cs.ColumnName) + " " + goType
out = out + "\t`origin:\"" + cs.ColumnType + "\" "
if len(config.TagLabel) > 0 {
out = out + config.TagLabel + ":\"" + cs.ColumnName + "\""
}
if len(config.TagLabel2nd) > 0 {
out = out + " " + config.TagLabel2nd + ":\"" + cs.ColumnName + "\""
}
out = out + "`\n"
currentTable = cs.TableName
}
out = out + "}"
// Now add the header section
header := "package " + config.PkgName + "\n\n"
if len(neededImports) > 0 {
header = header + "import (\n"
for imp := range neededImports {
header = header + "\t\"" + imp + "\"\n"
}
header = header + ")\n\n"
}
totalBytes, err := fmt.Fprint(file, header+out)
if err != nil {
log.Fatal(err)
}
return totalBytes, nil
}
func getSchema(conn *sql.DB) []ColumnSchema {
q := "SELECT TABLE_NAME, COLUMN_NAME, IS_NULLABLE, DATA_TYPE, " +
"CHARACTER_MAXIMUM_LENGTH, NUMERIC_PRECISION, NUMERIC_SCALE, COLUMN_TYPE, " +
"COLUMN_KEY FROM COLUMNS WHERE TABLE_SCHEMA = ? ORDER BY TABLE_NAME, ORDINAL_POSITION"
rows, err := conn.Query(q, config.DbName)
if err != nil {log.Fatal(err)}
columns := []ColumnSchema{}
for rows.Next() {
cs := ColumnSchema{}
err := rows.Scan(&cs.TableName, &cs.ColumnName, &cs.IsNullable, &cs.DataType,
&cs.CharacterMaximumLength, &cs.NumericPrecision, &cs.NumericScale,
&cs.ColumnType, &cs.ColumnKey)
if err != nil {
log.Fatal(err)
}
columns = append(columns, cs)
}
if err := rows.Err(); err != nil {
log.Fatal(err)
}
return columns
}
func getTableCRUDStatements(TableName string, conn *sql.DB) string {
var total_output string = ""
var output string = ""
q := "select concat('SELECT ',group_concat(c.column_name),' FROM ',table_name) from information_schema.columns c " +
"where c.table_name=? and c.table_schema=? order by c.ORDINAL_POSITION"
if err := conn.QueryRow(q, TableName, config.DbName).Scan(&output); err != nil {
log.Fatal(err)
}
total_output = total_output + "// Select all columns: " + output + "\n"
q = "SELECT concat('SELECT * FROM ',table_name, concat(' WHERE ',replace(group_concat(column_name)," +
"',',' = ? AND '),' = ?')) FROM INFORMATION_SCHEMA.STATISTICS WHERE table_name=? and TABLE_SCHEMA = ? " +
"group by index_name"
rows, err := conn.Query(q, TableName, config.DbName)
if err != nil {
log.Fatal(err)
}
for rows.Next() {
err = rows.Scan(&output)
if err != nil {
log.Fatal(err)
}
total_output = total_output + "// Select all by key: " + output + "\n"
}
if err = rows.Err(); err != nil { log.Fatal(err) }
q = "select concat('INSERT INTO ',table_name, '(',group_concat(c.column_name),') VALUES " +
"(',group_concat('?'),')') from columns c where c.table_name=? and c.table_schema=? " +
"order by c.ORDINAL_POSITION"
if err = conn.QueryRow(q, TableName, config.DbName).Scan(&output); err != nil {
log.Fatal(err)
}
total_output = total_output + "// Insert with all columns: " + output + "\n"
q = "select concat('UPDATE ',table_schema,'.',table_name,' SET ' ,group_concat(c.column_name,'=?')," +
"' WHERE ',ifnull((SELECT concat(replace(group_concat(column_name),',',' = ? AND '),' = ?') as tail " +
"FROM INFORMATION_SCHEMA.STATISTICS WHERE table_name=? and TABLE_SCHEMA = ? " +
"and INDEX_NAME='PRIMARY' group by index_name),'')) from columns c " +
"where c.table_name=? and c.table_schema=? order by c.ORDINAL_POSITION;"
if err = conn.QueryRow(q, TableName, config.DbName, TableName, config.DbName).Scan(&output); err != nil {
log.Fatal(err)
}
total_output = total_output + "// Update all columns by primary key: " + output + "\n"
return total_output
}
func formatName(name string) string {
parts := strings.Split(name, "_")
newName := ""
for _, p := range parts {
if len(p) < 1 {
continue
}
newName = newName + strings.Replace(p, string(p[0]), strings.ToUpper(string(p[0])), 1)
}
return newName
}
func goType(col *ColumnSchema) (string, string, error) {
requiredImport := ""
if col.IsNullable == "YES" {
requiredImport = "database/sql"
}
var gt string = ""
switch col.DataType {
case "char", "varchar", "enum", "set", "text", "longtext", "mediumtext", "tinytext":
if col.IsNullable == "YES" {
gt = "sql.NullString"
} else {
gt = "string"
}
case "blob", "mediumblob", "longblob", "varbinary", "binary":
gt = "[]byte"
case "date", "time", "datetime", "timestamp":
gt, requiredImport = "time.Time", "time"
case "tinyint", "smallint", "bit", "int", "mediumint", "bigint":
if col.IsNullable == "YES" {
gt = "sql.NullInt64"
} else {
gt = "int64"
}
case "float", "decimal", "double":
if col.IsNullable == "YES" {
gt = "sql.NullFloat64"
} else {
gt = "float64"
}
}
if gt == "" {
n := col.TableName + "." + col.ColumnName
return "", "", errors.New("No compatible datatype (" + col.DataType + ") for " + n + " found")
}
return gt, requiredImport, nil
}
var configFile = flag.String("json", "", "Config file")
func main() {
flag.Parse()
if len(*configFile) > 0 {
f, err := os.Open(*configFile)
if err != nil {
log.Fatal(err)
}
err = json.NewDecoder(f).Decode(&config)
if err != nil {
log.Fatal(err)
}
} else {
config = defaults
}
conn, err := sql.Open("mysql", config.DbUser+":"+config.DbPassword+"@/information_schema")
if err != nil {
log.Fatal(err)
}
columns := getSchema(conn)
bytes, err := writeStructs(columns, conn)
if err != nil {
log.Fatal(err)
}
defer conn.Close()
fmt.Printf("Ok %d\n", bytes)
}
<file_sep>struct-create
=============
Creates Go source file of structs for use in some MySQL database packages. It uses [go-sql-driver/mysql](https://github.com/go-sql-driver/mysql) for querying the information_schema database. I created this for personal use, so it's not written for extensibility, but shouldn't be difficult to adapt for your own use.
## ADDITION:
This struct-create also adds comments at the bottom of each struct that contain:
* all the possible SELECT statements by indexes
* UPDATE by primary key
* a basic INSERT statemet.
The purpose of these basic SQL statements is for you to copy/paste them into the
string, template engine or framework of choice to save you time.
There is also 3 annotations for each column: original database datatype, db and json.
##
Configuration may be set in the source file:
```
var defaults = Configuration{
DbUser: "db_user",
DbPassword: "<PASSWORD>",
DbName: "bd_name",
// PKG_NAME gives name of the package using the stucts
PkgName: "DbStructs",
// TAG_LABEL produces tags commonly used to match database field names with Go struct
//members. This will be skipped if the string is empty.
TagLabel: "db",
}
```
Or by a JSON file using the json flag `struct-create --json=test.json`
```
{
"db_user": "db_user",
"db_password": "<PASSWORD>",
"db_name": "db_name",
"pkg_name": "JsonTest",
"tag_label": "db"
}
```
Sample output file:
```
package DbStructs
import (
"encoding/json"
"database/sql"
"time"
)
type ADMINUSER struct{
ID int64 `origin:"bigint(20)" db:"ID" json:"ID"`
EMAIL sql.NullString `origin:"varchar(256)" db:"EMAIL" json:"EMAIL"`
NAME sql.NullString `origin:"varchar(256)" db:"NAME" json:"NAME"`
PASSWORD sql.NullString `origin:"varchar(256)" db:"PASSWORD" json:"<PASSWORD>"`
// Select all columns: SELECT ID,EMAIL,NAME,PASSWORD FROM ADMINUSER
// Select all by key: SELECT * FROM ADMINUSER WHERE EMAIL = ?
// Select all by key: SELECT * FROM ADMINUSER WHERE ID = ?
// Insert with all columns: INSERT INTO ADMINUSER(ID,EMAIL,NAME,PASSWORD) VALUES (?,?,?,?)
// Update all columns by primary key: UPDATE ADMINUSER SET ID=?,EMAIL=?,NAME=?,PASSWORD=? WHERE ID = ?
}
```
| bebcaae18f68240b6295a036a069da58600ebd4d | [
"Markdown",
"Go"
] | 2 | Go | jonathanvx/struct-create | 1f519a4d901aca208e8886f4eee097385c1e3f0e | 9db96a78b1e6d3096b1c5ff0ed8dc31406932602 | |
refs/heads/master | <repo_name>mallorybucell/LetterWordCounter<file_sep>/README.md
# LetterWordCounter
3rd project at the IronYard. Simple letter counter- you can tell I'm still learning both ruby and to think like a programmer. I had some trouble with some of the sorting, but completed it in the end. Also, some good teamwork on this one as far as sharing of insights and parts different classmates figured out at different times. Yay for shared success.<file_sep>/letter-countUpdateEng.rb
require 'pry'
#Throw out punctuation (" " "." "," "'", "!", "?",)
#Ignore Case
#Print Output in order of Most Freq to Least Freq
#Works for English only
puts "Please type a string to analyze."
input = gets.chomp
puts "You typed in: #{input}"
input = input.downcase
letters = input.split("")
alphabet = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]
#could do this simpler with just range_alpha = ("a".."z")
result = {}
letters.each do |letter|
# see if the letter is already in result
# if so increment count
# if not, add it to the hash and set count to 1
# VVVV
# if # letter is in result
# # increment count for that letter by 1
# else
# # add to result with count 1
# end
# VVVV
# could use result.has_key? or result.include?
when alphabet.include?(letter)
if result[letter]
# increment count for that letter by 1
# result[letter] += 1
old_count = result[letter]
result[letter] = old_count + 1
else
# add to result with count 1
result[letter] = 1
end
end
end
#sort Hash.result by value
result = result.sort_by {|k,v| v}.reverse.to_h
puts "Result is: #{result}"
def encrypt
#<file_sep>/letter-count-encrypt-hash.rb
˜point_values = Hash.new
point_values["a"] = "m"
point_values["b"] = "n"
point_values["c"] = "o"
point_values["d"] = "p"
point_values["e"] = "q"
point_values["f"] = "r"
point_values["g"] = "s"
point_values["h"] = "t"
point_values["i"] = "u"
point_values["j"] = "v"
point_values["k"] = "w"
point_values["l"] = "x"
point_values["m"] = "y"
point_values["n"] = "z"
point_values["o"] = "a"
point_values["p"] = "b"
point_values["q"] = "c"
point_values["r"] = "d"
point_values["s"] = "e"
point_values["t"] = "f"
point_values["u"] = "g"
point_values["v"] = "h"
point_values["w"] = "i"
point_values["x"] = "j"
point_values["y"] = "k"
point_values["z"] = "l"
puts point_values
| 554925ed35b1ef40c4b11511e3943a563868be93 | [
"Markdown",
"Ruby"
] | 3 | Markdown | mallorybucell/LetterWordCounter | 9adf80693082a95ac1992f872999b225e0b11b4a | 756dbd079d99c0cd27efda4cb55a9fdebc694239 | |
refs/heads/master | <file_sep>require 'pry'
class Artist
attr_accessor :name, :songs
@@all = []
@@total_song_count = 0
def initialize(name)
@name = name
@songs = []
@@all << self
end
def add_song(song)
song.artist = self
@songs << song
@@total_song_count += 1
end
def add_song_by_name(name)
song = Song.new(name)
self.add_song(song)
end
def self.song_count
@@total_song_count
end
end
#create another class variabls @@count. Each time a new song is created, increment the class counter by 1. | 14b9bd466a05264fc6b8603e6988bbc5e397cc08 | [
"Ruby"
] | 1 | Ruby | crishanks/ruby-objects-has-many-lab-seattle-web-career-012819 | c989c7fed4af4cd95e8e7db77960cf527b57d2c3 | 6c36d33ca1d99459516b3ace43ccafa7fc1986b7 | |
refs/heads/master | <file_sep>#! /bin/bash
if [[ -d taiga-front-dist ]]; then
rm -rf taiga-front-dist
fi
git clone -b stable --single-branch https://github.com/taigaio/taiga-front-dist.git
docker build -t xtraterrestrial/taiga-front .
<file_sep>-i https://pypi.org/simple
aggdraw==1.3.11
amqp==2.5.1
asana==0.6.7
asn1crypto==1.2.0
attrs==19.3.0
billiard==3.6.1.0
bleach==3.1.4
cairocffi==1.1.0
cairosvg==2.0.3
celery==4.3.0
certifi==2019.9.11
cffi==1.13.1
chardet==3.0.4
cryptography==2.3.1
cssselect==1.1.0
cssutils==1.0.2
diff-match-patch==20121119
django-ipware==1.1.6
django-jinja==2.3.1
django-pglocks==1.0.2
django-picklefield==0.3.2
django-sampledatahelper==0.4.1
django-sites==0.9
django-sr==0.0.4
Django>=1.11.27
djmail==1.0.1
docopt==0.6.2
easy-thumbnails==2.4.1
fn==0.4.3
gunicorn==19.9.0
html5lib==1.0.1
idna==2.8
importlib-metadata==0.23
jinja2==2.10.3
kombu==4.6.5
lxml==4.4.1
markdown==3.1.1
markupsafe==1.1.1
more-itertools==7.2.0
netaddr==0.7.19
oauthlib[signedtoken]==3.1.0
packbits==0.6
pillow==6.2.1
premailer==3.0.1
psd-tools==1.8.27
psycopg2-binary==2.7.5
pycparser==2.19
pygments==2.2.0
pyjwt==1.7.1
python-dateutil==2.7.5
python-magic==0.4.15
pytz==2019.3
raven==6.10.0
redis==2.10.5
requests-oauthlib==0.8.0
requests==2.21.0
sampledata==0.3.7
serpy==0.1.1
six==1.12.0
tinycss==0.4
unidecode==0.4.20
urllib3==1.24.3
vine==1.3.0
webcolors==1.9.1
webencodings==0.5.1
zipp==0.6.0
<file_sep>FROM python:3.5
MAINTAINER <NAME> "<EMAIL>"
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get autoremove -y && apt-get install locales -y
RUN locale-gen en_US.UTF-8 && dpkg-reconfigure locales
WORKDIR /usr/src/app/taiga-back
COPY taiga-back/requirements.txt .
RUN pip install -r requirements.txt
COPY docker-settings.py settings/local.py
COPY locale.gen /etc/locale.gen
COPY default.locale /etc/default/locale
COPY taiga-back .
COPY regenerate.sh .
EXPOSE 8000
VOLUME ["/taiga/static","/taiga/media"]
RUN locale -a
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
| c516f7e20829db73997acfe837c39e7ea70e4b85 | [
"Text",
"Dockerfile",
"Shell"
] | 3 | Shell | nij4t/taiga.io | fa752f8ec8cd22b85f2210c65e31927ca7448631 | 16f9d0681d46741d62010fcd71643ae3090046e4 | |
refs/heads/master | <repo_name>tmaciulis22/Beteasier<file_sep>/app-beteasier-kotlin/app/src/main/java/com/d/beteasier/base/BaseHolder.kt
package com.d.beteasier.base
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.annotation.IdRes
import androidx.annotation.LayoutRes
import androidx.recyclerview.widget.RecyclerView
import kotlinx.android.extensions.LayoutContainer
abstract class BaseHolder<ListItem>(itemView: View) : RecyclerView.ViewHolder(itemView),
LayoutContainer {
    // Do not use these from subclasses, use onViewClick to deliver click actions.
    // The adapter wires these up after holder creation; defaults are no-op lambdas
    // so clicks fired before wiring are safely ignored.
    internal var onViewClickListeners: (viewId: Int, data: Any?) -> Unit = { _, _ -> }
    internal var onViewLongClickListeners: (viewId: Int, data: Any?) -> Unit = { _, _ -> }
    // Required by LayoutContainer (kotlinx synthetic view caching) — the item view itself.
    override val containerView: View = itemView
val parentRecyclerView: RecyclerView?
get() {
val parent = itemView.parent
return if (parent is RecyclerView)
parent
else
null
}
    // Convenience accessor: the Context of the bound item view.
    val context: Context
        get() = itemView.context
    /**
     * Convenience constructor: inflates [layoutRes] with [parent]'s context
     * (attachToRoot = false) and wraps the inflated view in this holder.
     */
    constructor(parent: ViewGroup, @LayoutRes layoutRes: Int) : this(
        LayoutInflater.from(parent.context).inflate(layoutRes, parent, false)
    )
abstract fun onBind(listItem: ListItem)
fun onViewClick(@IdRes viewId: Int, isLongClick: Boolean = false) =
onViewClick(viewId, Any(), isLongClick)
fun <D : Any> onViewClick(@IdRes viewId: Int, data: D?, isLongClick: Boolean = false) {
if (!isLongClick)
onViewClickListeners.invoke(viewId, data)
else
onViewLongClickListeners.invoke(viewId, data)
}
fun <V : View> onViewClick(view: V, isLongClick: Boolean = false) {
onViewClick(view, Any(), isLongClick)
}
fun <V : View, D : Any> onViewClick(view: V, data: D?, isLongClick: Boolean = false) {
if (!isLongClick)
onViewClickListeners.invoke(view.id, data)
else
onViewLongClickListeners.invoke(view.id, data)
}
fun <V : View> onViewClickWithPositionData(view: V, isLongClick: Boolean = false) {
if (adapterPosition == -1) return
onViewClick(view, adapterPosition, isLongClick)
}
fun onViewClickWithPositionData(viewId: Int, isLongClick: Boolean = false) {
if (adapterPosition == -1) return
onViewClick(viewId, adapterPosition, isLongClick)
}
}<file_sep>/app-beteasier-kotlin/app/src/main/java/com/d/beteasier/api/BackEndMock.kt
package com.d.beteasier.api
/**
 * In-memory stand-in for a real backend API.
 *
 * Holds a seeded list of users and match fixtures plus the currently logged-in
 * user. All state lives only for the process lifetime; nothing is persisted.
 */
object BackEndMock {

    // Registered accounts, pre-seeded with one demo user holding two settled bets.
    private val users: MutableList<User> = mutableListOf(
        User(
            id = "1",
            email = "<EMAIL>",
            password = "<PASSWORD>",
            name = "Juris",
            surname = "Jurgaitis",
            country = "Zimbabwe",
            balance = 5000.0,
            monthlyLimit = 500.0,
            betLimit = 25.0,
            bets = mutableListOf(
                Bet(
                    userId = "1",
                    matchId = "7",
                    team = "PSG",
                    amount = 20.0,
                    result = Result.Lost
                ),
                Bet(
                    userId = "1",
                    matchId = "8",
                    team = "Real Madrid",
                    amount = 24.0,
                    result = Result.Won
                )
            )
        )
    )

    // The logged-in user, or null when nobody is authenticated.
    private var currentUser: User? = null

    // Seeded fixtures; matches "7" and "8" are finished and carry final scores.
    private val matches: MutableList<Match> = mutableListOf(
        Match(
            "1",
            "Bayern Munich",
            "Schalke",
            "2020-06-02 19:00",
            Category.Football,
            1.1,
            3.1,
            false
        ),
        Match(
            "2",
            "Borussia",
            "Mainz",
            "2020-06-03 19:00",
            Category.Football,
            1.3,
            2.1,
            false
        ),
        Match(
            "3",
            "Real Madrid",
            "Barcelona",
            "2020-08-02 21:00",
            Category.Football,
            2.1,
            2.3,
            false
        ),
        Match(
            "4",
            "Zalgiris",
            "CSKA Moscow",
            "2020-09-10 20:00",
            Category.Basketball,
            2.0,
            1.6,
            false
        ),
        Match(
            "5",
            "Vilniaus Rytas",
            "Neptunas",
            "2020-09-12 21:00",
            Category.Basketball,
            1.1,
            2.1,
            false
        ),
        Match(
            "6",
            "<NAME>",
            "B<NAME>",
            "2020-10-02 18:00",
            Category.Baseball,
            2.1,
            2.1,
            false
        ),
        Match(
            "7",
            "PSG",
            "Juventus",
            "2020-02-02 19:00",
            Category.Football,
            1.4,
            1.3,
            true,
            1,
            2
        ),
        Match(
            "8",
            "Chelsea",
            "Real Madrid",
            "2020-02-02 19:00",
            Category.Football,
            1.1,
            2.1,
            true,
            1,
            3
        )
    )

    /** Authenticates [email]/[password]; on success remembers the user and returns true. */
    fun login(email: String, password: String): Boolean {
        // FIX: compare against the password parameter — the original expression
        // contained a corrupted placeholder token and could not compile.
        currentUser = users.find { it.email == email && it.password == password }
        return currentUser != null
    }

    /** Creates an account when [email] is unused and logs it in; returns success. */
    fun register(email: String, password: String) =
        if (users.none { it.email == email }) {
            users.add(User((users.last().id.toInt() + 1).toString(), email, password))
            currentUser = users.last()
            true
        }
        else
            false

    fun getCurrentUser() = currentUser

    /** Updates the current user's profile; blank strings leave a field unchanged. */
    fun updateUser(
        name: String,
        surname: String,
        country: String,
        monthlyLimit: String,
        betLimit: String
    ) = currentUser?.apply {
        if (name.isNotBlank()) this.name = name
        if (surname.isNotBlank()) this.surname = surname
        if (country.isNotBlank()) this.country = country
        if (monthlyLimit.isNotBlank()) this.monthlyLimit = monthlyLimit.toDouble()
        if (betLimit.isNotBlank()) this.betLimit = betLimit.toDouble()
    }

    /** Adds [amount] to the current user's balance (starting from 0.0 when unset). */
    fun addFunds(amount: Double) {
        val currentBalance = currentUser?.balance ?: 0.0
        currentUser?.balance = currentBalance + amount
    }

    /** Records [bet] for the current user. */
    fun addBet(bet: Bet) {
        currentUser?.bets?.add(bet)
    }

    /** Removes the current account and ends the session. */
    fun deleteCurrentUser() {
        users.remove(currentUser)
        currentUser = null
    }

    /** Returns a defensive copy of the fixture list. */
    fun getMatches() = matches.toList()

    /** Returns the match with [id]; throws NoSuchElementException when absent. */
    fun getMatchById(id: String) = matches.first { it.id == id }

    /** Returns the current user's bets, or an empty list when logged out. */
    fun getBets() = currentUser?.bets ?: listOf<Bet>()

    /**
     * Returns the current user's bet on match [id], or null when there is none.
     * FIX: was `first { ... }`, which threw NoSuchElementException for matches
     * without a bet even though every caller null-checks the result.
     */
    fun getBetByMatchId(id: String) = currentUser?.bets?.firstOrNull { it.matchId == id }
}
package com.d.beteasier.api
/**
 * Account record held by [BackEndMock].
 *
 * [id], [email] and [password] are fixed at registration; the remaining
 * profile fields stay null until the user fills them in, and [bets] collects
 * every bet the user has placed.
 */
data class User(
    val id: String,
    val email: String,
    val password: String,
    var name: String? = null,
    var surname: String? = null,
    var country: String? = null,
    var balance: Double? = null,        // account balance in EUR
    var monthlyLimit: Double? = null,   // self-imposed monthly spending limit
    var betLimit: Double? = null,       // maximum stake allowed per single bet
    val bets: MutableList<Bet> = mutableListOf()
)
# Beteasier
Betting app developed for Android using Kotlin for a Software Engineering course at Vilnius University.
➡ New documentation and app created by:
<NAME>
<NAME>
<NAME>
<NAME>
➡ Old documentation created by:
<NAME>
<NAME>
<NAME>
<NAME>
<file_sep>/app-beteasier-kotlin/app/src/main/java/com/d/beteasier/bet/BetsViewModel.kt
package com.d.beteasier.bet
import androidx.lifecycle.ViewModel
import com.d.beteasier.api.BackEndMock
/** View model for the bets tab; thin facade over [BackEndMock]. */
class BetsViewModel : ViewModel() {
    /** All bets placed by the logged-in user (empty list when nobody is logged in). */
    fun getBets() = BackEndMock.getBets()
}
package com.d.beteasier.match
import android.view.ViewGroup
import com.d.beteasier.R
import com.d.beteasier.api.Match
import com.d.beteasier.base.BaseAdapter
import com.d.beteasier.base.BaseHolder
import kotlinx.android.synthetic.main.holder_match.*
/**
 * List row for a single [Match] in the matches list.
 * Shows the category icon, both team names and kick-off time; taps on the
 * "open" button are forwarded to the adapter with the match as payload.
 */
class MatchViewHolder(parent: ViewGroup) : BaseHolder<BaseAdapter.SingleTypeItem<Match>>(
    parent,
    R.layout.holder_match
) {

    override fun onBind(listItem: BaseAdapter.SingleTypeItem<Match>) {
        listItem.item.apply {
            imageView.setImageResource(category.imageRes)
            firstTeamView.text = firstTeam
            secondTeamView.text = secondTeam
            dateTimeView.text = dateTime
        }
        // Deliver the clicked match to listeners registered on the adapter.
        openButton.setOnClickListener {
            onViewClick(it, listItem.item)
        }
    }
}
package com.d.beteasier.profile
import android.app.Activity
import android.content.Intent
import android.os.Bundle
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.d.beteasier.MainActivity
import com.d.beteasier.R
import kotlinx.android.synthetic.main.activity_login.*
import org.koin.androidx.viewmodel.ext.android.sharedViewModel
import org.koin.androidx.viewmodel.ext.android.viewModel
/**
 * Entry screen: validates the email/password form, logs in through
 * [ProfileViewModel] and opens [MainActivity] on success. Registration is
 * delegated to [RegisterActivity]; when that screen reports RESULT_OK this
 * activity closes itself as well.
 */
class LoginActivity : AppCompatActivity() {

    private val viewModel: ProfileViewModel by viewModel()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_login)
        initViews()
    }

    // Closes the login screen once RegisterActivity (request code 0) succeeds.
    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        super.onActivityResult(requestCode, resultCode, data)
        if (requestCode == 0 && resultCode == Activity.RESULT_OK)
            finish()
    }

    private fun initViews() {
        loginButton.setOnClickListener {
            val email = emailInput.text.toString()
            val password = passwordInput.text.toString()
            when {
                // Inline validation: highlight the first empty input.
                email.isEmpty() -> {
                    emailInput?.error = "Please enter your email"
                    emailInput?.requestFocus()
                }
                password.isEmpty() -> {
                    passwordInput?.error = "Please enter your password"
                    passwordInput?.requestFocus()
                }
                else -> {
                    if (viewModel.login(email, password)) {
                        startActivity(Intent(this, MainActivity::class.java))
                        finish()
                    } else
                        Toast.makeText(this, "User not found", Toast.LENGTH_SHORT).show()
                }
            }
        }
        registerButton.setOnClickListener {
            startActivityForResult(Intent(this, RegisterActivity::class.java), 0)
        }
    }
}
package com.d.beteasier.match
import android.os.Bundle
import android.view.View
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.d.beteasier.R
import com.d.beteasier.api.Match
import com.d.beteasier.api.Result
import kotlinx.android.synthetic.main.activity_match_info.*
import org.koin.androidx.viewmodel.ext.android.viewModel
/**
 * Detail screen for a single [Match].
 *
 * Two modes, selected by the "ALLOW_BETS" intent extra (default true):
 *  - betting mode: shows stake inputs for both teams and places bets;
 *  - read-only mode: shows the score views and the outcome of the user's
 *    existing bet on this match.
 * The match itself comes from the shared [MatchesViewModel] (a Koin single),
 * which the launching screen populates before starting this activity.
 */
class MatchInfoActivity : AppCompatActivity() {

    private val viewModel: MatchesViewModel by viewModel()

    private val match: Match
        get() = viewModel.model

    private val shouldAllowBets: Boolean by lazy {
        intent.getBooleanExtra("ALLOW_BETS", true)
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_match_info)
        initViews()
    }

    private fun initViews() {
        initMatchInfo()
        if (!shouldAllowBets) {
            replaceUIElements()
            val bet = viewModel.getBetByMatchId(match.id)
            val result = bet?.result ?: Result.TBD
            bettingAmountLabel.text = result.text
            bettingAmountView.text = when (result) {
                Result.Won -> {
                    // Winnings = stake * rate of the team the user backed.
                    val rate = if (bet?.team == match.firstTeam)
                        match.firstRate
                    else
                        match.secondRate
                    String.format("%.2f EUR", (bet?.amount ?: 0.0) * rate)
                }
                else -> "${bet?.amount.toString()} EUR"
            }
        } else {
            betButton.setOnClickListener { placeBets() }
        }
    }

    /** Validates the stake inputs and stores a bet for each non-blank one. */
    private fun placeBets() {
        val user = viewModel.getCurrentUser()
        val betLimit = user?.betLimit ?: 10000000.0
        val firstBetText = firstTeamBet.text?.toString() ?: ""
        val secondBetText = secondTeamBet.text?.toString() ?: ""
        // FIX: parse with toDoubleOrNull — the previous toDouble() call crashed
        // with NumberFormatException whenever the user typed a non-numeric stake.
        val firstBet = firstBetText.takeIf { it.isNotBlank() }?.toDoubleOrNull()
        val secondBet = secondBetText.takeIf { it.isNotBlank() }?.toDoubleOrNull()
        when {
            firstBetText.isBlank() && secondBetText.isBlank() ->
                Toast.makeText(
                    this,
                    "Please enter the amount you wish to bet",
                    Toast.LENGTH_SHORT
                ).show()
            (firstBetText.isNotBlank() && firstBet == null)
                    || (secondBetText.isNotBlank() && secondBet == null) ->
                Toast.makeText(this, "Please enter a valid amount", Toast.LENGTH_SHORT)
                    .show()
            (firstBet != null && firstBet > betLimit)
                    || (secondBet != null && secondBet > betLimit) ->
                Toast.makeText(this, "You cannot exceed your bet limit", Toast.LENGTH_SHORT)
                    .show()
            else -> {
                if (firstBet != null)
                    viewModel.addBet(
                        userId = user?.id ?: "1",
                        matchId = match.id,
                        team = match.firstTeam,
                        amount = firstBet
                    )
                if (secondBet != null)
                    viewModel.addBet(
                        userId = user?.id ?: "1",
                        matchId = match.id,
                        team = match.secondTeam,
                        amount = secondBet
                    )
                finish()
            }
        }
    }

    /** Fills the header views shown in both modes. */
    private fun initMatchInfo() {
        imageView.setImageResource(match.category.imageRes)
        dateTimeView.text = match.dateTime
        firstTeamView.text = match.firstTeam
        secondTeamView.text = match.secondTeam
        firstRateView.text = match.firstRate.toString()
        secondRateView.text = match.secondRate.toString()
    }

    /** Swaps the betting inputs for the read-only result/score views. */
    private fun replaceUIElements() {
        firstTeamBet.visibility = View.GONE
        secondTeamBet.visibility = View.GONE
        betButton.visibility = View.GONE
        bettingAmountLabel.visibility = View.VISIBLE
        bettingAmountView.visibility = View.VISIBLE
        firstTeamScore.visibility = View.VISIBLE
        secondTeamScore.visibility = View.VISIBLE
        scoreDivider.visibility = View.VISIBLE
        firstTeamScore.text = match.firstTeamScore?.toString() ?: "TBD"
        secondTeamScore.text = match.secondTeamScore?.toString() ?: "TBD"
    }
}
// Gradle settings: project name and the modules included in the build.
rootProject.name='BetEasier'
include ':app'
<file_sep>/app-beteasier-kotlin/app/src/main/java/com/d/beteasier/match/MatchesViewModel.kt
package com.d.beteasier.match
import androidx.lifecycle.ViewModel
import com.d.beteasier.api.BackEndMock
import com.d.beteasier.api.Bet
import com.d.beteasier.api.Match
import com.d.beteasier.api.User
/**
 * Shared view model for match data: caches the match list, remembers the
 * currently selected match in [model], and proxies bet/user calls to the
 * backend mock.
 */
class MatchesViewModel : ViewModel() {

    // Lazily-cached copy of the match list.
    private var matchList: List<Match> = listOf()

    var user: User? = null

    // The match currently opened in the detail screen.
    lateinit var model: Match

    /** Returns the match list, fetching it from the backend on first access. */
    fun getMatches(): List<Match> {
        if (matchList.isEmpty()) {
            matchList = BackEndMock.getMatches()
        }
        return matchList
    }

    /** Looks up the current user's bet for match [id]; a null id never matches. */
    fun getBetByMatchId(id: String?) = BackEndMock.getBetByMatchId(id.orEmpty())

    /** Returns the logged-in user, caching it in [user]. */
    fun getCurrentUser(): User? {
        if (user == null) {
            user = BackEndMock.getCurrentUser()
        }
        return user
    }

    /** Stores a new bet of [amount] by [userId] on [team] in [matchId]. */
    fun addBet(userId: String, matchId: String, team: String, amount: Double) {
        val bet = Bet(userId, matchId, team, amount)
        BackEndMock.addBet(bet)
    }

    /** Selects the match with [id] as the current [model]. */
    fun setModelFromMatchId(id: String) {
        model = BackEndMock.getMatchById(id)
    }
}
package com.d.beteasier.api
import android.os.Parcelable
import androidx.annotation.DrawableRes
import com.d.beteasier.R
import kotlinx.android.parcel.Parcelize
/**
 * A sports match users can bet on; Parcelable so it can cross screen
 * boundaries. [firstRate]/[secondRate] are the betting odds; the score
 * fields are null when unknown.
 */
@Parcelize
data class Match(
    val id: String,
    val firstTeam: String,
    val secondTeam: String,
    val dateTime: String,
    val category: Category,
    val firstRate: Double,    // odds on the first team
    val secondRate: Double,   // odds on the second team
    val isFinished: Boolean,
    val firstTeamScore: Int? = null,
    val secondTeamScore: Int? = null
) : Parcelable
/** Sport category of a [Match], carrying the icon shown in list rows. */
enum class Category(@DrawableRes val imageRes: Int) {
    Football(R.drawable.ic_football),
    Basketball(R.drawable.ic_basketball),
    Baseball(R.drawable.ic_baseball);
}
<file_sep>/app-beteasier-kotlin/app/src/main/java/com/d/beteasier/base/BaseAdapter.kt
package com.d.beteasier.base
import android.view.ViewGroup
import androidx.annotation.CallSuper
import androidx.recyclerview.widget.DefaultItemAnimator
import androidx.recyclerview.widget.RecyclerView
import com.d.beteasier.util.indexOfFirstOrNull
/**
 * RecyclerView adapter base that pairs with [BaseHolder].
 *
 * Items are stored as [BaseItem]s so each entry knows its view type; click and
 * long-click listeners are registered per view type (or globally via
 * [BaseViewType.KEY_ALL_TYPES_ON_CLICK]) and wired into holders at creation.
 */
@Suppress("UNCHECKED_CAST")
abstract class BaseAdapter<ListItem> : RecyclerView.Adapter<BaseHolder<ListItem>>() {

    // Listener maps keyed by view type; KEY_ALL_TYPES_ON_CLICK acts as a global key.
    private var onViewClickListeners = mutableMapOf<Enum<*>, (viewId: Int, data: Any?) -> Unit>()
    private var onViewLongClickListeners = mutableMapOf<Enum<*>, (viewId: Int, data: Any?) -> Unit>()

    // Assigning a new list sorts it via [sort] and refreshes the whole adapter.
    var items: MutableList<ListItem> = mutableListOf()
        set(value) {
            field = sort(value).toMutableList()
            notifyDataSetChanged()
        }

    /**
     * Use it for auto attach/detach RecyclerView ItemDecorations.
     * If you update after adapter is set to the RecyclerView - changes will not show
     */
    open val itemAnimator: RecyclerView.ItemAnimator = DefaultItemAnimator()
    open val itemDecorations = mutableListOf<RecyclerView.ItemDecoration>()

    open var baseRecyclerView: RecyclerView? = null

    /** Creates the holder for [viewType]; called from [onCreateViewHolder]. */
    abstract fun <VH : BaseHolder<ListItem>> getViewHolder(parent: ViewGroup, viewType: Enum<*>): VH

    /** Maps a view-type ordinal back to its enum constant; single-type adapters keep the default. */
    open fun getViewTypeEnum(ordinal: Int): Enum<*> = BaseViewType.SINGLE

    final override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): BaseHolder<ListItem> {
        val viewTypeEnum = getViewTypeEnum(viewType)
        val viewHolder = getViewHolder<BaseHolder<ListItem>>(parent, viewTypeEnum)
        // A listener applies to this holder when registered globally or for this exact type.
        val clickListenerPos = onViewClickListeners.keys.firstOrNull { it == BaseViewType.KEY_ALL_TYPES_ON_CLICK || it == viewTypeEnum }
        val longClickListenerPos = onViewLongClickListeners.keys.firstOrNull { it == BaseViewType.KEY_ALL_TYPES_ON_CLICK || it == viewTypeEnum }
        clickListenerPos?.let {
            viewHolder.onViewClickListeners = { int, any ->
                onViewClickListeners[clickListenerPos]?.invoke(int, any)
            }
        }
        longClickListenerPos?.let {
            viewHolder.onViewLongClickListeners = { int, any ->
                // FIX: was indexed with clickListenerPos (copy-paste error), so long
                // clicks looked up the wrong key and were dropped whenever the
                // long-click listener was registered under a different key.
                onViewLongClickListeners[longClickListenerPos]?.invoke(int, any)
            }
        }
        try {
            return viewHolder
        } finally {
            // Runs after the holder is fully wired, before RecyclerView receives it.
            onAfterCreateViewHolder(viewHolder)
        }
    }

    /** Hook invoked right after a holder has been created and wired. */
    open fun onAfterCreateViewHolder(holder: BaseHolder<ListItem>) = Unit

    override fun getItemViewType(position: Int): Int {
        val item = items[position]
        return if (item is BaseItem<*>)
            item.viewType.ordinal
        else
            super.getItemViewType(position)
    }

    @CallSuper
    override fun onAttachedToRecyclerView(recyclerView: RecyclerView) {
        this.baseRecyclerView = recyclerView
        recyclerView.itemAnimator = itemAnimator
        itemDecorations.forEach { recyclerView.addItemDecoration(it) }
    }

    @CallSuper
    override fun onDetachedFromRecyclerView(recyclerView: RecyclerView) {
        this.baseRecyclerView = null
        itemDecorations.forEach { recyclerView.removeItemDecoration(it) }
    }

    override fun onBindViewHolder(holder: BaseHolder<ListItem>, position: Int) {
        holder.onBind(items[position])
    }

    override fun getItemCount(): Int = items.size

    /** Hook for subclasses to order [items] before they are stored; identity by default. */
    open fun sort(items: List<ListItem>): List<ListItem> = items

    /** Registers a click listener for every view type. */
    fun <D> setOnViewClickListener(callback: (viewId: Int, data: D) -> Unit) {
        // This type is used for global click listeners
        onViewClickListeners[BaseViewType.KEY_ALL_TYPES_ON_CLICK] = { viewId, any ->
            callback(viewId, any as D)
        }
    }

    /** Registers a click listener for one specific [viewType]. */
    fun <D> addOnViewTypeViewClickListener(viewType: Enum<*>, callback: (viewId: Int, data: D) -> Unit) {
        onViewClickListeners[viewType] = { int, any ->
            callback.invoke(int, any as D)
        }
    }

    /** Registers a long-click listener for every view type. */
    fun <D> setOnViewLongClickListener(callback: (viewId: Int, data: D) -> Unit) {
        // This type is used for global click listeners
        onViewLongClickListeners[BaseViewType.KEY_ALL_TYPES_ON_CLICK] = { viewId, any ->
            callback(viewId, any as D)
        }
    }

    /** Registers a long-click listener for one specific [viewType]. */
    fun <D> addOnViewTypeViewLongClickListener(viewType: Enum<*>, callback: (viewId: Int, data: D) -> Unit) {
        onViewLongClickListeners[viewType] = { int, any ->
            callback.invoke(int, any as D)
        }
    }

    fun clearViewClickListeners() = onViewClickListeners.clear()

    fun clearViewLongClickListeners() = onViewLongClickListeners.clear()

    /** Finds the live holder for the first item with [viewType], or null if absent/off-screen. */
    fun <BH : BaseHolder<*>> findViewHolderByType(viewType: Enum<*>): BH? {
        val index = items.indexOfFirstOrNull { (it as? BaseItem<*>)?.viewType == viewType }
            ?: return null
        return baseRecyclerView?.findViewHolderForLayoutPosition(index) as? BH
    }

    fun <BH : BaseHolder<*>> findViewHolderByIndex(index: Int): BH? =
        baseRecyclerView?.findViewHolderForLayoutPosition(index) as? BH

    /** Removes the item at [position]; pass notify=false to batch notifications yourself. */
    fun removeAt(position: Int, notify: Boolean = true) {
        items.removeAt(position)
        if (notify)
            notifyItemRemoved(position)
    }

    /** Mutates the item at [position] in place via [update] and optionally notifies. */
    fun <T> updateAt(
        position: Int,
        notify: Boolean = true,
        update: T.() -> Unit
    ) {
        // Type parameter renamed from ListItem to T: it shadowed the class-level
        // parameter, and the cast is unchecked either way.
        (items[position] as T).update()
        if (notify)
            notifyItemChanged(position)
    }

    // NOTE(review): passes -1 to notifyItemChanged when no item has [viewType].
    fun <T> notifyItemChanged(viewType: Enum<*>) =
        notifyItemChanged(items.map { it as? BaseItem<T> }.indexOfFirst { it?.viewType == viewType })

    fun <T> getItemModel(viewType: Enum<*>) =
        items.map { it as? BaseItem<T> }.firstOrNull { it?.viewType == viewType }?.item

    fun <T> getItemModel(position: Int) = (items[position] as? BaseItem<T>)?.item

    /** List entry carrying its [viewType] and the wrapped [item]. */
    open class BaseItem<ListItem>(val viewType: Enum<*>, var item: ListItem)

    /** Convenience entry for adapters with a single view type. */
    open class SingleTypeItem<ListItem>(item: ListItem) : BaseItem<ListItem>(BaseViewType.SINGLE, item)

    enum class BaseViewType {
        KEY_ALL_TYPES_ON_CLICK,
        SINGLE
    }
}
<file_sep>/app-beteasier-kotlin/app/src/main/java/com/d/beteasier/match/MatchesAdapter.kt
package com.d.beteasier.match
import android.view.ViewGroup
import com.d.beteasier.api.Category
import com.d.beteasier.api.Match
import com.d.beteasier.base.BaseAdapter
import com.d.beteasier.base.BaseHolder
/** Adapter listing [Match] rows via [MatchViewHolder]; supports category filtering. */
class MatchesAdapter : BaseAdapter<BaseAdapter.SingleTypeItem<Match>>() {

    override fun <VH : BaseHolder<SingleTypeItem<Match>>> getViewHolder(
        parent: ViewGroup,
        viewType: Enum<*>
    ): VH {
        return MatchViewHolder(parent) as VH
    }

    /** Shows every match in [matches]. */
    fun initItems(matches: List<Match>) {
        val wrapped = mutableListOf<SingleTypeItem<Match>>()
        matches.forEach { wrapped.add(SingleTypeItem(it)) }
        items = wrapped
    }

    /** Shows only the matches in [matches] whose category equals [category]. */
    fun filterItems(matches: List<Match>, category: Category) {
        val wrapped = mutableListOf<SingleTypeItem<Match>>()
        for (match in matches) {
            if (match.category == category) {
                wrapped.add(SingleTypeItem(match))
            }
        }
        items = wrapped
    }
}
package com.d.beteasier.profile
import androidx.lifecycle.ViewModel
import com.d.beteasier.api.BackEndMock
import com.d.beteasier.api.User
/** View model for login, registration and profile management, backed by [BackEndMock]. */
class ProfileViewModel : ViewModel() {

    // Cached copy of the logged-in user.
    private var model: User? = null

    /**
     * Password policy: at least 8 characters with at least one lowercase letter,
     * one uppercase letter and one digit.
     * FIX: the previous character class `[a-zA-Z\d]{8,}` silently rejected any
     * password containing a special character, contradicting the requirements
     * shown to the user in RegisterActivity; `.{8,}` keeps the stated rules
     * while allowing other characters.
     */
    fun isPasswordValid(password: String) =
        password.matches(Regex("^(?=.*[a-z])(?=.*[A-Z])(?=.*\\d).{8,}$"))

    /** Creates an account; returns false when the email is already taken. */
    fun register(email: String, password: String) = BackEndMock.register(email, password)

    /** Authenticates; returns true on success. */
    fun login(email: String, password: String) = BackEndMock.login(email, password)

    /** Returns the logged-in user, caching it locally. */
    fun getCurrentUser(): User? {
        if (model == null)
            model = BackEndMock.getCurrentUser()
        return model
    }

    /** Applies the non-blank fields to the current user's profile and refreshes the cache. */
    fun updateProfile(
        name: String,
        surname: String,
        country: String,
        monthlyLimit: String,
        betLimit: String
    ) {
        BackEndMock.updateUser(
            name,
            surname,
            country,
            monthlyLimit,
            betLimit
        )
        model = BackEndMock.getCurrentUser()
    }

    /** Adds [amount] EUR to the current user's balance. */
    fun addFunds(amount: Double) = BackEndMock.addFunds(amount)

    /** Deletes the current account and ends the session. */
    fun deleteCurrentUser() = BackEndMock.deleteCurrentUser()
}
package com.d.beteasier.api
// Placeholder for a future statistics feature; intentionally empty for now.
class Statistic {
}
package com.d.beteasier.bet
import android.view.ViewGroup
import com.d.beteasier.R
import com.d.beteasier.api.BackEndMock
import com.d.beteasier.api.Bet
import com.d.beteasier.api.Result
import com.d.beteasier.base.BaseAdapter
import com.d.beteasier.base.BaseHolder
import kotlinx.android.synthetic.main.holder_bet.*
/**
 * List row for one of the user's [Bet]s.
 * Looks up the associated match to show teams/date, then renders the bet
 * outcome: winnings (stake * backed team's rate) for won bets, otherwise the
 * plain stake. Taps on "open" forward the match id to the adapter.
 */
class BetViewHolder(parent: ViewGroup) : BaseHolder<BaseAdapter.SingleTypeItem<Bet>>(
    parent,
    R.layout.holder_bet
) {

    override fun onBind(listItem: BaseAdapter.SingleTypeItem<Bet>) {
        val bet = listItem.item
        val match = BackEndMock.getMatchById(bet.matchId) //TODO REFACTOR do not fetch from BE here
        imageView.setImageResource(match.category.imageRes)
        firstTeamView.text = match.firstTeam
        secondTeamView.text = match.secondTeam
        dateTimeView.text = match.dateTime
        resultView.text = bet.result.text
        amountView.text = when(bet.result) {
            Result.Won -> {
                // Payout uses the rate of whichever team the bet was placed on.
                val rate = if (bet.team == match.firstTeam)
                    match.firstRate
                else
                    match.secondRate
                String.format("%.2f EUR", bet.amount * rate)
            }
            else -> "${bet.amount.toString()} EUR"
        }
        openButton.setOnClickListener {
            onViewClick(it, bet.matchId)
        }
    }
}
package com.d.beteasier.bet
import android.view.ViewGroup
import com.d.beteasier.api.Bet
import com.d.beteasier.base.BaseAdapter
import com.d.beteasier.base.BaseHolder
/** Adapter listing the user's bets via [BetViewHolder]. */
class BetsAdapter : BaseAdapter<BaseAdapter.SingleTypeItem<Bet>>() {

    override fun <VH : BaseHolder<SingleTypeItem<Bet>>> getViewHolder(
        parent: ViewGroup,
        viewType: Enum<*>
    ): VH {
        return BetViewHolder(parent) as VH
    }

    /** Replaces the adapter contents with [bets]. */
    fun initItems(bets: List<Bet>) {
        val wrapped = bets.map { bet -> SingleTypeItem(bet) }
        items = wrapped.toMutableList()
    }
}
package com.d.beteasier.util
import androidx.annotation.IdRes
import androidx.fragment.app.FragmentActivity
import androidx.fragment.app.FragmentTransaction
import androidx.lifecycle.Lifecycle
import com.d.beteasier.base.BaseFragment
/**
 * Replaces the fragment in [containerId] with [fragment] (tagged with its class
 * name) and commits, unless the fragment is already added. [extraCalls] lets
 * the caller customize the transaction (e.g. animations) before the replace.
 */
fun <T : BaseFragment> FragmentActivity.replaceAndCommit(
    @IdRes containerId: Int,
    fragment: T,
    extraCalls: FragmentTransaction.() -> Unit = {}
) =
    supportFragmentManager.beginTransaction().apply {
        // Already showing: skip; the transaction is returned uncommitted.
        if (fragment.isAdded) return@apply
        extraCalls()
        replace(containerId, fragment, fragment::class.java.name)
        safeCommit(lifecycle.currentState)
    }
/**
 * Commits the transaction; falls back to commitAllowingStateLoss when the
 * lifecycle has not yet reached RESUMED.
 */
fun FragmentTransaction.safeCommit(lifecycleState: Lifecycle.State): Int {
    return when {
        lifecycleState.isAtLeast(Lifecycle.State.RESUMED) -> commit()
        else -> commitAllowingStateLoss()
    }
}
/** Like [indexOfFirst], but returns null instead of -1 when nothing matches. */
inline fun <T> List<T>.indexOfFirstOrNull(predicate: (T) -> Boolean): Int? =
    indexOfFirst(predicate).takeIf { it >= 0 }
package com.d.beteasier
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import com.d.beteasier.bet.BetsFragment
import com.d.beteasier.match.MatchesFragment
import com.d.beteasier.profile.ProfileFragment
import com.d.beteasier.util.replaceAndCommit
import kotlinx.android.synthetic.main.activity_main.*
/**
 * Hosts the three main screens (matches, bets, profile) behind a bottom
 * navigation bar; each tab's fragment is created lazily and swapped into the
 * shared container with a fade animation.
 */
class MainActivity : AppCompatActivity() {

    private val matchesFragment: MatchesFragment by lazy {
        MatchesFragment()
    }
    private val betsFragment: BetsFragment by lazy {
        BetsFragment()
    }
    private val profileFragment: ProfileFragment by lazy {
        ProfileFragment()
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        initBottomNavigationView()
    }

    private fun initBottomNavigationView() =
        bottomNavigationView.apply {
            setOnNavigationItemSelectedListener {
                show(it.itemId)
                true
            }
            // Open the matches tab by default.
            bottomNavigationView.selectedItemId = R.id.matchesItem
        }

    /** Swaps the fragment matching [menuItemId] into the container. */
    private fun show(menuItemId: Int) {
        val fragment = when (menuItemId) {
            R.id.matchesItem -> matchesFragment
            R.id.betsItem -> betsFragment
            else -> profileFragment
        }
        replaceAndCommit(R.id.fragmentContainer, fragment) {
            setCustomAnimations(android.R.animator.fade_in, android.R.animator.fade_out)
        }
    }
}
package com.d.beteasier.profile
import android.content.Intent
import android.os.Bundle
import android.view.View
import android.widget.Toast
import com.d.beteasier.R
import com.d.beteasier.base.BaseFragment
import kotlinx.android.synthetic.main.fragment_profile.*
import org.koin.androidx.viewmodel.ext.android.viewModel
/**
 * Profile tab: shows account email/balance, lets the user edit profile fields
 * and limits, add (mock) funds, or delete the account (which returns to login).
 */
class ProfileFragment : BaseFragment() {

    override val fragmentLayoutRes: Int
        get() = R.layout.fragment_profile

    private val viewModel: ProfileViewModel by viewModel()

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        loadUserData()
        initViews()
    }

    // Copies the current user's data into the form fields.
    private fun loadUserData() = viewModel.getCurrentUser()?.apply {
        emailView.setText(email)
        balanceView.setText(balance?.let { "${it} EUR" } ?: "0.00 EUR")
        nameEditView.setText(name)
        surnameEditView.setText(surname)
        countryEditView.setText(country)
        monthlyLimitEditView.setText(monthlyLimit?.toString() ?: "")
        betLimitEditView.setText(betLimit?.toString() ?: "")
    }

    private fun initViews() {
        buttonUpdateInfo.setOnClickListener {
            val name = nameEditView.text.toString()
            val surname = surnameEditView.text.toString()
            val country = countryEditView.text.toString()
            val monthlyLimit = monthlyLimitEditView.text.toString()
            val betLimit = betLimitEditView.text.toString()
            viewModel.updateProfile(name, surname, country, monthlyLimit, betLimit)
            loadUserData()
            Toast.makeText(context, "Profile has been updated", Toast.LENGTH_SHORT).show()
        }
        addFundsButton.setOnClickListener {
            // Mock top-up: always adds a fixed 100 EUR.
            viewModel.addFunds(100.0)
            Toast.makeText(context, "Added 100 EUR from your PayPal account", Toast.LENGTH_SHORT)
                .show()
            balanceView.setText("${viewModel.getCurrentUser()?.balance.toString()} EUR")
        }
        deleteAccountButton.setOnClickListener {
            viewModel.deleteCurrentUser()
            startActivity(Intent(context, LoginActivity::class.java))
            activity?.finish()
        }
    }
}
package com.d.beteasier.match
import android.content.Intent
import android.os.Bundle
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import com.d.beteasier.base.BaseFragment
import com.d.beteasier.R
import com.d.beteasier.api.Category
import com.d.beteasier.api.Match
import kotlinx.android.synthetic.main.fragment_matches.*
import org.koin.androidx.viewmodel.ext.android.viewModel
/**
 * Matches tab: lists matches in a RecyclerView with a category filter spinner.
 * Spinner position 0 shows every match; position n (n > 0) filters by
 * Category.values()[n - 1]. Tapping a row stores the match on the shared
 * [MatchesViewModel] and opens [MatchInfoActivity].
 */
class MatchesFragment : BaseFragment(), AdapterView.OnItemSelectedListener {

    override val fragmentLayoutRes: Int
        get() = R.layout.fragment_matches

    private val viewModel: MatchesViewModel by viewModel()

    private val adapter: MatchesAdapter by lazy {
        MatchesAdapter()
    }

    // Spinner callbacks: this fragment is the spinner's OnItemSelectedListener.
    override fun onNothingSelected(parent: AdapterView<*>?) =
        adapter.initItems(viewModel.getMatches())

    override fun onItemSelected(parent: AdapterView<*>?, view: View?, position: Int, id: Long) =
        when (position) {
            0 -> adapter.initItems(viewModel.getMatches())
            else -> adapter.filterItems(viewModel.getMatches(), Category.values()[position-1])
        }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        initAdapter()
        initSpinner()
    }

    override fun onResume() {
        super.onResume()
        // Refresh when returning from the match detail screen.
        adapter.initItems(viewModel.getMatches())
    }

    private fun initAdapter() =
        adapter.apply {
            recyclerView.adapter = this
            setOnViewClickListener<Match> { _, match ->
                // Hand the selected match to the shared view model for the detail screen.
                viewModel.model = match
                startActivity(Intent(context, MatchInfoActivity::class.java))
            }
            initItems(viewModel.getMatches())
        }

    private fun initSpinner() =
        ArrayAdapter.createFromResource(
            requireContext(),
            R.array.category_array,
            android.R.layout.simple_spinner_item
        ).also { adapter ->
            adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
            categorySpinner.adapter = adapter
            categorySpinner.onItemSelectedListener = this@MatchesFragment
        }
}
package com.d.beteasier.bet
import android.content.Intent
import android.os.Bundle
import android.view.View
import com.d.beteasier.R
import com.d.beteasier.base.BaseFragment
import com.d.beteasier.match.MatchInfoActivity
import com.d.beteasier.match.MatchesViewModel
import kotlinx.android.synthetic.main.fragment_bets.*
import org.koin.androidx.viewmodel.ext.android.viewModel
/**
 * Bets tab: lists the user's bets; tapping a row opens [MatchInfoActivity]
 * in read-only mode ("ALLOW_BETS" = false) for the bet's match.
 */
class BetsFragment : BaseFragment() {

    override val fragmentLayoutRes: Int
        get() = R.layout.fragment_bets

    private val betsViewModel: BetsViewModel by viewModel()
    private val matchesViewModel: MatchesViewModel by viewModel() //TODO REFACTOR REMOVE DEPENDENCY ON THIS VM

    private val adapter: BetsAdapter by lazy {
        BetsAdapter()
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        initAdapter()
    }

    override fun onResume() {
        super.onResume()
        // Refresh in case new bets were placed while this tab was hidden.
        adapter.initItems(betsViewModel.getBets())
    }

    private fun initAdapter() = adapter.apply {
        recyclerView.adapter = this
        setOnViewClickListener<String> { _, matchId ->
            // Select the match on the shared view model, then open it read-only.
            matchesViewModel.setModelFromMatchId(matchId)
            startActivity(
                Intent(
                    context,
                    MatchInfoActivity::class.java
                ).putExtra("ALLOW_BETS", false)
            )
        }
        initItems(betsViewModel.getBets())
    }
}
package com.d.beteasier.api
/** A stake of [amount] EUR placed by [userId] on [team] in match [matchId]. */
data class Bet(
    val userId: String,
    val matchId: String,
    val team: String,
    val amount: Double,
    val result: Result = Result.TBD   // outcome; TBD until the match is settled
)
/** Outcome of a [Bet]; [text] is the label prefix shown next to the amount. */
enum class Result(val text: String) {
    TBD("TBD:"),
    Won("Won:"),
    Lost("Lost:");
}
package com.d.beteasier.profile
import android.app.Activity
import android.content.Intent
import android.os.Bundle
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.d.beteasier.MainActivity
import com.d.beteasier.R
import kotlinx.android.synthetic.main.activity_register.*
import org.koin.androidx.viewmodel.ext.android.viewModel
/**
 * Registration screen: validates the form (non-empty fields, matching
 * passwords, password policy), registers through [ProfileViewModel] and, on
 * success, opens [MainActivity] and reports RESULT_OK so [LoginActivity] can
 * close itself as well.
 */
class RegisterActivity : AppCompatActivity() {

    private val viewModel: ProfileViewModel by viewModel()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_register)
        initViews()
    }

    private fun initViews() {
        registerButton.setOnClickListener {
            val email = emailInput.text.toString()
            val password = passwordInput.text.toString()
            val repeatPassword = passwordRepeatInput.text.toString()
            when {
                email.isEmpty() -> {
                    emailInput.error = "Please enter your email"
                    emailInput.requestFocus()
                }
                password.isEmpty() -> {
                    passwordInput.error = "Please enter your password"
                    passwordInput.requestFocus()
                }
                repeatPassword.isEmpty() -> {
                    passwordRepeatInput.error = "Please repeat your password"
                    passwordRepeatInput.requestFocus()
                }
                password != repeatPassword ->
                    // FIX: message typo — was "Password do not match".
                    Toast.makeText(this, "Passwords do not match", Toast.LENGTH_SHORT).show()
                !viewModel.isPasswordValid(password) -> {
                    passwordInput.error = "Password has to be: minimum eight characters, at least one uppercase letter, one lowercase letter and one number"
                    passwordInput.requestFocus()
                }
                else -> {
                    if (viewModel.register(email, password)) {
                        startActivity(Intent(this, MainActivity::class.java))
                        setResult(Activity.RESULT_OK)
                        finish()
                    } else
                        Toast.makeText(this, "User exists with this email", Toast.LENGTH_SHORT).show()
                }
            }
        }
    }
}
package com.d.beteasier.util
import com.d.beteasier.bet.BetsViewModel
import com.d.beteasier.match.MatchesViewModel
import com.d.beteasier.profile.ProfileViewModel
import org.koin.androidx.viewmodel.dsl.viewModel
import org.koin.dsl.module
/** Koin dependency-injection module declarations. */
object Koin {
    val appModule = module {
        // MatchesViewModel is a single so the match selected in one screen is
        // visible to MatchInfoActivity; the other view models are per-owner.
        single { MatchesViewModel() }
        viewModel { ProfileViewModel() }
        viewModel { BetsViewModel() }
    }
}
"Markdown",
"Kotlin",
"Gradle"
] | 25 | Kotlin | tmaciulis22/Beteasier | 2da650f45e6d3a39fda9270980f20f5a2cd9e915 | 9f1a88c1b01072ae8a33cae382d59f5683aa5670 | |
refs/heads/main | <file_sep># Generated by Django 3.2.9 on 2021-11-05 20:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema mapping for the renting system (generated by Django 3.2.9).

    Every model uses ``managed = False`` with an explicit ``db_table``, so
    Django only maps onto pre-existing tables and never creates or drops them.
    """

    initial = True

    # First migration: nothing to depend on.
    dependencies = [
    ]

    operations = [
        # Apartment listing; amenity columns (gym, parking, ...) are plain ints
        # (presumably 0/1 flags — confirm against the live schema).
        migrations.CreateModel(
            name='Apartment',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=30, null=True)),
                ('location', models.CharField(blank=True, max_length=255, null=True)),
                ('address', models.CharField(blank=True, max_length=255, null=True)),
                ('landlord_id', models.IntegerField(blank=True, null=True)),
                ('gym', models.IntegerField(blank=True, null=True)),
                ('parking', models.IntegerField(blank=True, null=True)),
                ('utility', models.IntegerField(blank=True, null=True)),
                ('laundry', models.IntegerField(blank=True, null=True)),
                ('swimming_pool', models.IntegerField(blank=True, null=True)),
                ('description', models.CharField(blank=True, max_length=255, null=True)),
            ],
            options={
                'db_table': 'apartment',
                'managed': False,
            },
        ),
        # Landlord contact details.
        migrations.CreateModel(
            name='Landlord',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=30, null=True)),
                ('email', models.CharField(blank=True, max_length=50, null=True)),
                ('phone', models.CharField(blank=True, max_length=20, null=True)),
            ],
            options={
                'db_table': 'landlord',
                'managed': False,
            },
        ),
        # Photo attached to either an apartment or a room (see come_from and
        # the property_*_id columns).
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('photo_id', models.IntegerField(primary_key=True, serialize=False)),
                ('come_from', models.IntegerField(blank=True, null=True)),
                ('photo_link', models.CharField(blank=True, max_length=255, null=True)),
                ('property_apartment_id', models.IntegerField(blank=True, null=True)),
                ('property_room_id', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'photo',
                'managed': False,
            },
        ),
        # Rentable room within an apartment, with price and availability window.
        migrations.CreateModel(
            name='Room',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('apartment_id', models.IntegerField(blank=True, null=True)),
                ('bedroom_num', models.IntegerField(blank=True, null=True)),
                ('bathroom_num', models.IntegerField(blank=True, null=True)),
                ('price', models.FloatField(blank=True, null=True)),
                ('start_time', models.DateField(blank=True, null=True)),
                ('end_time', models.DateField(blank=True, null=True)),
                ('description', models.CharField(blank=True, max_length=255, null=True)),
            ],
            options={
                'db_table': 'room',
                'managed': False,
            },
        ),
        # Application user credentials.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=30, null=True)),
                ('password', models.CharField(blank=True, max_length=50, null=True)),
            ],
            options={
                'db_table': 'user',
                'managed': False,
            },
        ),
        # User's favorites; the user FK doubles as the primary key.
        migrations.CreateModel(
            name='Favorite',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.DO_NOTHING, primary_key=True, serialize=False, to='pillow.user')),
            ],
            options={
                'db_table': 'favorite',
                'managed': False,
            },
        ),
        # Star rating given by a user; user FK is the primary key.
        migrations.CreateModel(
            name='Rating',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.DO_NOTHING, primary_key=True, serialize=False, to='pillow.user')),
                ('star', models.FloatField(blank=True, null=True)),
            ],
            options={
                'db_table': 'rating',
                'managed': False,
            },
        ),
    ]
| b9f6c07285b8ef99cff313b4414e14b15b057667 | [
"Python"
] | 1 | Python | zzsusan/renting_system | 7501ce042b399254da9fbf08b9c88d83e40c191d | 7bcb06102e14bbd6351cddefe569195fac716b9b | |
refs/heads/master | <repo_name>tundesalisu/SMASHBRO<file_sep>/smashbro-firstdraft/app/public/js/add.js
// Code here handles what happens when a user submits a new character on the form.
// Effectively it takes the form inputs then sends it to the server to save in the DB.
// When user clicks add-btn
$("#chirp-submit").on("click", function(event) {
  event.preventDefault();

  // Read a form input's trimmed value; returns "" when the element is missing
  // so an absent box (e.g. no #TIME-box in the markup) cannot crash the handler.
  function readBox(selector) {
    var box = $(selector);
    return box.length ? box.val().trim() : "";
  }

  // Build the record to save.
  // BUG FIX: the original shifted most fields by one input box
  // (TIME <- #INFO-box, INFO <- #CATEGORY-box, CATEGORY <- #LINK-box,
  // LINK <- #IMAGE-box) and never set IMAGE at all, so newChirp.IMAGE was
  // undefined below. Each field now reads its same-named box.
  var newChirp = {
    TITLE: readBox("#TITLE-box"),
    ADDRESS: readBox("#ADDRESS-box"),
    LOCATION: readBox("#LOCATION-box"),
    VENUE: readBox("#VENUE-box"),
    TIME: readBox("#TIME-box"),      // no #TIME-box exists in the form today -> ""
    INFO: readBox("#INFO-box"),
    CATEGORY: readBox("#CATEGORY-box"),
    LINK: readBox("#LINK-box"),
    IMAGE: readBox("#IMAGE-box"),
    VIDEO: readBox("#VIDEO-box")
  };
  console.log(newChirp);

  // Send an AJAX POST-request with jQuery; on success echo the new entry
  // at the top of the list.
  $.post("/api/new", newChirp)
    .then(function() {
      var row = $("<div>");
      row.addClass("chirp");
      row.append("<p>" + newChirp.TITLE + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.ADDRESS + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.LOCATION + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.VENUE + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.TIME + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.INFO + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.CATEGORY + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.LINK + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.IMAGE + " NEWLY ENTERED </p>");
      row.append("<p>" + newChirp.VIDEO + " NEWLY ENTERED </p>");
      $("#chirp-area").prepend(row);
    });

  // Empty each input box by replacing the value with an empty string.
  $("#TITLE-box").val("")
  $("#ADDRESS-box").val("")
  $("#LOCATION-box").val("")
  $("#VENUE-box").val("")
  $("#INFO-box").val("")
  $("#CATEGORY-box").val("")
  $("#LINK-box").val("")
  $("#IMAGE-box").val("")
  $("#VIDEO-box").val("")
});
// Code here handles queries for specific characters in the database
// In this case, the user submits a character's name... we then pass that character's name as a
// URL parameter. Our server then performs the search to grab that character from the Database.
// when user hits the search-btn
$("#search-btn").on("click", function() {
  // Grab the searched location, then strip whitespace and lower-case it so it
  // matches the normalised values the server stores.
  var searchedCharacter = $("#character-search")
    .val()
    .trim();
  searchedCharacter = searchedCharacter.replace(/\s+/g, "").toLowerCase();

  // Ask the server for every event in that location.
  $.get("/api/" + searchedCharacter, function(data) {
    console.log(data);
    // Clear the previous results before rendering the new ones.
    $("#well-section").empty();
    // BUG FIX: the server returns [] (truthy) when nothing matches, so the
    // original `if (!data)` never fired; also check for an empty array.
    if (!data || data.length === 0) {
      $("#well-section").append("<h2> No event found!!!!!!!!!!!! </h2>");
    }
    else {
      // BUG FIX: the loop started at i = 1, which silently dropped the first
      // event returned by the server; start at 0 to render every record.
      for (var i = 0; i < data.length; i++) {
        // NOTE(review): the iframe below is a hard-coded video and the last
        // card is a "map" placeholder; data[i].VIDEO is never used -- confirm.
        var row =
          '<div class="card-columns">' +
          '<div class="card cdbg">' +
          // BUG FIX: the original emitted src="..."alt="" with no space
          // between the attributes.
          '<img class="card-img-top" src="' + data[i].IMAGE + '" alt="">' +
          '<div class="card-body">' +
          '<h5 class="card-title">' + data[i].TITLE + '</h5>' +
          '<p class="card-text">INFORMATIOM OF THE AREA PLAACED HERE</p>' +
          '</div>' +
          '</div>' +
          '<div class="card p-3 cdbg">' +
          '<blockquote class="blockquote mb-0 card-body">' +
          '<p>' + data[i].ADDRESS + '</p> ' +
          '</blockquote>' +
          '</div>' +
          '<div class="card p-3 cdbg">' +
          '<blockquote class="blockquote mb-0 card-body">' +
          '<p>' + data[i].VENUE + '</p>' +
          '</blockquote>' +
          '</div>' +
          '<div class="card p-3 text-center cdbg">' +
          '<blockquote class="blockquote mb-0">' +
          '<p>' + data[i].TIME + '</p>' +
          '</blockquote>' +
          '</div>' +
          '<div class="card cdbg">' +
          '<div class="card-body">' +
          '<p class="card-text"><a href="' + data[i].LINK + '">Tournament Page</a></p>' +
          '</div>' +
          '</div>' +
          '<div class="card p-3 cdbg">' +
          '<blockquote class="blockquote mb-0 card-body">' +
          '<p><iframe width="300" height="360" src="https://www.youtube.com/embed/P-kQWJrjRV4" frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe></p>' +
          '</blockquote>' +
          '</div>' +
          '<div class="card p-3 cdbg">' +
          '<blockquote class="blockquote mb-0 card-body">' +
          '<p>map map map map amp map map</p>' +
          '</blockquote>' +
          '</div>' +
          '</div>';
        $("#well-section").prepend(row);
      }
    }
  });
});
<file_sep>/smashbro-firstdraft/app/routes/api-routes.js
// *********************************************************************************
// api-routes.js - this file offers a set of routes for displaying and saving data to the db
// *********************************************************************************
// Dependencies
// =============================================================
var Chirp = require("../models/chirp.js");
// Routes
// =============================================================
module.exports = function(app) {
// Get all chirps
app.get("/api/:LOCAL?", function(req, res) {
// If the user provides a specific character in the URL...
if (req.params.LOCAL) {
// Then display the JSON for ONLY that character.
// (Note how we're using the ORM here to run our searches)
Chirp.findAll({
where: {
LOCATION: req.params.LOCAL
}
}).then(function(result) {
return res.json(result);
});
}
else {
// Otherwise...
// Otherwise display the data for all of the characters.
// (Note how we're using Sequelize here to run our searches)
Chirp.findAll({}).then(function(result) {
return res.json(result);
});
}
});
//"/api/:state"
// Add a chirp
app.post("/api/new", function(req, res) {
var smashtables = req.body;
console.log("Chirp Data:");
console.log(req.body);
var routeName = smashtables.LOCATION.replace(/\s+/g, "").toLowerCase();
Chirp.create({
//routeName: routeName,
TITLE: ChirpTITLE,
ADDRESS: ChirpADDRESS,
LOCATION: ChirpLOCATION,
VENUE: ChirpVENUE,
TIME: ChirpTIME,
INFO: ChirpINFO,
CATEGORY: ChirpCATEGORY,
LINK: ChirpLINK,
IMAGE: ChirpIMAGE,
VIDEO: Chirp.VIDEO
});
});
};<file_sep>/smashbro-firstdraft/app/models/chirp.js
// Dependencies
// =============================================================
// This may be confusing but here Sequelize (capital) references the standard library
var Sequelize = require("sequelize");
// sequelize (lowercase) references our connection to the DB.
var sequelize = require("../config/connection.js");
// Creates a "Chirp" model that matches up with DB
// Every column is free-form text; the model maps onto the "smashtables" table
// and mirrors the fields posted from public/js/add.js.
var Chirp = sequelize.define("smashtables", {
  // routeName: Sequelize.STRING,
  TITLE: Sequelize.STRING,
  ADDRESS: Sequelize.STRING,
  LOCATION: Sequelize.STRING,
  VENUE: Sequelize.STRING,
  TIME: Sequelize.STRING,
  INFO: Sequelize.STRING,
  CATEGORY: Sequelize.STRING,
  LINK: Sequelize.STRING,
  IMAGE: Sequelize.STRING,
  VIDEO: Sequelize.STRING
});
// Syncs with DB
Chirp.sync();
// Makes the Chirp Model available for other files (will also create a table)
module.exports = Chirp;
| dfc716fb1689b802e945b438e2510c65965c6aca | [
"JavaScript"
] | 4 | JavaScript | tundesalisu/SMASHBRO | 76bccdd89ae0874f44590d489e02bb48ef23eb5e | c9f9c2fdfc2af46911e9266543f8d8e7c9a2250b | |
refs/heads/master | <file_sep>import data
import math
def Run():
market_date = data.GetDates()
# artificial price
spy_aprc = data.GetSPY()
# can use other stock, e.g.
#ko_aprc = data.GetKO()
# aprc = data.GetPEP()
# aprc = data.GetXLP()
#RunStrategy(market_date, ko_aprc)
RunStrategy(market_date, spy_aprc)
def GetReturnSeries(nav_list):
return_series = []
for i in range(len(nav_list)):
if i == 0:
return_series.append(0)
else:
return_series.append((nav_list[i] / nav_list[i-1]) - 1)
return return_series
def Average(l):
return sum(l) / len(l)
def Analyze(nav_list):
# Make Return time series
return_list = GetReturnSeries(nav_list)
# Calculate return average
# nav_list[0] refers to the first element of nav_list.
# nav_list[-1] refers to the *last* element of nav_list.
average_return = math.pow(nav_list[-1] / nav_list[0], 1 / (len(nav_list)-1)) - 1
# Calculate return SD
variance_list = [(r - average_return)**2 for r in return_list]
return_sd = math.sqrt(Average(variance_list))
# Calculate sharpe ratio
trading_days = 252
annual_return = math.pow(nav_list[-1] / nav_list[0], 250.0 / (len(nav_list)-1)) - 1
annual_volatility = return_sd * math.sqrt(trading_days)
# Sharpe
sharpe = annual_return / annual_volatility
results = {
'return': annual_return,
'volatility': annual_volatility,
'sharpe': sharpe,
}
return results
def RunStrategy(market_date, aprc):
# When SPY rises for 2 consecutive days, then long.
# Exit when lower than previous day close.
cash = 100000.0
long_quantity = 0
nav_history = []
for i, d in enumerate(market_date):
# Skip first two days.
if i < 2:
continue
# Exit if lower than previous day close
if aprc[i] < aprc[i-1] and long_quantity > 0:
cash += long_quantity * aprc[i]
long_quantity = 0
# Enter if raised for two consecutive days
if aprc[i] > aprc[i-1] and aprc[i-1] > aprc[i-2] and long_quantity == 0:
long_quantity = cash / aprc[i]
cash = 0
# Book keeping
nav = cash + long_quantity * aprc[i]
nav_history.append(nav)
# Print out result every 250 days
if i % 250 == 0:
print 'Date: %s, NAV: %f' % (d, nav)
results = Analyze(nav_history)
print 'Return: ', results['return']
print 'Volatility: ', results['volatility']
print 'Sharpe: ', results['sharpe']
if __name__ == '__main__':
Run()
<file_sep>dysnomia
======== | 62405ab861aa0ac793dd531974e90ee5a18ca9c5 | [
"Markdown",
"Python"
] | 2 | Python | yipjustin/dysnomia | 2bf0ea86788d9f1da2b9853cef7592a9a30480a1 | 616a1245eff96bad380211c24e51912ccf5a8411 | |
refs/heads/master | <repo_name>psucharzewski/Dziekanat-Console-<file_sep>/Dziekanat Console/main.cpp
#include <iostream>
#include <string>
#include "funkcje.h"
using namespace std;
int main()
{
    // Greets the user (in Polish) and asks for a login, but never actually
    // reads any input -- TODO: add the read or drop the prompt.
    cout << "No to jedziem z projektem!" << endl;
    cout << "Podaj swoj login" << endl;
    // NOTE(review): this 6-argument constructor is defined in funkcje.cpp but
    // not declared inside the Student class in funkcje.h as shipped, so this
    // line cannot compile until that declaration is added.
    Student a2,a1("pawel","dupa","dupa2",2,5,1236);
    cout << a1.first_name;
    cout << a2.first_name;
    return 0;
}
<file_sep>/Dziekanat Console/funkcje.h
// Class declarations for the Dziekanat (dean's office) console project.
// FIX: added an include guard (the header had none) and declared the Student
// constructor that funkcje.cpp defines and main.cpp uses.
#ifndef FUNKCJE_H
#define FUNKCJE_H

#include <iostream>
#include <string>

// NOTE(review): a using-directive in a header leaks into every includer;
// kept for compatibility with the existing sources.
using namespace std;

// Base type for every account in the system.
class User
{
public:
    string first_name;
};

// Lecturer account.
// NOTE(review): 'first_name' re-declared here shadows User::first_name, and
// 'brith_date' looks like a misspelled duplicate of 'birth_date' -- confirm
// before removing either, since both are public members.
class Lecturer : public User
{
public:
    string first_name;
    string last_name;
    string title;
    string birth_date;
    string login;
    int brith_date;
};

// Student account.
class Student : public User
{
public:
    // Declared so funkcje.cpp's definition and main.cpp's uses compile; the
    // defaults let 'Student a2;' default-construct. Once funkcje.cpp includes
    // this header its definition must not repeat the default arguments.
    Student(string a = "brak", string b = "brak", string c = "brak",
            int d = 0, int e = 0, int f = 1996);

    string first_name;   // shadows User::first_name (see note on Lecturer)
    string last_name;
    string field_of_study;
    int group;
    int year_of_study;
    int birth_date;
};

// Administrator account.
class Admin : public User
{
public:
    string first_name;
    string last_name;
};

// Senior-year student.
class Older_student : public Student
{
    string nic;   // private placeholder member ("nic" = "nothing" in Polish)
};

/*class group
{

};*/

#endif // FUNKCJE_H
<file_sep>/README.md
# Dziekanat-Console-
Project for Introduction to Programming - PK
<file_sep>/Dziekanat Console/funkcje.cpp
#include <string>
#include <iostream>
using namespace std;
// Student constructor: initialises every field of a student record.
// NOTE(review): this translation unit never includes "funkcje.h", so the
// Student class is not in scope here and this definition cannot compile as
// shipped -- add the include. Also keep the default arguments in exactly one
// place (declaration OR definition), never both.
Student::Student(string a= "brak",string b = "brak",string c= "brak", int d = 0, int e=0 , int f = 1996)
{
    first_name=a;
    last_name=b;
    field_of_study=c;
    group=d;
    year_of_study=e;
    birth_date=f;
}
| 285d3723eeb4c0067f11e3ed63924796f1b91a98 | [
"Markdown",
"C++"
] | 4 | C++ | psucharzewski/Dziekanat-Console- | 7df0f06a3dfc7ab262ae35a6fa8ad6918ebc161b | a3bb1b4e65ee2215992785e624ac056872d319e6 | |
refs/heads/master | <repo_name>4lk4tr43/CordovaManager<file_sep>/Cordova Manager/Controls/AppDetails.xaml.cs
using System;
using System.Xml;
namespace Cordova_Manager.Controls
{
/// <summary>
/// Displays and updates Cordova project properties.
/// </summary>
public partial class AppDetails
{
public AppDetails()
{
App.AppDetails = this;
InitializeComponent();
}
public class Data
{
private string _filePath;
private string _author;
private string _authorEmail;
private string _authorWebPage;
private string _description;
private string _id;
private string _name;
private string _version;
public string Author
{
get { return _author; }
set { _author = _author == value ? value : SaveToXml("author", null, value); }
}
public string AuthorEmail
{
get { return _authorEmail; }
set { _authorEmail = _authorEmail == value ? value : SaveToXml("author", "email", value); }
}
public string AuthorWebPage
{
get { return _authorWebPage; }
set { _authorWebPage = _authorWebPage == value ? value : SaveToXml("author", "href", value); }
}
public string Description
{
get { return _description; }
set { _description = _description == value ? value : SaveToXml("description", null, value); }
}
public string Id
{
get { return _id; }
set { _id = _id == value ? value : SaveToXml("widget", "id", value); }
}
public string Name
{
get { return _name; }
set { _name = _name == value ? value : SaveToXml("name", null, value); }
}
public string Version
{
get { return _version; }
set { _version = _version == value ? value : SaveToXml("widget", "version", value); }
}
public static Data LoadFromFile(string path)
{
var document = new XmlDocument();
document.Load(path);
var widget = document["widget"];
var data = new Data
{
_filePath = path,
// ReSharper disable PossibleNullReferenceException
Name = widget["name"].InnerText,
Id = widget.Attributes["id"].Value,
Version = widget.Attributes["version"].Value,
Description = widget["description"].InnerText,
Author = widget["author"].InnerText,
AuthorEmail = widget["author"].Attributes["email"].Value,
AuthorWebPage = widget["author"].Attributes["href"].Value
// ReSharper restore PossibleNullReferenceException
};
return data;
}
private string SaveToXml(string tag, string attribute, string value)
{
var document = new XmlDocument();
document.Load(_filePath);
var widget = document["widget"];
if (tag == "widget")
{
if (string.IsNullOrEmpty(attribute)) throw new Exception();
// ReSharper disable PossibleNullReferenceException
widget.Attributes[attribute].Value = value;
}
else
{
if (string.IsNullOrEmpty(attribute))
{
widget[tag].InnerText = value;
}
else
{
widget[tag].Attributes[attribute].Value = value;
// ReSharper restore PossibleNullReferenceException
}
}
document.Save(_filePath);
return value;
}
}
}
}<file_sep>/Cordova Manager/Controls/RenameApp.xaml.cs
using System.IO;
using System.Windows;
namespace Cordova_Manager.Controls
{
/// <summary>
/// Interaction logic for RenameApp.xaml
/// </summary>
public partial class RenameApp : Window
{
public RenameApp()
{
InitializeComponent();
}
private void ButtonSave_Click(object sender, RoutedEventArgs e)
{
var source = ((DirectoryInfo) App.AppBrowser.ListBoxApps.SelectedItem).FullName;
var destination = Properties.Settings.Default.RootFolderPath + "\\" + TextBoxName.Text;
if (Directory.Exists(destination))
{
MessageBoxResult result = MessageBox.Show(this, "Directory already exists, do you want to delete it?",
"Directory collision", MessageBoxButton.YesNo);
if (result == MessageBoxResult.Yes)
{
Directory.Delete(destination, true);
App.TextBoxConsole.Text += "\n" + "Deleted directory \"" + destination + "\"";
}
else
{
Close();
return;
}
}
Directory.Move(source, destination);
App.TextBoxConsole.Text += "\n" + "Renamed directory \"" + source + "\"" + " to \"" + destination + "\"";
App.AppBrowser.RefreshAppsList();
Close();
}
private void ButtonCancel_Click(object sender, RoutedEventArgs e)
{
Close();
}
}
}
<file_sep>/Cordova Manager/Controls/AppBrowser.xaml.cs
using System;
using System.IO;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Forms;
using Cordova_Manager.Helpers;
using Cordova_Manager.Properties;
using Helpers.IO;
using ListBox = System.Windows.Controls.ListBox;
using MessageBox = System.Windows.MessageBox;
namespace Cordova_Manager.Controls
{
/// <summary>
/// Interaction logic for AppBrowser.xaml
/// </summary>
public partial class AppBrowser
{
public AppBrowser()
{
App.AppBrowser = this;
InitializeComponent();
RefreshAppsList();
}
private void TextBox_TextChanged(object sender, TextChangedEventArgs e)
{
RefreshAppsList();
}
public void RefreshAppsList()
{
if (string.IsNullOrEmpty(Settings.Default.RootFolderPath)) return;
var directory = new DirectoryInfo(Settings.Default.RootFolderPath);
if (string.IsNullOrWhiteSpace(TextBoxSearch.Text))
{
ListBoxApps.ItemsSource = directory.GetDirectories()
.Where(subdirectory =>
Cordova.IsCordovaDirectory(subdirectory.FullName));
}
else
{
ListBoxApps.ItemsSource = directory.GetDirectories()
.Where(subdirectory =>
Cordova.IsCordovaDirectory(subdirectory.FullName) &&
subdirectory.Name.ToLower().Contains(TextBoxSearch.Text.ToLower()));
}
}
private void ButtonCreateNewProject_Click(object sender, RoutedEventArgs e)
{
var window = new CreateApp { Owner = Window.GetWindow(this) };
window.ShowDialog();
}
private void ButtonDeleteProject_Click(object sender, RoutedEventArgs e)
{
string path = ((DirectoryInfo) ListBoxApps.SelectedItem).FullName;
Window owner = Window.GetWindow(this);
if (owner == null) throw new Exception();
MessageBoxResult result = MessageBox.Show(owner, "Do you want to delete \"" + path + "\"",
"Delete Application", MessageBoxButton.YesNo);
if (result == MessageBoxResult.Yes)
{
if (Directory.Exists(path))
{
Directory.Delete(path, true);
App.TextBoxConsole.Text += "\n" + "Deleted directory \"" + path + "\"";
}
RefreshAppsList();
}
}
private void ButtonCopyProject_Click(object sender, RoutedEventArgs e)
{
var dialog = new FolderBrowserDialog();
if (DialogResult.OK == dialog.ShowDialog())
{
var directory = (DirectoryInfo) ListBoxApps.SelectedItem;
directory.Copy(dialog.SelectedPath + directory.Name);
App.TextBoxConsole.Text += "\n" + "Copied application \"" + directory.Name + "\" to \"" +
dialog.SelectedPath + "\"";
RefreshAppsList();
}
}
private void ButtonRenameProject_Click(object sender, RoutedEventArgs e)
{
var window = new RenameApp { Owner = Window.GetWindow(this) };
window.ShowDialog();
}
}
}<file_sep>/Cordova Manager/Controls/CreateApp.xaml.cs
using Cordova_Manager.Helpers;
using System;
using System.IO;
using System.Windows;
using System.Windows.Controls;
using Helpers.Windows;
namespace Cordova_Manager.Controls
{
public partial class CreateApp
{
public CreateApp()
{
InitializeComponent();
TextBoxBasePath.Text = Properties.Settings.Default.RootFolderPath + "\\";
}
private void TextBoxName_TextChanged(object sender, TextChangedEventArgs e)
{
string name = TextBoxName.Text;
TextBoxExtensionPath.Text = name;
string id = TextBoxId.Text;
if (string.IsNullOrWhiteSpace(id))
{
TextBoxId.Text = "io.cordova." + name;
}
else if (id.Contains("."))
{
int index = id.LastIndexOf(".", StringComparison.Ordinal);
TextBoxId.Text = id.Substring(0, index + 1) + name;
}
else
{
TextBoxId.Text = TextBoxId.Text + "." + name;
}
}
private void ButtonCancel_Click(object sender, RoutedEventArgs e)
{
Close();
}
private void ButtonSave_Click(object sender, RoutedEventArgs e)
{
string path = TextBoxBasePath.Text + TextBoxExtensionPath.Text;
if (Directory.Exists(path))
{
MessageBoxResult result = MessageBox.Show(this, "Directory already exists, do you want to delete it?",
"Directory collision", MessageBoxButton.YesNo);
if (result == MessageBoxResult.Yes)
{
Directory.Delete(path, true);
App.TextBoxConsole.Text += "\n" + "Deleted directory \"" + path + "\"";
}
else
{
Close();
return;
}
}
App.TextBoxConsole.Text += "\n" + CommandPrompt.Run(CordovaCommands.CreateApp(path, TextBoxName.Text, TextBoxId.Text));
App.AppBrowser.RefreshAppsList();
Close();
}
}
}<file_sep>/Helpers/IO/DirectoryInfoExtensions.cs
using System.IO;
namespace Helpers.IO
{
public static class DirectoryInfoExtensions
{
public static void Copy(this DirectoryInfo self, string destinationPath,
bool copySubDirectories = true, bool overwriteFiles = false)
{
var directory = new DirectoryInfo(self.FullName);
if (!directory.Exists) throw new DirectoryNotFoundException();
if (!Directory.Exists(destinationPath)) Directory.CreateDirectory(destinationPath);
FileInfo[] files = directory.GetFiles();
foreach (FileInfo file in files)
{
string destinationFilePath = Path.Combine(destinationPath, file.Name);
file.CopyTo(destinationFilePath, overwriteFiles);
}
if (copySubDirectories)
{
foreach (DirectoryInfo subdir in directory.GetDirectories())
{
string temppath = Path.Combine(destinationPath, subdir.Name);
subdir.Copy(temppath, true, overwriteFiles);
}
}
}
}
}<file_sep>/Helpers/Converters/IndexToBoolConverter.cs
using System;
using System.Globalization;
using System.Windows.Data;
namespace Helpers.Converters
{
public class IndexToBoolConverter : IValueConverter
{
public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
{
var index = (int) value;
return index >= 0;
}
public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
{
var enabled = (bool) value;
return enabled ? 0 : -1;
}
}
}
<file_sep>/Cordova Manager/App.xaml.cs
using System.Windows.Controls;
using Cordova_Manager.Controls;
namespace Cordova_Manager
{
public partial class App
{
public static TextBox TextBoxConsole { get; set; }
public static AppBrowser AppBrowser { get; set; }
public static AppDetails AppDetails { get; set; }
}
}<file_sep>/Cordova Manager/MainWindow.xaml.cs
using System;
using Cordova_Manager.Controls;
namespace Cordova_Manager
{
/// <summary>
/// Manager startup window.
/// </summary>
public partial class MainWindow
{
public MainWindow()
{
if (string.IsNullOrEmpty(Properties.Settings.Default.RootFolderPath))
{
Properties.Settings.Default.RootFolderPath = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
}
InitializeComponent();
App.TextBoxConsole = TextBoxConsole;
}
}
}<file_sep>/Cordova Manager/Controls/ManagerSettings.xaml.cs
using Cordova_Manager.Properties;
using System.Windows;
using System.Windows.Forms;
namespace Cordova_Manager.Controls
{
/// <summary>
/// Displays and updates the managers settings.
/// </summary>
public partial class ManagerSettings
{
// TODO Install menu
public ManagerSettings()
{
InitializeComponent();
}
private void ButtonBrowseRootDirectory_Click(object sender, RoutedEventArgs e)
{
var dialog = new FolderBrowserDialog();
if (DialogResult.OK == dialog.ShowDialog())
{
Settings.Default.RootFolderPath = dialog.SelectedPath;
Settings.Default.Save();
App.AppBrowser.RefreshAppsList();
}
}
}
}<file_sep>/Cordova Manager/Helpers/CordovaCommands.cs
namespace Cordova_Manager.Helpers
{
internal static class CordovaCommands
{
public static string CreateApp(string path,
string name = "",
string id = "io.cordova.project")
{
string processedName = string.IsNullOrWhiteSpace(name) ? "" : " " + name;
string processedUrl = string.IsNullOrWhiteSpace(id) ? "" : " " + id;
return string.Format("cordova create {0}{1}{2}", path, processedUrl, processedName);
}
}
}<file_sep>/Cordova Manager/Helpers/Cordova.cs
using System.IO;
namespace Cordova_Manager.Helpers
{
internal static class Cordova
{
public static bool IsCordovaDirectory(string path)
{
return File.Exists(path + "\\config.xml") &&
Directory.Exists(path + "\\www") &&
Directory.Exists(path + "\\plugins") &&
Directory.Exists(path + "\\platforms") &&
Directory.Exists(path + "\\hooks");
}
}
}<file_sep>/Helpers/Windows/CommandPrompt.cs
using System.Diagnostics;
namespace Helpers.Windows
{
public static class CommandPrompt
{
public static string Run(string command)
{
var startInfo = new ProcessStartInfo
{
FileName = "cmd.exe",
Arguments = "/c \"" + command + "\"",
RedirectStandardOutput = true,
UseShellExecute = false,
CreateNoWindow = true
};
var process = new Process
{
StartInfo = startInfo
};
process.Start();
return process.StandardOutput.ReadToEnd();
}
}
} | d46a35fd45ca20fb13367254ce24bdec4b2537b4 | [
"C#"
] | 12 | C# | 4lk4tr43/CordovaManager | c365aa59eb36a40568dc7abdfb96d902f1f19464 | fb3b8c4e698e96c42b62b987adb18c4117473534 | |
refs/heads/master | <repo_name>jlmorenorodriguez/Ejercicio30octubre<file_sep>/app/src/main/java/com/example/joseluis/ejercicio30octubre/MainActivity.java
package com.example.joseluis.ejercicio30octubre;
import android.content.Intent;
import android.content.SharedPreferences;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    // Form fields: name and age inputs, result label, action buttons.
    private EditText edtNombre, edtEdad;
    private TextView txtResult;
    private Button btnComprobar, btnBorrar;
    // Request codes for the two child activities (adult / minor flows).
    private final int CODE1 = 1, CODE2 = 2;
    // Backing store for the entered data and the last result.
    private SharedPreferences sp;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        edtNombre = (EditText) findViewById(R.id.edtNombre);
        edtEdad = (EditText) findViewById(R.id.edtEdad);
        txtResult = (TextView) findViewById(R.id.txtResult);
        btnComprobar = (Button) findViewById(R.id.btnComprobar);
        btnComprobar.setOnClickListener(this);
        btnBorrar = (Button) findViewById(R.id.btnBorrar);
        btnBorrar.setOnClickListener(this);
        sp = getSharedPreferences("datos", MODE_PRIVATE);
        mostrarInfo();
    }

    // Restore the saved name/age/result into the UI.
    private void mostrarInfo() {
        edtNombre.setText(sp.getString("nombre", ""));
        edtEdad.setText(sp.getString("edad", ""));
        txtResult.setText(sp.getString("result", ""));
    }

    @Override
    public void onClick(View v) {
        SharedPreferences.Editor editor = sp.edit();
        switch (v.getId()) {
            case R.id.btnComprobar:
                editor.putString("nombre", edtNombre.getText().toString());
                editor.putString("edad", edtEdad.getText().toString());
                editor.commit();
                int edad;
                try {
                    edad = Integer.parseInt(edtEdad.getText().toString());
                } catch (NumberFormatException ex) {
                    // BUG FIX: an empty or non-numeric age used to crash the app
                    // with an unhandled NumberFormatException; show the existing
                    // error message instead.
                    Toast.makeText(getApplicationContext(),
                            getResources().getString(R.string.msgError), Toast.LENGTH_LONG).show();
                    break;
                }
                if (edad > 18) {
                    Intent i = new Intent(MainActivity.this, Main2Activity.class);
                    startActivityForResult(i, CODE1);
                } else if (edad < 18) {
                    Intent i2 = new Intent(MainActivity.this, Main3Activity.class);
                    startActivityForResult(i2, CODE2);
                } else {
                    // NOTE(review): exactly 18 falls through to the error toast,
                    // matching the original behaviour -- confirm the spec.
                    Toast.makeText(getApplicationContext(),
                            getResources().getString(R.string.msgError), Toast.LENGTH_LONG).show();
                }
                break;
            case R.id.btnBorrar:
                // Wipe the stored data and refresh the UI.
                editor.clear();
                editor.commit();
                mostrarInfo();
                break;
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Persist the "result" extra handed back by either child activity,
        // then reflect whatever is stored in the label.
        if (resultCode == RESULT_OK && (requestCode == CODE1 || requestCode == CODE2)) {
            SharedPreferences.Editor editor = sp.edit();
            editor.putString("result", data.getStringExtra("result"));
            editor.commit();
        }
        txtResult.setText(sp.getString("result", ""));
    }
}
<file_sep>/app/src/main/java/com/example/joseluis/ejercicio30octubre/Main3Activity.java
package com.example.joseluis.ejercicio30octubre;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
public class Main3Activity extends AppCompatActivity implements CompoundButton.OnCheckedChangeListener {

    private CheckBox chk1, chk2, chk3, chk4, chk5, chk6;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main3);
        // Look up the six checkboxes, then register this activity as the
        // change listener for all of them in one pass.
        chk1 = (CheckBox) findViewById(R.id.chk1);
        chk2 = (CheckBox) findViewById(R.id.chk2);
        chk3 = (CheckBox) findViewById(R.id.chk3);
        chk4 = (CheckBox) findViewById(R.id.chk4);
        chk5 = (CheckBox) findViewById(R.id.chk5);
        chk6 = (CheckBox) findViewById(R.id.chk6);
        CheckBox[] boxes = {chk1, chk2, chk3, chk4, chk5, chk6};
        for (CheckBox box : boxes) {
            box.setOnCheckedChangeListener(this);
        }
    }

    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        // Toggling any of the six checkboxes finishes this activity, handing
        // the checkbox's label back to the caller as the "result" extra.
        int id = buttonView.getId();
        boolean isChoice = id == R.id.chk1 || id == R.id.chk2 || id == R.id.chk3
                || id == R.id.chk4 || id == R.id.chk5 || id == R.id.chk6;
        if (!isChoice) {
            return;
        }
        Intent result = new Intent();
        result.putExtra("result", buttonView.getText().toString());
        setResult(RESULT_OK, result);
        finish();
    }
}
| 6730ceda36ef242ff9ce7e092ead4012c80a5a7a | [
"Java"
] | 2 | Java | jlmorenorodriguez/Ejercicio30octubre | aa1d48341d9d9e8cb667dcb6d868f514933fea76 | f6c8528ee468931f3767abcab7d4e10e2719a565 | |
refs/heads/master | <file_sep>from google.appengine.ext import ndb
class ChasmConfig(ndb.Model):
    # Datastore-backed deployment configuration for Chasm.
    # current: marks the active configuration entity.
    current = ndb.BooleanProperty()
    # Google Cloud project this deployment targets.
    project_id = ndb.StringProperty()
    # Presumably the GCS bucket id used by chasm_gcs -- confirm against callers.
    bucket_id = ndb.StringProperty()
<file_sep>import logging
import os
import json
import sys
import traceback
import base64
import operator
import logging
import time
import cloudstorage as gcs
import re
import uuid
from google.cloud import bigquery
from chasm_common.simplegrok import SimpleGrok
from chasm_common.chasm_gcs import chasm_gcs
from chasm_common.chasm_ds_config import ChasmConfig
from google.cloud.bigquery import QueryJobConfig
from google.appengine.api import app_identity
from google.appengine.ext import ndb
from google.appengine.api import taskqueue
class BQAlerts(ndb.Model):
    # Definition of a scheduled BigQuery alert.
    alert_name = ndb.StringProperty()
    alert_description = ndb.StringProperty()
    # SQL text executed for this alert.
    alert_query = ndb.TextProperty()
    # How often the alert should run, in minutes.
    alert_schedule_minutes = ndb.IntegerProperty()
class BQQuerySync(ndb.Model):
    # Single boolean flag entity; presumably a mutex guarding concurrent
    # query runs -- confirm against the scheduler/worker code.
    locked = ndb.BooleanProperty()
class BQAlertData(ndb.Model):
    # One evaluation result for an alert.
    alert_id = ndb.StringProperty()
    alert_name = ndb.StringProperty()
    alert_result = ndb.StringProperty()
    # Whether a notification e-mail has gone out for this result.
    email_sent = ndb.BooleanProperty(default=False)
    # Numeric status code; 0 is the initial value -- semantics not visible
    # here, confirm against the alerting code.
    alert_status = ndb.IntegerProperty(default=0)
class chasm_bq(object):
    def __init__(self):
        # Builds a BigQuery client for the project configured in Datastore
        # (via load_defaults) and resets per-job bookkeeping.
        logging.info("Initialize chasm bq")
        self.sync, self.conf = self.load_defaults()
        # Last error message and query cost accounting for the most recent job.
        self.error = ""
        self.bytes_processed = 0
        self.bytes_billed = 0
        # Earlier revisions switched the client on a 'DEV' project id check;
        # the configured project id is now always used.
        #if 'DEV' in project_id:
        self.client = bigquery.Client(project=self.conf.project_id)
        # else:
        #     self.client = bigquery.Client(project=project_id)
        #self.bucket_name = os.environ.get('BUCKET_NAME',
        #                                  app_identity.get_default_gcs_bucket_name())
        self.load_job = None #main load job
        self.rt_load_job = None #realtime batches
        self.chsm_gcs = chasm_gcs()
def table_exists(self,client, table_reference):
"""Return if a table exists.
Args:
client (google.cloud.bigquery.client.Client):
A client to connect to the BigQuery API.
table_reference (google.cloud.bigquery.table.TableReference):
A reference to the table to look for.
Returns:
bool: ``True`` if the table exists, ``False`` otherwise.
"""
from google.cloud.exceptions import NotFound
try:
self.client.get_table(table_reference)
return True
except NotFound:
return False
# def insert_rows(self, rows_to_insert, event_type):
# dataset_ref = self.client.dataset("chasm")
# dataset = bigquery.Dataset(dataset_ref)
# tables = list(self.client.list_dataset_tables(dataset_ref))
# source_table_ref = dataset_ref.table("events_all")
# source_table = self.client.get_table(source_table_ref)
# SOURCE_SCHEMA = list(source_table.schema)
# table_ref = dataset.table("EVENTS_{}".format(event_type))
# errors = []
# if self.table_exists(self.client,table_ref):
# table = bigquery.Table(table_ref, schema=SOURCE_SCHEMA)
# errors = self.client.create_rows(table, rows_to_insert)
# else:
# table = bigquery.Table(table_ref, schema=SOURCE_SCHEMA)
# table.partitioning_type = 'DAY'
# table = self.client.create_table(table)
# errors = self.client.create_rows(table, rows_to_insert)
# if not errors:
# logging.info('Loaded {} row into {}:{}'.format(len(rows_to_insert),
# "chasm", event_type))
# else:
# logging.error('Errors:{}'.format(errors))
# def insert_batch(self,rows):
# #sort list by event type so we can do some batching on insert
# rows.sort(key=operator.itemgetter('EVENTTYPE'))
# event_type = ""
# temp_rows = []
# row_len = len(rows)
# i = 1
# for row in rows:
# if event_type == "":
# temp_rows.append(row)
# event_type = row["EVENTTYPE"]
# if i == row_len:
# self.insert_rows(temp_rows, event_type)
# del temp_rows[:]
# i = i + 1
# elif event_type != row["EVENTTYPE"]:
# self.insert_rows(temp_rows, event_type)
# del temp_rows[:]
# temp_rows.append(row)
# if i == row_len:
# self.insert_rows(temp_rows, event_type)
# del temp_rows[:]
# event_type = row["EVENTTYPE"]
# i = i + 1
# else:
# temp_rows.append(row)
# if i == row_len:
# self.insert_rows(temp_rows, event_type)
# del temp_rows[:]
# i = i + 1
def run_query(self, query,query_type='Temp'):
query_uuid = uuid.uuid1()
alert_table_name = 'alert_' + str(query_uuid)
alert_table_name = alert_table_name.replace('-','_')
table_ref = self.client.dataset('chasm').table(alert_table_name)
job_config = bigquery.QueryJobConfig()
if query_type == 'Alert':
job_config.destination = table_ref
job_config.maximumBytesBilled = 1588743680
job_config.useLegacySql = False
self.error = ""
query_job = self.client.query(query,job_config=job_config)
results = None
try:
results = query_job.result(timeout=30)
if query_job.cache_hit:
bytes_processed = 0
bytes_billed = 0
else:
bytes_processed = query_job.total_bytes_processed or 0
bytes_billed = query_job.total_bytes_billed or 0
except:
ex = sys.exc_info()
self.error = ex
logging.error(self.error)
return None, None, None, None, self.error, none
result_rows = list(results)
total_rows = results.total_rows
schema = {
'fields': [
field.to_api_repr()
for field in results.schema],
}
return schema, result_rows, bytes_processed, bytes_billed, self.error , alert_table_name
    def load_job_from_gcs(self):
        """Batch-load pending GCS log files into BigQuery.

        Loads every blob returned by get_batch() into the main `logs` table
        plus a per-batch `realtime_<uuid>` table, deleting the blobs on
        success.  On failure, inspects the load-job errors and retries with
        schema autodetection for two known error signatures.  Returns a
        human-readable status string.
        """
        logging.info("Loading job from GCS")
        files = self.chsm_gcs.get_batch()
        if not files:
            return "No Logs to Process"
        # Skip loading while a scheduled-alert run holds the query lock.
        lock = BQQuerySync.get_by_id('Default')
        if lock.locked:
            return "Query Lock Running"
        self.load_job = None
        initial_error = ''
        dataset_ref = self.client.dataset("chasm")
        job_config = bigquery.LoadJobConfig()
        job_config.source_format = 'NEWLINE_DELIMITED_JSON'
        # Set via _properties — presumably this client version lacks a public
        # schema_update_options attribute; TODO confirm against library docs.
        job_config._properties['schemaUpdateOptions'] = ['ALLOW_FIELD_ADDITION']
        job_config.write_disposition = 'WRITE_APPEND'
        dataset = bigquery.Dataset(dataset_ref)
        #Main batch job
        self.load_job = self.client.load_table_from_uri(
            files,
            dataset.table('logs'),
            job_config=job_config)
        #Realtime batch job
        batch_uuid = uuid.uuid1()
        rt_table_name = 'realtime_' + str(batch_uuid)
        rt_table_name = rt_table_name.replace('-','_')
        self.rt_load_job = self.client.load_table_from_uri(
            files,
            dataset.table(rt_table_name),
            job_config=job_config)
        try:
            logging.info("Trying initial load")
            self.load_job.result()
            self.rt_load_job.result()
            # Both loads succeeded: the source blobs are no longer needed.
            for del_file in files:
                print "Removing file {}".format(del_file)
                self.chsm_gcs.delete_log(del_file)
            return "success on initial load"
        except Exception, e:
            logging.info("Trying initial load Exception Caught : " + str(e))
            logging.info("Trying initial load Exception Caught Load Job : " + str(self.load_job.errors))
            error_type = ''
            location = ''
            initial_error = str(self.load_job.errors)
            print str(self.load_job.errors)
            if 'No such field' in str(self.load_job.errors):
                # A log introduced a field missing from the table schema:
                # retry only the offending file with schema autodetection.
                print "No such field in except"
                error_type = 'No such feield +++ '
                error_list = self.load_job.errors
                error_type = error_list[0]
                # 'location' is the gs:// URI of the file that failed to load.
                location = error_type['location']
                error_job_config = bigquery.LoadJobConfig()
                error_job_config.autodetect = True
                error_job_config.source_format = 'NEWLINE_DELIMITED_JSON'
                error_job_config._properties['schemaUpdateOptions'] = ['ALLOW_FIELD_ADDITION']
                error_job_config.write_disposition = 'WRITE_APPEND'
                dataset = bigquery.Dataset(dataset_ref)
                #Main batch job
                self.load_job = self.client.load_table_from_uri(
                    location,
                    dataset.table('logs'),
                    job_config=error_job_config)
                #Realtime batch job
                batch_uuid = uuid.uuid1()
                rt_table_name = 'realtime_' + str(batch_uuid)
                rt_table_name = rt_table_name.replace('-','_')
                self.rt_load_job = self.client.load_table_from_uri(
                    location,
                    dataset.table(rt_table_name),
                    job_config=error_job_config)
                try:
                    self.load_job.result()
                    self.rt_load_job.result()
                    self.chsm_gcs.delete_log(location)
                    return "New field found in log {}".format(location)
                except:
                    return str(self.load_job.errors) + initial_error + location
            elif 'No schema specified on job or table.' in str(self.load_job.errors):
                # Target table/schema missing: retry the whole batch with
                # autodetection so the table and schema get created.
                error_job_config = bigquery.LoadJobConfig()
                error_job_config.autodetect = True
                error_job_config.source_format = 'NEWLINE_DELIMITED_JSON'
                error_job_config._properties['schemaUpdateOptions'] = ['ALLOW_FIELD_ADDITION']
                error_job_config.write_disposition = 'WRITE_APPEND'
                dataset = bigquery.Dataset(dataset_ref)
                #Main batch job
                self.load_job = self.client.load_table_from_uri(
                    files,
                    dataset.table('logs'),
                    job_config=error_job_config)
                #Realtime batch job
                batch_uuid = uuid.uuid1()
                rt_table_name = 'realtime_' + str(batch_uuid)
                rt_table_name = rt_table_name.replace('-','_')
                self.rt_load_job = self.client.load_table_from_uri(
                    files,
                    dataset.table(rt_table_name),
                    job_config=error_job_config)
                try:
                    self.load_job.result()
                    self.rt_load_job.result()
                    #self.load_job.result()
                    for del_file in files:
                        self.chsm_gcs.delete_log(del_file)
                    return "No schema specified for job of table"
                except:
                    if 'No such field' in str(self.load_job.errors):
                        # Same new-field case as above, nested inside the
                        # schema retry: retry just the offending file.
                        print "No Such Field Type Found"
                        error_type = 'No such feield +++ '
                        error_list = self.load_job.errors
                        error_type = error_list[0]
                        location = error_type['location']
                        error_job_config = bigquery.LoadJobConfig()
                        error_job_config.autodetect = True
                        error_job_config.source_format = 'NEWLINE_DELIMITED_JSON'
                        error_job_config._properties['schemaUpdateOptions'] = ['ALLOW_FIELD_ADDITION']
                        error_job_config.write_disposition = 'WRITE_APPEND'
                        dataset = bigquery.Dataset(dataset_ref)
                        #Main batch job
                        self.load_job = self.client.load_table_from_uri(
                            location,
                            dataset.table('logs'),
                            job_config=error_job_config)
                        #Realtime batch job
                        batch_uuid = uuid.uuid1()
                        rt_table_name = 'realtime_' + str(batch_uuid)
                        rt_table_name = rt_table_name.replace('-','_')
                        self.rt_load_job = self.client.load_table_from_uri(
                            location,
                            dataset.table(rt_table_name),
                            job_config=error_job_config)
                        try:
                            self.load_job.result()
                            self.rt_load_job.result()
                            self.chsm_gcs.delete_log(location)
                            return "Field added successfully"
                        except:
                            return str(self.load_job.errors) + initial_error + location
                    # NOTE(review): if the retry fails with any other error the
                    # function falls through and implicitly returns None.
            else:
                # return str(self.load_job.errors) + location
                return str(self.load_job.errors) + location
    def run_scheduled_alerts(self,scheduled_time):
        """Run every stored alert query under the global query lock.

        For each BQAlerts entity: run its query into an `alert_<uuid>` table;
        if rows matched, record a BQAlertData hit, otherwise delete the empty
        result table.  Afterwards, garbage-collect all `realtime_*` tables.
        `scheduled_time` is accepted but not used in the visible code.
        """
        logging.info("Run scheduled alerts")
        lock = self.sync #BQQuerySync.get_by_id('Default')
        logging.info(lock.locked)
        if lock.locked:
            logging.info("Query Locked - Scheduled alerts will not run")
            return "Query Lock Running"
        #lock = BQQuerySync.get_by_id('Default')
        # Take the lock so GCS batch loads do not run concurrently.
        lock.locked = True
        lock.put()
        logging.info(str(lock))
        q = BQAlerts.query().fetch()
        #table = self.list_tables()
        try:
            for item in q:
                logging.info(item)
                schema, result_rows, bytes_processed, bytes_billed, error, alert_table = self.run_query(item.alert_query,query_type='Alert')
                #logging.info(schema)
                logging.info(error)
                #logging.info(result_rows)
                #Save alert
                if result_rows:
                    logging.info("Result rows found")
                    #logging.info(result_rows)
                    alert = BQAlertData()
                    alert.alert_id=item.key.urlsafe()
                    alert.alert_name=item.alert_name
                    alert.alert_result =alert_table
                    alert.email_sent=False
                    alert.put()
                else:
                    # No hits: drop the empty destination table.
                    logging.info("No result rows found")
                    logging.info(alert_table)
                    table_ref = self.client.dataset('chasm').table(alert_table)
                    self.client.delete_table(table_ref)
            #tables = self.get_tables()
            logging.info("Display tables")
            # Enumerate dataset tables and delete the transient realtime_* ones.
            table_query = "SELECT table_id FROM `" + self.conf.project_id +".chasm.__TABLES_SUMMARY__`"
            schema, result_rows, bytes_processed, bytes_billed, error, alert_table = self.run_query(table_query)
            for table in result_rows:
                if 'realtime_' in table['table_id']:
                    logging.info(table['table_id'])
                    table_ref = self.client.dataset('chasm').table(table['table_id'])
                    self.client.delete_table(table_ref)
            lock = BQQuerySync.get_by_id('Default')
            lock.locked = False
            lock.put()
        except Exception, e:
            # Always release the lock, even when a query run fails.
            logging.error("Error occurred during query execution" + str(e))
            lock = BQQuerySync.get_by_id('Default')
            lock.locked = False
            lock.put()
def get_alerts(self):
query = BQAlertData.query(BQAlertData.alert_status == 0).fetch()
return query
#def get_table_data_by_name(self,table_name):
def update_generated_alert_by_key(self, alert_status, alert_key):
alert_key = ndb.Key(urlsafe=alert_key)
alert = alert_key.get()
logging.info(alert)
logging.info("alert status")
logging.info(alert_status)
alert.alert_status = alert_status
key = alert.put()
return key.urlsafe()
def get_generated_alert_by_key(self,alert_key):
alert_key = ndb.Key(urlsafe=alert_key)
alert = alert_key.get()
destination_table_ref = alert.alert_result
table_ref = self.client.dataset('chasm').table(destination_table_ref)
table = self.client.get_table(table_ref)
rows = self.client.list_rows(table)
schema = {
'fields': [
field.to_api_repr()
for field in rows.schema],
}
return alert,rows,schema
def add_scheduled_alert(self,alert_name,alert_description,
alert_query,alert_minutes):
alert = BQAlerts(alert_name =alert_name ,
alert_description = alert_description,
alert_query = alert_query,
alert_schedule_minutes= alert_minutes)
alert_key = alert.put()
return alert_key.urlsafe()
def update_scheduled_alert(self,alert_name,alert_description,
alert_query,alert_minutes,alert_key):
alert_key = ndb.Key(urlsafe=alert_key)
alert = alert_key.get()
alert.alert_name =alert_name
alert.alert_description = alert_description
alert.alert_query = alert_query
alert.alert_schedule_minutes= alert_minutes
alert_key = alert.put()
return alert_key.urlsafe()
def delete_scheduled_alert(self,alert_key):
alert_key = ndb.Key(urlsafe=alert_key)
alert = alert_key.get()
alert.key.delete()
def get_scheduled_alert(self,alert_key):
alert_key = ndb.Key(urlsafe=alert_key)
alert = alert_key.get()
return alert.alert_name, alert.alert_description,alert.alert_query
def get_scheduled_alerts(self):
query = BQAlerts.query().fetch()
return query
    def load_defaults(self):
        """Fetch (or lazily create) the Default lock and config entities.

        Returns (BQQuerySync, ChasmConfig).  The TEMP_* values are only used
        the first time the entities are created.
        """
        keyname = "Default"
        sync = BQQuerySync.get_or_insert(keyname, locked=False)
        conf = ChasmConfig.get_or_insert(keyname, project_id="TEMP_PROJECT", bucket_id="TEMP_BUCKET")
        return sync, conf
import os
import json
import sys
import traceback
import base64
import operator
import logging
import uuid
from google.appengine.api import app_identity
from chasm_common.chasm_ds_config import ChasmConfig
from google.cloud import storage
from google.appengine.api import app_identity
class chasm_gcs(object):
    """Thin wrapper around Google Cloud Storage for reading and writing
    newline-delimited JSON log batches in the configured bucket."""

    def __init__(self):
        logging.info("Initialize chasm bq")
        # Datastore-backed configuration holding project and bucket ids.
        self.conf = self.load_defaults()
        self.error = ""
        self.bytes_processed = 0
        self.bytes_billed = 0
        self.client = storage.Client(project=self.conf.project_id)
        # print() call form works identically on Python 2 for single args.
        print("bucket id")
        print(self.conf.bucket_id)
        self.bucket = self.client.bucket(self.conf.bucket_id)
        print("bucket")
        print(self.bucket)

    def delete_log(self, log_uri):
        """Delete the blob named by ``log_uri`` (a gs:// URI).

        Best effort: failures are printed and swallowed so a missing blob
        never aborts a batch cleanup.
        """
        try:
            print("deleting location {}".format(log_uri))
            # The blob name is the last path component of the gs:// URI.
            blob_name = log_uri.rsplit('/', 1)[-1]
            print("deleting blob {}".format(blob_name))
            print("Current blob {}".format(self.conf.bucket_id))
            blob = self.bucket.blob(blob_name)
            blob.delete()
        except Exception:
            print("error occured deleting {}".format(sys.exc_info()[0]))

    def get_batch(self):
        """Return up to 50 blob URIs ('gs://bucket/name') from the bucket."""
        bucket = '/' + self.conf.bucket_id
        print(bucket)
        # Fix: removed the unused `page_size` local from the original.
        blobs = self.bucket.list_blobs(max_results=50)
        return ['gs://' + self.conf.bucket_id + '/' + stat.name for stat in blobs]

    def write_logs_file(self, log_data):
        """Serialize ``log_data`` (iterable of dicts) as newline-delimited
        JSON and upload it to a new uniquely named blob.

        Returns the blob name on success; returns None after printing the
        exception on failure (matching the original best-effort contract).
        """
        try:
            file_uuid = uuid.uuid1()
            filename = 'events_' + str(file_uuid)
            blob = self.bucket.blob(filename)
            # Fix: build the payload with a list + join; the original += loop
            # was O(n^2) in the total payload size.
            parts = []
            for line in log_data:
                parts.append(json.dumps(line))
                parts.append('\n')
            blob.upload_from_string(
                ''.join(parts),
                content_type='application/json; charset=UTF-8')
            return filename
        except Exception:
            exc_tuple = sys.exc_info()
            print(str(exc_tuple))

    def load_defaults(self):
        """Fetch (or lazily create) the Default ChasmConfig entity."""
        keyname = "Default"
        # Fix: also supply a bucket_id default, matching chasm_bq.load_defaults,
        # so a first-run insert never creates a config with bucket_id=None.
        s = ChasmConfig.get_or_insert(keyname, project_id="TEMP_PROJECT",
                                      bucket_id="TEMP_BUCKET")
        return s
import os
import requests
import time
from collections import defaultdict
from google.appengine.api import app_identity
from google.appengine.ext import ndb
from requests_toolbelt.adapters import appengine
appengine.monkeypatch()
class Pattern(ndb.Model):
    """A named grok pattern stored in Datastore (loaded by SimpleGrok)."""
    # Event-type name, e.g. "SYSMON_PROCESS_CREATE".
    pattern_name = ndb.StringProperty()
    # Grok pattern text containing %{TYPE:NAME} placeholders.
    pattern_string = ndb.StringProperty()
class SimpleGrok(object):
    """Grok for this app return Dict of field names and data.

    Patterns contain ``%{TYPE:NAME}`` placeholders which are translated to
    named regex capture groups; ``%{TYPE:EMPTY}`` fragments match input but
    their captures are discarded.
    """
    # Supported grok "types" and the regex fragment each expands to.
    # Fix: the original dict listed 'WORD' twice (r'\w+' then r'\b\w+\b');
    # only the second entry ever took effect, so the dead first one is removed.
    types = {
        'NUMBER': r'\d+',
        'GREEDYDATA': r'.*',
        'WINPATH': r'(?:[A-Za-z]+:|\\)(?:\\[^\\?*]*)+',
        'POSINT': r'\b(?:[1-9][0-9]*)\b',
        'NONNEGINT': r'\b(?:[0-9]+)\b',
        'WORD': r'\b\w+\b',
        'NOTSPACE': r'\S+',
        'SPACE': r'\s*',
        'DATA': r'.*?',
        'INT': r'(?:[+-]?(?:[0-9]+))',
        # todo: extend me
    }

    def __init__(self):
        # print() call form is identical to the print statement for one arg,
        # and keeps the module parseable on Python 3.
        print("loading grok patterns")
        #print "grok project id {}".format(project_id)
        self.grok_patterns = defaultdict(list)
        self.temp_patterns = defaultdict(list)
        self.load_patterns()

    def grokker(self, m):
        """re.sub callback: turn %{TYPE:NAME} into a named capture group."""
        return "(?P<" + m.group(2) + ">" + self.types[m.group(1)] + ")"

    def clean_empty(self, m):
        """re.sub callback: turn %{TYPE:EMPTY} into a bare, uncaptured fragment."""
        return self.types[m.group(1)]

    def compile(self, pat):
        """Translate a grok pattern string into a plain regex string."""
        temp_reg = re.sub(r'%{(\w+):EMPTY}', self.clean_empty, pat)
        reg = re.sub(r'%{(\w+):(\w+)}', self.grokker, temp_reg)
        return reg

    def get_field_dict(self, element):
        """Run through all parser and return a dict of values"""
        event_fields = {}
        event_fields["EVENTTYPE"] = "NONE"
        event_fields["FULLEVENT"] = element
        # .items() behaves the same as the original .iteritems() here and
        # works on both Python 2 and 3.
        for key, list_item in self.grok_patterns.items():
            for reg in list_item:
                compiled_reg = self.compile(reg)
                match_obj = re.search(compiled_reg, element)
                if match_obj is not None:
                    my_dict = match_obj.groupdict()
                    for k, v in my_dict.items():
                        if 'EMPTY' not in k:
                            event_fields[k] = v
                    event_fields["EVENTTYPE"] = key
                    # NOTE(review): this only breaks the inner loop, so a later
                    # pattern group may still match and overwrite EVENTTYPE —
                    # preserved from the original; confirm if intentional.
                    break
        return event_fields

    def add_pattern(self, pattern_name, pattern):
        """Register an in-memory pattern under ``pattern_name``."""
        self.grok_patterns[pattern_name].append(pattern)

    def remove_pattern(self, pattern_name):
        """Drop every pattern registered under ``pattern_name``."""
        # Fix: membership test directly on the dict (no .keys() list copy).
        if pattern_name in self.grok_patterns:
            del self.grok_patterns[pattern_name]

    def get_patterns(self):
        """Return the live pattern dict (name -> list of pattern strings)."""
        return self.grok_patterns

    def load_patterns(self):
        """Load patterns from Datastore, seeding the default Sysmon set on
        first run (when no Pattern entities exist yet)."""
        print("Loading patterns")
        fetched_entities = Pattern.query()
        i = 0
        for pat in fetched_entities:
            print("Pattern found")
            self.grok_patterns[pat.pattern_name].append(pat.pattern_string)
            i = i + 1
        if i < 1:
            # First run: persist the default Sysmon patterns, then re-read them.
            print("Putting new pattern")
            self.temp_patterns["SYSMON_PROCESS_TERMINATED"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 5 P%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} Proc%{GREEDYDATA:EMPTY} Image: %{WINPATH:IMAGE}")
            self.temp_patterns["SYSMON_NETWORK_CONNECTION"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 3 Net%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} P%{GREEDYDATA:EMPTY} Image: %{GREEDYDATA:IMAGE} User: %{GREEDYDATA:USER} Protocol: %{GREEDYDATA:PROTOCOL} Initiated: %{GREEDYDATA:INITIATED} SourceIsIpv6: %{GREEDYDATA:SRCISIPV6} SourceIp: %{GREEDYDATA:SRCIP} SourceHostname: %{GREEDYDATA:SRCHOSTNAME}SourcePort: %{INT:SRCPORT} SourcePortName: %{GREEDYDATA:SRCPORTNAME} DestinationIsIpv6: %{GREEDYDATA:DSTISIPV6} DestinationIp: %{GREEDYDATA:DSTIP} DestinationHostname: %{GREEDYDATA:DSTHOSTNAME} DestinationPort: %{GREEDYDATA:DSTPORT} DestinationPortName: %{GREEDYDATA:DSTPORTNAME}")
            self.temp_patterns["SYSMON_DRIVER_LOADED"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 6 D%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ImageLoaded: %{WINPATH:IMAGELOADED} Hashes: %{GREEDYDATA:HASHES} Signed: %{WORD:SIGNED} Signature: %{GREEDYDATA:SIGNATURE}")
            self.temp_patterns["SYSMON_FILE_CREATION_TIME"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 2 F%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} P%{GREEDYDATA:EMPTY} Image: %{WINPATH:IMAGE} TargetFilename: %{WINPATH:TARGETFILENAME} CreationUtcTime: %{GREEDYDATA:CREATIONUTCTIME} PreviousCreationUtcTime: %{GREEDYDATA:PREVCREATIONTIME}")
            self.temp_patterns["SYSMON_PROCESS_CREATE"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 1 P%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} P%{GREEDYDATA:EMPTY} Image: %{WINPATH:IMAGE} CommandLine: %{GREEDYDATA:COMMANDLINE} CurrentDirectory: %{WINPATH:CURRENTDIRECTORY} User: %{GREEDYDATA:USER} LogonGuid: %{GREEDYDATA:LOGONGUID} Log%{GREEDYDATA:EMPTY} Hashes: %{GREEDYDATA:HASHES} ParentProcessGuid: %{GREEDYDATA:PARENTPROCESSGUID} Pare%{GREEDYDATA:EMPTY} ParentCommandLine: %{GREEDYDATA:PARENTCOMMANDLINE}")
            self.temp_patterns["SYSMON_PROCESS_CREATE"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 1 P%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} P%{GREEDYDATA:EMPTY} Image: %{WINPATH:IMAGE} CommandLine: %{GREEDYDATA:COMMANDLINE} CurrentDirectory: %{WINPATH:CURRENTDIRECTORY} User: %{GREEDYDATA:USER} LogonGuid: %{GREEDYDATA:LOGONGUID} Log%{GREEDYDATA:EMPTY} Hashes: %{GREEDYDATA:HASHES} ParentProcessGuid: %{GREEDYDATA:PARENTPROCESSGUID} Pare%{GREEDYDATA:EMPTY} ParentCommandLine: %{GREEDYDATA:PARENTCOMMANDLINE}")
            self.temp_patterns["SYSMON_PROCESS_CREATE"].append("EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 1 Process Create: UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} ProcessId:%{GREEDYDATA:EMPTY} Image: %{GREEDYDATA:IMAGE} CommandLine: %{GREEDYDATA:COMMANDLINE} CurrentDirectory: %{GREEDYDATA:EMPTY} User: %{GREEDYDATA:USER} LogonGuid:%{GREEDYDATA:EMPTY} Hashes: %{GREEDYDATA:HASHES} ParentProcessGuid: %{GREEDYDATA:PARENTPROCESSGUID} ParentProcessId:%{GREEDYDATA:EMPTY} ParentImage: %{GREEDYDATA:PARENTIMAGE} ParentCommandLine: %{GREEDYDATA:PARENTCOMMANDLINE}")
            for key, list_item in self.temp_patterns.items():
                for reg in list_item:
                    new_pat = Pattern()
                    new_pat.pattern_name = key
                    new_pat.pattern_string = reg
                    new_pat.put()
            # Crude wait for Datastore eventual consistency before re-query —
            # preserved from the original.
            time.sleep(3)
            fetched_entities = Pattern.query()
            for pat in fetched_entities:
                self.grok_patterns[pat.pattern_name].append(pat.pattern_string)
<file_sep>GoogleAppEngineCloudStorageClient==1.9.22.1
google-cloud-bigquery==0.30.0
requests-toolbelt
flask-bootstrap==3.3.7.1
flask-wtf==0.14.2
<file_sep># CHASM
# Learning Project
Creating an effective Endpoint Detection Platform using Google Cloud IoT, App Engine Standard and BigQuery to analyze security events at scale. Dealing with the volume of events from even my home lab had become an annoyance, so I began looking for a "cloud"-based solution to free me from managing hardware while still allowing me to run queries over billions of events and log entries on a budget.
# Goals and Constraints
* Learn to program (Python/C#/GO).
* Run on GCP free tier.
* Fully managed solution so analysts can focus on detection.
* 5 Minute basic setup.
# Update 07/26/2018
The current version parses 5 basic Sysmon events from a C# Windows client. Events are sent securely over Cloud IoT and pushed to an App Engine Standard instance, where they are written to Cloud Datastore and batched into BigQuery. Basic event alerting is scheduled with App Engine standard cron.
# Next Steps
* Review Python code and understand possible insecure coding practices.
* Automate the CHASM client install as a service, and certificate creation for Cloud IoT.
* Work on a better solution so alert queries are isolated to new events only to minimize Big Query billing.
* Build basic documentation if other users can benefit from the solution.
<file_sep>from google.appengine.ext import vendor
import os
# Add any libraries installed in the "lib" folder.
vendor.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'lib'))
import sys
# True when running under the local development server (SERVER_SOFTWARE starts
# with "Development") — NOTE(review): the name `on_appengine` is misleading;
# on production App Engine this is False.
on_appengine = os.environ.get('SERVER_SOFTWARE','').startswith('Development')
if on_appengine and os.name == 'nt':
    # Workaround for Windows dev-server environments that misbehave when
    # sys.platform reports Windows — TODO confirm which library this targets.
    sys.platform = "Not Windows"
import os
import main
import unittest
import tempfile
import json
import base64
from chasm_common.simplegrok import SimpleGrok
from chasm_common.chasm_ds_config import ChasmConfig
from google.appengine.ext import ndb
class MainTestCase(unittest.TestCase):
    """End-to-end tests for the Flask app using the App Engine testbed stubs."""
    def setUp(self):
        #self.db_fd, flaskr.app.config['DATABASE'] = tempfile.mkstemp()
        # First, create an instance of the Testbed class.
        # Then activate the testbed, which prepares the service stubs for use.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_all_stubs()
        main.app.config['WTF_CSRF_ENABLED'] = False
        main.app.testing = True
        self.app = main.app.test_client()
        ndb.get_context().clear_cache()
        # Seed the Default config entity so clients pick up test project/bucket.
        s = ChasmConfig.get_or_insert("Default",
            project_id=os.environ.get('APPLICATION_ID'),
            bucket_id=os.environ.get('BUCKET_NAME'))
        #with main.app.app_context():
        #    flaskr.init_db()
    def tearDown(self):
        # Don't forget to deactivate the testbed after the tests are
        # completed. If the testbed is not deactivated, the original
        # stubs will not be restored.
        self.testbed.deactivate()
    def test_hello(self):
        """Basic home page"""
        rv = self.app.get('/')
        #print rv.data
        assert b'stuff' in rv.get_data()
    def test_pubsubmessages(self):
        """Testing pubsub message"""
        # NOTE(review): the base64 payload below appears redacted ("<KEY>")
        # in this copy of the file; restore a real payload before running.
        rv = self.app.post('/_ah/push-handlers/pushsub',
            data=json.dumps({"message":{"data":"<KEY>
            "attributes":{"projectId":"project","deviceId":"LAPTOP1","deviceNumId":"11111111111111","deviceRegistryId":"events","deviceRegistryLocation":"us-central1","subFolder":""},"message_id":"22520556751004","messageId":"22520556751004","publish_time":"2018-01-12T22:54:16.500Z","publishTime":"2018-01-12T22:54:16.500Z"},"subscription":"projects/sub/subscriptions/push"}
            ),follow_redirects=True)
        print rv.data
        assert b'Success 4' in rv.data
    def test_grok_pattern(self):
        """Testing grok pattern testing form"""
        rv = self.app.post('/grok_test', data=dict(
            event_text='EventType: WCCLogSource: Microsoft-Windows-Sysmon/Operational Source: DESKTOP-3L8ABIH EventID: 1 Process Create: UtcTime: 2018-02-08 23:46:10.129 ProcessGuid: {EF342AE3-E142-5A7C-0000-001082982B33} ProcessId: 18648 Image: C:\Program Files\Docker\Docker\resources\com.docker.9pdb.exe CommandLine: "C:\Program Files\Docker\Docker\Resources\com.docker.9pdb.exe" set com.docker.driver.amd64-linux/slirp/dns="nameserver 192.168.6.1 nameserver 172.16.58.3 nameserver 172.16.31.10 search hil-grvwyhx.slc.wayport.net" CurrentDirectory: C:\WINDOWS\system32\ User: NT AUTHORITY\SYSTEM LogonGuid: {EF342AE3-AF49-5A73-0000-0020E7030000} LogonId: 0x3E7 TerminalSessionId: 0 IntegrityLevel: System Hashes: SHA1=E5A302480361BCF66F34EB933BD599715A7A50D9 ParentProcessGuid: {EF342AE3-AF4C-5A73-0000-001041870200} ParentProcessId: 3404 ParentImage: C:\Program Files\Docker\Docker\com.docker.service ParentCommandLine: "C:\Program Files\Docker\Docker\com.docker.service"',
            pattern_text='EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 1 Process Create: UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} ProcessId:%{GREEDYDATA:EMPTY} Image: %{GREEDYDATA:IMAGE} CommandLine: %{GREEDYDATA:COMMANDLINE} CurrentDirectory: %{GREEDYDATA:EMPTY} User: %{GREEDYDATA:USER} LogonGuid:%{GREEDYDATA:EMPTY} Hashes: %{GREEDYDATA:HASHES} ParentProcessGuid: %{GREEDYDATA:PARENTPROCESSGUID} ParentProcessId:%{GREEDYDATA:EMPTY} ParentImage: %{GREEDYDATA:PARENTIMAGE} ParentCommandLine: %{GREEDYDATA:PARENTCOMMANDLINE}'
        ), follow_redirects=True)
        assert b'<h4> EVENTTYPE </h4> TEST_PARSER' in rv.data
        assert b'<h4> IMAGE </h4> C:\Program Files\Docker\Docker\resources\com.docker.9pdb.exe' in rv.data
        assert b'<h4> PARENTPROCESSGUID </h4> {EF342AE3-AF4C-5A73-0000-001041870200}' in rv.data
        assert b'<h4> UTCTIME </h4> 2018-02-08 23:46:10.129' in rv.data
        assert b'<h4> USER </h4> NT AUTHORITY\SYSTEM' in rv.data
        assert b'<h4> PROCESSGUID </h4> {EF342AE3-E142-5A7C-0000-001082982B33}' in rv.data
        assert b'<h4> SOURCE </h4> DESKTOP-3L8ABIH' in rv.data
        assert b'<h4> EVENTSOURCE </h4> Microsoft-Windows-Sysmon/Operational' in rv.data
        assert b'<h4> EVENTSOURCE </h4> Microsoft-Windows-Sysmon/Operational' in rv.data
    def test_grok_default_patterns(self):
        """Testing parsers"""
        # NOTE(review): the expected string below embeds unescaped single
        # quotes inside a single-quoted literal (around [u'EventType...']),
        # which cannot parse as Python — needs escaping or double quotes.
        rv = self.app.get('/grok_parsers' , follow_redirects=True)
        assert b'<div>Current Patterns<br>' in rv.data
        assert b'<h4> SYSMON_NETWORK_CONNECTION [u'EventType: WCCLogSource: %{GREEDYDATA:EVENTSOURCE} Source: %{GREEDYDATA:SOURCE} EventID: 3 Net%{GREEDYDATA:EMPTY} UtcTime: %{GREEDYDATA:UTCTIME} ProcessGuid: %{GREEDYDATA:PROCESSGUID} P%{GREEDYDATA:EMPTY} Image: %{GREEDYDATA:IMAGE} User: %{GREEDYDATA:USER} Protocol: %{GREEDYDATA:PROTOCOL} Initiated: %{GREEDYDATA:INITIATED} SourceIsIpv6: %{GREEDYDATA:SRCISIPV6} SourceIp: %{GREEDYDATA:SRCIP} SourceHostname: %{GREEDYDATA:SRCHOSTNAME}SourcePort: %{INT:SRCPORT} SourcePortName: %{GREEDYDATA:SRCPORTNAME} DestinationIsIpv6: %{GREEDYDATA:DSTISIPV6} DestinationIp: %{GREEDYDATA:DSTIP} DestinationHostname: %{GREEDYDATA:DSTHOSTNAME} DestinationPort: %{GREEDYDATA:DSTPORT} DestinationPortName: %{GREEDYDATA:DSTPORTNAME}']</h4>' in rv.data
    def test_big_query(self):
        """Testing query from bigdata"""
        rv = self.app.post('/query_main', data=dict(
            query_text="""SELECT * FROM `chasm.logs*` where
            REGEXP_CONTAINS(fullevent,r"EF342AE3-060D-5A34-0000-0010328BDE01")
            limit 10""",
        ), follow_redirects=True)
        assert b'<td>{EF342AE3-060D-5A34-0000-0010328BDE01}' in rv.data
    def test_add_alert_big_query(self):
        """Testing add a big query alert."""
        rv1 = self.app.post('/add_alert_big_query', data=dict(
            alert_name="TESTERNAME",
            alert_description="TESTERDESCRIPTION",
            alert_query="SELECT",
            submit="submit"
        ), follow_redirects=True)
        rv = self.app.get('/add_alert_big_query', follow_redirects=True)
        assert b'">TESTERNAME</a>' in rv.data
    def test_alert_query(self):
        """Testing running bigquery alert test."""
        rv1 = self.app.post('/add_alert_big_query', data=dict(
            alert_name="TESTER",
            alert_description="TESTERDESCRIPTION",
            alert_query="SELECT FULLEVENT FROM `chasm.logs` limit 10",
            submit="submit"
        ), follow_redirects=True)
        rv = self.app.get('/add_alert_big_query', follow_redirects=True)
        assert b'">TESTER</a>' in rv.data
        # Trigger the cron endpoint so the alert actually runs, then verify
        # it shows up on the alerts page.
        rv = self.app.get('/fifteen_minute_cron', follow_redirects=True)
        rv = self.app.get('/view_alerts', follow_redirects=True)
        assert b'<td>TESTER</td>' in rv.data
if __name__ == '__main__':
unittest.main()<file_sep>class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
class TestingConfig(Config):
    # Flask configuration used by the unit tests: enables TESTING mode and
    # disables CSRF so forms can be posted without a token.
    TESTING = True
    WTF_CSRF_ENABLED = False
import logging
import os
import json
import sys
from flask import Flask,flash, request,redirect, render_template
import base64
from chasm_common.simplegrok import SimpleGrok
from chasm_common.chasm_bq import chasm_bq
from chasm_common.chasm_gcs import chasm_gcs
from google.appengine.api import app_identity
from flask_bootstrap import Bootstrap
from google.appengine.ext import ndb
import cloudstorage as gcs
from google.appengine.api import app_identity
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField,TextAreaField,SelectField
from wtforms.validators import DataRequired
app = Flask(__name__)
Bootstrap(app)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
s_grok = None
bq_chasm_client = None
chasm_gcs_client = None
PROJECT_ID = None
class AlertsDisplayForm(FlaskForm):
    """Form on the alert-detail page for resolving a generated alert."""
    # Status values mirror BQAlertData.alert_status; '2' is intentionally
    # absent — TODO confirm whether a status was removed or never existed.
    alert_status = SelectField(choices=[('0', 'New'),
                                        ('1', 'False Positive'),
                                        ('3', 'Attack')])
    submit = SubmitField('Resolve')
    cancel = SubmitField('Cancel')
class AlertsForm(FlaskForm):
    """Form for creating, testing, updating, and deleting scheduled alerts."""
    alert_name = StringField('Alert Name', validators=[DataRequired()])
    alert_description = StringField('Alert Description', validators=[DataRequired()])
    alert_query = TextAreaField('Alert Query', validators=[DataRequired()])
    #remember_me = BooleanField('Remember Me')
    # Multiple submit buttons — the handler checks which one carries data.
    test_alert = SubmitField('Test Alert Query')
    submit = SubmitField('Add Alert')
    update = SubmitField('Update Alert')
    delete = SubmitField('Delete Alert')
def setup_app(app):
    """ Initialize Parser """
    # Resolves the global PROJECT_ID from the environment at import time.
    #global s_grok
    #global bq_chasm_client
    global PROJECT_ID
    PROJECT_ID = "TEMP"
    # NOTE(review): despite its name, `on_appengine` is True on the LOCAL
    # development server (SERVER_SOFTWARE starts with "Development").
    on_appengine = os.environ.get('SERVER_SOFTWARE', '').startswith('Development')
    print "Initialize Flask"
    if not on_appengine:
        print "not on appengine"
        PROJECT_ID = os.getenv('APPLICATION_ID')
    else:
        print "Looking for google cloud project"
        PROJECT_ID = os.environ.get('GOOGLE_CLOUD_PROJECT')
    if PROJECT_ID is None:
        PROJECT_ID = "DEV"
    # APPLICATION_ID may carry a partition prefix like "s~project"; strip it.
    if 's~' in PROJECT_ID:
        PROJECT_ID = PROJECT_ID.replace('s~','')
setup_app(app)
@app.route('/_ah/push-handlers/pushsub',methods=['GET','POST'])
def pubsub_push():
    """Pub/Sub push endpoint: parse each log line with grok and write the
    batch to GCS as newline-delimited JSON.  Returns "Success <n>"."""
    global s_grok
    global bq_chasm_client
    global chasm_gcs_client
    rows = []
    # Lazily construct the heavyweight clients once per process.
    if s_grok is None:
        s_grok = SimpleGrok()
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    if chasm_gcs_client is None:
        chasm_gcs_client = chasm_gcs()
    page_data = request.data.decode('utf-8')
    try:
        # Pub/Sub push envelope: message.data is a base64-encoded JSON list
        # of raw log strings.
        envelope = json.loads(request.data.decode('utf-8'))
        base64_payload = base64.b64decode(envelope['message']['data'])
        data = json.loads(base64_payload)
        for log in data:
            event_fields = s_grok.get_field_dict(log)
            rows.append(event_fields)
        chasm_gcs_client.write_logs_file(rows)
    except:
        # Best effort: a malformed push must not 500 (Pub/Sub would retry forever).
        exc_tuple = sys.exc_info()
        print str(exc_tuple)
    return "Success {}".format(len(rows))
@app.route('/manage_alert',methods=['GET','POST'])
def manage_alert():
    """Show one generated alert (rows from its BigQuery result table) and
    let the analyst set its resolution status."""
    global s_grok
    global bq_chasm_client
    global chasm_gcs_client
    if s_grok is None:
        s_grok = SimpleGrok()
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    if chasm_gcs_client is None:
        chasm_gcs_client = chasm_gcs()
    # urlsafe ndb key of the BQAlertData entity, passed as ?id=...
    alert_key = request.args.get('id')
    alert, rows, schema = bq_chasm_client.get_generated_alert_by_key(alert_key)
    form = AlertsDisplayForm()
    if form.validate_on_submit():
        if form.submit.data:
            value = form.alert_status.data
            bq_chasm_client.update_generated_alert_by_key(int(value),alert_key)
            return redirect('/view_alerts',code=302)
    else:
        # GET (or invalid POST): pre-select the entity's current status.
        form.alert_status.data = str(alert.alert_status)
    return render_template('bq_manage_alert.html',alert=alert,form=form,rows=rows,schema=schema)
@app.route('/view_alerts',methods=['GET','POST'])
def view_alerts():
    """List all unresolved generated alerts."""
    global s_grok
    global bq_chasm_client
    global chasm_gcs_client
    if s_grok is None:
        s_grok = SimpleGrok()
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    if chasm_gcs_client is None:
        chasm_gcs_client = chasm_gcs()
    alerts = bq_chasm_client.get_alerts()
    return render_template('bq_display_alerts.html',alerts=alerts)
@app.route('/cron_batch_logs',methods=['GET','POST'])
def cron_batch_logs():
    """Cron endpoint: batch-load pending GCS log files into BigQuery."""
    global s_grok
    global bq_chasm_client
    global chasm_gcs_client
    rows = []
    if s_grok is None:
        s_grok = SimpleGrok()
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    if chasm_gcs_client is None:
        chasm_gcs_client = chasm_gcs()
    result = bq_chasm_client.load_job_from_gcs()
    return result
@app.route('/cron_real_time_alert',methods=['GET','POST'])
def cron_real_time_alert():
    """Cron endpoint for the real-time alert pass.

    NOTE(review): this body is identical to cron_batch_logs — it only
    runs the GCS -> BigQuery load job. Confirm whether a real-time alert
    evaluation step was intended here.

    Fix: dropped the unused local `rows = []` from the original.
    """
    global s_grok
    global bq_chasm_client
    global chasm_gcs_client
    # Lazily construct the shared module-level clients on first use.
    if s_grok is None:
        s_grok = SimpleGrok()
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    if chasm_gcs_client is None:
        chasm_gcs_client = chasm_gcs()
    result = bq_chasm_client.load_job_from_gcs()
    return result
@app.route('/')
def hello():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/fifteen_minute_cron', methods=['GET'])
def fifteen_minute_cron():
    """Cron endpoint: run every alert scheduled at a 15-minute interval.

    Fix: `print results` (Python-2-only statement) replaced with the
    single-argument call form `print(results)`, which behaves identically
    on Python 2 and is valid on Python 3.
    """
    global bq_chasm_client
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    results = bq_chasm_client.run_scheduled_alerts(15)
    print(results)  # debug trace of the scheduled-alert run
    return render_template('index.html')
@app.route('/add_alert_big_query',methods=['GET','POST'])
def add_alert_big_query():
    """Create a scheduled BigQuery alert, or dry-run its query.

    Submit button: persist the alert on a 15-minute schedule and redirect
    (POST/redirect/GET). Test button: execute the query once and render
    its schema/rows/billing stats inline.

    Fix: the flash message was copy-pasted from a login tutorial
    ("Login requested for user ..., remember_me=...") — it now describes
    the alert being handled.
    """
    global bq_chasm_client
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    form = AlertsForm()
    if form.validate_on_submit():
        flash('Alert {} submitted: {}'.format(
            form.alert_name.data, form.alert_description.data))
        if form.submit.data:
            bq_chasm_client.add_scheduled_alert(form.alert_name.data,
                                                form.alert_description.data,
                                                form.alert_query.data,15)
            return redirect('/add_alert_big_query',code=302)
        elif form.test_alert.data:
            # Dry-run the query so the operator can preview results/cost.
            pattern_text = form.alert_query.data
            print(pattern_text)
            schema,rows,bytes_billed,byte_processed,errors,alert_table = bq_chasm_client.run_query(pattern_text)
            return render_template('bq_alert.html', title='Sign In', form=form,
                                   alert_data=bq_chasm_client.get_scheduled_alerts(),
                                   bytes_billed=bytes_billed,
                                   byte_processed=byte_processed,
                                   schema=schema,query_result=rows,
                                   search_text=pattern_text,
                                   error=errors)
    return render_template('bq_alert.html', title='Sign In', form=form,
                           alert_data=bq_chasm_client.get_scheduled_alerts())
@app.route('/edit_alert',methods=['GET','POST'])
def edit_alert():
    """Edit, dry-run, or delete an existing scheduled alert.

    The alert is identified by the `id` query parameter. Update button:
    rewrite the alert (15-minute schedule). Test button: run the query
    once and render the results inline. Delete button: remove the alert
    and redirect to the creation page. Any other request falls through to
    pre-filling the form with the stored alert.

    Fix: the flash message was copy-pasted from a login tutorial
    ("Login requested for user ..., remember_me=...") — it now describes
    the alert being handled.
    """
    global bq_chasm_client
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    form = AlertsForm()
    if form.validate_on_submit():
        flash('Alert {} submitted: {}'.format(
            form.alert_name.data, form.alert_description.data))
        if form.update.data:
            alert_key = request.args.get('id')
            bq_chasm_client.update_scheduled_alert(form.alert_name.data,
                                                   form.alert_description.data,
                                                   form.alert_query.data,15,alert_key)
            return render_template('bq_edit_alert.html', title='Sign In', form=form,
                                   alert_data=bq_chasm_client.get_scheduled_alerts())
        elif form.test_alert.data:
            # Dry-run the query so the operator can preview results/cost.
            pattern_text = form.alert_query.data
            print(pattern_text)
            schema,rows,bytes_billed,byte_processed,errors,alert_table = bq_chasm_client.run_query(pattern_text)
            return render_template('bq_edit_alert.html', title='Sign In', form=form,
                                   alert_data=bq_chasm_client.get_scheduled_alerts(),
                                   bytes_billed=bytes_billed,
                                   byte_processed=byte_processed,
                                   schema=schema,query_result=rows,
                                   search_text=pattern_text,
                                   error=errors)
        elif form.delete.data:
            alert_key = request.args.get('id')
            bq_chasm_client.delete_scheduled_alert(alert_key)
            return redirect('/add_alert_big_query',code=302)
    # GET (or unrecognized POST): pre-fill the form from the stored alert.
    alert_key = request.args.get('id')
    alert_name, alert_description, alert_query = bq_chasm_client.get_scheduled_alert(alert_key)
    form.alert_name.data = alert_name
    form.alert_description.data = alert_description
    form.alert_query.data = alert_query
    return render_template('bq_edit_alert.html', title='Sign In', form=form,
                           alert_data=bq_chasm_client.get_scheduled_alerts())
@app.route('/grok_test',methods=['GET','POST'])
def grok_test():
    """Interactive grok tester: parse a sample event against a pattern.

    GET shows the empty form; POST compiles the submitted pattern under
    the scratch name TEST_PARSER and renders the parsed fields.
    """
    global s_grok
    if s_grok is None:
        s_grok = SimpleGrok()

    if request.method != 'POST':
        # Plain GET: just show the empty tester form.
        return render_template('grok_test.html')

    # Drop any previous scratch pattern before registering the new one.
    s_grok.remove_pattern("TEST_PARSER")
    raw_event = request.form['event_text']
    raw_pattern = request.form['pattern_text']
    s_grok.add_pattern("TEST_PARSER",raw_pattern)
    parsed_fields = s_grok.get_field_dict(raw_event)
    return render_template('grok_test.html',event=raw_event,
                           pattern=raw_pattern,
                           parse_results=parsed_fields,
                           current_reg=s_grok.types)
@app.route('/grok_parsers',methods=['GET','POST'])
def grok_current_parsers():
    """Show the grok patterns currently registered with the parser.

    Fix: the original only returned a response for GET, so a POST (which
    the route explicitly allows) fell through and returned None, making
    Flask raise a 500. All methods now render the patterns page.
    """
    global s_grok
    if s_grok is None:
        s_grok = SimpleGrok()
    grok_patterns = s_grok.get_patterns()
    return render_template('grok_current_parsers.html',patterns=grok_patterns)
@app.route('/query_main',methods=['GET','POST'])
def query_main():
    """Ad-hoc BigQuery console: run a submitted query and show results.

    GET renders the empty query form; POST executes `query_text` and
    renders schema, rows, billing stats, and any errors.

    Fixes: removed the unused `global s_grok` declaration and the
    commented-out dead code; Python-2-only `print` statements replaced
    with the call form, which is valid on both Python 2 and 3.
    """
    global bq_chasm_client
    print(PROJECT_ID)  # debug trace of the active GCP project
    if bq_chasm_client is None:
        bq_chasm_client = chasm_bq()
    if request.method == 'POST':
        pattern_text = request.form['query_text']
        print(pattern_text)
        schema,rows,bytes_billed,byte_processed,errors,alert_table = bq_chasm_client.run_query(pattern_text)
        return render_template('query_main.html',bytes_billed=bytes_billed,
                               byte_processed=byte_processed,
                               schema=schema,query_result=rows,
                               search_text=pattern_text,
                               error=errors)
    else:
        return render_template('query_main.html')
@app.errorhandler(500)
def server_error(e):
    """Catch-all 500 handler: log the exception, return a generic body."""
    # Log the error and stacktrace.
    logging.exception('An error occurred during a request.')
    return 'An internal error occurred.', 500
# [END app] | c92c9d0052a2ef1424ef76b8b8e8c7f0408a7f40 | [
"Markdown",
"Python",
"Text"
] | 10 | Python | beginsec/chasmpy | 16c3384d6f4cfd4b855c15475f48621b4bfa3247 | 1d689d8c4cc5e9be89a570512d41fc2828524969 | |
refs/heads/master | <repo_name>Annette70/springmvc<file_sep>/src/main/resources/message.properties
typeMismatch.theCustomer.freePasses=Must be a number<file_sep>/src/main/java/edu/wctc/customer/controller/CustomerController.java
package edu.wctc.customer.controller;
import edu.wctc.customer.entity.Customer;
import edu.wctc.customer.service.CustomerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.propertyeditors.StringTrimmerEditor;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import javax.validation.Valid;
import java.util.List;
@Controller
@RequestMapping("/validation")
public class CustomerController {
@Autowired
private CustomerService customerService;
@RequestMapping("/list")
public String showCustomerList(Model model){
List<Customer> list = customerService.getCustomerList();
model.addAttribute("allCustomers", list);
return "customer/customer-list";
}
@InitBinder
public void trimWhitespace(WebDataBinder dataBinder){
StringTrimmerEditor ste = new StringTrimmerEditor(true);
dataBinder.registerCustomEditor(String.class, ste);
}
@RequestMapping("/form")
public String showForm(Model model){
model.addAttribute("theCustomer", new Customer());
return "customer/customer-form";
}
@RequestMapping("/processForm")
public String orderTickets(@Valid @ModelAttribute("theCustomer") Customer theCustomer,
BindingResult validation){
System.out.println(theCustomer.toString());
if (validation.hasErrors()) {
return "customer/customer-form";
}
return "customer/customer-confirmation";
}
}
<file_sep>/src/main/java/edu/wctc/customer/dao/CustomerDAO.java
package edu.wctc.customer.dao;
import edu.wctc.customer.entity.Customer;
import java.util.List;
public interface CustomerDAO {
List<Customer> getCustomerList();
}
<file_sep>/src/main/java/edu/wctc/customer/service/LastNameSortingCustomerService.java
package edu.wctc.customer.service;
import edu.wctc.customer.dao.CustomerDAO;
import edu.wctc.customer.entity.Customer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Comparator;
import java.util.List;
@Service
public class LastNameSortingCustomerService implements CustomerService {
@Autowired
private CustomerDAO customerDAO;
@Override
public List<Customer> getCustomerList() {
List<Customer> list= customerDAO.getCustomerList();
list.sort(Comparator.comparing(Customer::getLastName));
return null;
}
}
<file_sep>/src/main/java/edu/wctc/travel/TravelController.java
package edu.wctc.travel;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import java.util.Map;
@Controller
@RequestMapping("/travel")
public class TravelController {
@Value("#{gradeLevelProp}")
private Map<Integer, String> gradeLevelMap;
@RequestMapping("/showForm")
public String showForm(Model model){
ExchangeStudent es = new ExchangeStudent();
model.addAttribute("exchStudent",es);
model.addAttribute("countries", new CountryOptions());
model.addAttribute("gradeLevels", gradeLevelMap);
model.addAttribute("travelMethods", TravelMethod.values());
return "travelOutput/form";
}
@RequestMapping("/processForm")
public String processForm(@ModelAttribute("exchStudent") ExchangeStudent es){
return "travelOutput/confirm";
}
}
| afec3a93eee9130f0708260aad530e6246abd99b | [
"Java",
"INI"
] | 5 | INI | Annette70/springmvc | 07a3ad1e82631f86134315bd6030ba34996cebcf | fb6a7aa3ec85c1db20ae6988a20b7c54abd36d57 | |
refs/heads/master | <repo_name>JNPRZ2000/Proyecto2_IPC1<file_sep>/hospital/appweb/static/js/modificarmed.js
function volver(){
window.history.back();
}
function getCookie(name) {
let cookieValue = null;
if (document.cookie && document.cookie !== '') {
const cookies = document.cookie.split(';');
for (let i = 0; i < cookies.length; i++) {
const cookie = cookies[i].trim();
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) === (name + '=')) {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
var posicion;
var nompos;
function getParametros(){
let ruta = new String(window.location);
const usuario = ruta.split("?");
let re = /%20/g;
console.log(usuario[1])
let usuarioco = usuario[1].replace(re," ");
nompos = usuarioco;
console.log(usuarioco)
let data = {
nombre: usuarioco
}
console.log(data);
fetch("/modificar-medicina/", {
method: "POST",
headers: {
"X-CSRFToken": getCookie("csrftoken"),
"Content-Type": "application/json",
"Accept": "application/json",
"X-Request-With": "XMLHttpRequest"
},
body: JSON.stringify(data),
mode: "cors",
cache: "default",
credentials: "include"
}).then(
function(respuesta){
respuesta.text().then(
function(da){
console.log(da)
let ingreso = da.split(",")
document.getElementById("nombre").value = ingreso[0];
document.getElementById("precio").value = ingreso[1];
document.getElementById("descripcion").value = ingreso[2];
document.getElementById("cantidad").value = ingreso[3];
posicion = parseInt(ingreso[4]);
}
);
}
).catch(
function(error){
alert("Ha ocurrido algun error");
console.log(error);
}
);
}
function actualizar(){
let continuar = true
let nombre = document.getElementById("nombre").value;
let precio = document.getElementById("precio").value;
let descripcion = document.getElementById("descripcion").value;
let cantidad = document.getElementById("cantidad").value;
if(precio == null || precio.length == 0){
alert("El campo de precio está vacío");
continuar = false;
}if(descripcion == null || descripcion.length == 0){
continuar = false;
alert("El campo de descripción está vacío");
}if(cantidad == null || cantidad.length == 0){
continuar = false;
alert("El campo de cantidad está vacío")
}if(nombre == null || nombre.length == 0){
continuar = false;
alert("El campo de nombre está vacío");
}else if(nombre.length > 0){
if(continuar == true){
if(nombre == nompos){
let infor = {
nombre: nombre,
precio: precio,
descripcion: descripcion,
cantidad: cantidad,
indice: posicion
}
senMod(infor);
}
else{
comprobarMed(nombre,precio,descripcion,cantidad,posicion);
}
}
}
if(continuar == true){
console.log("se enviaran los campos")
}
}
function comprobarMed(nombre,precio,descripcion,cantidad,indice){
let data = {
nombre: nombre,
basura: "b"
}
let informacion = {
nombre: nombre,
precio: precio,
descripcion: descripcion,
cantidad: cantidad,
indice: indice
}
fetch("/modificar-medicina/", {
method: "POST",
headers: {
"X-CSRFToken": getCookie("csrftoken"),
"Content-Type": "application/json",
"Accept": "application/json",
"X-Request-With": "XMLHttpRequest"
},
body: JSON.stringify(data),
mode: "cors",
cache: "default",
credentials: "include"
})
.then(
function(respuesta){
respuesta.text().then(
function(da){
if(da =="existe"){
alert("Ya hay una medicina registrada con este nombre");
}
else{
senMod(informacion)
}
}
);
}
)
.catch(
function(error){
alert("Error ocurrido!");
console.log(error);
}
);
}
function senMod(inf){
fetch("/modificar-medicina/",{
method: "POST",
headers: {
"X-CSRFToken": getCookie("csrftoken"),
"Content-Type": "application/json",
"Accept": "application/json",
"X-Request-With": "XMLHttpRequest"
},
body: JSON.stringify(inf),
mode: "cors",
cache: "default",
credentials: "include"
})
.then(
window.location.href = ("/administrador/tabs/")
)
.catch(
function(error){
alert("Ha ocurrido algun error");
console.log(error);
}
);
}
window.addEventListener("load", getParametros);<file_sep>/hospital/appweb/templates/registro.html
{% load static %}
<!DOCTYPE html>
<html>
<head>
<meta charset='utf-8'>
<meta http-equiv='X-UA-Compatible' content='IE=edge'>
<title>Registrarme</title>
<link rel="shortcut icon" type="image/png" href="{% static 'img/marathon.png' %}">
<meta name='viewport' content='width=device-width, initial-scale=1'>
<link rel='stylesheet' type='text/css' media='screen' href="{% static 'css/registro.css' %}">
<script type="text/javascript" src="{% static 'js/registro.js' %}"></script>
</head>
<body>
<header id="encabezado">
<a href="{% url 'login' %}">
<div id="head-desc">
<span id="nam-site">Hospital Universitario</span>
<span id="des-site">Existiendo por la salud de todos</span>
</div>
</a>
<a id="link-register" href="{% url 'login' %}">Ingresar</a>
</header>
<form id="formulario" method="POST">{% csrf_token %}
<input type="text" name="nombre" id="nombre" placeholder="Nombre" required title="Ingrese su Nombre">
<input type="text" name="apellido" id="apellido" placeholder="Apellido" required title="Ingrese su Apellido">
<legend>Fecha de nacimiento</legend>
<input type="date" name="nacimiento" id="nacimiento" required title="Ingrese su fecha de nacimiento">
<select name="sexo" id="sexo">
<option value="nulo" selected disabled>sexo</option>
<option value="F">Femenino</option>
<option value="M">Masculino</option>
</select>
<input type="number" name="telefono" id="telefono" placeholder="Teléfono">
<input type="text" name="usuario" id="usuario" placeholder="Usuario"
required title="Ingrese su Usuario">
<input type="password" name="contrasena" id="contrasena" placeholder="<PASSWORD>" minlength="8" required title="Ingrese una contraseña con al menos 8 caracteres">
<input type="button" value="Ingresar" id="enviar" onclick="registrar()">
</form>
<img src="{% static 'img/woh.png' %}" alt="woh.png">
<footer id="pie">
<div id="contacto">
<ul>
<li id="liCo">Contactenos</li>
<li id="liCo"><a id="aMail" href="mailto:<EMAIL>">Correo</a></li>
<li id="liCo"><a id="aMail" href="https://www.facebook.com/USACFacultadDeIngenieria" target="_blank">Facebook</a></li>
<li id="liCo"><a id="aMail" href="https://www.instagram.com/fiusac/?hl=es" target="_blank">Instagram</a></li>
<li id="liCo"><NAME><br><NAME></li>
<li id="liCo">201807201</li>
</ul>
</div>
<div id="mis" >
<span id="span1"><p>Misión</p></span> <br>
<span id="span2"><p>
Lorem ipsum dolor sit amet consectetur<br>
adipisicing elit. Praesentium sed quibusdam <br>
consequatur facere sequi vel atque placeat <br>
quisquam harum aliquam esse blanditiis <br>
dolores eos odio, possimus soluta sint <br>
voluptatem. Ad.</p>
</span>
</div>
<div id="vis" >
<span id="span1"><p>Visión</p></span> <br>
<span id="span2"><p>
Lorem ipsum dolor sit amet consectetur<br>
adipisicing elit. Praesentium sed quibusdam <br>
consequatur facere sequi vel atque placeat <br>
quisquam harum aliquam esse blanditiis <br>
dolores eos odio, possimus soluta sint <br>
voluptatem. Ad.</p>
</span>
</div>
<div id="acerca" >
<span id="span1"><p>Acerca de:</p></span> <br>
<span id="span2"><p>
Lorem ipsum dolor sit amet consectetur<br>
adipisicing elit. Praesentium sed quibusdam <br>
consequatur facere sequi vel atque placeat <br>
</span>
</div>
</footer>
</body>
</html><file_sep>/hospital/appweb/clases/objetos.py
class Doctor:
def __init__(self, nom, ape, fec, sex, usu, cont, esp, tel, pos):
self.nombre = nom
self.apellido = ape
self.fecha = fec
self.sexo = sex
self.usuario = usu
self.contrasena = cont
self.especialidad = esp
self.telefono = tel
self.posicion = pos
def to_string(self):
return "%s*%s*%s*%s*%s*%s*%s*%s*%s" %(self.nombre,self.apellido,self.fecha,self.sexo,self.usuario,self.contrasena,self.especialidad,self.telefono,self.posicion)
class Nurse:
def __init__(self, nom, ape, fec, sex, usu, cont, tel, pos):
self.nombre = nom
self.apellido = ape
self.fecha = fec
self.sexo = sex
self.usuario = usu
self.contrasena = cont
self.telefono = tel
self.posicion = pos
def to_string(self):
return "%s*%s*%s*%s*%s*%s*%s*%s" %(self.nombre,self.apellido,self.fecha,self.sexo,self.usuario,self.contrasena,self.telefono,self.posicion)
class Patient:
def __init__(self, nom, ape, fec, sex, usu, cont, tel, pos):
self.nombre = nom
self.apellido = ape
self.fecha = fec
self.sexo = sex
self.usuario = usu
self.contrasena = cont
self.telefono = tel
self.posicion = pos
def to_string(self):
return "%s*%s*%s*%s*%s*%s*%s*%s" %(self.nombre,self.apellido,self.fecha,self.sexo,self.usuario,self.contrasena,self.telefono,self.posicion)
class Medicine:
def __init__(self, nom, prec, desc, can):
self.nombre = nom
self.precio = prec
self.descripcion = desc
self.cantidad = can
def to_string(self):
return "%s,%s,%s,%s" %(self.nombre,self.precio,self.descripcion,self.cantidad)
class AdminWeb:
def __init__(self):
self.nombre = "Javier"
self.apellido = "Golon"
self.usuario = "admin"
self.contrasena = "1234"
class Receta:
def __init__(self, pad, vec):
self.padecimiento = pad
self.veces = vec<file_sep>/hospital/appweb/views.py
from django.shortcuts import render,HttpResponse
from appweb.clases.objetos import AdminWeb, Doctor, Nurse, Patient, Medicine, Receta
import json
# Create your views here.
admin = AdminWeb()
doctors = []
nurses = []
patients = []
medicines = []
recetas = []
def login(request):
if request.method == "POST":
datos = json.loads(request.body)
li = searchup(datos["usuario"],datos["contrasena"])
return HttpResponse(str(li[0]+"-"+str(li[1])))
else:
return render(request, "login.html")
def searchup(usuario,contrasena):
tipo = "noexiste"
continuar = True
cdo = 0
cen = 0
cpa = 0
indice = -1
while continuar == True and cdo < len(doctors):
if usuario == doctors[cdo].usuario and contrasena == doctors[cdo].contrasena:
indice = cdo
tipo = "doc"
continuar = False
cdo += 1
while continuar == True and cen < len(nurses):
if usuario == nurses[cen].usuario and contrasena == nurses[cen].contrasena:
indice = cen
tipo = "enf"
continuar = False
cen += 1
while continuar == True and cpa < len(patients):
if usuario == patients[cpa].usuario and contrasena == patients[cpa].contrasena:
indice = cpa
tipo = "pac"
continuar = False
cpa += 1
return [tipo,indice]
def registro(request):
if request.method == "POST":
continuar = True
datos = json.loads(request.body)
if(len(datos.items()) == 1):
username = datos["usuario"]
contd = 0
conte = 0
contp = 0
while contd < len(doctors) and continuar != False:
if doctors[contd].usuario == username:
continuar = False
break
contd += 1
while conte < len(nurses) and continuar != False:
if nurses[conte].usuario == username:
continuar = False
break
conte += 1
while contp < len(patients) and continuar != False:
if patients[contp].usuario == username:
continuar = False
break
contp += 1
if(continuar == True):
print("ook")
return HttpResponse("ok")
else:
print("ooknt")
return HttpResponse("oknt")
if(len(datos.items())==7):
nombre = datos["nombre"]
apellido = datos["apellido"]
fecha = datos["fecha"]
genero = datos["genero"]
telefono = datos["telefono"]
username = datos["usuario"]
contrasena = datos["contraseña"]
patients.append(Patient(nombre,apellido,fecha,genero,username,contrasena,telefono,len(patients)))
return render(request, "registro.html")
else:
return render(request, "registro.html")
def administracion(request):
if request.method == "POST":
datos = json.loads(request.body)
cont = 0
elementos = datos["usuarios"]
if datos["tipo"] == "doc":
cont = ingresar_doc(elementos)
elif datos["tipo"] == "enf":
cont = ingresar_enf(elementos)
elif datos["tipo"] == "pac":
cont = ingresar_pac(elementos)
elif datos["tipo"] == "med":
cont = ingresar_med(elementos)
return HttpResponse(cont)
else:
return render(request,"admin.html",{"admin":admin})
def ingresar_doc(elementos):
contador = 0
for i in range(len(elementos)):
if comprobar_usuario(elementos[i][4]) == True:
doctors.append(Doctor(elementos[i][0],elementos[i][1],elementos[i][2],elementos[i][3],
elementos[i][4],elementos[i][5],elementos[i][6],elementos[i][7],len(doctors)))
contador += 1
return contador
def ingresar_enf(elementos):
contador = 0
for i in range(len(elementos)):
if comprobar_usuario(elementos[i][4]) == True:
nurses.append(Nurse(elementos[i][0],elementos[i][1],elementos[i][2],
elementos[i][3],elementos[i][4],elementos[i][5],elementos[i][6],len(nurses)))
contador += 1
return contador
def ingresar_pac(elementos):
contador = 0
for i in range(len(elementos)):
if comprobar_usuario(elementos[i][4]) == True:
patients.append(Patient(elementos[i][0],elementos[i][1],elementos[i][2],elementos[i][3],
elementos[i][4],elementos[i][5],elementos[i][6],len(patients)))
contador += 1
return contador
def ingresar_med(elementos):
print(elementos)
contador = 0
for i in range(len(elementos)):
if comprobar_medicina(elementos[i][0]) == True:
medicines.append(Medicine(elementos[i][0],elementos[i][1],elementos[i][2],elementos[i][3]))
contador += 1
return contador
def comprobar_usuario(usr):
contd = 0
conte = 0
contp = 0
continuar = True
while contd < len(doctors) and continuar != False:
if doctors[contd].usuario == usr:
continuar = False
break
contd += 1
while conte < len(nurses) and continuar != False:
if nurses[conte].usuario == usr:
continuar = False
break
conte += 1
while contp < len(patients) and continuar != False:
if patients[contp].usuario == usr:
continuar = False
break
contp += 1
return continuar
def comprobar_medicina(nam):
continuar = True
for i in range(len(medicines)):
if medicines[i].nombre == nam:
continuar = False
break
return continuar
def admin_tabs(request):
if request.method == "POST":
datos = json.loads(request.body)
eliminar(datos["tipo"],datos["elemento"])
return HttpResponse("eliminado")
else:
for i in range(len(recetas)-1,0,-1):
for j in range(i):
if recetas[j].veces>recetas[j+1].veces:
temp = recetas[j]
recetas[j] = recetas[j+1]
recetas[j+1] = temp
lispad = []
if len(recetas) == 1:
lispad.append(recetas[0])
elif len(recetas) == 2:
lispad.append(recetas[1])
lispad.append(recetas[0])
elif len(recetas) == 3:
lispad.append(recetas[2])
lispad.append(recetas[1])
lispad.append(recetas[0])
elif len(recetas) == 4:
lispad.append(recetas[3])
lispad.append(recetas[2])
lispad.append(recetas[1])
lispad.append(recetas[0])
elif len(recetas) == 5:
lispad.append(recetas[4])
lispad.append(recetas[3])
lispad.append(recetas[2])
lispad.append(recetas[1])
lispad.append(recetas[0])
elif len(recetas)>5:
lispad.append(recetas[len(recetas)-1])
lispad.append(recetas[len(recetas)-2])
lispad.append(recetas[len(recetas)-3])
lispad.append(recetas[len(recetas)-4])
lispad.append(recetas[len(recetas)-5])
return render(request, "tablas_administrador.html",
{'doctores': doctors,'enfermeras': nurses,'pacientes': patients,'medicinas':medicines,'recetas':lispad})
def eliminar(tipo,elemento):
if tipo == "doc":
for i in range (len(doctors)):
if doctors[i].usuario == elemento:
del doctors[i]
break
if tipo == "enf":
for i in range (len(nurses)):
if nurses[i].usuario == elemento:
del nurses[i]
break
if tipo == "pac":
for i in range (len(patients)):
if patients[i].usuario == elemento:
del patients[i]
break
if tipo == "med":
for i in range (len(medicines)):
if medicines[i].nombre == elemento:
del medicines[i]
break
def modify_doc(request):
if request.method == "POST":
dat = json.loads(request.body)
if len(dat.items()) == 1:
for i in range(len(doctors)):
if doctors[i].usuario == dat["doctor"]:
print(doctors[i].to_string())
return HttpResponse(doctors[i].to_string())
if len(dat.items()) == 9:
pos = int(dat["indice"])
doctors[pos].nombre=dat["nombre"]
doctors[pos].apellido=dat["apellido"]
doctors[pos].fecha=dat["fecha"]
doctors[pos].sexo=dat["sexo"]
doctors[pos].telefono=dat["telefono"]
doctors[pos].especialidad=dat["especialidad"]
doctors[pos].usuario=dat["usuario"]
doctors[pos].contrasena=dat["contrasena"]
return HttpResponse("a")
else:
return render(request, "modificardoc.html")
def modify_ep(request):
if request.method == "POST":
datos = json.loads(request.body)
print(datos)
if len(datos.items()) == 2:
if datos["tipo"] == "enf":
for i in range(len(nurses)):
if nurses[i].usuario == datos["usuario"]:
return HttpResponse(nurses[i].to_string())
if datos["tipo"] == "pac":
for i in range(len(patients)):
if patients[i].usuario == datos["usuario"]:
return HttpResponse(patients[i].to_string())
if len(datos.items()) == 9:
if datos["tipo"] == "enf":
print(datos)
pos = int(datos["indice"])
nurses[pos].nombre=datos["nombre"]
nurses[pos].apellido=datos["apellido"]
nurses[pos].fecha=datos["fecha"]
nurses[pos].sexo=datos["sexo"]
nurses[pos].telefono=datos["telefono"]
nurses[pos].usuario=datos["usuario"]
nurses[pos].contrasena=datos["contrasena"]
return HttpResponse("a")
if datos["tipo"] == "pac":
print(datos)
pos = int(datos["indice"])
patients[pos].nombre=datos["nombre"]
patients[pos].apellido=datos["apellido"]
patients[pos].fecha=datos["fecha"]
patients[pos].sexo=datos["sexo"]
patients[pos].telefono=datos["telefono"]
patients[pos].usuario=datos["usuario"]
patients[pos].contrasena=datos["contrasena"]
return HttpResponse("a")
else:
return render(request, "modificar.html")
def modify_med(request):
if request.method == "POST":
datos = json.loads(request.body)
if len(datos.items()) == 1:
for i in range(len(medicines)):
if datos["nombre"] == medicines[i].nombre:
return HttpResponse([medicines[i].nombre+","+medicines[i].precio+","+medicines[i].descripcion
+","+medicines[i].cantidad+","+ str(i)])
if len(datos.items()) == 2:
exis = False
for i in range(len(medicines)):
if datos["nombre"] == medicines[i].nombre:
exis = True
break
if exis == True:
return HttpResponse("existe")
else:
return HttpResponse("existent")
if len(datos.items()) == 5:
pos = int(datos["indice"])
medicines[pos].nombre = datos["nombre"]
medicines[pos].precio = datos["precio"]
medicines[pos].descripcion = datos["descripcion"]
medicines[pos].cantidad = datos["cantidad"]
return HttpResponse("")
else:
return render(request,"modificarmed.html")
def verdoc(request):
if request.method == "POST":
dato = json.loads(request.body)
if len(dato.items()) == 1:
usuario = dato["doctor"]
for i in range(len(doctors)):
if usuario == doctors[i].usuario:
return HttpResponse([doctors[i].nombre+","+doctors[i].apellido+","+doctors[i].fecha+","+
doctors[i].sexo+","+doctors[i].telefono+","+doctors[i].especialidad+","+doctors[i].usuario+","+
doctors[i].contrasena])
else:
return render(request, "verdoc.html")
def verep(request):
if request.method == "POST":
dato = json.loads(request.body)
if dato["tipo"] == "enf":
for i in range(len(nurses)):
if dato["usuario"] == nurses[i].usuario:
return HttpResponse([nurses[i].nombre+","+nurses[i].apellido+","+nurses[i].fecha+","+
nurses[i].sexo+","+nurses[i].telefono+","+nurses[i].usuario+","+nurses[i].contrasena])
if dato["tipo"] == "pac":
for i in range(len(patients)):
if dato["usuario"] == patients[i].usuario:
return HttpResponse([patients[i].nombre+","+patients[i].apellido+","+patients[i].fecha+","+
patients[i].sexo+","+patients[i].telefono+","+patients[i].usuario+","+patients[i].contrasena])
else:
return render(request, "verpersona.html")
def vermed(request):
if request.method == "POST":
dato = json.loads(request.body)
for i in range(len(medicines)):
if dato["nombre"] == medicines[i].nombre:
return HttpResponse(medicines[i].to_string())
else:
return render(request, "vermed.html")
def home_doctor(request):
if request.method == "POST":
existencia = False
dato = json.loads(request.body)
for i in range(len(recetas)):
if dato["padecimiento"] == recetas[i].padecimiento:
recetas[i].veces += 1
existencia = True
break
if existencia == False:
recetas.append(Receta(dato["padecimiento"],1))
for i in range(len(recetas)):
print(recetas[i].padecimiento)
print(recetas[i].veces)
return HttpResponse("exito")
else:
return render(request, "doctor.html")
def home_nurse(request):
if request.method == "POST":
print("hola enfermera")
else:
return render(request, "enfermera.html")
def factura_nurse(request):
return render(request, "enfermera_factura.html",{"doctores": doctors})
def home_patient(request):
if request.method == "POST":
print("Hola paciente")
else:
return render(request, "paciente.html")<file_sep>/hospital/appweb/static/js/modificar.js
function volver(){
window.history.back();
}
function getCookie(name) {
let cookieValue = null;
if (document.cookie && document.cookie !== '') {
const cookies = document.cookie.split(';');
for (let i = 0; i < cookies.length; i++) {
const cookie = cookies[i].trim();
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) === (name + '=')) {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
var posicion;
var posUs;
var tipoUs;
var anterior;
var usu;
var ancestro;
function getParametros(){
let ruta = new String(window.location);
const usuario = ruta.split("?");
let tipo;
ancestro = usuario[2];
if(usuario[2]=="enfe" || usuario[2] == "enf"){
tipo ="enf"
}
if(usuario[2] == "paci" || usuario[2] =="pac"){
tipo = "pac"
}
anterior = usuario[3];
let data = {
usuario: usuario[1],
tipo: tipo
}
tipoUs = tipo;
console.log(data);
fetch("/modificar/", {
method: "POST",
headers: {
"X-CSRFToken": getCookie("csrftoken"),
"Content-Type": "application/json",
"Accept": "application/json",
"X-Request-With": "XMLHttpRequest"
},
body: JSON.stringify(data),
mode: "cors",
cache: "default",
credentials: "include"
}).then(
function(respuesta){
respuesta.text().then(
function(da){
console.log(da)
let ingreso = da.split("*")
document.getElementById("nombre").value = ingreso[0];
document.getElementById("apellido").value = ingreso[1];
let fe = ingreso[2].toString()
let fe1;
if (fe.includes("/") == false){
fe1 = fe.split("-")
}else{
fe1 = fe.split("/")
}
let fe2
if (fe1[0].length == 2 ){
fe2 = new String(fe1[2]+"-"+fe1[1]+"-"+fe1[0])
}
else{
fe2 = new String(fe1[0]+"-"+fe1[1]+"-"+fe1[2])
}
document.getElementById("nacimiento").value = fe2;
if (ingreso[3]=="F"){
document.getElementById("sexo").selectedIndex = 1
}
else{
document.getElementById("sexo").selectedIndex = 2
}
document.getElementById("telefono").value = ingreso[6];
document.getElementById("usuario").value = ingreso[4];
document.getElementById("contrasena").value = ingreso[5];
posicion = parseInt(ingreso[7]);
posUs = ingreso[4];
}
);
}
).catch(
function(error){
console.log(error);
alert("Ha ocurrido algun error1");
}
);
}
// Validate the edit-profile form and, when every field passes, submit the
// updated record. If the username was changed its availability is checked
// first via comprobarUsuario(); otherwise the update goes straight out with
// senMod(). Each failed check shows its own alert.
function actualizar(){
    let continuar = true
    let nombre = document.getElementById("nombre").value;
    let apellido = document.getElementById("apellido").value;
    let fecha = document.getElementById("nacimiento").value;
    let genero = document.getElementById("sexo").value;
    let telefono = document.getElementById("telefono").value;
    let usuario = document.getElementById("usuario").value;
    let contraseña = document.getElementById("contrasena").value;
    if(nombre == null || nombre.length == 0){
        alert("No ha ingresado su nombre");
        continuar = false;
    }
    if(apellido == null || apellido.length == 0){
        alert("No ha ingresado su apellido");
        continuar = false;
    }
    if(fecha.length == 0){
        alert("No ha ingresado una fecha valida");
        continuar = false;
    }
    if(genero == "nulo"){
        continuar = false;
        alert("Seleccione su sexo");
    }
    // The phone is optional, but when present it must be exactly 8 digits.
    if(telefono.length > 0 && telefono.length != 8){
        continuar = false;
        alert("Ingrese un numero valido (8 Números)\nSi no desea ingresar su número telefónico,"+
        "\nborre los datos de este campo");
    }
    if(contraseña == null || contraseña.length < 8){
        continuar = false
        alert("Ingrese una contraseña valida (Al menos 8 caracteres)");
    }
    if(usuario == null || usuario.length == 0){
        continuar = false;
        alert("Rellene el campo de usuario");
    }else if(usuario.length > 0){
        if(usuario == "admin"){
            // "admin" is reserved for the built-in administrator account.
            alert("usuario no valido");
            continuar = false;
        }else if(continuar == true){
            if(usuario == posUs){
                // Username unchanged: skip the availability check.
                let infor = {
                    nombre: nombre,
                    apellido: apellido,
                    fecha: fecha,
                    // BUG FIX: this previously read the undefined identifier
                    // "sexo" (ReferenceError); the selected value lives in "genero".
                    sexo: genero,
                    telefono: telefono,
                    usuario: usuario,
                    contrasena: contraseña,
                    indice: posicion,
                    tipo: tipoUs
                }
                usu = usuario;
                senMod(infor);
            }
            else{
                // Username changed: verify availability before updating.
                comprobarUsuario("usuario",nombre,apellido,fecha,genero,telefono,contraseña,posicion);
            }
        }
    }
}
// Ask the server (via /registro/) whether the username typed into the input
// named by `identificador` is still free. "oknt" means taken; any other
// reply sends the collected profile update with senMod().
function comprobarUsuario(identificador,nombre,apellido,fecha,genero,telefono,contraseña,indice){
    const usuario = document.getElementById(identificador).value;
    const data = {
        usuario: usuario,
    }
    // Full payload forwarded to senMod() once the name is known to be free.
    const informacion = {
        nombre: nombre,
        apellido: apellido,
        fecha: fecha,
        sexo: genero,
        telefono: telefono,
        usuario: usuario,
        contrasena: contraseña,
        indice: indice,
        tipo: tipoUs
    }
    usu = usuario;
    const opciones = {
        method: "POST",
        headers: {
            "X-CSRFToken": getCookie("csrftoken"),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-Request-With": "XMLHttpRequest"
        },
        body: JSON.stringify(data),
        mode: "cors",
        cache: "default",
        credentials: "include"
    };
    fetch("/registro/", opciones)
        .then((respuesta) => {
            respuesta.text().then((texto) => {
                if (texto == "oknt") {
                    alert("este nombre de usuario no está disponible")
                } else {
                    senMod(informacion)
                }
            });
        })
        .catch((error) => {
            console.log(error)
            alert("Error ocurrido!")
        });
}
// POST the updated profile payload `inf` to /modificar/ and, once the server
// has responded, send the user back to the page they came from.
function senMod(inf){
    fetch("/modificar/",{
        method: "POST",
        headers: {
            "X-CSRFToken": getCookie("csrftoken"),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-Request-With": "XMLHttpRequest"
        },
        body: JSON.stringify(inf),
        mode: "cors",
        cache: "default",
        credentials: "include"
    })
    .then(
        // BUG FIX: this used to be ".then(redireccionar())", which invoked
        // redireccionar() synchronously — navigating away before the request
        // completed and potentially aborting the in-flight update. Pass a
        // callback so the redirect waits for the response.
        function(){
            redireccionar()
        }
    )
    .catch(
        function(error){
            console.log(error)
            alert("Ha ocurrido algun error")
        }
    )
}
// Navigate back to the home page of whichever role's screen linked here,
// based on the `ancestro` tag captured from the URL.
function redireccionar(){
    let destino;
    switch (ancestro) {
        case "enfe":
            destino = "/doctor/" + "?" + posicion + "?" + usu;
            break;
        case "paci":
            destino = "/paciente/" + "?" + posicion + "?" + usu;
            break;
        default:
            destino = "/administrador/tabs/";
    }
    window.location.href = destino;
}
window.addEventListener("load",getParametros);<file_sep>/hospital/appweb/static/js/doctor.js
var indice
var usuario
// Return the decoded value of the named cookie, or null when it is absent.
function getCookie(name) {
    if (!document.cookie || document.cookie === '') {
        return null;
    }
    const prefix = name + '=';
    for (const raw of document.cookie.split(';')) {
        const candidate = raw.trim();
        if (candidate.substring(0, prefix.length) === prefix) {
            return decodeURIComponent(candidate.substring(prefix.length));
        }
    }
    return null;
}
// Page setup for the doctor home view: default today's date into the date
// field, pull the doctor's index and username out of the URL, and add a
// styled "modify my data" link to the header section.
function inicio(){
    document.getElementById("fecha").valueAsDate = new Date();
    const partes = window.location.toString().split("?");
    indice = partes[1];
    usuario = partes[2];
    const link = document.createElement("a");
    link.appendChild(document.createTextNode("Modificar Datos De Usuario: " + usuario));
    link.setAttribute("href", "/modificar-doctor/" + "?" + usuario + "?doc");
    link.style.color = "#666";
    link.style.fontSize = "25px";
    link.style.marginLeft = "30px";
    document.getElementById("verdoctor").appendChild(link);
}
// Validate the prescription form (each missing field gets its own alert)
// and, when complete, download the PDF and record the ailment server-side.
function generarReceta(){
    const paciente = document.getElementById("paciente").value;
    const padecimiento = document.getElementById("padecimiento").value.toLowerCase();
    const descripcion = document.getElementById("descripcion").value;
    const requeridos = [
        [paciente, "Es obligatorio ingresar el nombre del paciente"],
        [padecimiento, "Es obligatorio ingresar el padecimiento"],
        [descripcion, "Es obligatorio ingresar una descripción"]
    ];
    let valido = true;
    for (const [valor, mensaje] of requeridos) {
        if (valor == null || valor.length == 0) {
            alert(mensaje)
            valido = false;
        }
    }
    if (valido == true){
        imprimir();
        guardarReceta(padecimiento)
    }
}
// Render the prescription section of the page to "receta.pdf" and trigger a
// browser download, via the third-party html2pdf library (loaded elsewhere).
function imprimir(){
    const $elementoParaConvertir = document.getElementById("seccion1"); // <-- any DOM element can be chosen here
    html2pdf()
    .set({
        margin: 0.2,
        filename: "receta.pdf",
        image: {
            type: 'jpeg',
            quality: 0.98
        },
        html2canvas: {
            scale: 3, // higher scale = sharper graphics, but a heavier file
            letterRendering: true,
        },
        jsPDF: {
            unit: "in",
            format: "a5",
            orientation: 'portrait' // landscape or portrait
        }
    })
    .from($elementoParaConvertir)
    .save()
    .catch(err => console.log(err));
}
// Record the prescribed ailment against the logged-in doctor's index (the
// `indice` global parsed in inicio()) by POSTing it to /doctor/.
function guardarReceta(padecimiento){
    let data = {
        indice: indice,
        padecimiento: padecimiento
    }
    fetch("/doctor/", {
        method: "POST",
        headers: {
            "X-CSRFToken": getCookie("csrftoken"),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-Request-With": "XMLHttpRequest"
        },
        body: JSON.stringify(data),
        mode: "cors",
        cache: "default",
        credentials: "include"
    })
    .then(
        function(respuesta){
            respuesta.text().then(
                function(dat){
                    console.log(dat)
                }
            )
        }
    )
    .catch(
        function(error){
            // BUG FIX: the previous handler called error.text(), but fetch
            // rejects with a TypeError that has no .text() method, so the
            // error path itself crashed. Log the error object directly.
            console.log(error)
        }
    )
}
window.addEventListener("load", inicio);<file_sep>/hospital/appweb/static/js/vermed.js
// Navigate back to the previous page in the browser history.
function volver(){
    window.history.back();
}
// Return the decoded value of the named cookie, or null when it is absent.
function getCookie(name) {
    if (!document.cookie || document.cookie === '') {
        return null;
    }
    const prefix = name + '=';
    for (const parte of document.cookie.split(';')) {
        const cookie = parte.trim();
        if (cookie.substring(0, prefix.length) === prefix) {
            return decodeURIComponent(cookie.substring(prefix.length));
        }
    }
    return null;
}
var posicion;
var posUs;
// Read the medicine name from the URL ("...?<nombre>"), request its record
// from /ver-medicina/, and append the returned fields to the details section.
function getParametros(){
    let ruta = new String(window.location);
    const usuario = ruta.split("?");
    // The name travels URL-encoded; turn "%20" back into spaces first.
    let re = /%20/g;
    console.log(usuario[1])
    let usuarioco = usuario[1].replace(re," ");
    console.log(usuarioco)
    let data = {
        nombre: usuarioco
    }
    console.log(data);
    fetch("/ver-medicina/", {
        method: "POST",
        headers: {
            "X-CSRFToken": getCookie("csrftoken"),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-Request-With": "XMLHttpRequest"
        },
        body: JSON.stringify(data),
        mode: "cors",
        cache: "default",
        credentials: "include"
    }).then(
        function(respuesta){
            respuesta.text().then(
                function(da){
                    // FIX: the response was accidentally logged twice here.
                    console.log(da)
                    // Server replies "nombre,precio,descripcion,cantidad".
                    let ingreso = da.split(",")
                    let seccion2 = document.getElementById("seccion2");
                    let panom = document.createElement("p");
                    let papre = document.createElement("p");
                    let pades = document.createElement("p");
                    let pacan = document.createElement("p");
                    panom.innerHTML = "Nombre: "+ingreso[0];
                    papre.innerHTML = "Precio: "+ingreso[1];
                    pades.innerHTML = "Descripción: "+ingreso[2];
                    pacan.innerHTML = "Cantidad: "+ingreso[3];
                    seccion2.appendChild(panom);
                    seccion2.appendChild(papre);
                    seccion2.appendChild(pades);
                    seccion2.appendChild(pacan);
                }
            );
        }
    ).catch(
        function(error){
            alert("Ha ocurrido algun error")
            console.log(error)
        }
    );
}
window.addEventListener("load", getParametros);<file_sep>/hospital/appweb/static/js/enfermera_factura.js
// Pre-fill the invoice date field with today's date.
function darFecha(){
    document.getElementById("fecha").valueAsDate = new Date();
}
// Navigate back to the previous page in the browser history.
function volver(){
    window.history.back();
}
// Validate the invoice form (patient, doctor and consultation amount are all
// required) and, when complete, render the invoice section to "factura.pdf"
// using the third-party html2pdf library.
function imprimir(){
    let paciente = document.getElementById("paciente").value;
    let doctor = document.getElementById("doctor").value;
    let consulta = document.getElementById("consulta").value;
    let continuar = true;
    if (paciente == null || paciente.length == 0){
        alert("No ha ingresado un paciente");
        continuar = false;
    }if(doctor == "nulo"){
        alert("No ha seleccionado un doctor");
        continuar = false;
    }if(consulta == null || consulta.length == 0){
        alert("No ha ingresado el monto de la consulta");
        continuar = false;
    }
    if(continuar == true){
        const $elementoParaConvertir = document.getElementById("seccion2"); // <-- any DOM element can be chosen here
        html2pdf()
        .set({
            margin: 0.2,
            filename: "factura.pdf",
            image: {
                type: 'jpeg',
                quality: 0.98
            },
            html2canvas: {
                scale: 3, // higher scale = sharper graphics, but a heavier file
                letterRendering: true,
            },
            jsPDF: {
                unit: "in",
                format: "a4",
                orientation: 'portrait' // landscape or portrait
            }
        })
        .from($elementoParaConvertir)
        .save()
        .catch(err => console.log(err));
    }
}
window.addEventListener('load', darFecha);<file_sep>/hospital/appweb/static/js/registro.js
// Return the decoded value of the named cookie, or null when it is absent.
function getCookie(name) {
    if (!document.cookie || document.cookie === '') {
        return null;
    }
    const prefix = name + '=';
    for (const fragmento of document.cookie.split(';')) {
        const cookie = fragmento.trim();
        if (cookie.substring(0, prefix.length) === prefix) {
            return decodeURIComponent(cookie.substring(prefix.length));
        }
    }
    return null;
}
// Validate the sign-up form. Each failed check shows its own alert; only
// when every check passes is comprobarUsuario() fired, which both verifies
// name availability and performs the actual registration.
function registrar(){
    let continuar = true
    let nombre = document.getElementById("nombre").value;
    let apellido = document.getElementById("apellido").value;
    let fecha = document.getElementById("nacimiento").value;
    let genero = document.getElementById("sexo").value;
    let telefono = document.getElementById("telefono").value;
    let usuario = document.getElementById("usuario").value;
    let contraseña = document.getElementById("contrasena").value;
    if(nombre == null || nombre.length == 0){
        alert("No ha ingresado su nombre");
        continuar = false;
    }
    if(apellido == null || apellido.length == 0){
        alert("No ha ingresado su apellido");
        continuar = false;
    }
    if(fecha.length == 0){
        alert("No ha ingresado una fecha valida");
        continuar = false;
    }
    if(genero == "nulo"){
        continuar = false;
        alert("Seleccione su sexo");
    }
    // The phone is optional, but when present it must be exactly 8 digits.
    if(telefono.length > 0 && telefono.length != 8){
        continuar = false;
        alert("Ingrese un numero valido (8 Números)\nSi no desea ingresar su número telefónico,"+
        "\nborre los datos de este campo");
    }
    // BUG FIX: the password used to be validated AFTER the username branch
    // below had already fired the registration request, so an account with a
    // too-short password could still be created. Check it first (matching
    // the order used on the profile-edit page).
    if(contraseña == null || contraseña.length < 8){
        continuar = false
        alert("Ingrese una contraseña valida (Al menos 8 caracteres)");
    }
    if(usuario == null || usuario.length == 0){
        continuar = false;
        alert("Rellene el campo de usuario");
    }else if(usuario.length > 0){
        if(usuario == "admin"){
            // "admin" is reserved for the built-in administrator account.
            alert("usuario no valido");
            continuar = false;
        }else if(continuar == true){
            comprobarUsuario("usuario",nombre,apellido,fecha,genero,telefono,contraseña);
        }
    }
    if(continuar == true){
        console.log("se enviaran los campos")
    }
}
// Ask the server (via /registro/) whether the chosen username is free
// ("oknt" means taken). If available, POST the full registration payload
// with a second request and then move on to the login page.
function comprobarUsuario(identificador,nombre,apellido,fecha,genero,telefono,contraseña){
    let usuario = document.getElementById(identificador).value;
    let data = {
        usuario: usuario,
    }
    let informacion = {
        nombre: nombre,
        apellido: apellido,
        fecha: fecha,
        genero: genero,
        telefono: telefono,
        usuario: usuario,
        // BUG FIX: this field previously contained the invalid placeholder
        // token "<PASSWORD>" (a syntax error); it must carry the password
        // argument received by this function.
        contraseña: contraseña
    }
    fetch("/registro/", {
        method: "POST",
        headers: {
            "X-CSRFToken": getCookie("csrftoken"),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-Request-With": "XMLHttpRequest"
        },
        body: JSON.stringify(data),
        mode: "cors",
        cache: "default",
        credentials: "include"
    })
    .then(
        function(respuesta){
            respuesta.text().then(
                function(da){
                    if(da == "oknt"){
                        alert("este nombre de usuario no está disponible");
                    }
                    else{
                        fetch("/registro/",{
                            method: "POST",
                            headers: {
                                "X-CSRFToken": getCookie("csrftoken"),
                                "Content-Type": "application/json",
                                "Accept": "application/json",
                                "X-Request-With": "XMLHttpRequest"
                            },
                            body: JSON.stringify(informacion),
                            mode: "cors",
                            cache: "default",
                            credentials: "include"
                        })
                        .then(
                            // BUG FIX: previously ".then(window.location.href = ...)"
                            // navigated away immediately, before the registration
                            // request had completed. Redirect inside a callback.
                            function(){
                                window.location.href = ("/login/")
                            }
                        )
                        .catch(
                            function(error){
                                alert("Ha ocurrido algun error");
                                console.log(error);
                            }
                        );
                    }
                }
            );
        }
    )
    .catch(
        function(error){
            alert("Error ocurrido!");
            console.log(error);
        }
    );
}
var usernom;
// Return the decoded value of the named cookie, or null when it is not set.
function getCookie(name) {
    if (!document.cookie || document.cookie === '') {
        return null;
    }
    const prefix = name + '=';
    for (const trozo of document.cookie.split(';')) {
        const cookie = trozo.trim();
        if (cookie.substring(0, prefix.length) === prefix) {
            return decodeURIComponent(cookie.substring(prefix.length));
        }
    }
    return null;
}
// Log-in entry point: read the credentials from the form. The hard-coded
// "admin"/"1234" pair goes straight to the admin page; anything else is
// checked against the server.
function loguear(){
    let usuario = document.getElementById("usuario").value;
    let contrasena = document.getElementById("contrasena").value;
    // NOTE(review): credentials hard-coded in client-side JS are visible to
    // anyone viewing the source and provide no real protection — this check
    // should be moved server-side.
    if(usuario == "admin" && contrasena == "1234"){
        window.location.href = "/administrador/"
    }else{
        solicitudBuscar(usuario,contrasena)
    }
}
// POST the typed credentials to /login/; the server replies "noexiste" for a
// failed match, or "<tipo>-<indice>" which is used to route the user to the
// home page for their role.
function solicitudBuscar(usuario,contrasena){
    usernom = usuario;
    // BUG FIX: "data" was previously assigned without let/const/var, creating
    // an accidental global (and a ReferenceError under strict mode).
    const data = {
        usuario: usuario,
        contrasena: contrasena
    }
    fetch("/login/", {
        method: "POST",
        headers: {
            "X-CSRFToken": getCookie("csrftoken"),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "X-Request-With": "XMLHttpRequest"
        },
        body: JSON.stringify(data),
        mode: "cors",
        cache: "default",
        credentials: "include"
    })
    .then(
        function(respuesta){
            respuesta.text().then(
                function(dat){
                    console.log(dat)
                    // Strip stray carriage returns, then split "tipo-indice".
                    let lis = dat.replace(/\r/g,'').split("-")
                    console.log(lis)
                    if(lis[0] == "noexiste"){
                        alert("verifique sus credenciales")
                    }else{
                        renderizarVista(lis[0],lis[1])
                    }
                }
            )
        }
    )
    .catch(
        function(error){
            alert("Error ocurrido!")
            console.log(error)
        }
    );
}
// Route the logged-in user to the home page matching their role tag
// ("doc", "enf" or "pac"); unknown tags leave the page unchanged.
function renderizarVista(tipo,indice){
    const rutas = {
        doc: "/doctor/",
        enf: "/enfermeria/",
        pac: "/paciente/"
    };
    const base = rutas[tipo];
    if (base !== undefined) {
        window.location.href = base + "?" + indice + "?" + usernom;
    }
}
"""hospital URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from appweb import views
# URL routes for the hospital app. Each path maps to a view in appweb/views.py.
urlpatterns = [
    #path('admin/', admin.site.urls),
    # Authentication and account creation
    path('login/', views.login, name="login"),
    path('registro/', views.registro, name="register"),
    # Administrator area: bulk CSV uploads and record tabs
    path('administrador/', views.administracion, name = "administrador"),
    path('administrador/tabs/', views.admin_tabs, name="admintabs"),
    # Edit forms for each record type
    path('modificar-doctor/',views.modify_doc, name="moddoc"),
    path('modificar/',views.modify_ep, name="mod"),
    path('modificar-medicina/', views.modify_med, name="modmed"),
    # Read-only detail views
    path('ver-doctor/',views.verdoc, name="verdoc"),
    path('ver-datos/', views.verep, name="verep"),
    path('ver-medicina/', views.vermed, name="vermed"),
    # Role home pages
    path('doctor/', views.home_doctor, name="doctor"),
    path('enfermeria/', views.home_nurse, name="enfermera"),
    path('enfermeria/factura/',views.factura_nurse, name="factura"),
    path('paciente/', views.home_patient, name="paciente"),
]
<file_sep>/hospital/appweb/static/js/admin.js
// Return the decoded value of the named cookie, or null when it is absent.
function getCookie(name) {
    if (!document.cookie || document.cookie === '') {
        return null;
    }
    const prefix = name + '=';
    for (const pieza of document.cookie.split(';')) {
        const cookie = pieza.trim();
        if (cookie.substring(0, prefix.length) === prefix) {
            return decodeURIComponent(cookie.substring(prefix.length));
        }
    }
    return null;
}
// Wire the doctors CSV <input type="file">: on selection, read the file,
// require exactly 8 comma-separated columns in the header row, and upload
// every data row to /administrador/ tagged as type "doc".
function cargarD(){
    const filedoc = document.getElementById("inputDoc");
    filedoc.addEventListener("change",function(e){
        let archivo = e.target.files[0];
        let lector = new FileReader();
        lector.readAsText(archivo);
        lector.addEventListener("load",function(e){
            let texto = e.target.result;
            // Normalise Windows line endings before splitting into rows.
            let lineas = texto.replace(/\r/g, '').split("\n");
            let comas = lineas[0].split(",")
            let contenido = new Array();
            if(comas.length != 8){
                alert("el archivo no contiene la información necesaria");
                document.getElementById("archivos1").reset();
            }else{
                // Skip the header row (i = 1) and the trailing empty line.
                for (var i = 1; i < lineas.length-1; i++){
                    contenido[i-1] = lineas[i].split(",");
                }
                // BUG FIX: "data" was previously assigned without a
                // declaration, leaking an accidental global.
                const data = {
                    usuarios: contenido,
                    tipo: "doc"
                }
                fetch("/administrador/", {
                    method: "POST",
                    headers: {
                        "X-CSRFToken": getCookie("csrftoken"),
                        "Content-Type": "application/json",
                        "Accept": "application/json",
                        "X-Request-With": "XMLHttpRequest"
                    },
                    body: JSON.stringify(data),
                    mode: "cors",
                    cache: "default",
                    credentials: "include"
                }).then(
                    function(respuesta){
                        respuesta.text().then(
                            function(d){
                                // FIX: the message previously read "Doctore".
                                alert("carga con éxito: Doctor - "+d)
                            }
                        )
                    }
                ).catch(
                    function(error){
                        alert("Ha ocurrido algun error");
                        console.log(error);
                    }
                );
            }
        })
    })
}
// Wire the nursing-staff CSV <input type="file">: on selection, read the
// file, require exactly 7 comma-separated columns in the header row, and
// upload every data row to /administrador/ tagged as type "enf".
function cargarE(){
    const filedoc = document.getElementById("inputEnf");
    filedoc.addEventListener("change",function(e){
        let archivo = e.target.files[0];
        let lector = new FileReader();
        lector.readAsText(archivo);
        lector.addEventListener("load",function(e){
            let texto = e.target.result;
            // Normalise Windows line endings before splitting into rows.
            let lineas = texto.replace(/\r/g, '').split("\n");
            let comas = lineas[0].split(",")
            let contenido = new Array();
            if(comas.length != 7){
                alert("el archivo no contiene la información necesaria");
                document.getElementById("archivos2").reset();
            }else{
                // Skip the header row (i = 1) and the trailing empty line.
                for (var i = 1; i < lineas.length-1; i++){
                    contenido[i-1] = lineas[i].split(",");
                }
                // BUG FIX: "data" was previously assigned without a
                // declaration, leaking an accidental global.
                const data = {
                    usuarios: contenido,
                    tipo: "enf"
                }
                fetch("/administrador/", {
                    method: "POST",
                    headers: {
                        "X-CSRFToken": getCookie("csrftoken"),
                        "Content-Type": "application/json",
                        "Accept": "application/json",
                        "X-Request-With": "XMLHttpRequest"
                    },
                    body: JSON.stringify(data),
                    mode: "cors",
                    cache: "default",
                    credentials: "include"
                }).then(
                    function(respuesta){
                        respuesta.text().then(
                            function(d){
                                alert("carga con éxito: Enfermería - "+d)
                            }
                        )
                    }
                ).catch(
                    function(error){
                        alert("Ha ocurrido algun error");
                        console.log(error);
                    }
                );
            }
        })
    })
}
// Wire the medicines CSV <input type="file">: on selection, read the file,
// require exactly 4 comma-separated columns in the header row, and upload
// every data row to /administrador/ tagged as type "med".
function cargarM(){
    const filedoc = document.getElementById("inputMed");
    filedoc.addEventListener("change",function(e){
        let archivo = e.target.files[0];
        let lector = new FileReader();
        lector.readAsText(archivo);
        lector.addEventListener("load",function(e){
            let texto = e.target.result;
            // Normalise Windows line endings before splitting into rows.
            let lineas = texto.replace(/\r/g, '').split("\n");
            let comas = lineas[0].split(",")
            let contenido = new Array();
            if(comas.length != 4){
                alert("el archivo no contiene la información necesaria");
                document.getElementById("archivos4").reset();
            }else{
                // Skip the header row (i = 1) and the trailing empty line.
                for (var i = 1; i < lineas.length-1; i++){
                    contenido[i-1] = lineas[i].split(",");
                }
                // BUG FIX: "data" was previously assigned without a
                // declaration, leaking an accidental global.
                const data = {
                    usuarios: contenido,
                    tipo: "med"
                }
                fetch("/administrador/", {
                    method: "POST",
                    headers: {
                        "X-CSRFToken": getCookie("csrftoken"),
                        "Content-Type": "application/json",
                        "Accept": "application/json",
                        "X-Request-With": "XMLHttpRequest"
                    },
                    body: JSON.stringify(data),
                    mode: "cors",
                    cache: "default",
                    credentials: "include"
                }).then(
                    function(respuesta){
                        respuesta.text().then(
                            function(d){
                                alert("carga con éxito: Medicamento - "+d)
                            }
                        )
                    }
                ).catch(
                    function(error){
                        alert("Ha ocurrido algun error");
                        console.log(error);
                    }
                );
            }
        })
    })
}
// Wire the patients CSV <input type="file">: on selection, read the file,
// require exactly 7 comma-separated columns in the header row, and upload
// every data row to /administrador/ tagged as type "pac".
function cargarP(){
    const filedoc = document.getElementById("inputPac");
    filedoc.addEventListener("change",function(e){
        let archivo = e.target.files[0];
        let lector = new FileReader();
        lector.readAsText(archivo);
        lector.addEventListener("load",function(e){
            let texto = e.target.result;
            // Normalise Windows line endings before splitting into rows.
            let lineas = texto.replace(/\r/g, '').split("\n");
            let comas = lineas[0].split(",")
            let contenido = new Array();
            if(comas.length != 7){
                alert("el archivo no contiene la información necesaria");
                document.getElementById("archivos3").reset();
            }else{
                // Skip the header row (i = 1) and the trailing empty line.
                for (var i = 1; i < lineas.length-1; i++){
                    contenido[i-1] = lineas[i].split(",");
                }
                // BUG FIX: "data" was previously assigned without a
                // declaration, leaking an accidental global.
                const data = {
                    usuarios: contenido,
                    tipo: "pac"
                }
                fetch("/administrador/", {
                    method: "POST",
                    headers: {
                        "X-CSRFToken": getCookie("csrftoken"),
                        "Content-Type": "application/json",
                        "Accept": "application/json",
                        "X-Request-With": "XMLHttpRequest"
                    },
                    body: JSON.stringify(data),
                    mode: "cors",
                    cache: "default",
                    credentials: "include"
                }).then(
                    function(respuesta){
                        respuesta.text().then(
                            function(d){
                                alert("carga con éxito: Paciente - "+d)
                            }
                        )
                    }
                ).catch(
                    function(error){
                        alert("Ha ocurrido algun error");
                        console.log(error);
                    }
                );
            }
        })
    })
}
// Attach all four CSV upload handlers once the admin page has loaded.
window.addEventListener("load",cargarD);
window.addEventListener("load",cargarE);
window.addEventListener("load",cargarM);
window.addEventListener("load",cargarP);
| e79b6a3315ac3e29646d1b207fdbfbf539c055bf | [
"JavaScript",
"Python",
"HTML"
] | 12 | JavaScript | JNPRZ2000/Proyecto2_IPC1 | 144160a43c5863897f330f64493be2f5d8161db4 | 85cdf94eab2442b751b0a9c3ab69787f2ad5a7ee |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.