branch_name (string) | text (string) | directory_id (string) | languages (list) | num_files (int64) | repo_language (string) | repo_name (string) | revision_id (string) | snapshot_id (string)
---|---|---|---|---|---|---|---|---
refs/heads/master
|
<file_sep>let AddworkExpBtn = document.querySelector("#weAdd-btn");
let RMworkExpBtn = document.querySelector("#weRem-btn");
let workExpFeild = document.querySelector("#workExp");
let workExpTime = document.getElementsByClassName("weFieldTIME");
let eduAddBtn = document.querySelector("#eduAddbtn");
let eduRMBtn = document.querySelector("#eduRem-btn");
let ProjAddBtn = document.querySelector("#PAdd-btn");
let ProjRMBtn = document.querySelector("#PRRem-btn");
let ProjectField = document.querySelector("#projectUser");
let skillAddBtn = document.querySelector("#skAdd-btn");
let skillRMBtn = document.querySelector("#skRem-btn");
let skillField = document.querySelector("#skilltUser");
let InterestAddBtn = document.querySelector("#ITAdd-btn");
let InterestRMBtn = document.querySelector("#ITRem-btn");
let educationField = document.querySelector("#education");
let InterestField = document.querySelector("#interest");
let workfeid;
let timeworkxp;
// work exp Add
AddworkExpBtn.addEventListener("click", () => {
workfeid = document.createElement("textarea");
workfeid.classList.add("form-control");
workfeid.classList.add("weField");
workfeid.classList.add("mt-3");
workfeid.setAttribute("rows", 2);
workfeid.setAttribute("placeholder", "Role at company");
timeworkxp = document.createElement("input");
timeworkxp.classList.add("form-control");
timeworkxp.classList.add("weFieldTIME");
timeworkxp.classList.add("mt-2");
timeworkxp.setAttribute("placeholder", "TimePeriod");
timeworkxp.setAttribute("maxlength", "20");
workExpFeild.appendChild(workfeid);
workExpFeild.appendChild(timeworkxp);
});
// work exp remove
RMworkExpBtn.addEventListener("click", () => {
let del = confirm("Do you really want to remove it");
if (del == true) {
// remove
workExpFeild.removeChild(workExpFeild.lastElementChild);
workExpFeild.removeChild(workExpFeild.lastElementChild);
} else {
console.log(2);
}
});
//Edu Add
eduAddBtn.addEventListener("click", () => {
let ed = document.createElement("textarea");
ed.classList.add("form-control");
ed.classList.add("EdField");
ed.classList.add("mt-3");
ed.setAttribute("rows", 2);
ed.setAttribute("placeholder", "Enter here");
educationField.appendChild(ed);
});
// edu remove
eduRMBtn.addEventListener("click", () => {
let del = confirm("Do you really want to remove it");
if (del == true) {
// remove
educationField.removeChild(educationField.lastElementChild);
} else {
console.log(2);
}
});
// Interest add
InterestAddBtn.addEventListener("click", () => {
let Interestf = document.createElement("textarea");
Interestf.classList.add("form-control");
Interestf.classList.add("interestField");
Interestf.classList.add("mt-3");
Interestf.setAttribute("rows", 1);
Interestf.setAttribute("placeholder", "Enter here");
InterestField.appendChild(Interestf);
});
// Interest remove
InterestRMBtn.addEventListener("click", () => {
let del = confirm("Do you really want to remove it");
if (del == true) {
// remove
InterestField.removeChild(InterestField.lastElementChild);
} else {
console.log(2);
}
});
// Project add btn
ProjAddBtn.addEventListener("click", () => {
let projectittle = document.createElement("input");
projectittle.classList.add("form-control");
projectittle.classList.add("projectFieldTittle");
projectittle.classList.add("mt-3");
projectittle.setAttribute("type", "text");
projectittle.setAttribute("placeholder", "Project tittle");
ProjectField.appendChild(projectittle);
});
// Project remove btn
ProjRMBtn.addEventListener("click", () => {
let del = confirm("Do you really want to remove it");
if (del == true) {
// remove
ProjectField.removeChild(ProjectField.lastElementChild);
} else {
console.log(2);
}
});
// skills add btn
skillAddBtn.addEventListener("click", () => {
let skilltittle = document.createElement("input");
skilltittle.classList.add("form-control");
skilltittle.classList.add("skillFieldTittle");
skilltittle.classList.add("mt-3");
skilltittle.setAttribute("type", "text");
skilltittle.setAttribute("placeholder", "skill title");
skillField.appendChild(skilltittle);
});
// skill rem btn
skillRMBtn.addEventListener("click", () => {
let del = confirm("Do you really want to remove it");
if (del == true) {
// remove
skillField.removeChild(skillField.lastElementChild);
} else {
console.log(2);
}
});
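// Build the CV template from the form values and switch from the form view to the template view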
GenerateCV = () => {
// PhotoCV
let photoField = document.getElementById("photocvfield").files;
let imgTemplate = document.getElementById("imgT");
// console.log(photoField);
let imageReader = new FileReader();
if (photoField.length != 0) imageReader.readAsDataURL(photoField[0]);
imageReader.onload = () => {
// runs once the selected image file has been read
// console.log(imageReader.result);
if (imageReader.result != "") {
imgTemplate.src = imageReader.result;
}
};
// names
let nameField = document.getElementById("nameField").value;
let templateName = document.getElementById("nameT");
templateName.innerHTML = nameField;
// mobile
let mobieField = document.getElementById("contactField").value;
let mobileT = document.getElementById("mobileT");
mobileT.innerHTML = mobieField;
// address
let addressField = document.getElementById("addressField").value;
let addressT = document.getElementById("addressT");
addressT.innerHTML = addressField;
// email
let emailField = document.getElementById("emailField").value;
let emailtT = document.getElementById("emailT");
emailtT.innerHTML = emailField;
// job title
let jobField = document.getElementById("jobtitleField").value;
let templateJobtitle = document.getElementById("JobNameT");
templateJobtitle.innerHTML = jobField;
// Social links
let linkField = document.getElementById("linkedinField").value;
let linkT = document.getElementById("linkIdT");
// console.log(linkField);
let alinkedin = document.createElement("a");
alinkedin.href = linkField;
alinkedin.innerText = "Linkedin";
linkT.innerHTML = "";
linkT.appendChild(alinkedin);
// github
let gitField = document.getElementById("gitField").value;
let gitT = document.getElementById("gitIdT");
let agitub = document.createElement("a");
agitub.href = gitField;
agitub.innerText = "Github";
gitT.innerHTML = "";
gitT.appendChild(agitub);
// portfolio
let portfolioField = document.getElementById("portfolioField").value;
let portT = document.getElementById("portIdT");
let aport = document.createElement("a");
aport.href = portfolioField;
aport.innerText = "Portfolio";
portT.innerHTML = "";
portT.appendChild(aport);
// Work Experience
let workExp = document.getElementsByClassName("weField"); //array
let str = "";
for (let i of workExp) {
str =
str +
` <li style="
margin-bottom: 10px;
">${i.value}</li>`;
}
let weTemplate = document.getElementById("weT");
weTemplate.innerHTML = str;
let workexperiod = document.getElementById("TworkPeriod");
let expString = "";
for (let j of workExpTime) {
expString =
expString +
` <li style="margin-bottom: 10px; list-style-type: none;">${j.value}</li>`;
}
workexperiod.innerHTML = expString;
// Projects
let projtittle = document.getElementsByClassName("projectFieldTittle");
let projdesc = document.getElementsByClassName("projectFieldDesc");
let tittleStr = "";
let descStr = "";
for (let i of projtittle) {
tittleStr =
tittleStr +
`<li class="project-title" id="TprojTitle" style="display: block margin-bottom: 10px;">${i.value}</li> `;
}
let TprojTitle = document.getElementById("projSec");
TprojTitle.innerHTML = tittleStr;
// for(let j of projdesc){
// descStr+= `<span id="TprojDesc" class="project-tagline">${j.value}</span>`
// }
// let Tprojdesc = document.getElementById("TprojDesc")
// Tprojdesc.innerHTML=descStr;
// skills
let skilltittle = document.getElementsByClassName("skillFieldTittle");
let skilltemp = document.getElementById("skillId");
let skillStr = "";
for (let i of skilltittle) {
skillStr =
skillStr +
`<li class="level-title badge badge-pill badge-primary"style="display: block margin-bottom: 10px;">${i.value}</li> `;
}
skilltemp.innerHTML = skillStr;
// Languages
let nativeLanField = document.getElementById("NatLField").value;
let proLanField = document.getElementById("proLField").value;
let proLag = document.getElementById("proL");
let nativeLag = document.getElementById("nativeL");
proLag.innerHTML = proLanField;
nativeLag.innerHTML = nativeLanField;
// Interest
let interestFvalue = document.getElementsByClassName("interestField");
let interest = document.getElementById("TInterest");
let intStr = "";
for (let i of interestFvalue) {
intStr += ` <li>${i.value}</li>`;
}
interest.innerHTML = intStr;
// Education Qualification
let EdField = document.getElementsByClassName("EdField");
let str1 = "";
for (let i of EdField) {
str1 = str1 + `<li>${i.value}</li>`;
}
let eduTemplate = document.getElementById("edT");
eduTemplate.innerHTML = str1;
// console.log(str1);
document.getElementById("cv-form").style.display = "none";
document.getElementById("cv-template").style.display = "block";
};
PrintCV = () => {
window.print();
};
EditBack = () => {
document.getElementById("cv-template").style.display = "none";
document.getElementById("cv-form").style.display = "block";
};
// TODO: show an alert when no image is selected
<file_sep>
# <div align="center"><img src="https://github.com/swaraj961/Resume-Builder/blob/master/images/online_cv.svg" alt="icon" width=40> Resume-Builder </div>
- As the name suggests, it's a <b>resume builder app</b> that takes the user's professional and personal details, skills, photo etc. as input and builds a resume quickly.
- It comes with <b>5 different themes</b> for the resume UI.
- Users can re-edit the fields and print or save the resume as a PDF.
- Serves the purpose of creating a resume with ease.
- A simple DOM manipulation project.
- Made using HTML, CSS, JavaScript and Bootstrap.
- Made for #Pephack-2.

## Demo: <a href="https://youtu.be/SBGNM9pxZaw">Video</a> | <a href="https://swaraj961.github.io/Resume-Builder/">Live website</a>
<img src="https://github.com/swaraj961/Resume-Builder/blob/master/demo.gif"/>
|
ac9776220025b4aa5ac27f5395ea36b70ef484a0
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
swaraj961/Resume-Builder
|
16ff180f21e6811abdfaa0274d84f645e1d73b44
|
20e1cccfe3d3a51f4ef4b84ed8a82668a924923a
|
refs/heads/master
|
<repo_name>MartinCui/martincui.com<file_sep>/Article/ArticleProperty.cs
using System;
using System.Collections.Generic;
namespace martincui.com.Article
{
public class ArticleProperty
{
public string Title{get;set;}
public DateTime CreateDate{get;set;}
public DateTime UpdateDate{get;set;}
public List<string> Tags{get;set;}
public List<string> Keywords{get;set;}
}
}<file_sep>/Logging/LogRequestMiddleware.cs
namespace martincui.com.Logging
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.Extensions.Logging;
public class LogRequestMiddleware
{
private readonly RequestDelegate next;
private readonly ILogger _logger;
public LogRequestMiddleware(RequestDelegate next, ILoggerFactory loggerFactory)
{
this.next = next;
_logger = loggerFactory.CreateLogger<LogRequestMiddleware>();
}
public async Task Invoke(HttpContext context)
{
var requestBodyStream = new MemoryStream();
var originalRequestBody = context.Request.Body;
await context.Request.Body.CopyToAsync(requestBodyStream);
requestBodyStream.Seek(0, SeekOrigin.Begin);
var url = UriHelper.GetDisplayUrl(context.Request);
var requestBodyText = new StreamReader(requestBodyStream).ReadToEnd();
string currentDateTimeString = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss fff");
_logger.LogInformation(1, $"{currentDateTimeString}, {context.Request.Method}, {url}, {requestBodyText}");
requestBodyStream.Seek(0, SeekOrigin.Begin);
context.Request.Body = requestBodyStream;
await next(context);
context.Request.Body = originalRequestBody;
}
}
}<file_sep>/Controllers/HomeController.cs
namespace martincui.com.Controllers
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Markdig;
using Microsoft.Extensions.FileProviders;
using martincui.com.Article;
using Microsoft.Extensions.Logging;
public class HomeController : Controller
{
private readonly ILogger<HomeController> _logger;
private IArticleCollection _articleCollection;
public HomeController(IArticleCollection articleCollection, ILogger<HomeController> logger)
{
_articleCollection = articleCollection;
_logger = logger;
}
public IActionResult Index()
{
_logger.LogInformation($"Visiting home page.");
SetAllTags();
string queryTag = null;
if (Request.Query.ContainsKey("tag"))
{
queryTag = Request.Query["tag"];
}
ViewBag.QueryTag = queryTag;
IList<Article> allArticles = _articleCollection.AllArticles.Value;
IEnumerable<Article> orderedArticles = null;
if (string.IsNullOrWhiteSpace(queryTag))
{
orderedArticles = allArticles.OrderBy(a => a.UpdateDate).Reverse();
}
else
{
orderedArticles = allArticles.Where(a => a.Tags.Contains(queryTag)).OrderBy(a => a.UpdateDate).Reverse();
}
ViewBag.ShowArticles = orderedArticles;
return View();
}
public IActionResult ReadArticle(string articleName)
{
_logger.LogInformation($"ReadArticle: {articleName}");
SetAllTags();
IList<Article> allArticles = _articleCollection.AllArticles.Value;
Article article = allArticles.FirstOrDefault(a => a.Name == articleName);
if (article == null)
{
Response.StatusCode = 404;
_logger.LogWarning($"ReadArticle: Cannot find article: {articleName}");
return View("404");
}
else
{
ViewBag.Article = article;
return View();
}
}
private void SetAllTags()
{
IList<Article> allArticles = _articleCollection.AllArticles.Value;
Dictionary<string, int> allTags = new Dictionary<string, int>();
foreach (Article article in allArticles)
{
foreach (string tag in article.Tags)
{
if (allTags.ContainsKey(tag))
allTags[tag]++;
else
allTags[tag] = 1;
}
}
ViewBag.Tags = allTags;
}
}
}
<file_sep>/articles/sling-alias-lost-in-aem6.3.md
## Case
This is probably an AEM 6.3-specific issue; hopefully Adobe will have it fixed soon.
Since upgrading to AEM 6.3, we've been experiencing sling:alias loss that affects most of our sites and pages.
Our site is global, and the non-English websites rely heavily on AEM's sling:alias page property to support local-language URLs while the page names stay the same as the English version for easier tracking and comparison. Say we have a page design named "how-it-works" that exists in 3 language versions; the content layout in author would look something like this:
- /content/our-company/gb/en/how-it-works.html
- /content/our-company/mx/es/how-it-works.html (with sling:alias value "cómo-funciona" for Spanish)
- /content/our-company/tr/tr/how-it-works.html (with sling:alias value "nasıl-çalışır" for Turkish)
These 3 pages will be visible to Google as below
- https://www.example.com/en-gb/how-it-works.html
- https://www.example.com/es-mx/**cómo-funciona**.html (by browser optimized URL display)
- https://www.example.com/tr-tr/**nasıl-çalışır**.html (by browser optimized URL display)
while keeping the same page name of "how-it-works" -- this lets us compare these 3 pages across markets even though they have different URLs.
After the upgrade to AEM 6.3, authors noticed that sometimes, after content editing and page activation in author, some of the URLs relying on these sling:alias values become inaccessible with a 404 response. I went to the publishers and confirmed that the sling:alias values do exist there. The problem can be temporarily worked around by changing a sling:alias to something else and then changing it back so the cache gets refreshed, but apparently this is not an option for us, relying as heavily as we do on the sling:alias property.
The thing is, authors couldn't find any pattern to reproduce the issue -- sometimes it happens, sometimes it doesn't.
## Why
We thought it was random, permission related, or something wrong on our side. That's why I spent a lot of time checking all the usual settings. After several hours I confirmed that nothing was wrong in our configuration or our code, and started to suspect the robustness of AEM's sling:alias mechanism itself.
I then got lucky: I found I was able to reproduce the issue by importing different packages (one from QA and one from live) into my local publisher.
Following that single clue, and after hours of fun with decompiled Java code, I finally found the root cause.
In this version of Sling there is, by default, a caching mechanism for sling:alias, so alias configurations are normally read from a cache instead of from the JCR nodes. This also means the cache needs to be refreshed by an event listener each time JCR nodes are updated or deleted.
In the class "org.apache.sling.resourceresolver.impl.mapping.MapEntries", the method "removeResource" handles node removal events.
As shown below, "null" is passed to "removeAlias" when "contentPath" (used as a caching key) is "/content/our-company/mx/es" and the actually removed node is, say, "/content/our-company/mx/es/jcr:content/a/b/c/d/e".
Because "null" is passed to "removeAlias", that method removes this caching key from the caching dictionary.
This causes the alias cache under /content/Englishtown/mx/es to be cleared.
Activating child pages afterwards may bring their own aliases back, but the cache won't be complete again until every child page has been activated.

## How to solve it
I found in the code that there is a switch controlling whether this advanced caching feature is turned on.
It's in the OSGi configuration "Apache Sling Resource Resolver Factory".
The issue was gone after I changed this configuration. It shouldn't cause a noticeable performance problem for us, since we have another 2 layers of caching on top of the publishers.

## Follow up
I take this issue seriously, since returning 404 for some URLs really hurts clients like us.
I was surprised that 6.3 has been out for several months and this issue has been around for that long... do other users not use alias?
I was about to send a pull request to the Sling project before I found the issue had already been spotted in [this case](https://issues.apache.org/jira/browse/SLING-7018)
and fixed in [this commit (July 24th this year)](https://github.com/apache/sling/commit/7fd064349247a81a8e48711253ed1f64ab1330b3#diff-0bb6bb8f8fd6e2f123df8bed3bdd9876L334)
with the commit comment:
**SLING-7018: Fix a bug that removed to many aliases in certain cases when a resource got removed.**
Glad to know it's been noticed and fixed in the latest version of Sling. For AEM, I'll wait until a stable release patch is out.
<file_sep>/Article/ArticleCollection.cs
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.Extensions.FileProviders;
using System.Collections.ObjectModel;
using Newtonsoft.Json;
namespace martincui.com.Article
{
public class ArticleCollection : IArticleCollection
{
private IFileProvider _fileProvider;
private Lazy<IList<Article>> _allArticles;
public ArticleCollection(IFileProvider fileProvider)
{
_fileProvider = fileProvider;
_allArticles = new Lazy<IList<Article>>(GetAllArticles);
}
public Lazy<IList<Article>> AllArticles{
get => _allArticles;
}
public ReadOnlyCollection<Article> GetAllArticles()
{
Dictionary<string, Article> articles = new Dictionary<string, Article>();
List<Article> articleList = new List<Article>();
foreach(IFileInfo fileInfo in _fileProvider.GetDirectoryContents("articles"))
{
if(fileInfo.IsDirectory || fileInfo.Name.StartsWith('.'))
continue;
if(!fileInfo.Name.EndsWith(".json") && !fileInfo.Name.EndsWith(".md"))
continue;
string name = fileInfo.Name.Substring(0, fileInfo.Name.LastIndexOf('.')).ToLower();
Article article = null;
if(articles.ContainsKey(name)){
article = articles[name];
}
else{
article = new Article{ Name = name };
articles[name] = article;
articleList.Add(article);
}
String fileContent = null;
using (Stream fileStream = fileInfo.CreateReadStream())
using (StreamReader reader = new StreamReader(fileStream)){
fileContent = reader.ReadToEnd();
}
if(fileInfo.Name.EndsWith(".json")){
ArticleProperty property = JsonConvert.DeserializeObject<ArticleProperty>(fileContent);
article.CreateDate = property.CreateDate;
article.Tags = property.Tags;
article.Title = property.Title;
article.UpdateDate = property.UpdateDate > property.CreateDate ? property.UpdateDate : property.CreateDate;
article.Keywords = property.Keywords;
}
else{
article.MdContent = fileContent;
}
}
return articleList.AsReadOnly();
}
}
}<file_sep>/articles/the-beauty-of-literature.md
> Written on 2017-02-13, originally kept in my iCloud Notes and now moved to my personal website.
That day my girlfriend was driving from London to Tonbridge; light snow began to fall on the way and the temperature was very low.
Whipped up by the wicked British winter wind, snowflakes filled the sky, blocking the view and making heaven and earth look all the more vast and desolate.
I gazed into the distance at the boundless wind and snow, and found myself reciting: "千山鸟飞绝,万径人踪灭" -- "From a thousand hills, all birds have vanished; on ten thousand paths, no trace of man remains."
Looking back on it now, I still find the moment delightful.
I learned this quatrain by Liu Zongyuan more than a decade ago; while memorising it, I sketched pictures in my mind based on my own understanding of the lines to deepen my comprehension and memory. I never expected that, over ten years later, a scene somewhere in England would bring the poem back to me.
Poetry really does have such charm: committed to heart, merged into scenery, met again in spirit. The ancients wrote scenery into poems; later generations find the poems in the scenery.
It is said that poetry, calligraphy, painting and seal carving are inseparable; this may serve as one small proof.
<file_sep>/Article/IArticleCollection.cs
using System;
using System.Collections.Generic;
namespace martincui.com.Article
{
public interface IArticleCollection
{
Lazy<IList<Article>> AllArticles{get;}
}
}<file_sep>/Startup.cs
namespace martincui.com
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using martincui.com.Article;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.FileProviders;
using Microsoft.Extensions.Logging;
public class Startup
{
private IHostingEnvironment _hostingEnvironment;
public Startup(IConfiguration configuration, IHostingEnvironment hostingEnvironment)
{
Configuration = configuration;
_hostingEnvironment = hostingEnvironment;
}
public IConfiguration Configuration { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
var physicalProvider = _hostingEnvironment.ContentRootFileProvider;
services.AddSingleton<IFileProvider>(physicalProvider);
services.AddSingleton<IArticleCollection, ArticleCollection>();
services.AddSingleton<IHttpContextAccessor, HttpContextAccessor>();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
else
{
app.UseExceptionHandler("/Home/Error");
}
app.UseStaticFiles();
app.UseMiddleware<Logging.LogRequestMiddleware>();
app.UseMvc(routes =>
{
routes.MapRoute(
name: "home",
template: "",
defaults: new { controller = "Home", action = "Index" });
routes.MapRoute(
name: "article",
template: "{*articleName}",
defaults: new { controller = "Home", action = "ReadArticle" });
});
}
}
}
<file_sep>/articles/a-way-to-implement-conditional-donut-caching-or-server-side-content-targeting-using-varnish.md
> Even though the problems this article tries to solve are related to Adobe AEM Personalization, the technique it uses is AEM-agnostic.
## About Varnish
[Varnish](https://varnish-cache.org/index.html) is widely used nowadays as a caching HTTP reverse proxy. I use Varnish as a second-level [Load Balancer](https://f5.com/glossary/load-balancer) behind [F5](https://f5.com/), a traffic controller during releases, a cookie manager, a redirection manager and a first-layer cache server.
## About Adobe Content Targeting
[AEM Personalization](https://docs.adobe.com/docs/en/aem/6-3/author/personalization.html) uses client-side JavaScript to change parts of a page according to configurations defined by authors. A classic example: to maximize your campaign's outcome, you probably want your campaign page's staging component to show different attraction photos according to the visitor's age, geo-location and device. When you need a proper report to record and compare how the different content performs, you use [Adobe Target](http://www.adobe.com/uk/marketing-cloud/target.html) as the targeting engine -- a paid service, of course. When you only need to change the content and don't need a report (for example, you just want to show different phone numbers based on the visitor's geo-location), you choose AEM as the targeting engine to save money and energy.
Like all other client-side content targeting mechanisms, AEM as a targeting engine has all the issues you would expect:
- page rendering either gets blocked, or the page flashes when the targeted content is swapped in.
- the targeted component is shown asynchronously, so JavaScript inside the component also executes asynchronously, which can cause bugs that are hard to locate and fix.
AEM as a targeting engine also has issues it shouldn't have:
- complex and distributed configurations in author.
- oversized campaigns and client-side JavaScript files as time goes by.
- it uses a `<noscript>` tag to hide the default experience, which causes problems when the default experience also outputs a `<noscript>` tag.
- a hard-coded timeout (2 seconds, as I remember).
- it shows duplicated experiences when the network is unstable.
I spent some time implementing content targeting on the server side to solve all the problems listed above. The goal is that the targeted component gets rendered on the page just like the other, non-targeted components.
## Use Varnish to implement server side content targeting
Varnish supports [Edge Side Includes](https://varnish-cache.org/docs/5.1/users-guide/esi.html), which lets us implement donut caching easily. But to achieve the goal mentioned above, this donut caching needs to support showing different esi:include fragments according to predefined conditions. I like to call this common requirement **conditional donut caching**. A simple example: for the HTML page below:
```html
<html>
<body>
<esi:include src="/c1.html"/>
<esi:include src="/c2.html"/>
</body>
</html>
```
with c1.html:
```html
<div>GB</div>
```
and c2.html:
```html
<div>OTHERS</div>
```
The page shows c1.html content for visitors from gb, and shows c2.html content for others.
To achieve a common solution, I assume the condition is already known when the parent page is generated, so the defined conditions can be written into the esi:include URLs. The parent page would then look like:
```html
<html>
<body>
<esi:include src="/c1.html?positive-match&showMeWhenVisitorIsFrom=gb"/>
<esi:include src="/c2.html?negative-match&showMeWhenVisitorIsFrom=gb"/>
</body>
</html>
```
In the Varnish code, "positive-match" and "negative-match" are used to tell whether the current request is a normal page request or a request for an esi:include. When Varnish knows it is processing an esi:include request, it also uses [req_top](https://varnish-cache.org/docs/5.1/reference/vcl.html#req-top) to access the parent page's client context information -- req_top.http.X-Visitor-Geo-Location in this case. It then either finishes the request flow so the real c1/c2 HTML page is returned, or returns an empty 200 page, depending on the condition-match result and whether it is in the positive or negative branch:
```
sub vcl_deliver {
if (req.http.X-Recv-Flow == "conditional-esi") {
set req.http.X-Esi-Condition = regsub(req.http.X-Original-Query, ".*&showMeWhenVisitorIsFrom=([^&]+).*", "\1");
set req.http.X-Esi-Context = req_top.http.X-Visitor-Geo-Location;
if(req.http.X-Original-Query ~ "^positive-match&"){
if(req.http.X-Esi-Condition != req.http.X-Esi-Context){
return (synth(200, "positive: context doesn't match condition, ignoring...."));
}
}
else{
if(req.http.X-Esi-Condition == req.http.X-Esi-Context){
return (synth(200, "negative: context matches condition, ignoring...."));
}
}
}
}
```
The code above makes the request for c1.html return empty content when the visitor is not from GB, and makes the request for c2.html return empty content when the visitor is from GB. Both c1.html and c2.html are cached by our other, customized caching mechanisms and are only used when their conditions match. With that, the simplest version of conditional donut caching is implemented, and server-side content targeting becomes possible.
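The vcl_deliver snippet above relies on two request headers, X-Recv-Flow and X-Original-Query, being prepared earlier in the request flow, and on the top-level request carrying X-Visitor-Geo-Location. As a rough, minimal sketch (only the header names come from the snippet above; the exact detection logic and the way the geo header gets populated are assumptions), vcl_recv could mark conditional ESI sub-requests like this:
```
sub vcl_recv {
    # X-Visitor-Geo-Location is assumed to be set on the top-level request
    # already (e.g. by the load balancer or a GeoIP vmod); that part is not shown.

    # ESI sub-requests carry their condition in the query string: remember the
    # query and mark the request so vcl_deliver knows to evaluate the condition.
    if (req.esi_level > 0 && req.url ~ "\?(positive|negative)-match&") {
        set req.http.X-Recv-Flow = "conditional-esi";
        set req.http.X-Original-Query = regsub(req.url, "^[^?]*\?", "");
    }
}
```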
## Conclusion
By expanding the example shown above, readers can easily append more segmentation factors to c1.html and c2.html. Apart from c1 and c2, you can also add more donut fragments to be shown based on positive/negative conditions of your own. Once all of this is done, server-side content targeting tailored to your own needs is complete.
Some known restrictions should be highlighted here though:
- The parent page shows different content depending on all the segmentation factors. In theory this means the parent page's server-side hash generation and client-side caching should take those factors into consideration (see the sketch after this list); otherwise the same visitor might see an old cached page when visiting the same page under different conditions.
- Regular expressions cannot be used to judge whether the context matches the condition, since both sides are dynamic. Hopefully one day Varnish will support dynamic patterns in regular expressions. [Email me](mailto:<EMAIL>) when you find it is finally supported.
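For the first restriction, here is a minimal sketch of how the parent page's cache key could take the segmentation factor into account (assuming the same X-Visitor-Geo-Location header as in the examples above; the built-in vcl_hash still appends the URL and host afterwards):
```
sub vcl_hash {
    # Hash the segmentation factor into the top-level page's cache key so that
    # visitors matching different conditions never share a cached variant.
    if (req.esi_level == 0 && req.http.X-Visitor-Geo-Location) {
        hash_data(req.http.X-Visitor-Geo-Location);
    }
    # No return here: fall through to the built-in vcl_hash (URL + host).
}
```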
<file_sep>/Article/Article.cs
using System;
using System.Collections.Generic;
using System.Text;
using Markdig;
namespace martincui.com.Article
{
public class Article
{
private Lazy<string> _htmlContent;
public string Name{get;set;}
public string Title{get;set;}
public DateTime CreateDate{get;set;}
public DateTime UpdateDate{get;set;}
public List<string> Tags{get;set;}
public List<string> Keywords{get;set;}
public string MdContent{get;set;}
public Lazy<string> HtmlContent
{
get
{
return _htmlContent;
}
}
public Article(){
_htmlContent = new Lazy<string>(()=>{
return Markdown.ToHtml(this.MdContent);
});
}
public override string ToString()
{
return $"Title: {Title}; Name: {Name}; CreateDate: {CreateDate}; UpdateDate: {UpdateDate}; Tags: {Tags}; MdContent: {MdContent}";
}
public override int GetHashCode()
{
return this.Name.GetHashCode();
}
public override bool Equals(object obj)
{
if(obj == null)
return false;
Article that = obj as Article;
if(that == null)
return false;
return String.Equals(this.Name, that.Name, StringComparison.OrdinalIgnoreCase);
}
public string SampleContent{
get{
return this.MdContent.Substring(0, Math.Min(this.MdContent.Length, 400));
}
}
public string DisplayCreateDate{
get{
return this.CreateDate.ToString("yyyy-MM-dd");
}
}
public string DisplayUpdateDate{
get{
return this.UpdateDate.ToString("yyyy-MM-dd");
}
}
public bool Updated{
get{
return this.UpdateDate > this.CreateDate;
}
}
public string DisplayKeywords{
get{
if(this.Keywords == null || this.Keywords.Count == 0){
return String.Empty;
}
StringBuilder sb = new StringBuilder();
foreach(string keyword in this.Keywords){
sb.Append(keyword);
sb.Append(",");
}
return sb.ToString();
}
}
}
}<file_sep>/articles/about.md
I have been working as a software engineer since 2004, from optimizing video encoding/decoding algorithms (H.263 back then) written in pure C, to researching and implementing log-based, inter-database real-time data replication in C++ (I also coded some RPG on OS/400, if you are old enough to know what that is). I then stopped dealing with the allocate-and-release-memory life and started coding in C# on top of .NET Framework 1.1. I have worked with almost all .NET versions since then, up to the most recent one -- this page is meant to be rendered by [.net Core](https://dotnet.github.io/) 2.0. I started using Java when I was already very familiar with C# and .NET and found them quite similar to each other -- they both try to solve the same problems using similar approaches, and they both keep inventing more and more syntactic sugar to attract beginners without telling them that a sugar overdose causes problems. I miss C#'s *real* runtime generic types while using Java; I miss Java's immense ecosystem of libraries while using C#.
As for the big website I have been maintaining: most of the active front-end pages are rendered by and maintained in [Adobe AEM](http://www.adobe.com/uk/marketing-cloud/experience-manager.html), while the backend parts are mainly on the .NET Framework and will soon (well, hopefully) move to .NET Core on [Docker](https://www.docker.com/) managed by [Kubernetes](https://kubernetes.io/). I work deeply with AEM and solve most AEM platform related problems. I also maintain most of our main website's [Varnish](https://varnish-cache.org/) code (or Varnish script, given the grammar restrictions). Like every online business's website, [Adobe Analytics](http://www.adobe.com/uk/data-analytics-cloud/analytics.html) tracking, [Google Analytics](https://analytics.google.com/) tracking, [Adobe Targeting](http://www.adobe.com/uk/marketing-cloud/target.html), SEO implementation, [Qubit](http://www.qubit.com/) tag management, [Adobe DTM](https://dtm.adobe.com/) tag management and e-commerce payment support are all maintained by the team in London now. While dealing with all of these internal and external products, business and technical requirements, and some tough platform problems, I found things worth writing down and sharing -- the things that cannot be Googled -- and that's why you are seeing this website.
I also love writing Python and use it to create development tools. We have a big Python-based automation test framework as well (plus a Java-based one, for historical reasons). But my Python code has had only one chance to run in the live environment -- it lasted around 2 hours acting as emergency file-based storage during a database switch, and saved several thousand pounds, they say. Things will improve once a platform- and language-agnostic architecture is fully set up -- we are working on that, and migrating the legacy systems is taking a very, very long time.
As a realist, I use JavaScript heavily as well. But I am still surprised, even now, by how far JavaScript has spread. The growth of front-end requirements hides the language's issues and even makes it prosper. I also don't understand why, after so many years, I still see third-party code listening for and swallowing all JavaScript errors, manipulating all Ajax communications, and waiting for asynchronous calls to finish by using *while(true)*. Not to mention dealing with browser and device differences -- sometimes even crying in the toilet doesn't ease the frustration.
And my CSS skill is as good as what you see on this website. Just one question for you CSS folks: I saw some third-party CSS declaring my title to be red using *important* -- can I change it to black using *more important*?
## Disclosures
Since most of my working time is spent on long-term software product development (whether new product development or maintenance of existing systems) rather than short-term projects, I realize my professional point of view usually stems from treating the product as a whole rather than finishing projects in phases -- even though I hold a [PMP (Project Management Professional)](https://www.pmi.org/certifications/types/project-management-pmp) certificate. When the software development process is treated as separate phases instead of an interconnected whole, shortcuts emerge. And shortcuts nearly always hurt -- users, customers, developers -- and the costs they generate are usually far greater than what they save. I have been leaning in this direction for so long that I am not sure whether I hold some bias here.
Regarding framework, library and architecture evaluation, I also find I tend to regard tools that are replaceable in parts, non-invasive and standards-based as decent, while regarding whole-platform, closed, invasive, needlessly self-invented approaches as "old fashioned" and "better to avoid". That might be because I have been working on, maintaining and building software running on those kinds of frameworks, libraries and architectures and was hurt too many times -- which is where extreme views against the "better to avoid" approaches might grow.
## Contact information
Best way to contact me is through email: [<EMAIL>](mailto:<EMAIL>, "email me").
### Mailing Address
<NAME>\
EF Solutions (Services) Ltd\
22 Chelsea Manor Street\
London\
SW3 5RL\
United Kingdom
### Social Media
- [Facebook](https://www.facebook.com/jiandong.cui.9).
- [Twitter](https://twitter.com/MartinCui_).
- Wechat: cjd0659.
- [LinkedIn](https://www.linkedin.com/in/martin-cui-22562074/).
## History
I was born in a small village in Inner Mongolia, China. I grew up there before going to a boarding school in TongLiao city for my high school education. From TongLiao I went to ZheJiang University and got my bachelor's degree in Information Technology. After graduation I went to ShangHai and worked for a small software development company called Taoroad for 5 years. I joined EF Labs in 2010. Three years later, in 2013, I was transferred from the ShangHai office to the EF London office and have been working for EF London since then.
|
af7b050563dea2d8e439940889903d1fcd35c17d
|
[
"Markdown",
"C#"
] | 11 |
C#
|
MartinCui/martincui.com
|
4183e519fbf1b284cb3b21f23b43e21b25fdc14d
|
3734a8fe25fa49990ff49444728191727c204b0b
|
refs/heads/master
|
<repo_name>carlosmonzon/CodeFestBelatrix2014<file_sep>/RearSensorDroid/app/src/main/java/com/codefest/rearsensordroid/MyActivity.java
package com.codefest.rearsensordroid;
import android.app.ListActivity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Set;
import java.util.UUID;
public class MyActivity extends ListActivity {
private ArrayAdapter<String> mArrayAdapter;
private BluetoothAdapter mBluetoothAdapter;
private BluetoothSocket btSocket;
private ArrayList<BluetoothDevice> btDeviceArray = new ArrayList<BluetoothDevice>();
private ConnectAsyncTask connectAsyncTask;
private ConnectedThread connectedThread;
TextView distance;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//application keeps the screen on
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
distance = (TextView) findViewById(R.id.distancia);
mArrayAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1);
setListAdapter(mArrayAdapter);
// Instance AsyncTask
connectAsyncTask = new ConnectAsyncTask();
//Get Bluetooth Adapter
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
// Check smartphone support Bluetooth
if (mBluetoothAdapter == null) {
//Device does not support Bluetooth
Toast.makeText(getApplicationContext(), "Bluetooth is not supported", Toast.LENGTH_LONG).show();
finish();
}
// Check Bluetooth enabled
if (!mBluetoothAdapter.isEnabled()) {
Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBtIntent, 1);
}
// Querying paired devices
Set<BluetoothDevice> pairedDevices = mBluetoothAdapter.getBondedDevices();
if (pairedDevices.size() > 0) {
for (BluetoothDevice device : pairedDevices) {
mArrayAdapter.add(device.getName() + "\n" + device.getAddress());
btDeviceArray.add(device);
}
}
}
@Override
protected void onListItemClick(ListView l, View v, int position, long id) {
BluetoothDevice device = btDeviceArray.get(position);
connectAsyncTask.execute(device);
}
// Click event on Button
private OnClickListener btToggleOnClickListener = new OnClickListener() {
@Override
public void onClick(View v) {
connectedThread.write(new String("L").getBytes());
}
};
private class ConnectAsyncTask extends AsyncTask<BluetoothDevice, Integer, BluetoothSocket> {
private BluetoothSocket mmSocket;
private BluetoothDevice mmDevice;
@Override
protected BluetoothSocket doInBackground(BluetoothDevice... device) {
mmDevice = device[0];
try {
String mmUUID = "00001101-0000-1000-8000-00805F9B34FB";
mmSocket = mmDevice.createInsecureRfcommSocketToServiceRecord(UUID.fromString(mmUUID));
mmSocket.connect();
} catch (Exception e) {
}
return mmSocket;
}
@Override
protected void onPostExecute(BluetoothSocket result) {
btSocket = result;
connectedThread = new ConnectedThread(btSocket);
connectedThread.start();
}
}
/**
* Created by Carlos on 11/15/2014.
*/
private class ConnectedThread extends Thread {
private final BluetoothSocket mmSocket;
private final InputStream mmInStream;
private final OutputStream mmOutStream;
public ConnectedThread(BluetoothSocket socket) {
mmSocket = socket;
InputStream tmpIn = null;
OutputStream tmpOut = null;
try {
tmpIn = socket.getInputStream();
tmpOut = socket.getOutputStream();
} catch (IOException e) {
}
mmInStream = tmpIn;
mmOutStream = tmpOut;
}
public void run() {
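// Read bytes from the Bluetooth socket, split the stream on '#' delimiters,
// and forward each complete reading to the UI handler.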
byte[] buffer = new byte[1024];
int begin = 0;
int bytes = 0;
while (true) {
try {
bytes += mmInStream.read(buffer, bytes, buffer.length - bytes);
for (int i = begin; i < bytes; i++) {
if (buffer[i] == "#".getBytes()[0]) {
mHandler.obtainMessage(1, begin, i, buffer).sendToTarget();
begin = i + 1;
if (i == bytes - 1) {
bytes = 0;
begin = 0;
}
}
}
} catch (IOException e) {
break;
}
}
}
public void write(byte[] bytes) {
try {
mmOutStream.write(bytes);
} catch (IOException e) {
Log.d("WRITE", e.getLocalizedMessage());
}
}
public void cancel() {
try {
mmSocket.close();
} catch (IOException e) {
}
}
}
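// Receives the '#'-delimited readings posted by ConnectedThread and updates the UI on the main thread.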
Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
byte[] writeBuf = (byte[]) msg.obj;
int begin = (int) msg.arg1;
int end = (int) msg.arg2;
switch (msg.what) {
case 1:
String writeMessage = new String(writeBuf);
writeMessage = writeMessage.substring(begin, end);
updateText(writeMessage);
break;
}
}
};
void updateText(String text) {
text = text.replace("#", "").trim();
if (!text.isEmpty()) {
int value = Integer.valueOf(text);
if (value != 0) {
distance.setText(text + " cm");
if (value < 40) {
distance.setTextSize(250);
} else {
distance.setTextSize(180);
}
}
}
}
}
|
2a8661d41f4a75cd728c360ea68379cb7f564d58
|
[
"Java"
] | 1 |
Java
|
carlosmonzon/CodeFestBelatrix2014
|
0170fca09634e4ef3ac1f1de514b324de3b11cdb
|
b93e605fc270d4c8be229d6ee632325eeb0691e5
|
refs/heads/main
|
<repo_name>majent/update-s3-metadata<file_sep>/README.md
# update-s3-metadata<file_sep>/main.py
import boto3
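# Rewrite the metadata of matching S3 objects in place: each object is copied onto itself
# with a long-lived Cache-Control header while its original Content-Type is preserved.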
def update_meta(bucket_name, prefix):
s3 = boto3.resource('s3')
bucket = s3.Bucket(bucket_name)
extensions = ['.webp', '.png']
for bucket_object in bucket.objects.filter(Prefix=prefix):
print(bucket_object.key)
if bucket_object.key.endswith(tuple(extensions)):
obj = bucket.Object(bucket_object.key)
bucket_object.copy_from(CopySource={'Bucket': bucket_name, 'Key': bucket_object.key},
CacheControl='max-age=31536000', ContentType=obj.content_type,
MetadataDirective='REPLACE')
update_meta('BUCKET_NAME', 'PREFIX')
|
e60ea4752960669d44e09da6e65c532e842894ad
|
[
"Markdown",
"Python"
] | 2 |
Markdown
|
majent/update-s3-metadata
|
4658f46dbc38c3ad447d760e7cb2b6d0016141c5
|
dba2ef434945bdebeee99c6c08a3068390a7e9ad
|
refs/heads/master
|
<file_sep># -*- coding: utf-8 -*-
# Stdlib
import logging
# 3rd party
# Flask
from flask_appbuilder import Model
from flask_appbuilder.models.mixins import AuditMixin, FileColumn, ImageColumn
# SQLalchemy
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
LOGGER = logging.getLogger(__name__)
"""
You can use the extra Flask-AppBuilder fields and Mixin's
AuditMixin will add automatic timestamp of created and modified by who
"""
<file_sep># -*- coding: utf-8 -*-
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.1.0'
__all__ = []
# Stdlib
import logging
"""
TBD: Move this elsewhere
Logging configuration
"""
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
from trans_passports.app.flask_app import flask_app, db, appbuilder
__all__.append('flask_app')
__all__.append('db')
__all__.append('appbuilder')
from trans_passports.app import views
__all__.append('views')
<file_sep>#!/usr/bin/env bash
# Install coveralls for python
/opt/venv/bin/pip install git+git://github.com/z4r/python-coveralls.git@master
# Run test
/opt/venv/bin/coverage run --source /opt/trans_passports /opt/trans_passports/code/tests.py
# Conditional on test exit code
if [ $? == 0 ]; then
#Tests Successfully Passed, continuing build
COVERALLS_REPO_TOKEN= /opt/venv/bin/coveralls \
--base_dir /opt/trans_passports \
--data_file /.coverage \
--config_file /opt/.coveragerc
else
#Tests Failed, exiting build
exit 1
fi
<file_sep>#!/usr/bin/env bash
/opt/venv/bin/gunicorn trans_passports:flask_app \
-w 4 \
-b 0.0.0.0:5000 \
--log-level=debug \
--chdir=/opt/trans_passports/code \
--timeout=$((60 * 5)) \
--worker-class=gthread \
--reload
<file_sep># -*- coding: utf-8 -*-
# Stdlib
import os
import sys
import logging
from importlib import import_module
# 3rd party
import click
import yaml
from flask_appbuilder import const as fab_const
LOGGER = logging.getLogger(__name__)
FLASK_PROJECT_DIRECTORY = '/opt/trans_passports/code/trans_passports'
def import_application(app_package, appbuilder):
sys.path.append(os.getcwd())
try:
_app = import_module(app_package)
except Exception as e:
click.echo(click.style('Was unable to import {0} Error: {1}'.format(app_package, e), fg='red'))
exit(3)
if hasattr(_app, 'appbuilder'):
return getattr(_app, appbuilder)
else:
click.echo(click.style('There in no appbuilder var on your package, you can use appbuilder parameter to config', fg='red'))
exit(3)
def create_flask_admin(**params):
curdir = os.path.curdir
os.chdir(FLASK_PROJECT_DIRECTORY)
try:
# TBD: Put in a yaml file
params_dflt = {
'app': 'trans_passports',
'appbuilder': u'appbuilder',
'username': u'admin',
'firstname': u'admin',
'lastname': u'user',
'email': u'<EMAIL>',
'password': u'<PASSWORD>'
}
# Update defaults and rename as 'params' for clarity
params_dflt.update(params)
params = params_dflt
_appbuilder = import_application(params['app'], params['appbuilder'])
# Describe auth method
auth_type = {fab_const.AUTH_DB:"Database Authentications",
fab_const.AUTH_OID:"OpenID Authentication",
fab_const.AUTH_LDAP:"LDAP Authentication",
fab_const.AUTH_REMOTE_USER:"WebServer REMOTE_USER Authentication",
fab_const.AUTH_OAUTH:"OAuth Authentication"}
click.echo(click.style('Recognized auth method {0}.'.format(auth_type.get(_appbuilder.sm.auth_type,'No Auth method')), fg='green'))
# Create roles
role_admin = _appbuilder.sm.find_role(_appbuilder.sm.auth_role_admin)
user = _appbuilder.sm.add_user(
params['username'], params['firstname'], params['lastname'],
params['email'], role_admin, params['password'])
if user:
click.echo(click.style('Admin User {0} created.'.format(params['username']), fg='green'))
else:
click.echo(click.style('No user created an error occured', fg='red'))
finally:
# Change back to the original directory
os.chdir(curdir)
if __name__ == '__main__':
create_flask_admin()
<file_sep>[tox]
envlist = py26, py27, py33, py34, py35, flake8
[testenv:flake8]
basepython=python
deps=flake8
commands=flake8 trans_passports
[testenv]
setenv =
PYTHONPATH = {toxinidir}:{toxinidir}/trans_passports
commands = python setup.py test
<file_sep># -*- coding: utf-8 -*-
# Stdlib
import logging
# 3rd party
# Flask
from flask import render_template
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder import ModelView
# Custom
from trans_passports.app.flask_app import appbuilder, db
LOGGER = logging.getLogger(__name__)
"""
Create your Views::
class MyModelView(ModelView):
datamodel = SQLAInterface(MyModel)
Next, register your Views::
appbuilder.add_view(MyModelView, "My View", icon="fa-folder-open-o", category="My Category", category_icon='fa-envelope')
"""
"""
Application wide 404 error handler
"""
@appbuilder.app.errorhandler(404)
def page_not_found(e):
return (
render_template(
'404.html', base_template=appbuilder.base_template, appbuilder=appbuilder), 404)
# After all views have been defined and added.
db.create_all()
<file_sep>
# Trans-Passports Docker Project
This project represents a Docker deployment of Trans-Passports.
### Build/Deploy Status:
Master Branch: _Not configured_
Master Branch Code Coverage: _Not configured_
Deployment Status: _Not configured_
This project structures a python-based app.
## Docker
This project consists of the following docker containers:
* trans_passports_api - Rails API
* trans_passports_frontend - React-driven frontend (_in progress_)
* trans_passports_db - Postgres database
* trans_passports_redis - Redis cache
* nginx - NGINX server managed by supervisor (_in progress_)
* sumo - log aggregation client - customized per app (_in progress_)
## Building
For local building and testing, run the following command:
`make local_build`
## Running Containers
For local running, try the following:
`make local_up`
If you're having trouble getting changes to be recognized, the following will forcibly build the images (even if the daemon doesn't think there have been updates) and start the containers.
`docker-compose -f compose/local.yml -p trans_passports up --force-recreate -d`
### Running Tests
To run the tests on this project, run the following command:
`docker-compose -f compose/local.yml -p trans_passports run trans_passports_main bash ./opt/test_trans_passports_app.sh`
The test_trans_passports_app.sh file is the standard entry point, regardless of the commands needed to run the tests. All technology-specific test calls should be executed from within that file.
## CI Integration
Inside the project - in the `compose` folder - you will see a file `ci.yml`. This file is what produces the build artifacts that are published to a docker registry.
## Deployment Templates
Also inside the project - in the `compose` folder - you will see target environment specific compose files, like `qa_template.yml` and `production_template.yml`. These files are used as the jinja templates for the deployment pipeline. They are responsible for receiving variables - like `imageTag` - that are supplied to them during the deployment process. They are also responsible for setting all environment specific variables.
<file_sep>#!/usr/bin/env bash
# Startup application et al. via supervisor
echo "Starting supervisor for nginx..."
/opt/venv/bin/supervisord -c /etc/supervisord.conf -n
<file_sep>=====
Usage
=====
To use Trans-Passports in a project::
import trans_passports
<file_sep>===============================
Trans-Passports
===============================
.. image:: https://img.shields.io/pypi/v/trans_passports.svg
:target: https://pypi.python.org/pypi/trans_passports
.. image:: https://img.shields.io/travis/approximatelylinear/trans_passports.svg
:target: https://travis-ci.org/approximatelylinear/trans_passports
.. image:: https://readthedocs.org/projects/trans-passports/badge/?version=latest
:target: https://trans-passports.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://pyup.io/repos/github/approximatelylinear/trans_passports/shield.svg
:target: https://pyup.io/repos/github/approximatelylinear/trans_passports/
:alt: Updates
Dockerization of the trans-passports codebase
* Free software: MIT license
* Documentation: https://trans-passports.readthedocs.io.
Features
--------
* TODO
Credits
---------
This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
<file_sep>#!/usr/bin/env bash
rake db:create db:migrate db:seed
rails s -p 3000 -b '0.0.0.0'
<file_sep>=======
Credits
=======
Development Lead
----------------
* <NAME> <<EMAIL>>
Contributors
------------
None yet. Why not be the first?
<file_sep>#!/usr/bin/env bash
# Startup application et al. via supervisor
echo "Starting supervisor for local work..."
/opt/venv/bin/supervisord -c /etc/supervisord_local.conf -n
<file_sep># -*- coding: utf-8 -*-
# Stdlib
import logging
# 3rd party
# Flask
from flask import Flask
from flask_appbuilder import SQLA, AppBuilder
flask_app = Flask(__name__)
flask_app.config.from_object('trans_passports.app.config')
db = SQLA(flask_app)
appbuilder = AppBuilder(flask_app, db.session)
"""
from sqlalchemy.engine import Engine
from sqlalchemy import event
#Only include this for SQLLite constraints
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
# Will force sqllite contraint foreign keys
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
"""
<file_sep>-- ---
-- Globals
-- ---
-- SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";
-- SET FOREIGN_KEY_CHECKS=0;
-- ---
-- Table 'nodes'
--
-- ---
DROP TABLE IF EXISTS `nodes`;
CREATE TABLE `nodes` (
`node_id` INTEGER NULL AUTO_INCREMENT DEFAULT NULL,
`node_data_id` INTEGER NULL DEFAULT NULL COMMENT 'FK to data table',
`node_parent` INTEGER NULL DEFAULT NULL COMMENT 'Parent id of this node',
`node_created` TIMESTAMP NULL DEFAULT NULL,
`node_modified` TIMESTAMP NULL DEFAULT NULL,
PRIMARY KEY (`node_id`)
);
-- ---
-- Table 'data'
--
-- ---
DROP TABLE IF EXISTS `data`;
CREATE TABLE `data` (
`data_id` INTEGER NULL AUTO_INCREMENT DEFAULT NULL,
`data_parent` INTEGER NULL DEFAULT NULL COMMENT 'Parent of this data element',
  `data_type` VARCHAR(100) NULL DEFAULT NULL COMMENT 'Available types: question, question-group, answer, answer-gr',
  `data_name` VARCHAR(255) NULL DEFAULT NULL COMMENT 'Name of this data element',
`data_value` MEDIUMTEXT NULL DEFAULT NULL COMMENT 'Value for this data element',
`data_created` TIMESTAMP NULL DEFAULT NULL,
`data_modified` TIMESTAMP NULL DEFAULT NULL,
PRIMARY KEY (`data_id`)
);
-- ---
-- Table 'node_attributes'
-- Attributes for the node table
-- ---
DROP TABLE IF EXISTS `node_attributes`;
CREATE TABLE `node_attributes` (
`node_attribute_id` INTEGER NULL AUTO_INCREMENT DEFAULT NULL,
`node_attribute_node_id` INTEGER NULL DEFAULT NULL COMMENT 'FK to nodes table',
`node_attribute_value` MEDIUMTEXT NULL DEFAULT NULL,
`node_attribute_created` TIMESTAMP NULL DEFAULT NULL,
`node_attribute_modified` TIMESTAMP NULL DEFAULT NULL,
PRIMARY KEY (`node_attribute_id`)
) COMMENT 'Attributes for the node table';
-- ---
-- Foreign Keys
-- ---
ALTER TABLE `nodes` ADD FOREIGN KEY (node_data_id) REFERENCES `data` (`data_id`);
ALTER TABLE `nodes` ADD FOREIGN KEY (node_parent) REFERENCES `nodes` (`node_id`);
ALTER TABLE `data` ADD FOREIGN KEY (data_parent) REFERENCES `data` (`data_id`);
ALTER TABLE `node_attributes` ADD FOREIGN KEY (node_attribute_node_id) REFERENCES `nodes` (`node_id`);
-- ---
-- Table Properties
-- ---
-- ALTER TABLE `nodes` ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- ALTER TABLE `data` ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- ALTER TABLE `node_attributes` ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- ---
-- Test Data
-- ---
-- INSERT INTO `nodes` (`node_id`,`node_data_id`,`node_parent`,`node_created`,`node_modified`) VALUES
-- ('','','','','');
-- INSERT INTO `data` (`data_id`,`data_parent`,`data_type`,`data_name`,`data_value`,`data_created`,`data_modified`) VALUES
-- ('','','','','','','');
-- INSERT INTO `node_attributes` (`node_attribute_id`,`node_attribute_node_id`,`node_attribute_value`,`node_attribute_created`,`node_attribute_modified`) VALUES
-- ('','','','','');
<file_sep>include .env
.DEFAULT_GOAL=build
build_api:
@echo "Building api..."
docker-compose -f compose/local.yml build
build_frontend:
@echo "Building frontend..."
docker-compose build trans_passports_frontend
local_build:
@echo "Building the local containers..."
docker-compose -f compose/local.yml build
local_up:
@echo "Composing the local containers..."
docker-compose -f compose/local.yml up
|
6e2b4d5306136e685c2b8df3bb67dfcd9526ea38
|
[
"SQL",
"reStructuredText",
"Markdown",
"Makefile",
"INI",
"Python",
"Shell"
] | 17 |
Python
|
CNXTEoE/trans-passports-docker
|
53fc237a741b25139eb86da8d94757363579936d
|
31550a8265207df670efa8d4bc601e39e3a99618
|
refs/heads/master
|
<file_sep>import os
import csv
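# Summarise monthly revenue from budget_data.csv: total months, total revenue,
# average revenue change, and the greatest increase/decrease.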
budget = os.path.join('budget_data.csv')

with open(budget, newline="") as csvfile:
    csvreader = csv.reader(csvfile, delimiter=",")
    next(csvreader, None)                 # skip the header row
    line = next(csvreader, None)          # first data row

    max_month = line[0]
    min_month = line[0]
    previous_revenue = float(line[1])

    month_counter = 1
    sum_revenue = previous_revenue
    sum_revenue_change = 0
    max_revenue = 0                       # greatest month-over-month increase
    min_revenue = 0                       # greatest month-over-month decrease

    for line in csvreader:
        month_counter = month_counter + 1
        revenue = float(line[1])
        sum_revenue = sum_revenue + revenue
        revenue_change = revenue - previous_revenue
        sum_revenue_change = sum_revenue_change + revenue_change
        # compare month-over-month changes, not raw revenue values
        if revenue_change > max_revenue:
            max_month = line[0]
            max_revenue = revenue_change
        if revenue_change < min_revenue:
            min_month = line[0]
            min_revenue = revenue_change
        previous_revenue = revenue

average_revenue = sum_revenue / month_counter
average_revenue_change = sum_revenue_change / (month_counter - 1)

sum_revenue = int(sum_revenue)
average_revenue_change = int(average_revenue_change)
max_revenue = int(max_revenue)
min_revenue = int(min_revenue)
print(f"Financial Analysis:")
print("-------------------------------------------------------")
print(f"Total Months: {month_counter}")
print(f"Total Revenue: ${sum_revenue:,}")
print(f"Average Revenue Change: ${average_revenue_change:,}")
print(f"Greatest Increase in Revenue: {max_month} ${max_revenue:,}")
print(f"Greatest Decrease in Revenue: {min_month} ${min_revenue:,}")
print("")
file = open("final_budget.txt", "w")
file.write(f"Financial Analysis:\n")
file.write("-------------------------------------------------------\n")
file.write(f"Total Months: {month_counter}\n")
file.write(f"Total Revenue: ${sum_revenue:,}\n")
file.write(f"Average Revenue Change: ${average_revenue_change:,}\n")
file.write(f"Greatest Increase in Revenue: {max_month} ${max_revenue:,}\n")
file.write(f"Greatest Decrease in Revenue: {min_month} ${min_revenue:,}\n")
file.write("")
file.close()<file_sep># python_challenge
Assignment 3 - Python
<file_sep>import os
import csv
candidates = []
num_votes = 0
vote_counts = []
filepath = os.path.join('election_data.csv')

with open(filepath, newline="") as csvfile:
    csvreader = csv.reader(csvfile, delimiter=",")
    csv_header = next(csvreader)  # skip the header row only; every data row is counted
for line in csvreader:
num_votes = num_votes + 1
candidate = line[2]
if candidate in candidates:
candidate_index = candidates.index(candidate)
vote_counts[candidate_index] = vote_counts[candidate_index] + 1
else:
candidates.append(candidate)
vote_counts.append(1)
percentages = []
max_votes = vote_counts[0]
max_index = 0
for count in range(len(candidates)):
vote_percentage = vote_counts[count]/num_votes*100
percentages.append(vote_percentage)
if vote_counts[count] > max_votes:
max_votes = vote_counts[count]
max_index = count
winner = candidates[max_index]
print('Election Results')
print('--------------------------')
print(f'Total Votes: {num_votes:,}')
for count in range(len(candidates)):
print(f'{candidates[count]}: {percentages[count]:.2f}% ({vote_counts[count]:,})')
print('---------------------------')
print(f'Winner: {winner}')
print('---------------------------')
filewrite = open('final_election.txt', mode = 'w')
filewrite.write('Election Results\n')
filewrite.write('--------------------------\n')
filewrite.write(f'Total Votes: {num_votes:,}\n')
for count in range(len(candidates)):
filewrite.write(f'{candidates[count]}: {percentages[count]:.2f}% ({vote_counts[count]:,})\n')
filewrite.write('---------------------------\n')
filewrite.write(f'Winner: {winner}\n')
filewrite.write('---------------------------\n')
filewrite.close()
<file_sep>#' Returns the specified available feature for user by name.Required scope: None
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_available_feature_by_name <- function(token, featureName, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/availableFeatures(featureName='{featureName}')"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of available features for the userRequired scope: None
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_available_features <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/availableFeatures"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
<file_sep>#' Grants the specified user permissions to the specified workspace.Notes:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
add_group_user <- function(token, groupId, displayName, emailAddress, groupUserAccessRight, identifier, principalType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/users"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(displayName = displayName, emailAddress = emailAddress, groupUserAccessRight = groupUserAccessRight, identifier = identifier, principalType = principalType))
process(response, output)
}
#' Creates new workspace.Required scope: Workspace.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
create_group <- function(token, workspaceV2 = NULL, name, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(name = name))
process(response, output)
}
#' Deletes the specified workspace.Required scope: Workspace.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Deletes the specified user permissions from the specified workspace.Required scope: Workspace.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_user_in_group <- function(token, groupId, user, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/users/{user}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of workspaces the user has access to.Note: Users that have been recently added to a group may not have their new group immediately available, see Refresh user permissions.Required scope: Workspace.Read.All or Workspace.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_groups <- function(token, filter = NULL, skip = NULL, top = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of users that have access to the specified workspace.Required scope: Workspace.Read.All or Workspace.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_group_users <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/users"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Update the specified user permissions to the specified workspace.Notes:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_group_user <- function(token, groupId, displayName, emailAddress, groupUserAccessRight, identifier, principalType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/users"
response <- httr::PUT(url = glue::glue(path),
config = httr::config(token = token),
body = list(displayName = displayName, emailAddress = emailAddress, groupUserAccessRight = groupUserAccessRight, identifier = identifier, principalType = principalType))
process(response, output)
}
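
# Usage sketch (illustrative, not run; assumes `token` is a valid Power BI OAuth
# token and the signed-in user has the workspace scopes listed above):
#   groups <- get_groups(token)                      # data.frame of workspaces
#   add_group_user(token, groupId = groups$id[1],
#                  displayName = NULL, emailAddress = "[email protected]",
#                  groupUserAccessRight = "Member", identifier = NULL,
#                  principalType = "User")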
<file_sep>#' Returns the specified installed app.Required scope: App.Read.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_app <- function(token, appId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of installed apps.Required scope: App.Read.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_apps <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified dashboard from the specified app.Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dashboard <- function(token, appId, dashboardId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}/dashboards/{dashboardId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of dashboards from the specified app.Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dashboards <- function(token, appId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}/dashboards"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified report from the specified app.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_report <- function(token, appId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}/reports/{reportId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of reports from the specified app.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_reports <- function(token, appId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}/reports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified tile within the specified dashboard from the specified app.Note: All tile types are supported except for "model tiles", which include datasets and live tiles that contain an entire report page. Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_tile <- function(token, appId, dashboardId, tileId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}/dashboards/{dashboardId}/tiles/{tileId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of tiles within the specified dashboard from the specified app.Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_tiles <- function(token, appId, dashboardId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/apps/{appId}/dashboards/{dashboardId}/tiles"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
<file_sep>#' Returns a list of capacities the user has access to.Required scope: Capacity.Read.All or Capacity.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_capacities <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/capacities"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the current state of a workload and if the workload is enabled also returns the maximum memory percentage that the workload can consume.Required scope: Capacity.Read.All or Capacity.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_workload <- function(token, capacityId, workloadName, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/capacities/{capacityId}/Workloads/{workloadName}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the current state of the specified capacity workloads, if a workload is enabled also returns the maximum memory percentage that the workload can consume.Required scope: Capacity.Read.All or Capacity.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_workloads <- function(token, capacityId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/capacities/{capacityId}/Workloads"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Assigns "My Workspace" to the specified capacity.Note: To perform this operation, the user must have admin or assign permissions on the capacity. To unassign "My Workspace" from a capacity, Empty Guid (00000000-0000-0000-0000-000000000000) should be provided as capacityId. Required scope: Capacity.ReadWrite.All and Workspace.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_assignmyworkspacetocapacity <- function(token, capacityId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/AssignToCapacity"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(capacityId = capacityId))
process(response, output)
}
#' Assigns the specified workspace to the specified capacity.Note: To perform this operation, the user must be admin on the specified workspace and have admin or assign permissions on the capacity. To unassign the specified workspace from a capacity, Empty Guid (00000000-0000-0000-0000-000000000000) should be provided as capacityId. Required scope: Capacity.ReadWrite.All and Workspace.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_assigntocapacity <- function(token, groupId, capacityId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/AssignToCapacity"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(capacityId = capacityId))
process(response, output)
}
#' Gets the status of the assignment to capacity operation of the specified workspace.Note: To perform this operation, the user must be admin on the specified workspace. Required scope: Workspace.Read.All and Workspace.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_capacityassignmentstatus <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/CapacityAssignmentStatus"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Gets the status of "My Workspace" assignment to capacity operation.Note: Required scope: Workspace.Read.All and Workspace.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_capacityassignmentstatusmyworkspace <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/CapacityAssignmentStatus"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Changes the state of a specific workload to Enabled or Disabled. When enabling a workload the maximum memory percentage that the workload can consume must be set.Required scope: Capacity.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
patch_workload <- function(token, capacityId, workloadName, maxMemoryPercentageSetByUser, state, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/capacities/{capacityId}/Workloads/{workloadName}"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(maxMemoryPercentageSetByUser = maxMemoryPercentageSetByUser, state = state))
process(response, output)
}
<file_sep>#' Adds an encryption key for Power BI workspaces assigned to a capacity.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 600 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
add_power_bi_encryption_key <- function(token, activate, isDefault, keyVaultKeyIdentifier, name, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/tenantKeys"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(activate = activate, isDefault = isDefault, keyVaultKeyIdentifier = keyVaultKeyIdentifier, name = name))
process(response, output)
}
#' Assigns the provided workspaces to the specified capacity.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
capacities_assignworkspacestocapacity <- function(token, capacityMigrationAssignments, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/capacities/AssignWorkspaces"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(capacityMigrationAssignments = capacityMigrationAssignments))
process(response, output)
}
#' Unassigns the provided workspaces from capacity.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
capacities_unassignworkspacesfromcapacity <- function(token, workspacesToUnassign, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/capacities/UnassignWorkspaces"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(workspacesToUnassign = workspacesToUnassign))
process(response, output)
}
#' Returns a list of dashboards for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dashboards_getdashboardsasadmin <- function(token, expand = NULL, filter = NULL, skip = NULL, top = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/dashboards"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of dashboards from the specified workspace.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dashboards_getdashboardsingroupasadmin <- function(token, groupId, filter, skip, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/dashboards"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of tiles within the specified dashboard.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dashboards_gettilesasadmin <- function(token, dashboardId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/dashboards/{dashboardId}/tiles"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Exports the specified dataflow definition to a .json file.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.AllApplication only and delegated permissions are supported.To set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dataflows_exportdataflowasadmin <- function(token, dataflowId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/dataflows/{dataflowId}/export"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasources for the specified dataflow.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.AllApplication only and delegated permissions are supported.To set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dataflows_getdataflowdatasourcesasadmin <- function(token, dataflowId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/dataflows/{dataflowId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of dataflows for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dataflows_getdataflowsasadmin <- function(token, filter = NULL, skip = NULL, top = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/dataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of dataflows from the specified workspace.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dataflows_getdataflowsingroupasadmin <- function(token, groupId, filter, skip, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/dataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of upstream dataflows for the specified dataflow.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dataflows_getupstreamdataflowsingroupasadmin <- function(token, dataflowId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/dataflows/{dataflowId}/upstreamDataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasets for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API processes each request for 2 seconds, in the mean time other requests will be queued. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_getdatasetsasadmin <- function(token, filter = NULL, skip = NULL, top = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/datasets"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasets from the specified workspace.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_getdatasetsingroupasadmin <- function(token, groupId, expand, filter, skip, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/datasets"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of upstream dataflows for datasets from the specified workspace.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_getdatasettodataflowslinksingroupasadmin <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/datasets/upstreamDataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasources for the specified dataset.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API processes each request for 0.5 seconds, in the mean time other requests will be queued. Required scope: Tenant.Read.AllApplication only and delegated permissions are supported.To set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_getdatasourcesasadmin <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/datasets/{datasetId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of audit activity events for a tenant.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.All. To call this API, provide either a continuation token or both a start and end date time. StartDateTime and EndDateTime must be in the same UTC day.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_activity_events <- function(token, filter = NULL, continuationToken = NULL, endDateTime = NULL, startDateTime = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/activityevents"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of capacities for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_capacities_as_admin <- function(token, expand = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/capacities"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the encryption keys for the tenant.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_power_bi_encryption_keys <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/tenantKeys"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Grants user permissions to the specified workspace.Note: This API is currently limited to updating workspaces in the new workspace experience. The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_adduserasadmin <- function(token, groupId, displayName, emailAddress, groupUserAccessRight, identifier, principalType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/users"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(displayName = displayName, emailAddress = emailAddress, groupUserAccessRight = groupUserAccessRight, identifier = identifier, principalType = principalType))
process(response, output)
}
#' Removes user permissions to the specified workspace.Note: This API is currently limited to updating workspaces in the new workspace experience. The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_deleteuserasadmin <- function(token, groupId, user, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/users/{user}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of workspaces for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_getgroupsasadmin <- function(token, expand, filter, skip, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups?$top={$top}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Restores a deleted workspace.Note: This API is currently limited to restoring workspaces in the new workspace experience. The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_restoredeletedgroupasadmin <- function(token, groupId, emailAddress, name, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/restore"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(emailAddress = emailAddress, name = name))
process(response, output)
}
#' Updates the specified workspace properties.Note: This API is currently limited to updating workspaces in the new workspace experience. Only name and description can be updated, and name must be unique inside an organization. The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_updategroupasadmin <- function(token, groupId, capacityId, dashboards, dataflowStorageId, dataflows, datasets, description, id, isOnDedicatedCapacity, isReadOnly, name, reports, state, type, users, workbooks, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(capacityId = capacityId, dashboards = dashboards, dataflowStorageId = dataflowStorageId, dataflows = dataflows, datasets = datasets, description = description, id = id, isOnDedicatedCapacity = isOnDedicatedCapacity, isReadOnly = isReadOnly, name = name, reports = reports, state = state, type = type, users = users, workbooks = workbooks))
process(response, output)
}
#' Returns a list of imports for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
imports_getimportsasadmin <- function(token, expand = NULL, filter = NULL, skip = NULL, top = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/imports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Changes the specific capacity information. Currently, only supports changing the capacity encryption keyNote: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
patch_capacity_as_admin <- function(token, capacityId, tenantKeyId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/capacities/{capacityId}"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(tenantKeyId = tenantKeyId))
process(response, output)
}
#' Returns a list of reports for the organization.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
reports_getreportsasadmin <- function(token, filter = NULL, skip = NULL, top = NULL, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/reports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of reports from the specified workspace.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 200 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
reports_getreportsingroupasadmin <- function(token, groupId, filter, skip, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/groups/{groupId}/reports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Rotate the encryption key for Power BI workspaces assigned to a capacity.Note: The user must have administrator rights (such as Office 365 Global Administrator or Power BI Service Administrator) to call this API. This API allows 600 requests per hour at maximum. Required scope: Tenant.Read.All or Tenant.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
rotate_power_bi_encryption_key <- function(token, tenantKeyId, keyVaultKeyIdentifier, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/admin/tenantKeys/{tenantKeyId}/Default.Rotate"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(keyVaultKeyIdentifier = keyVaultKeyIdentifier))
process(response, output)
}
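
# Usage sketch (illustrative, not run; requires a token for a Power BI service
# administrator account, as noted in the roxygen blocks above):
#   all_dashboards <- dashboards_getdashboardsasadmin(token)
#   all_capacities <- get_capacities_as_admin(token)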
<file_sep>library(rvest)
url = "https://docs.microsoft.com/en-us/rest/api/power-bi/admin/dashboards_getdashboardsasadmin"
make_operation <- function(url, name) {
message(url)
page <- read_html(url)
endpoint <- page %>%
html_node("pre") %>%
html_text() %>%
strsplit(" ") %>%
unlist()
method <- endpoint[1]
path <- endpoint[2]
description <- page %>%
html_node("p") %>%
html_text() %>%
sub(" To set the permissions scope, see Register an app.", "", .)
h2_text <- page %>%
html_nodes("h2") %>%
html_text()
params <- c("URI Parameters", "Request Body") %in% h2_text
tables <- html_table(page)
if (all(params)) {
uri_params <- fetch_params(tables[[1]])
body_params <- fetch_params(tables[[2]])
} else if (params[1] == FALSE) {
uri_params <- NULL
body_params <- fetch_params(tables[[1]])
} else if (params[2] == FALSE) {
uri_params <- fetch_params(tables[[1]])
body_params <- NULL
}
  fun <- functionalize(name, method, path, uri_params, body_params)
  # collect the documented parameter names/descriptions from both the URI and the request body
  param_names <- c(uri_params$names, body_params$names)
  param_descriptions <- c(uri_params$descriptions, body_params$descriptions)
  doc <- document(description, param_names, param_descriptions)
paste(doc, fun, sep = "\n")
}
fetch_params <- function(table) {
names <- table$Name
if (length(names) == 0) {
return(NULL)
} else if (names[[1]] == "200 OK") {
return(NULL)
}
names <- gsub("$", "", names, fixed = TRUE)
descriptions <- table$Description
required <- table$Required
list(names = names,
descriptions = descriptions,
required = required)
}
document <- function(description, param_names, param_descriptions) {
paste(
paste("#'", gsub("\n", "\n#' ", description, fixed = TRUE), collapse = ""),
paste("\n#' @param", param_names, param_descriptions, collapse = ""),
"\n#' @return A `data.frame` object.",
"\n#' @export",
collapse = "\n#' "
)
}
functionalize <- function(name, method, path,
uri_params, body_params) {
uri_names <- uri_params$names
body_names <- body_params$names
if (!is.null(body_names)) {
body_names <- ifelse(body_names %in% uri_names,
paste("body_", body_names, sep = "_"),
body_names)
}
if (is.null(uri_names)) {
uri_names <- ""
} else {
uri_required <- uri_params$required
if (is.null(uri_required)) uri_required <- rep(TRUE, length(uri_names))
uri_names <- ifelse(is.na(uri_required),
paste(uri_names, "= NULL"),
uri_names)
uri_names <- paste0(
", ",
paste(uri_names, collapse = ", ")
)
}
if (is.null(body_names)) {
body <- FALSE
body_names <- ""
} else {
body <- enlist(body_names)
body_required <- body_params$required
if (is.null(body_required)) body_required <- rep(TRUE, length(body_names))
body_names <- ifelse(is.na(body_required),
paste(body_names, "= NULL"),
body_names)
body_names <- paste0(
", ",
paste(body_names, collapse = ", ")
)
}
glue::glue(
"[name] <- function(token[uri_names][body_names], output = \"value\") {
path <- \"[path]\"
response <- httr::[method](url = glue::glue(path),
config = httr::config(token = token),
body = [body])
process(response, output)
}",
.open = "[",
.close = "]"
)
}
enlist <- function(body_names) {
paste0(
"list(",
paste(
paste(
body_names,
body_names,
sep = " = "
),
collapse = ", "
),
")"
)
}
get_table_col <- function(url, type = "path") {
if (type == "path") {
url %>%
read_html() %>%
html_nodes("td:nth-child(1) a") %>%
html_attr("href")
} else if (type == "name") {
url %>%
read_html() %>%
html_table() %>%
.[[1]] %>%
.[[1]] %>%
tolower() %>%
gsub(" ", "_", .)
}
}
make_pbr <- function() {
base_url <- "https://docs.microsoft.com/en-us/rest/api/power-bi/"
groups <- get_table_col(base_url)
group_urls <- paste0(base_url, gsub(" ", "", groups), "/")
operations <- lapply(group_urls, get_table_col)
function_names <- lapply(group_urls, get_table_col, type = "name")
operation_urls <- lapply(operations, function(x) paste0(base_url, x))
operation_functions <- mapply(
function(url, name) {
unlist(mapply(make_operation, url, name))
},
url = operation_urls,
name = function_names
)
script_names <- gsub(" ", "_", get_table_col(base_url, type = "name"))
names(operation_functions) <- script_names
if (!file.exists("R")) dir.create("R")
mapply(
function(funs, script_name) {
script <- paste(funs, collapse = "\n\n")
write(script, file = file.path("R", glue::glue("{script_name}.R")))
TRUE
},
funs = operation_functions,
script_name = script_names
)
# styler::style_dir("R")
roxygen2::roxygenise()
}
make_pbr()
# TODO fix optional uri pars, remember $ stuff
# TODO fix description, add urls
# TODO add status verification
# TODO add readme<file_sep>process <- function(response, output = "value") {
if (output == "raw") {
return(response)
}
content <- jsonlite::fromJSON(
httr::content(response, type = "text", encoding = "UTF-8")
)
content[[output]]
}
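
# Authentication sketch (illustrative, not run). The package does not ship a login
# helper, so this assumes you register your own Azure AD application and substitute
# its client id/secret below:
#   app <- httr::oauth_app("powerbi", key = "<client id>", secret = "<client secret>")
#   endpoint <- httr::oauth_endpoint(
#     authorize = "https://login.microsoftonline.com/common/oauth2/authorize",
#     access    = "https://login.microsoftonline.com/common/oauth2/token"
#   )
#   token <- httr::oauth2.0_token(
#     endpoint, app,
#     user_params = list(resource = "https://analysis.windows.net/powerbi/api")
#   )
#   get_groups(token)   # any wrapper in the package accepts this token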
<file_sep># Install
# Authenticate
# Use<file_sep>#' Deletes a dataflow from the CDS for Analytics storage, including its definition file and actual model.Required scope: Dataflow.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_dataflow <- function(token, dataflowId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows/{dataflowId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Exports the specified dataflow definition to a .json file.Required scope: Dataflow.ReadWrite.All or Dataflow.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataflow <- function(token, dataflowId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows/{dataflowId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasources for the specified dataflow.Required scope: Dataflow.ReadWrite.All or Dataflow.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataflow_data_sources <- function(token, dataflowId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows/{dataflowId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of all dataflows from the specified workspace.Required scope: Dataflow.ReadWrite.All or Dataflow.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataflows <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of upstream dataflows for the specified dataflow.Required scope: Dataflow.ReadWrite.All or Dataflow.Read.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_upstream_dataflows_in_group <- function(token, dataflowId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows/{dataflowId}/upstreamDataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Triggers a refresh for the specified dataflow.Required scope: Dataflow.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
refresh_dataflow <- function(token, dataflowId, groupId, notifyOption, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows/{dataflowId}/refreshes"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(notifyOption = notifyOption))
process(response, output)
}
#' Creates or updates the specified dataflow refresh schedule configuration.Required scope: Dataflow.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_refresh_schedule <- function(token, dataflowId, groupId, value, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dataflows/{dataflowId}/refreshSchedule"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(value = value))
process(response, output)
}
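
# Usage sketch (illustrative, not run; `group_id` and `dataflow_id` are placeholder
# GUIDs you would normally look up with get_groups() and get_dataflows()):
#   get_dataflows(token, groupId = group_id)
#   refresh_dataflow(token, dataflowId = dataflow_id, groupId = group_id,
#                    notifyOption = "MailOnFailure")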
<file_sep>#' Creates a temporary blob storage to be used to import large .pbix files larger than 1 GB and up to 10 GB.
#' To import large .pbix files, create a temporary upload location and upload the .pbix file using the shared access signature (SAS) url from the response, and then call Post Import and specify 'fileUrl' to be the SAS url in the Request BodyNote: Import large .pbix files is only available for workspaces on premium capacity and for .pbix files that are between 1 GB and 10 GB. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
create_temporary_upload_location <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/imports/createTemporaryUploadLocation"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Creates a temporary blob storage to be used to import large .pbix files larger than 1 GB and up to 10 GB.
#' To import large .pbix files, create a temporary upload location and upload the .pbix file using the shared access signature (SAS) url from the response, and then call Post Import In Group and specify 'fileUrl' to be the SAS url in the Request Body.Note: Import large .pbix files is only available for workspaces on premium capacity and for .pbix files that is between 1 GB and 10 GB. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
create_temporary_upload_location_in_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/imports/createTemporaryUploadLocation"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified import from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_import <- function(token, importId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/imports/{importId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified import from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_import_in_group <- function(token, groupId, importId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/imports/{importId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of imports from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_imports <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/imports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of imports from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_imports_in_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/imports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Creates new content on "My Workspace" from .pbix, Excel, Rdl or file path in OneDrive for Business.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
post_import <- function(token, nameConflict, skipReport, datasetDisplayName, connectionType, filePath, fileUrl, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/imports?datasetDisplayName={datasetDisplayName}"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(connectionType = connectionType, filePath = filePath, fileUrl = fileUrl))
process(response, output)
}
#' Creates new content on the specified workspace from .pbix, .json, Excel, Rdl, or file path in OneDrive for Business.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
post_import_in_group <- function(token, groupId, nameConflict, skipReport, datasetDisplayName, connectionType, filePath, fileUrl, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/imports?datasetDisplayName={datasetDisplayName}"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(connectionType = connectionType, filePath = filePath, fileUrl = fileUrl))
process(response, output)
}
<file_sep>#' Generates an embed token to view the specified dashboard from the specified workspace.This API is relevant only to 'App owns data' embed scenario.Required scope: (all of the below)
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
dashboards_generatetokeningroup <- function(token, dashboardId, groupId, accessLevel, allowSaveAs, datasetId, identities, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards/{dashboardId}/GenerateToken"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(accessLevel = accessLevel, allowSaveAs = allowSaveAs, datasetId = datasetId, identities = identities))
process(response, output)
}
#' Generates an embed token to Embed Q&A based on the specified dataset from the specified workspace.This API is relevant only to 'App owns data' embed scenario.Required scope: Dataset.ReadWrite.All or Dataset.Read.AllWhen using service principal for authentication, refer to Service Principal with Power BI document along with considerations and limitations section.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_generatetokeningroup <- function(token, datasetId, groupId, accessLevel, allowSaveAs, body__datasetId, identities, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/GenerateToken"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(accessLevel = accessLevel, allowSaveAs = allowSaveAs, body__datasetId = body__datasetId, identities = identities))
process(response, output)
}
#' Generates an embed token for multiple reports, datasets and target workspaces. Reports and datasets do not have to be related. The binding of a report to a dataset can be done during embedding. Target workspaces are workspaces where creation of reports is allowed.This API is relevant only to 'App owns data' embed scenario.Required scope:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
generate_token <- function(token, datasets, identities, reports, targetWorkspaces, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/GenerateToken"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasets = datasets, identities = identities, reports = reports, targetWorkspaces = targetWorkspaces))
process(response, output)
}
#' Generates an embed token to allow report creation on the specified workspace based on the specified dataset.This API is relevant only to 'App owns data' embed scenario.Required scope: (all of the below)
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
reports_generatetokenforcreateingroup <- function(token, groupId, accessLevel, allowSaveAs, datasetId, identities, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/GenerateToken"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(accessLevel = accessLevel, allowSaveAs = allowSaveAs, datasetId = datasetId, identities = identities))
process(response, output)
}
#' Generates an embed token to view or edit the specified report from the specified workspace.This API is relevant only to 'App owns data' embed scenario.Required scope: (all of the below)
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
reports_generatetokeningroup <- function(token, groupId, reportId, accessLevel, allowSaveAs, datasetId, identities, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/GenerateToken"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(accessLevel = accessLevel, allowSaveAs = allowSaveAs, datasetId = datasetId, identities = identities))
process(response, output)
}
#' Generates an embed token to view the specified tile from the specified workspace.This API is relevant only to 'App owns data' embed scenario.Required scope: (all of the below)
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
tiles_generatetokeningroup <- function(token, dashboardId, groupId, tileId, accessLevel, allowSaveAs, datasetId, identities, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards/{dashboardId}/tiles/{tileId}/GenerateToken"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(accessLevel = accessLevel, allowSaveAs = allowSaveAs, datasetId = datasetId, identities = identities))
process(response, output)
}
<file_sep>#' Creates a new empty dashboard on "My Workspace".Required scope: Content.Create
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
add_dashboard <- function(token, name, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dashboards"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(name = name))
process(response, output)
}
#' Creates a new empty dashboard on the specified workspace.Required scope: Content.Create
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
add_dashboard_in_group <- function(token, groupId, name, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(name = name))
process(response, output)
}
#' Clones the specified tile from "My Workspace".If target report id and target dataset are not specified, the following can occur:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
clone_tile <- function(token, dashboardId, tileId, positionConflictAction, targetDashboardId, targetModelId, targetReportId, targetWorkspaceId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dashboards/{dashboardId}/tiles/{tileId}/Clone"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(positionConflictAction = positionConflictAction, targetDashboardId = targetDashboardId, targetModelId = targetModelId, targetReportId = targetReportId, targetWorkspaceId = targetWorkspaceId))
process(response, output)
}
#' Clones the specified tile from the specified workspace.If target report id and target dataset are missing, the following can occur:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
clone_tile_in_group <- function(token, dashboardId, groupId, tileId, positionConflictAction, targetDashboardId, targetModelId, targetReportId, targetWorkspaceId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards/{dashboardId}/tiles/{tileId}/Clone"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(positionConflictAction = positionConflictAction, targetDashboardId = targetDashboardId, targetModelId = targetModelId, targetReportId = targetReportId, targetWorkspaceId = targetWorkspaceId))
process(response, output)
}
#' Returns the specified dashboard from "My Workspace".Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dashboard <- function(token, dashboardId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dashboards/{dashboardId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified dashboard from the specified workspace.Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dashboard_in_group <- function(token, dashboardId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards/{dashboardId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of dashboards from "My Workspace".Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dashboards <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dashboards"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of dashboards from the specified workspace.Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dashboards_in_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified tile within the specified dashboard from "My Workspace".Note: All tile types are supported except for "model tiles", which include datasets and live tiles that contain an entire report page. Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_tile <- function(token, dashboardId, tileId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dashboards/{dashboardId}/tiles/{tileId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified tile within the specified dashboard from the specified workspace.Note: All tile types are supported except for "model tiles", which include datasets and live tiles that contain an entire report page. Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_tile_in_group <- function(token, dashboardId, groupId, tileId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards/{dashboardId}/tiles/{tileId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of tiles within the specified dashboard from "My Workspace".Note: All tile types are supported except for "model tiles", which include datasets and live tiles that contain an entire report page. Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_tiles <- function(token, dashboardId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dashboards/{dashboardId}/tiles"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of tiles within the specified dashboard from the specified workspace.Note: All tile types are supported except for "model tiles", which include datasets and live tiles that contain an entire report page. Required scope: Dashboard.ReadWrite.All or Dashboard.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_tiles_in_group <- function(token, dashboardId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/dashboards/{dashboardId}/tiles"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
<file_sep>#' Returns authentication token.
#' @param name The app name
#' @param key The app key
#' @param secret The app secret
#' @return A `token` object.
#' @export
authenticate <- function(name, key, secret = NULL) {
my_app <- httr::oauth_app(
appname = name,
key = key,
secret = secret
)
my_token <- httr::oauth2.0_token(
endpoint = httr::oauth_endpoints("azure"),
app = my_app,
user_params = list(resource = "https://analysis.windows.net/powerbi/api"),
use_oob = FALSE,
cache = FALSE
)
my_token
}
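# Usage sketch: the app name and key below are hypothetical placeholders and assume an
# Azure AD application registered for the Power BI REST API. The token returned here is
# what every other function in this package expects as its first argument.
# token <- authenticate(name = "my-powerbi-app",
#                       key = "00000000-0000-0000-0000-0000000000aa")
# reports <- get_reports(token)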
<file_sep>#' Grants or updates the permissions required to use the specified datasource for the specified user.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
add_datasource_user <- function(token, datasourceId, gatewayId, datasourceAccessRight, displayName, emailAddress, identifier, principalType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}/users"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasourceAccessRight = datasourceAccessRight, displayName = displayName, emailAddress = emailAddress, identifier = identifier, principalType = principalType))
process(response, output)
}
#' Creates a new datasource on the specified gateway.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
create_datasource <- function(token, gatewayId, connectionDetails, credentialDetails, dataSourceName, dataSourceType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(connectionDetails = connectionDetails, credentialDetails = credentialDetails, dataSourceName = dataSourceName, dataSourceType = dataSourceType))
process(response, output)
}
#' Deletes the specified datasource from the specified gateway.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_datasource <- function(token, datasourceId, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Removes the specified user from the specified datasource.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_datasource_user <- function(token, datasourceId, emailAddress, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}/users/{emailAddress}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified datasource from the specified gateway.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasource <- function(token, datasourceId, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasources from the specified gateway.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasources <- function(token, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Checks the connectivity status of the specified datasource from the specified gateway.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasource_status <- function(token, datasourceId, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}/status"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of users who have access to the specified datasource. Required scope: Dataset.ReadWrite.All or Dataset.Read.All. To set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasource_users <- function(token, datasourceId, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}/users"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified gateway.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_gateway <- function(token, gatewayId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of gateways for which the user is an admin.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_gateways <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
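# Usage sketch (hypothetical gateway id; assumes a valid token):
# gateways <- get_gateways(token)
# gateway  <- get_gateway(token, gatewayId = "00000000-0000-0000-0000-0000000000bb")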
#' Updates the credentials of the specified datasource from the specified gateway.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_datasource <- function(token, datasourceId, gatewayId, credentialDetails, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/gateways/{gatewayId}/datasources/{datasourceId}"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(credentialDetails = credentialDetails))
process(response, output)
}
<file_sep>#' Binds the specified dataset from "My Workspace" to the specified gateway with (optional) given set of datasource Ids. This only supports the On-Premises Data Gateway.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
bind_to_gateway <- function(token, datasetId, datasourceObjectIds, gatewayObjectId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/Default.BindToGateway"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasourceObjectIds = datasourceObjectIds, gatewayObjectId = gatewayObjectId))
process(response, output)
}
#' Binds the specified dataset from the specified workspace to the specified gateway with (optional) given set of datasource Ids. This only supports the On-Premises Data Gateway.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
bind_to_gateway_in_group <- function(token, datasetId, groupId, datasourceObjectIds, gatewayObjectId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.BindToGateway"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasourceObjectIds = datasourceObjectIds, gatewayObjectId = gatewayObjectId))
process(response, output)
}
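# Usage sketch for binding a dataset to an on-premises gateway (all GUIDs are
# hypothetical placeholders; datasourceObjectIds is optional per the description above):
# bind_to_gateway(token,
#   datasetId = "00000000-0000-0000-0000-0000000000cc",
#   datasourceObjectIds = list("00000000-0000-0000-0000-0000000000dd"),
#   gatewayObjectId = "00000000-0000-0000-0000-0000000000bb")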
#' Deletes the specified dataset from "My Workspace".Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_dataset <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Deletes the specified dataset from the specified workspace.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_dataset_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of gateways which the specified dataset from "My Workspace" can be bound to.
#' This API is relevant only for datasets that have at least one on-premises connection. For datasets with cloud-only connections, it will return an empty list. Required scope: Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
discover_gateways <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/Default.DiscoverGateways"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of gateways that the specified dataset from the specified workspace can be bound to.
#' This API is relevant only for datasets that have at least one on-premises connection. For datasets with cloud-only connections, it will return an empty list. Required scope: Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
discover_gateways_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.DiscoverGateways"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified dataset from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataset <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified dataset from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataset_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasets from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasets <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasets from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasets_in_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of upstream dataflows for datasets from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataset_to_dataflows_links_in_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/upstreamDataflows"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasources for the specified dataset from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasources <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of datasources for the specified dataset from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasources_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the refresh schedule of a specified DirectQuery or LiveConnection dataset from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_direct_query_refresh_schedule <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/directQueryRefreshSchedule"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the refresh schedule of a specified DirectQuery or LiveConnection dataset from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_direct_query_refresh_schedule_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/directQueryRefreshSchedule"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of gateway datasources for the specified dataset from "My Workspace".Note: Use the new Dataset - Get Datasources API instead.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_gateway_datasources <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/Default.GetBoundGatewayDatasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of gateway datasources for the specified dataset from the specified workspace.Note: Use the Dataset - Get Datasources In Group API instead.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_gateway_datasources_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.GetBoundGatewayDatasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of parameters for the specified dataset from "My Workspace".Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_parameters <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/parameters"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of parameters for the specified dataset from the specified workspace.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_parameters_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/parameters"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the refresh history of the specified dataset from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_refresh_history <- function(token, datasetId, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/refreshes"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the refresh history of the specified dataset from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_refresh_history_in_group <- function(token, datasetId, groupId, top, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/refreshes"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the refresh schedule of the specified dataset from "My Workspace".Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_refresh_schedule <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/refreshSchedule"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the refresh schedule of the specified dataset from the specified workspace.Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_refresh_schedule_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/refreshSchedule"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Triggers a refresh for the specified dataset from "My Workspace".In Shared capacities this call is limited to eight times per day (including refreshes executed via Scheduled Refresh)In Premium capacities this call is not limited in number of times per day, but only by the available resources in the capacity, hence if overloaded, the refresh execution may be throttled until the load is reduced. If this throttling exceeds 1 hour, the refresh will fail.Required scope: Dataset.ReadWrite.AllTo set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
refresh_dataset <- function(token, datasetId, notifyOption, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/refreshes"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(notifyOption = notifyOption))
process(response, output)
}
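# Usage sketch (hypothetical dataset id; notifyOption values such as "MailOnFailure" or
# "NoNotification" follow the public API):
# refresh_dataset(token, datasetId = "00000000-0000-0000-0000-0000000000cc",
#   notifyOption = "MailOnFailure")
# get_refresh_history(token, datasetId = "00000000-0000-0000-0000-0000000000cc", top = 5)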
#' Triggers a refresh for the specified dataset from the specified workspace. In Shared capacities this call is limited to eight times per day (including refreshes executed via Scheduled Refresh). In Premium capacities this call is not limited in number of times per day, but only by the available resources in the capacity; hence, if overloaded, the refresh execution may be throttled until the load is reduced. If this throttling exceeds 1 hour, the refresh will fail. Required scope: Dataset.ReadWrite.All. To set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
refresh_dataset_in_group <- function(token, datasetId, groupId, notifyOption, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/refreshes"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(notifyOption = notifyOption))
process(response, output)
}
#' Note: This API is deprecated and no longer supported.Updates all connections for the specified dataset from "My Workspace".Notes:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
set_all_dataset_connections <- function(token, datasetId, connectionString, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/Default.SetAllConnections"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(connectionString = connectionString))
process(response, output)
}
#' Note: This API is deprecated and no longer supported.Updates all connections for the specified dataset from the specified workspace.Notes:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
set_all_dataset_connections_in_group <- function(token, datasetId, groupId, connectionString, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.SetAllConnections"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(connectionString = connectionString))
process(response, output)
}
#' Transfers ownership over the specified dataset to the current authorized user.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
take_over_in_group <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.TakeOver"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Updates the datasources of the specified dataset from "My Workspace".Important:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_datasources <- function(token, datasetId, updateDetails, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/Default.UpdateDatasources"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(updateDetails = updateDetails))
process(response, output)
}
#' Updates the datasources of the specified dataset from the specified workspace.Important:
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_datasources_in_group <- function(token, datasetId, groupId, updateDetails, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.UpdateDatasources"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(updateDetails = updateDetails))
process(response, output)
}
#' Updates the refresh schedule for the specified DirectQuery or LiveConnection dataset from "My Workspace".This operation is only supported for the dataset owner.A request should contain either a combination of days and times (setting times is optional, otherwise a default single time per day is used) or a valid frequency, but not both.Setting frequency will automatically truncate the days and times arrays.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_direct_query_refresh_schedule <- function(token, datasetId, value, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/directQueryRefreshSchedule"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(value = value))
process(response, output)
}
#' Updates the refresh schedule for the specified DirectQuery or LiveConnection dataset from the specified workspace. This operation is only supported for the dataset owner. A request should contain either a combination of days and times (setting times is optional, otherwise a default single time per day is used) or a valid frequency, but not both. Setting frequency will automatically truncate the days and times arrays. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_direct_query_refresh_schedule_in_group <- function(token, datasetId, groupId, value, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/directQueryRefreshSchedule"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(value = value))
process(response, output)
}
#' Updates the parameters values for the specified dataset from "My Workspace".Important: The dataset must be refreshed for new parameters values to be applied. Wait 30 minutes for the update parameters operation to complete before refreshing.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_parameters <- function(token, datasetId, updateDetails, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/Default.UpdateParameters"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(updateDetails = updateDetails))
process(response, output)
}
#' Updates the parameters values for the specified dataset from the specified workspace.Important: The dataset must be refreshed for the new parameter values to be applied. Wait 30 minutes for the update parameters operation to complete before refreshing.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_parameters_in_group <- function(token, datasetId, groupId, updateDetails, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/Default.UpdateParameters"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(updateDetails = updateDetails))
process(response, output)
}
#' Updates the refresh schedule for the specified dataset from "My Workspace".This operation is only supported for the dataset owner.A request that disables the refresh schedule should contain no other changes.The days array should not be set to empty array.The times may be set to empty array (in which case Power BI will use a default single time per day).The limit on number of time slots per day depends on the type of capacity used (Premium or Shared), see What is Microsoft Power BI Premium.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_refresh_schedule <- function(token, datasetId, value, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/refreshSchedule"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(value = value))
process(response, output)
}
#' Updates the refresh schedule for the specified dataset from the specified workspace. This operation is only supported for the dataset owner. A request that disables the refresh schedule should contain no other changes. The days array should not be set to an empty array. The times array may be set to an empty array (in which case Power BI will use a default single time per day). The limit on the number of time slots per day depends on the type of capacity used (Premium or Shared); see What is Microsoft Power BI Premium. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_refresh_schedule_in_group <- function(token, datasetId, groupId, value, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/refreshSchedule"
response <- httr::PATCH(url = glue::glue(path),
config = httr::config(token = token),
body = list(value = value))
process(response, output)
}
<file_sep>#' Deletes all rows from the specified table within the specified dataset from "My Workspace".Note: this API supports only Push datasets. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_deleterows <- function(token, datasetId, tableName, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/tables/{tableName}/rows"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Deletes all rows from the specified table, within the specified dataset, from the specified workspace.Note: This API supports only Push datasets. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_deleterowsingroup <- function(token, datasetId, groupId, tableName, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/tables/{tableName}/rows"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of tables within the specified dataset from "My Workspace". Note: This API supports only Push datasets. Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_gettables <- function(token, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/tables"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of tables within the specified dataset from the specified workspace.Note: This API supports only Push datasets. Required scope: Dataset.ReadWrite.All or Dataset.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_gettablesingroup <- function(token, datasetId, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/tables"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Creates a new dataset on "My Workspace".Note: This API supports only Push datasets.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_postdataset <- function(token, defaultRetentionPolicy = NULL, datasources, defaultMode, name, relationships, tables, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasources = datasources, defaultMode = defaultMode, name = name, relationships = relationships, tables = tables))
process(response, output)
}
#' Creates a new dataset in the specified workspace.Note: This API supports only Push datasets.Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_postdatasetingroup <- function(token, groupId, defaultRetentionPolicy, datasources, defaultMode, name, relationships, tables, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasources = datasources, defaultMode = defaultMode, name = name, relationships = relationships, tables = tables))
process(response, output)
}
#' Adds new data rows to the specified table within the specified dataset from "My Workspace".Note: This API supports only Push datasets. REST API Limitations: See Power BI REST API limitations. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_postrows <- function(token, datasetId, tableName, rows, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/tables/{tableName}/rows"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(rows = rows))
process(response, output)
}
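# Usage sketch for a Push dataset (hypothetical names and ids). The table/column
# structure below follows the public Push-dataset schema and is an assumption of this
# example:
# datasets_postdataset(token,
#   datasources = NULL, defaultMode = "Push", name = "SalesPush",
#   relationships = NULL,
#   tables = list(list(name = "Sales",
#                      columns = list(list(name = "Product", dataType = "string"),
#                                     list(name = "Amount", dataType = "Double")))))
# datasets_postrows(token,
#   datasetId = "00000000-0000-0000-0000-0000000000cc", tableName = "Sales",
#   rows = list(list(Product = "Widget", Amount = 9.99)))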
#' Adds new data rows to the specified table, within the specified dataset, from the specified workspace.Note: This API supports only Push datasets. REST API Limitations: See Power BI REST API limitations. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_postrowsingroup <- function(token, datasetId, groupId, tableName, rows, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/tables/{tableName}/rows"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(rows = rows))
process(response, output)
}
#' Updates the metadata and schema for the specified table within the specified dataset from "My Workspace".Note: This API supports only Push datasets. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_puttable <- function(token, datasetId, tableName, columns, measures, name, rows, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/datasets/{datasetId}/tables/{tableName}"
response <- httr::PUT(url = glue::glue(path),
config = httr::config(token = token),
body = list(columns = columns, measures = measures, name = name, rows = rows))
process(response, output)
}
#' Updates the metadata and schema for the specified table, within the specified dataset, from the specified workspace.Note: This API supports only Push datasets. Required scope: Dataset.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
datasets_puttableingroup <- function(token, datasetId, groupId, tableName, columns, measures, name, rows, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/datasets/{datasetId}/tables/{tableName}"
response <- httr::PUT(url = glue::glue(path),
config = httr::config(token = token),
body = list(columns = columns, measures = measures, name = name, rows = rows))
process(response, output)
}
<file_sep>#' Clones the specified report from "My Workspace".If after cloning the report and its dataset reside in two different upgraded workspaces or "My Workspace", a shared dataset will be created in the report's workspace. Reports with live connection will lose the live connection when cloning, and will have a direct binding to the target dataset.Required scope: Content.CreateTo set the permissions scope, see Register an app. Required permissions: Report - Write permissions. Target dataset (if provided) - Build permissions.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
clone_report <- function(token, reportId, name, targetModelId, targetWorkspaceId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/Clone"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(name = name, targetModelId = targetModelId, targetWorkspaceId = targetWorkspaceId))
process(response, output)
}
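# Usage sketch (hypothetical report id; leaving targetModelId/targetWorkspaceId as NULL
# clones within "My Workspace" against the same dataset, per the description above):
# clone_report(token, reportId = "00000000-0000-0000-0000-0000000000ee",
#   name = "Sales (copy)", targetModelId = NULL, targetWorkspaceId = NULL)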
#' Clones the specified report from the specified workspace. If after cloning the report and its dataset reside in two different upgraded workspaces or "My Workspace", a shared dataset will be created in the report's workspace. Reports with live connection will lose the live connection when cloning, and will have a direct binding to the target dataset. Required scope: Content.Create. Required permissions: Report - Write permissions. Target dataset (if provided) - Build permissions.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
clone_report_in_group <- function(token, groupId, reportId, name, targetModelId, targetWorkspaceId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/Clone"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(name = name, targetModelId = targetModelId, targetWorkspaceId = targetWorkspaceId))
process(response, output)
}
#' Deletes the specified report from "My Workspace".Required scope: Report.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_report <- function(token, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Deletes the specified report from the specified workspace.Required scope: Report.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
delete_report_in_group <- function(token, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}"
response <- httr::DELETE(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Exports the specified report from "My Workspace" to a .pbix file.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
export_report <- function(token, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/Export"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Exports the specified report from the specified workspace to a .pbix file.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
export_report_in_group <- function(token, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/Export"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Exports the specified report from "My Workspace" to the requested format.Required scope: (all of the below)
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
export_to_file <- function(token, reportId, format, paginatedReportConfiguration, powerBIReportConfiguration, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/ExportTo"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(format = format, paginatedReportConfiguration = paginatedReportConfiguration, powerBIReportConfiguration = powerBIReportConfiguration))
process(response, output)
}
#' Exports the specified report from the specified workspace to the requested format.Required scope: (all of the below)
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
export_to_file_in_group <- function(token, groupId, reportId, format, paginatedReportConfiguration, powerBIReportConfiguration, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/ExportTo"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(format = format, paginatedReportConfiguration = paginatedReportConfiguration, powerBIReportConfiguration = powerBIReportConfiguration))
process(response, output)
}
#' Returns a list of datasources for the specified RDL report from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasources <- function(token, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
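# Note: get_datasources() and get_datasources_in_group() are also defined earlier in this
# package for gateway datasources and for dataset datasources; because all definitions
# share one namespace, whichever file is loaded last masks the others, so these
# duplicated names would need to be disambiguated before use.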
#' Returns a list of datasources for the specified RDL report from the specified workspace. Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_datasources_in_group <- function(token, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/datasources"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the status of the Export to file job from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_export_to_file_status <- function(token, exportId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/exports/{exportId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the status of the Export to file job from the specified workspace.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_export_to_file_status_in_group <- function(token, exportId, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/exports/{exportId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the file of the Export to file job of the specified report from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_file_of_export_to_file <- function(token, exportId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/exports/{exportId}/file"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the file of the Export to file job of the specified report from the specified group.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_file_of_export_to_file_in_group <- function(token, exportId, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/exports/{exportId}/file"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
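# Usage sketch of the export-to-file workflow (hypothetical ids; "PDF" is one of the
# formats accepted by the ExportTo endpoint): start the export, poll its status until it
# reports success, then download the file.
# job <- export_to_file(token, reportId = "00000000-0000-0000-0000-0000000000ee",
#   format = "PDF", paginatedReportConfiguration = NULL, powerBIReportConfiguration = NULL)
# get_export_to_file_status(token, exportId = "hypothetical-export-id",
#   reportId = "00000000-0000-0000-0000-0000000000ee")
# get_file_of_export_to_file(token, exportId = "hypothetical-export-id",
#   reportId = "00000000-0000-0000-0000-0000000000ee")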
#' Returns the specified page within the specified report from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_page <- function(token, pageName, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/pages/{pageName}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified page within the specified report from the specified workspace.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_page_in_group <- function(token, groupId, pageName, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/pages/{pageName}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of pages within the specified report from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_pages <- function(token, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/pages"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of pages within the specified report from the specified workspace.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_pages_in_group <- function(token, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/pages"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified report from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_report <- function(token, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns the specified report from the specified workspace.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_report_in_group <- function(token, groupId, reportId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of reports from "My Workspace".Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_reports <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Returns a list of reports from the specified workspace.Required scope: Report.ReadWrite.All or Report.Read.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_reports_in_group <- function(token, groupId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Rebinds the specified report from "My Workspace" to the requested dataset.If the dataset resides in a different upgraded workspace, a shared dataset will be created in "My Workspace".Reports with live connection will lose the live connection when rebinding, and will have a direct binding to the target dataset.Required scope: Report.ReadWrite.All Required permissions: Report - Write permissions. Target dataset - Build permissions.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
rebind_report <- function(token, reportId, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/Rebind"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasetId = datasetId))
process(response, output)
}
#' Rebinds the specified report from the specified workspace to the requested dataset. If the dataset resides in a different upgraded workspace or in "My Workspace", a shared dataset will be created in the report's workspace. Reports with live connection will lose the live connection when rebinding, and will have a direct binding to the target dataset. Required scope: Report.ReadWrite.All. Required permissions: Report - Write permissions. Target dataset - Build permissions.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
rebind_report_in_group <- function(token, groupId, reportId, datasetId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/Rebind"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(datasetId = datasetId))
process(response, output)
}
#' Updates the specified report from "My Workspace" to have the same content as the report in the request body.Required scope: Report.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_report_content <- function(token, reportId, sourceReport, sourceType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/reports/{reportId}/UpdateReportContent"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(sourceReport = sourceReport, sourceType = sourceType))
process(response, output)
}
#' Updates the specified report from the specified workspace to have the same content as the specified report in the request body.Required scope: Report.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
update_report_content_in_group <- function(token, groupId, reportId, sourceReport, sourceType, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/reports/{reportId}/UpdateReportContent"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(sourceReport = sourceReport, sourceType = sourceType))
process(response, output)
}
<file_sep>#' Refreshes user permissions in Power BI
#' When a user is granted permissions to a workspace, app, or artifact, it might not be immediately available through API calls. This operation refreshes user permissions and makes sure the user permissions are fully updated. Required scope: Workspace.Read.All or Workspace.ReadWrite.All. To set the permissions scope, see Register an app.
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
refresh_user_permissions <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/RefreshUserPermissions"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
<file_sep>#' Returns a list of dataflow storage accounts the user has access to.Required scope: StorageAccount.Read.All or StorageAccount.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
get_dataflow_storage_accounts <- function(token, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/dataflowStorageAccounts"
response <- httr::GET(url = glue::glue(path),
config = httr::config(token = token),
body = FALSE)
process(response, output)
}
#' Assigns the specified workspace to the specified dataflow storage account.Note: To perform this operation, the user must be an admin on the specified workspace and the Power BI dataflow storage account must be enabled. To unassign the specified workspace from a Power BI dataflow storage account, an empty GUID (00000000-0000-0000-0000-000000000000) should be provided as dataflowStorageId.Required scope: StorageAccount.ReadWrite.All and Workspace.ReadWrite.All
#' @param groupId OK
#' @return A `data.frame` object.
#' @export
groups_assigntodataflowstorage <- function(token, groupId, dataflowStorageId, output = "value") {
path <- "https://api.powerbi.com/v1.0/myorg/groups/{groupId}/AssignToDataflowStorage"
response <- httr::POST(url = glue::glue(path),
config = httr::config(token = token),
body = list(dataflowStorageId = dataflowStorageId))
process(response, output)
}
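# Usage sketch (hypothetical ids). As described above, passing the empty GUID unassigns
# the workspace from its dataflow storage account:
# get_dataflow_storage_accounts(token)
# groups_assigntodataflowstorage(token,
#   groupId = "00000000-0000-0000-0000-000000000001",
#   dataflowStorageId = "00000000-0000-0000-0000-0000000000ff")
# groups_assigntodataflowstorage(token,
#   groupId = "00000000-0000-0000-0000-000000000001",
#   dataflowStorageId = "00000000-0000-0000-0000-000000000000")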
<file_sep>package com.example.retrofithomework1;
import android.app.Application;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
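// Application subclass that builds a single shared Retrofit instance at startup.
// Note: to be used, it must be declared in AndroidManifest.xml via android:name=".App".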
public class App extends Application {
private static JsonPlaceHolderApi jsonPlaceHolderApi;
private Retrofit retrofit;
@Override
public void onCreate() {
super.onCreate();
retrofit = new Retrofit.Builder()
.baseUrl("https://jsonplaceholder.typicode.com")
.addConverterFactory(GsonConverterFactory.create())
.build();
jsonPlaceHolderApi = retrofit.create(JsonPlaceHolderApi.class);
}
public static JsonPlaceHolderApi getApi() {
return jsonPlaceHolderApi;
}
}
<file_sep>rootProject.name='RetrofitHomework1'
include ':app'
|
cc66da867c3deffaa9e98aa4419522532717d0f2
|
[
"Java",
"Gradle"
] | 2 |
Java
|
ReTym/RetrofitHomework
|
5376c43bc510a2adcca5a69ad8834e697b5c0603
|
1167dd7652f4ff12d37e380baaf340c5fd435bb6
|
refs/heads/master
|
<repo_name>Maransatto/php-basico<file_sep>/conexao.php
<?php
$servidor = "127.0.0.1"; // cannot be localhost
$usuario = "root";
$senha = "<PASSWORD>";
$dbname = "aula";
// Create connection
$conn = @mysqli_connect(
$servidor,
$usuario,
$senha,
$dbname
) or die ("Erro na conexão");
?><file_sep>/admin.php
<?php
session_start();
if (!empty($_SESSION['id_usuario'])) {
echo "Olá, ".$_SESSION['nome_usuario'].". Bem vindo <br>";
echo "<a href='logout.php'>Sair</a>";
} else {
$_SESSION['msg'] = 'Usuário não autenticado';
header('Location: login.php');
}
?><file_sep>/logout.php
<?php
session_start();
unset(
$_SESSION['id_usuario'],
$_SESSION['nome_usuario'],
$_SESSION['email']
);
$_SESSION['msg'] = 'Usuário não autenticado';
header('Location: login.php');
?><file_sep>/valida.php
<?php
session_start();
include_once("conexao.php");
// validate that the POST request came from the btnLogin button
$btnLogin = filter_input(INPUT_POST, 'btnLogin', FILTER_SANITIZE_STRING);
if ($btnLogin) {
$email = filter_input(INPUT_POST, 'email', FILTER_SANITIZE_STRING);
$senha = filter_input(INPUT_POST, 'senha', FILTER_SANITIZE_STRING);
// echo "$email e $senha";
if ((!empty($email)) && (!empty($senha))) {
// hashed password
// echo password_hash($senha, PASSWORD_DEFAULT);
// look up the user in the database
$query = "SELECT id_usuario, nome, email, senha FROM usuarios WHERE email = '<EMAIL>';";
$result = @mysqli_query($conn, $query) or die ('Não foi possível executar');
if ($result) {
$row = mysqli_fetch_assoc($result);
if (password_verify($senha, $row['senha'])) {
$_SESSION['id_usuario'] = $row['id_usuario'];
$_SESSION['nome_usuario'] = $row['nome'];
$_SESSION['email'] = $row['email'];
header('Location: admin.php');
} else {
$_SESSION['msg'] = "Falha na autenticação";
header('Location: login.php');
}
} else {
$_SESSION['msg'] = "Falha na autenticação";
header('Location: login.php');
}
} else {
$_SESSION['msg'] = "Informe o usuário e a senha";
header('Location: login.php');
}
} else {
$_SESSION['msg'] = "Página não encontrada";
header('Location: login.php');
}
?><file_sep>/login.php
<?php
session_start();
?>
<!DOCTYPE html>
<html lang='pt-br'>
<head>
<meta charset="utf-8">
<title>Login</title>
</head>
<body>
<h2>Área restrita</h2>
<?php
if (isset($_SESSION['msg'])) {
echo $_SESSION['msg'];
unset($_SESSION['msg']);
}
if (isset($_SESSION['msgCadastro'])) {
echo $_SESSION['msgCadastro'];
unset($_SESSION['msgCadastro']);
}
?>
<form method="POST" action="valida.php">
<label>E-mail</label>
<input type="text" name='email' placeholder="Informe seu e-mail">
<br><br>
<label>Senha</label>
<input type="text" name='senha' placeholder="Informe sua senha">
<br><br>
<button type="submit" name='btnLogin' value='OK'>OK</button>
<h4>Você ainda não possui uma conta?</h4>
<a href="cadastrar.php">Clique aqui</a> para criar
</form>
</body>
</html><file_sep>/cadastrar.php
<?php
session_start();
ob_start();
$btnCadastro = filter_input(INPUT_POST, 'btnCadastro', FILTER_SANITIZE_STRING);
if ($btnCadastro) {
include_once('conexao.php');
$dados = filter_input_array(INPUT_POST, FILTER_DEFAULT);
// var_dump($dados);
$nome = $dados['nome'];
$email = $dados['email'];
$senha = password_hash($dados['senha'], PASSWORD_DEFAULT);
$query = "
INSERT INTO usuarios (nome, email, senha)
VALUES ('".$nome."', '".$email."', '".$senha."')
";
$result = @mysqli_query($conn, $query) or die ('Erro ao cadastrar usuário');
if (mysqli_insert_id($conn)) {
$_SESSION['msgCadastro'] = 'Usuário cadastrado com sucesso';
header('Location: login.php');
} else {
$_SESSION['msg'] = 'Erro ao cadastrar usuário';
}
}
?>
<!DOCTYPE html>
<html lang='pt-br'>
<head>
<meta charset="utf-8">
<title>Cadastro</title>
</head>
<body>
<h2>Cadastro</h2>
<?php
if (isset($_SESSION['msg'])) {
echo $_SESSION['msg'];
unset($_SESSION['msg']);
}
?>
<form method="POST" action="">
<label>Nome</label>
<input type="text" name='nome' placeholder="Informe seu nome">
<br><br>
<label>E-mail</label>
<input type="text" name='email' placeholder="Informe seu e-mail">
<br><br>
<label>Senha</label>
<input type="password" name='senha' placeholder="Informe sua senha">
<br><br>
<button type="submit" name='btnCadastro' value='OK'>OK</button>
<br><br>
<a href="login.php">Login</a>
</form>
</body>
</html><file_sep>/aula.session.sql
describe usuarios;
INSERT INTO usuarios (
nome,
email,
senha
) VALUES (
'<NAME>',
'<EMAIL>',
'$<PASSWORD>'
);
SELECT * FROM usuarios;
SELECT id_usuario, nome, email, senha FROM usuarios WHERE email = '<EMAIL>';
truncate table usuarios;
|
7f1cb11c330f70fc697cff66bc232a1c9e007e18
|
[
"SQL",
"PHP"
] | 7 |
PHP
|
Maransatto/php-basico
|
1932b7db98bde438eae5da0e2c104c598fc23c4d
|
0bc1da57b313cbc55b4d3a867f103bea24c72751
|
refs/heads/master
|
<file_sep>#include "Weapon.h"
Weapon::Weapon(int minimumDamage, int maximumDamage, std::string name, int level, int buyValue, int sellValue, int rarity) : Item(name, level, buyValue, sellValue, rarity)
{
this->minimumDamage = minimumDamage;
this->maximumDamage = maximumDamage;
}
Weapon::~Weapon()
{
}
Weapon* Weapon::clone() const
{
return new Weapon(*this);
}
std::string Weapon::getAsString()
{
return std::to_string(this->minimumDamage) + " " + std::to_string(this->maximumDamage);
}
<file_sep>#pragma once
#include <string>
#include <iostream>
#include <iomanip>
#include "Inventory.h"
#include "Weapon.h"
#include "Armor.h"
class Character
{
public:
Character();
virtual ~Character();
void initialize();
int calculateExperienceForNextLevel(const int level);
void print();
std::string getAsString() const;
void levelUp();
inline const double& getXPosition() const { return this->xPosition; }
inline const double& getYPosition() const { return this->yPosition; }
inline const std::string& getName() const { return this->name; }
inline const int& getLevel() const { return this->level; }
inline const int& getExperience() const { return this->experience; }
inline const int& getExperienceForNextLevel() const { return this->experienceForNextLevel; }
inline const int& getHealth() const { return this->health; }
inline const int& getMaximumHealth() const { return this->maximumHealth; }
inline const int& getStamina() const { return this->stamina; }
inline const int& getMinimumDamage() const { return this->minimumDamage; }
inline const int& getMaximumDamage() const { return this->maximumDamage; }
inline const int& getDefense() const { return this->defense; }
private:
double xPosition;
double yPosition;
Inventory inventory;
Weapon weapon;
Armor armor;
std::string name;
int level;
int experience;
int experienceForNextLevel;
int health;
int maximumHealth;
int stamina;
int maximumStamina;
int minimumDamage;
int maximumDamage;
int defense;
int luck;
int statPoints;
int skillPoints;
int strength;
int vitality;
int dexterity;
int intelligence;
std::string readName();
};
<file_sep>#include "Item.h"
Item::Item(std::string name = "NONE", int level = 0, int buyValue = 0, int sellValue = 0, int rarity = 0)
{
this->name = name;
this->level = level;
this->buyValue = buyValue;
this->sellValue = sellValue;
this->rarity = rarity;
}
Item::~Item()
{
}
<file_sep># ConsoleRPG
Just a simple game in C++ that runs on the console.
<file_sep>#pragma once
#include <iostream>
#include <iomanip>
#include <ctime>
#include <vector>
#include <fstream>
#include "Character.h"
class Game
{
public:
Game();
virtual ~Game();
void start();
void endGame();
void initialize();
void mainMenu();
void createNewCharacter();
void saveCharacters();
void loadCharacters();
inline const bool& isPlaying() const { return this->playing; };
private:
int choice;
bool playing;
const int numberOfChoices = 8;
int activeCharacter;
std::vector<Character> characters;
std::string fileName;
int readPlayerChoice();
bool isPlayerChoiceValid(int playerChoice);
};
<file_sep>#pragma once
#include <string>
#include "Item.h"
class Weapon :
public Item
{
private:
int minimumDamage;
int maximumDamage;
public:
Weapon(int minimumDamage = 0, int maximumDamage = 0, std::string name = "NONE", int level = 0, int buyValue = 0, int sellValue = 0, int rarity = 0);
~Weapon();
virtual Weapon* clone() const;
std::string getAsString();
};
<file_sep>#include "Game.h"
int main()
{
srand(time(NULL));
Game game;
game.initialize();
game.start();
while (game.isPlaying())
{
game.mainMenu();
}
return 0;
}<file_sep>#include "Inventory.h"
Inventory::Inventory()
{
this->capacity = 10;
this->numberOfItems = 0;
this->itemsArray = new Item*[this->capacity];
}
Inventory::~Inventory()
{
for (int i = 0; i < this->numberOfItems; i++)
{
delete this->itemsArray[i];
}
delete[] itemsArray;
}
void Inventory::expand()
{
this->capacity *= 2;
Item **tempArray = new Item*[this->capacity];
for (int i = 0; i < this->numberOfItems; i++)
{
tempArray[i] = this->itemsArray[i];
}
delete[] this->itemsArray;
this->itemsArray = tempArray;
this->initialize(this->numberOfItems);
}
void Inventory::initialize(const int from)
{
for (int i = from; i < this->capacity; i++)
{
this->itemsArray[i] = nullptr;
}
}
void Inventory::addItem(const Item &item)
{
if (this->numberOfItems >= this->capacity)
{
this->expand();
}
this->itemsArray[this->numberOfItems++] = item.clone();
}
void Inventory::removeItem(int index)
{
}<file_sep>#include "Game.h"
Game::Game()
{
this->choice = 0;
this->activeCharacter = 0;
this->fileName = "characters.txt";
}
Game::~Game()
{
}
void Game::start()
{
this->playing = true;
}
void Game::initialize()
{
this->createNewCharacter();
}
void Game::mainMenu()
{
bool firstTime = true;
do {
if (!firstTime)
std::cout << "Choice not found." << std::endl;
std::cout << "= MAIN MENU =" << std::endl;
std::cout << "0: Quit" << std::endl;
std::cout << "1: Travel" << std::endl;
std::cout << "2: Shop" << std::endl;
std::cout << "3: Level Up" << std::endl;
std::cout << "4: Rest" << std::endl;
std::cout << "5: Character Sheet" << std::endl;
std::cout << "6: Create new character" << std::endl;
std::cout << "7: Save characters" << std::endl;
std::cout << "8: Load characters" << std::endl;
std::cout << std::endl;
this->choice = this->readPlayerChoice();
std::cout << std::endl;
firstTime = false;
} while (!this->isPlayerChoiceValid(this->choice));
switch (this->choice)
{
case 0:
endGame();
break;
case 5:
this->characters[activeCharacter].print();
break;
case 6:
this->createNewCharacter();
break;
case 7:
this->saveCharacters();
break;
case 8:
break;
default:// case not implemented
endGame();
}
}
int Game::readPlayerChoice()
{
std::cout << std::endl << "Choice: ";
std::string playerInput;
std::getline(std::cin, playerInput);
int playerChoice = atoi(playerInput.c_str());
return playerChoice;
}
bool Game::isPlayerChoiceValid(int playerChoice)
{
return playerChoice >= 0 && playerChoice <= this->numberOfChoices;
}
void Game::createNewCharacter()
{
this->characters.emplace_back();
this->activeCharacter = this->characters.size() - 1;
this->characters[activeCharacter].initialize();
}
void Game::saveCharacters()
{
std::ofstream charactersFile;
charactersFile.open(this->fileName);
for (int i = 0; i < this->characters.size(); i++)
{
charactersFile << this->characters[i].getAsString() << "\n";
}
charactersFile.close();
}
void Game::loadCharacters()
{
}
void Game::endGame()
{
this->playing = false;
}<file_sep>#include "Character.h"
Character::Character()
{
this->xPosition = 0.0;
this->yPosition = 0.0;
this->name = "";
this->level = 0;
this->experience = 0;
this->experienceForNextLevel = 0;
this->strength = 0;
this->vitality = 0;
this->dexterity = 0;
this->intelligence = 0;
this->health = 0;
this->maximumHealth = 0;
this->stamina = 0;
this->maximumStamina = 0;
this->minimumDamage = 0;
this->maximumDamage = 0;
this->defense = 0;
this->luck = 0;
this->statPoints = 0;
this->skillPoints = 0;
}
Character::~Character()
{
}
std::string Character::readName()
{
std::string name = "";
std::cout << "Enter name for character: ";
std::getline(std::cin, name);
return name;
}
void Character::initialize()
{
this->xPosition = 0.0;
this->yPosition = 0.0;
this->name = this->readName();
this->level = 1;
this->experience = 0;
this->experienceForNextLevel = this->calculateExperienceForNextLevel(this->level);
this->strength = 5;
this->vitality = 5;
this->dexterity = 5;
this->intelligence = 5;
this->maximumHealth = (this->vitality * 2) + (this->strength / 2);
this->health = this->maximumHealth;
this->maximumStamina = this->vitality + (this->strength / 2) + (this->dexterity / 3);
this->stamina = this->maximumStamina;
this->minimumDamage = this->strength;
this->maximumDamage = this->strength + 2;
this->defense = this->dexterity + (this->intelligence / 2);
this->luck = this->intelligence;
this->statPoints = 0;
this->skillPoints = 0;
}
int Character::calculateExperienceForNextLevel(const int level)
{
return static_cast<int>((50 / 3) * (pow(level, 3) - 6 * pow(level, 2) + (17 * level) - 12)) + 100;
}
std::string Character::getAsString() const
{
return std::to_string(this->xPosition) + " "
+ std::to_string(this->yPosition) + " "
+ this->name + " "
+ std::to_string(this->level) + " "
+ std::to_string(this->experience) + " "
+ std::to_string(this->strength) + " "
+ std::to_string(this->vitality) + " "
+ std::to_string(this->dexterity) + " "
+ std::to_string(this->intelligence) + " "
+ std::to_string(this->health) + " "
+ std::to_string(this->stamina) + " "
+ std::to_string(this->statPoints) + " "
+ std::to_string(this->skillPoints);
}
void Character::print()
{
std::cout << "= Character Sheet =" << std::endl;
std::cout << "= Name: " << this->name << std::endl;
std::cout << "= Level: " << this->level << std::endl;
std::cout << "= Experience: " << this->experience << std::endl;
std::cout << "= Experience to next level: " << this->experienceForNextLevel << std::endl;
std::cout << std::endl;
std::cout << "= Strength: " << this->strength << std::endl;
std::cout << "= Vitality: " << this->vitality << std::endl;
std::cout << "= Dexterity: " << this->dexterity << std::endl;
std::cout << "= Intelligence: " << this->intelligence << std::endl;
std::cout << std::endl;
std::cout << "= HP: " << this->health << " / " << this->maximumHealth << std::endl;
std::cout << "= Stamina: " << this->stamina << " / " << this->maximumStamina << std::endl;
std::cout << "= Damage: " << this->minimumDamage << " / " << this->maximumDamage << std::endl;
std::cout << "= Defense: " << this->defense << std::endl;
std::cout << "= Luck: " << this->luck << std::endl;
std::cout << std::endl;
}
void Character::levelUp()
{
while (this->experience >= this->experienceForNextLevel)
{
this->experience -= this->experienceForNextLevel;
this->level++;
this->experienceForNextLevel = this->calculateExperienceForNextLevel(this->level);
this->statPoints++;
this->skillPoints++;
}
}
<file_sep>#pragma once
#include "Item.h"
class Armor :
public Item
{
private:
int type;
int defense;
public:
Armor(int type = 0, int defense = 0, std::string name = "NONE", int level = 0, int buyValue = 0, int sellValue = 0, int rarity = 0);
~Armor();
virtual Armor* clone() const;
std::string getAsString();
};
|
8640418b96223c5588cdb24afe802104bd88854e
|
[
"Markdown",
"C++"
] | 11 |
C++
|
evaporei/ConsoleRPG
|
312b5619199f3625aa7e937cff475c18e02087d6
|
607909cf6de091a2be8b198d83fe496f21e0dd2b
|
refs/heads/master
|
<repo_name>GowthamGirithar/Go_Examples_Set_2<file_sep>/ArrayProgrammingQuestions.go
package main
import (
"fmt"
"math"
"sort"
)
func main() {
var arr = [] int{1, 2, 4, 5, 7}
FindMissingElementInArray()
IsElementPresent(arr, 3)
GuessingGame()
FindDuplicateNumber()
FindDuplicateCount()
LargestAndSmallestNumber()
PairIsEqualToGivenNumber()
RemoveGivenElement(4)
RemoveGivenElementWithoutAlteringOrder(4)
RemoveDuplicatesFromArray()
}
func RemoveDuplicatesFromArray() {
var arr = [] int{1, 4, 3, 2, 2}
var duplicateCount = make(map[int]int) //1->1, 2-> 2, 3->1, 4->1
for i := range arr {
_, exists := duplicateCount[arr[i]]
if exists {
duplicateCount[arr[i]] += 1 //store index and count
} else {
duplicateCount[arr[i]] = 1
}
}
var data []int
for k, v := range duplicateCount {
if v == 1 {
data = append(data, k)
}
}
fmt.Printf("%v", data)
}
func RemoveGivenElement(i int) {
var arr = [] int{1, 4, 3, 2, 2}
var numberIndex int
for index := range arr {
if arr[index] == i {
numberIndex = index
}
}
arr[numberIndex] = arr[len(arr)-1] // copy the last element to the number index
arr[len(arr)-1] = 0 // invalidate the data
arr = arr[:len(arr)-1] // truncate
fmt.Printf("%v", arr)
}
func RemoveGivenElementWithoutAlteringOrder(i int) {
var arr = [] int{1, 4, 3, 2, 2}
var numberIndex int
for index := range arr {
if arr[index] == i {
numberIndex = index
}
}
copy(arr[numberIndex:], arr[numberIndex+1:]) // [1,3,2,2,2]
arr = arr[:len(arr)-1] // truncate
fmt.Printf("%v", arr)
}
func PairIsEqualToGivenNumber() {
var arr = [] int{1, 4, 3, 2, 2}
var givenNumber int = 6
var data = make([]int, 0, len(arr)) // start empty; a pre-filled length would add zeros that could be matched as stored elements
for i := range arr {
isPresent := IsElementPresent(data, givenNumber-arr[i])
if isPresent {
fmt.Print("The pair is ", arr[i], givenNumber-arr[i])
break
}
data = append(data, arr[i]) // check each time in slice and see whether the element is present after subtraccting
}
}
// Smallest and largest number
func LargestAndSmallestNumber() {
var arr = [] int{1, 2, 3, 2, 2}
smallestNumber := math.MaxInt64 // math package have all the values
largestNumber := 0
for i := range arr {
if arr[i] < smallestNumber {
smallestNumber = arr[i]
}
if arr[i] > largestNumber {
largestNumber = arr[i]
}
}
println(" the smallest number and largest number is ", smallestNumber, largestNumber)
}
func FindDuplicateCount() {
var arr = [] int{1, 2, 3, 2, 2}
var duplicateCount = make(map[int]int)
for i := range arr {
_, exists := duplicateCount[arr[i]]
if exists {
duplicateCount[arr[i]] = duplicateCount[arr[i]] + 1
} else {
duplicateCount[arr[i]] = 1
}
}
for k, v := range duplicateCount {
if v > 1 {
println("duplicated count is ", k, v)
}
}
}
func FindDuplicateNumber() {
var arr = [] int{1, 2, 3, 5, 2} // cannot use [...]
sort.Ints(arr)
for i := 0; i < len(arr)-1; i++ {
if arr[i+1] == arr[i] {
println("the duplicate number is ", arr[i])
}
}
}
//find the guessing number
func GuessingGame() {
var s string
fmt.Printf("Pick an integer from 0 to 100.\n")
answer := sort.Search(100, func(i int) bool {
fmt.Printf("Is your number <= %d? ", i)
fmt.Scanf("%s", &s)
return s != "" && s[0] == 'y'
})
fmt.Printf("Your number is %d.\n", answer)
}
//element is present
func IsElementPresent(arr []int, num int) bool {
// cannot use [...]
value := sort.SearchInts(arr, num)
//println(value) // prints the index where the value could be inserted; if the element is present it returns that index
if value < len(arr) && arr[value] == num {
return true
}
return false
}
//missing number in array
func FindMissingElementInArray() {
var arr = [] int{1, 2, 4, 5, 7} // cannot use [...]
sort.Ints(arr)
for i := 0; i < len(arr)-1; i++ {
if arr[i+1]-arr[i] != 1 {
println("the missing number is ", arr[i]+1)
}
}
}
<file_sep>/Collections_Examples/003_Min_dist_array.go
package main
import (
"fmt"
"math"
)
func main() {
//Find min distance in an unsorted array
arr:=[]int{1,30,10,10,20,4,9,9}
x:=1
y:=20
min:=math.MaxInt64
for i:=0; i<len(arr) ;i++{
for j:=i+1; j< len(arr); j++{
if (arr[i]==x && arr[j]==y) || (arr[i]==y && arr[j]==x){
if min > j-i{
min= j-i
}
}
}
}
fmt.Println(min)
fmt.Println(math.Abs(-8))// to avoid negative number //8
}
<file_sep>/Collections_Examples/002_Stack_Queue.go
package main
import (
"container/list"
"fmt"
)
//stack and queue implementation
func main() {
words := list.New() // the list length changes by one with every insert or remove operation
for i:=0 ; i<5; i++{
addElements(i , words)
}
//retrieve the data // Last in First Out - STACK implementation
for i:=words.Back() ; i != nil ; i = i.Prev(){
fmt.Println(i.Value) // fmt.Println prints the stored value; the builtin println would print the interface's internal pointers
}
println("________________________________")
//retrieve the data // fifo queue
for i:=words.Front() ; i != nil ; i = i.Next(){
fmt.Println(i.Value) // fmt.Println prints the stored value; the builtin println would print the interface's internal pointers
}
}
func addElements(data interface{}, words *list.List) {
words.PushBack(data)
}
<file_sep>/StringProgrammingQuestions.go
package main
import (
"fmt"
"strconv"
"strings"
"unicode"
"unicode/utf8"
)
func main() {
StringContainDigits()
IsStringNumber()
NumberAndDigit()
PalindromeCheck()
}
func PalindromeCheck() {
var str1 = "Madam"
str1=strings.ToLower(str1)
for pos,_ := range str1{
if str1[pos] != str1[utf8.RuneCountInString(str1)-1-pos] {
println("Is not palindrome")
break
}
}
}
func NumberAndDigit(){
digit := rune('1')
fmt.Println(unicode.IsDigit(digit))//true
fmt.Println(unicode.IsNumber(digit))//true
letter := rune('Ⅷ')
fmt.Println(unicode.IsDigit(letter))//false
fmt.Println(unicode.IsNumber(letter))//true
other := rune('½')
fmt.Println(unicode.IsDigit(other))//false
fmt.Println(unicode.IsNumber(other))//true
}
func IsStringNumber() {
var str ="Test"
_, err := strconv.Atoi(str) // convert string to number
if err == nil{
println("is number")
}
}
func StringContainDigits() {
var str ="Test123"
strings.ContainsAny(str, "s")// for sample
for _, val := range str{ //return rune
if unicode.IsDigit(val){ // for rune , it will unicode
println("yes digit is there")
break
}
}
}
<file_sep>/Collections_Examples/004_Missing_smallest_num.go
package main
import "fmt"
func main() {
//sorted array. find smallest missing number
//no duplicate and always start with 0
arr:=[]int{0,1,8,9,10}
//linear
for i, v := range arr{
if i !=v{
fmt.Println("Missing number is ",i)
break
}
}
//binary search
data:=findSmallestNumber(arr,0,len(arr)-1)
fmt.Println("Missing number via recurssion is ",data)
}
func findSmallestNumber(arr []int, start , end int) int{
if len(arr)==0{
return 0
}
if arr[start] != start{
return start
}
midLength:= (start+end)/2
if arr[midLength] != midLength{
return findSmallestNumber(arr,start,midLength)
}
return findSmallestNumber(arr,midLength+1, end)
}
<file_sep>/Collections_Examples/001_List.go
package main
import (
"container/list"
"fmt"
)
//double linked list in go
func main() {
words := list.New()
println("the initial size of the list is ", words.Len()) // 0
//push at the end
for i:=0 ; i<6; i++{
words.PushBack(i) //0 , 1, 2, 3, 4, 5
}
//remove 3 from the list
//initialize where to point the iterating pointer back or front
for i:=words.Back() ; i != nil ; i=i.Prev(){
if i.Value == 3{
words.Remove(i)
}
}
// insert after 2
for i:=words.Back() ; i != nil ; i=i.Prev(){
if i.Value == 2{
words.InsertAfter(3, i)
}
}
// interchanging 3 and 4
for i:=words.Back() ; i != nil ; i=i.Prev(){
if i.Value == 3 && i.Next() != nil{
words.MoveAfter(i,i.Next())
}
}
//interchanging 3 and 4
for i:=words.Back() ; i != nil ; i=i.Prev(){
if i.Value == 3 && i.Prev() != nil{
words.MoveBefore(i,i.Prev())
}
}
for i:=words.Back() ; i != nil ; i=i.Prev(){
fmt.Println(i.Value)
}
}
<file_sep>/README.md
# Go_Examples_Set_2
Demos on
1. List
2. Implementing Stack and Queue with List
3. Array Related Programming questions - Basic
4. String Related Programming questions - Basic
|
1499d93181a0367d9e2ed19099f7ddb1112d2ec9
|
[
"Markdown",
"Go"
] | 7 |
Go
|
GowthamGirithar/Go_Examples_Set_2
|
826e85af664b872d23fe77072f2a5864ccadaa7e
|
8cec171857de00860fce22279a7f2f7f767860d2
|
refs/heads/master
|
<repo_name>seyidkanan/android-progressbar-in-rv<file_sep>/README.md
# android-progressbar-in-rv
Test project showing a progress bar as a RecyclerView item while more content is loaded (onMoreListener).
<file_sep>/app/src/main/java/az/seyidkanan/progressinlistview/viewholder/ContentViewHolder.java
package az.seyidkanan.progressinlistview.viewholder;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.widget.TextView;
import az.seyidkanan.progressinlistview.R;
/**
* Created by Kanan on 3/25/2018.
*/
public class ContentViewHolder extends RecyclerView.ViewHolder {
private TextView contentTextView;
public ContentViewHolder(View itemView) {
super(itemView);
contentTextView = (TextView) itemView.findViewById(R.id.contentTextView);
}
public void bind(String text) {
contentTextView.setText(text);
}
}
<file_sep>/app/src/main/java/az/seyidkanan/progressinlistview/adapter/RVAdapter.java
package az.seyidkanan.progressinlistview.adapter;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import java.util.ArrayList;
import java.util.List;
import az.seyidkanan.progressinlistview.model.ContentModel;
import az.seyidkanan.progressinlistview.R;
import az.seyidkanan.progressinlistview.viewholder.ContentViewHolder;
import az.seyidkanan.progressinlistview.viewholder.ProgressViewHolder;
/**
* Created by Kanan on 3/25/2018.
*/
public class RVAdapter extends RecyclerView.Adapter {
private List<ContentModel> contentList = new ArrayList<>();
private final int TYPE_PROGRESS = 1;
private final int TYPE_TEXT = 2;
public void setDataList(List<ContentModel> contentListModel) {
if (getContentList().size() > 0)
removeElement(getContentList().size() - 1);
contentList.addAll(contentListModel);
notifyDataSetChanged();
}
private void removeElement(int position) {
contentList.remove(position);
notifyItemRemoved(position);
notifyItemRangeChanged(position, contentList.size());
}
public void setData(ContentModel content) {
contentList.add(content);
notifyDataSetChanged();
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view;
RecyclerView.ViewHolder viewHolder;
switch (viewType) {
case TYPE_TEXT:
view = LayoutInflater.from(parent.getContext()).inflate(R.layout.content_element, parent, false);
viewHolder = new ContentViewHolder(view);
break;
default:
view = LayoutInflater.from(parent.getContext()).inflate(R.layout.progress_element, parent, false);
viewHolder = new ProgressViewHolder(view);
break;
}
return viewHolder;
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
switch (getItemViewType(position)) {
case TYPE_TEXT:
ContentViewHolder contentViewHolder = (ContentViewHolder) holder;
contentViewHolder.bind(contentList.get(position).getText());
break;
default:
break;
}
}
@Override
public int getItemCount() {
return contentList.size();
}
@Override
public int getItemViewType(int position) {
if (contentList.get(position).isProgress()) {
return TYPE_PROGRESS;
} else {
return TYPE_TEXT;
}
}
public List<ContentModel> getContentList() {
return contentList;
}
}
<file_sep>/app/src/main/java/az/seyidkanan/progressinlistview/model/ContentModel.java
package az.seyidkanan.progressinlistview.model;
/**
* Created by Kanan on 3/25/2018.
*/
public class ContentModel {
private String text;
private boolean progress = false;
public ContentModel() {
progress = true;
}
public ContentModel(String text) {
this.text = text;
progress = false;
}
public boolean isProgress() {
return progress;
}
public void setProgress(boolean progress) {
this.progress = progress;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
@Override
public String toString() {
return "ContentModel{" +
"text='" + text + '\'' +
", progress=" + progress +
'}';
}
}
|
fe969bb7ca97c83cd975675f00600f2e642dd477
|
[
"Markdown",
"Java"
] | 4 |
Markdown
|
seyidkanan/android-progressbar-in-rv
|
efcd51f92ce54cf34db8ce08091f3613469d6caa
|
9f1b3bb380438411b4055f920d6005e2d347248f
|
refs/heads/master
|
<repo_name>tsu84/Eventbrite.NET<file_sep>/Xml/BarcodeBuilder.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
using EventbriteNET.Entities;
namespace EventbriteNET.Xml
{
class BarcodeBuilder : BuilderBase
{
public BarcodeBuilder(EventbriteContext context) : base(context) { }
public Barcode Build(string xmlString)
{
this.Validate(xmlString);
var toReturn = new Barcode(this.Context);
var doc = new XmlDocument();
doc.LoadXml(xmlString);
toReturn.Id = TryGetElementValue("id", doc);
toReturn.Status = TryGetElementValue("status", doc);
return toReturn;
}
}
}
<file_sep>/Entities/Venue.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace EventbriteNET.Entities
{
/// <summary>
/// venue
/// The event venue with the following structure:
/// </summary>
public class Venue : EntityBase
{
public Venue(EventbriteContext context)
: base(context)
{
}
/// <summary>
///id integer
///The venue ID.
/// </summary>
public long Id { get; set; }
/// <summary>
/// name string - The venue name.
/// </summary>
public string Name { get; set; }
/// <summary>
/// address string - The venue address.
/// </summary>
public string Address { get; set; }
/// <summary>
/// address_2 string - The venue address (continued).
/// </summary>
public string Address2 { get; set; }
/// <summary>
/// city string - The venue city.
/// </summary>
public string City { get; set; }
/// <summary>
/// region string - The venue state/province/county depending on the country.
/// </summary>
public string Region { get; set; }
/// <summary>
/// postal_code string - The venue postal code.
/// </summary>
public string PostalCode { get; set; }
/// <summary>
/// country string - The venue country name.
/// </summary>
public string Country { get; set; }
/// <summary>
/// country_code string - The venue country code, in 2-letter ISO 3166 format (e.g., “US”).
/// </summary>
public string CountryCode { get; set; }
/// <summary>
/// longitude float - The venue GeoLocation in WGS84 (Longitude).
/// </summary>
public decimal Longitude { get; set; }
/// <summary>
/// latitude float - The venue GeoLocation in WGS84 (Latitude).
/// </summary>
public decimal Latitude { get; set; }
/// <summary>
/// Lat-Long string - The venue GeoLocation in WGS84 (Latitude/Longitude).
/// </summary>
public string LatLong { get; set; }
public override string ToString()
{
var address = Address + (!String.IsNullOrEmpty(Address2) ? "," + Address2 : String.Empty);
var cityregion = City + (!String.IsNullOrEmpty(Region) ? "," + Region : String.Empty);
return String.Format("{0} {1} {2} {3}", Name, address, cityregion, CountryCode);
}
}
}<file_sep>/Entities/Barcode.cs
namespace EventbriteNET.Entities
{
public class Barcode : EntityBase
{
public Barcode(EventbriteContext context): base(context) { }
public string Id;
public string Status;
}
}<file_sep>/Entities/Answer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace EventbriteNET.Entities
{
public class Answer : EntityBase
{
public string id, question, answer;
public Answer(EventbriteContext context) : base(context) { }
}
}
<file_sep>/Xml/AnswerBuilder.cs
using EventbriteNET.Entities;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
namespace EventbriteNET.Xml
{
class AnswerBuilder : BuilderBase
{
public AnswerBuilder(EventbriteContext context) : base(context) { }
public Answer Build(string xmlString)
{
this.Validate(xmlString);
var toReturn = new Answer(this.Context);
var doc = new XmlDocument();
doc.LoadXml(xmlString);
toReturn.id = TryGetElementValue("question_id", doc);
toReturn.question = TryGetElementValue("question", doc);
toReturn.answer = TryGetElementValue("answer_text", doc);
return toReturn;
}
}
}
<file_sep>/Entities/User.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using EventbriteNET.HttpApi;
namespace EventbriteNET.Entities
{
public class User
{
EventbriteContext _context;
public User(EventbriteContext context)
{
this._context = context;
}
private IDictionary<long, Event> userevents;
public IDictionary<long, Event> Events
{
get
{
if (userevents == null)
{
userevents = new Dictionary<long, Event>();
var eventArray = new UserEventsRequest(_context).GetResponse();
foreach (var eventEntity in eventArray)
{
userevents.Add(eventEntity.Id, eventEntity);
}
}
return userevents;
}
}
}
}
<file_sep>/HttpApi/UserEventsRequest.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using EventbriteNET.Entities;
using EventbriteNET.Xml;
namespace EventbriteNET.HttpApi
{
public class UserEventsRequest: RequestBase
{
const string PATH = "user_list_events";
public UserEventsRequest( EventbriteContext context, string[] statuses =null)
: base(PATH, context)
{
//statuses = statuses ?? new string[] {"live", "started"};
//this.AddGet("event_statuses", String.Join(",", statuses));
}
public Event[] GetResponse()
{
return new UserEventsBuilder(this.Context).Build(base.GetResponse());
}
}
}
|
c67c241a7e81d4414fba9a8bda4f2d655fa2039d
|
[
"C#"
] | 7 |
C#
|
tsu84/Eventbrite.NET
|
768ba9d5971dc6af0fbb1cd0fca00659492bbb6d
|
739e245f5759a9f8ba1e2d4094275357f76efc4d
|
refs/heads/master
|
<repo_name>Nachodlv/timebot<file_sep>/Assets/Scripts/Entities/Player/Audio/PlayerAudioReferences.cs
using System;
using UnityEngine;
using Utils.Audio;
namespace Entities.Player
{
[CreateAssetMenu(fileName = "Player Audio", menuName = "Audio/PlayerAudio", order = 1)]
public class PlayerAudioReferences : ScriptableObject
{
public CustomAudioClip swordDraw;
public CustomAudioClip attackClip;
public CustomAudioClip jumpClip;
public CustomAudioClip hitGroundClip;
public CustomAudioClip timeStopAbility;
public CustomAudioClip dieClip;
public CustomAudioClip walkingClip;
public CustomAudioClip continuousStoppedTime;
public CustomAudioClip timeStopEnd;
public CustomAudioClip dash;
public CustomAudioClip batteryPickUp;
public CustomAudioClip reflectBullet;
public CustomAudioClip lowHealth;
public CustomAudioClip receiveDamage;
}
}<file_sep>/Assets/Scripts/Utils/Audio/ThemeSetter.cs
using UnityEngine;
namespace Utils.Audio
{
public class ThemeSetter: MonoBehaviour
{
[SerializeField] private CustomAudioClip audioClip;
[SerializeField] private bool pauseOtherBackgroundMusic;
private void Start()
{
if(pauseOtherBackgroundMusic) AudioManager.Instance.StopAllBackgroundMusic(AudioOptions.Default());
AudioManager.Instance.PlayBackgroundMusic(audioClip.audioClip, new AudioOptions{Volume = audioClip.volume});
}
}
}<file_sep>/Assets/Scripts/Entities/Player/Movement/CharacterController.cs
using System;
using UnityEngine;
namespace Entities.Player.Movement
{
[RequireComponent(typeof(Rigidbody2D))]
public class CharacterController : MonoBehaviour
{
private const float GroundedRadius = .2f; // Radius of the overlap circle to determine if grounded
[SerializeField] private float jumpForce = 400f; // Amount of force added when the player jumps.
[Range(0, .3f)] [SerializeField] private float movementSmoothing = .05f; // How much to smooth out the movement
[SerializeField] private LayerMask whatIsGround; // A mask determining what is ground to the character
[SerializeField] private Transform groundCheck; // A position marking where to check if the player is grounded.
[SerializeField] private int maxJumps = 1;
[SerializeField] private float maxVelocity = 5f;
[SerializeField] private float timeJumping = 1f;
public event Action OnJumpEvent;
public event Action OnLandEvent;
public bool Grounded { get; private set; }
public Vector3 Velocity => _myRigidBody2D.velocity;
public bool FacingRight { get; private set; } = true;
public bool AirControl { get; set; } = true;
private Rigidbody2D _myRigidBody2D;
private Vector3 _velocity = Vector3.zero;
private bool _wasCrouching;
private int _currentJumps;
private Collider2D[] _colliders;
private Vector2 _jumpDirection;
private bool _jumping;
private float _jumpTime;
private void Awake()
{
_myRigidBody2D = GetComponent<Rigidbody2D>();
_colliders = new Collider2D[5];
}
private void FixedUpdate()
{
if (_jumping)
{
Jump();
return;
}
var wasGrounded = Grounded;
var size = Physics2D.OverlapCircleNonAlloc(groundCheck.position, GroundedRadius, _colliders, whatIsGround);
for (var i = 0; i < size; i++)
{
if (_colliders[i].gameObject == gameObject) continue;
Grounded = true;
_currentJumps = 0;
if (!wasGrounded)
{
OnLandEvent?.Invoke();
}
return;
}
Grounded = false;
}
public void Move(float move, bool crouch, bool canFlip )
{
var velocity = _myRigidBody2D.velocity;
Vector3 targetVelocity = new Vector2(move * 10f, velocity.y);
var newVelocity = Vector3.SmoothDamp(velocity, targetVelocity, ref _velocity,
movementSmoothing, maxVelocity);
_myRigidBody2D.velocity = newVelocity;
if (move > 0 && !FacingRight && canFlip)
{
Flip();
}
else if (move < 0 && FacingRight && canFlip)
{
Flip();
}
}
public void StartJumping()
{
StartJumpWithOptions(false, 0, jumpForce);
}
public void StopJumping()
{
_jumping = false;
}
public void StartJumpWithOptions(bool ignoreMaximumJumps, float angle, float force)
{
if ((!ignoreMaximumJumps && _currentJumps >= maxJumps) || _jumping) return;
_jumping = true;
Grounded = false;
_jumpTime = Time.time;
_currentJumps ++;
var velocity = _myRigidBody2D.velocity;
velocity.y = velocity.y < 0 ? 0 : velocity.y;
_myRigidBody2D.velocity = velocity;
_jumpDirection = new Vector2(0f, force);
_jumpDirection = Quaternion.Euler(0, 0, angle) * _jumpDirection;
_myRigidBody2D.AddForce(_jumpDirection * 5);
OnJumpEvent?.Invoke();
}
private void Jump()
{
if(Time.time - _jumpTime > timeJumping) StopJumping();
_myRigidBody2D.AddForce(_jumpDirection * ((timeJumping - (Time.time - _jumpTime)) / timeJumping));
}
private void Flip()
{
FacingRight = !FacingRight;
var myTransform = transform;
var theScale = myTransform.localScale;
theScale.x *= -1;
myTransform.localScale = theScale;
}
}
}
<file_sep>/Assets/Scripts/Entities/Player/Audio/CharacterAudioHandler.cs
using Entities.Grabbables;
using Entities.Player.Attack;
using UnityEngine;
using Utils.Audio;
using CharacterController = Entities.Player.Movement.CharacterController;
namespace Entities.Player.Audio
{
[RequireComponent(typeof(AudioSource))]
public class CharacterAudioHandler : MonoBehaviour, IPausable
{
[Header("Audios")] [SerializeField] private PlayerAudioReferences audioReferences;
[Header("References")]
[SerializeField] private CharacterController characterController;
[SerializeField] private APlayer player;
[SerializeField] private CharacterAnimator characterAnimator;
[SerializeField] private PlayerAttacker playerAttacker;
[SerializeField] private Grabber grabber;
[Header("Settings")]
[SerializeField] private float timeBetweenWalkingClips;
[SerializeField] private float timeBetweenLowHealthClip = 0.2f;
private float _lastWalkingClip;
private bool _paused;
private bool _isInLowHealth;
private float _lastLowHealthClip;
private AudioSource _audioSource;
private void Awake()
{
_audioSource = GetComponent<AudioSource>();
characterController.OnJumpEvent += () => PlaySound(audioReferences.jumpClip);
characterController.OnLandEvent += () => PlaySound(audioReferences.hitGroundClip);
player.OnDie += () => PlaySound(audioReferences.dieClip);
characterAnimator.OnSwordDrawn += () => PlaySound(audioReferences.swordDraw);
characterAnimator.OnAttackAnimation += () => PlaySound(audioReferences.attackClip);
player.DashAbility.OnDash += () => PlaySound(audioReferences.dash);
player.OnLowHealth += (isInLowHealth) => _isInLowHealth = isInLowHealth;
player.OnDamageReceive += () => PlaySound(audioReferences.receiveDamage);
playerAttacker.OnReflectBullet += () => PlaySound(audioReferences.reflectBullet);
grabber.OnGrab += () => PlaySound(audioReferences.batteryPickUp);
}
private void Update()
{
var now = Time.time;
if (now - _lastWalkingClip > timeBetweenWalkingClips && characterController.Grounded &&
Mathf.Abs(characterController.Velocity.x) > 0.01f)
{
PlaySound(audioReferences.walkingClip);
_lastWalkingClip = now;
}
if (_isInLowHealth && now - _lastLowHealthClip > timeBetweenLowHealthClip)
{
PlaySound(audioReferences.lowHealth);
_lastLowHealthClip = now;
}
}
private void PlaySound(CustomAudioClip customClip)
{
AudioManager.Instance.PlaySound(customClip.audioClip, new AudioOptions
{
Volume = customClip.volume,
});
}
private void PlayInterruptibleSound(CustomAudioClip customClip)
{
_audioSource.clip = customClip.audioClip;
_audioSource.volume = customClip.volume;
_audioSource.Play();
}
public void Pause()
{
_paused = true;
PlayInterruptibleSound(audioReferences.timeStopAbility);
AudioManager.Instance.PauseAllBackgroundMusic();
AudioManager.Instance.PlayBackgroundMusic(audioReferences.continuousStoppedTime.audioClip, new AudioOptions
{
Volume = audioReferences.continuousStoppedTime.volume
});
}
public void UnPause()
{
AudioManager.Instance.StopBackgroundMusic(audioReferences.continuousStoppedTime.audioClip);
if(_paused) PlayInterruptibleSound(audioReferences.timeStopEnd);
_paused = false;
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/AEnemy.cs
using System;
using UnityEngine;
namespace Entities.Enemy
{
[RequireComponent(typeof(Collider2D))]
public class AEnemy: DamageReceiver, IHaveStats
{
[SerializeField] private Stats stats;
public Stats Stats => stats;
public event Action OnDie;
private Collider2D _collider;
protected override void Awake()
{
base.Awake();
_collider = GetComponent<Collider2D>();
}
protected override bool DealDamage(float damage, bool instantKill)
{
stats.Health = instantKill? 0 : stats.Health - damage;
if (stats.Health > 0) return false;
Die();
return true;
}
private void Die()
{
_collider.enabled = false;
RigidBody2D.isKinematic = true;
OnDie?.Invoke();
}
}
}<file_sep>/Assets/Scripts/UI/TimeStopDisplayer.cs
using System;
using Entities.Player.Abilities;
using UnityEngine;
namespace UI
{
[RequireComponent(typeof(StatBar))]
public class TimeStopDisplayer : MonoBehaviour
{
[SerializeField] private TimeStopAbility timeStop;
private StatBar _statBar;
private void Awake()
{
_statBar = GetComponent<StatBar>();
}
private void Start()
{
_statBar.MaxValue = timeStop.TimeAvailableToStop;
_statBar.CurrentValue = timeStop.TimeAvailableToStop;
}
private void Update()
{
_statBar.CurrentValue = timeStop.TimeAvailableToStop;
}
}
}<file_sep>/Assets/Scripts/Levels/LevelSettings.cs
using UnityEngine;
namespace Levels
{
[CreateAssetMenu(fileName = "Level", menuName = "NewLevel", order = 0)]
public class LevelSettings : ScriptableObject
{
public int index;
public string levelName;
public float time;
public Vector2 playerPosition;
public Sprite[] backgrounds;
}
}<file_sep>/Assets/Scripts/Entities/Player/Movement/WallJumper.cs
using System;
using UnityEngine;
using Utils;
namespace Entities.Player.Movement
{
[RequireComponent(typeof(CharacterController), typeof(Rigidbody2D))]
public class WallJumper : MonoBehaviour
{
[SerializeField] private Transform rightCheck;
[SerializeField] private Transform leftCheck;
[SerializeField] private LayerMask whatIsWall;
[SerializeField] private float maximumTimeGrabbingWall = 1f;
[SerializeField] private float wallJump = 1000f;
[SerializeField] private float timeNotMoving = 1f;
public delegate void TouchingWall(bool isTouching, bool isRight);
public event TouchingWall OnTouchingWall;
private const float TriggerRadius = 0.2f;
private CharacterController _characterController;
private bool _touchingRightWall;
private bool _touchingLeftWall;
private Rigidbody2D _rigidBody2D;
private Collider2D[] _colliders;
private float _timeGrabbingWall;
private float _previousCollider;
private bool _rightTrigger;
private bool _leftTrigger;
private WaitSeconds _waitSeconds;
private void Awake()
{
_characterController = GetComponent<CharacterController>();
_rigidBody2D = GetComponent<Rigidbody2D>();
_colliders = new Collider2D[5];
_waitSeconds = new WaitSeconds(this, RegainMovement, timeNotMoving);
OnTouchingWall += WallTouched;
_characterController.OnLandEvent += OnLand;
}
private void Update()
{
if (_touchingLeftWall || _touchingRightWall) _timeGrabbingWall += Time.deltaTime;
if (!_touchingRightWall && _rightTrigger)
{
OnTouchingWall?.Invoke(true, _characterController.FacingRight);
return;
}
if (!_touchingLeftWall && _leftTrigger)
{
OnTouchingWall?.Invoke(true, !_characterController.FacingRight);
return;
}
if (!_rightTrigger && !_leftTrigger && (_touchingLeftWall || _touchingRightWall))
{
OnTouchingWall?.Invoke(false, false);
}
}
private void FixedUpdate()
{
if ((_touchingLeftWall || _touchingRightWall) && _timeGrabbingWall < maximumTimeGrabbingWall)
{
var velocity = _rigidBody2D.velocity;
velocity.y = velocity.y < 0 ? 0 : velocity.y;
_rigidBody2D.velocity = velocity;
}
_rightTrigger = CheckTrigger(rightCheck);
_leftTrigger = CheckTrigger(leftCheck);
}
public void Jump()
{
_timeGrabbingWall = 0;
_characterController.StartJumpWithOptions(true, _touchingRightWall ? 25 : -25, wallJump);
_characterController.AirControl = false;
_waitSeconds.Wait();
}
public bool CanWallJump()
{
if (!_touchingLeftWall && !_touchingRightWall)
return false;
var y = transform.position.x;
if (Math.Abs(y - _previousCollider) < 0.5f) return false;
_previousCollider = y;
return true;
}
private bool CheckTrigger(Transform trigger)
{
if (_characterController.Grounded) return false;
var size = Physics2D.OverlapCircleNonAlloc(trigger.position, TriggerRadius, _colliders, whatIsWall);
return size > 0;
}
private void WallTouched(bool isTouching, bool isRight)
{
if (!isTouching)
{
_touchingRightWall = false;
_touchingLeftWall = false;
return;
}
if (isRight) _touchingRightWall = true;
else _touchingLeftWall = true;
}
private void OnLand()
{
_timeGrabbingWall = 0;
_previousCollider = float.MaxValue;
OnTouchingWall?.Invoke(false, false);
}
private void RegainMovement()
{
_characterController.AirControl = true;
}
}
}<file_sep>/Assets/Scripts/UI/MenuButton.cs
using System;
using Levels;
using UnityEngine;
using UnityEngine.InputSystem;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
using Utils.Audio;
namespace UI
{
[RequireComponent(typeof(Button), typeof(Animator))]
public class MenuButton: MonoBehaviour
{
[SerializeField] private AudioClip clip;
private Animator _animator;
private static readonly int Tap = Animator.StringToHash("tap");
private InputAction _spaceAction;
private InputAction _enterAction;
private void Awake()
{
_animator = GetComponent<Animator>();
GetComponent<Button>().onClick.AddListener(TriggerButtonAnimation);
_spaceAction = new InputAction(binding: "/<Keyboard>/space");
_enterAction = new InputAction(binding: "/<Keyboard>/enter");
_spaceAction.performed += (_) => TriggerButtonAnimation();
_enterAction.performed += (_) => TriggerButtonAnimation();
_spaceAction.Enable();
_enterAction.Enable();
}
private void TriggerButtonAnimation()
{
_spaceAction.Dispose();
_enterAction.Dispose();
// LevelTransition.Instance.FadeIn();
_animator.SetTrigger(Tap);
AudioManager.Instance.PlaySound(clip);
}
private void FinishAnimation()
{
AudioManager.Instance.StopAllBackgroundMusic(AudioOptions.Default());
SceneManager.LoadScene(1);
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Bullet.cs
using System;
using System.Collections;
using Entities.Enemy.Ai;
using UnityEngine;
using Utils;
namespace Entities.Enemy
{
[RequireComponent(typeof(SpriteRenderer), typeof(Rigidbody2D))]
public class Bullet: Pooleable, IPausable
{
[SerializeField] private float speed;
[SerializeField] private float damage;
[SerializeField] private float timeToLive = 4f;
public float Damage
{
get => damage;
set => damage = value;
}
private SpriteRenderer _spriteRenderer;
private Rigidbody2D _rigidBody2D;
private Mover _mover;
private Func<IEnumerator> _destroyOnTimeOut;
private WaitForSeconds _timeToLive;
private Coroutine _destroyCoroutine;
private bool _paused;
private float _initialDamage;
private void Awake()
{
_initialDamage = damage;
_spriteRenderer = GetComponent<SpriteRenderer>();
_rigidBody2D = GetComponent<Rigidbody2D>();
_timeToLive = new WaitForSeconds(timeToLive);
_destroyOnTimeOut = DestroyOnTimeOut;
_destroyCoroutine = StartCoroutine(_destroyOnTimeOut());
_mover = new Mover(_spriteRenderer, _rigidBody2D, speed);
//TODO on activate change to bullet layer, reset damage
}
private void FixedUpdate()
{
if(!_paused) _mover.Move(transform.right);
}
private void OnCollisionEnter2D(Collision2D other)
{
var damageReceiver = other.collider.GetComponent<DamageReceiver>();
if (damageReceiver != null)
{
damageReceiver.ReceiveDamage(damage, transform.position);
}
DestroyBullet();
}
private IEnumerator DestroyOnTimeOut()
{
var timePassed = 0f;
while (timePassed < timeToLive)
{
if(!_paused) timePassed += Time.deltaTime;
yield return null;
}
DestroyBullet();
}
private void DestroyBullet()
{
StopCoroutine(_destroyCoroutine);
Deactivate();
}
public void Pause()
{
_paused = true;
_rigidBody2D.velocity = Vector2.zero;
}
public void UnPause()
{
_paused = false;
}
public override void Activate()
{
base.Activate();
Damage = _initialDamage;
gameObject.layer = LayerMask.NameToLayer("Bullet");
}
}
}<file_sep>/README.md
# Timebot
Game made for the [Gamedev.tv](https://itch.io/jam/gamedevtv-community-jam) game jam.
You can play the game [here](https://nachodlv.github.io/timebot).
## Introduction
Welcome to our testing facility.
It seems it's your first time being commissioned. Are you up to the task?
We have unplugged you from the main grid and it's your turn to show us your skills and reach the main battery at the end of each course to keep yourself alive!
Nevertheless, since time is of the essence and you are one of our latest models, we have equipped you with a very useful tool. This tool, a time manipulator, is still a prototype, so be wary of its duration.
Good luck out there!
## Instructions
- Move using the arrow keys or w a s d
- Press w or up key to jump (You can jump twice!)
- Press E or K to stop time
- Press Shift or L to dash
- Press Q or J to draw your sword, and Q or J again to attack
## Collaborators
- [<NAME>](https://github.com/JuanArambarri) - SFX and composer
- <NAME> - SFX, producer and composer
- <NAME> - Artist
## Screenshots




<file_sep>/Assets/Scripts/UI/GameOverButton.cs
using System;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
using Utils.Audio;
namespace UI
{
[RequireComponent(typeof(Button))]
public class GameOverButton : MonoBehaviour
{
[SerializeField] private AudioClip clip;
private void Awake()
{
GetComponent<Button>().onClick.AddListener(ButtonClicked);
}
private void ButtonClicked()
{
AudioManager.Instance.PlaySound(clip);
SceneManager.LoadScene(0);
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Audio/WeaponAudioHandler.cs
using System;
using UnityEngine;
using Utils.Audio;
namespace Entities.Enemy
{
[RequireComponent(typeof(EnemyWeapon))]
public class WeaponAudioHandler : MonoBehaviour
{
[SerializeField] private WeaponAudioReferences weaponAudioReferences;
private EnemyWeapon _enemyWeapon;
private void Awake()
{
_enemyWeapon = GetComponent<EnemyWeapon>();
_enemyWeapon.OnShoot += () => AudioManager.Instance.PlaySound(weaponAudioReferences.shoot.audioClip,
new AudioOptions {Volume = weaponAudioReferences.shoot.volume});
}
}
}<file_sep>/Assets/Scripts/Entities/Player/CharacterAnimator.cs
using System;
using Entities.Player.Abilities;
using Entities.Player.Attack;
using Entities.Player.Movement;
using UnityEngine;
using CharacterController = Entities.Player.Movement.CharacterController;
namespace Entities.Player
{
[RequireComponent(typeof(Animator))]
public class CharacterAnimator : MonoBehaviour
{
[SerializeField] private CharacterController characterController;
[SerializeField] private WallJumper wallJumper;
[SerializeField] private PlayerAttacker playerAttacker;
[SerializeField] private DashAbility dashAbility;
[SerializeField] private APlayer player;
public event Action OnAttackAnimation;
public event Action OnSwordDrawn;
private static readonly int JumpTrigger = Animator.StringToHash("jump");
private static readonly int GroundTrigger = Animator.StringToHash("ground");
private static readonly int WallBool = Animator.StringToHash("grabWall");
private static readonly int Speed = Animator.StringToHash("speed");
private static readonly int AttackTrigger = Animator.StringToHash("attack");
private static readonly int DashTrigger = Animator.StringToHash("dash");
private static readonly int DieTrigger = Animator.StringToHash("die");
private Animator _animator;
private bool _swordDrawn;
private void Awake()
{
_animator = GetComponent<Animator>();
characterController.OnJumpEvent += Jump;
characterController.OnLandEvent += Land;
wallJumper.OnTouchingWall += GrabWall;
playerAttacker.OnStartAttack += StartAttack;
dashAbility.OnDash += Dash;
player.OnDie += Die;
}
private void Update()
{
_animator.SetFloat(Speed, characterController.Grounded ? Mathf.Abs(characterController.Velocity.x) : 0);
}
private void Jump()
{
_animator.SetTrigger(JumpTrigger);
}
private void Land()
{
_animator.SetTrigger(GroundTrigger);
}
private void GrabWall(bool isGrabbed, bool isRight)
{
if(isGrabbed) _swordDrawn = false;
_animator.SetBool(WallBool, isGrabbed);
}
private void StartAttack()
{
if(!_swordDrawn) OnSwordDrawn?.Invoke();
_swordDrawn = true;
_animator.SetTrigger(AttackTrigger);
}
private void MakeAttack()
{
OnAttackAnimation?.Invoke();
}
private void Dash()
{
_animator.SetTrigger(DashTrigger);
}
private void MakeDash()
{
dashAbility.MakeDash();
}
private void Die()
{
_animator.SetTrigger(DieTrigger);
}
}
}<file_sep>/Assets/Scripts/Utils/Audio/AudioOptions.cs
namespace Utils.Audio
{
public struct AudioOptions
{
public float Volume;
public bool LowPassFilter;
public bool WithFade;
public float FadeSpeed;
public static AudioOptions Default()
{
return new AudioOptions
{
Volume = 1,
LowPassFilter = false,
WithFade = false,
FadeSpeed = 1,
};
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/States/PlayAnimationState.cs
using UnityEngine;
namespace Entities.Enemy.Ai.States
{
public class PlayAnimationState : MoverState
{
public bool Finished { get; private set; }
private readonly float _animationtime;
private float _remainingTime;
private readonly int _animationTrigger;
private bool _hasRemainingTime;
private Animator _animator;
public PlayAnimationState(Animator animator, Transform player, Mover mover, string animationTrigger,
float animationTime) : base(mover, animator.transform, player)
{
_animationTrigger = Animator.StringToHash(animationTrigger);
_animationtime = animationTime;
_animator = animator;
}
public override void Tick()
{
_remainingTime -= Time.deltaTime;
if (_remainingTime <= 0) Finished = true;
}
public override void FixedTick()
{
}
public override void OnEnter()
{
_remainingTime = _animationtime;
LookAtTarget();
_animator.SetTrigger(_animationTrigger);
}
public override void OnExit()
{
_remainingTime = 0;
Finished = false;
_hasRemainingTime = false;
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/Mover.cs
using UnityEngine;
namespace Entities.Enemy.Ai
{
public class Mover
{
private readonly Rigidbody2D _rigidBody2D;
private readonly SpriteRenderer _spriteRenderer;
private readonly float _smoothing;
private readonly float _speed;
private Vector2 _velocity;
public bool FacingRight { get; private set; }
public Mover(SpriteRenderer spriteRenderer, Rigidbody2D rigidBody2D, float speed, float smoothing = 0.1f)
{
_rigidBody2D = rigidBody2D;
_speed = speed;
_smoothing = smoothing;
_spriteRenderer = spriteRenderer;
}
public void Move(Vector2 direction)
{
Flip(direction);
_rigidBody2D.velocity =
Vector2.SmoothDamp(_rigidBody2D.velocity, direction * _speed, ref _velocity, _smoothing);
}
public void Flip(Vector2 direction)
{
if (direction.x > 0 && !FacingRight)
{
FlipTransform();
FacingRight = true;
} else if (direction.x < 0 && FacingRight)
{
FlipTransform();
FacingRight = false;
}
}
private void FlipTransform()
{
var transform = _spriteRenderer.transform;
var theScale = transform.localScale;
theScale.x *= -1;
transform.localScale = theScale;
}
}
}<file_sep>/Assets/Scripts/UI/SpritesProgressBar.cs
using System;
using System.Collections.Generic;
using Entities.Player;
using UnityEngine;
using UnityEngine.UI;
namespace UI
{
[RequireComponent(typeof(Image))]
public class SpritesProgressBar: MonoBehaviour
{
[SerializeField] private List<Sprite> sprites;
private Image _image;
private int _previousIndex;
private float _range;
private float _maxValue;
private void Awake()
{
_image = GetComponent<Image>();
}
public void SetUpMaxValue(float maxvalue)
{
_maxValue = maxvalue;
_range = _maxValue / sprites.Count;
_image.sprite = sprites[sprites.Count - 1];
_previousIndex = sprites.Count - 1;
}
public void UpdateValue(float currentValue)
{
if (currentValue > _maxValue)
{
SetUpMaxValue(currentValue);
return;
}
var newIndex = Mathf.CeilToInt(currentValue / _range);
if (newIndex == _previousIndex ) return;
if (newIndex >= sprites.Count) newIndex = sprites.Count - 1;
if (newIndex < 0) newIndex = 0;
_image.sprite = sprites[newIndex];
_previousIndex = newIndex;
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Enemies/EnemyAi.cs
using System;
using Entities.Enemy.Ai;
using Entities.Enemy.Ai.States;
using Entities.Player;
using UnityEngine;
using Utils;
namespace Entities.Enemy.Enemies
{
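/// <summary>
/// <para>Base class for enemy AI. Builds a state machine, detects the player inside the vision range with a line-of-sight check and forwards pause/unpause to the animator and rigidbody</para>
/// </summary>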
[RequireComponent(typeof(AEnemy))]
public abstract class EnemyAi: MonoBehaviour, IPausable
{
[SerializeField] private Animator animator;
[SerializeField] protected SpriteRenderer spriteRenderer;
[SerializeField] protected float visionRange = 5f;
[SerializeField] protected EnemyWeapon enemyWeapon;
[SerializeField] private LayerMask hitLayers;
public Animator Animator => animator;
public Rigidbody2D RigidBody { get; private set; }
protected Mover Mover { get; private set; }
protected Stats Stats => _enemy.Stats;
public event Action OnPlayerSight;
protected StateMachine StateMachine;
protected Transform Player;
private DistanceDetector _distanceDetector;
private bool _paused;
private AEnemy _enemy;
private bool _deadTriggered;
private bool _dead;
private void Awake()
{
_enemy = GetComponent<AEnemy>();
_enemy.OnDie += OnDie;
RigidBody = GetComponent<Rigidbody2D>();
Player = FindObjectOfType<APlayer>().transform;
_distanceDetector = gameObject.AddComponent<DistanceDetector>();
_distanceDetector.DetectionDistance = visionRange;
_distanceDetector.targetTag = "Player";
StateMachine = new StateMachine();
Mover = new Mover(spriteRenderer, RigidBody, Stats.Speed);
SetUpStates();
}
private void Update()
{
if(!_paused) StateMachine.Tick();
}
private void FixedUpdate()
{
if(!_paused) StateMachine.FixedTick();
}
protected bool PlayerInsideRange()
{
var colliders = _distanceDetector.GetColliders();
if (colliders.Count == 0) return false;
var position = transform.position;
Debug.DrawLine(position, colliders[0].position);
var hit = Physics2D.Linecast(position, colliders[0].position, hitLayers);
var hitTransform = hit.transform;
if (hitTransform == null)
return true;
var isPlayer = hitTransform.gameObject == colliders[0].gameObject;
return isPlayer;
}
protected Func<bool> FinishPlayingAnimation(PlayAnimationState state) => () => state.Finished;
protected bool EnemyDie()
{
if (!_dead || _deadTriggered) return false;
_deadTriggered = true;
return true;
}
protected abstract void SetUpStates();
public void Pause()
{
_paused = true;
Animator.speed = 0;
RigidBody.velocity = Vector2.zero;
}
public void UnPause()
{
_paused = false;
if(Animator != null) Animator.speed = 1;
}
private void OnDie()
{
_dead = true;
}
public void PlayerSighted()
{
OnPlayerSight?.Invoke();
}
}
}<file_sep>/Assets/Scripts/UI/TextShower.cs
using System;
using System.Collections;
using Entities.Player;
using UnityEngine;
using Utils;
namespace UI
{
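/// <summary>
/// <para>Fades a CanvasGroup in when the player enters the trigger, keeps it visible for a while and fades it out again. Can be restricted to a specific retry count</para>
/// </summary>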
public class TextShower : MonoBehaviour
{
[SerializeField] private CanvasGroup canvasGroup;
[SerializeField] private float timeShowing = 1f;
[SerializeField] private float fadeSpeed = 1f;
[SerializeField] private Collider2D collider2D;
[SerializeField] private int requiredDeads;
[SerializeField] private bool showOnlyOnce;
private Func<IEnumerator> _fadeIn;
private Func<IEnumerator> _fadeOut;
private WaitSeconds _waitSeconds;
private void Awake()
{
_fadeIn = FadeIn;
_fadeOut = FadeOut;
canvasGroup.alpha = 0;
_waitSeconds = new WaitSeconds(this, () => StartCoroutine(_fadeOut()), timeShowing);
}
private void OnTriggerEnter2D(Collider2D other)
{
var player = other.GetComponentInChildren<APlayer>();
if (player == null) return;
if(showOnlyOnce && player.RetryQuantity != requiredDeads) return;
ShowText();
collider2D.enabled = false;
}
private void ShowText()
{
StartCoroutine(_fadeIn());
_waitSeconds.Wait();
}
private IEnumerator FadeIn()
{
while (canvasGroup.alpha < 0.99f)
{
canvasGroup.alpha += Time.deltaTime * fadeSpeed;
yield return null;
}
}
private IEnumerator FadeOut()
{
while (canvasGroup.alpha > 0.01f)
{
canvasGroup.alpha -= Time.deltaTime * fadeSpeed;
yield return null;
}
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Audio/EnemyAudioHandler.cs
using Entities.Enemy.Enemies;
using Utils.Audio;
using UnityEngine;
namespace Entities.Enemy
{
public class EnemyAudioHandler : MonoBehaviour
{
[Header("Audios")]
[SerializeField] private EnemyAudioReferences enemyAudioReferences;
[Header("References")]
[SerializeField] private EnemyAi enemyAi;
[SerializeField] private AEnemy enemy;
[Header("Configuration")] [SerializeField]
private float timeBetweenPlayerSights = 2f;
private float lastPlayerSight;
private void Awake()
{
enemyAi.OnPlayerSight += () =>
{
var now = Time.time;
if (now - lastPlayerSight < timeBetweenPlayerSights) return;
lastPlayerSight = now;
PlaySound(enemyAudioReferences.onSight);
};
enemy.OnDamageReceive += () => PlaySound(enemyAudioReferences.damageReceive);
enemy.OnDie += () => PlaySound(enemyAudioReferences.die);
}
private void PlaySound(CustomAudioClip customAudioClip)
{
AudioManager.Instance.PlaySound(customAudioClip.audioClip, new AudioOptions
{
Volume = customAudioClip.volume
});
}
}
}<file_sep>/Assets/Scripts/Entities/DamageDealer.cs
using System;
using UnityEngine;
namespace Entities
{
public class DamageDealer : MonoBehaviour
{
[SerializeField] private float damage;
[SerializeField] private bool instantKill;
[SerializeField] private bool overrideInvincibility;
private void OnCollisionEnter2D(Collision2D other)
{
var damageReceiver = other.collider.GetComponent<DamageReceiver>();
if (damageReceiver == null) return;
damageReceiver.ReceiveDamage(damage, transform.position, instantKill, overrideInvincibility);
}
}
}<file_sep>/Assets/Scripts/Entities/Player/Abilities/TimeStopAbility.cs
using System;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.Rendering.Universal;
namespace Entities.Player.Abilities
{
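/// <summary>
/// <para>Pauses every IPausable in the scene while active, draining a time budget that recharges after a delay once the ability is released</para>
/// </summary>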
public class TimeStopAbility : MonoBehaviour
{
[SerializeField] private float maximumTime = 10f;
[SerializeField] private float rechargingDelay = 2f;
[SerializeField] private float rechargeRatio;
[SerializeField] private Light2D universalLight;
[SerializeField] private float lightIntensity = 0.2f;
[SerializeField] private float changeLightSpeed = 1f;
[SerializeField] private TrailRenderer trail;
public IPausable[] Pausables
{
set => _pausables = value;
}
public float TimeAvailableToStop { get; private set; }
private IPausable[] _pausables;
private bool _paused;
private float _unPausedTime;
private float _initialLightIntensity;
private void Awake()
{
Pausables = new IPausable[0];
TimeAvailableToStop = maximumTime;
_initialLightIntensity = universalLight.intensity;
trail.emitting = false;
}
private void Update()
{
ChangeLightingIfNeeded();
if (!_paused)
{
if (TimeAvailableToStop >= maximumTime || Time.time - _unPausedTime < rechargingDelay) return;
TimeAvailableToStop += rechargeRatio * Time.deltaTime;
return;
}
if(TimeAvailableToStop <= 0) UnPause();
TimeAvailableToStop -= Time.deltaTime;
}
public void Pause()
{
if (_paused)
{
UnPause();
return;
}
_paused = true;
_pausables = FindObjectsOfType<MonoBehaviour>().OfType<IPausable>().ToArray();
trail.emitting = true;
foreach (var pausable in _pausables)
{
pausable.Pause();
}
}
public void UnPause()
{
_unPausedTime = Time.time;
_paused = false;
trail.emitting = false;
foreach (var pausable in _pausables)
{
pausable.UnPause();
}
}
public void Fill()
{
TimeAvailableToStop = maximumTime;
}
private void ChangeLightingIfNeeded()
{
if (_paused && universalLight.intensity > lightIntensity)
{
universalLight.intensity -= Time.deltaTime * changeLightSpeed;
} else if (!_paused && universalLight.intensity < _initialLightIntensity)
{
universalLight.intensity += Time.deltaTime * changeLightSpeed;
}
}
}
}<file_sep>/Assets/Scripts/Utils/AxisDetection.cs
using System;
using UnityEngine;
namespace Utils
{
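/// <summary>
/// <para>Distinguishes single presses from double presses on an input axis, based on the time elapsed between two presses in the same direction</para>
/// </summary>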
public class AxisDetection
{
private readonly Action<float> _singlePress;
private readonly Action<float> _doublePress;
private readonly float _timeBetweenPresses;
private float _lastTimePress;
private bool _isPressed;
private bool _positiveValue;
public AxisDetection(Action<float> singlePress, Action<float> doublePress, float timeBetweenPresses)
{
_singlePress = singlePress;
_doublePress = doublePress;
_timeBetweenPresses = timeBetweenPresses;
}
public void Update(float axis)
{
if (Math.Abs(axis) > 0.01f)
{
if (!_isPressed && Time.time - _lastTimePress < _timeBetweenPresses && GoingToTheSameDirection(axis))
{
_doublePress(axis);
return;
}
_singlePress(axis);
_isPressed = true;
_positiveValue = axis > 0;
_lastTimePress = Time.time;
}
else
{
_isPressed = false;
}
}
private bool GoingToTheSameDirection(float axis)
{
return (_positiveValue && axis > 0) || (!_positiveValue && axis < 0);
}
}
}<file_sep>/Assets/Scripts/Entities/Player/RestoreDashOnTrigger.cs
using System;
using UnityEngine;
namespace Entities.Player
{
public class RestoreDashOnTrigger : MonoBehaviour
{
private void OnTriggerEnter2D(Collider2D other)
{
var player = other.GetComponent<APlayer>();
if (player == null) return;
player.DashAbility.RestoreDash();
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Audio/WeaponAudioReferences.cs
using UnityEngine;
using Utils.Audio;
namespace Entities.Enemy
{
[CreateAssetMenu(fileName = "Weapon Audio", menuName = "Audio/Weapon Audio", order = 1)]
public class WeaponAudioReferences : ScriptableObject
{
public CustomAudioClip shoot;
}
}<file_sep>/Assets/Scripts/UI/MuteButton.cs
using System;
using UnityEngine;
using UnityEngine.UI;
using Utils.Audio;
namespace UI
{
[RequireComponent(typeof(Button), typeof(Image))]
public class MuteButton : MonoBehaviour
{
[SerializeField] private Sprite mutedSprite;
[SerializeField] private Sprite unMutedSprite;
[SerializeField] private AudioClip clip;
private Image _image;
private bool muted;
private void Awake()
{
_image = GetComponent<Image>();
GetComponent<Button>().onClick.AddListener(ButtonClicked);
}
private void ButtonClicked()
{
AudioManager.Instance.PlaySound(clip);
if(muted) UnMute();
else Mute();
}
private void Mute()
{
AudioManager.Instance.Mute();
_image.sprite = mutedSprite;
muted = true;
}
private void UnMute()
{
AudioManager.Instance.UnMute();
_image.sprite = unMutedSprite;
muted = false;
}
}
}<file_sep>/Assets/Scripts/UI/TimeDisplayer.cs
using System;
using System.Globalization;
using Levels;
using TMPro;
using UnityEngine;
using UnityEngine.UI;
namespace UI
{
public class TimeDisplayer : MonoBehaviour
{
[SerializeField] private TextMeshProUGUI minutes;
[SerializeField] private TextMeshProUGUI seconds;
[SerializeField] private bool displayBestRun;
private void Start()
{
var timer = Timer.Instance;
if (!displayBestRun) return;
if (timer.HasBestRun)
{
SetUpTime(timer.BestRun);
}
else
{
gameObject.SetActive(false);
}
}
private void Update()
{
if(displayBestRun) return;
var timer = Timer.Instance;
if (timer.Paused) return;
SetUpTime(timer.CurrentTime);
}
private void SetUpTime(float time)
{
minutes.text = Mathf.Floor(time / 60).ToString("00");
seconds.text = (time % 60).ToString("00");
}
}
}<file_sep>/Assets/Scripts/Levels/Timer.cs
using System;
using System.Globalization;
using UnityEngine;
using UnityEngine.UI;
namespace Levels
{
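/// <summary>
/// <para>Persistent singleton that accumulates the current run time and keeps track of the best completed run</para>
/// </summary>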
public class Timer : MonoBehaviour
{
public float CurrentTime { get; private set; }
public bool Paused { get; private set; }
public static Timer Instance;
public float BestRun { get; private set; } = float.MaxValue;
public bool HasBestRun { get; private set; }
private void Awake()
{
if (Instance == null)
{
Instance = this;
DontDestroyOnLoad(gameObject);
}
else
{
Destroy(gameObject);
}
}
private void Update()
{
if (Paused) return;
CurrentTime += Time.deltaTime;
}
public void Pause()
{
Paused = true;
}
public void Resume()
{
Paused = false;
}
public void Begin()
{
Paused = false;
CurrentTime = 0;
}
public void Stop()
{
Pause();
HasBestRun = true;
BestRun = CurrentTime < BestRun ? CurrentTime : BestRun;
CurrentTime = 0;
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/EnemyWeapon.cs
using System;
using UnityEngine;
using Utils;
namespace Entities.Enemy
{
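/// <summary>
/// <para>Spawns a pooled bullet at the shooting point, oriented left or right, respecting the cooldown between shots. Pausing freezes the cooldown</para>
/// </summary>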
public class EnemyWeapon : MonoBehaviour, IPausable
{
[SerializeField] private float timeBetweenShoots;
[SerializeField] private Transform shootingPoint;
[SerializeField] private Pool.PoolType bulletType;
public event Action OnShoot;
private float _lastShoot;
private float _pausedTime;
public void Shoot(bool isRight)
{
_lastShoot = Time.time;
var bullet = GlobalPooler.Instance.GetBullet(bulletType).transform;
bullet.position = shootingPoint.position;
bullet.rotation = Quaternion.AngleAxis(isRight ? 0 : 180, Vector3.forward);
OnShoot?.Invoke();
}
public bool CanShoot()
{
return Time.time - _lastShoot > timeBetweenShoots;
}
public void Pause()
{
_pausedTime = Time.time;
}
public void UnPause()
{
_lastShoot += Time.time - _pausedTime;
}
}
}<file_sep>/Assets/Scripts/Platforms/ConstantTriggerPlatform.cs
using UnityEngine;
namespace Platforms
{
public class ConstantTriggerPlatform: PlayerInteractablePlatform
{
private static readonly int PlayerExit = Animator.StringToHash("playerExit");
private void OnTriggerExit2D(Collider2D other)
{
if (!IsPlayer(other)) return;
Animator.SetTrigger(PlayerExit);
}
}
}<file_sep>/Assets/Scripts/Levels/LevelManager.cs
using System;
using System.Collections;
using System.Linq;
using Cinemachine;
using Entities;
using Entities.Player;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace Levels
{
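/// <summary>
/// <para>Loads levels additively, repositions the player and the virtual camera on each level change and switches to the game over scene after the last level</para>
/// </summary>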
public class LevelManager : MonoBehaviour
{
[SerializeField] private LevelSettings[] levels;
[SerializeField] private APlayer player;
[SerializeField] private int gameOverSceneIndex = 10;
public event Action<LevelSettings> OnLevelChange;
private int _currentLevel;
private int _previousLevel;
private CinemachineFramingTransposer _camera;
private int _retryQuantity;
private LevelSettings CurrentLevel => levels[_currentLevel];
private LevelSettings PreviousLevel => levels[_previousLevel];
private void Awake()
{
_camera = FindObjectOfType<CinemachineVirtualCamera>()
.GetCinemachineComponent<CinemachineFramingTransposer>();
Timer.Instance.Begin();
LoadLevel();
}
public void FinishLevel()
{
_retryQuantity = 0;
_previousLevel = _currentLevel;
if (_currentLevel >= levels.Length - 1)
{
GameOver();
return;
}
_currentLevel++;
LoadCurrentLevelWithFade(PreviousLevel.index);
}
public void ResetLevel()
{
if (_currentLevel == levels.Length - 1)
{
GameOver();
return;
}
_retryQuantity++;
LoadCurrentLevelWithFade(CurrentLevel.index);
}
private void LoadCurrentLevelWithFade(int sceneToUnload)
{
LevelTransition.Instance.FadeIn();
var unloadScene = SceneManager.UnloadSceneAsync(sceneToUnload);
unloadScene.completed += (_) => LoadLevel();
}
private void LoadLevel()
{
var loadSceneAsync = SceneManager.LoadSceneAsync(CurrentLevel.index, LoadSceneMode.Additive);
loadSceneAsync.completed += operation =>
{
SetUpPlayer();
LevelTransition.Instance.FadeOut();
OnLevelChange?.Invoke(CurrentLevel);
};
}
private void SetUpPlayer()
{
var playerTransform = player.transform;
var previousPosition = playerTransform.position;
playerTransform.position = CurrentLevel.playerPosition;
player.StartLevel(CurrentLevel.time, _retryQuantity);
_camera.OnTargetObjectWarped(player.transform, CurrentLevel.playerPosition - (Vector2) previousPosition);
}
private void GameOver()
{
// yield return new WaitForSeconds(1);
Timer.Instance.Stop();
SceneManager.LoadScene(gameOverSceneIndex);
}
}
}<file_sep>/Assets/Scripts/Utils/CoroutineQueue.cs
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// <para>Runs multiple coroutines one after the other</para>
/// </summary>
public class CoroutineQueue
{
private readonly Queue<IEnumerator> _coroutines;
private bool _executingCoroutines;
private readonly MonoBehaviour _monoBehaviour;
private readonly Func<IEnumerator> _executeCoroutines;
public CoroutineQueue(MonoBehaviour monoBehaviour, int initialQuantity)
{
_monoBehaviour = monoBehaviour;
_coroutines = new Queue<IEnumerator>(initialQuantity);
_executeCoroutines = ExecuteCoroutines;
}
/// <summary>
/// <para>Adds a new coroutine to the queue. If no coroutine is executing, it will start executing the
/// coroutine passed as parameter</para>
/// </summary>
/// <param name="coroutine"></param>
public void AddCoroutine(IEnumerator coroutine)
{
_coroutines.Enqueue(coroutine);
if(_executingCoroutines) return;
_executingCoroutines = true;
_monoBehaviour.StartCoroutine(_executeCoroutines());
}
private IEnumerator ExecuteCoroutines()
{
while (_coroutines.Count > 0)
{
yield return _monoBehaviour.StartCoroutine(_coroutines.Peek());
_coroutines.Dequeue();
}
_executingCoroutines = false;
}
}
<file_sep>/Assets/Scripts/Entities/Grabbables/BatteryDropper.cs
using System;
using Entities.Enemy;
using UnityEngine;
using Utils;
using Random = UnityEngine.Random;
namespace Entities.Grabbables
{
[RequireComponent(typeof(AEnemy))]
public class BatteryDropper : MonoBehaviour
{
[SerializeField] private int minQuantity = 1;
[SerializeField] private int maxQuantity = 1;
[SerializeField] private float healthRestore = 10;
[SerializeField] private float dropForce = 1f;
private void Awake()
{
GetComponent<AEnemy>().OnDie += DropBatteries;
}
private void DropBatteries()
{
var quantity = Random.Range(minQuantity, maxQuantity);
var position = transform.position;
for (var i = 0; i < quantity; i++)
{
var miniBattery = GlobalPooler.Instance.NextMiniBattery;
miniBattery.transform.position = position;
miniBattery.HealthQuantity = healthRestore;
miniBattery.Rigidbody2D.AddForce(new Vector2(Random.Range(-dropForce, dropForce), dropForce));
}
}
}
}<file_sep>/Assets/Scripts/Entities/Player/APlayer.cs
using System;
using System.Collections;
using Entities.Player.Abilities;
using UI;
using UnityEngine;
namespace Entities.Player
{
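/// <summary>
/// <para>Player entity whose health doubles as the remaining level time: it drains every frame and the level resets when it reaches zero</para>
/// </summary>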
[RequireComponent(typeof(TimeStopAbility), typeof(DashAbility))]
public class APlayer : DamageReceiver, IHaveStats
{
[SerializeField] private Stats stats;
[SerializeField, UnityEngine.Serialization.FormerlySerializedAs("healthDiplayer")]
private SpritesProgressBar healthDisplayer;
[SerializeField, Range(0, 1)] private float lowHealthPercentage = 0.2f;
public event Action OnDie;
public event Action OnResetLevel;
public event Action<bool> OnLowHealth;
public Stats Stats => stats;
public TimeStopAbility TimeStopAbility { get; private set; }
public DashAbility DashAbility { get; private set; }
public int RetryQuantity { get; private set; }
private float _maxHealth;
private bool _onLowHealth;
protected override void Awake()
{
base.Awake();
TimeStopAbility = GetComponent<TimeStopAbility>();
DashAbility = GetComponent<DashAbility>();
}
private void Update()
{
if (Dead) return;
UpdateHealth(stats.Health - Time.deltaTime);
}
public void StartLevel(float time, int retryQuantity)
{
Dead = false;
TimeStopAbility.Fill();
DashAbility.RestoreDash();
_maxHealth = time;
stats.Health = time;
healthDisplayer.SetUpMaxValue(time);
RetryQuantity = retryQuantity;
}
public void UpdateHealth(float newHealth)
{
stats.Health = newHealth;
if(!Dead) healthDisplayer.UpdateValue(stats.Health);
if (stats.Health <= 0)
{
Dead = true;
StartCoroutine(WaitForAnimationToEnd());
return;
}
if (_maxHealth * lowHealthPercentage > newHealth)
{
if (!_onLowHealth)
{
_onLowHealth = true;
OnLowHealth?.Invoke(true);
}
}
else if (_onLowHealth)
{
_onLowHealth = false;
OnLowHealth?.Invoke(false);
}
}
protected override bool DealDamage(float damage, bool instantKill)
{
if (Dead) return true;
UpdateHealth(instantKill ? 0 : stats.Health - damage);
return stats.Health <= 0;
}
private IEnumerator WaitForAnimationToEnd()
{
OnDie?.Invoke();
RigidBody2D.velocity = Vector2.zero;
yield return new WaitForSeconds(1.67f);
OnResetLevel?.Invoke();
ResetPlayer();
}
private void ResetPlayer()
{
TimeStopAbility.UnPause();
}
}
}<file_sep>/Assets/Scripts/Entities/Grabbables/MiniBattery.cs
using Entities.Player;
using UnityEngine;
namespace Entities.Grabbables
{
public class MiniBattery: Grabbable
{
[SerializeField] private float healthQuantity;
public float HealthQuantity
{
set => healthQuantity = value;
}
protected override void Grabbed(Grabber grabber)
{
var player = grabber.GetComponentInParent<APlayer>();
if (player == null) return;
player.UpdateHealth(player.Stats.Health + healthQuantity);
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/States/DestroySelfState.cs
using UnityEngine;
namespace Entities.Enemy.Ai.States
{
public class DestroySelfState: IState
{
private GameObject _gameObject;
public DestroySelfState(GameObject gameObject)
{
_gameObject = gameObject;
}
public void Tick()
{
}
public void FixedTick()
{
}
public void OnEnter()
{
Object.Destroy(_gameObject);
}
public void OnExit()
{
}
}
}<file_sep>/Assets/Scripts/GameController.cs
using Entities.Player;
using Levels;
using UnityEngine;
public class GameController: MonoBehaviour
{
[SerializeField] private LevelManager levelManager;
[SerializeField] private APlayer player;
private void Awake()
{
player.OnResetLevel += ResetLevel;
}
public void FinishLevel()
{
levelManager.FinishLevel();
}
private void ResetLevel()
{
levelManager.ResetLevel();
}
}<file_sep>/Assets/Scripts/Entities/Player/Attack/PlayerAttacker.cs
using System;
using Entities.Enemy;
using UnityEngine;
using CharacterController = Entities.Player.Movement.CharacterController;
namespace Entities.Player.Attack
{
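/// <summary>
/// <para>Performs the sword attack with a box cast in front of the player, damaging DamageReceivers and reflecting enemy bullets back with a damage multiplier</para>
/// </summary>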
[RequireComponent(typeof(Collider2D))]
public class PlayerAttacker : MonoBehaviour
{
[SerializeField] private Sword sword;
[SerializeField] private CharacterController characterController;
[SerializeField] private float timeBetweenAttacks = 1f;
[SerializeField] private CharacterAnimator animator;
[SerializeField] private float reflectionMultiplier = 1f;
public event Action OnStartAttack;
public event Action OnReflectBullet;
private bool _swordDisplayed;
private Collider2D _collider;
private RaycastHit2D[] _hits;
private float _lastAttack;
private void Awake()
{
_collider = GetComponent<Collider2D>();
_hits = new RaycastHit2D[5];
animator.OnAttackAnimation += MakeAttack;
}
public void Attack()
{
if (!CanAttack()) return;
_lastAttack = Time.time;
OnStartAttack?.Invoke();
}
private void MakeAttack()
{
var myPosition = transform.position;
var center = myPosition;
center.x += (_collider.bounds.extents.x + sword.Range) * (characterController.FacingRight ? 1 : -1);
var hitsQuantity = Physics2D.BoxCast(
center, GetAttackRange(), 0, Vector2.zero, new ContactFilter2D(), _hits, 0);
for (var i = 0; i < hitsQuantity; i++)
{
AttackCollider(_hits[i]);
}
}
private void AttackCollider(RaycastHit2D hit)
{
var damageReceiver = hit.collider.GetComponent<DamageReceiver>();
var bullet = hit.collider.GetComponent<Bullet>();
if (bullet != null) ReflectBullet(bullet);
if (damageReceiver == null) return;
damageReceiver.ReceiveDamage(sword.Damage, transform.position);
}
private void OnDrawGizmos()
{
var bounds = GetComponent<CapsuleCollider2D>().bounds;
var center = transform.position;
center.x += (bounds.extents.x + sword.Range) * (characterController.FacingRight ? 1 : -1);
var size = Vector2.one * bounds.size.y;
size.x = sword.Range;
Gizmos.DrawCube(center, size);
}
private bool CanAttack()
{
return Time.time - _lastAttack > timeBetweenAttacks;
}
private Vector2 GetAttackRange()
{
var size = Vector2.one * _collider.bounds.size.y;
size.x = sword.Range;
return size;
}
private void ReflectBullet(Bullet bullet)
{
var bulletTransform = bullet.transform;
var direction = (bulletTransform.position - transform.position).normalized;
direction.y = bulletTransform.right.y;
bullet.gameObject.layer = LayerMask.NameToLayer("Player");
bullet.Damage *= reflectionMultiplier;
bulletTransform.right = direction;
OnReflectBullet?.Invoke();
}
}
}<file_sep>/Assets/Scripts/UI/DashDisplayer.cs
using System;
using Entities.Player.Abilities;
using UnityEngine;
namespace UI
{
[RequireComponent(typeof(CanvasGroup))]
public class DashDisplayer : MonoBehaviour
{
[SerializeField] private DashAbility dashAbility;
[SerializeField, Range(0, 1)] private float percentageLeftToBlink;
[SerializeField] private float blinkSpeed = 1f;
private CanvasGroup _canvasGroup;
private bool _hided;
private bool _blinkFadeIn;
private void Awake()
{
_canvasGroup = GetComponent<CanvasGroup>();
dashAbility.OnDash += Dash;
}
private void Update()
{
if (!_hided) return;
var now = Time.time;
if(now - dashAbility.LastDash > dashAbility.TimeBetweenDashes) Show();
else if(now - dashAbility.LastDash > dashAbility.TimeBetweenDashes * percentageLeftToBlink) Blink();
}
private void Dash()
{
Hide();
}
private void Hide()
{
_canvasGroup.alpha = 0;
_hided = true;
}
private void Show()
{
_canvasGroup.alpha = 1;
_hided = false;
}
private void Blink()
{
if (_blinkFadeIn)
{
if (_canvasGroup.alpha < 0.99f) _canvasGroup.alpha += Time.deltaTime * blinkSpeed;
else _blinkFadeIn = false;
}
else
{
if (_canvasGroup.alpha > 0.01f) _canvasGroup.alpha -= Time.deltaTime * blinkSpeed;
else _blinkFadeIn = true;
}
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/States/MultipleStates.cs
using System.Collections.Generic;
namespace Entities.Enemy.Ai.States
{
public class MultipleStates: IState
{
private readonly List<IState> _states;
public MultipleStates(List<IState> states)
{
_states = states;
}
public void Tick()
{
foreach (var state in _states)
{
state.Tick();
}
}
public void FixedTick()
{
foreach (var state in _states)
{
state.FixedTick();
}
}
public void OnEnter()
{
foreach (var state in _states)
{
state.OnEnter();
}
}
public void OnExit()
{
foreach (var state in _states)
{
state.OnExit();
}
}
}
}<file_sep>/Assets/Scripts/Entities/DamageReceiver.cs
using System;
using System.Collections;
using UnityEngine;
namespace Entities
{
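/// <summary>
/// <para>Base class for anything that can take damage. Applies knockback, blinks the sprites while invincible and delegates the actual health change to DealDamage</para>
/// </summary>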
[RequireComponent(typeof(Rigidbody2D))]
public abstract class DamageReceiver : MonoBehaviour
{
[SerializeField] private float forceAppliedOnHit = 1f;
[SerializeField] private float invincibleTime = 1f;
[SerializeField] private float timeBetweenBlinks = 0.2f;
[SerializeField] private SpriteRenderer[] spriteRenderers;
public bool Invincible { get; set; }
public event Action OnDamageReceive;
protected Rigidbody2D RigidBody2D { get; private set; }
protected bool Dead { get; set; }
private WaitForSeconds _timeBetweenBlinks;
private Func<IEnumerator> _blinkFunction;
private Coroutine _blinkCoroutine;
protected virtual void Awake()
{
RigidBody2D = GetComponent<Rigidbody2D>();
_timeBetweenBlinks = new WaitForSeconds(timeBetweenBlinks);
_blinkFunction = StartBlinking;
}
public void ReceiveDamage(float damage, Vector3 positionAttacker, bool instantKill = false,
bool overrideInvincible = false)
{
if ((Invincible && !overrideInvincible) || Dead) return;
OnDamageReceive?.Invoke();
Dead = DealDamage(damage, instantKill);
if (Dead) return;
_blinkCoroutine = StartCoroutine(_blinkFunction());
var direction = (transform.position - positionAttacker).normalized;
RigidBody2D.AddForce(direction * forceAppliedOnHit);
}
protected abstract bool DealDamage(float damage, bool instantKill);
private IEnumerator StartBlinking()
{
Invincible = true;
var now = Time.time;
var hide = false;
while (Time.time - now < invincibleTime)
{
hide = !hide;
ChangeAlphaSpriteRender(hide);
yield return _timeBetweenBlinks;
}
Invincible = false;
ChangeAlphaSpriteRender(false);
}
private void ChangeAlphaSpriteRender(bool hide)
{
for (var i = 0; i < spriteRenderers.Length; i++)
{
var color = spriteRenderers[i].color;
color.a = hide ? 0 : 1;
spriteRenderers[i].color = color;
}
}
private void OnDestroy()
{
if (_blinkCoroutine != null) StopCoroutine(_blinkCoroutine);
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/States/AttackState.cs
using UnityEngine;
namespace Entities.Enemy.Ai.States
{
public class AttackState : MoverState
{
private static readonly int Shoot = Animator.StringToHash("shoot");
private EnemyWeapon _enemyWeapon;
private Animator _animator;
private bool _hasAnimator;
public AttackState(EnemyWeapon enemyWeapon, Animator animator, Mover mover, Transform player) : base(mover,
animator.transform, player)
{
_enemyWeapon = enemyWeapon;
_animator = animator;
_hasAnimator = true;
}
public AttackState(EnemyWeapon enemyWeapon, Transform self, Mover mover, Transform player) : base(mover, self, player)
{
_enemyWeapon = enemyWeapon;
_hasAnimator = false;
}
public override void Tick()
{
LookAtTarget();
if (!_enemyWeapon.CanShoot()) return;
_enemyWeapon.Shoot(Mover.FacingRight);
if(_hasAnimator) _animator.SetTrigger(Shoot);
}
public override void FixedTick()
{
}
public override void OnEnter()
{
}
public override void OnExit()
{
}
}
}<file_sep>/Assets/Scripts/UI/ResetButton.cs
using System;
using Levels;
using UnityEngine;
using UnityEngine.UI;
using Utils.Audio;
namespace UI
{
[RequireComponent(typeof(Button))]
public class ResetButton : MonoBehaviour
{
[SerializeField] private LevelManager levelManager;
[SerializeField] private PauseButton pauseButton;
[SerializeField] private AudioClip audioClip;
private void Awake()
{
GetComponent<Button>().onClick.AddListener(ButtonClicked);
}
private void ButtonClicked()
{
AudioManager.Instance.PlaySound(audioClip);
pauseButton.UnPause();
levelManager.ResetLevel();
}
}
}<file_sep>/Assets/Scripts/Entities/Player/CharacterInputHandler.cs
using System;
using Entities.Player.Attack;
using Entities.Player.Movement;
using Input;
using UnityEngine;
using UnityEngine.InputSystem;
using Utils;
using CharacterController = Entities.Player.Movement.CharacterController;
namespace Entities.Player
{
[RequireComponent(
typeof(CharacterController),
typeof(WallJumper),
typeof(PlayerAttacker))]
public class CharacterInputHandler : MonoBehaviour
{
[SerializeField] private APlayer aPlayer;
[SerializeField] private float timeForDoublePress = 0.4f;
private CharacterController _characterController;
private WallJumper _wallJumper;
private PlayerAttacker _playerAttacker;
private float _movement;
private bool _jump;
private bool _crouch;
private InputActionController _controller;
private bool _isGrabbingWall;
private float _lastTimePressed;
private AxisDetection _axisDetection;
private bool _enabled = true;
private void Awake()
{
_characterController = GetComponent<CharacterController>();
_wallJumper = GetComponent<WallJumper>();
_controller = new InputActionController();
_playerAttacker = GetComponent<PlayerAttacker>();
_controller.Player.Jump.started += ctx => PlayerFunction(StartJumping);
_controller.Player.Jump.canceled += ctx => PlayerFunction(StopJumping);
_controller.Player.Jump.performed += ctx => PlayerFunction(StopJumping);
_controller.Player.Crouch.performed += ctx => PlayerFunction(Crouch);
_controller.Player.Attack.performed += ctx => PlayerFunction(Attack);
_controller.Player.TimeStop.performed += ctx => PlayerFunction(TimeStopAbility);
_controller.Player.Dash.performed += ctx => PlayerFunction(Dash);
_wallJumper.OnTouchingWall += (grabbing, right) => WallGrabbed(grabbing);
aPlayer.OnDie += () => _enabled = false;
aPlayer.OnResetLevel += () => _enabled = true;
// _axisDetection = new AxisDetection(Move, Dash, timeForDoublePress);
}
private void Update()
{
// _axisDetection.Update(_controller.Player.Move.ReadValue<float>());
_movement = _controller.Player.Move.ReadValue<float>() * aPlayer.Stats.Speed;
}
private void FixedUpdate()
{
if (!_enabled) return;
if (_jump)
{
if(_wallJumper.CanWallJump()) _wallJumper.Jump();
else _characterController.StartJumping();
_jump = false;
}
_characterController.Move(_movement, _crouch, !_isGrabbingWall);
_movement = 0;
_crouch = false;
}
private void PlayerFunction(Action playerFunction)
{
if (_enabled) playerFunction();
}
private void Attack()
{
if(!_isGrabbingWall) _playerAttacker.Attack();
}
private void StartJumping()
{
_jump = true;
}
private void StopJumping()
{
_characterController.StopJumping();
}
private void Crouch()
{
_crouch = true;
}
public void OnEnable()
{
_controller.Enable();
}
public void OnDisable()
{
_controller.Disable();
}
private void WallGrabbed(bool isGrabbing)
{
_isGrabbingWall = isGrabbing;
}
private void TimeStopAbility()
{
aPlayer.TimeStopAbility.Pause();
}
private void Dash()
{
aPlayer.DashAbility.Dash();
}
private void Move(float axis)
{
_movement = axis * aPlayer.Stats.Speed;
}
}
}
<file_sep>/Assets/Scripts/Levels/LevelTransition.cs
using System;
using UnityEngine;
namespace Levels
{
[RequireComponent(typeof(Animator))]
public class LevelTransition: MonoBehaviour
{
public static LevelTransition Instance { get; private set; }
private Animator _animator;
private static readonly int FadeInTrigger = Animator.StringToHash("fadeIn");
private static readonly int FadeOutTrigger = Animator.StringToHash("fadeOut");
private void Awake()
{
if (Instance == null)
{
Instance = this;
DontDestroyOnLoad(gameObject);
}
else
{
Destroy(gameObject);
return;
}
_animator = GetComponent<Animator>();
}
public void FadeIn()
{
_animator.SetTrigger(FadeInTrigger);
}
public void FadeOut()
{
_animator.SetTrigger(FadeOutTrigger);
}
}
}<file_sep>/Assets/Scripts/Utils/Audio/CustomAudioClip.cs
using System;
using UnityEngine;
namespace Utils.Audio
{
[Serializable]
public class CustomAudioClip
{
public AudioClip audioClip;
[Range(0,1)] public float volume = 1;
}
}<file_sep>/Assets/Scripts/Utils/WaitSeconds.cs
using System;
using System.Collections;
using UnityEngine;
namespace Utils
{
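/// <summary>
/// <para>Small helper that starts a coroutine waiting a fixed (or given) amount of seconds before invoking a callback</para>
/// </summary>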
public class WaitSeconds
{
private readonly WaitForSeconds _waitTime;
private readonly Action _callback;
private readonly Func<WaitForSeconds, IEnumerator> _waitFunction;
private readonly MonoBehaviour _monoBehaviour;
public WaitSeconds(MonoBehaviour monoBehaviour, Action callback, float time = 0)
{
_callback = callback;
_waitTime = new WaitForSeconds(time);
_monoBehaviour = monoBehaviour;
_waitFunction = WaitForSeconds;
}
public void Wait(float time)
{
Wait(new WaitForSeconds(time));
}
public void Wait()
{
Wait(_waitTime);
}
private void Wait(WaitForSeconds waitingTime)
{
_monoBehaviour.StartCoroutine(_waitFunction(waitingTime));
}
private IEnumerator WaitForSeconds(WaitForSeconds waitingTime)
{
yield return waitingTime;
_callback();
}
}
}<file_sep>/Assets/Scripts/Platforms/PlayerInteractablePlatform.cs
using System;
using System.Collections;
using Entities;
using UnityEngine;
using Utils;
namespace Platforms
{
public class PlayerInteractablePlatform : MonoBehaviour, IPausable
{
[SerializeField] private Animator _animator;
[SerializeField] private float timeBeforeTriggering;
protected Animator Animator => _animator;
protected event Action OnTriggerEnter;
protected bool Paused { get; private set; }
private static readonly int PlayerEnterTrigger = Animator.StringToHash("playerEnter");
private WaitSeconds _waitSeconds;
private bool _triggered;
protected virtual void Awake()
{
_waitSeconds = new WaitSeconds(this, PlayerEnter, timeBeforeTriggering);
}
private void OnTriggerEnter2D(Collider2D other)
{
if (!IsPlayer(other)) return;
_waitSeconds.Wait();
}
protected static bool IsPlayer(Collider2D collider)
{
return collider.gameObject.CompareTag("Player");
}
private void PlayerEnter()
{
if (Paused)
{
_triggered = true;
return;
}
_animator.SetTrigger(PlayerEnterTrigger);
OnTriggerEnter?.Invoke();
}
public void Pause()
{
Paused = true;
Animator.speed = 0;
}
public virtual void UnPause()
{
Paused = false;
if(Animator != null) Animator.speed = 1;
if (!_triggered) return;
PlayerEnter();
_triggered = false;
}
}
}<file_sep>/Assets/Scripts/Entities/Grabbables/Grabbable.cs
using System;
using System.Collections;
using UnityEngine;
using Utils;
namespace Entities.Grabbables
{
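/// <summary>
/// <para>Pooled pickup that, once a Grabber comes near, smoothly homes towards it and applies its effect when the player touches it</para>
/// </summary>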
[RequireComponent(typeof(Rigidbody2D))]
public abstract class Grabbable: Pooleable
{
private const float SmoothSpeed = 0.5f;
[SerializeField] private Animator animator;
[SerializeField] private float speed;
[SerializeField] private float maxSpeed;
public Rigidbody2D Rigidbody2D => _rigidbody2D;
private Rigidbody2D _rigidbody2D;
private Vector3 _velocity = Vector3.zero;
private bool _goingToTarget;
private bool _reachedTarget;
private Func<Grabber, IEnumerator> _goToTargetFunction;
private static readonly int Idle = Animator.StringToHash("idle");
private Grabber _grabber;
protected void Awake()
{
_goToTargetFunction = GoToTarget;
_rigidbody2D = GetComponent<Rigidbody2D>();
}
protected abstract void Grabbed(Grabber grabber);
public void OnGrabberNear(Grabber grabber)
{
if (_goingToTarget) return;
_goingToTarget = true;
_grabber = grabber;
StartCoroutine(_goToTargetFunction(grabber));
}
public override void Activate()
{
base.Activate();
_rigidbody2D.isKinematic = false;
}
private IEnumerator GoToTarget(Grabber target)
{
var grabberTransform = target.transform;
_reachedTarget = false;
while (!_reachedTarget)
{
var currentPosition = transform.position;
transform.position = Vector3.SmoothDamp(currentPosition, grabberTransform.position, ref _velocity, SmoothSpeed, speed);
yield return null;
}
}
private void ReachedGrabber()
{
Grabbed(_grabber);
animator.SetBool(Idle, false);
Deactivate();
}
private void OnTriggerEnter2D(Collider2D other)
{
if (other.CompareTag("Player") && _goingToTarget)
{
_reachedTarget = true;
_goingToTarget = false;
ReachedGrabber();
}
else if (other.CompareTag("Floor"))
{
_rigidbody2D.isKinematic = true;
var position = _rigidbody2D.position;
position.y += 0.3f;
_rigidbody2D.position = position;
animator.SetBool(Idle, true);
_rigidbody2D.velocity = Vector2.zero;
}
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/States/MoverState.cs
using UnityEngine;
namespace Entities.Enemy.Ai.States
{
public abstract class MoverState: IState
{
private readonly Transform _player;
protected readonly Mover Mover;
protected readonly Transform _self;
protected MoverState(Mover mover, Transform self, Transform player)
{
Mover = mover;
_player = player;
_self = self;
}
public abstract void Tick();
public abstract void FixedTick();
public abstract void OnEnter();
public abstract void OnExit();
protected void LookAtTarget()
{
Mover.Flip(_player.position.x < _self.position.x ? new Vector2(-1, 0) : new Vector2(1, 0));
}
}
}<file_sep>/Assets/Scripts/Entities/IHaveStats.cs
namespace Entities
{
public interface IHaveStats
{
Stats Stats { get; }
}
}<file_sep>/Assets/Scripts/Utils/Audio/ChangeAudioOnTrigger.cs
using System;
using UnityEngine;
namespace Utils.Audio
{
[RequireComponent(typeof(Collider2D))]
public class ChangeAudioOnTrigger : MonoBehaviour
{
[SerializeField] private CustomAudioClip audioClip;
[SerializeField] private float fadeSpeed = 1f;
private Collider2D _collider;
private void Awake()
{
_collider = GetComponent<Collider2D>();
}
private void OnTriggerEnter2D(Collider2D other)
{
if (!other.CompareTag("Player")) return;
AudioManager.Instance.StopAllBackgroundMusic(new AudioOptions
{Volume = 1, FadeSpeed = fadeSpeed, WithFade = true});
AudioManager.Instance.PlaySound(audioClip.audioClip,
new AudioOptions {Volume = audioClip.volume, FadeSpeed = fadeSpeed, WithFade = true});
_collider.enabled = false;
}
}
}<file_sep>/Assets/Scripts/Entities/Stats.cs
using System;
using UnityEngine;
namespace Entities
{
[Serializable]
public class Stats
{
[SerializeField] private float health;
[SerializeField] private float speed;
public float Speed => speed;
public float Health
{
get => health;
set => health = value;
}
}
}<file_sep>/Assets/Scripts/Utils/GlobalPooler.cs
using System;
using System.Collections.Generic;
using Entities.Enemy;
using Entities.Grabbables;
using Levels;
using UnityEngine;
namespace Utils
{
[Serializable]
public class Pool
{
public int quantity;
public GameObject prefab;
public PoolType poolType;
public enum PoolType
{
Battery,
BlueCannon,
RedCannon,
Laser,
GreenCannon,
BlueLaser
}
}
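/// <summary>
/// <para>Singleton that owns the mini battery pool and one bullet pool per PoolType, deactivating every pooled object when the level changes</para>
/// </summary>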
public class GlobalPooler : MonoBehaviour
{
[SerializeField] private Pool batteryPool;
[SerializeField] private Pool[] bulletPools;
[SerializeField] private LevelManager levelManager;
public MiniBattery NextMiniBattery => _miniBatteryPooler.GetNextObject();
public static GlobalPooler Instance;
private ObjectPooler<MiniBattery> _miniBatteryPooler;
private Dictionary<Pool.PoolType, ObjectPooler<Bullet>> _bulletPools;
private void Awake()
{
if (Instance == null)
{
Instance = this;
}
else
{
Destroy(this);
return;
}
_miniBatteryPooler = CreatePool<MiniBattery>(batteryPool);
levelManager.OnLevelChange += (settings) => DeactivatePools();
InitializeBulletPools();
}
public Bullet GetBullet(Pool.PoolType poolType) => _bulletPools[poolType].GetNextObject();
private ObjectPooler<T> CreatePool<T>(Pool pool) where T : Pooleable
{
var objectPooler = new ObjectPooler<T>();
objectPooler.InstantiateObjects(pool.quantity, pool.prefab.GetComponent<T>(),
$"Pool of {pool.prefab.name}");
return objectPooler;
}
private void InitializeBulletPools()
{
_bulletPools = new Dictionary<Pool.PoolType, ObjectPooler<Bullet>>();
foreach (var bulletPool in bulletPools)
{
_bulletPools[bulletPool.poolType] = CreatePool<Bullet>(bulletPool);
}
}
private void DeactivatePools()
{
_miniBatteryPooler.DeactivatePooleables();
foreach (var pools in _bulletPools)
{
pools.Value.DeactivatePooleables();
}
}
}
}<file_sep>/Assets/Scripts/UI/BackgroundChanger.cs
using System.Collections;
using Levels;
using UnityEngine;
namespace UI
{
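/// <summary>
/// <para>Crossfades through the level's background sprites over the level duration by fading the front SpriteRenderer out over the next one</para>
/// </summary>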
public class BackgroundChanger : MonoBehaviour
{
[SerializeField] private SpriteRenderer background;
[SerializeField] private SpriteRenderer secondBackground;
[SerializeField] private LevelManager levelManager;
private Coroutine _changingBackgrounds;
private void Awake()
{
levelManager.OnLevelChange += (settings) => InitiateBackgrounds(settings.backgrounds, settings.time);
}
private void InitiateBackgrounds(Sprite[] backgrounds, float time)
{
if (backgrounds.Length == 0) return;
if (_changingBackgrounds != null) StopCoroutine(_changingBackgrounds);
if (backgrounds.Length < 2) background.sprite = backgrounds[0];
else
{
_changingBackgrounds = StartCoroutine(GoThroughBackgrounds(backgrounds, 0, time / backgrounds.Length));
}
}
private IEnumerator GoThroughBackgrounds(Sprite[] backgrounds, int currentIndex, float range)
{
background.sprite = backgrounds[currentIndex];
secondBackground.sprite = backgrounds[currentIndex + 1];
var backgroundColor = background.color;
backgroundColor.a = 1;
background.color = backgroundColor;
float t = 0;
while (background.color.a > 0)
{
var color = background.color;
color.a = Mathf.Lerp(1, 0, t);
background.color = color;
t += Time.deltaTime / range;
yield return null;
}
currentIndex++;
if (currentIndex + 1 >= backgrounds.Length)
{
background.sprite = secondBackground.sprite;
yield break;
}
_changingBackgrounds = StartCoroutine(GoThroughBackgrounds(backgrounds, currentIndex, range));
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Enemies/RedEnemyAi.cs
using Entities.Enemy.Ai.States;
using UnityEngine;
namespace Entities.Enemy.Enemies
{
public class RedEnemyAi : EnemyAi
{
[SerializeField, Tooltip("When the enemy health goes below this percentage the short range attack is triggered"), Range(0, 1)]
private float laserHealthPercentage;
[SerializeField] private EnemyWeapon shortRangeWeapon;
private float _initialHealth;
protected override void SetUpStates()
{
var idleState = new IdleState(this);
var largeAttackState = new AttackState(enemyWeapon, Animator, Mover, Player);
var prepareShortAttack = new PlayAnimationState(Animator, Player, Mover, "prepareShortAttack", 0.5f);
var shortRangeState = new AttackState(shortRangeWeapon, transform, Mover, Player);
var idleShortRangeState = new IdleState(this);
var dieAnimation = new PlayAnimationState(Animator, Player, Mover, "die", 0.67f);
var destroySelf = new DestroySelfState(gameObject);
StateMachine.AddTransition(idleState, largeAttackState, PlayerInsideRange);
StateMachine.AddTransition(largeAttackState, idleState, () => !PlayerInsideRange());
StateMachine.AddTransition(largeAttackState, prepareShortAttack, HealthBelowThreshold);
StateMachine.AddTransition(prepareShortAttack, idleShortRangeState, FinishPlayingAnimation(prepareShortAttack));
StateMachine.AddTransition(idleShortRangeState, shortRangeState, PlayerInsideRange);
StateMachine.AddTransition(shortRangeState, idleShortRangeState, () => !PlayerInsideRange());
StateMachine.AddAnyTransition(dieAnimation, EnemyDie);
StateMachine.AddTransition(dieAnimation, destroySelf, FinishPlayingAnimation(dieAnimation));
StateMachine.SetState(idleState);
_initialHealth = Stats.Health;
bool HealthBelowThreshold() => Stats.Health <= _initialHealth * laserHealthPercentage;
}
}
}<file_sep>/Assets/Scripts/Utils/Audio/AudioManager.cs
using System.Collections.Generic;
using Entities;
using UnityEngine;
namespace Utils.Audio
{
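/// <summary>
/// <para>Persistent singleton that plays one-shot sounds and background music through pooled AudioSources, with optional fades, a low pass variant while the game is paused, and global mute</para>
/// </summary>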
public class AudioManager : MonoBehaviour, IPausable
{
[SerializeField] private int audioSourceQuantity;
[SerializeField] private AudioSourcePooleable audioSourcePrefab;
[SerializeField] private AudioSourcePooleable lowPassFilterPrefab;
[SerializeField] private float fadeTime = 2f;
public static AudioManager Instance;
private ObjectPooler<AudioSourcePooleable> _audioClipPooler;
private ObjectPooler<AudioSourcePooleable> _lowPassFilterPooler;
private ObjectPooler<AudioSourcePooleable> _backgroundMusicPooler;
private bool _paused;
private void Awake()
{
if (Instance != null)
{
Destroy(gameObject);
return;
}
else
{
Instance = this;
DontDestroyOnLoad(this);
}
PoolAudioSources();
InitializeBackgroundMusicPool();
}
public void PlaySound(AudioClip clip)
{
PlaySound(clip, AudioOptions.Default());
}
public void PlaySound(AudioClip clip, AudioOptions audioOptions)
{
audioOptions.LowPassFilter = _paused;
var audioSource = audioOptions.LowPassFilter
? _lowPassFilterPooler.GetNextObject()
: _audioClipPooler.GetNextObject();
audioSource.SetClip(clip);
audioSource.StartClip();
if (audioOptions.WithFade)
{
StartCoroutine(AudioFades.FadeIn(audioSource.AudioSource, audioOptions.FadeSpeed, audioOptions.Volume));
}
else audioSource.SetVolume(audioOptions.Volume);
}
public void PlayBackgroundMusic(AudioClip clip, AudioOptions audioOptions)
{
var audioSource = _backgroundMusicPooler.GetNextObject();
audioSource.AudioSource.clip = clip;
if (audioOptions.WithFade)
StartCoroutine(AudioFades.FadeIn(audioSource.AudioSource, audioOptions.FadeSpeed, audioOptions.Volume));
else
{
audioSource.AudioSource.volume = audioOptions.Volume;
audioSource.AudioSource.Play();
}
}
public void PauseAllBackgroundMusic()
{
foreach (var audioSourcePooleable in _backgroundMusicPooler.Objects)
{
audioSourcePooleable.AudioSource.Pause();
}
}
public void StopAllBackgroundMusic(AudioOptions audioOptions)
{
foreach (var audioSourcePooleable in _backgroundMusicPooler.ActiveObjects)
{
if (audioOptions.WithFade)
StartCoroutine(AudioFades.FadeOut(audioSourcePooleable, audioOptions.FadeSpeed));
else
{
audioSourcePooleable.AudioSource.Stop();
audioSourcePooleable.Deactivate();
}
}
}
public void StopBackgroundMusic(AudioClip clip)
{
var audioSource = GetAudioSource(clip);
audioSource?.Deactivate();
var activeObject = _backgroundMusicPooler.ActiveObjects;
foreach (var audioSourcePooleable in activeObject)
{
audioSourcePooleable.AudioSource.UnPause();
}
}
private void PoolAudioSources()
{
_audioClipPooler = new ObjectPooler<AudioSourcePooleable>();
_lowPassFilterPooler = new ObjectPooler<AudioSourcePooleable>();
var parent = _audioClipPooler.InstantiateObjects(audioSourceQuantity, audioSourcePrefab, "Audio Sources");
var lowPassFilterParent = _lowPassFilterPooler.InstantiateObjects(audioSourceQuantity, lowPassFilterPrefab,
"Low pass Audio Sources");
DontDestroyOnLoad(parent);
DontDestroyOnLoad(lowPassFilterParent);
}
private void InitializeBackgroundMusicPool()
{
_backgroundMusicPooler = new ObjectPooler<AudioSourcePooleable>();
var parent = _backgroundMusicPooler.InstantiateObjects(2, audioSourcePrefab, "Background Music",
audioSources =>
{
foreach (var audioSource in audioSources)
{
audioSource.AudioSource.loop = true;
}
});
DontDestroyOnLoad(parent);
}
private AudioSourcePooleable GetAudioSource(AudioClip clip)
{
foreach (var audioSource in _backgroundMusicPooler.Objects)
{
if (audioSource.AudioSource.clip == clip) return audioSource;
}
return null;
}
public void Pause()
{
_paused = true;
}
public void UnPause()
{
_paused = false;
}
public void Mute() => ChangeMute(true);
public void UnMute() => ChangeMute(false);
private void ChangeMute(bool mute)
{
foreach (var audioSourcePooleable in _audioClipPooler.Objects)
{
audioSourcePooleable.AudioSource.mute = mute;
}
foreach (var audioSourcePooleable in _backgroundMusicPooler.Objects)
{
audioSourcePooleable.AudioSource.mute = mute;
}
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Enemies/CommonEnemyAi.cs
using Entities.Enemy.Ai.States;
using UnityEngine;
namespace Entities.Enemy.Enemies
{
[RequireComponent(typeof(Rigidbody2D))]
public class CommonEnemyAi : EnemyAi
{
[SerializeField] private Transform leftPosition;
[SerializeField] private Transform rightPosition;
[SerializeField] private float prepareToAttackAnimationLength;
[SerializeField] private float dieAnimationLength;
protected override void SetUpStates()
{
var idleState = new IdleState(leftPosition, rightPosition, this, Mover);
var startAttackingState =
new PlayAnimationState(Animator, Player, Mover, "startAttacking", prepareToAttackAnimationLength);
var attackState = new AttackState(enemyWeapon, Animator, Mover, Player);
var stopAttackingState =
new PlayAnimationState(Animator, Player, Mover, "stopAttacking", prepareToAttackAnimationLength);
var dieState = new PlayAnimationState(Animator, Player, Mover, "die", dieAnimationLength);
var destroySelf = new DestroySelfState(gameObject);
StateMachine.AddTransition(idleState, startAttackingState, PlayerInsideRange);
StateMachine.AddTransition(startAttackingState, attackState, FinishPlayingAnimation(startAttackingState));
StateMachine.AddTransition(attackState, stopAttackingState, PlayerOutsideRange);
StateMachine.AddTransition(stopAttackingState, idleState, FinishPlayingAnimation(stopAttackingState));
StateMachine.AddTransition(dieState, destroySelf, FinishPlayingAnimation(dieState));
StateMachine.AddAnyTransition(dieState, EnemyDie);
StateMachine.SetState(idleState);
bool PlayerOutsideRange() => !PlayerInsideRange();
}
}
}<file_sep>/Assets/Scripts/Platforms/MovablePlatform.cs
using System;
using System.Collections.Generic;
using Entities;
using Entities.Enemy;
using Entities.Enemy.Enemies;
using Entities.Player;
using UnityEngine;
namespace Platforms
{
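/// <summary>
/// <para>Platform that moves between the given waypoints using smoothed velocity and drags the player along while they stand on it</para>
/// </summary>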
[RequireComponent(typeof(Rigidbody2D))]
public class MovablePlatform : MonoBehaviour, IPausable
{
[SerializeField] private Transform[] transforms;
[SerializeField] private float speed;
[SerializeField] private float movementSmoothing = 0.1f;
private Vector2[] _positions;
private int _currentPosition;
private Rigidbody2D _rigidBody;
private Vector2 _velocity;
private bool _paused;
private Vector2 _targetVelocity;
private Rigidbody2D _player;
private bool _hasPlayer;
private Vector2 _previousPosition;
private Vector2 NextPosition => _positions[_currentPosition];
private void Awake()
{
_rigidBody = GetComponent<Rigidbody2D>();
_positions = new Vector2[transforms.Length];
_previousPosition = transform.position;
for (var i = 0; i < transforms.Length; i++)
{
_positions[i] = transforms[i].position;
}
}
private void Update()
{
if (_paused) return;
var position = _rigidBody.position;
var targetVelocity = NextPosition - position;
targetVelocity = targetVelocity.normalized * speed;
var velocity = Vector2.SmoothDamp(_rigidBody.velocity, targetVelocity, ref _velocity, movementSmoothing);
_rigidBody.velocity = velocity;
if (_hasPlayer) _player.position += position - _previousPosition;
if (Vector3.Distance(position, NextPosition) < speed/20)
{
_currentPosition = (_currentPosition + 1) % _positions.Length;
}
_previousPosition = position;
}
private void OnCollisionEnter2D(Collision2D other)
{
if (!other.collider.CompareTag("Player")) return;
_hasPlayer = true;
_player = other.gameObject.GetComponent<Rigidbody2D>();
}
private void OnCollisionExit2D(Collision2D other)
{
if (!other.collider.CompareTag("Player")) return;
_hasPlayer = false;
}
public void Pause()
{
_paused = true;
_rigidBody.velocity = Vector2.zero;
}
public void UnPause()
{
_paused = false;
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Ai/States/IdleState.cs
using Entities.Enemy.Enemies;
using UnityEngine;
namespace Entities.Enemy.Ai.States
{
public class IdleState: IState
{
private static readonly int IdleBool = Animator.StringToHash("idle");
private readonly float _leftPosition;
private readonly float _rightPosition;
private bool _goingRight;
private readonly Animator _animator;
private readonly Rigidbody2D _rigidBody2D;
private Vector2 _velocity;
private readonly Mover _mover;
private static readonly int Speed = Animator.StringToHash("speed");
private bool _move;
private EnemyAi _enemyAi;
public IdleState(Transform leftPosition, Transform rightPosition, EnemyAi enemyAi, Mover mover)
{
_leftPosition = leftPosition.position.x;
_rightPosition = rightPosition.position.x;
_animator = enemyAi.Animator;
_rigidBody2D = enemyAi.RigidBody;
_mover = mover;
_move = true;
_enemyAi = enemyAi;
}
public IdleState(EnemyAi enemyAi)
{
_enemyAi = enemyAi;
_animator = enemyAi.Animator;
_rigidBody2D = enemyAi.RigidBody;
_move = false;
}
public void Tick()
{
if (!_move) return;
if (_goingRight)
{
if (_rightPosition < _rigidBody2D.position.x) _goingRight = false;
}
else
{
if (_leftPosition > _rigidBody2D.position.x) _goingRight = true;
}
SetAnimatorVelocity();
}
public void FixedTick()
{
if (!_move) return;
_mover.Move(_goingRight? new Vector2(1, 0) : new Vector2(-1, 0));
}
public void OnEnter()
{
_animator.SetBool(IdleBool, true);
}
public void OnExit()
{
_rigidBody2D.velocity = Vector2.zero;
if(_move) SetAnimatorVelocity();
_animator.SetBool(IdleBool, false);
_enemyAi.PlayerSighted();
}
private void SetAnimatorVelocity()
{
_animator.SetFloat(Speed, Mathf.Abs(_rigidBody2D.velocity.magnitude));
}
}
}<file_sep>/Assets/Scripts/Entities/Player/Attack/Sword.cs
using UnityEngine;
namespace Entities.Player.Attack
{
public class Sword : MonoBehaviour
{
[SerializeField] private float range;
[SerializeField] private float damage;
public float Range => range;
public float Damage => damage;
}
}<file_sep>/Assets/Scripts/Platforms/TimePlatform.cs
using System;
using System.Collections;
using UnityEngine;
using Utils;
namespace Platforms
{
public class TimePlatform: PlayerInteractablePlatform
{
[SerializeField] private float timeToDeactivate;
private static readonly int PlayerExitTrigger = Animator.StringToHash("playerExit");
private WaitSeconds _waitSeconds;
private bool _triggered;
protected override void Awake()
{
base.Awake();
OnTriggerEnter += TriggerEnter;
_waitSeconds = new WaitSeconds(this, PlayerExit, timeToDeactivate);
}
private void TriggerEnter()
{
_waitSeconds.Wait();
}
private void PlayerExit()
{
if (Paused)
{
_triggered = true;
return;
}
Animator.SetTrigger(PlayerExitTrigger);
}
public override void UnPause()
{
base.UnPause();
if (!_triggered) return;
TriggerEnter();
_triggered = false;
}
}
}<file_sep>/Assets/Scripts/UI/PauseButton.cs
using System;
using Input;
using Levels;
using UnityEngine;
using UnityEngine.InputSystem;
using UnityEngine.UI;
using Utils.Audio;
namespace UI
{
[RequireComponent(typeof(Button), typeof(Image))]
public class PauseButton : MonoBehaviour
{
[SerializeField] private Sprite pauseSprite;
[SerializeField] private Sprite resumeSprite;
[SerializeField] private AudioClip buttonClicked;
[SerializeField] private Image pausePanel;
[SerializeField] private CanvasGroup[] buttonsToDisplay;
private Button _button;
private Image _image;
private bool _paused;
private void Awake()
{
_button = GetComponent<Button>();
_image = GetComponent<Image>();
_button.onClick.AddListener(ButtonClicked);
var controller = new InputAction(binding: "<Keyboard>/p");
controller.performed += (_) => ButtonClicked();
controller.Enable();
}
private void Start()
{
UnPause();
}
private void ButtonClicked()
{
AudioManager.Instance.PlaySound(buttonClicked);
if(_paused) UnPause();
else Pause();
}
private void Pause()
{
Time.timeScale = 0;
_paused = true;
pausePanel.enabled = true;
_image.sprite = resumeSprite;
Timer.Instance.Pause();
ChangeAlphaButton(false);
}
public void UnPause()
{
Time.timeScale = 1;
_paused = false;
pausePanel.enabled = false;
_image.sprite = pauseSprite;
Timer.Instance.Resume();
ChangeAlphaButton(true);
}
private void ChangeAlphaButton(bool hide)
{
foreach (var canvasGroup in buttonsToDisplay)
{
canvasGroup.alpha = hide ? 0 : 1;
canvasGroup.interactable = !hide;
canvasGroup.blocksRaycasts = !hide;
}
}
}
}<file_sep>/Assets/Scripts/Levels/MapLimit.cs
using System;
using Entities;
using UnityEngine;
namespace Levels
{
public class MapLimit : MonoBehaviour
{
private void OnTriggerExit2D(Collider2D other)
{
var damageReceiver = other.GetComponent<DamageReceiver>();
if (damageReceiver == null) return;
damageReceiver.ReceiveDamage(0, Vector3.zero, true, true);
}
}
}<file_sep>/Assets/Scripts/Entities/Enemy/Audio/EnemyAudioReferences.cs
using UnityEngine;
using Utils.Audio;
namespace Entities.Enemy
{
[CreateAssetMenu(fileName = "Enemy Audio", menuName = "Audio/Enemy Audio", order = 1)]
public class EnemyAudioReferences : ScriptableObject
{
public CustomAudioClip onSight;
public CustomAudioClip damageReceive;
public CustomAudioClip die;
}
}<file_sep>/Assets/Scripts/Levels/FinishLevel.cs
using System;
using Entities.Player;
using UnityEngine;
namespace Levels
{
public class FinishLevel : MonoBehaviour
{
private void OnTriggerEnter2D(Collider2D other)
{
if (!other.GetComponent<APlayer>()) return;
FindObjectOfType<LevelManager>().FinishLevel();
}
}
}<file_sep>/Assets/Scripts/Entities/Grabbables/Grabber.cs
using System;
using UnityEngine;
using Utils;
namespace Entities.Grabbables
{
public class Grabber : MonoBehaviour
{
[SerializeField] private float grabbingDistance;
public event Action OnGrab;
private DistanceDetector _distanceDetector;
private void Awake()
{
_distanceDetector = gameObject.AddComponent<DistanceDetector>();
_distanceDetector.targetTag = "Grabbable";
_distanceDetector.DetectionDistance = grabbingDistance;
_distanceDetector.OnColliderInsideRadius += ColliderNear;
}
private void ColliderNear(Collider2D collider)
{
var grabbable = collider.GetComponentInParent<Grabbable>();
if (grabbable == null) return;
grabbable.OnGrabberNear(this);
OnGrab?.Invoke();
}
}
}<file_sep>/Assets/Scripts/Entities/Player/Abilities/DashAbility.cs
using System;
using Cinemachine;
using UnityEngine;
using Utils;
using CharacterController = Entities.Player.Movement.CharacterController;
namespace Entities.Player.Abilities
{
[RequireComponent(typeof(Collider2D), typeof(Rigidbody2D), typeof(DamageReceiver))]
public class DashAbility : MonoBehaviour
{
[SerializeField] private CharacterController characterController;
[SerializeField] private float distance = 2f;
[SerializeField] private LayerMask collisionMask;
[SerializeField] private float damage = 4f;
[SerializeField] private float timeBetweenDashes = 1f;
[SerializeField] private float invincibleTime = 0.5f;
public event Action OnDash;
public float LastDash { get; private set; }
public float TimeBetweenDashes => timeBetweenDashes;
// private bool _hasDashed;
private RaycastHit2D[] _hits;
private Collider2D _collider;
private Rigidbody2D _rigidBody;
private DamageReceiver _damageReceiver;
private WaitSeconds _invincibleWaitTime;
private CinemachineFramingTransposer _camera;
private void Awake()
{
_camera = FindObjectOfType<CinemachineVirtualCamera>()
.GetCinemachineComponent<CinemachineFramingTransposer>();
_hits = new RaycastHit2D[10];
_collider = GetComponent<Collider2D>();
_rigidBody = GetComponent<Rigidbody2D>();
_damageReceiver = GetComponent<DamageReceiver>();
_invincibleWaitTime =
new WaitSeconds(this, () => _damageReceiver.Invincible = false, invincibleTime);
LastDash = -timeBetweenDashes;
}
public void Dash()
{
if (!CanDash()) return;
_damageReceiver.Invincible = true;
_invincibleWaitTime.Wait();
// _hasDashed = true;
LastDash = Time.time;
OnDash?.Invoke();
}
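// MakeDash sweeps a box cast from the current position toward the dash
// destination, damaging every DamageReceiver hit along the way; the first
// non-damageable collider is treated as a wall and stops the dash there.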
public void MakeDash()
{
var position = (Vector2) transform.position;
var destination = position;
var facing = characterController.FacingRight ? 1 : -1;
destination.x += distance * facing;
var boxSize = _collider.bounds.size;
boxSize.x *= 0.2f;
boxSize.y *= 0.8f;
var size = Physics2D.BoxCastNonAlloc(position, boxSize, 0f, destination - position, _hits,
distance, collisionMask);
IterateThroughColliders(position, destination, size);
}
public void RestoreDash()
{
LastDash -= timeBetweenDashes;
}
private void IterateThroughColliders(Vector2 position, Vector2 destination, int size)
{
for (int i = 0; i < size; i++)
{
var damageReceiver = _hits[i].collider.GetComponent<DamageReceiver>();
if (damageReceiver != null) damageReceiver.ReceiveDamage(damage, position);
else
{
HitWall(_hits[i].point);
return;
}
}
_camera.OnTargetObjectWarped(transform, destination - _rigidBody.position);
_rigidBody.position = destination;
// _rigidBody.velocity = Vector2.zero;
}
private void HitWall(Vector2 point)
{
point.x += _collider.bounds.extents.x * (characterController.FacingRight ? -1 : 1);
transform.position = point;
Debug.DrawLine(transform.position, point, Color.red, 5);
}
private bool CanDash()
{
return Time.time - LastDash > timeBetweenDashes;
}
}
}
|
e95ca6aa2fadc1afd02c529fd72db9b6074377c4
|
[
"Markdown",
"C#"
] | 69 |
C#
|
Nachodlv/timebot
|
477035212b26136357aedfde8b0edfb36c00328e
|
af3f548de946794630bc3220fe6fbd0e3dcc06f3
|
refs/heads/master
|
<repo_name>vishnuGovindaraj/AmazonWebpage-HtmlParser<file_sep>/README.txt
java -jar ProductParser.jar
<file_sep>/src/Product.java
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.text.ParseException;
import java.util.ArrayList;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
public class Product {
private String title;
private String location;
private String productASINID;
private String ProductUrl;
private int reviewCount = 0;
private int productReviewCount;
private ArrayList<Review> reviewObjects = new ArrayList<Review>();
private Document productPage;
public Product(String filePath) throws ParseException, FileNotFoundException, UnsupportedEncodingException{
setLocation(filePath);
File product = new File(filePath);
try {
productPage = Jsoup.parse(product, "UTF-8");
title = productPage.title();
//get the number of reviews for this product
String reviewCountLine = productPage.select("span[class=a-size-base]").text();
String reviewCountAlpha = reviewCountLine.substring(0, reviewCountLine.indexOf("customer")).trim();
reviewCountAlpha = reviewCountAlpha.replace(",", "");
productReviewCount = Integer.parseInt(reviewCountAlpha);
//get the product ID for this product, note some pages have a different ASIN than the folder names
productASINID = filePath.split("\\\\")[3];
//product url for this product
ProductUrl = "http://www.amazon.com/dp/" + productASINID;
//format the product filePath string to get the review folder filepath
String reviewFilePath = filePath.replace(productASINID + "." + "html", "reviews\\");
System.out.println("productID: " + productASINID);
System.out.println("Reveiw Folder filePath:" + reviewFilePath);
//create a writer for this specific product folder
PrintWriter outputWriter;
outputWriter = new PrintWriter("Output\\" + productASINID + "_Output.txt", "UTF-8");
//write the product information to the specified file.
outputWriter.println("Title: "+ title);
outputWriter.println("Product ID: " + productASINID);
outputWriter.println("Product url: " + ProductUrl);
outputWriter.println("Review Count: "+ productReviewCount);
outputWriter.println("---------------------------------------------------");
//create review objects for every single review file for the product
Files.walk(Paths.get(reviewFilePath)).forEach(fp -> {
if (Files.isRegularFile(fp)) {
reviewObjects.add(new Review(fp.toString()));
}
});
outputWriter.println("Reviews: ");
outputWriter.println("Review_ID " + "Reviewer_ID " + " Rating " +
"Date_Reviewed " + "Comment_Count " + " Comment ");
int i = 0;
while (i < reviewObjects.size()){
int j = 0;
while (j < 10){
outputWriter.println(reviewObjects.get(i).getReviewID(j) + " " +
reviewObjects.get(i).getReviewerID(j) + " " +
reviewObjects.get(i).getRating(j) + " " +
reviewObjects.get(i).getDate(j) + " " +
reviewObjects.get(i).getCommentCount(j)+" " +
reviewObjects.get(i).getreviewContent(j));
j++;
}
i++;
}
outputWriter.close();
}
catch (IOException e) {
e.printStackTrace();
}
}
private void setLocation(String location) {
this.location = location;
}
public String getProductID() {
return productASINID;
}
private void setProductID(String productID) {
this.productASINID = productID;
}
public Document getProductPage() {
return productPage;
}
public void setProductPage(Document productPage) {
this.productPage = productPage;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}
<file_sep>/src/HtmlParser.java
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
public class HtmlParser {
private static final int NUMBER_OF_PRODUCTS = 5;
private static int ProductIndex = 0;
private static Path[] PathsArray;
private static ArrayList<Product> productObjects = new ArrayList<Product>();
/**
* Parser to extract Product and Review related information from locally stored Amazon Web Pages
* @param args
* @author <NAME>
* @throws ParseException
*
*/
public static void main(String[] args) throws ParseException {
PathsArray = new Path[NUMBER_OF_PRODUCTS];
//get filepaths for the different products only, excluding the review filePaths
try {
Files.walk(Paths.get("Assets/products/")).forEach(filePath -> {
if (Files.isRegularFile(filePath)) {
if (!filePath.toString().contains("reviews")){
PathsArray[ProductIndex] = filePath;
System.out.println("productIndex: " + (ProductIndex + 1 )
+ " " + PathsArray[ProductIndex]);
ProductIndex++;
}
}
});
} catch (IOException e) {
e.printStackTrace();
}
try {
int i = 0;
while(i < NUMBER_OF_PRODUCTS) {
//create the product objects
productObjects.add(new Product(PathsArray[i].toString()));
i++;
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
|
3b85f1a35f7b8f6685ac06f96d16a3db74ce67ea
|
[
"Java",
"Text"
] | 3 |
Text
|
vishnuGovindaraj/AmazonWebpage-HtmlParser
|
d426c3f0fd2b46afcebd669c5d7c88d30198db01
|
0e4f1b20ca7cf3a98bc743e7056689d38f6c3883
|
refs/heads/master
|
<file_sep># Regularjs Definitions
Type declaration files of [Regularjs](https://github.com/regularjs/regular)<file_sep>
// Static API
interface Regular_Static {
new (options: object): Regular_Instance;
// todo
}
// Instance API
interface Regular_Instance {
// todo
}
declare var Regular: Regular_Static;
export = Regular
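// Hypothetical usage sketch (the typed surface above is still a stub, and the
// module name is assumed here):
//   import Regular = require('regularjs');
//   const app = new Regular({ /* component options */ });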
|
8f42a07991849c50012b561901ba1f57b917b969
|
[
"Markdown",
"TypeScript"
] | 2 |
Markdown
|
laoqiren/Regular-types
|
d037221506ba6fcf008bb5068a1c79c721fc416a
|
4cc94952f50fc08014f29952d9cd83bfe9c292ae
|
refs/heads/main
|
<repo_name>seanthayer/Exploring-C<file_sep>/Deque & variants/Linked-List/structs.h
#ifndef __STRUCTS_H
#define __STRUCTS_H
# ifndef TYPE
# define TYPE int
# define TYPE_SIZE sizeof(int)
# endif
struct Link
{
struct Link* next;
TYPE val;
};
struct DLink
{
struct DLink* next;
struct DLink* prev;
TYPE val;
};
struct Deque
{
struct DLink* head;
struct DLink* tail;
int size;
};
struct Queue
{
struct Link* head;
struct Link* tail;
int size;
};
struct Stack
{
struct Link* head;
int size;
};
#endif<file_sep>/Deque & variants/Linked-List/interfaces.h
#include "structs.h"
#ifndef __INTERFACES_H
#define __INTERFACES_H
/* DEQUE */
void initDeque(struct Deque* dq);
void freeDeque(struct Deque* dq);
void addFrontDeque(struct Deque* dq, TYPE val);
void addBackDeque(struct Deque* dq, TYPE val);
void printDeque(struct Deque* dq);
void removeFront(struct Deque* dq);
void removeBack(struct Deque* dq);
int isEmptyDeque(struct Deque* dq);
TYPE front(struct Deque* dq);
TYPE back(struct Deque* dq);
/* END DEQUE */
/* QUEUE */
void initQueue(struct Queue* q);
void freeQueue(struct Queue* q);
void addQueue(struct Queue* q, TYPE val);
void printQueue(struct Queue* q);
void removeQueue(struct Queue* q);
int isEmptyQueue(struct Queue* q);
TYPE frontQueue(struct Queue* q);
/* END QUEUE */
/* STACK */
void initStack(struct Stack* s);
void freeStack(struct Stack* s);
int isEmptyStack(struct Stack* s);
void pushStack(struct Stack* s, TYPE val);
TYPE popStack(struct Stack* s);
TYPE peepStack(struct Stack* s);
/* END STACK */
#endif<file_sep>/Hashtable/interfaces.c
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include "structs.h"
#include "interfaces.h"
int stringHash(char* str)
{
/* DJB HASH */
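/* djb2: start from 5381 and, for each character, compute hash = hash * 33 + c;
 * the shift-and-add below ((hash << 5) + hash) is exactly hash * 33. */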
int hash = 5381;
int c;
while ( (c = *str++) )
{
hash = ((hash << 5) + hash) + c;
}
return hash;
}
void initTable (hashTable* ht, int tableSize)
{
int index;
assert(ht);
ht->table = (hashLink**) malloc(sizeof(hashLink*) * tableSize);
ht->tableSize = tableSize;
ht->count = 0;
for(index = 0; index < tableSize; index++)
ht->table[index] = NULL;
}
void freeTable(hashTable* ht)
{
int i;
int cap;
hashLink* htLink;
hashLink* htLinkNxt;
hashLink* temp;
assert(ht);
cap = ht->tableSize;
for (i = 0; i < cap; i++)
{
if (ht->table[i])
{
htLink = ht->table[i];
htLinkNxt = htLink->next;
while (htLinkNxt)
{
temp = htLinkNxt;
htLinkNxt = htLinkNxt->next;
free(temp->key);
free(temp);
}
free(htLink->key);
free(htLink);
}
}
free(ht->table);
}
void freeTableBody(hashLink** htb, int oldSize)
{
int i;
struct hashLink* htLink;
struct hashLink* htLinkNxt;
struct hashLink* temp;
assert(htb);
for (i = 0; i < oldSize; i++)
{
if (htb[i])
{
htLink = htb[i];
htLinkNxt = htLink->next;
while (htLinkNxt)
{
temp = htLinkNxt;
htLinkNxt = htLinkNxt->next;
free(temp);
}
free(htLink);
}
}
free(htb);
}
void insertTable(hashTable* ht, KeyType key, ValueType val)
{
int index;
hashLink* htLink;
htLink = (hashLink*)malloc(sizeof(hashLink));
assert(htLink);
htLink->key = key;
htLink->val = val;
index = (int)(labs( stringHash(key) ) % ht->tableSize);
htLink->next = ht->table[index];
ht->table[index] = htLink;
ht->count++;
}
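/* Note: the lookup/removal helpers below compare keys by their stringHash()
 * value rather than with strcmp(), so two distinct keys that happen to hash
 * to the same value are treated as equal. */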
void removeKey(hashTable* ht, KeyType key)
{
int index;
int htKeyHash;
int paramKeyHash;
hashLink* htLink;
hashLink* htLinkNxt;
hashLink* prev;
assert(ht);
index = (int)(labs( stringHash(key) ) % ht->tableSize);
paramKeyHash = stringHash(key);
if (ht->table[index])
{
htLink = ht->table[index];
htKeyHash = stringHash(htLink->key);
if ( EQ(htKeyHash, paramKeyHash) )
{
/* Found key at front index */
ht->table[index] = htLink->next;
free(htLink->key);
free(htLink);
ht->count--;
}
else
{
prev = htLink;
htLinkNxt = htLink->next;
while (htLinkNxt)
{
htKeyHash = stringHash(htLinkNxt->key);
if ( EQ(htKeyHash, paramKeyHash) )
{
prev->next = htLinkNxt->next;
free(htLinkNxt->key);
free(htLinkNxt);
htLinkNxt = NULL;
ht->count--;
}
else
{
prev = htLinkNxt;
htLinkNxt = htLinkNxt->next;
}
}
}
}
}
void printTable(hashTable* ht)
{
int i;
struct hashLink* htLink;
struct hashLink* htLinkNxt;
struct hashLink* temp;
assert(ht);
for (i = 0; i < ht->tableSize; i++)
{
if (ht->table[i])
{
htLink = ht->table[i];
htLinkNxt = htLink->next;
printf("%s: %d\n\n", htLink->key, htLink->val);
while (htLinkNxt)
{
temp = htLinkNxt;
htLinkNxt = htLinkNxt->next;
printf("%s: %d\n\n", temp->key, temp->val);
}
}
}
}
int containsKey(hashTable* ht, KeyType key)
{
int index;
int htKeyHash;
int paramKeyHash;
hashLink* htLink;
hashLink* htLinkNxt;
assert(ht);
index = (int)(labs( stringHash(key) ) % ht->tableSize);
paramKeyHash = stringHash(key);
if (ht->table[index])
{
htLink = ht->table[index];
htKeyHash = stringHash(htLink->key);
if ( EQ(htKeyHash, paramKeyHash) )
{
return 1;
}
else
{
htLinkNxt = htLink->next;
while (htLinkNxt)
{
htKeyHash = stringHash(htLinkNxt->key);
if ( EQ(htKeyHash, paramKeyHash) )
{
return 1;
}
else
{
htLinkNxt = htLinkNxt->next;
}
}
}
return 0;
}
else
{
return 0;
}
}
int isEmptyTable(hashTable* ht)
{
assert(ht);
if (ht->count == 0)
{
return 1;
}
else
{
return 0;
}
}
int sizeTable(hashTable* ht)
{
int i;
int cap;
int count;
hashLink* htLink;
hashLink* htLinkNxt;
assert(ht);
i = 0;
cap = ht->tableSize;
count = 0;
for (i = 0; i < cap; i++)
{
if (ht->table[i])
{
count++;
htLink = ht->table[i];
htLinkNxt = htLink->next;
while (htLinkNxt)
{
count++;
htLinkNxt = htLinkNxt->next;
}
}
}
return count;
}
int emptyBuckets(hashTable* ht)
{
int i;
int cap;
int count;
assert(ht);
i = 0;
cap = ht->tableSize;
count = 0;
for (i = 0; i < cap; i++)
{
if (!ht->table[i])
count++;
}
return count;
}
float tableLoad(hashTable* ht)
{
int elements = ht->count;
int tsize = ht->tableSize;
float load = ((float)elements / (float)tsize);
return load;
}
struct hashLink* findLink(hashTable* ht, KeyType key)
{
int index;
int htKeyHash;
int paramKeyHash;
struct hashLink* htLink;
struct hashLink* htLinkNxt;
assert(ht);
index = (int)(labs( stringHash(key) ) % ht->tableSize);
paramKeyHash = stringHash(key);
if (ht->table[index])
{
htLink = ht->table[index];
htKeyHash = stringHash(htLink->key);
if ( EQ(htKeyHash, paramKeyHash) )
{
return htLink;
}
else
{
htLinkNxt = htLink->next;
while (htLinkNxt)
{
htKeyHash = stringHash(htLinkNxt->key);
if ( EQ(htKeyHash, paramKeyHash) )
{
return htLinkNxt;
}
else
{
htLinkNxt = htLinkNxt->next;
}
}
}
return NULL;
}
else
{
return NULL;
}
}
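/* Doubles the table size, re-inserts every existing link into the new bucket
 * array (reusing the existing key strings), then frees the old links and the
 * old bucket array via freeTableBody(). */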
void _resizeTable(hashTable* ht)
{
int i;
int oldSize = ht->tableSize;
hashLink** oldTable = ht->table;
hashLink* curr;
assert(ht);
initTable(ht, 2 * oldSize);
for (i = 0; i < oldSize; i++)
{
curr = oldTable[i];
while (curr != NULL)
{
insertTable(ht, curr->key, curr->val);
curr = curr->next;
}
}
freeTableBody(oldTable, oldSize);
}<file_sep>/BST/structs.h
#ifndef __STRUCTS_H
#define __STRUCTS_H
# ifndef TYPE
# define TYPE int
# define TYPE_SIZE sizeof(int)
# define EQ(a,b) (a == b)
# define LT(a,b) (a < b)
# endif
struct BST
{
struct Node* root;
int size;
};
struct Node
{
struct Node* left;
struct Node* right;
TYPE val;
};
#endif<file_sep>/BST/interfaces.h
#include "structs.h"
#ifndef __INTERFACES_H
#define __INTERFACES_H
/* Binary Search Tree */
void initBST(struct BST* tree);
void freeBST(struct BST* tree);
int containsBST(struct BST* tree, TYPE val);
void addBST(struct BST* tree, TYPE val);
void removeBST(struct BST* tree, TYPE val);
void printBST(struct BST* tree);
int isEmptyBST(struct BST* tree);
struct Node* _addNode(struct Node* curr, TYPE val);
struct Node* _removeNode(struct Node* curr, TYPE val);
struct Node *_removeLeftmost(struct Node* curr);
int _containsNode(struct Node* curr, TYPE val);
void _printInOrder(struct Node* curr);
void _freePostOrder(struct Node* curr);
/* END Binary Search Tree */
/* Misc */
void _miscPrintVal(struct Node* curr);
/* END Misc */
#endif<file_sep>/Deque & variants/Linked-List/main.c
#include <stdio.h>
#include <stdlib.h>
#include "structs.h"
#include "interfaces.h"
int main(int argc, char const *argv[])
{
struct Deque dq;
struct Queue q;
struct Stack s;
FILE* file;
const char* fileName;
int val;
int count;
int quantity;
if (argc == 2)
{
fileName = argv[1];
}
else
{
fileName = "input.txt";
}
file = fopen(fileName,"r");
printf("\n--------------------------------------------\n");
printf("\nHello! I'm here to generate simple visualizations of some basic data structures:\n");
printf("- Deques\n- Queues\n- Stacks\n\n");
printf("Press ENTER to continue.\n\n");
getchar();
/* --------------------------------------------
*
* DEQUE
*
* --------------------------------------------
*/
quantity = 25;
printf("Here are some Deque operations:\n\n");
initDeque(&dq);
printf("- isEmpty: %d\n\n", isEmptyDeque(&dq));
printf("- Adding values from %s...\n\n", fileName);
while ( !feof(file) && (quantity > 0) )
{
fscanf(file, "%d", &val);
addFrontDeque(&dq, val);
quantity--;
}
printf("- isEmpty: %d\n\n", isEmptyDeque(&dq));
printf("- Ready to print. Press ENTER.\n");
getchar();
printDeque(&dq);
printf("\n");
printf("- Link-front: { %d }\n", front(&dq));
printf("- Link-back: { %d }\n", back(&dq));
printf("- Freeing Deque memory.\n");
freeDeque(&dq);
/* --------------------------------------------
*
* END DEQUE
*
* --------------------------------------------
*/
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
/* --------------------------------------------
*
* QUEUE
*
* --------------------------------------------
*/
quantity = 25;
printf("\n");
printf("Here are some Queue operations:\n\n");
initQueue(&q);
printf("- isEmpty: %d\n\n", isEmptyQueue(&q));
printf("- Adding values from %s...\n\n", fileName);
while ( !feof(file) && (quantity > 0) )
{
fscanf(file, "%d", &val);
addQueue(&q, val);
quantity--;
}
printf("- isEmpty: %d\n\n", isEmptyQueue(&q));
printf("- Ready to print. Press ENTER.\n");
getchar();
printQueue(&q);
printf("\n");
printf("- Link-front: { %d }\n", frontQueue(&q));
printf("- Freeing Queue memory.\n");
freeQueue(&q);
/* --------------------------------------------
*
* END QUEUE
*
* --------------------------------------------
*/
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
/* --------------------------------------------
*
* STACK
*
* --------------------------------------------
*/
count = 0;
quantity = 25;
printf("\n");
printf("Here are some Stack operations:\n\n");
initStack(&s);
printf("- isEmpty: %d\n\n", isEmptyStack(&s));
printf("- Adding values from %s...\n\n", fileName);
while ( !feof(file) && (quantity > 0) )
{
fscanf(file, "%d", &val);
pushStack(&s, val);
quantity--;
}
printf("- isEmpty: %d\n\n", isEmptyStack(&s));
printf("- Ready to print. Press ENTER.\n");
getchar();
while ( !isEmptyStack(&s) )
{
count++;
printf("- *POP* Link %d: { %d }\n", count, popStack(&s));
}
printf("\n");
printf("- Freeing Stack memory.\n");
freeStack(&s);
/* --------------------------------------------
*
* END STACK
*
* --------------------------------------------
*/
printf("\n--------------------------------------------\n");
fclose(file);
return 0;
}
<file_sep>/Hashtable/main.c
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include "structs.h"
#include "interfaces.h"
char* getWord(FILE *file); /* getWord function referenced from Professor <NAME> */
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
int main(int argc, char const *argv[])
{
hashTable ht;
hashLink* htLink;
FILE* file;
const char* fileName;
char* word;
if (argc == 2)
{
fileName = argv[1];
}
else
{
fileName = "input.txt";
}
file = fopen(fileName,"r");
printf("\n--------------------------------------------\n");
printf("\nHello! I'm here to generate simple visualizations of some basic data structures:\n");
printf("- Hashtable\n\n");
printf("Press ENTER to continue.\n\n");
getchar();
/* --------------------------------------------
*
* HASHTABLE
*
* --------------------------------------------
*/
printf("Here are some Hashtable operations:\n\n");
initTable(&ht, 30);
printf("- isEmpty: %d\n\n", isEmptyTable(&ht));
printf("- Adding values from %s...\n\n", fileName);
do
{
word = getWord(file);
if (word)
{
if ( containsKey(&ht, word) )
{
htLink = findLink(&ht, word);
htLink->val++;
free(word);
}
else
{
insertTable(&ht, word, 1);
if (tableLoad(&ht) > 1)
_resizeTable(&ht);
}
}
} while (word);
printf("- isEmpty: %d\n\n", isEmptyTable(&ht));
printf("- Ready to print. Press ENTER.\n");
getchar();
printTable(&ht);
printf("- Contains key {\"the\"}?: %d\n\n", containsKey(&ht, "the"));
printf("- Removing key {\"the\"}\n\n");
removeKey(&ht, "the");
printf("- Contains key {\"the\"}?: %d\n\n", containsKey(&ht, "the"));
printf("- Book-kept element count: %d\n", ht.count);
printf("- Calculated element count: %d\n\n", sizeTable(&ht));
printf("- Empty buckets: %d\n", emptyBuckets(&ht));
printf("- Table load: %f\n", tableLoad(&ht));
printf("\n");
printf("- Freeing Hashtable memory.\n");
freeTable(&ht);
/* --------------------------------------------
*
* END HASHTABLE
*
* --------------------------------------------
*/
printf("\n--------------------------------------------\n");
fclose(file);
return 0;
}
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
char* getWord(FILE *file)
{
int length = 0;
int maxLength = 16;
int character; /* int, not char, so the EOF returned by fgetc() is detected correctly */
char* word = (char*)malloc(sizeof(char) * maxLength);
assert(word != NULL);
while( (character = fgetc(file)) != EOF)
{
if((length+1) > maxLength)
{
maxLength *= 2;
word = (char*)realloc(word, maxLength);
}
if((character >= '0' && character <= '9') || /*is a number*/
(character >= 'A' && character <= 'Z') || /*or an uppercase letter*/
(character >= 'a' && character <= 'z') || /*or a lowercase letter*/
character == 39) /*or is an apostrophe*/
{
word[length] = character;
length++;
}
else if(length > 0)
break;
}
if(length == 0)
{
free(word);
return NULL;
}
word[length] = '\0';
return word;
}
<file_sep>/Hashtable/structs.h
# ifndef __STRUCTS_H
# define __STRUCTS_H
# ifndef TYPE
# define TYPE int
# define TYPE_SIZE sizeof(int)
# define EQ(a,b) (a == b)
# define LT(a,b) (a < b)
# endif
# ifndef HASHTABLE
# define HASHTABLE
# define KeyType char*
# define ValueType int
typedef struct hashLink
{
struct hashLink* next;
KeyType key;
ValueType val;
} hashLink;
typedef struct hashTable
{
hashLink** table;
int tableSize;
int count;
} hashTable;
#endif
# ifndef BASIC_STRUCTS
# define BASIC_STRUCTS
struct Link
{
struct Link* next;
TYPE val;
};
struct DLink
{
struct DLink* next;
struct DLink* prev;
TYPE val;
};
#endif
#endif<file_sep>/README.md
<sup><sub>References from: Professor <NAME></sub></sup>
## Points of Interest
### Interfaces
* [Linked-List](Deque%20%26%20variants/Linked-List/interfaces.c) — An implementation of the Linked-List data structure interface (a short usage sketch follows this list).
* [Binary Search Tree](BST/interfaces.c) — An implementation of the BST data structure interface.
* [Hashtable](Hashtable/interfaces.c) — An implementation of the Hashtable data structure interface.
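The interfaces above follow a common `init` / operate / `free` pattern. As a minimal sketch of driving the Linked-List Deque (assuming `structs.h` and `interfaces.h` from that directory are on the include path and the default `int` `TYPE` is in use):

```c
#include <stdio.h>

#include "structs.h"
#include "interfaces.h"

int main(void)
{
    struct Deque dq;

    initDeque(&dq);                 /* allocates the front & back sentinels */
    addFrontDeque(&dq, 1);
    addBackDeque(&dq, 2);           /* deque now holds: 1, 2 */

    printf("front: %d, back: %d\n", front(&dq), back(&dq));

    removeFront(&dq);               /* drops the 1 */
    printDeque(&dq);                /* prints the remaining links */

    freeDeque(&dq);                 /* frees the links and both sentinels */
    return 0;
}
```

Each structure's `main.c` exercises the same calls end to end.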
### Diagrams
<details><summary>Deque (Double-Link, Double-Sentinel)</summary>

</details>
<details><summary>Queue (Single-Link, Single-Sentinel)</summary>

</details>
<details><summary>Stack (Single-Link)</summary>

</details>
<details><summary>Hashtable (Hash-Link)</summary>

</details>
<details><summary>Binary Search Tree (Node)</summary>

</details>
<file_sep>/BST/interfaces.c
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include "structs.h"
#include "interfaces.h"
void initBST(struct BST* tree)
{
/* Description:
* Function initializes a BST structure using a pointer to its memory address.
* The function does not allocate additional memory upon initialization.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
*
* Post-conditions:
* - The BST is initialized: the size is 0, and the root points to NULL.
*/
assert(tree);
tree->size = 0;
tree->root = NULL;
}
void freeBST(struct BST* tree)
{
/* Description:
* Function calls recursive function " _freePostOrder() " to free all nodes,
* in post-order.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The BST's memory is freed.
*/
assert(tree);
_freePostOrder(tree->root);
}
int containsBST(struct BST* tree, TYPE val)
{
/* Description:
* For a well balanced tree, complexity O(log₂(n)).
* Function calls recursive function " _containsNode() " to traverse the BST,
* and determine whether the value is contained within it.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
* [TYPE] val: The value to search for.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The function returns True (1) if the value is contained, or False (0) if not.
*/
assert(tree);
if ( isEmptyBST(tree) )
{
return 0;
}
else
{
return _containsNode(tree->root, val);
}
}
void addBST(struct BST* tree, TYPE val)
{
/* Description:
* For a well balanced tree, complexity O(log₂(n)).
* Function calls recursive function " _addNode() " to traverse the BST,
* and correctly add a node with the value, maintaining BST principles.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
* [TYPE] val: The value to insert.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The function adds and links the given value to the BST.
*/
assert(tree);
tree->root = _addNode(tree->root, val);
tree->size++;
}
void removeBST(struct BST* tree, TYPE val)
{
/* Description:
* For a well balanced tree, complexity O(log₂(n)).
* Function calls recursive function " _removeNode() " to traverse the BST,
* and correctly remove a value, maintaining BST principles.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
* [TYPE] val: The value to remove.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The function removes the given value and properly re-links the BST.
*/
assert(tree);
if ( containsBST(tree, val) )
{
tree->root = _removeNode(tree->root, val);
tree->size--;
}
}
void printBST(struct BST* tree)
{
/* Description:
* Function calls recursive function " _printInOrder() " to traverse the BST,
* and print the nodes in-order.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The function prints the BST in-order to stdout.
*/
assert(tree);
_printInOrder(tree->root);
}
int isEmptyBST(struct BST* tree)
{
/* Description:
* Function checks the BST's size and returns true or false.
*
* Parameters:
* [struct] (BST*) tree: A pointer to the BST.
*
* Returns:
* [int] (1 || 0): True or False, if the structure is empty or not.
*
* Pre-conditions:
* - Pointer 'tree' points to a proper memory location.
* - The BST was initialized correctly.
*
* Post-conditions:
* - True (1) or False (0) is returned.
*/
assert(tree);
if (tree->size == 0)
{
return 1;
}
else
{
return 0;
}
}
struct Node* _addNode(struct Node* curr, TYPE val)
{
/* Description:
* Recursive auxiliary function.
* Traverses the BST using binary search. Upon encountering
* a NULL pointer, allocates memory for a new node with the given value.
* After creating the new node, the function returns the node and further
* recursion stops. The BST is correctly re-linked as each recursive call
* returns.
*
* Parameters:
* [struct] (Node*) curr: A pointer to the next node to traverse from.
* [TYPE] val : The value to insert.
*
* Pre-conditions:
* - Pointer 'curr' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The function adds and links the given value to the BST.
*/
struct Node* node;
/* When the current node is a NULL pointer, create the new node */
if (curr == NULL)
{
node = (struct Node*) malloc(sizeof(struct Node));
assert(node);
node->val = val;
node->left = NULL;
node->right = NULL;
/* Returns the newly created node to the previous recursive call,
* and any previous recursive calls return, consecutively. */
return node;
}
else
{
/* Binary search */
if ( LT(val, curr->val) )
{
/* Traverse left subtree */
curr->left = _addNode(curr->left, val);
}
else
{
/* Traverse right subtree */
curr->right = _addNode(curr->right, val);
}
}
/* As recursive calls return in cascade, each unchanged child node is
* correctly returned to the previous parent node. */
return curr;
}
struct Node* _removeNode(struct Node* curr, TYPE val)
{
/* Description:
* Recursive auxiliary function.
* Traverses the BST using binary search. Upon encountering
* the node with the given value, the function determines if it has a right child node:
*
* - No child -> the function frees the removal node, and returns its left child (NULL or otherwise),
* ending further recursion.
* - Right child -> the function finds the leftmost descendant of the right child, overwrites the removal
* node with its value, and calls recursive function " _removeLeftmost " to
* remove its original node and properly re-link any children nodes. The function
* then returns the current node with the overwritten value, ending further recursion.
*
* As the recursive calls return in cascade, each node maintains correct linkage.
*
* Parameters:
* [struct] (Node*) curr: A pointer to the next node to traverse from.
* [TYPE] val : The value to remove.
*
* Pre-conditions:
* - Pointer 'curr' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The node with the given value is removed, its memory is freed, and BST principles are maintained.
*/
struct Node* node;
if ( EQ(val, curr->val) )
{
if (curr->right == NULL)
{
node = curr->left;
free(curr);
return node;
}
else
{
/* Finding leftmost descendant of the right child */
/* { */
node = curr->right;
while (node->left)
{
node = node->left;
}
/* } */
curr->val = node->val;
curr->right = _removeLeftmost(curr->right);
}
}
else
{
/* Binary search */
if ( LT(val, curr->val) )
{
/* Traverse left subtree */
curr->left = _removeNode(curr->left, val);
}
else
{
/* Traverse right subtree */
curr->right = _removeNode(curr->right, val);
}
}
/* As recursive calls return in cascade, each unchanged child node is
* correctly returned to the previous parent node. */
return curr;
}
struct Node* _removeLeftmost(struct Node* curr)
{
/* Description:
* Recursive auxiliary function.
* Traverses the BST through only left child nodes. Upon encountering
* a NULL left child pointer, the function frees the current node (the leftmost descendant), and
* returns its right child node, ending further recursion.
* As the recursive calls return in cascade, each node maintains correct linkage.
*
* Parameters:
* [struct] (Node*) curr: A pointer to the next node to traverse from.
*
* Pre-conditions:
* - Pointer 'curr' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The leftmost descendant is removed, its memory is freed, and its right child takes its place.
*/
struct Node* node;
if(curr->left)
{
curr->left = _removeLeftmost(curr->left);
return curr;
}
else
{
node = curr->right;
free(curr);
return node;
}
}
int _containsNode(struct Node* curr, TYPE val)
{
/* Description:
* Recursive auxiliary function.
* Traverses the BST using binary search. Upon encountering
* the node with the given value, the function returns True (1) to the previous
* recursive call, ending further recursion. Else, the function traverses until
* encountering a NULL child node pointer, indicating the absence of the given value in the BST.
* This ends further recursion, and as the recursive calls return in cascade,
* either True (1) or False (0) will be returned to the origin call.
*
* Parameters:
* [struct] (Node*) curr: A pointer to the next node to traverse from.
* [TYPE] val : The value to search for.
*
* Pre-conditions:
* - Pointer 'curr' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The BST is traversed, and upon reaching the value or upon reaching
* a NULL child node pointer, the function returns True (1) or False (0), accordingly.
*/
int flag = 0;
if ( EQ(val, curr->val) )
{
flag = 1;
}
else
{
/* Checks for "curr->left" and "curr->right" to identify
* NULL child nodes, and if there's no where to traverse,
* then the functions flows naturally and returns the initial
* flag value of 0.
*/
if ( LT(val, curr->val) && curr->left)
{
flag = _containsNode(curr->left, val);
}
else if (curr->right)
{
flag = _containsNode(curr->right, val);
}
}
return flag;
}
void _printInOrder(struct Node* curr)
{
/* Description:
* Recursive auxiliary function.
* Traverses the BST and prints node values in-order.
*
* Parameters:
* [struct] (Node*) curr: A pointer to the next node to traverse from.
*
* Pre-conditions:
* - Pointer 'curr' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The BST is traversed and node values are printed to stdout in-order.
*/
if (curr)
{
_printInOrder(curr->left);
printf("Node with value: { %d }\n", curr->val);
_printInOrder(curr->right);
}
}
void _freePostOrder(struct Node* curr)
{
/* Description:
* Recursive auxiliary function.
* Traverses the BST and frees node memory allocations post-order.
*
* Parameters:
* [struct] (Node*) curr: A pointer to the next node to traverse from.
*
* Pre-conditions:
* - Pointer 'curr' points to a proper memory location.
* - The BST is correctly linked.
*
* Post-conditions:
* - The BST is traversed and node memory allocations are freed post-order.
*/
if (curr)
{
_freePostOrder(curr->left);
_freePostOrder(curr->right);
free(curr);
}
}
<file_sep>/Hashtable/Makefile
default: prog
interfaces.o: interfaces.c interfaces.h
gcc -Wall -ansi -pedantic-errors -c interfaces.c
prog: interfaces.o main.c
gcc -Wall -ansi -pedantic-errors -o prog interfaces.o main.c structs.h
clean:
rm interfaces.o
cleanall: clean
rm prog
<file_sep>/BST/main.c
#include <stdio.h>
#include <stdlib.h>
#include "structs.h"
#include "interfaces.h"
int main(int argc, char const *argv[])
{
struct BST bst;
FILE* file;
const char* fileName;
int quantity;
int val;
if (argc == 2)
{
fileName = argv[1];
}
else
{
fileName = "input.txt";
}
file = fopen(fileName,"r");
printf("\n--------------------------------------------\n");
printf("\nHello! I'm here to generate simple visualizations of some basic data structures:\n");
printf("- BST\n\n");
printf("Press ENTER to continue.\n\n");
getchar();
/* --------------------------------------------
*
* Binary Search Tree
*
* --------------------------------------------
*/
quantity = 25;
printf("Here are some BST operations:\n\n");
initBST(&bst);
printf("- isEmpty: %d\n\n", isEmptyBST(&bst));
printf("- Adding values from %s...\n\n", fileName);
while ( !feof(file) && (quantity > 0) )
{
fscanf(file, "%d", &val);
addBST(&bst, val);
quantity--;
}
printf("- isEmpty: %d\n\n", isEmptyBST(&bst));
printf("- contains {747}?: %d\n\n", containsBST(&bst, 747));
printf("- contains {199}?: %d\n\n", containsBST(&bst, 199));
printf("- contains {500}?: %d\n\n", containsBST(&bst, 500));
printf("- Ready to print. Press ENTER.\n");
getchar();
printBST(&bst);
printf("\n- removing {747}\n\n");
removeBST(&bst, 747);
printf("- contains {747}?: %d\n", containsBST(&bst, 747));
printf("\n");
printf("- Freeing BST memory.\n");
freeBST(&bst);
/* --------------------------------------------
*
* END Binary Search Tree
*
* --------------------------------------------
*/
printf("\n--------------------------------------------\n");
fclose(file);
return 0;
}
<file_sep>/Deque & variants/Linked-List/interfaces.c
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include "structs.h"
#include "interfaces.h"
/* The Deque interfaces are implemented assuming double links are the base data structure. */
/* --------------------------------------------
*
* DEQUE
*
* Foundation used: Double-Link
*
* Sentinels: Front & Back sentinels
*
* --------------------------------------------
*/
void initDeque(struct Deque* dq)
{
/* Description:
* Function initializes a Deque structure using a pointer to its memory address.
* The function allocates memory for two initial Double-Links, the front & back sentinels.
* The sentinels initially point to each-other, and elements are added between, maintaining
* correct linkages.
*
* Parameters:
* [struct] (Deque*) dq: A pointer to the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - There is memory available to allocate.
*
* Post-conditions:
* - The Deque is initialized: the size is 0, both sentinels are created, and linkages are correct.
*/
struct DLink* senFront;
struct DLink* senBack;
assert(dq);
dq->size = 0;
senFront = (struct DLink*) malloc(sizeof(struct DLink));
senBack = (struct DLink*) malloc(sizeof(struct DLink));
assert(senFront);
assert(senBack);
dq->head = senFront;
dq->tail = senBack;
dq->head->next = dq->tail;
dq->tail->prev = dq->head;
dq->head->prev = NULL;
dq->tail->next = NULL;
}
void freeDeque(struct Deque* dq)
{
/* Description:
* Function frees all allocated memory associated with the Deque, such as the sentinels and Double-Links.
*
* Parameters:
* [struct] (Deque*) dq: A pointer to the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - The Deque's memory is freed.
*/
struct DLink* curr, * temp;
assert(dq);
curr = dq->head;
while (curr)
{
temp = curr;
curr = curr->next;
free(temp);
}
}
void addFrontDeque(struct Deque* dq, TYPE val)
{
/* Description:
* Function allocates memory for a Double-Link with the given value and adds it to the front of the Deque.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
* [TYPE] val : The value to insert.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - A Double-Link with the given value is inserted after the front sentinel, and correct linkages are maintained.
*/
struct DLink* dLink;
assert(dq);
dLink = (struct DLink*) malloc(sizeof(struct DLink));
assert(dLink);
dLink->val = val;
dLink->next = dq->head->next;
dLink->prev = dq->head;
dLink->next->prev = dLink;
dq->head->next = dLink;
dq->size++;
}
void addBackDeque(struct Deque* dq, TYPE val)
{
/* Description:
* Function allocates memory for a Double-Link with the given value and adds it to the back of the Deque.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
* [TYPE] val : The value to insert.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - A Double-Link with the given value is inserted before the back sentinel, and correct linkages are maintained.
*/
struct DLink* dLink;
assert(dq);
dLink = (struct DLink*) malloc(sizeof(struct DLink));
assert(dLink);
dLink->val = val;
dLink->next = dq->tail;
dLink->prev = dq->tail->prev;
dLink->prev->next = dLink;
dq->tail->prev = dLink;
dq->size++;
}
void printDeque(struct Deque* dq)
{
/* Description:
* Function iterates through the Deque, starting at the front, and prints each corresponding value.
* (Assuming an integer value.)
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - The Deque's values are printed to stdout.
*/
int count = 0;
struct DLink* curr;
assert(dq);
printf("Front to Back:\n\n");
curr = dq->head->next;
while (curr && (curr != dq->tail))
{
count++;
printf("- Link %d: { %d }\n", count, curr->val);
curr = curr->next;
}
/*
printf("\nBack to Front:\n\n");
curr = dq->tail->prev;
while (curr && (curr != dq->head))
{
printf("- Link %d: { %d }\n", count, curr->val);
curr = curr->prev;
count--;
}
*/
}
void removeFront(struct Deque* dq)
{
/* Description:
* Function unlinks the Double-Link at the front of the Deque and frees its memory.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - The Double-Link at the front of the Deque is unlinked and has its memory freed.
*/
struct DLink* rmv;
assert(dq);
if ( isEmptyDeque(dq) )
{
return;
}
else
{
rmv = dq->head->next;
dq->head->next = rmv->next;
rmv->next->prev = dq->head;
free(rmv);
dq->size--;
}
}
void removeBack(struct Deque* dq)
{
/* Description:
* Function unlinks the Double-Link at the back of the Deque and frees its memory.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - The Double-Link at the back of the Deque is unlinked and has its memory freed.
*/
struct DLink* rmv;
assert(dq);
if ( isEmptyDeque(dq) )
{
return;
}
else
{
rmv = dq->tail->prev;
dq->tail->prev = rmv->prev;
rmv->prev->next = dq->tail;
free(rmv);
dq->size--;
}
}
int isEmptyDeque(struct Deque* dq)
{
/* Description:
* Function checks the Deque's size and returns true or false.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
*
* Returns:
* [int] (1 || 0): True or False, if the structure is empty or not.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - True (1) or False (0) is returned.
*/
assert(dq);
if (dq->size == 0)
{
return 1;
}
else
{
return 0;
}
}
TYPE front(struct Deque* dq)
{
/* Description:
* Function accesses the value at the front of the Deque and returns it.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
*
* Returns:
* [TYPE] returnVal: The value at the front of the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - The Deque's front element value is returned, or NULL if Deque is empty.
*/
TYPE returnVal = NULL;
assert(dq);
if ( isEmptyDeque(dq) )
{
return returnVal;
}
else
{
returnVal = (TYPE) dq->head->next->val;
return returnVal;
}
}
TYPE back(struct Deque* dq)
{
/* Description:
* Function accesses the value at the back of the Deque and returns it.
*
* Parameters:
* [struct] (Deque*) dq : A pointer to the Deque.
*
* Returns:
* [TYPE] returnVal: The value at the back of the Deque.
*
* Pre-conditions:
* - Pointer 'dq' points to a proper memory location.
* - The Deque was initialized correctly.
*
* Post-conditions:
* - The Deque's back element value is returned, or NULL if empty.
*/
TYPE returnVal = NULL;
assert(dq);
if ( isEmptyDeque(dq) )
{
return returnVal;
}
else
{
returnVal = (TYPE) dq->tail->prev->val;
return returnVal;
}
}
/* --------------------------------------------
*
* END DEQUE
*
* --------------------------------------------
*/
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
/* --------------------------------------------
*
* QUEUE
*
* Foundation used: Single-Link
*
* Sentinels: Front sentinel
*
* --------------------------------------------
*/
void initQueue(struct Queue* q)
{
/* Description:
* Function initializes a Queue structure using a pointer to its memory address.
* The function allocates memory for one initial Single-Link, the front sentinel.
* The sentinel initially points to NULL, and elements are appended to the end of the Queue, maintaining
* correct linkages.
*
* Parameters:
* [struct] (Queue*) q: A pointer to the Queue.
*
* Pre-conditions:
* - Pointer 'q' points to a proper memory location.
* - There is memory available to allocate.
*
* Post-conditions:
* - The Queue is initialized: the size is 0, the front sentinel is created, and linkages are correct.
*/
struct Link* senFront;
assert(q);
q->size = 0;
senFront = (struct Link*) malloc(sizeof(struct Link));
assert(senFront);
senFront->next = NULL;
q->head = senFront;
q->tail = senFront;
}
void freeQueue(struct Queue* q)
{
/* Description:
* Function frees all allocated memory associated with the Queue, such as the front sentinel and Single-Links.
*
* Parameters:
* [struct] (Queue*) q: A pointer to the Queue.
*
* Pre-conditions:
* - Pointer 'q' points to a proper memory location.
* - The Queue was initialized correctly.
*
* Post-conditions:
* - The Queue's memory is freed.
*/
struct Link* curr, * temp;
assert(q);
curr = q->head;
while (curr)
{
temp = curr;
curr = curr->next;
free(temp);
}
}
void addQueue(struct Queue* q, TYPE val)
{
/* Description:
* Function allocates memory for a Single-Link with the given value and appends it to the end of the Queue.
*
* Parameters:
* [struct] (Queue*) q : A pointer to the Queue.
* [TYPE] val : The value to insert.
*
* Pre-conditions:
* - Pointer 'q' points to a proper memory location.
* - The Queue was initialized correctly.
*
* Post-conditions:
* - A Single-Link with the given value is inserted after the tail link, and correct linkages are maintained.
*/
struct Link* link;
assert(q);
link = (struct Link*) malloc(sizeof(struct Link));
assert(link);
link->val = val;
link->next = NULL;
q->tail->next = link;
q->tail = link;
q->size++;
}
void printQueue(struct Queue* q)
{
/* Description:
* Function iterates through the Queue, starting at the front, and prints each corresponding value.
* (Assuming an integer value.)
*
* Parameters:
* [struct] (Queue*) q : A pointer to the Queue.
*
* Pre-conditions:
* - Pointer 'q' points to a proper memory location.
* - The Queue was initialized correctly.
*
* Post-conditions:
* - The Queue's values are printed to stdout.
*/
struct Link* curr;
int count = 0;
assert(q);
printf("Front to Back:\n\n");
curr = q->head->next;
while (curr)
{
count++;
printf("- Link %d: { %d }\n", count, curr->val);
curr = curr->next;
}
}
void removeQueue(struct Queue* q)
{
/* Description:
* Function unlinks the Single-Link at the front of the Queue and frees its memory.
*
* Parameters:
* [struct] (Queue*) q : A pointer to the Queue.
*
* Pre-conditions:
* - Pointer 'q' points to a proper memory location.
* - The Queue was initialized correctly.
*
* Post-conditions:
* - The Single-Link at the front of the Queue is unlinked and has its memory freed.
* Correct linkages are maintained.
*/
struct Link* curr;
assert(q);
if ( isEmptyQueue(q) )
{
return;
}
else
{
curr = q->head->next;
q->head->next = curr->next;
if (curr == q->tail)
q->tail = q->head;
free(curr);
q->size--;
}
}
int isEmptyQueue(struct Queue* q)
{
/* Description:
* Function checks the Queue's size and returns true or false.
*
* Parameters:
* [struct] (Queue*) q : A pointer to the Queue.
*
* Returns:
* [int] (1 || 0): True or False, if the structure is empty or not.
*
* Pre-conditions:
*  - Pointer 'q' points to a proper memory location.
*  - The Queue was initialized correctly.
*
* Post-conditions:
* - True (1) or False (0) is returned.
*/
assert(q);
if (q->size == 0)
{
return 1;
}
else
{
return 0;
}
}
TYPE frontQueue(struct Queue* q)
{
/* Description:
* Function accesses the value at the front of the Queue and returns it.
*
* Parameters:
* [struct] (Queue*) q : A pointer to the Queue.
*
* Returns:
* [TYPE] returnVal: The value at the front of the Queue.
*
* Pre-conditions:
* - Pointer 'q' points to a proper memory location.
* - The Queue was initialized correctly.
*
* Post-conditions:
* - The Queue's front element value is returned, or NULL if Queue is empty.
*/
TYPE returnVal = NULL;
assert(q);
if ( isEmptyQueue(q) )
{
return returnVal;
}
else
{
returnVal = (TYPE) q->head->next->val;
return returnVal;
}
}
/* --------------------------------------------
*
* END QUEUE
*
* --------------------------------------------
*/
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
/* --------------------------------------------
*
* STACK
*
* Foundation used: Single-Link
*
* --------------------------------------------
*/
void initStack(struct Stack* s)
{
/* Description:
* Function initializes a Stack structure using a pointer to its memory address.
* The function does not allocate additional memory upon initialization, as this Stack structure
* implementation does not use a sentinel. Simply having a pointer to 'head' element is sufficient.
*
* Parameters:
* [struct] (Stack*) s: A pointer to the Stack.
*
* Pre-conditions:
* - Pointer 's' points to a proper memory location.
*
* Post-conditions:
* - The Stack is initialized: the size is 0, and the 'head' points to NULL.
*/
assert(s);
s->size = 0;
s->head = NULL;
}
void freeStack(struct Stack* s)
{
/* Description:
* Function frees all allocated memory associated with the Stack, the Single-Link entries (if any exist).
*
* Parameters:
* [struct] (Stack*) s : A pointer to the Stack.
*
* Pre-conditions:
* - Pointer 's' points to a proper memory location.
* - The Stack was initialized correctly.
*
* Post-conditions:
* - The Stack's memory is freed.
*/
assert(s);
while ( !isEmptyStack(s) )
{
popStack(s);
}
}
int isEmptyStack(struct Stack* s)
{
/* Description:
* Function checks the Stack's size and returns true or false.
*
* Parameters:
* [struct] (Stack*) s : A pointer to the Stack.
*
* Returns:
* [int] (1 || 0): True or False, if the structure is empty or not.
*
* Pre-conditions:
* - Pointer 's' points to a proper memory location.
* - The Stack was initialized correctly.
*
* Post-conditions:
* - True (1) or False (0) is returned.
*/
assert(s);
if (s->size == 0)
{
return 1;
}
else
{
return 0;
}
}
void pushStack(struct Stack* s, TYPE val)
{
/* Description:
* Function allocates memory for a Single-Link with the given value and pushes on top of the Stack.
*
* Parameters:
* [struct] (Stack*) s : A pointer to the Stack.
* [TYPE] val : The value to insert.
*
* Pre-conditions:
* - Pointer 's' points to a proper memory location.
* - The Stack was initialized correctly.
*
* Post-conditions:
* - A Single-Link with the given value is inserted on the head of the Stack, and correct linkages are maintained.
*/
struct Link* link;
assert(s);
link = (struct Link*) malloc(sizeof(struct Link));
assert(link);
link->val = val;
link->next = s->head;
s->head = link;
s->size++;
}
TYPE popStack(struct Stack* s)
{
/* Description:
* Function accesses the value on the top of the Stack, frees the Single-Link, and returns the value.
*
* Parameters:
* [struct] (Stack*) s: A pointer to the Stack.
*
* Returns:
* [TYPE] returnVal: The value on the top of the Stack.
*
* Pre-conditions:
* - Pointer 's' points to a proper memory location.
* - The Stack was initialized correctly.
*
* Post-conditions:
* - The Stack's top element is unlinked and freed; 'head' is moved to next element.
* - The Stack's top element value is returned, or NULL if Stack is empty.
*/
struct Link* pop;
TYPE returnVal = NULL;
assert(s);
if ( isEmptyStack(s) )
{
return returnVal;
}
else
{
pop = s->head;
returnVal = (TYPE) pop->val;
s->head = pop->next;
free(pop);
s->size--;
return returnVal;
}
}
TYPE peepStack(struct Stack* s)
{
/* Description:
* Function accesses the value on the top of the Stack and returns the value.
*
* Parameters:
* [struct] (Stack*) s : A pointer to the Stack.
*
* Returns:
* [TYPE] returnVal: The value on the top of the Stack.
*
* Pre-conditions:
* - Pointer 's' points to a proper memory location.
* - The Stack was initialized correctly.
*
* Post-conditions:
* - The Stack's top element value is returned, or NULL if Stack is empty.
*/
TYPE returnVal = NULL;
assert(s);
if ( isEmptyStack(s) )
{
return returnVal;
}
else
{
returnVal = s->head->val;
return returnVal;
}
}
/* --------------------------------------------
*
* END STACK
*
* --------------------------------------------
*/<file_sep>/Hashtable/interfaces.h
#include "structs.h"
#ifndef __INTERFACES_H
#define __INTERFACES_H
/* HASHTABLE */
void initTable(hashTable* ht, int tableSize);
void freeTable(hashTable* ht);
void freeTableBody(hashLink** htb, int oldSize);
void insertTable(hashTable* ht, KeyType key, ValueType val);
void removeKey(hashTable* ht, KeyType key);
void printTable(hashTable* ht);
int containsKey(hashTable* ht, KeyType key);
int isEmptyTable(hashTable* ht);
int sizeTable(hashTable* ht);
int emptyBuckets(hashTable* ht);
float tableLoad(hashTable* ht);
struct hashLink* findLink(hashTable* ht, KeyType key);
void _resizeTable(hashTable* ht);
/* END HASHTABLE */
#endif
|
abe7f055d2d0f34e87ae857f40a49bb75508af18
|
[
"Markdown",
"C",
"Makefile"
] | 14 |
C
|
seanthayer/Exploring-C
|
626609f370eb2a9832d10cea095783b49c1ea148
|
fce2960e579a3afaaf3a15e3fa910aafb53e2052
|
refs/heads/master
|
<repo_name>eagletmt/revision_plate-golang<file_sep>/revision_plate_test.go
package revision_plate
import (
"net/http"
"net/http/httptest"
"os"
"testing"
)
func runServer(t *testing.T, h http.Handler, method string) *httptest.ResponseRecorder {
w := httptest.NewRecorder()
req, err := http.NewRequest(method, "/site/sha", nil)
if err != nil {
t.Fatal(err)
}
h.ServeHTTP(w, req)
return w
}
func createRevisionFile(t *testing.T, path string, rev string) {
file, err := os.Create(path)
if err != nil {
t.Fatal(err)
}
defer file.Close()
_, err = file.WriteString(rev)
if err != nil {
t.Fatal(err)
}
}
func removeRevisionFile(t *testing.T, path string) {
err := os.Remove(path)
if err != nil && !os.IsNotExist(err) {
t.Fatal(err)
}
}
func TestGetRevision(t *testing.T) {
createRevisionFile(t, "REVISION", "deadbeef")
defer removeRevisionFile(t, "REVISION")
recorder := runServer(t, New(""), "GET")
if recorder.Code != 200 {
t.Errorf("Expected status code 200, but got %d", recorder.Code)
}
body := recorder.Body.String()
if body != "deadbeef" {
t.Errorf("Expected response body 'deadbeef', but got %s", body)
}
}
func TestHeadRevision(t *testing.T) {
createRevisionFile(t, "REVISION", "deadbeef")
defer removeRevisionFile(t, "REVISION")
recorder := runServer(t, New(""), "HEAD")
if recorder.Code != 200 {
t.Errorf("Expected status code 200, but got %d", recorder.Code)
}
body := recorder.Body.String()
if body != "" {
t.Errorf("Expected empty response body, but got %s", body)
}
}
func TestGetRevisionWithoutFile(t *testing.T) {
recorder := runServer(t, New(""), "GET")
if recorder.Code != 404 {
t.Errorf("Expected status code 404, but got %d", recorder.Code)
}
body := recorder.Body.String()
if body != "REVISION_FILE_NOT_FOUND" {
t.Errorf("Expected response body 'REVISION_FILE_NOT_FOUND', but got %s", body)
}
}
func TestHeadRevisionWithRemovedFile(t *testing.T) {
createRevisionFile(t, "REVISION", "deadbeef")
defer removeRevisionFile(t, "REVISION")
h := New("")
r1 := runServer(t, h, "HEAD")
if r1.Code != 200 {
t.Errorf("Expected status code 200, but got %d", r1.Code)
}
b1 := r1.Body.String()
if b1 != "" {
t.Errorf("Expected empty esponse body, but got %s", b1)
}
removeRevisionFile(t, "REVISION")
r2 := runServer(t, h, "HEAD")
if r2.Code != 404 {
t.Errorf("Expected status code 404, but got %d", r2.Code)
}
b2 := r2.Body.String()
if b2 != "" {
t.Errorf("Expected empty esponse body, but got %s", b2)
}
}
func TestHeadRevisionWithoutFile(t *testing.T) {
recorder := runServer(t, New(""), "HEAD")
if recorder.Code != 404 {
t.Errorf("Expected status code 404, but got %d", recorder.Code)
}
body := recorder.Body.String()
if body != "" {
t.Errorf("Expected empty response body, but got %s", body)
}
}
func TestGetRevisionWithRemovedFile(t *testing.T) {
createRevisionFile(t, "REVISION", "deadbeef")
defer removeRevisionFile(t, "REVISION")
h := New("")
r1 := runServer(t, h, "GET")
if r1.Code != 200 {
t.Errorf("Expected status code 200, but got %d", r1.Code)
}
b1 := r1.Body.String()
if b1 != "deadbeef" {
t.Errorf("Expected esponse body 'deadbeef', but got %s", b1)
}
removeRevisionFile(t, "REVISION")
r2 := runServer(t, h, "GET")
if r2.Code != 404 {
t.Errorf("Expected status code 404, but got %d", r2.Code)
}
b2 := r2.Body.String()
if b2 != "REVISION_FILE_REMOVED" {
t.Errorf("Expected esponse body 'REVISION_FILE_REMOVED', but got %s", b2)
}
}
func TestGetRevisionWithCustomPath(t *testing.T) {
createRevisionFile(t, "site-sha", "cafebabe")
defer removeRevisionFile(t, "site-sha")
r1 := runServer(t, New(""), "GET")
if r1.Code != 404 {
t.Errorf("Expected status code 404, but got %d", r1.Code)
}
r2 := runServer(t, New("site-sha"), "GET")
if r2.Code != 200 {
t.Errorf("Expected status code 200, but got %d", r2.Code)
}
body := r2.Body.String()
if body != "cafebabe" {
t.Errorf("Expected response body 'cafebabe', but got %s", body)
}
}
func TestGetRevisionWithUpdatedFile(t *testing.T) {
createRevisionFile(t, "REVISION", "deadbeef")
defer removeRevisionFile(t, "REVISION")
h := New("")
r1 := runServer(t, h, "GET")
if r1.Code != 200 {
t.Errorf("Expected status code 200, but got %d", r1.Code)
}
b1 := r1.Body.String()
if b1 != "deadbeef" {
t.Errorf("Expected response body 'deadbeef', but got %s", b1)
}
createRevisionFile(t, "REVISION", "cafebabe")
r2 := runServer(t, h, "GET")
if r2.Code != 200 {
t.Errorf("Expected status code 200, but got %d", r2.Code)
}
b2 := r2.Body.String()
if b2 != "deadbeef" {
t.Errorf("Expected response body 'deadbeef', but got %s", b2)
}
}
func TestGetRevisionWithFileCreatedAfterInitialize(t *testing.T) {
h := New("")
r1 := runServer(t, h, "GET")
if r1.Code != 404 {
t.Errorf("Expected status code 404, but got %d", r1.Code)
}
b1 := r1.Body.String()
if b1 != "REVISION_FILE_NOT_FOUND" {
t.Errorf("Expected response body 'REVISION_FILE_NOT_FOUND', but got %s", b1)
}
createRevisionFile(t, "REVISION", "deadbeef")
defer removeRevisionFile(t, "REVISION")
r2 := runServer(t, h, "GET")
if r2.Code != 404 {
t.Errorf("Expected status code 404, but got %d", r2.Code)
}
b2 := r2.Body.String()
if b2 != "REVISION_FILE_NOT_FOUND" {
t.Errorf("Expected response body 'REVISION_FILE_NOT_FOUND', but got %s", b2)
}
}
<file_sep>/README.md
# revision_plate-golang
[](https://travis-ci.org/eagletmt/revision_plate-golang)
Serve application's REVISION.
Golang version of [revision_plate](https://github.com/sorah/revision_plate) .
## Usage
```go
import (
"net/http"
"github.com/eagletmt/revision_plate-golang"
)
func NewHandler() http.Handler {
mux := http.NewServeMux()
mux.Handle("/site/sha", revision_plate.New("REVISION"))
return mux
}
```
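For a quick end-to-end check, the handler can also be mounted in a tiny standalone server. The sketch below is illustrative only (the port and the `REVISION` file name are assumptions, not part of this package):

```go
package main

import (
	"log"
	"net/http"

	"github.com/eagletmt/revision_plate-golang"
)

func main() {
	mux := http.NewServeMux()
	// GET returns the contents of ./REVISION, HEAD returns an empty body,
	// and a missing or removed file yields 404 with an explanatory message.
	mux.Handle("/site/sha", revision_plate.New("REVISION"))
	log.Fatal(http.ListenAndServe(":8080", mux))
}
```

With a `REVISION` file in the working directory, `GET /site/sha` responds 200 with its contents, matching the behaviour covered by the tests.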
<file_sep>/revision_plate.go
package revision_plate
import (
"errors"
"io/ioutil"
"net/http"
"os"
)
type Handler struct {
filePath string
revision []byte
}
func New(filePath string) *Handler {
h := &Handler{filePath: filePath}
h.readCurrentRevision()
return h
}
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
switch r.Method {
case "GET", "HEAD":
rev, err := h.getCurrentRevision()
if err == nil {
w.WriteHeader(http.StatusOK)
if r.Method == "GET" {
w.Write(rev)
}
} else {
w.WriteHeader(http.StatusNotFound)
if r.Method == "GET" {
w.Write([]byte(err.Error()))
}
}
}
}
func (h *Handler) readCurrentRevision() {
file, err := os.Open(h.revisionFilePath())
if err != nil {
return
}
defer file.Close()
revision, err := ioutil.ReadAll(file)
if err != nil {
return
}
h.revision = revision
}
func (h *Handler) getCurrentRevision() ([]byte, error) {
if h.revision == nil {
return nil, errors.New("REVISION_FILE_NOT_FOUND")
} else {
_, err := os.Stat(h.revisionFilePath())
if err == nil {
return h.revision, nil
} else {
return nil, errors.New("REVISION_FILE_REMOVED")
}
}
}
func (h *Handler) revisionFilePath() string {
if h.filePath == "" {
return "REVISION"
} else {
return h.filePath
}
}
|
cc9e1e630f81af6596a00b8b7c40eb127c5558c2
|
[
"Markdown",
"Go"
] | 3 |
Go
|
eagletmt/revision_plate-golang
|
cbf62d1067bfa40ac32d7d7f105551383ccae55a
|
1beee66e66c5ef65bcbddbb5e588703c17efdc0a
|
refs/heads/master
|
<repo_name>yangjae/control-jquery<file_sep>/Gruntfile.js
module.exports = function( grunt ) {
'use strict';
grunt.loadNpmTasks('grunt-wiredep');
/**
Node package info
*/
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
wiredep: {
app: {
src: 'index.html'
}
}
});
/**
Start the web server on port 8080
*/
grunt.registerTask('server', 'Start express server', function() {
require('./server.js').listen(8080, function () {
grunt.log.writeln('Web server running at http://localhost:8080.');
}).on('close', this.async());
});
/**
Set the server task as our default.
*/
grunt.registerTask('default', ['server']);
};
<file_sep>/README.markdown
## Nest Control sample app with jQuery
A simple thermostat control application that demonstrates how to access the Nest API from a web
application.
## Requirements
The development environment is a simple Node.js server managed by Yeoman. If you don't have them already,
install these tools first:
* [Node](http://nodejs.org/download/)
* [Yeoman](http://yeoman.io/gettingstarted.html)
## Running
To install required Bower components and Node modules, simply type:
$ bower install
$ npm install
Next you will need your client ID and client secret from developer.nest.com/clients set as environment variables:
$ export NEST_ID=<CLIENT ID>
$ export NEST_SECRET=<CLIENT SECRET>
And finally, use Grunt to start the server:
$ grunt
Then open http://localhost:8080 in your browser and you will be walked through the authentication process.
## License
LCDBOLD
Copyright © 1999 by <NAME>. See /app/fonts/LCDBOLD/readme.txt
control-jquery
Copyright 2014 Nest Labs Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
adb8cd5a30c07b43b1d3dac9a69aa3a3270af210
|
[
"JavaScript",
"Markdown"
] | 2 |
JavaScript
|
yangjae/control-jquery
|
5bb44bdc697557c47a7593a63fdab55145f1a0fd
|
821b2f330cb2efc657bfc1c7acbffc8f1573618f
|
refs/heads/master
|
<repo_name>Copterdoctor/chatty-app<file_sep>/src/Navbar.jsx
import React, { Component } from 'react';
import RetroHitCounter from 'react-retro-hit-counter';
const YourComponent = (props) => (
<RetroHitCounter
hits={props.userCount}
/* The following are all default values: */
withBorder={true}
withGlow={false}
minLength={4}
size={40}
padding={4}
digitSpacing={3}
segmentThickness={4}
segmentSpacing={0.5}
segmentActiveColor="#0"
segmentInactiveColor="none"
backgroundColor="none"
borderThickness={0}
glowStrength={0.5}
/>
);
class Navbar extends Component {
render() {
return (
<nav className="navbar">
<a href="/" className="navbar-brand">Chatty</a>
<span className="user-counter">Total User Count
<YourComponent userCount={this.props.userCount} />
</span>
</nav>
)
}
}
export default Navbar;<file_sep>/README.md
Chatty App
=====================
Chatty App is a minimalist real-time chat application concept using WebSockets.
** App is currently in development and the server would have to be redone for an actual production deployment **
To get started
git clone https://github.com/Copterdoctor/chatty-app.git
* For those who want a little extra flair to their experience, you may be interested in checking out the matrix branch
```$ git checkout matrix ```
```js
$ cd chatty-app
$ npm install
$ npm run build
$ npm start
```
Open a second terminal window to start the sockets server
```js
$ cd /chatty-app/chatty-server
$ npm install
$ npm start
```
Go to http://localhost:3000 and enjoy!
You can open multiple windows on localhost to simulate other users, or, if your computer's firewall allows it, other users should be able to connect via your computer's local LAN address at port 3000.
### Dependencies
dependencies:
* randomcolor
* react
* react-dom
* react-retro-hit-counter
* express
* ws
* uuid
devDependencies:
* babel-core
* babel-loader
* babel-preset-es2015
* babel-preset-react
* babel-preset-stage-0
* css-loader
* node-sass
* sass-loader
* sockjs-client
* style-loader
* webpack-cli
* webpack
* webpack-dev-server
### Screenshots

<file_sep>/chatty-server/server.js
// server.js
const express = require('express');
const SocketServer = require('ws').Server;
const uuidv4 = require('uuid/v4');
const randomColor = require('randomcolor');
const PORT = 3001;
const server = express()
.use(express.static('../dist'))
.use(express.static('public'))
.listen(PORT, '0.0.0.0', () => console.log(`Listening on ${PORT}`));
const wss = new SocketServer({ server });
const re = /(http)?s?:?(\/\/[^"']*\.(?:png|jpg|jpeg|gif|png|svg))/i;
function incoming(incomingMessage) {
let message = JSON.parse(incomingMessage);
message.id = uuidv4();
if (message.content.match(re)) {
const imgUrl = re.exec(message.content);
const imgObj = {
id: message.id,
type: 'image',
username: message.username,
color: message.color,
url: imgUrl[0]
}
message = JSON.stringify(imgObj);
wss.clients.forEach((client) => {
try {
client.send(message);
} catch (error) {
console.log(`Message failed to send.................${error}`);
}
})
} else {
message = JSON.stringify(message);
wss.clients.forEach((client) => {
try {
client.send(message);
} catch (error) {
console.log(`Message failed to send.................${error}`);
}
})
}
}
function newUser(ws) {
const color = randomColor({
luminosity: 'bright',
format: 'rgb' // e.g. 'rgb(225,200,20)'
});
ws.send(JSON.stringify({ type: 'newConnection', color: color }));
}
let numOfUsers = {
type: 'userCountUpdate',
count: 0
}
function userCountChange(num) {
numOfUsers.count += num;
let message = JSON.stringify(numOfUsers)
wss.clients.forEach((client) => {
try {
client.send(message);
} catch (error) {
console.log(`Message failed to send.................\n${error}`);
}
})
}
wss.on('connection', (ws) => {
userCountChange(1);
newUser(ws);
ws.on('message', incoming);
try {
ws.send(JSON.stringify(numOfUsers));
} catch (error) {
console.log(`Unable to send userNumber to new connection.......\n${error}`);
}
// Set up a callback for when a client closes the socket.
ws.on('close', () => {
userCountChange(-1)
console.log(`User Count = ${numOfUsers.count}`);
});
});
|
16b27f308aeb024e895de908ffc05db3ff323261
|
[
"JavaScript",
"Markdown"
] | 3 |
JavaScript
|
Copterdoctor/chatty-app
|
03123772cc7feab7299995495c3f79f8502fef73
|
290484b19b47f47c13c7ab72363c5b90f8876f0a
|
refs/heads/master
|
<file_sep>package com.hawk.book.data.dto;
import lombok.Data;
/**
* BookDto
*
* @author wangshuguang
* @since 2018/02/19
*/
@Data
public class BookDto {
/**
* Book id
*/
private int id;
/**
* Book name
*/
private String bookName;
/**
* Nickname of the user who uploaded the book
*/
private String nickname;
/**
* Cover image URL
*/
private String coverUrl;
/**
* PDF file URL
*/
private String pdfUrl;
/**
* Download count
*/
private int downloadCount;
/**
* Average score, initially 0, maximum 5
*/
private double score = 0.0;
/**
* Book upload time
*/
private String createTime;
}
|
92515af5f409041d45796b6decf6eba396c838c2
|
[
"Java"
] | 1 |
Java
|
HAWK97/BookSystem
|
af74fe25c4b5ef51038aafe0ac18e147acd98b01
|
06817376f264b1339ba62a3ea30a3e8bdd2d43c3
|
refs/heads/master
|
<file_sep>Install selenium for python3, aria2 for downloading the images.
Set aria2c path in your environment variables :)
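To confirm that aria2c is actually reachable from Python, a minimal check (standard library only; purely illustrative) is:

    import shutil
    print(shutil.which("aria2c") or "aria2c not found on PATH")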
<file_sep>
import os
import time
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor, as_completed
# driver.set_window_size(1120, 550)
s = input("type your search term and hit enter: ")
chrome_options = Options()
# chrome_options.add_argument(f"--user-data-dir=./chrome-data")
chrome_options.add_experimental_option("useAutomationExtension", False)
# chrome_options.headless = True
service = Service('D:\\libtools\\chromedriver')
driver = webdriver.Chrome(service=service, options=chrome_options)
driver.get("https://unsplash.com/s/photos/"+s)
driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
sleep(20)
photos = driver.find_elements(by=By.CLASS_NAME, value='NP4SP')
print(len(photos), photos[0].tag_name)
images = [one_photo.get_attribute('href') + "/download?force=true" for one_photo in photos]
print("Found {} images".format(len(images)))
os.makedirs("photos/" + s)
os.chdir("photos/" + s)
# os.chdir(dirr)
def download_image(count, x):
os.system("D:\\libtools\\aria2c -o {:04d} {}".format(count, x))
print("Downloaded {}".format(x))
with ThreadPoolExecutor() as executor:
futures = [executor.submit(download_image, count, x) for count, x in enumerate(images)]
for f in as_completed(futures):
print(f.result())
'''
for count, x in enumerate(images):
os.system("aria2c -o {:04d} {}".format(count, x))
print("Downloaded {}".format(x))
'''
files = os.listdir(".")
for f in files:
if not f.endswith(".jpg"):
os.rename(f, f + ".jpg")
print(f"\nFiles downloaded into photos/{s}")
<file_sep>
import os
from time import sleep
from selenium import webdriver
# driver.set_window_size(1120, 550)
s = input()
dirr = input()
option = webdriver.ChromeOptions()
chrome_prefs = {}
option.experimental_options["prefs"] = chrome_prefs
chrome_prefs["profile.default_content_settings"] = {"images": 2}
chrome_prefs["profile.managed_default_content_settings"] = {"images": 2}
option.headless = True
############ change the below path to the chromedriver in your PC ####################
driver = webdriver.Chrome("D:\libtools\chromedriver\chromedriver.exe", options=option)
driver.get("https://unsplash.com/s/photos/"+s)
driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
sleep(20)
photos = driver.find_elements_by_class_name('_2Mc8_')
images = [one_photo.get_attribute('href') + "/download?force=true" for one_photo in photos]
print("Found {} images".format(len(images)))
# os.makedirs("photos/" + s)
# os.chdir("photos/" + s)
os.chdir(dirr)
for count, x in enumerate(images):
os.system("aria2c -o {:04d} {}".format(count, x))
print("Downloaded {}".format(x))
files = os.listdir(".")
for f in files:
if not f.endswith(".jpg"):
os.rename(f, f + ".jpg")
print("\nFiles downloaded into {}".format(dir))
|
c0c11ed80a62d455cf2b68203bfaa15e217860da
|
[
"Markdown",
"Python"
] | 3 |
Markdown
|
rahbal/unsplash-script
|
5fec00d3438e4eab7f7ee09b30f5b3776682771e
|
a86592a545e1c2a9b62905ee844b2f89285d209a
|
refs/heads/master
|
<repo_name>Vampire010/Selenium_DDT_TESTNG<file_sep>/src/Screenshoot/Screenshoot.java
package Screenshoot;
import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
public class Screenshoot {
public static void main(String[] args) throws IOException
{
System.setProperty("webdriver.gecko.driver", "./Exe/geckodriver.exe");
WebDriver driver=new FirefoxDriver();
// Maximize the window
driver.manage().window().maximize();
// Pass the url
driver.get("http://www.google.com");
TakesScreenshot ts=(TakesScreenshot)driver;
// Take screenshot and store as a file format
File src= ts.getScreenshotAs(OutputType.FILE);
// now copy the screenshot to the desired location using the copyFile method
FileUtils.copyFile(src, new File("./Screenshot_Images/screeshot1.bmp"));
}
}
|
c91c85ca395687586c960d70d68329fb15ceb3af
|
[
"Java"
] | 1 |
Java
|
Vampire010/Selenium_DDT_TESTNG
|
5b5406d41a118007a60ed0895a9a47d2cbc787c3
|
38653595528e635ea742d3d2168631d93750f19a
|
refs/heads/main
|
<file_sep>/*
** listener.c -- a datagram sockets "server" demo
*/
/*
* Copyright (c) 2019 Flight Dynamics and Control Lab
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
#include "common.hh"
#include <opencv2/opencv.hpp>
#include <opencv2/aruco.hpp>
#include <pthread.h>
#include <time.h>
#include <math.h>
using namespace std;
using namespace tinyxml2;
namespace {
const char* about = "Pose estimation of ArUco marker images";
const char* keys =
"{d |16 | dictionary: DICT_4X4_50=0, DICT_4X4_100=1, "
"DICT_4X4_250=2, DICT_4X4_1000=3, DICT_5X5_50=4, DICT_5X5_100=5, "
"DICT_5X5_250=6, DICT_5X5_1000=7, DICT_6X6_50=8, DICT_6X6_100=9, "
"DICT_6X6_250=10, DICT_6X6_1000=11, DICT_7X7_50=12, DICT_7X7_100=13, "
"DICT_7X7_250=14, DICT_7X7_1000=15, DICT_ARUCO_ORIGINAL = 16}"
"{h |false | Print help }"
"{l | | Actual marker length in meter }"
"{v |<none>| Custom video source, otherwise '0' }"
"{h |false | Print help }"
"{l | | Actual marker length in meter }"
"{v |<none>| Custom video source, otherwise '0' }"
;
}
#define MYPORT "4242" // the port users will be connecting to
#define MAXBUFLEN 256
#define SAMPLINGTIME 100000 // in usec
#define MAXSINGNALLENGTH 512
// get sockaddr, IPv4 or IPv6:
char buf[MAXDATASIZE];
//---- prototypes ----//
int comRobot(int id,string ip,string port,int instruction);//used to send and receive instructions and data for every robot
void tokenize(const string s, char c,vector<string>& v);//splits the string
void concatenateChar(char c, char *word);//not used for now
void operationSend();//allows the user to choose an instruction to send to the robot
void SetupRobots();//copies the information from the xml file into the Robot class.
void error(const char *msg)
{
perror(msg);
exit(1);
}
void *get_in_addr(struct sockaddr *sa)
{
if (sa->sa_family == AF_INET) {
return &(((struct sockaddr_in*)sa)->sin_addr);
}
return &(((struct sockaddr_in6*)sa)->sin6_addr);
}
void SetupRobots();
enum {r1, r2, r3, r4,r5};
//definition of robots
Robot robot1,robot2,robot3,robot4;//the Robot class instances for the different robots.
struct record_data//struct for sharing information between threads
{
std::ostringstream vector_to_marker;
std::vector<int> ids;
std::vector<std::vector<cv::Point2f> > corners;
std::vector<cv::Vec3d> rvecs, tvecs;
};
struct record_data data;//shared data between threads
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
void *dataAruco(void *arg){//thread function
int id;
string ip,port;
robot1.SetupConection(id,ip,port);//for now only use 1 robot for communication
//in this case the experiment needs the velocity
struct timeval tval_before, tval_after, tval_sample;
tval_sample.tv_sec=0;
tval_sample.tv_usec=0;
int n=0;
double fs=1/0.1;
double f0=fs/30;
double w0=2*M_PI*f0;
double A=30;
double vel,td,auxVel=0;
double w;
char del=',';
char wc[sizeof(vel)];
while(n<MAXSINGNALLENGTH){
td=(double)n*0.1;
gettimeofday(&tval_before,NULL);
vel=A*w0*cos(w0*td);
if(vel>=0){
vel=A*w0;
}
else{
vel=-A*w0;
}
w=vel/robot1.radWheel;//the arduino needs the angular velocity (rad/s)
cout<<"vel:"<<vel<<endl;
cout<<"w:"<<w<<endl;
comRobot(id,ip,port,OP_VEL_ROBOT);//request for the velocity of the robot
snprintf(operation_send.data,sizeof(w),"%2.4f",w);
snprintf(wc,sizeof(w),"%2.4f",w);
strcat(operation_send.data,&del);
strcat(operation_send.data,wc);
if(vel != auxVel){
comRobot(id,ip,port,OP_MOVE_WHEEL);
auxVel=vel;
}
n++;
gettimeofday(&tval_after,NULL);
timersub(&tval_after,&tval_before,&tval_sample);
if(tval_sample.tv_usec != SAMPLINGTIME)
{
while(tval_sample.tv_usec<SAMPLINGTIME){
gettimeofday(&tval_after,NULL);
timersub(&tval_after,&tval_before,&tval_sample);
}
//usleep(SAMPLINGTIME-tval_sample.tv_usec);
}
else if( tval_sample.tv_usec<0 || tval_sample.tv_usec>SAMPLINGTIME)
{
error("error short sample time");
}
}
return NULL;
}
int main(int argc,char **argv)
{
pthread_t detectAruco;
SetupRobots();
cv::CommandLineParser parser(argc, argv, keys);
parser.about(about);
if (argc < 2) {
parser.printMessage();
return 1;
}
if (parser.get<bool>("h")) {
parser.printMessage();
return 0;
}
int dictionaryId = parser.get<int>("d");
float marker_length_m = parser.get<float>("l");
int wait_time = 10;
if (marker_length_m<= 0) {
std::cerr << "marker length must be a positive value in meter"
<< std::endl;
return 1;
}
cv::String videoInput = "0";//select the camera input
cv::VideoCapture in_video;
if (parser.has("v")) {
videoInput = parser.get<cv::String>("v");
if (videoInput.empty()) {
parser.printMessage();
return 1;
}
char* end = nullptr;
int source = static_cast<int>(std::strtol(videoInput.c_str(), &end, \
10));
if (!end || end == videoInput.c_str()) {
in_video.open(videoInput); // url
in_video.set(cv::CAP_PROP_FOURCC, cv::VideoWriter::fourcc('M', 'J', 'P', 'G'));
in_video.set(cv::CAP_PROP_FPS,30);
} else {
in_video.open(source); // id
}
} else {
in_video.open(0);
}
if (!parser.check()) {
parser.printErrors();
return 1;
}
if (!in_video.isOpened()) {
std::cerr << "failed to open video input: " << videoInput << std::endl;
return 1;
}
cv::Mat image, image_copy;
cv::Mat camera_matrix, dist_coeffs;
std::ostringstream vector_to_marker;
cv::Ptr<cv::aruco::Dictionary> dictionary =
cv::aruco::getPredefinedDictionary( \
cv::aruco::PREDEFINED_DICTIONARY_NAME(dictionaryId));
cv::FileStorage fs("calibration_params.yml", cv::FileStorage::READ);
fs["camera_matrix"] >> camera_matrix;
fs["distortion_coefficients"] >> dist_coeffs;
std::cout << "camera_matrix\n" << camera_matrix << std::endl;
std::cout << "\ndist coeffs\n" << dist_coeffs << std::endl;
//pthread_create(&record,NULL,recordAruco,(void*)&data);
pthread_create(&detectAruco,NULL,dataAruco,NULL);
while (in_video.grab())
{
in_video.retrieve(image);
image.copyTo(image_copy);
//std::vector<int> ids;
//std::vector<std::vector<cv::Point2f> > corners;
cv::aruco::detectMarkers(image, dictionary, data.corners, data.ids);
// if at least one marker detected
if (data.ids.size() > 0)
{
cv::aruco::drawDetectedMarkers(image_copy, data.corners, data.ids);
//std::vector<cv::Vec3d> rvecs, tvecs;
cv::aruco::estimatePoseSingleMarkers(data.corners, marker_length_m,
camera_matrix, dist_coeffs, data.rvecs, data.tvecs);
/*std::cout << "Translation: " << tvecs[0]
<< "\tRotation: " << rvecs[0]
<< std::endl;
*/
// Draw axis for each marker
for(int i=0; i < data.ids.size(); i++)
{
cv::aruco::drawAxis(image_copy, camera_matrix, dist_coeffs,
data.rvecs[i], data.tvecs[i], 0.1);
// This section is going to print the data for all the detected
// markers. If you have more than a single marker, it is
// recommended to change the below section so that either you
// only print the data for a specific marker, or you print the
// data for each marker separately.
vector_to_marker.str(std::string());
vector_to_marker << std::setprecision(4)
<< "x: " << std::setw(8) << data.tvecs[0](0);
cv::putText(image_copy, vector_to_marker.str(),
cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 0.6,
cv::Scalar(0, 252, 124), 1, CV_AVX);
vector_to_marker.str(std::string());
vector_to_marker << std::setprecision(4)
<< "y: " << std::setw(8) << data.tvecs[0](1);
cv::putText(image_copy, vector_to_marker.str(),
cv::Point(10, 50), cv::FONT_HERSHEY_SIMPLEX, 0.6,
cv::Scalar(0, 252, 124), 1, CV_AVX);
vector_to_marker.str(std::string());
vector_to_marker << std::setprecision(4)
<< "z: " << std::setw(8) << data.tvecs[0](2);
cv::putText(image_copy, vector_to_marker.str(),
cv::Point(10, 70), cv::FONT_HERSHEY_SIMPLEX, 0.6,
cv::Scalar(0, 252, 124), 1, CV_AVX);
}
}
imshow("Pose estimation", image_copy);
char key = (char)cv::waitKey(wait_time);
if (key == 27)
break;
}
in_video.release();
pthread_exit(NULL);
return 0;
}
void SetupRobots()
{
// Read the sample.xml file
XMLDocument Robotdoc;
Robotdoc.LoadFile( "robots_info.xml" );
XMLNode* Robotarium =Robotdoc.FirstChild();
XMLElement *robot=Robotarium->FirstChildElement("robot");
int i=0;
while(robot !=NULL)
{
XMLElement *robotChild=robot->FirstChildElement("ID");
int ID;
robotChild->QueryIntText(&ID);
cout<<"ID:"<<ID<<endl;
robotChild=robot->FirstChildElement("IP");
const char* ip=robotChild->GetText();
string ss=ip;
cout<<"ip:"<<ip<<endl;
robotChild=robot->FirstChildElement("PORT");
const char* port=robotChild->GetText();
string p=port;
cout<<"puerto:"<<p<<endl;
robot=robot->NextSiblingElement("robot");
switch (i)
{
case 0:
robot1.SetupRobotData(ID,ss,p);
break;
case 1:
robot2.SetupRobotData(ID,ss,p);
break;
case 2:
robot3.SetupRobotData(ID,ss,p);
break;
case 3:
robot4.SetupRobotData(ID,ss,p);
break;
}
i++;
}
}
int comRobot(int id,string ip,string port,int instruction){
//create the socket and establish the communication
int sockfd;
struct addrinfo hints, *servinfo, *p;
int rv;
int numbytes;
struct sockaddr_storage robot_addr;
cout<<"Robot port:"<<port<<endl;
socklen_t addr_len = sizeof robot_addr;
memset(&hints, 0, sizeof hints);
hints.ai_family = AF_INET; // set to AF_INET to force IPv4
hints.ai_socktype = SOCK_DGRAM;
//hints.ai_flags = IPPROTO_UDP;
const char *ipRobot=ip.c_str();
const char *portRobot=port.c_str();
if ((rv = getaddrinfo(ipRobot, portRobot, &hints, &servinfo)) != 0) {
fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(rv));
return 1;
}
// loop through all the results and make a socket
for(p = servinfo; p != NULL; p = p->ai_next) {
if ((sockfd = socket(p->ai_family, p->ai_socktype,
p->ai_protocol)) == -1) {
perror("talker: socket");
continue;
}
break;
}
int enable = 1;
if (setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &enable, sizeof(int)) < 0)
error("setsockopt(SO_REUSEADDR) failed");
if (p == NULL) {
fprintf(stderr, "talker: failed to create socket\n");
return 2;
}
memset (buf, '\0', MAXDATASIZE); /* Zero the buffer initially */
//the operation to perform is indicated here
operation_send.id=id;//assign the id of robot1
string data;
string delimiter=":";
/*cout<<"choose operation"<<endl;//the operation and the corresponding data must be entered
operationSend();//choose the operation to send
if(operation_send.op != OP_SALUDO && operation_send.op != OP_VEL_ROBOT){
//enter the information corresponding to the chosen operation.
cout<<"enter data: ";
cin.ignore();
cin>>operation_send.data;
operation_send.len = strlen (operation_send.data);
}*/
operation_send.len = strlen (operation_send.data);
operation_send.op=instruction;
if ((numbytes = sendto(sockfd,(char *) &operation_send, operation_send.len+HEADER_LEN, 0,p->ai_addr, p->ai_addrlen)) == -1) {
perror("talker: sendto");
exit(1);
}
//cout<<"mensaje enviado"<<endl;
if((numbytes=recvfrom(sockfd,buf,MAXBUFLEN-1,0,(struct sockaddr*)&robot_addr, &addr_len))==-1){
}
operation_recv=( struct appdata*)&buf;
if((numbytes< HEADER_LEN) || (numbytes != operation_recv->len+HEADER_LEN) ){
cout<<"(servidor) unidad de datos incompleta\n";
}
else{
/* cout<<"(servidor) id "<<operation_recv->id;
cout<<" operacion solicitada [op 0x]"<<operation_recv->op;
cout<<" contenido "<<operation_recv->data<<endl;*/
}
// relaiza operacion solicitada por el cliente
switch (operation_recv->op){
case OP_SALUDO:
// cout<<" contenido "<<operation_recv->data<<endl;
break;
case OP_MESSAGE_RECIVE:
// cout<<" contenido "<<operation_recv->data<<endl;
break;
case OP_VEL_ROBOT:
data=operation_recv->data;
char del =',';
vector<string> speed;
tokenize(data,del,speed);
cout<<"velocidad rueda derecha: "<<speed[0]<<endl;
cout<<"velocidad rueda izquierda: "<<speed[1]<<endl;
break;
memset (buf, '\0', MAXDATASIZE);
}
freeaddrinfo(servinfo);
close(sockfd);
return 0;
}
void tokenize(const string s, char c,vector<string>& v)//used to split the input string
{
string::size_type i = 0;
string::size_type j = s.find(c);
while (j != string::npos)
{
v.push_back(s.substr(i, j-i));
i = ++j;
j = s.find(c, j);
if (j == string::npos)
v.push_back(s.substr(i, s.length()));
}
}
void concatenateChar(char c, char *word){
char cadenaTemporal[2];
cadenaTemporal[0] = c;
cadenaTemporal[1] = '\0';
strcat(word, cadenaTemporal);
}
void operationSend(){
int value;
cout<<"(0) SALUDO"<<endl;
cout<<"(1) MOVE_WHEEL"<<endl;
cout<<"(2) STOP_WHEEL"<<endl;
cout<<"(3) MESSAGE"<<endl;
cout<<"(4) TELEMETRY"<<endl;
cout<<"(5) VEL_wheel"<<endl;
cin>>value;
switch (value)
{
case 0:
operation_send.op=OP_SALUDO;
strcpy(operation_send.data,"Saludo");
operation_send.len = strlen (operation_send.data);
break;
case 1:
operation_send.op=OP_MOVE_WHEEL;
break;
case 2:
operation_send.op=OP_STOP_WHEEL;
break;
case 3:
break;
case 4:
break;
case 5:
operation_send.op=OP_VEL_ROBOT;
strcpy(operation_send.data,"velocidad");
operation_send.len = strlen (operation_send.data);
break;
}
}
<file_sep>CC=g++
CFLAGS= -I/usr/local/include/opencv4 -L/usr/local/lib -g -Wall -pedantic -g
CLIBS= -lopencv_highgui -lopencv_aruco -lopencv_imgcodecs -lopencv_core -lopencv_videoio -lopencv_calib3d -lopencv_imgproc -lpthread
TARGET= main
all: $(TARGET)
$(TARGET): $(TARGET).cc
$(CC) $(CFLAGS) -o $(TARGET) $(TARGET).cc $(CLIBS)
clean:
$(RM) $(TARGET)
<file_sep># rasp_server-Robot-control
UDP server for controlling differential robots that belong to a robotarium. The aim is to create a sinusoidal movement for every robot commanded by a Raspberry Pi. The Raspberry Pi is connected to a camera (not necessarily a Raspberry Pi camera) mounted on the robot; the camera gets the position of the other robots involved in the project using ArUco marker codes, and the Raspberry Pi takes the velocity of the robot and saves the time and velocity in a txt file.
<file_sep>#include <string>
#include <iostream>
#include <cstring>
using namespace std;
int main(){
char data[256];
double vel=20;
char velc[sizeof(vel)];
char del=',';
snprintf(data,sizeof(vel),"%2.4f",vel);
snprintf(velc,sizeof(vel),"%2.4f",vel);
strcat(data,&del);
strcat(data,velc);
cout<<data<<endl;
return 0;
}<file_sep>#ifndef ROBOT_HH
#define ROBOT_HH
#include "common.hh"
class udp
{
private:
char MYPORT[] = "4242";
int MAXBUFLEN =255;
public:
char buf[MAXBUFLEN];
int comRobot();
voidf tokenize(const string s, char c, vector <string>& v);
void operationSend();
void *get_in_addr(struct sockaddr *sa);
void SetupRobots();
}
#endif<file_sep>#include "robot.hh"
void Robot::SetupRobotData(int a,string b, string c){
ID=a;
ip=b;
port=c;
}
void Robot::SetupConection(int &id,string &IP,string &P){
id=ID;
IP=ip;
P=port;
}<file_sep>#include "udp.hh"
using namespace tinyxml2;
using namespace std;
void udp::SetupRobots(){
// Read the sample.xml file
XMLDocument Robotdoc;
Robotdoc.LoadFile( "robots_info.xml" );
XMLNode* Robotarium =Robotdoc.FirstChild();
XMLElement *robot=Robotarium->FirstChildElement("robot");
int i=0;
while(robot !=NULL)
{
XMLElement *robotChild=robot->FirstChildElement("ID");
int ID;
robotChild->QueryIntText(&ID);
cout<<"ID:"<<ID<<endl;
robotChild=robot->FirstChildElement("IP");
const char* ip=robotChild->GetText();
string ss=ip;
cout<<"ip:"<<ip<<endl;
robotChild=robot->FirstChildElement("PORT");
const char* port=robotChild->GetText();
string p=port;
cout<<"puerto:"<<p<<endl;
robot=robot->NextSiblingElement("robot");
switch (i)
{
case 0:
robot1.SetupRobotData(ID,ss,p);
break;
case 1:
robot2.SetupRobotData(ID,ss,p);
break;
case 2:
robot3.SetupRobotData(ID,ss,p);
break;
case 3:
robot4.SetupRobotData(ID,ss,p);
break;
}
i++;
}
}<file_sep>#ifndef ROBOT_HH
#define ROBOT_HH
#include <string>
/* A Robot class is created to manage the different parameters
of the robots and the instructions to be performed */
using namespace std;
class Robot
{
private:
int ID ;
string ip;
string port;
public:
double radWheel=3.35;
void SetupRobotData(int,string,string);
void SetupConection(int& ,string& ,string&);
//void rightWheel(wheel a);
//void leftWheel(wheel b);
void IMU();
};
class wheel
{
friend class Robot;
private:
int N=20;//encoder resolution
int R=6;//wheel radius
public:
double angularSpeed();
double linearSpeed();
};
#endif<file_sep>//#include <string>
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <sstream>
#include <vector>
#include <sys/wait.h>
#include <atomic>
#include <filesystem>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <netdb.h>
#include <fcntl.h>
#include <pthread.h>
#include <errno.h>
#include "tinyxml2.cpp"
#include "robot.cpp"
const int PORT = 4242;
const int MAXDATASIZE = 256; //number of bytes that can be received
const int HEADER_LEN = sizeof(unsigned short)*3;
const int MAXROBOTS = 4;
const char IP_SERVER[] = "192.168.1.2";
/*//IP addresses of the server and of the different robots
const char arduino1[] ="192.168.2.5";
const char arduino2[] ="192.168.2.6";
const char arduino3[] ="192.168.2.7";
const char arduino4[] ="192.168.2.8";
const char arduino5[] ="192.168.2.9";
*/
struct appdata{
unsigned short id; //identifier
unsigned short op; //operation code
unsigned short len; /* data length */
char data [MAXDATASIZE-HEADER_LEN];//data
//note: update char data to a string or a pointer to make it more versatile.
};
//error operation
#define OP_ERROR 0xFFFF
//operations requested by the central server
#define OP_SALUDO 0x0001
#define OP_MOVE_WHEEL 0x0002
#define OP_STOP_WHEEL 0x0003
#define OP_VEL_ROBOT 0X0005//returns the wheel speeds in rad/s
//client operations
#define OP_MESSAGE_RECIVE 0x0004
//the greeting operation is present in both
struct appdata *operation_recv;//received operation message
struct appdata operation_send;//struct for the message to send
|
0f017268eaf9e1dcb997742ca38a4eb46b82712b
|
[
"Markdown",
"Makefile",
"C++"
] | 9 |
C++
|
truposky/rasp_server-Robot-control
|
7720fe71636a46ac09f6f8c630b9d10b97ad7d43
|
0d306555278ad9fb203fd2bc75f656c2cf604241
|
refs/heads/master
|
<file_sep>/**
Currently, this is not part of the automatic docker build process.
These commands need to be run
BEFORE the CAS4 docker is built
and
AFTER the MySQL db is up and running
**/
CREATE DATABASE cas;
USE cas;
CREATE TABLE cas_users (
id INT AUTO_INCREMENT NOT NULL, username VARCHAR(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
password VARCHAR(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL, PRIMARY KEY (id), UNIQUE KEY (username)
);
INSERT INTO cas_users (username, password) VALUES ('guest', '<PASSWORD>');
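/**
 Hypothetical sanity check (not part of the original setup): run after the
 statements above to confirm the table and the guest user were created.
**/
USE cas;
SELECT id, username FROM cas_users WHERE username = 'guest';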
|
d4835850c459dc7c133d49e48a9f6ebbe89f385d
|
[
"SQL"
] | 1 |
SQL
|
tzaffi/docker-cas_mysql
|
2a0d8b99ca71a1de4218e3e05c64edbcfbd08662
|
bca89a5fa23f0acaf9a9807d678139a849faa097
|
refs/heads/master
|
<repo_name>simonask/js<file_sep>/Makefile
all:
g++ -o js *.cpp libv8.a<file_sep>/io.h
#ifndef _IO_H_
#define _IO_H_
#include "v8.h"
namespace IO { void initialize(v8::Handle<v8::Object>& global); }
#endif /* _IO_H_ */
<file_sep>/process.cpp
#include "process.h"
using namespace v8;
#include <unistd.h>
#include <sys/wait.h>
#include <stdlib.h>
static Persistent<FunctionTemplate> process_templ;
static Handle<Value> wrap_pid(pid_t pid)
{
Local<Object> process = process_templ->GetFunction()->NewInstance();
process->Set(String::New("pid"), Integer::New(pid));
return process;
}
pid_t unwrap_pid(Handle<Object> process)
{
pid_t pid = (pid_t)process->Get(String::New("pid"))->ToInteger()->Value();
return pid;
}
static Handle<Value> js_process_new(const Arguments& args)
{
return Undefined();
}
static Handle<Value> js_process_fork(const Arguments& args)
{
pid_t pid = fork();
if (pid)
return wrap_pid(pid);
else
return Undefined();
}
static Handle<Value> js_process_exit(const Arguments& args)
{
int status;
if (args.Length() > 0)
status = args[0]->ToInteger()->Value();
else
status = 0;
exit(status);
}
static Handle<Value> js_process_wait(const Arguments& args)
{
pid_t pid = unwrap_pid(args.This());
int status;
waitpid(pid, &status, 0);
return Integer::New(WEXITSTATUS(status)); // TODO: Support signals etc.
}
static void dummy_setter(Local<String>, const Local<Value>, const AccessorInfo&) {}
void Process::initialize(Handle<Object>& global)
{
process_templ = Persistent<FunctionTemplate>(FunctionTemplate::New(js_process_new));
process_templ->SetClassName(String::New("Process"));
Local<ObjectTemplate> ot = process_templ->InstanceTemplate();
process_templ->Set(String::New("fork"), FunctionTemplate::New(js_process_fork));
process_templ->Set(String::New("exit"), FunctionTemplate::New(js_process_exit));
ot->Set(String::New("wait"), FunctionTemplate::New(js_process_wait));
Local<Object> process = process_templ->GetFunction();
global->Set(String::New("Process"), process);
}
<file_sep>/io.cpp
#include "io.h"
using namespace v8;
#include <stdio.h>
#include <string.h>
#include <assert.h>
static Persistent<FunctionTemplate> io_templ;
enum InternalFields { FILE_HANDLE = 0, NUM_FIELDS };
static Handle<Value> wrap_file_pointer(FILE* fp)
{
Local<Object> io = io_templ->GetFunction()->NewInstance();
io->SetInternalField(FILE_HANDLE, External::New((void*)fp));
return io;
}
static FILE* unwrap_file_pointer(Handle<Object> self)
{
Local<Value> field = self->GetInternalField(FILE_HANDLE);
return reinterpret_cast<FILE*>(Handle<External>::Cast(field)->Value());
}
/*
METHODS
*/
static Handle<Value> js_io_new(const Arguments& args)
{
if (args.Length() < 2 && args.Length() != 0)
assert(0 && "not enough arguments for js_io_new!"); // TODO: Throw exception
FILE* fp = fdopen(args[0]->ToInteger()->Value(), *String::AsciiValue(args[1]));
args.This()->SetInternalField(FILE_HANDLE, External::New(fp));
return args.This();
}
static Handle<Value> js_io_open(const Arguments& args)
{
if (args.Length() < 1)
assert(0 && "not enough arguments for js_io_open!"); // TODO: Throw exception
const char* flags = args.Length() < 2 ? "rw" : *String::AsciiValue(args[1]);
FILE* fp = fopen(*String::Utf8Value(args[0]), flags);
if (!fp)
return Undefined();
else
return wrap_file_pointer(fp);
}
static Handle<Value> js_io_close(const Arguments& args)
{
FILE* fp = unwrap_file_pointer(args.This());
fclose(fp);
return True();
}
static Handle<Value> js_io_read(const Arguments& args)
{
FILE* fp = unwrap_file_pointer(args.This());
size_t n = args[0]->ToInteger()->Value();
char buf[n];
size_t m = fread(buf, 1, n, fp);
return String::New(buf, m);
}
static Handle<Value> js_io_write(const Arguments& args)
{
FILE* fp = unwrap_file_pointer(args.This());
const char* data = *String::Utf8Value(args[0]);
size_t n = fwrite(data, 1, strlen(data), fp);
return Integer::New(n);
}
static Handle<Value> js_io_flush(const Arguments& args)
{
FILE* fp = unwrap_file_pointer(args.This());
return fflush(fp) ? False() : True();
}
static Handle<Value> js_io_seek(const Arguments& args)
{
FILE* fp = unwrap_file_pointer(args.This());
int offset = args[0]->ToInteger()->Value();
int whence = args[1]->ToInteger()->Value();
return fseek(fp, offset, whence) ? False() : True();
}
static Handle<Value> js_io_tell(const Arguments& args)
{
FILE* fp = unwrap_file_pointer(args.This());
return Integer::New(ftell(fp));
}
/*
ACCESSORS
*/
static Handle<Value> js_io_handle(Local<String> property, const AccessorInfo& info)
{
return Integer::New((int64_t)unwrap_file_pointer(info.Holder()));
}
static Handle<Value> js_io_eof(Local<String> property, const AccessorInfo& info)
{
FILE* fp = unwrap_file_pointer(info.Holder());
return feof(fp) ? True() : False();
}
static void dummy_setter(Local<String>, const Local<Value>, const AccessorInfo&) {}
void IO::initialize(Handle<Object>& global)
{
io_templ = Persistent<FunctionTemplate>(FunctionTemplate::New(js_io_new));
io_templ->SetClassName(String::New("IO"));
Local<ObjectTemplate> ot = io_templ->InstanceTemplate();
ot->SetInternalFieldCount(NUM_FIELDS);
ot->SetAccessor(String::New("handle"), js_io_handle, dummy_setter);
ot->SetAccessor(String::New("eof"), js_io_eof, dummy_setter);
ot->Set(String::New("read"), FunctionTemplate::New(js_io_read));
ot->Set(String::New("write"), FunctionTemplate::New(js_io_write));
ot->Set(String::New("close"), FunctionTemplate::New(js_io_close));
ot->Set(String::New("flush"), FunctionTemplate::New(js_io_flush));
ot->Set(String::New("seek"), FunctionTemplate::New(js_io_seek));
ot->Set(String::New("tell"), FunctionTemplate::New(js_io_tell));
io_templ->Set(String::New("open"), FunctionTemplate::New(js_io_open));
Local<Object> io = io_templ->GetFunction();
io->Set(String::New("stdout"), wrap_file_pointer(stdout));
io->Set(String::New("stdin"), wrap_file_pointer(stdin));
io->Set(String::New("stderr"), wrap_file_pointer(stderr));
global->Set(String::New("IO"), io);
}
<file_sep>/main.cpp
#include "v8.h"
using namespace v8;
#include <string>
#include <fstream>
#include <iostream>
#include <sstream>
#include <vector>
using namespace std;
#include "io.h"
#include "process.h"
Handle<Value> LoadScript(const char* file)
{
ifstream fs(file);
if (!fs.is_open())
{
cerr << "ERROR: File not found: " << file << endl;
return Undefined();
}
stringstream source_str;
string line;
while (fs.is_open() && !fs.eof())
{
getline(fs, line);
if (line[0] != '#')
source_str << line << endl;
}
fs.close();
Handle<String> source = String::New(source_str.str().c_str());
Handle<Script> script = Script::Compile(source);
return script->Run();
}
Handle<Value> js_load(const Arguments& args)
{
const char* file = *String::AsciiValue(args[0]);
return LoadScript(file);
}
int main (int argc, char const *argv[])
{
HandleScope handle_scope;
Handle<ObjectTemplate> global = ObjectTemplate::New();
global->Set(String::New("load"), FunctionTemplate::New(js_load));
Persistent<Context> context = Context::New(NULL, global);
Context::Scope context_scope(context);
Handle<Object> global_obj(context->Global());
IO::initialize(global_obj);
Process::initialize(global_obj);
Handle<Value> result = LoadScript(argv[1]);
context.Dispose();
return 0;
}<file_sep>/test.js
#!./js
load("prelude.js");
var pid = Process.fork();
println("hello from " + (pid ? pid.pid : 0));
if (pid)
{
var result = pid.wait();
println("return with result: " + result);
}
else
{
Process.exit(123);
}
<file_sep>/process.h
#ifndef _PROCESS_H_
#define _PROCESS_H_
#include "v8.h"
namespace Process { void initialize(v8::Handle<v8::Object>& global); }
#endif /* _PROCESS_H_ */
|
345371b9340c1299a2d010dad2768c955ad68516
|
[
"JavaScript",
"Makefile",
"C++"
] | 7 |
Makefile
|
simonask/js
|
2ee2565222bc665f6a1d310dbccef846da5534b2
|
5ed4a9a259036328c3c5dccf3e1f9a5e31058dfc
|
refs/heads/master
|
<repo_name>kzj891134340/demo<file_sep>/libraries.gradle
ext.repoUrl = "${project.'repo'}"
ext.spring_boot = project.properties['version.springboot'] ?: '2.2.6.RELEASE'
ext.spring_cloud = project.properties['version.springcloud'] ?: '2.2.0.RELEASE'
ext.spring_cloud_gateway = project.properties['version.springcloudgateway'] ?: '2.2.5.RELEASE'
ext.spring_cloud_zuul = project.properties['version.springcloudzuul'] ?: '1.4.5.RELEASE'
ext.spring_cloud_k8s = project.properties['version.springcloudk8s'] ?: '1.1.1.RELEASE'
ext.spring_security = project.properties['version.springsecurity'] ?: '4.2.2.RELEASE'
ext.spring_security_oauth2 = project.properties['version.springsecurityoauth2'] ?: '2.3.3.RELEASE'
ext.spring_data = project.properties['version.springdata'] ?: '2.2.2.RELEASE'
ext.spring = project.properties['version.spring'] ?: '5.0.4.RELEASE'
ext.hibernate = project.properties['version.hibernate'] ?: '5.4.1.Final'
ext.jpa_api = project.properties['version.jpa_api'] ?: '2.2'
ext.jpa = project.properties['version.jpa'] ?: '5.4.1.Final'
ext.postgresql = project.properties['version.postgresql'] ?: '42.2.14'
ext.jackson = project.properties['version.jackson'] ?: '2.9.7'
ext.activemq = project.properties['version.activemq'] ?: '5.15.9'
ext.apache_poi = project.properties['version.apachepoi'] ?: '4.1.0'
ext.okhttp3 = project.properties['version.okhttp3'] ?: '4.8.1'
ext.retrofit = project.properties['version.retrofit'] ?: '2.9.0'
ext.itextpdf = project.properties['version.itextpdf'] ?: '5.5.12'
ext.pdfbox = project.properties['version.pdfbox'] ?: '2.0.11'
ext.groovy = project.properties['version.groovy'] ?: '3.0.5'
ext.gson = project.properties['version.gson'] ?: '2.8.5'
ext.drools = project.properties['version.drools'] ?: '7.10.0.Final'
ext.netty_all = project.properties['version.nettyall'] ?: '4.1.29.Final'
ext.jetcd = project.properties['version.jetcd'] ?: '0.5.4'
ext.messaginghub_pooled_jms = project.properties['version.messaginghub_pooled_jms'] ?: '1.0.4'
ext.sitb_utils = project.properties['version.sitbutils'] ?: '1.0.33.RELEASE'
ext.aomi_common_bs = project.properties['version.aomi_common_bs'] ?: '2.0.17'
ext.sitb_spring_data_mongo = project.properties['version.sitb_spring_data_mongo'] ?: '1.0.6'
ext.lib = [
_test : [
"org.springframework.boot:spring-boot-starter-test:$spring_boot"
],
json : [
"com.fasterxml.jackson.core:jackson-core:${jackson}",
"com.fasterxml.jackson.core:jackson-databind:${jackson}"
],
xml : [
"com.fasterxml.jackson.core:jackson-core:${jackson}",
"com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${jackson}"
],
logging : [
"org.springframework.boot:spring-boot-starter-logging:$spring_boot",
"org.codehaus.groovy:groovy-all:${groovy}"
],
mongodb : [
"org.mongodb:mongo-java-driver:2.12.5"
],
drools : [
"org.drools:drools-core:${drools}",
"org.drools:drools-compiler:${drools}"
],
logback : [
"ch.qos.logback:logback-core:1.2.3",
"ch.qos.logback:logback-classic:1.2.3"
],
gson : "com.google.code.gson:gson:${gson}",
slf4j_api : "org.slf4j:slf4j-api:1.7.25",
spring_core : "org.springframework:spring-core:$spring",
spring_beans : "org.springframework:spring-beans:$spring",
spring_context : "org.springframework:spring-context:$spring",
spring_orm : "org.springframework:spring-orm:$spring",
spring_data_commons : "org.springframework.data:spring-data-commons:$spring_data",
spring_data_jpa : "org.springframework.data:spring-data-jpa:$spring_data",
spring_data_mongodb : "org.springframework.data:spring-data-mongodb:$spring_data",
spring_data_redis : "org.springframework.data:spring-data-redis:$spring_data",
spring_jms : "org.springframework:spring-jms:$spring",
spring_ws_core : "org.springframework.ws:spring-ws-core:2.2.4.RELEASE",
spring_messaging : "org.springframework:spring-messaging:4.3.7.RELEASE",
spring_security_core : "org.springframework.security:spring-security-core:$spring_security",
spring_security_config : "org.springframework.security:spring-security-config:$spring_security",
spring_security_web : "org.springframework.security:spring-security-web:$spring_security",
spring_security_oauth2 : "org.springframework.security.oauth:spring-security-oauth2:${spring_security_oauth2}",
spring_integration_mail : "org.springframework.integration:spring-integration-mail:4.3.12.RELEASE",
spring_cloud_commons : "org.springframework.cloud:spring-cloud-commons:$spring_cloud",
spring_cloud_config : "org.springframework.cloud:spring-cloud-starter-config:$spring_cloud",
spring_cloud_config_server : "org.springframework.cloud:spring-cloud-config-server:$spring_cloud",
spring_cloud_eureka_client : "org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:$spring_cloud",
spring_cloud_eureka_server : "org.springframework.cloud:spring-cloud-starter-netflix-eureka-server:$spring_cloud",
spring_cloud_eureka : "org.springframework.cloud:spring-cloud-starter-eureka:$spring_cloud",
spring_cloud_oauth : "org.springframework.cloud:spring-cloud-starter-oauth2:$spring_cloud",
spring_cloud_security : "org.springframework.cloud:spring-cloud-starter-security:$spring_cloud",
spring_cloud_openfeign : "org.springframework.cloud:spring-cloud-starter-openfeign:$spring_cloud",
spring_cloud_gateway : "org.springframework.cloud:spring-cloud-starter-gateway:$spring_cloud_gateway",
spring_cloud_zuul : "org.springframework.cloud:spring-cloud-starter-zuul:$spring_cloud_zuul",
spring_cloud_netflix_ribbon : "org.springframework.cloud:spring-cloud-starter-netflix-ribbon:$spring_cloud_zuul",
spring_cloud_k8s_all : "org.springframework.cloud:spring-cloud-starter-kubernetes-all:$spring_cloud_k8s",
spring_cloud_k8s : "org.springframework.cloud:spring-cloud-starter-kubernetes:$spring_cloud_k8s",
spring_cloud_k8s_config : "org.springframework.cloud:spring-cloud-starter-kubernetes-config:$spring_cloud_k8s",
spring_boot : "org.springframework.boot:spring-boot-starter:$spring_boot",
spring_boot_actuator : "org.springframework.boot:spring-boot-starter-actuator:$spring_boot",
spring_boot_remote_shell : "org.springframework.boot:spring-boot-starter-remote-shell:$spring_boot",
spring_boot_aop : "org.springframework.boot:spring-boot-starter-aop:$spring_boot",
spring_boot_web : "org.springframework.boot:spring-boot-starter-web:$spring_boot",
spring_boot_webflux : "org.springframework.boot:spring-boot-starter-webflux:$spring_boot",
spring_boot_email : "org.springframework.boot:spring-boot-starter-mail:$spring_boot",
spring_boot_data_jpa : "org.springframework.boot:spring-boot-starter-data-jpa:$spring_boot",
spring_boot_data_jdbc : "org.springframework.boot:spring-boot-starter-jdbc:$spring_boot",
spring_boot_data_mongodb : "org.springframework.boot:spring-boot-starter-data-mongodb:$spring_boot",
spring_boot_data_redis : "org.springframework.boot:spring-boot-starter-data-redis:$spring_boot",
spring_boot_data_redis_reactive : "org.springframework.boot:spring-boot-starter-data-redis-reactive:$spring_boot",
spring_boot_freemarker : "org.springframework.boot:spring-boot-starter-freemarker:$spring_boot",
spring_boot_thymeleaf : "org.springframework.boot:spring-boot-starter-thymeleaf:$spring_boot",
spring_boot_websocket : "org.springframework.boot:spring-boot-starter-websocket:$spring_boot",
spirng_boot_security : "org.springframework.boot:spring-boot-starter-security:$spring_boot",
spring_boot_oauth : "org.springframework.security.oauth.boot:spring-security-oauth2-autoconfigure:2.0.1.RELEASE",
spring_boot_jdbc : "org.springframework.boot:spring-boot-starter-jdbc:$spring_boot",
spring_boot_jta_atomikos : "org.springframework.boot:spring-boot-starter-jta-atomikos:$spring_boot",
spring_boot_activemq : "org.springframework.boot:spring-boot-starter-activemq:$spring_boot",
spring_boot_amqp : "org.springframework.boot:spring-boot-starter-amqp:$spring_boot",
spring_boot_configuration_processor: "org.springframework.boot:spring-boot-configuration-processor:$spring_boot",
spring_boot_hateoas : "org.springframework.boot:spring-boot-starter-hateoas:$spring_boot",
spring_boot_integration : "org.springframework.boot:spring-boot-starter-integration:${spring_boot}",
spring_boot_devtools : "org.springframework.boot:spring-boot-devtools:${spring_boot}",
spring_boot_undertow : "org.springframework.boot:spring-boot-starter-undertow:${spring_boot}",
spring_boot_batch : "org.springframework.boot:spring-boot-starter-batch:${spring_boot}",
spring_boot_validation : "org.springframework.boot:spring-boot-starter-validation:${spring_boot}",
spring_boot_autoconfigure : "org.springframework.boot:spring-boot-autoconfigure:${spring_boot}",
reactor_net : "io.projectreactor:reactor-net:2.0.8.RELEASE",
groovy_all : "org.codehaus.groovy:groovy-all:${groovy}",
jpa_api : "javax.persistence:javax.persistence-api:${jpa_api}",
jpa : "org.hibernate:hibernate-entitymanager:$jpa",
hibernate_core : "org.hibernate:hibernate-core:$hibernate",
h2database : "com.h2database:h2:1.4.186",
mysql : "mysql:mysql-connector-java:8.0.19",
mybatis : "org.mybatis:mybatis:3.4.3",
mybatis_spring : "org.mybatis:mybatis-spring:1.3.1",
mybatis_spring_boot : "org.mybatis.spring.boot:mybatis-spring-boot-starter:1.2.0",
mybatis_pagehelper : "com.github.pagehelper:pagehelper:4.0.1",
postgresql : "org.postgresql:postgresql:$postgresql",
oracle : "com.oracle:ojdbc6:v1.0",
commons_compress : "org.apache.commons:commons-compress:1.13",
commons_pool : "commons-pool:commons-pool:1.6",
commons_collections4 : "org.apache.commons:commons-collections4:4.1",
commons_lang3 : "org.apache.commons:commons-lang3:3.5",
commons_codec : "commons-codec:commons-codec:1.10",
commons_email : "org.apache.commons:commons-email:1.4",
commons_io : 'commons-io:commons-io:2.5',
commons_beanutils : 'commons-beanutils:commons-beanutils:1.9.3',
commons_net : "commons-net:commons-net:3.5",
commons_math : "org.apache.commons:commons-math3:3.6.1",
mesos : "org.apache.mesos:mesos:1.1.1",
poi : "org.apache.poi:poi:${apache_poi}",
poi_ooxml : "org.apache.poi:poi-ooxml:${apache_poi}",
shiro_all : "org.apache.shiro:shiro-all:1.2.3",
activemq_all : "org.apache.activemq:activemq-all:${activemq}",
activemq_broker : "org.apache.activemq:activemq-broker:${activemq}",
activemq_pool : "org.apache.activemq:activemq-pool:${activemq}",
messaginghub_pooled_jms : "org.messaginghub:pooled-jms:${messaginghub_pooled_jms}",
httpclient : "org.apache.httpcomponents:httpclient:4.4.1",
netty_all : "io.netty:netty-all:${netty_all}",
jetcd_all : "io.etcd:jetcd-all:${jetcd}",
jetcd_core : "io.etcd:jetcd-core:${jetcd}",
zookeeper : "org.apache.zookeeper:zookeeper:3.4.9",
protostuff : [
"com.dyuproject.protostuff:protostuff-core:1.0.12",
"com.dyuproject.protostuff:protostuff-runtime:1.0.12"
],
objenesis : "org.objenesis:objenesis:2.5.1",
cglib : "cglib:cglib:3.2.4",
kaptcha : "com.github.axet:kaptcha:0.0.9",
xmemcached : "com.googlecode.xmemcached:xmemcached:2.3.2",
UserAgentUtils : "eu.bitwalker:UserAgentUtils:1.20",
jpos : "org.jpos:jpos:2.0.6",
freemarker : "org.freemarker:freemarker:2.3.23",
okhttp3 : "com.squareup.okhttp3:okhttp:${okhttp3}",
okhttp3_logging : "com.squareup.okhttp3:logging-interceptor:${okhttp3}",
retrofit : "com.squareup.retrofit2:retrofit:${retrofit}",
retrofit_jackson : "com.squareup.retrofit2:converter-jackson:${retrofit}",
retrofit_gson : "com.squareup.retrofit2:converter-gson:${retrofit}",
jsoup : 'org.jsoup:jsoup:1.10.2',
itextpdf : "com.itextpdf:itextpdf:${itextpdf}",
itextpdf_tool_xmlworker : "com.itextpdf.tool:xmlworker:${itextpdf}",
nekohtml : "net.sourceforge.nekohtml:nekohtml:1.9.22",
pdfbox : "org.apache.pdfbox:pdfbox:${pdfbox}",
rxjava : "io.reactivex:rxjava:1.2.2",
zxing : "com.google.zxing:core:3.3.0",
lombok : "org.projectlombok:lombok:1.18.0",
hibernate_jpamodelgen : "org.hibernate:hibernate-jpamodelgen:${hibernate}",
feign_okhttp : "io.github.openfeign:feign-okhttp:9.5.1",
ews_java_api : "com.microsoft.ews-java-api:ews-java-api:2.0",
jpush_client : "cn.jpush.api:jpush-client:3.3.0",
sitb_dswrs : "software.sitb:dswrs:1.0.15.RELEASE",
sitb_common_utils : "software.sitb.common:utils:1.0.3.RELEASE",
sitb_common_entity : "software.sitb.common:entity:1.0.94",
sitb_common_web : "software.sitb.common:web:1.0.40.RELEASE",
sitb_utils : "software.sitb:utils:${sitb_utils}",
sitb_spring_data_jpa : "software.sitb.spring.data:jpa:1.0.9.RELEASE",
sitb_spring_data_mongo : "software.sitb.spring.data:mongo:${sitb_spring_data_mongo}",
sitb_spring_data_mybatis_page : "software.sitb.spring.data:mybatis-page:1.0.2.RELEASE",
sitb_spring_cache : "software.sitb.spring:cache:1.0.1.RELEASE",
sitb_spring_feign : "software.sitb.spring:feign:1.0.17.RELEASE",
sitb_spring_feign_exception : "software.sitb.spring:feign-exception:1.0.0.RELEASE",
aomi_common_bs : "tech.aomi.common:business-services:${aomi_common_bs}",
aomi_common_utils : "tech.aomi.common:utils:1.0.7",
]<file_sep>/src/main/resources/application.properties
spring.data.mongodb.host=1938728bm9.imdo.co
spring.data.mongodb.port=49369
spring.data.mongodb.database=test
spring.data.mongodb.username=test
spring.data.mongodb.password=<PASSWORD>
<file_sep>/src/main/java/com/example/demo/controller/TestController.java
package com.example.demo.controller;
import com.example.demo.service.TestService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@Slf4j
@RequestMapping("/test")
@RestController
public class TestController {
@Autowired
private TestService testService;
@GetMapping
public String test1(){
testService.save();
return "SUCESS";
}
}
<file_sep>/java.gradle
apply plugin: 'java'
[compileJava, compileTestJava, javadoc]*.options*.encoding = 'UTF-8'
compileJava {
targetCompatibility = JavaVersion.VERSION_1_8
sourceCompatibility = JavaVersion.VERSION_1_8
}<file_sep>/idea.gradle
apply plugin: 'idea'
idea {
module {
inheritOutputDirs = false
outputDir = file("$buildDir/classes/main/")
testOutputDir = file("$buildDir/classes/test/")
}
}<file_sep>/build.gradle
plugins {
id "io.franzbecker.gradle-lombok" version "2.2"
id 'org.springframework.boot' version '2.3.2.RELEASE' apply false
}
group 'com.konzj.demo'
version '1.0.0'
apply from: "libraries.gradle"
apply from: "java.gradle"
apply from: "idea.gradle"
apply from: "bundle-spring-boot-app.gradle"
repositories {
maven { url "${project.'owner.repo'}" }
maven { url "${project.'repo'}" }
jcenter()
}
configurations {
compile.exclude module: "spring-boot-starter-tomcat"
}
dependencies {
implementation lib.retrofit
implementation lib.spring_boot_web
implementation lib.spring_boot_data_mongodb
implementation lib.spring_boot_undertow
implementation lib.spring_boot_actuator
implementation lib.spring_cloud_commons
implementation lib.okhttp3
implementation lib.commons_lang3
implementation lib.commons_codec
implementation lib.spring_data_mongodb
testCompile lib._test
}<file_sep>/gradle.properties
repo=
owner.repo=
docker.image.namespace=192.168.1.22:30001
|
6d7fde597eb3f0d1920dbf0bd98efe8a206388b4
|
[
"Java",
"INI",
"Gradle"
] | 7 |
Gradle
|
kzj891134340/demo
|
ed69b54853fc4a53334fcfc4953a8922729d431c
|
8812e9f7204c44f250be6c2ee939b639cac0590f
|
refs/heads/main
|
<repo_name>tbauer1979/Newsfeed-Components<file_sep>/components/Article.js
import datadata from './datadata';
function makeArticle ({title,date,firstParagraph,secondParagraph,thirdParagraph}) {
const article = document.createElement('div')
const headerTwo = document.createElement('h2')
const firstPara = document.createElement('p')
const secondPara = document.createElement('p')
const thirdPara = document.createElement('p')
const fourthPara = document.createElement('p')
const spanspan = document.createElement('span')
const newBTN = document.createElement('button')
article.appendChild(headerTwo);
article.appendChild(firstPara);
article.appendChild(secondPara);
article.appendChild(thirdPara);
article.appendChild(fourthPara);
article.appendChild(spanspan);
spanspan.appendChild(newBTN);
article.classList.add("article")
firstPara.classList.add("date")
newBTN.classList.add("expandButton")
headerTwo.textContent = title
firstPara.textContent = date
secondPara.textContent = firstParagraph
thirdPara.textContent = secondParagraph
fourthPara.textContent = thirdParagraph
  newBTN.textContent = '+'
spanspan.addEventListener('click',() => {
article.classList.toggle('article-open')
})
return article
}
datadata.forEach(d => {
const article = makeArticle(d)
document.body.appendChild(article)
})
/*
Step 1: Write a component called 'articleMaker' to create an article.
Your component is a function that takes an article object as its only argument,
and returns a DOM node looking like the one below:
<div class="article">
<h2>{title of the article}</h2>
<p class="date">{date of the article}</p>
{three separate paragraph elements}
<span class="expandButton">+</span>
</div>
Step 2: Still inside `articleMaker`, add an event listener to the span.expandButton.
This listener should toggle the class 'article-open' on div.article.
Step 3: Don't forget to return something from your function!
Step 4: Outside your function now, loop over the data. At each iteration you'll use your component
to create a div.article element and append it to the DOM inside div.articles (see index.html).
Step 5: Try adding a new article object to the data array. Make sure it is in the same format as the others.
Refresh the page to see the new article.
*/
|
3ae1f29169bdb384a09f663dcdd37a81e2fce7a4
|
[
"JavaScript"
] | 1 |
JavaScript
|
tbauer1979/Newsfeed-Components
|
656e76c5341ac91d434a9ed4839c8de8787c7e99
|
ae564752abccc7e416829378d13bc41ffc28837b
|
refs/heads/master
|
<file_sep>---
layout: post
title: Hello World
date: 2017-02-10 23:10:01
tags:
- Tags1
---
### Hello!
<file_sep>#!/bin/sh
jekyll build
wait
git add *
wait
git commit -am "commit now"
wait
git push origin master
|
5bb3d40b78f35d2601b0fc34e93865446348a57f
|
[
"Markdown",
"Shell"
] | 2 |
Markdown
|
QAJenning/Test.github.io
|
0fd297e08b2d390e0c1197f752e74d2ef7e671ad
|
5fcd61677dd5741d15bf623e7a5eadc3327c0860
|
refs/heads/master
|
<repo_name>zparvez2z/News_classifier<file_sep>/News_classifier/data_preprocessing.py
import os
import codecs
import re
import time
start = time.time()
with codecs.open('news.txt','r',encoding='utf8') as f:
text = f.read()
print(len(text))
countRe = re.compile(r'\t')
print("no of tab before : "+str(len(countRe.findall(text))))
single_lined = re.sub(r'\s+', ' ', text)
countRe = re.compile(r'\t')
print("no of tab: "+str(len(countRe.findall(single_lined))))
tagged = re.sub(r'</news>\s+', '\t0\n', single_lined)
countRe = re.compile(r'\n')
print("no of newline: "+str(len(countRe.findall(tagged))))
countRe = re.compile(r'\t')
print("no of tab: "+str(len(countRe.findall(tagged))))
cleaned = re.sub(r'<date>|</date>|<title>|</title>|<news>','',tagged)
with codecs.open('news_out.tsv','w',encoding='utf8') as f:
f.write(cleaned)
print("total time : "+str(time.time()-start))<file_sep>/README.md
# News_classifier
This is a news classifier. Classification is done using Naive Bayes (GaussianNB), and it works for Bangla news too.
A Django app is built on top of it just for usability.
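Below is a minimal sketch of what the classification step can look like. The project's actual `natural_language_processing.py` module is not included here, so the `news_out.tsv` file name, its two-column layout and the `CountVectorizer` settings are assumptions for illustration only.
```
# Hypothetical sketch: train GaussianNB on the TSV produced by data_preprocessing.py
# and expose a predict_news() helper like the one views.py imports.
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import GaussianNB
# Assumed layout: <news text> \t <label>, where 1 = sports news and 0 = other news
data = pd.read_csv("news_out.tsv", sep="\t", header=None, names=["text", "label"])
vectorizer = CountVectorizer(max_features=2000)
X = vectorizer.fit_transform(data["text"]).toarray()  # GaussianNB needs a dense matrix
y = data["label"]
model = GaussianNB()
model.fit(X, y)
def predict_news(text):
    """Return an array like [1] for sports news or [0] for other news."""
    features = vectorizer.transform([text]).toarray()
    return model.predict(features)
```
`views.py` then only has to call `predict_news(query)` and map `1` to sports news and `0` to everything else.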
<file_sep>/News_classifier/views.py
from django.shortcuts import render
from django.http import HttpResponse
from . import natural_language_processing
import json
def index(request):
context = {
}
return render(request,'index.html',context)
def classify(request):
if request.method == 'POST':
query = request.POST.get('news',None)
print(type(query))
print(query)
result = natural_language_processing.predict_news(query)
result = result[0]
if result == 0:
return HttpResponse("Other News")
elif result == 1:
return HttpResponse(" Sports news !!!")
        else:
            # Guard against an unexpected prediction value so the view never returns None
            return HttpResponse("Unable to classify the news")
|
d7f84da9aa17d77b984f1443419c52e3637d0f6f
|
[
"Markdown",
"Python"
] | 3 |
Python
|
zparvez2z/News_classifier
|
0c845df77d81bc82e4a2074c5ba50b8e448175fd
|
93c9c8dec401fb432249b2378bb095948dc280c3
|
refs/heads/main
|
<file_sep><?php
namespace Shirish71\TailwindForm\Traits;
trait HandlesDefaultAndOldValue
{
use HandlesBoundValues;
private function setValue(
string $name,
string $label = '',
$bind = null,
$default = null,
$language = null,
string $placeholder = '',
string $id = ''
) {
if ($this->isWired()) {
return;
}
        $inputName = static::convertBracketsToDots($name);
        // Resolve the label first so the typed $label property is always
        // initialised, even when we return early for non-translatable fields.
        if ($label) {
            $this->label = $label;
        } else {
            $this->label = str_replace('_', ' ', ucfirst($name));
        }
        if (!$language) {
            $default = $this->getBoundValue($bind, $name) ?: $default;
            return $this->value = old($inputName, $default);
        }
        if ($bind !== false) {
            $bind = $bind ?: $this->getBoundTarget();
        }
        if ($bind) {
            $default = $bind->getTranslation($name, $language, false) ?: $default;
        }
        if ($id) {
            $this->id = $id;
        } else {
            $this->id = $name;
        }
        if ($placeholder) {
            $this->placeholder = $placeholder;
        } else {
            $this->placeholder = "Please enter {$this->label}";
        }
        $this->value = old("{$inputName}.{$language}", $default);
}
}
<file_sep><?php
namespace Shirish71\TailwindForm\Components;
class SuccessMessage extends Component
{
public function __construct()
{
}
}
<file_sep><?php
namespace Shirish71\TailwindForm\Components;
use Shirish71\TailwindForm\Traits\HandlesValidationErrors;
use Shirish71\TailwindForm\Traits\HandlesDefaultAndOldValue;
class FormInput extends Component
{
use HandlesValidationErrors, HandlesDefaultAndOldValue;
public string $name, $label, $type, $value, $placeholder, $id;
public bool $required;
/**
* Create a new component instance.
*
* @return void
*/
public function __construct(
string $name,
string $type = 'text',
string $label = '',
string $id = '',
string $placeholder = '',
bool $required = false,
$bind = null,
$default = null,
$language = null,
bool $showErrors = true
) {
$this->name = $name;
$this->type = $type;
$this->showErrors = $showErrors;
if ($language) {
$this->name = "{$name}[{$language}]";
}
$this->placeholder = $placeholder;
$this->id = $id;
$this->required = $required;
$this->setValue($name, $label, $bind, $default, $language, $placeholder);
}
}
<file_sep><?php
namespace Shirish71\TailwindForm\Components;
class FormLabel extends Component
{
public string $label, $smallNote;
public bool $required;
/**
* Create a new component instance.
*
* @return void
*/
public function __construct(bool $required = false, string $label = '', $smallNote = '')
{
$this->label = $label;
$this->required = $required;
$this->smallNote = $smallNote;
}
}
|
00c06b8c7de952389bff07b8135a23e56ae545a6
|
[
"PHP"
] | 4 |
PHP
|
shirish71/tailwind-form
|
4ce86a9c80839d9e918097f01430d1da2df43d7d
|
bea0589ab50eb26e7bf0065b30aa299d4a58c87c
|
refs/heads/master
|
<repo_name>danielwermann/sinospark<file_sep>/SinosPark/SinosPark/Controllers/EstacionamentoesController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using SinosPark.Models;
namespace SinosPark.Controllers
{
public class EstacionamentoesController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: Estacionamentoes
public ActionResult Index()
{
return View(db.Estacionamento.ToList());
}
// GET: Estacionamentoes/Details/5
public ActionResult Details(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Estacionamento estacionamento = db.Estacionamento.Find(id);
if (estacionamento == null)
{
return HttpNotFound();
}
return View(estacionamento);
}
// GET: Estacionamentoes/Create
public ActionResult Create()
{
return View();
}
// POST: Estacionamentoes/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,Nome,isAtivo,QuantidadeMaxima")] Estacionamento estacionamento)
{
if (ModelState.IsValid)
{
db.Estacionamento.Add(estacionamento);
db.SaveChanges();
return RedirectToAction("Index");
}
return View(estacionamento);
}
// GET: Estacionamentoes/Edit/5
public ActionResult Edit(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Estacionamento estacionamento = db.Estacionamento.Find(id);
if (estacionamento == null)
{
return HttpNotFound();
}
return View(estacionamento);
}
// POST: Estacionamentoes/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,Nome,isAtivo,QuantidadeMaxima")] Estacionamento estacionamento)
{
if (ModelState.IsValid)
{
db.Entry(estacionamento).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
return View(estacionamento);
}
// GET: Estacionamentoes/Delete/5
public ActionResult Delete(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Estacionamento estacionamento = db.Estacionamento.Find(id);
if (estacionamento == null)
{
return HttpNotFound();
}
return View(estacionamento);
}
// POST: Estacionamentoes/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(short id)
{
Estacionamento estacionamento = db.Estacionamento.Find(id);
db.Estacionamento.Remove(estacionamento);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
<file_sep>/SinosPark/SinosPark/ViewModels/EventoViewModel.cs
using System;
using System.Collections.Generic;
using System.Web.Mvc;
namespace SinosPark.ViewModels
{
public class EventoViewModel
{
public int AlunoId { get; set; }
public int EstacionamentoId { get; set; }
public int VeiculoId { get; set; }
public string AlunoNome { get; set; }
public string AlunoMatricula { get; set; }
public string VeiculoPlaca { get; set; }
public string VeiculoCor { get; set; }
public string VeiculoModelo { get; set; }
public string EstacionamentoNome { get; set; }
public int PagamentoId { get; set; }
public DateTime? Entrada { get; set; }
public DateTime? Saida { get; set; }
public string CodigoBarras { get; set; }
public List<SelectListItem> Estacionamentos { get; set; }
public List<SelectListItem> Veiculos { get; set; }
public List<SelectListItem> Alunos { get; set; }
public int EventoId { get; set; }
}
}<file_sep>/SinosPark/SinosPark/Controllers/VeiculoModeloesController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using SinosPark.Models;
namespace SinosPark.Controllers
{
public class VeiculoModeloesController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: VeiculoModeloes
public ActionResult Index()
{
return View(db.VeiculoModelo.ToList());
}
// GET: VeiculoModeloes/Details/5
public ActionResult Details(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
VeiculoModelo veiculoModelo = db.VeiculoModelo.Find(id);
if (veiculoModelo == null)
{
return HttpNotFound();
}
return View(veiculoModelo);
}
// GET: VeiculoModeloes/Create
public ActionResult Create()
{
return View();
}
// POST: VeiculoModeloes/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,Descricao,isAtivo,isMoto")] VeiculoModelo veiculoModelo)
{
if (ModelState.IsValid)
{
db.VeiculoModelo.Add(veiculoModelo);
db.SaveChanges();
return RedirectToAction("Index");
}
return View(veiculoModelo);
}
// GET: VeiculoModeloes/Edit/5
public ActionResult Edit(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
VeiculoModelo veiculoModelo = db.VeiculoModelo.Find(id);
if (veiculoModelo == null)
{
return HttpNotFound();
}
return View(veiculoModelo);
}
// POST: VeiculoModeloes/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,Descricao,isAtivo,isMoto")] VeiculoModelo veiculoModelo)
{
if (ModelState.IsValid)
{
db.Entry(veiculoModelo).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
return View(veiculoModelo);
}
// GET: VeiculoModeloes/Delete/5
public ActionResult Delete(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
VeiculoModelo veiculoModelo = db.VeiculoModelo.Find(id);
if (veiculoModelo == null)
{
return HttpNotFound();
}
return View(veiculoModelo);
}
// POST: VeiculoModeloes/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(short id)
{
VeiculoModelo veiculoModelo = db.VeiculoModelo.Find(id);
db.VeiculoModelo.Remove(veiculoModelo);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
<file_sep>/README.md
# sinospark
http://sinospark.apphb.com
<file_sep>/SinosPark/SinosPark/Controllers/HomeController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Microsoft.Ajax.Utilities;
using SinosPark.Models;
namespace SinosPark.Controllers
{
public class HomeController : Controller
{
public ActionResult Index()
{
ViewBag.Title = "Sinos Park";
return View();
}
public ActionResult About()
{
ViewBag.Message = "Your application description page.";
return View();
}
public ActionResult Contact()
{
ViewBag.Message = "Your contact page.";
return View();
}
public JsonResult GetEstacionamentos()
{
IEnumerable<Estacionamento> t = new List<Estacionamento>();
using (var db = new SinosParkEntities())
{
t = db.Estacionamento.Where(p => p.isAtivo).ToList();
}
return Json(t, JsonRequestBehavior.AllowGet);
}
}
}<file_sep>/SinosPark/SinosPark/Utility/ExtensionMethods.cs
using System;
using System.Globalization;
namespace SinosPark.Utility
{
public static class ExtensionMethods
{
public static string GetMonthName(this int month)
{
return new DateTime(2010, month, 1).ToString("MMMM", CultureInfo.CurrentCulture);
}
}
}<file_sep>/SinosPark/SinosPark/ViewModels/EventoPagamentoViewModel.cs
using System;
using System.Collections.Generic;
using System.Web.Mvc;
namespace SinosPark.ViewModels
{
public class EventoPagamentoViewModel
{
public List<SelectListItem> Pagamentos { get; set; }
public int PagamentoId { get; set; }
public int EventoId { get; set; }
public string PagamentoNome { get; set; }
public string AlunoNome { get; set; }
public string AlunoMatricula { get; set; }
public string VeiculoPlaca { get; set; }
public string VeiculoCor { get; set; }
public string VeiculoModelo { get; set; }
public DateTime DataSaida { get; set; }
public DateTime DataEntrada { get; set; }
public DateTime DataPagamento { get; set; }
public decimal Valor { get; set; }
public TimeSpan Tempo { get; set; }
}
}<file_sep>/SinosPark/SinosPark/Controllers/VeiculoesController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using SinosPark.Models;
namespace SinosPark.Controllers
{
public class VeiculoesController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: Veiculoes
public ActionResult Index()
{
var veiculo = db.Veiculo.Include(v => v.VeiculoModelo);
return View(veiculo.ToList());
}
// GET: Veiculoes/Details/5
public ActionResult Details(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Veiculo veiculo = db.Veiculo.Find(id);
if (veiculo == null)
{
return HttpNotFound();
}
return View(veiculo);
}
// GET: Veiculoes/Create
public ActionResult Create()
{
ViewBag.VeiculoModeloId = new SelectList(db.VeiculoModelo, "Id", "Descricao");
return View();
}
// POST: Veiculoes/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,Placa,Cor,VeiculoModeloId,isBloqueado")] Veiculo veiculo)
{
if (ModelState.IsValid)
{
db.Veiculo.Add(veiculo);
db.SaveChanges();
return RedirectToAction("Index");
}
ViewBag.VeiculoModeloId = new SelectList(db.VeiculoModelo, "Id", "Descricao", veiculo.VeiculoModeloId);
return View(veiculo);
}
// GET: Veiculoes/Edit/5
public ActionResult Edit(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Veiculo veiculo = db.Veiculo.Find(id);
if (veiculo == null)
{
return HttpNotFound();
}
ViewBag.VeiculoModeloId = new SelectList(db.VeiculoModelo, "Id", "Descricao", veiculo.VeiculoModeloId);
return View(veiculo);
}
// POST: Veiculoes/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,Placa,Cor,VeiculoModeloId,isBloqueado")] Veiculo veiculo)
{
if (ModelState.IsValid)
{
db.Entry(veiculo).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
ViewBag.VeiculoModeloId = new SelectList(db.VeiculoModelo, "Id", "Descricao", veiculo.VeiculoModeloId);
return View(veiculo);
}
// GET: Veiculoes/Delete/5
public ActionResult Delete(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Veiculo veiculo = db.Veiculo.Find(id);
if (veiculo == null)
{
return HttpNotFound();
}
return View(veiculo);
}
// POST: Veiculoes/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(int id)
{
Veiculo veiculo = db.Veiculo.Find(id);
db.Veiculo.Remove(veiculo);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
<file_sep>/SinosPark/SinosPark/Controllers/AlunoesController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using SinosPark.Models;
using SinosPark.ViewModels;
namespace SinosPark.Controllers
{
public class AlunoesController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: Alunoes
public ActionResult Index()
{
return View(db.Aluno.ToList());
}
// GET: Alunoes/Details/5
public ActionResult Details(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Aluno aluno = db.Aluno.Find(id);
if (aluno == null)
{
return HttpNotFound();
}
return View(aluno);
}
// GET: Alunoes/Create
public ActionResult Create()
{
return View();
}
public ActionResult ComprarCreditos()
{
var viewModel = new CreditoViewModel();
using (var db = new SinosParkEntities())
{
viewModel.Alunos = db.Aluno.Select(x => new SelectListItem
{
Text = x.Nome,
Value = x.Id.ToString()
})
.OrderBy(x => x.Text)
.ToList();
viewModel.PagamentoTipos = db.PagamentoTipo.Select(x => new SelectListItem
{
Text = x.Descricao,
Value = x.Id.ToString()
})
.OrderBy(x => x.Text)
.ToList();
}
return View(viewModel);
}
[HttpPost]
public ActionResult ComprovanteVenda(CreditoViewModel viewModel)
{
if (!ModelState.IsValid)
{
return RedirectToAction("ComprarCreditos");
}
using (var db = new SinosParkEntities())
{
var aluno = db.Aluno.Find(viewModel.AlunoId);
var saldoAtual = aluno.ValorSaldo;
saldoAtual += viewModel.Valor;
aluno.ValorSaldo = saldoAtual;
db.Entry(aluno).State = EntityState.Modified;
db.SaveChanges();
var pagamentoTipo = db.PagamentoTipo.Find(viewModel.PagamentoTipoId);
viewModel.AlunoNome = aluno.Nome;
viewModel.PagamentoTipoNome = pagamentoTipo.Descricao;
viewModel.AlunoMatricula = aluno.Matricula;
}
viewModel.DataCompra = DateTime.Now;
return View(viewModel);
}
// POST: Alunoes/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,CPF,Matricula,Nome,ValorSaldo")] Aluno aluno)
{
if (ModelState.IsValid)
{
db.Aluno.Add(aluno);
db.SaveChanges();
return RedirectToAction("Index");
}
return View(aluno);
}
// GET: Alunoes/Edit/5
public ActionResult Edit(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Aluno aluno = db.Aluno.Find(id);
if (aluno == null)
{
return HttpNotFound();
}
return View(aluno);
}
// POST: Alunoes/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,CPF,Matricula,Nome,ValorSaldo")] Aluno aluno)
{
if (ModelState.IsValid)
{
db.Entry(aluno).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
return View(aluno);
}
// GET: Alunoes/Delete/5
public ActionResult Delete(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Aluno aluno = db.Aluno.Find(id);
if (aluno == null)
{
return HttpNotFound();
}
return View(aluno);
}
// POST: Alunoes/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(int id)
{
Aluno aluno = db.Aluno.Find(id);
db.Aluno.Remove(aluno);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
<file_sep>/SinosPark/SinosPark/ViewModels/RelatorioFinanceiroViewModel.cs
namespace SinosPark.ViewModels
{
public class RelatorioFinanceiroViewModel
{
public string EstacionamentoNome { get; set; }
public string MesNome { get; set; }
public int MesId { get; set; }
public decimal Valor { get; set; }
}
}<file_sep>/SinosPark/SinosPark/Controllers/FuncionariosController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using SinosPark.Models;
namespace SinosPark.Controllers
{
public class FuncionariosController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: Funcionarios
public ActionResult Index()
{
return View(db.Funcionario.ToList());
}
// GET: Funcionarios/Details/5
public ActionResult Details(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Funcionario funcionario = db.Funcionario.Find(id);
if (funcionario == null)
{
return HttpNotFound();
}
return View(funcionario);
}
// GET: Funcionarios/Create
public ActionResult Create()
{
return View();
}
// POST: Funcionarios/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,Nome,CPF,isGerente,Email")] Funcionario funcionario)
{
if (ModelState.IsValid)
{
db.Funcionario.Add(funcionario);
db.SaveChanges();
return RedirectToAction("Index");
}
return View(funcionario);
}
// GET: Funcionarios/Edit/5
public ActionResult Edit(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Funcionario funcionario = db.Funcionario.Find(id);
if (funcionario == null)
{
return HttpNotFound();
}
return View(funcionario);
}
// POST: Funcionarios/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,Nome,CPF,isGerente,Email")] Funcionario funcionario)
{
if (ModelState.IsValid)
{
db.Entry(funcionario).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
return View(funcionario);
}
// GET: Funcionarios/Delete/5
public ActionResult Delete(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Funcionario funcionario = db.Funcionario.Find(id);
if (funcionario == null)
{
return HttpNotFound();
}
return View(funcionario);
}
// POST: Funcionarios/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(short id)
{
Funcionario funcionario = db.Funcionario.Find(id);
db.Funcionario.Remove(funcionario);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
<file_sep>/SinosPark/SinosPark/ViewModels/CreditoViewModel.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace SinosPark.ViewModels
{
public class CreditoViewModel
{
public int AlunoId { get; set; }
public string AlunoNome { get; set; }
public string AlunoMatricula { get; set; }
public int PagamentoTipoId { get; set; }
public string PagamentoTipoNome { get; set; }
public DateTime DataCompra { get; set; }
public List<SelectListItem> Alunos { get; set; }
public List<SelectListItem> PagamentoTipos { get; set; }
public decimal Valor { get; set; }
}
}<file_sep>/SinosPark/SinosPark/Controllers/PrecoesController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using SinosPark.Models;
namespace SinosPark.Controllers
{
public class PrecoesController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: Precoes
public ActionResult Index()
{
return View(db.Preco.ToList());
}
// GET: Precoes/Details/5
public ActionResult Details(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Preco preco = db.Preco.Find(id);
if (preco == null)
{
return HttpNotFound();
}
return View(preco);
}
// GET: Precoes/Create
public ActionResult Create()
{
return View();
}
// POST: Precoes/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,Tempo,Valor,isMoto,isAtivo")] Preco preco)
{
if (ModelState.IsValid)
{
db.Preco.Add(preco);
db.SaveChanges();
return RedirectToAction("Index");
}
return View(preco);
}
// GET: Precoes/Edit/5
public ActionResult Edit(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Preco preco = db.Preco.Find(id);
if (preco == null)
{
return HttpNotFound();
}
return View(preco);
}
// POST: Precoes/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,Tempo,Valor,isMoto,isAtivo")] Preco preco)
{
if (ModelState.IsValid)
{
db.Entry(preco).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
return View(preco);
}
// GET: Precoes/Delete/5
public ActionResult Delete(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Preco preco = db.Preco.Find(id);
if (preco == null)
{
return HttpNotFound();
}
return View(preco);
}
// POST: Precoes/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(short id)
{
Preco preco = db.Preco.Find(id);
db.Preco.Remove(preco);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
<file_sep>/SinosPark/SinosPark/Controllers/EventoesController.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using Microsoft.Owin.Security;
using SinosPark.Models;
using SinosPark.Utility;
using SinosPark.ViewModels;
namespace SinosPark.Controllers
{
public class EventoesController : Controller
{
private SinosParkEntities db = new SinosParkEntities();
// GET: Eventoes
public ActionResult Index()
{
return RedirectToAction("Lista");
}
public ViewResult RelatorioMovimentacao()
{
var evento = db.Evento
.Include(e => e.Aluno)
.Include(e => e.Estacionamento)
.Include(e => e.Veiculo)
.OrderByDescending(e=>e.Entrada);
var result = (from e in evento
let aluno = e.Aluno
let veiculo = e.Veiculo
let estacionamento = e.Estacionamento
let veiculoModelo = e.Veiculo != null ? e.Veiculo.VeiculoModelo.Descricao : ""
let pagamento = e.Pagamento.FirstOrDefault()
select new EventoViewModel
{
AlunoMatricula = aluno == null ? "" : aluno.Matricula,
AlunoNome = aluno == null ? "" : aluno.Nome,
VeiculoCor = veiculo == null ? "" : veiculo.Cor,
VeiculoPlaca = veiculo == null ? "" : veiculo.Placa,
VeiculoModelo = veiculoModelo,
EstacionamentoNome = estacionamento.Nome,
EventoId = e.Id,
Entrada = e.Entrada,
Saida = e.Saida,
PagamentoId = pagamento != null ? pagamento.Id : 0
}).ToList();
return View(result);
}
public ActionResult Lista(string matricula, string placa)
{
var evento = db.Evento
.Include(e => e.Aluno)
.Include(e => e.Estacionamento)
.Include(e => e.Veiculo);
if (!string.IsNullOrEmpty(matricula))
{
ViewBag.SearchMatricula = matricula;
evento = evento.Where(x => (x.Aluno != null && x.Aluno.Matricula.Contains(matricula)));
}
if (!string.IsNullOrEmpty(placa))
{
ViewBag.SearchPlaca = placa;
evento = evento.Where(x => (x.Veiculo != null && x.Veiculo.Placa.Contains(placa)));
}
var result = (from e in evento
let aluno = e.Aluno
let veiculo = e.Veiculo
let estacionamento = e.Estacionamento
let veiculoModelo = e.Veiculo != null ? e.Veiculo.VeiculoModelo.Descricao : ""
let pagamento = e.Pagamento.FirstOrDefault()
select new EventoViewModel
{
AlunoMatricula = aluno == null ? "" : aluno.Matricula,
AlunoNome = aluno == null ? "" : aluno.Nome,
VeiculoCor = veiculo == null ? "" : veiculo.Cor,
VeiculoPlaca = veiculo == null ? "" : veiculo.Placa,
VeiculoModelo = veiculoModelo,
EstacionamentoNome = estacionamento.Nome,
EventoId = e.Id,
Entrada = e.Entrada,
Saida = e.Saida,
PagamentoId = pagamento != null ? pagamento.Id : 0
}).ToList();
return View(result);
}
// GET: Eventoes/Details/5
public ActionResult Details(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Evento evento = db.Evento.Find(id);
if (evento == null)
{
return HttpNotFound();
}
return View(evento);
}
[HttpPost]
public ActionResult ComprovantePagamento(EventoPagamentoViewModel viewModel)
{
var evento = db.Evento.Find(viewModel.EventoId);
if (evento == null)
{
return HttpNotFound();
}
PagamentoTipo pagamentoTipo = db.PagamentoTipo.First(x => x.Id == viewModel.PagamentoId);
viewModel = new EventoPagamentoViewModel
{
PagamentoNome = pagamentoTipo.Descricao,
DataEntrada = evento.Entrada,
DataSaida = DateTime.Now
};
var tempo = viewModel.DataSaida - viewModel.DataEntrada;
var hora = tempo.Hours;
var minutos = tempo.Minutes;
if (minutos >= 30)
{
hora += 1;
}
viewModel.Tempo = new TimeSpan(0, hora, minutos, 0);
var tempoFilter = new TimeSpan(0, hora, 0, 0);
var preco = db.Preco.FirstOrDefault(x => x.Tempo == tempoFilter);
viewModel.Valor = (preco != null) ? preco.Valor : 10;
if (evento.Aluno != null)
{
viewModel.AlunoMatricula = evento.Aluno.Matricula;
viewModel.AlunoNome = evento.Aluno.Nome;
}
if (evento.Veiculo != null)
{
viewModel.VeiculoPlaca = evento.Veiculo.Placa;
viewModel.VeiculoCor = evento.Veiculo.Cor;
viewModel.VeiculoModelo = evento.Veiculo.VeiculoModelo.Descricao;
}
var pagamento = new Pagamento
{
DataHoraPagamento = DateTime.Now,
Evento = evento,
PagamentoTipo = pagamentoTipo,
Tempo = viewModel.Tempo,
Valor = viewModel.Valor
};
db.Pagamento.Add(pagamento);
db.SaveChanges();
evento.Saida = viewModel.DataSaida;
db.Entry(evento).State = EntityState.Modified;
db.SaveChanges();
return View(viewModel);
}
public ViewResult ComprovantePagamento(int pagamentoId)
{
var pagamento = db.Pagamento.Find(pagamentoId);
var viewModel = new EventoPagamentoViewModel
{
PagamentoNome = pagamento.PagamentoTipo.Descricao,
DataEntrada = pagamento.Evento.Entrada,
DataSaida = pagamento.Evento.Saida.GetValueOrDefault(),
Valor = (decimal) pagamento.Valor,
DataPagamento = (DateTime) pagamento.DataHoraPagamento,
Tempo = (TimeSpan) pagamento.Tempo
};
if (pagamento.Evento.Aluno != null)
{
viewModel.AlunoMatricula = pagamento.Evento.Aluno.Matricula;
viewModel.AlunoNome = pagamento.Evento.Aluno.Nome;
}
if (pagamento.Evento.Veiculo != null)
{
viewModel.VeiculoPlaca = pagamento.Evento.Veiculo.Placa;
viewModel.VeiculoCor = pagamento.Evento.Veiculo.Cor;
viewModel.VeiculoModelo = pagamento.Evento.Veiculo.VeiculoModelo.Descricao;
}
return View(viewModel);
}
public ActionResult Saida(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
var evento = db.Evento.Find(id);
if (evento == null)
{
return HttpNotFound();
}
var viewModel = new EventoPagamentoViewModel
{
Pagamentos = db.PagamentoTipo.Where(x => x.isAtivo).Select(x => new SelectListItem
{
Text = x.Descricao,
Value = x.Id.ToString()
})
.OrderBy(x => x.Text)
.ToList(),
DataEntrada = evento.Entrada,
};
viewModel.DataSaida = DateTime.Now;
var tempo = viewModel.DataSaida - viewModel.DataEntrada;
var hora = tempo.Hours;
var minutos = tempo.Minutes;
if (minutos >= 30)
{
hora += 1;
}
viewModel.Tempo = new TimeSpan(0, hora, minutos, 0);
var tempoFilter = new TimeSpan(0, hora, 0, 0);
var preco = db.Preco.FirstOrDefault(x => x.Tempo == tempoFilter);
viewModel.Valor = (preco != null) ? preco.Valor : 10;
viewModel.EventoId = evento.Id;
return View(viewModel);
}
// GET: Eventoes/Create
public ActionResult Create()
{
ViewBag.AlunoId = new SelectList(db.Aluno, "Id", "CPF");
ViewBag.EstacionamentoId = new SelectList(db.Estacionamento, "Id", "Nome");
ViewBag.VeiculoId = new SelectList(db.Veiculo, "Id", "Placa");
return View();
}
public ActionResult Entrada()
{
var viewModel = GetEventoViewModel();
return View(viewModel);
}
private EventoViewModel GetEventoViewModel()
{
var viewModel = new EventoViewModel
{
Alunos = db.Aluno.Select(x => new SelectListItem
{
Text = x.Nome,
Value = x.Id.ToString()
})
.OrderBy(x => x.Text)
.ToList()
};
viewModel.Alunos.Insert(0, new SelectListItem
{
Text = "",
Value = "-1"
});
viewModel.Veiculos = db.Veiculo.Select(x => new SelectListItem
{
Text = x.Placa,
Value = x.Id.ToString()
})
.OrderBy(x => x.Text)
.ToList();
viewModel.Veiculos.Insert(0, new SelectListItem
{
Text = "",
Value = "-1"
});
viewModel.Estacionamentos = db.Estacionamento.Select(x => new SelectListItem
{
Text = x.Nome,
Value = x.Id.ToString()
})
.OrderBy(x => x.Text)
.ToList();
viewModel.Entrada = DateTime.Now;
return viewModel;
}
public ViewResult RelatorioFinanceiro()
{
var vm = db.Pagamento
.GroupBy(x => new
{
x.DataHoraPagamento.Value.Month,
x.Evento.Estacionamento
})
.Select(y => new RelatorioFinanceiroViewModel
{
EstacionamentoNome = y.Key.Estacionamento.Nome,
MesId= y.Key.Month,
Valor = y.Sum(v => v.Valor.Value)
})
.ToList();
vm.ForEach(x=>x.MesNome = x.MesId.GetMonthName());
return View(vm);
}
[HttpPost]
public ActionResult Ticket(EventoViewModel viewModel)
{
if (viewModel.VeiculoId == -1 && viewModel.AlunoId == -1)
{
return RedirectToAction("Entrada");
}
Aluno aluno = null;
Veiculo veiculo = null;
if (viewModel.AlunoId > -1)
{
aluno = db.Aluno.Find(viewModel.AlunoId);
viewModel.AlunoMatricula = aluno.Matricula;
viewModel.AlunoNome = aluno.Nome;
}
var estacionamento = db.Estacionamento.Find(viewModel.EstacionamentoId);
viewModel.EstacionamentoNome = estacionamento.Nome;
if (viewModel.VeiculoId > -1)
{
veiculo = db.Veiculo.Find(viewModel.VeiculoId);
viewModel.VeiculoPlaca = veiculo.Placa;
viewModel.VeiculoCor = veiculo.Cor;
}
var evento = new Evento
{
Estacionamento = estacionamento,
Veiculo = veiculo,
Aluno = aluno,
Entrada = viewModel.Entrada.GetValueOrDefault(),
CodigoBarras = ""
};
db.Evento.Add(evento);
db.SaveChanges();
return View(viewModel);
}
// POST: Eventoes/Create
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create([Bind(Include = "Id,Entrada,Saida,CodigoBarras,VeiculoId,AlunoId,EstacionamentoId")] Evento evento)
{
if (ModelState.IsValid)
{
db.Evento.Add(evento);
db.SaveChanges();
return RedirectToAction("Index");
}
ViewBag.AlunoId = new SelectList(db.Aluno, "Id", "CPF", evento.AlunoId);
ViewBag.EstacionamentoId = new SelectList(db.Estacionamento, "Id", "Nome", evento.EstacionamentoId);
ViewBag.VeiculoId = new SelectList(db.Veiculo, "Id", "Placa", evento.VeiculoId);
return View(evento);
}
// GET: Eventoes/Edit/5
public ActionResult Edit(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Evento evento = db.Evento.Find(id);
if (evento == null)
{
return HttpNotFound();
}
ViewBag.AlunoId = new SelectList(db.Aluno, "Id", "CPF", evento.AlunoId);
ViewBag.EstacionamentoId = new SelectList(db.Estacionamento, "Id", "Nome", evento.EstacionamentoId);
ViewBag.VeiculoId = new SelectList(db.Veiculo, "Id", "Placa", evento.VeiculoId);
return View(evento);
}
// POST: Eventoes/Edit/5
// To protect from overposting attacks, please enable the specific properties you want to bind to, for
// more details see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit([Bind(Include = "Id,Entrada,Saida,CodigoBarras,VeiculoId,AlunoId,EstacionamentoId")] Evento evento)
{
if (ModelState.IsValid)
{
db.Entry(evento).State = EntityState.Modified;
db.SaveChanges();
return RedirectToAction("Index");
}
ViewBag.AlunoId = new SelectList(db.Aluno, "Id", "CPF", evento.AlunoId);
ViewBag.EstacionamentoId = new SelectList(db.Estacionamento, "Id", "Nome", evento.EstacionamentoId);
ViewBag.VeiculoId = new SelectList(db.Veiculo, "Id", "Placa", evento.VeiculoId);
return View(evento);
}
// GET: Eventoes/Delete/5
public ActionResult Delete(short? id)
{
if (id == null)
{
return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
}
Evento evento = db.Evento.Find(id);
if (evento == null)
{
return HttpNotFound();
}
return View(evento);
}
// POST: Eventoes/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeleteConfirmed(short id)
{
Evento evento = db.Evento.Find(id);
db.Evento.Remove(evento);
db.SaveChanges();
return RedirectToAction("Index");
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
db.Dispose();
}
base.Dispose(disposing);
}
}
}
|
23e922b13fa7dd3596d4ce0dbb1b393912bb0111
|
[
"Markdown",
"C#"
] | 14 |
C#
|
danielwermann/sinospark
|
cb921c77b99fdf9c24036bc71e58ee10e6e55b1d
|
12ec40bcadf70ca9f7294956d3b558cb5378608e
|
refs/heads/main
|
<repo_name>anstar9271/GIR<file_sep>/grule.py
def welcome():
print("\t","/// Welcome to GIR \\\\\\")
print("\t","\\\\\\'Guess It Right'///")
print()
print()
def rule():
print("""GAME RULES:
(A) Three levels in this game:
1. Easy Mode:-Player has three chances to guess correct number.
2. Medium:-Two chances
3. Hard:-SINGLE CHANCE
(B) Guess numbers correctly and score higher :)
""")<file_sep>/runtest.py
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 18 12:14:54 2021
@author: Dell
"""
import grule as a
import leveas as b
import levmed as c
import levhard as d
a.welcome()
a.rule()
print("'1' for Easy")
print("'2' for Medium")
print("'3' for Hard")
plyinp=int(input("Which level do you want to choose? :"))
if plyinp==1:
b.easy()
b.scoreread()
elif plyinp==2:
c.medm()
c.scoreread()
elif plyinp==3:
d.hard()
d.scoreread()
else:
print("Invalid Input!!!")
op=input("New Game? (Y/N):")
while op=="Y" or op=="y":
plyinp=int(input("Which level do you want to choose? :"))
if plyinp==1:
b.easy()
b.scoreread()
elif plyinp==2:
c.medm()
c.scoreread()
elif plyinp==3:
d.hard()
d.scoreread()
else:
print("Invalid Input!!!")
op=input("New Game? (Y/N):")<file_sep>/__init__.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 17 13:20:32 2021
@author: Dell
"""
<file_sep>/levhard.py
"""Level: Hard
Only single chance to guess the correct number.
"""
import csv
def hard():
import random as r
tscor=0
chk="T"
while chk=="T":
num=r.randint(1,6)
ent=int(input("Choose a digit between 1 to 6:"))
if ent==num:
tscor+=150
print("Right! Score till now:",tscor)
continue
else:
chk="F"
print("Game Over :(")
print("The number was:",num)
print("Your Score:",tscor)
savescr(tscor)
def savescr(scor):
file=open("comHardMode.csv","a",newline="")
fwrit=csv.writer(file)
for i in range(5):
fwrit.writerow(["Player",0])
file.close()
file=open("comHardMode.csv","r",newline="")
read1=csv.reader(file)
storinlst=list(read1)
scorestr=[]
for i in storinlst:
scorestr.append(i[1])
scoreint=list(map(int,scorestr))
if scoreint[-1]>=scor:
pass
else:
count1=0
print("....BINGO....You made it into TOP FIVE....")
plyrnm=input("Enter Name: ")
newlst=[plyrnm,scor]
for i in range(len(scoreint)):
if scor>=scoreint[i] and count1==0:
count1+=1
storinlst.insert(storinlst.index(storinlst[i]),newlst)
storinlst.pop()
file=open("comHardMode.csv","w",newline="")
fwrit2=csv.writer(file)
count=1
for i in storinlst:
if count<=5:
fwrit2.writerow(i)
count+=1
file.close()
def scoreread():
print("\n"*2)
print("...Top Five Scores...")
file=open("comEasyMode.csv","r",newline="")
read2=csv.reader(file)
for i in read2:
for j in i:
print(j,end=" ")
print()
|
40d15324edb13277fe24e22eb12132d9f2b2162f
|
[
"Python"
] | 4 |
Python
|
anstar9271/GIR
|
9a5c13af63dd19529c9c867db0f1aecf97e3b19e
|
0c9c5f34fb05e957fbeefa6ce600591f368e98b7
|
refs/heads/master
|
<file_sep># section4-Test
CSULB Fall 2019
<file_sep>
def binary_search(integers, value):
    # Base case: an empty list cannot contain the value
    if not integers:
        return False
    mid = len(integers) // 2
    if integers[mid] == value:
        return True
    elif integers[mid] < value:
        return binary_search(integers[mid + 1:], value)
    else:
        return binary_search(integers[:mid], value)
integers = [3,5,7,11,13,17,21,23,29,31]
found_or_not = binary_search(integers,5000)
if found_or_not:
print("In the List")
else:
print("Not in the list")
<file_sep>import time
t0 = time.time()
for i in range(1,10):
print(i)
t1 = time.time()
total = t1-t0
print(total)
|
06acc62e62cec172de8021f31db90cea3462fe98
|
[
"Markdown",
"Python"
] | 3 |
Markdown
|
KyawHtetWin/section4-Test
|
86207215f2a0b9d5b9d3b8d71b3b7b9e17437c32
|
d374ed1e51611cdf81ccc13419cca8048caf763d
|
refs/heads/master
|
<file_sep># tooltip
A pure CSS mouse-hover tooltip.
Preview the [DEMO](https://jofunliang.github.io/tooltip/).
# Installation
**Install with npm**
```
npm install tooltip-core --save
```
**Install with yarn**
```
yarn add tooltip-core
```
**Install in the browser**
Just include the "tooltip.css" or "tooltip.min.css" file from the dist folder in your page, for example:
```
<link rel="stylesheet" href="css/tooltip.css" />
or
<link rel="stylesheet" href="css/tooltip.min.css" />
```
# Usage
1. Add the **data-tooltip** attribute and the **data-tooltip-text** attribute to the tag that should show a tooltip;
2. The **data-tooltip** value sets the direction and can be any one of "left", "right", "top", "bottom", "top-left", "top-right", "bottom-left" or "bottom-right";
3. The **data-tooltip-text** attribute holds the text to display.
For example:
```
<p data-tooltip="left" data-tooltip-text="左边提示">左边提示</p>
```
Line breaks: insert the **
** entity wherever the text should wrap.
For example:
```
<p data-tooltip="right" data-tooltip-text="右边提示样式 
 这个是右边提示样式哦 
 css3提示样式">右边提示</p>
```
# Compatibility:
1) IE9, IE9+
2) Firefox
3) Chrome
4) 360 Browser
5) Sogou Explorer
# MIT License
Copyright (c) 2018 梁毫卓
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
<file_sep>const { src, dest, watch, series, parallel } = require("gulp");
const clean = require('gulp-clean')
const miniCSS = require('gulp-clean-css')
const autoprefixer = require('gulp-autoprefixer')
const sass = require('gulp-sass')
const rename = require('gulp-rename')
const connect = require('gulp-connect')
function cleanDist () {
return src('dist', {read: false, allowEmpty: true})
.pipe(clean())
}
const sources = 'src/tooltip.scss'
function minifyCSS () {
return src(sources)
.pipe(sass())
.pipe(autoprefixer())
.pipe(dest("dist"))
.pipe(miniCSS())
.pipe(
rename({
suffix: ".min",
})
)
.pipe(dest("dist"))
.pipe(connect.reload());
}
function webServer () {
return connect.server({
livereload: true,
port: 3000,
index: 'example.html'
})
}
function watcher () {
return watch(['src/*.scss'], series(minifyCSS));
}
exports.default = series(cleanDist, parallel(webServer, minifyCSS, watcher));
|
8782a4d029e5e3198847da31e59d7ae3f39beab5
|
[
"Markdown",
"JavaScript"
] | 2 |
Markdown
|
JofunLiang/tooltip
|
d685857f949684facc73c5db621af26799baadb6
|
c700335839899b4130b7b8e36cf870a45497ed3d
|
refs/heads/master
|
<file_sep>/*
hole = free memory space
block = occupied memory space
*/
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <time.h>
const int N = 100;
const double avg_size = 4.0;
const double stddev = 1.0;
int X = 5;
int search_count = 0;
double rand_gen();
double normalRandom();
void reset_memory(int memory[]);
void init_memory(int memory[]);
int generate_random_request();
double memory_utilization(int memory[]);
void release_occupied_block(int memory[]);
void update_size(int memory[]);
int first_fit(int memory[], int s);
int next_fit(int memory[], int s);
int best_fit(int memory[], int s);
int worst_fit(int memory[], int s);
int main()
{
int memory[N];
double memory_utilizations[5];
reset_memory(memory);
init_memory(memory);
int total_search = 0;
int requests = 0;
for (size_t i = 0; i < X; i++)
{
int fail = 0; // -1 = fail, 0 = succeed
while (fail > -1)
{
int s = generate_random_request();
// call your memory allocation method below:
// fail = first_fit(memory, s);
// fail = next_fit(memory, s);
// fail = best_fit(memory, s);
fail = worst_fit(memory, s);
requests++;
total_search += search_count;
search_count = 0;
}
memory_utilizations[i] = memory_utilization(memory);
release_occupied_block(memory);
}
// display the results
double ratio = (double)total_search / (double)requests;
printf("ratio = %f\n", ratio);
double total = 0.0;
for (size_t i = 0; i < X; i++)
{
total += memory_utilizations[i];
}
printf("Average memory utilizatioin = %f\n", total / X);
system("pause");
return 0;
}
// 0: succeed; -1: failed to allocate
int first_fit(int memory[], int s)
{
for (int j = 0; j < N - s; j++) {
if ((memory[j] < 0) && s <= (memory[j]*-1)) {
for (int k = 0; k < s; k++) {
memory[j + k] = s;
}
update_size(memory);
return 0;
}
search_count++;
}
return -1;
}
// return the index of the hole being allocated, or -1 if failed
int next_fit(int memory[], int s)
{
for (int j = 0; j < N - s; j++) {
if ((memory[j] < 0) && s <= (memory[j] * -1)) {
for (int k = 0; k < s; k++) {
memory[j + k] = s;
}
update_size(memory);
return j;
}
search_count++;
}
return -1;
}
int best_fit(int memory[], int s)
{
for (int j = 0; j < N - s; j++) {
for (int r = 0; r < avg_size; r++) {
if ((memory[j] < 0) && s == (memory[j] * -1) + r) {
for (int k = 0; k < s; k++) {
memory[j + k] = s;
}
update_size(memory);
return 0;
}
}
search_count++;
}
return -1;
}
int worst_fit(int memory[], int s)
{
int max_index = 0, max_number = 0;
for (int i = 0; i < N - s; i++) {
if (memory[i] > max_number) {
max_number = memory[i];
max_index = i;
}
}
for (int j = max_index; j < N - s; j++) {
if ((memory[j] < 0) && s <= (memory[j]*-1)) {
for (int k = 0; k < s; k++) {
memory[j + k] = s;
}
update_size(memory);
return 0;
}
search_count++;
}
return -1;
}
void reset_memory(int memory[])
{
for (size_t i = 0; i < N; i++)
{
memory[i] = 0;
}
}
int generate_random_request()
{
srand(time(NULL));
int s = N;
while (s < 1 || s > N - 1)
{
s = (int)(normalRandom() * stddev + avg_size);
}
return s;
}
void init_memory(int memory[])
{
srand(time(NULL));
int hole_or_block = rand() % 2;
int i = 0;
while (i < N)
{
int size = N;
while (size < 0 || size > N - 1)
{
size = (int)(normalRandom() * stddev + avg_size);
}
if (i + size >= N)
{
size = N - i;
}
int value = size;
if (hole_or_block == 0)
{
value = -value;
hole_or_block = 1;
}
else
{
hole_or_block = 0;
}
for (size_t j = 0; j < size; j++)
{
memory[i] = value;
i++;
}
}
}
double rand_gen() {
return ((double)(rand()) + 1.) / ((double)(RAND_MAX)+1.);
}
double normalRandom() {
double v1 = rand_gen();
double v2 = rand_gen();
return cos(2 * 3.14 * v2) * sqrt(-2. * log(v1));
}
double memory_utilization(int memory[])
{
int occupied = 0;
for (size_t i = 0; i < N; i++)
{
if (memory[i] > 0)
{
occupied++;
}
}
return (double)occupied / (double)N;
}
void release_occupied_block(int memory[])
{
int occupied_indexes[N];
int number_occupied_blocks = 0;
int i = 0;
while (i < N)
{
if (memory[i] > 0)
{
occupied_indexes[number_occupied_blocks] = i;
number_occupied_blocks++;
}
i += abs(memory[i]);
}
srand(time(NULL));
int lower = 0;
int upper = number_occupied_blocks - 1;
int random_index = rand() % (upper - lower + 1) + lower;
int block_index = occupied_indexes[random_index];
int block_size = memory[block_index];
int new_hole_size = block_size;
int new_index = block_index;
if (block_index > 0)
{
int before_value = memory[block_index - 1];
if (before_value < 0)
{
new_hole_size += abs(before_value);
new_index -= abs(before_value);
}
}
if (block_index + block_size < N)
{
int after_value = memory[block_index + block_size];
if (after_value < 0)
{
new_hole_size += abs(after_value);
}
}
for (size_t i = 0; i < new_hole_size; i++)
{
memory[new_index + i] = -new_hole_size;
}
}
void update_size(int memory[]) {
int temp = 0, size, i = 0;
while (i < N) {
size = 0;
if (memory[i] < 0) {
temp = i;
for (int j = 0; j < N; j++) {
if (memory[i + j] < 0)
size++;
else
break;
}
if (size != -memory[i]) {
for (int k = 0; k < size; k++) {
if ((temp+k) < N-1) {
memory[temp + k] = -size;
}
}
}
i += size;
}
else {
i++;
}
}
}
|
7dd1d93aede49d49089637327fb67e1390489a1e
|
[
"C"
] | 1 |
C
|
sabotoshiboi/homework5
|
17a7e6543e18295ce7ad62b6a2321f4033bba8af
|
6da0845c2ec83c654cc71297304f2397a6f6e197
|
refs/heads/master
|
<file_sep>import yfinance as yf
instrument_to_name_map = [("^DJI", "DJI")]
interval_period = ("1d", "10y")
def file_name(inst):
return "data/" + inst + "_" + interval_period[0] + "_" + interval_period[1] + ".csv"
def download():
for inst in instrument_to_name_map:
data = yf.download(
tickers=inst[0],
period=interval_period[1],
interval=interval_period[0],
group_by='ticker',
auto_adjust=True,
prepost=True,
threads=True,
proxy=None
)
file = file_name(inst[1])
data.to_csv(file)
download()
<file_sep>import modelling as md
import pandas as pd
import feature_engineering as fe
import os
data = pd.read_csv("data/DJI_1d_10y_signal.csv")
data = data.drop(["Date"], axis=1)
data = data.ffill(axis=0)
print(data)
xTrain, xTest, yTrain, yTest = fe.ordered_train_test_split(data, "Signal")
results = pd.DataFrame()
results["true_y"] = yTest
ada = md.fit_ada_boost(xTrain, yTrain, True)
results["prediction"] = ada.predict(xTest)
print("Adaboost Classifier")
print(results["prediction"])
print(md.get_results(results["true_y"], fe.generate_y(results, "prediction")))
svm = md.fit_SVM(xTrain, yTrain)
results["prediction"] = svm.predict(xTest)
print("SVM Classifier")
print(md.get_results(results["true_y"], results["prediction"]))
print('Accuracy of the SVM on test set: {:.3f}'.format(svm.score(xTest, yTest)))
knn = md.fit_KNN(xTrain, yTrain, True)
results["prediction"] = knn.predict(xTest)
print("KNN")
print(md.get_results(results["true_y"], results["prediction"]))
print('Accuracy of the KNN on test set: {:.3f}'.format(knn.score(xTest, yTest)))
rf = md.fit_random_forest(xTrain, yTrain)
results["prediction"] = rf.predict(xTest)
print("Random Forest Classifier")
print(md.get_results(results["true_y"], results["prediction"]))
lr = md.fit_logistic_regression(xTrain, yTrain)
results["prediction"] = lr.predict(xTest)
print("Logistic Regression")
print(md.get_results(results["true_y"], results["prediction"]))
print('Accuracy of the Logistic Regression on test set: {:.3f}'.format(lr.score(xTest, yTest)))
gb = md.fit_gradient_boosting(xTrain, yTrain, True)
results["prediction"] = gb.predict(xTest)
print("Gradient Boosting")
print(md.get_results(results["true_y"], results["prediction"]))
print('Accuracy of the GBM on test set: {:.3f}'.format(gb.score(xTest, yTest)))
xTrain_fnn, xTest_fnn, yTrain_fnn, yTest_fnn = md.reconstruct(xTrain, xTest, yTrain, yTest)
fnn = md.fit_FNN(xTrain_fnn, yTrain_fnn)
results["prediction"] = fnn.predict(xTest_fnn).flatten()
print("FNN")
print(md.get_results(yTest_fnn, results["prediction"]))
print('Accuracy of the FNN on test set: {:.3f}'.format(md.accuracy_score(yTest_fnn, results["prediction"])))
"""
Regression On Close Price
"""
data = pd.read_csv("data/data_reg_1d_10y.csv")
data = data.drop(["Date"], axis=1)
xTrain, xTest, yTrain, yTest = fe.ordered_train_test_split(data, "Y")
results = pd.DataFrame()
results["true_y"] = yTest
knn_reg_close = md.fit_KNN_reg(xTrain, yTrain, True)
results["prediction"] = knn_reg_close.predict(xTest)
results["trueSignal"] = fe.generate_y(results, "true_y")
results["signal"] = fe.generate_y(results, "prediction")
print("KNN Regressor")
print(md.get_results(results["trueSignal"], results["signal"]))
gradient_reg_close = md.fit_gradient_boosting_reg(xTrain, yTrain, True)
results["prediction"] = gradient_reg_close.predict(xTest)
results["trueSignal"] = fe.generate_y(results, "true_y")
results["signal"] = fe.generate_y(results, "prediction")
print("Gradient Boosting Regressor")
print(md.get_results(results["trueSignal"], results["signal"]))
|
a9b14f2fe504d27a5d47eacd29b89704b096715d
|
[
"Python"
] | 2 |
Python
|
puiyanng/djia_price_prediction
|
21e875574d2c14b889b06e59614080da3a191807
|
31348872fe2799b92dcfc150c33cf19c5df90c83
|
refs/heads/master
|
<repo_name>FDirectionClear/F-Music<file_sep>/src/src/store/actions.js
import * as types from './mutation-types'
import { playMode } from 'common/js/config'
import { shuffle } from 'common/js/tools'
const {
sequence,
loop,
random
} = playMode
export const selectPlay = (
{commit, state},
{songList, index}
) => {
if(state.mode === playMode.random) {
    // We need to check whether the current mode is random and handle that case specially.
    // Otherwise, clicking a song in song-list again while in random mode would show
    // random mode but actually play in sequence order. This happens because the previous
    // playback switched to random, but selecting a song from song-list again goes through
    // selectPlay, which would otherwise reset playList to the same order as sequenceList,
    // so here we need to re-shuffle the playback list.
var list = shuffle(state.sequenceList)
index = list.findIndex(
curr => curr === state.sequenceList[index]
)
} else {
var list = songList
}
commit(types.SET_FULLSCREEN, true)
commit(types.SET_PLAYING, true)
commit(types.SET_SEQUENCELIST, list)
commit(types.SET_PLAYLIST, list)
commit(types.SET_CURRENTINDEX, index)
}
export const setPlayMode = (
{commit, state},
mode
) => {
switch (mode) {
case sequence :
      // Prevent random -> sequence from changing currentIndex, which would switch songs immediately
var newIndex= state.sequenceList.findIndex(
curr => curr === state.playList[state.currentIndex]
)
commit(types.SET_MODE, mode)
commit(types.SET_PLAYLIST, state.sequenceList)
commit(types.SET_CURRENTINDEX, newIndex)
break
case loop :
commit(types.SET_MODE, mode)
break
case random :
      // Prevent sequence -> random from changing currentIndex, which would switch songs immediately
const list = shuffle(state.sequenceList)
var newIndex= list.findIndex(
curr => curr === state.playList[state.currentIndex]
)
commit(types.SET_MODE, mode)
commit(types.SET_PLAYLIST, list)
commit(types.SET_CURRENTINDEX, newIndex)
break
}
}
<file_sep>/src/src/directive/f-lazyload/f-lazyload.js
let viewHeight
export function flazyload (el, binding){
// if(el.src)
// return
// viewHeight = typeof viewHeight === 'undefined'
  //                     ? document.documentElement.clientHeight // cache the viewport height
// : viewHeight
// if(!binding.expression)
// return
// let rect = el.getBoundingClientRect()
// if(rect.bottom > 0 && rect.top < viewHeight){
// el.src = binding.expression
// }
  let parent = el.parentNode
  // attach a scroll listener to every ancestor so visibility can be re-checked while scrolling
  while(parent && parent !== document.body){
    parent.addEventListener('scroll', (e)=>{
      // TODO: re-run the visibility check above when an ancestor scrolls
    })
    parent = parent.parentNode
  }
}
// https://cn.vuejs.org/v2/guide/custom-directive.html
// https://www.jb51.net/article/112355.htm
<file_sep>/src/src/store/state.js
import { playMode } from 'common/js/config'
const state = {
  singer: {}, // singer of the current album
  fullscreen: false, // whether the player is shown fullscreen
  playing: false, // whether the player is currently playing
  playList: [], // current play list
  sequenceList: [], // current play list (actual playback order)
  mode: playMode.sequence, // current play mode, sequential by default
  currentIndex: -1 // index of the song currently playing, -1 when nothing is playing
}
export default state
<file_sep>/src/index.js
import 'common/style/index'
import Vue from 'vue'
import App from './App.vue'
import router from '@/router/router'
import store from '@/store/index'
import fastclick from 'fastclick'
fastclick.attach(document.body)
new Vue({
el: '#app',
router,
store,
render: (h) => h(App)
})
localStorage.setItem('name','方向明')
console.log(`localStorage:${localStorage.getItem('name')}`)
function openDB (myDB) {
let request = window.indexedDB.open(myDB.name)
  request.onerror = (e) => {
console.log("open indexedDB error!")
}
request.onsuccess = (e) => {
console.log('open indexedDB success!')
myDB.db = e.target.result
}
request.onupgradeneeded = () => {
console.log('upgradeneeded!')
var store = request.result.createObjectStore('books', {
keyPath: 'isbn'
})
var title = store.createIndex('by_title', 'title', {
unique : true
})
var authorIndex = store.createIndex('by_author', 'author')
store.put({
title: '七龙珠',
author: '鸟山明',
isbn: 123456
})
store.put({
title: '百变小樱魔术卡',
author: '爱心屋',
isbn: 234567
})
store.put({
title: '请问要点兔子么',
author: '芳文社',
isbn: 345678
})
store.put({
title: '川柳少女',
author: '芳文社',
isbn: 456789
})
}
}
function zsgc ({ db }) {
var transaction = db.transaction('books', 'readwrite')
var store = transaction.objectStore('books')
  // Lookups are notoriously expensive; reading data is an asynchronous process whose core method is store.get().
  // Updating data is also asynchronous, with store.put() as the core method. When the keyPath of the object
  // passed to put already exists, put updates the data stored under that keyPath; if the keyPath does not
  // exist yet, the object is added to the ObjectStore.
  // For demonstration we first fetch the '川柳少女' record from the ObjectStore and then change its
  // author to '<NAME>', i.e. a get followed by a put.
store.get(456789).onsuccess = function (e) {
var book = e.target.result
book.author = 'Mad House'
console.log(`获取到原来的川柳少女信息:`)
console.log(book)
var req = store.put(book)
req.onsuccess = function (e) {
console.log('update <川柳少女> success !')
}
}
}
const myDB = {
name: 'fangDB',
version:'1.0.0',
db: null
}
openDB(myDB)
setTimeout(()=>{
zsgc(myDB)
},3000)
<file_sep>/README.md
# F-Music
My front-end portfolio project: a clean, refreshing music WebApp.
The prototype of this project comes from a well-known principal author of 《Vue.js权威指南》, **but I independently re-implemented the entire JS part of the project with my own code.**
Core tech stack: VueJS + Vuex + Vue-router + Webpack + a very small amount of jQuery + some open-source tools from GitHub + my own best-effort coding. (Vue-cli is not used; webpack is configured from scratch.)
Development status: the player core is finished!
# Demo








<file_sep>/src/src/api/config.js
// This file holds the common request parameters shared by the various API endpoints, to avoid duplicated code
export const commonParams = {
g_tk: '6<PASSWORD>',
loginUin: '947288309',
format: 'json',
inCharset: 'utf-8',
outCharset: 'utf-8',
notice: '0',
platform: 'h5',
needNewCode: 0
}
export const jsonpOptions = {
param: 'jsonpCallback',
prefix: 'jp'
}
export const ERR_OK = 0 // status code for a successful response
<file_sep>/src/common/js/tools.js
import axios from 'axios'
import { isArray } from 'util';
export const noop = '' // unified no-op value
export function parse(data /*data ?: object*/){
if(typeof data !== 'object') return
let res = ""
for(var key in data){
let value = data[key] !== undefined ? data[key] : noop
res += `${encodeURIComponent(key)}=${encodeURIComponent(value)}&`
}
return res.substring(0,res.length-1)
}
export function getStaticSource(url) {
  // used specifically to fetch static resources so they can be cached in localStorage
return axios.get(url)
}
// Convert a string so that its first letter is uppercase
export function capitalize(str) {
  // return an empty string if str is undefined
  if(typeof str === 'undefined') {
    return ''
  }
  // return str unchanged if its first character is not a lowercase letter
  if(str.search(/^[a-z]/) === -1) {
return str
}
return str.slice(0,1).toUpperCase() + str.slice(1)
}
// Get a random integer within a range (inclusive)
export function getRandomInt (min, max) {
return Math.floor(Math.random() * (max - min + 1) + min)
}
// Classic shuffle function; approach adapted from references found online.
// Returns a new, shuffled array. Some argument validation has been added.
export function shuffle (arr) {
  if(!Array.isArray(arr)) {
    console.warn("Please make sure the argument passed to shuffle is an array")
return false
}
const _arr = [...arr]
for(let i = 0, len = _arr.length; i < len; i++) {
const random = getRandomInt(0, i)
let temp = _arr[i]
_arr[i] = _arr[random]
_arr[random] = temp
}
return _arr
}
/**
 * Generator-based state machine
 * @param {Number, String} start index of the starting state; the state at this index itself is not included
 * @param {...any} modes the modes to cycle through
 */
export function StatusChange(start = 0, ...modes /*start : Number | String, modes ?: any */) {
if(
    // if start cannot be parsed as a number
    typeof start !== 'number' &&
    isNaN(start)
  ) {
    console.warn(
      `The first argument start: ${start} passed to changeMode cannot be parsed as a number, ` +
      `or no modes to cycle through were provided. Please check that the arguments match what you expect.`
)
return false
}
var modeLen = modes.length, i = 0
let gen = function* () {
while(true) {
yield modes[i]
i < modeLen - 1 ? i ++ : i = 0
}
}
let g = gen()
for(let i = 0; i < start + 1; i++) {
var {value} = g.next()
}
return {
    g, // return the iterator; subsequent external g.next() calls start from start + 1
value
}
}
<file_sep>/src/src/store/mutation-types.js
export const SET_SINGER = 'SET_SINGER'
export const SET_FULLSCREEN = 'SET_FULLSCREEN'
export const SET_PLAYING = 'SET_PLAYING'
export const SET_PLAYLIST = 'SET_PLAYLIST'
export const SET_SEQUENCELIST = 'SET_SEQUENCELIST'
export const SET_MODE = 'SET_MODE'
export const SET_CURRENTINDEX = 'SET_CURRENTINDEX'
<file_sep>/src/api/recommend.js
import { getJson } from 'common/js/getJson'
import { commonParams } from './config'
// Fetch the data for the carousel on the recommend page
export function getSliderMessage(){
  // To avoid duplicated code, merge in the shared commonParams and let endpoint-specific values override them
const data = Object.assign({},commonParams,{
_: '1560068856152'
})
const conf = {
url:"/api/getSliderMessage",
data
}
return getJson(conf)
}
// Fetch the data for the hot playlist section of the recommend page
export function getHotList(){
const data = Object.assign({},commonParams,{
platform: 'yqq',
hostUin: 0,
sin: 0,
ein: 29,
sortId: 5,
needNewCode: 0,
categoryId: 10000000,
rnd: Math.random(),
format: 'json'
})
const conf = {
url : '/api/getHotList',
data
}
return getJson(conf)
}
<file_sep>/src/src/api/singer.js
import getJsonp from 'common/js/getJsonp'
import { commonParams } from './config'
export function getSingerList () {
const data = Object.assign({},commonParams,{
channel: 'singer',
page: 'list',
key: 'all_all_all',
pagesize: 100,
pagenum: 1,
hostUin: 0,
needNewCode: 0,
platform: 'yqq'
})
const conf = {
url: 'https://c.y.qq.com/v8/fcg-bin/v8.fcg',
data
}
return getJsonp(conf)
}
export function getSingerDetail (singerID) {
if(!singerID)
    return Promise.reject(`singerID is undefined, fetching the singer detail is not possible`)
const data = Object.assign({},commonParams,{
hostUin: 0,
needNewCode: 0,
platform: 'yqq',
order: 'listen',
begin: 0,
    num: 100, // num appears to be the number of records to fetch
songstatus: 1,
singermid: singerID
})
const conf = {
url: 'https://c.y.qq.com/v8/fcg-bin/fcg_v8_singer_track_cp.fcg',
data
}
return getJsonp(conf)
}
<file_sep>/src/common/js/getJson.js
import { noop, parse } from 'common/js/tools'
// automatic tree-shaking
// import jsonp from 'jsonp'
// Used to GET or POST JSON data; could also be rewritten with axios
export function getJson({url,type="GET",data={}, async = true }
  /* url: request URL, type: request method, data ?: Object request payload, async ?: bool whether to send asynchronously */){
return new Promise((resolve, reject) => {
let xhr = new XMLHttpRequest()
type.toUpperCase() === "GET" ? get() : post()
xhr.responseType = "json"
xhr.onreadystatechange = handler
xhr.send(data)
    // POST branch
function post(){
xhr.open('POST',url,async)
xhr.setRequestHeader(
"Content-type",
"application/x-www-form-urlencoded"
)
data = parse(data)
}
    // GET branch
function get(){
url += (url.indexOf('?') < 0 ? "?" : "&") + parse(data)
xhr.open('GET',url,async)
data = null
}
    // readyState change handler
function handler(){
if(this.readyState !== 4){
return
}
if(this.status === 200){
resolve(this.response)
} else {
reject(new Error(this.statusText))
}
}
})
}
<file_sep>/src/common/js/Song.js
import { getLyric } from 'api/song'
import { ERR_OK } from 'api/config'
export default class Song {
constructor ({
    id, // song id
    mid, // song mid
    singer, // singer(s)
    name, // song name
    album, // album name
    duration, // playback duration of the song
    strMediaMid, // the song's media id (a parameter fragment in the url)
    image, // song image
    url, // the real request url of the song
    lyric // lyrics
}) {
this.id = id
this.mid = mid
this.singer = singer
this.name = name
this.album = album
this.duration = duration
this.image = image
this.strMediaMid = strMediaMid
this.url = url
this.filename = `C400${this.mid}.m4a`
}
getLyric () {
getLyric(this.mid).then((res)=>{
if(res.retcode === ERR_OK) {
console.log(res)
        this.lyric = res.lyric
console.log(res.lyric)
}
})
}
}
// Because Song takes many parameters, this factory method conveniently picks the required fields
// out of the raw data and passes them to the constructor; createSong is not meaningful on its own.
export function createSong (musicData) {
return new Song ({
id : musicData.songid,
mid: musicData.songmid,
singer: formatSinger(musicData.singer),
name: musicData.songname,
album: musicData.albumname,
duration: musicData.interval,
image: `https://y.gtimg.cn/music/photo_new/T002R300x300M000${musicData.albummid}.jpg?max_age=2592000`,
strMediaMid: musicData.strMediaMid,
url: `http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400${musicData.strMediaMid}.m4a?guid=1948695352&vkey=C2AF17B96C5C7F1D07DFE259964DD5C72BD0CDD600B68D021A36120DB25A25B42AEAE51C8E8EC653293026E04421D2BA1F3BD1EDBA432C1D&uin=0&fromtag=38`
})
}
// currently working url template
// http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400003iHc0e2UIgMC.m4a?guid=4057747536&vkey=<KEY>FE7FD8B95E230A06A324480C88&uin=0&fromtag=38
// deprecated template No.6, July 20
//http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400003z4eeo3mteMn.m4a?guid=6068696641&vkey=CEE31FE9622352D6A6759516F1436579424FB0785DAB37EF6A8DBE6BA6E3436D07378B2DB737BAE577B72B56156C8B680ACD5D141473899C&uin=0&fromtag=38
// deprecated template No.5, July 19
//http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400001Qu4I30eVFYb.m4a?guid=7602163028&vkey=21A1F70507672E4D68EADAFBF99E235E77F854F716424DDF02C16112B9BC89A7DC92A4D8BD7CAC682E9C4B2E5A699680E658F7233A4DF195&uin=0&fromtag=38
// deprecated song playback url template No.4
// http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400003iHc0e2UIgMC.m4a?guid=2305743866&vkey=731B8464FE7699CA3307A69A933F4FFAF62AFE99E526EAB3BEFEADFF9352C2F35A5A261BEFCC821EBD<KEY>E5C1071175DD559833&uin=0&fromtag=38
// deprecated song playback url template No.3
// http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400233704216.m4a?guid=2305743866&vkey=443074E46FCE8E6152F560450ABC36A49ADD697301BC11560C0AFAA2ECDF265A844FDA1503D7D666265B8AAAA89871AD558046DFB0A8732A&uin=0&fromtag=38
// deprecated song playback url template No.2
// http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400${musicData.songid}.m4a?guid=2305743866&vkey=<KEY>&uin=0&fromtag=38
// deprecated song playback url template No.1
// http://aqqmusic.tc.qq.com/amobile.music.tc.qq.com/C400233704216.m4a?guid=2305743866&vkey=<KEY>&uin=0&fromtag=38
// When there are multiple singers, format the names into a single string name1/name2/name3/...
function formatSinger (singerList) {
if(typeof singerList === 'undefined' || singerList === null){
return ''
} else if(!Array.isArray(singerList)){
return singerList
} else {
let res = []
singerList.forEach((item) => {
res.push(item.name)
})
return res.join('/')
}
}
<file_sep>/src/src/common/js/domOps.js
import { capitalize } from './tools'
// Add one or more classes to a dom element
export function addClass (el, classes
/*el:dom, classes?:string|Array*/) {
if(Array.isArray(classes)){
classes.forEach((cur) => {
if(!hasClass(cur)){
el.classList.add(cur)
}
})
return
}
if(!hasClass(classes)){
el.classList.add(classes)
}
}
// Check whether a dom element has a given class
export function hasClass (el, target
/*el:dom, target?:string*/) {
return el.classList.contains(target)
}
// Build DOM trees through a Proxy
export const dom = new Proxy({}, {
get (target, property) {
return function (attrs = {}, ...children) {
const el = document.createElement(property)
for(let prop of Object.keys(attrs)) {
el.setAttribute(prop, attrs[prop])
}
for(let child of children) {
      // each child may be either a string or a native DOM node
if(typeof child === 'string'){
child = document.createTextNode(child)
}
el.appendChild(child)
}
return el
}
}
})
// Check whether an object is an element node
export function isDom (el) {
  // HTMLElement itself must be type-checked first, because even in browsers that
  // support HTMLElement its type differs: in Chrome and Opera the type of
  // HTMLElement is 'function'
var res = (typeof HTMLElement === 'object' ||
typeof HTMLElement === 'function')
? (function () {
return el instanceof HTMLElement
})()
: (function () {
return el &&
typeof el === 'object' &&
el.nodeType === 1 &&
typeof el.nodeName === 'string'
})()
return res
}
// Get or set a dom element's dataset attribute; whether it gets or sets depends on whether val is provided
export function getDataset (el, key, val) {
  if(!isDom(el)) {
    // return immediately if the target is not a DOM element
return false
}
let res = null
if(key !== undefined && val){
el.setAttribute(`data-${key}`, val)
} else {
    // if the value under key is a string that can be parsed as a number, convert it to a float
res = !isNaN(el.dataset[key]) ? parseFloat(el.dataset[key]) : res
}
return res
}
let vendor = (() => { // vendor : String
  const div = dom.div() // create a div
const transformNames = {
'webkit': 'webkitTransform',
'Moz': 'MozTransform',
'O': 'OTransform',
'ms': 'msTransform',
'standard' :'transform'
}
for(let key in transformNames) {
if(typeof div.style[transformNames[key]] !== 'undefined'){
return key
}
}
  return false // returning false means the browser does not support transform at all
})()
// Automatically add the vendor prefix suited to the current browser
export function prefixStyle (style) { // style : String
  if(vendor === false) {
    return false // the browser may not support CSS3
}
if(vendor === 'standard') {
return style
}
return vendor + capitalize(style)
}
let _uid = 0
// Get a dom element's total offset relative to the window
export function getSumOffset (el, bol = true
  /* el ?: DOM target element; bol ?: true, whether a null el should simply return an object with left and top both 0 */) {
if(
(el === null && !bol) ||
el === undefined ||
(!isDom(el) && !bol)
) {
console.log(
      // when display is none, offsetParent is null and the offset cannot be computed
      `${el} is not a dom element node, or is null, or no argument was passed. Please check that el really points to a dom element, or whether its display is none.`
)
return false
}
if(el === null) {
return {
left: 0,
top: 0
}
} else {
const parent = el.offsetParent
let left = el.offsetLeft
let top = el.offsetTop
if(parent !== document.body && el !== document.body ){
      const rect = getSumOffset(parent, bol) // in non-strict mode arguments.callee could be used to decouple this call from the function name
left += rect.left
top += rect.top
}
return {left, top}
}
}
// webkitTransform:
// translate3d(-232px, 0px, 0px)
export function getTranslateVal(el) {
  // Note: this method currently only supports a transform that contains a single property
  if(!isDom(el)) {
    console.warn(`getTranslateVal: ${el} is not a dom node, please check that el really points to a dom element`)
return false
}
const TRANSFORM = prefixStyle('transform')
if(!el.style[TRANSFORM]) {
return {X: null, Y:null, Z:null}
}
const reg = /(?<!translate)-?\d+/gi
const [X , Y , Z] = el.style[TRANSFORM].match(reg)
return {
X: X / 1,
Y: Y / 1,
Z: Z / 1
}
}
|
f3d49a3130bee29b3a93eb56535e46503aced596
|
[
"JavaScript",
"Markdown"
] | 13 |
JavaScript
|
FDirectionClear/F-Music
|
51d4fff586f396cdccdd7e9042ff695432645ddc
|
947ac110be21e143df46bd503e1fd0d27700237a
|
refs/heads/master
|
<file_sep>using System;
using System.IO;
namespace HexPatcher
{
class Program
{
static void Main(string[] args)
{
var configFilePath = "config.cfg";
if (args.Length > 0)
{
configFilePath = args[0];
}
if (!File.Exists(configFilePath))
{
Console.WriteLine($"Configuration file '{configFilePath}' not found.");
Console.WriteLine($"Working directory path: {Directory.GetCurrentDirectory()}");
}
else
{
var config = File.ReadAllLines(configFilePath);
if (config.Length < 3)
{
Console.WriteLine("Invalid config file.");
Console.WriteLine("Please use the following format:");
Console.WriteLine("BINARY FILE PATH");
Console.WriteLine("ORIGINAL BYTES TO FIND");
Console.WriteLine("MODIFIED BYTES TO REPLACE THE ORIGINAL WITH");
}
else
{
var filePath = config[0];
var originalHex = config[1];
var modifiedHex = config[2];
if (!File.Exists(filePath))
{
Console.WriteLine($"File '{filePath}' does not exist.");
}
else
{
var fileBytes = File.ReadAllBytes(filePath);
var originalBytes = new ByteFilter(originalHex);
var moddedBytes = new ByteFilter(modifiedHex);
// Try to find the original bytes from the file
var startIndex = GetStartIndex(fileBytes, originalBytes);
if (startIndex < 0)
{
Console.WriteLine("Original bytes were not found.");
}
else
{
// Make a backup file
if (SafeWriteBytesToFile(filePath + ".bak", fileBytes))
{
// Replace with the modified bytes & overwrite the original file
WriteBytes(fileBytes, startIndex, moddedBytes);
SafeWriteBytesToFile(filePath, fileBytes);
}
}
}
}
}
Console.ReadKey();
}
/// <summary>
/// Writes the given bytes to the given file path.
/// </summary>
/// <returns>True if success.</returns>
public static bool SafeWriteBytesToFile(string filePath, byte[] fileBytes)
{
try
{
File.WriteAllBytes(filePath, fileBytes);
Console.WriteLine("Bytes written to path: " + filePath);
return true;
}
catch (Exception e)
{
Console.WriteLine("Could not write bytes to file: " + e.Message);
return false;
}
}
/// <summary>
/// Writes the bytes from the given ByteFilter into the given byte array
/// starting at the given index.
/// </summary>
public static void WriteBytes(byte[] writeTo, long startIndex, ByteFilter data)
{
for (long i = 0; i < data.Length; i++)
{
if (data.IsWildcardIndex(i))
{
continue;
}
else
{
writeTo[startIndex + i] = data.Bytes[i];
}
}
}
/// <summary>
/// Returns the start index of the first occurrence of the given ByteFilter.
/// </summary>
public static long GetStartIndex(byte[] fileBytes, ByteFilter findBytes)
{
long matchIndex = 0;
for (long i = 0; i < fileBytes.Length; i++)
{
if (findBytes.IsWildcardIndex(matchIndex) || fileBytes[i] == findBytes.Bytes[matchIndex])
{
matchIndex++;
if (matchIndex == findBytes.Length)
{
return i - matchIndex + 1;
}
}
            else
            {
                // Restart the scan one byte after the position where this partial match began
                i -= matchIndex;
                matchIndex = 0;
            }
}
return -1;
}
}
}
<file_sep># HexPatcher
A .NET command line tool to modify any binary files by their hex based address.
Targets .NET Core 1.1 Framework.
Create a configuration file that contains three rows:
- Binary file path
- Original bytes to find (can contain '\*' wildcard bytes)
- Modified bytes to replace the original bytes with
An example of a configuration file:
```
C:\Program Files\Company\Target.exe
E8 ** AB B2 FF 84 C0 75 08
E8 ** AB B2 FF 84 C0 74 08
```
The program will make a backup of the binary file before overwriting any of its content. The backup is stored at the same location as the original binary file and has a ".bak" extension added to its full filename.
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
namespace HexPatcher
{
/// <summary>
/// A byte array that may contain wildcard bytes instead of data.
/// </summary>
public class ByteFilter
{
/// <summary>
/// The data bytes.
/// </summary>
public byte[] Bytes { get; set; }
/// <summary>
/// Length of the data.
/// </summary>
public int Length { get { return Bytes.Length; } }
private List<long> wildcardIndexes = new List<long>();
/// <summary>
/// Constructs a new ByteFilter of the given hex string.
/// The string may contain spaces and wildcards '*'.
/// </summary>
public ByteFilter(string hexString)
{
hexString = hexString.Replace(" ", "");
Bytes = StringToByteArray(hexString);
// Get all wildcards from the hex string
for (int i = hexString.IndexOf('*'); i > -1; i = hexString.IndexOf('*', i + 1))
{
wildcardIndexes.Add(i / 2);
}
// Remove possible duplicates
wildcardIndexes = wildcardIndexes.Distinct().ToList();
}
/// <summary>
/// Returns true if the given index contains a wildcard.
/// </summary>
public bool IsWildcardIndex(long index)
{
return wildcardIndexes.Contains(index);
}
/// <summary>
/// Converts a hex string to a byte array.
/// </summary>
private static byte[] StringToByteArray(string hex)
{
if (hex.Length % 2 == 1)
{
throw new Exception("The binary key cannot have an odd number of digits");
}
byte[] arr = new byte[hex.Length >> 1];
for (int i = 0; i < hex.Length >> 1; ++i)
{
// Two hex characters represent one byte
arr[i] = (byte)((GetHexValue(hex[i << 1]) << 4) + (GetHexValue(hex[(i << 1) + 1])));
}
return arr;
}
/// <summary>
/// Converts an encoded hex character into a byte value.
/// </summary>
private static int GetHexValue(char hex)
{
if (hex == '*')
{
return 0;
}
int val = hex;
//For uppercase A-F letters:
return val - (val < 58 ? 48 : 55);
//For lowercase a-f letters:
//return val - (val < 58 ? 48 : 87);
//Or the two combined, but a bit slower:
//return val - (val < 58 ? 48 : (val < 97 ? 55 : 87));
}
}
}
|
4727c65e4eaaffc520f5280ae2debf1602b53fd3
|
[
"Markdown",
"C#"
] | 3 |
C#
|
Haapavuo/HexPatcher
|
fce034729f78381a746a77ac7d78e92ef17aef3b
|
30f0ff2a51166336caa9c389c495170ba8bb8e20
|
refs/heads/master
|
<repo_name>thegingerbloke/archive-tweet<file_sep>/app.sample.js
/*
* Start tweet archive
*/
var tweetArchive = require("./lib/tweet-archive");
tweetArchive.init({
originalTwitterAccount : "stephenfry",
archiveTime : 3,
archiveTimeUnit : "months",
consumer_key : "x",
consumer_secret : "x",
access_token_key : "x",
access_token_secret : "x"
});<file_sep>/lib/log.js
/*
* logger
*/
var
messages = [],
moment = require('moment');
// log messages
function log() {
// convert arguments into array
var args = Array.prototype.slice.call(arguments);
// add date to start of arguments
var str = moment().format("D/M/YY, H:mm:ss") + " -";
args.unshift(str);
// log
console.log(args.join(" "));
messages.push(args);
// remove old messages if the log gets too long - save RAM!
if (messages.length > 500) {
messages.shift();
}
}
// exit on errors
function exit() {
console.log();
log(" -- ERROR -- ");
log(Array.prototype.slice.call(arguments));
log(" -- EXITING -- ");
console.log();
process.exit(1);
}
module.exports = {
log: log,
exit: exit,
messages: messages
};<file_sep>/lib/large-numbers.js
/*
* JavaScript can't natively deal with the huge twitter id numbers...
* http://stackoverflow.com/questions/9717488/using-since-id-and-max-id-in-twitter-api
*/
function decrementHugeNumberBy1(n) {
// make sure s is a string, as we can't do math on numbers over a certain size
n = n.toString();
var allButLast = n.substr(0, n.length - 1);
var lastNumber = n.substr(n.length - 1);
if (lastNumber === "0") {
return decrementHugeNumberBy1(allButLast) + "9";
}
else {
var finalResult = allButLast + (parseInt(lastNumber, 10) - 1).toString();
return trimLeft(finalResult, "0");
}
}
function trimLeft(s, c) {
var i = 0;
while (i < s.length && s[i] === c) {
i++;
}
return s.substring(i);
}
module.exports = {
decrementHugeNumberBy1: decrementHugeNumberBy1
};<file_sep>/lib/file-helper.js
/*
* File helper
*/
var
fs = require('fs'),
logger = require('./log'),
log = logger.log,
exit = logger.exit;
function save(path, obj, cb) {
if (obj.length < 1) {
log("No content set, not saving");
return;
}
fs.writeFile(path, JSON.stringify(obj), 'utf8', function(err) {
if(err) {
log("Cannot save to file");
exit(err);
} else {
log("File saved, ", obj.length, "items");
if (!!cb) {
cb();
}
}
});
}
function empty(path, cb) {
fs.writeFile(path, "", 'utf8', function(err) {
if(err) {
log("Cannot empty file");
exit(err);
} else {
log("File emptied");
if (!!cb) {
cb();
}
}
});
}
function read(path, next) {
var content;
fs.exists(path, function(fileExists) {
if (fileExists) {
fs.readFile(path, 'utf8', function (err, data) {
if (err) {
log("Error reading file", err);
next();
} else {
if (data.length > 0) {
log("File found and parsed");
content = JSON.parse(data);
next(content);
} else {
log("File found but empty");
next();
}
}
});
} else {
log("File doesn't exist");
next();
}
});
}
module.exports = {
save : save,
read : read,
empty: empty
};<file_sep>/README.md
# TweetArchiver
Retweet things from another account a set time after they were originally posted
## Set up
Setting up a new TweetArchiver account will require a few steps:
* Create a new [Twitter](http://twitter.com/) account
* [Register a new app](https://dev.twitter.com/) with the twitter account - take a note of the four different keys listed below, and make sure the app has read and write access. (Give it read and write access before creating your access tokens so they share this access, to check see [here](https://twitter.com/settings/applications))
## Init
* Check out the app from github
* run `npm install` to install dependencies
* Duplicate/rename the app.sample.js file, call it app.js (or similar)
* Open the app.js file, add the correct details for the account you want to retweet
* Once configured, run `node app.js`
### Init options explained
* *originalTwitterAccount*: The Twitter account we're copying from
* *archiveTime*: An integer to indicate how much later you want to retweet
* *archiveTimeUnit*: The time units. Options are 'minutes', 'hours', 'days' 'weeks', 'months' or 'years'
* *consumer_key*, *consumer_secret*, *access_token_key* and *access_token_secret*: Twitter Authorisation tokens - [Register a new app](https://dev.twitter.com/) for these
|
9ff364a1e8a2ca4720fcc014ecdf77a9a54b7684
|
[
"JavaScript",
"Markdown"
] | 5 |
JavaScript
|
thegingerbloke/archive-tweet
|
db768dc884984c6e54e69deebeca1e1619746fd7
|
cb02007df224ee716c60c896384bd6996de0e72e
|
refs/heads/master
|
<file_sep><?php
class NotificationTest extends \PHPUnit\Framework\TestCase
{
/** @test */
public function it_generates_a_correct_notification_object()
{
$notification = new \Fcm\Push\Notification();
$notification
->setTitle('Test title')
->setBody('A small body as an example')
->addRecipient('device_1')
->addData('key', 'value');
$expected = [
'to' => 'device_1',
'notification' => [
'title' => 'Test title',
'body' => 'A small body as an example',
'sound' => '',
'icon' => '',
'color' => '',
'tag' => '',
'subtitle' => '',
],
'data' => [
'key' => 'value',
],
];
$this->assertSame($expected, $notification->getBody());
$this->assertSame('https://fcm.googleapis.com/fcm/send', $notification->getUrl());
}
/**
* @test
*
* @expectedException \Fcm\Exception\NotificationException
* @expectedExceptionMessage Must minimaly specify a single recipient or topic.
*/
public function it_can_not_have_no_recipients_or_topics()
{
$notification = new \Fcm\Push\Notification();
$notification
->setTitle('Test title')
->setBody('A small body as an example');
$notification->getBody();
}
/**
* @test
*
* @expectedException \Fcm\Exception\NotificationException
* @expectedExceptionMessage Must either specify a recipient or topic, not more then one.
*/
public function it_can_not_have_a_recipient_and_topic()
{
$notification = new \Fcm\Push\Notification();
$notification
->setTitle('Test title')
->addRecipient('device')
->addTopic('topic');
$notification->getBody();
}
/** @test */
public function it_can_generate_a_notification_for_multiple_recipients()
{
$notification = new \Fcm\Push\Notification();
$notification
->setTitle('Test title')
->addRecipient('device_1')
->addRecipient(['device_2', 'device_3'])
->addData('key', 'value');
$expected = [
'registration_ids' => [
'device_1',
'device_2',
'device_3',
],
'notification' => [
'title' => 'Test title',
'body' => '',
'sound' => '',
'icon' => '',
'color' => '',
'tag' => '',
'subtitle' => '',
],
'data' => [
'key' => 'value',
],
];
$this->assertSame($expected, $notification->getBody());
}
/** @test */
public function it_can_generate_a_notification_for_multiple_topics()
{
$notification = new \Fcm\Push\Notification();
$notification
->setTitle('Test title')
->addTopic(['news', 'weather'])
->addTopic('personal')
->addData('key', 'value');
$expected = [
'condition' => "'news' in topics||'weather' in topics||'personal' in topics",
'notification' => [
'title' => 'Test title',
'body' => '',
'sound' => '',
'icon' => '',
'color' => '',
'tag' => '',
'subtitle' => '',
],
'data' => [
'key' => 'value',
],
];
$this->assertSame($expected, $notification->getBody());
}
/** @test */
public function it_can_generate_a_notification_with_data()
{
$notification = new \Fcm\Push\Notification();
$notification
->setTitle('Test title')
->addRecipient('device')
->addData('key', 'value')
->addData('name', 'notification')
->addData('test', 'data');
$expected = [
'to' => 'device',
'notification' => [
'title' => 'Test title',
'body' => '',
'sound' => '',
'icon' => '',
'color' => '',
'tag' => '',
'subtitle' => '',
],
'data' => [
'key' => 'value',
'name' => 'notification',
'test' => 'data',
],
];
$this->assertSame($expected, $notification->getBody());
}
/** @test */
public function it_can_generate_a_quick_object_from_magic_method()
{
$client = \Fcm\FcmClient::create('', '');
$notification = $client->pushNotification('Sample title', 'Sample body', 'device_id');
$expected = [
'to' => 'device_id',
'notification' => [
'title' => 'Sample title',
'body' => 'Sample body',
'sound' => '',
'icon' => '',
'color' => '',
'tag' => '',
'subtitle' => '',
],
];
$this->assertSame($expected, $notification->getBody());
}
}
|
4bfc40e1eb2ea78c2bd34e8311bea773d1b87f29
|
[
"PHP"
] | 1 |
PHP
|
rolinger/php-fcm
|
5d8b7872a70a0112e480b6a34daec3c674be350a
|
23f90f50fb7cbb5eb2857dc49264f8608c22982a
|
refs/heads/master
|
<file_sep>import os
from conan.packager import ConanMultiPackager
from conanfile import SQLite3Conan
username = os.getenv("CONAN_USERNAME", "jgsogo")
reference = os.getenv("CONAN_REFERENCE", "{}/{}".format(SQLite3Conan.name, SQLite3Conan.version))
if __name__ == "__main__":
builder = ConanMultiPackager(username=username,
reference=reference,
stable_branch_pattern="master")
builder.add_common_builds()
print("{} builds ahead!".format(len(builder.builds)))
builder.run()
<file_sep>conan-sqlite3
=============
This is a `conan.io recipe <http://conan.io>`__ for the well-known `sqlite3 <http://www.sqlite.org/index.html>`__ database engine.
+------------------------+----------------------+
| **master (linux/osx)** | **master (windows)** |
+========================+======================+
| |Build travis| | |Build appveyor| |
+------------------------+----------------------+
License
-------
`MIT LICENSE <./LICENSE>`__
.. |Build travis| image:: https://travis-ci.org/jgsogo/conan-sqlite3.svg?branch=master
:target: https://travis-ci.org/jgsogo/conan-sqlite3
.. |Build appveyor| image:: https://ci.appveyor.com/api/projects/status/6saqlnj5e7bju6tj/branch/master?svg=true
:target: https://ci.appveyor.com/project/jgsogo/conan-sqlite3/branch/master
|
bcf3562442e4a778024c542ab2533405c959efd6
|
[
"Python",
"reStructuredText"
] | 2 |
Python
|
cjwddtc/conan-sqlite3
|
b98bb10e51016f4ce506b76ae53b50fcf41d2d76
|
f7459d869b34412da91df7f54a30e4f1f31db0b0
|
refs/heads/master
|
<file_sep>#include <cmath>
#include <string>
//#include <vector>
#include <numeric>
#include <stdlib.h>
#include <iostream>
#include <algorithm>
#include <sstream>
#include <fstream>
#include <assert.h>
#include <map>
#include "vector.h"
using namespace std;
using xmh::vector;
#define DEBUG
struct Matrixelem;
/************************************************************************/
/* Parameter settings                                                   */
/************************************************************************/
const string FILEDIR = "D:/计科/大二/数据结构/大作业—搜索引擎/Spider_/nju";
const string LISTFILE = FILEDIR + "/pagelist.txt";
const string RESULTFILE = FILEDIR + "/pagerank.txt";
const string DEGREEFILE = FILEDIR + "/degreerank.txt";
/* limit on the number of pages to read in, -1 for unlimited */
unsigned NR_Page_Limit = -1;
const double eps = 1e-6;
const double beta = 0.85;
const int amplification_factor = 10000;
/************************************************************************/
#define fabs(x) ((x)>0?(x):-(x))
/*some global variable*/
int NR_PAGE = 0;
map<string, int> mp;
vector<Matrixelem> M;
struct Matrixelem
{
int row, col;
double value;
Matrixelem()
{
this->row = -1;
this->col= -1;
this->value = 0;
}
Matrixelem(int a, int b, double value) :row(a), col(b), value(value) {}
bool operator == (const Matrixelem & right)
{
if (this->col == right.col&&this->row == right.row&&this->value == right.value)
return 1;
else
return 0;
}
};
//************************************
// Method: init_listfile
// FullName: init_listfile
// Access: public
// Returns: void
// Qualifier: load the page list and build the URL-to-index map
//************************************
void init_listfile()
{
ifstream fin(LISTFILE);
assert(fin);
string temp;
    // limit the number of pages read in
while (NR_Page_Limit&&fin >> temp)
{
mp[temp] = NR_PAGE++;
NR_Page_Limit--;
}
#ifdef DEBUG
cout << NR_PAGE << endl;
#endif
fin.close();
}
string int2str(int temp)
{
stringstream ss;
string num_str;
ss << temp;
ss >> num_str;
return num_str;
}
//************************************
// Method: vec_operate
// FullName: vec_operate
// Access: public
// Returns: std::vector<double>
// Qualifier: element-wise operation between two vectors
// Parameter: const vector<double> & v1
// Parameter: const vector<double> & v2
// Parameter: double fun
// Parameter: double
// Parameter: double
//************************************
vector<double> vec_operate(const vector<double>& v1, const vector<double>& v2, double fun(double, double))
{
vector<double> resultant; // stores the results
for (int i = 0; i < v1.size(); ++i)
{
resultant.push_back(fun(v1[i], v2[i])); // stores the results of todo() called on the i'th element on each vector
}
return resultant;
}
//************************************
// Method: sqrt_of_pinfangsum
// FullName: sqrt_of_pinfangsum
// Access: public
// Returns: double
// Qualifier: sqrt of the sum of squares (L2 norm)
// Parameter: const vector<double> & v1
//************************************
double sqrt_of_pinfangsum(const vector<double>& v1)
{
double running_total = 0.0;
for_each(v1.begin(), v1.end(), [&](double val) {running_total += (val * val); });
return sqrt(running_total); // returns the L2 Norm of the vector v1, by sqrt(sum of squares)
}
//************************************
// Method: init_childrenlink
// FullName: init_childrenlink
// Access: public
// Returns: void
// Qualifier: load the link relations between pages
//************************************
void init_link()
{
for (int father_index = 0; father_index < NR_PAGE; father_index++)
{
cout << "正在读入第" << father_index << "个文件" << endl;
string linkfile = FILEDIR + "/childrenlink/" + int2str(father_index) + ".txt";
ifstream fin(linkfile);
#ifdef DEBUG
assert(fin);
#endif
string child;
int nr_children = 0;
while (fin >> child)
{
map<string, int>::iterator it = mp.find(child);
if (it != mp.end()&&it->second!=father_index) {
M.push_back(Matrixelem(it->second, father_index, 1));
nr_children++;
}
}
if (nr_children)
{
for_each(M.begin(), M.end(), [&](Matrixelem & it) {if (it.col == father_index)it.value /= nr_children; });
}
fin.close();
}
}
//************************************
// Method: matrix_mul_vector
// FullName: matrix_mul_vector
// Access: public
// Returns: std::vector<double>
// Qualifier: This function mul one n*n matrix with n*1 vector
// Parameter: vector<Matrixelem> A
// Parameter: vector<double>x
// Parameter: int n
//************************************
vector<double> matrix_mul_vector(vector<Matrixelem>&A, vector<double>&x, int n)
{
vector<double> result(n);
fill(result.begin(), result.end(), 0);
    for_each(A.begin(), A.end(),
[&](auto it){
result[it.row] += it.value*x[it.col];
});
return result;
}
//************************************
// Method: pr_caculate
// FullName: pr_caculate
// Access: public
// Returns: std::vector<double>
// Qualifier: compute the PageRank values
// Parameter: double eps
// Parameter: double init default=1/NR_page
// Parameter: double beta
//************************************
vector<double> pr_caculate()
{
    // vector holding the solution
    vector<double> result(NR_PAGE);
    // initialize the solution vector
fill(result.begin(), result.end(), 1.0/NR_PAGE);
double diff = 1 << 20;
while (diff>eps)
{
cout << ".";
vector<double> temp(NR_PAGE);
temp = matrix_mul_vector(M, result, NR_PAGE);
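        // damping step of PageRank: r_new = beta * (M * r) + (1 - beta) / N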
for_each(temp.begin(), temp.end(), [=](double &a) {a = beta*a+(1 - beta) * 1.0/ NR_PAGE; });
diff = sqrt_of_pinfangsum(vec_operate(temp, result, [](double a, double b) {return a - b; }));
result = temp;
}
for_each(result.begin(), result.end(), [=](double &a) {a *= amplification_factor; });
return result;
}
void outputdegree()
{
ofstream fout(DEGREEFILE);
assert(fout);
vector<int>count(NR_PAGE);
fill(count.begin(), count.end(), 0);
for_each(M.begin(), M.end(), [&](Matrixelem & it){
count[it.row]++;
}
);
for_each(count.begin(), count.end(), [&](int &x)
{
fout << x << endl;
});
fout.close();
cout << "输出度排序文件成功" << endl;
}
int main()
{
init_listfile();
cout << "初始化文件列表成功..." << endl;
init_link();
cout << "初始化链接关系成功..." << endl;
outputdegree();
auto re = pr_caculate();
ofstream fout(RESULTFILE);
#ifdef DEBUG
assert(fout);
#endif
for_each(re.begin(), re.end(), [&fout](double rank) {fout << rank << endl; });
fout.close();
cout << "计算成功..." << endl;
return 0;
}
<file_sep>#include "http_demo_Search.h"
#include "inverted-index.h"
#include <vector>
#include <iostream>
#include <fstream>
#include <string>
#include <sstream>
#include <unordered_map>
using std::string;
using std::vector;
JNIEXPORT void JNICALL Java_http_demo_Search_initData
(JNIEnv *, jobject){
std::cout << "Load inverted index" << std::endl;
std::ifstream fin;
fin.open("inverted_index\\inverted_index.txt");
if (fin.fail()) {
std::cout << "Fail to load inverted index!" << std::endl;
return;
}
else {
std::cout << "Load successful" << std::endl;
}
read_inverted_index(fin);
std::cout << "Create inverted index...Done!" << std::endl;
return;
}
struct Result { // Result was originally meant to hold more members, but in the end they were
	int index;  // moved into the Java code, so only this single member remains
};
int setResults(Result *&results, // output
string *keyArr, int len);
// The code below is the JNI interface where Java and C++ interact; it is a bit dirty
JNIEXPORT jobjectArray JNICALL Java_http_demo_Search_searchResults
(JNIEnv *env, jobject jobj, jobjectArray jstrArr){
	// convert the Java Strings to C++ strings, re-encoding them as GBK
jint len = env->GetArrayLength(jstrArr);
string *keyArr = new string[len];
jclass jstr_class = env->FindClass("java/lang/String");
jmethodID get_bytes = env->GetMethodID(jstr_class, "getBytes", "(Ljava/lang/String;)[B");
jstring codetype = env->NewStringUTF("GBK");
for (jint i = 0; i < len; i ++) {
jstring jstr = (jstring)env->GetObjectArrayElement(jstrArr, i);
		// call a Java method here to encode the Java String as a GBK byte array
jbyteArray byteArr = (jbyteArray)env->CallObjectMethod(jstr, get_bytes, codetype);
char *key = (char *)env->GetByteArrayElements(byteArr, JNI_FALSE);
int strLen = env->GetArrayLength(byteArr);
key[strLen] = '\0';
keyArr[i] = key;
}
Result *results;
	// setResults: C++ function that runs the retrieval based on keyArr and stores the results in results
int resLen = setResults(results, keyArr, len);
	// The code below converts the C++ Result structs into the Java Result class and returns the array
jclass jresult = env->FindClass("http/demo/Result");
jobjectArray jresults = env->NewObjectArray(resLen, jresult, 0);
jmethodID newID = env->GetMethodID(jresult, "<init>", "(I)V");
for (int i = 0; i < resLen; i ++) {
jobject jres = env->NewObject(jresult, newID, results[i].index);
env->SetObjectArrayElement(jresults, i, jres);
}
return jresults;
}
const string AND = "and", OR = "or", NOT = "not";
const int PRI_N = 1, PRI_A = 2, PRI_O = 3;
// NOT retrieval logic: documents in a but not in b
vector<int> logic_n(vector<int> a, vector<int> b) {
int ia = 0, ib = 0;
vector<int> iv;
while (ia < a.size() && ib < b.size()) {
if (a[ia] == b[ib]) { ia ++; ib ++; }
else if (a[ia] < b[ib]) { iv.push_back(a[ia ++]); }
else { ib ++; }
}
while (ia < a.size()) { iv.push_back(a[ia ++]); }
return iv;
}
// AND retrieval logic: intersection of the two document lists
vector<int> logic_a(vector<int> a, vector<int> b) {
int ia = 0, ib = 0;
vector<int> iv;
while (ia < a.size() && ib < b.size()) {
if (a[ia] == b[ib]) { iv.push_back(a[ia ++]); ib ++; }
else if (a[ia] < b[ib]) { ia ++; }
else { ib ++; }
}
return iv;
}
// OR retrieval logic: union of the two document lists
vector<int> logic_o(vector<int> a, vector<int> b) {
int ia = 0, ib = 0;
vector<int> iv;
while (ia < a.size() || ib < b.size()) {
if (ia == a.size()) { iv.push_back(b[ib ++]); }
else if (ib == b.size()) { iv.push_back(a[ia ++]); }
else if (a[ia] == b[ib]) { iv.push_back(a[ia ++]); ib ++; }
else if (a[ia] < b[ib]) { iv.push_back(a[ia ++]); }
else { iv.push_back(b[ib ++]); }
}
return iv;
}
// Get the document list for a key from the inverted index
vector<int> getIndex(string key) {
vector<int> iv;
vector<Posting> pv = search_inverted_index(key);
for (int i = 0; i < pv.size(); i ++) {
iv.push_back(pv[i].docId);
}
return iv;
}
// Recursive function that parses the boolean query keyword sequence and evaluates it recursively
vector<int> retrieval(string *keyArr, int len) {
vector<int> iv;
if (len <= 0) { return iv; }
if (len == 1) { return getIndex(keyArr[0]); }
int level = 0, prio = 99, pos = -1;
bool paren = false;
for (int i = 0; i < len; i ++) {
if (keyArr[i] == "(") { level --; paren = true; }
else if (keyArr[i] == ")") { level ++; }
else if (level < 0) { continue; }
else if (keyArr[i] == NOT && prio > PRI_N) { pos = i; prio = PRI_N; }
else if (keyArr[i] == AND && prio > PRI_A) { pos = i; prio = PRI_A; }
else if (keyArr[i] == OR && prio > PRI_O) { pos = i; prio = PRI_O; }
}
if (!paren && pos == -1) { return logic_o(getIndex(keyArr[0]), retrieval(keyArr + 1, len - 1)); }
if (pos == -1) { return retrieval(keyArr + 1, len - 1); }
switch (prio) {
case PRI_N: return logic_n(retrieval(keyArr, pos), retrieval(keyArr + pos + 1, len - pos - 1)); break;
case PRI_A: return logic_a(retrieval(keyArr, pos), retrieval(keyArr + pos + 1, len - pos - 1)); break;
case PRI_O: return logic_o(retrieval(keyArr, pos), retrieval(keyArr + pos + 1, len - pos - 1)); break;
default: std::cout << "bad expression!" << std::endl;
}
return iv;
}
// Fill the Result structs in preparation for returning the results
int setResults(Result *&results, // output
string *keyArr, int len) {
vector<int> iv = retrieval(keyArr, len);
results = new Result[iv.size()];
for (int i = 0; i < iv.size(); i ++) {
results[i].index = iv[i];
}
return iv.size();
}
<file_sep>import threading
from queue import Queue
from spider import Spider
from domain import *
from general import *
from urllib.request import urlopen
import chardet
import os
##############################################################
#some global setting
PROJECT_NAME = 'nju'
HOMEPAGE = 'http://www.nju.edu.cn/'
DOMAIN_NAME = get_domain_name(HOMEPAGE)
QUEUE_FILE = PROJECT_NAME + '/queue.txt'
CRAWLED_FILE = PROJECT_NAME + '/crawled.txt'
NUMBER_OF_THREADS = 4
queue = Queue()
##############################################################
global ALLNUM
ALLNUM=0
init_file=open(PROJECT_NAME+'/pagelist.txt','r')
lines=init_file.readlines()
ALLNUM=len(lines)
init_file.close()
Spider(PROJECT_NAME, HOMEPAGE, DOMAIN_NAME)
lock=threading.Lock()
def append_link(url):
if(not os.path.exists(PROJECT_NAME+'/pagelist.txt')):
file=open(PROJECT_NAME+'/pagelist.txt','w')
else:
file=open(PROJECT_NAME+'/pagelist.txt','a')
file.write(url+'\n')
file.close()
# Create worker threads (will die when main exits)
def create_workers():
for _ in range(NUMBER_OF_THREADS):
t = threading.Thread(target=work)
t.daemon = True
t.start()
# Do the next job in the queue
def work():
while True:
url = queue.get()
global ALLNUM
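        # For each URL: write its outgoing links to childrenlink/<index>.txt, then save the page HTML in UTF-8 and GBK copies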
try:
if (lock.acquire()):
response=urlopen(url,timeout=3)
childrenlink = Spider.gather_links(url)
childrenfile = open(PROJECT_NAME + '/childrenlink/' + str(ALLNUM) + '.txt', 'w')
childrenfile.write(url + '\n')
for each_child in childrenlink:
if ('javascript' not in each_child):
childrenfile.write(each_child + '\n',)
childrenfile.close()
#write html file by utf8 encoding
html_byte=response.read()
chardit1 = chardet.detect(html_byte)
file1 = open(PROJECT_NAME + '/html/utf8/' + str(ALLNUM) + '.html', 'wb')
html_string=html_byte.decode(chardit1['encoding']).encode('utf-8')
file1.write(html_string)
file1.close()
#for smj encode as GBK
file2 = open(PROJECT_NAME + '/html/gbk/' + str(ALLNUM) + '.html', 'wb')
html_string = html_byte.decode(chardit1['encoding'], 'ignore').encode('gbk','ignore')
file2.write(html_string)
file2.close()
except Exception as e:
print(str(e))
queue.task_done()
lock.release()
else:
append_link(url)
ALLNUM = ALLNUM + 1
Spider.crawl_page(threading.current_thread().name, url)
queue.task_done()
lock.release()
# Each queued link is a new job
def create_jobs():
for link in file_to_set(QUEUE_FILE):
queue.put(link)
queue.join()
crawl()
# Check if there are items in the queue, if so crawl them
def crawl():
queued_links = file_to_set(QUEUE_FILE)
if len(queued_links) > 0:
print(str(len(queued_links)) + ' links in the queue')
create_jobs()
create_workers()
crawl()
<file_sep>package demo.Extracter;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.huaban.analysis.jieba.JiebaSegmenter;
import com.huaban.analysis.jieba.JiebaSegmenter.SegMode;
import com.huaban.analysis.jieba.SegToken;
public class KeysExtracter {
private final String SRC_FOLDER = "html", DST_FOLDER = "keys",
SUFFIX = ".html", DST_FILE = "result.txt";
private void extract() throws IOException {
int i = 0;
String dst = DST_FOLDER + File.separator + DST_FILE;
OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(dst), "gbk");
BufferedReader reader;
while(true) {
			// read the html files one by one until a read fails
String file = SRC_FOLDER + File.separator + i + SUFFIX;
try {
reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "gbk"));
} catch (FileNotFoundException e) {
System.out.println("stop at " + i);
break;
}
StringBuilder page = new StringBuilder();
String tmp = reader.readLine();
while (tmp != null) {
page.append(tmp);
tmp = reader.readLine();
}
reader.close();
			// getKeys: get the word-segmentation result
String result = getKeys(page.toString());
writer.write(i + "\n");
writer.write(result + "\n");
i ++;
if (i % 100 == 0) {
System.out.println("at " + i);
}
}
writer.close();
}
	// word-segmentation method
private String getKeys(String string) {
StringBuilder keys = new StringBuilder(), content = new StringBuilder();
//正则匹配出不是<script>标签也不是<style>标签内的内容
String pattern = ">([^\"\'<>]*?)</(?!script|style)";
Pattern r = Pattern.compile(pattern, Pattern.DOTALL);
Matcher m = r.matcher(string);
while (m.find()) {
			// replace html escape entities
content.append(" ").append(m.group(1).replaceAll("(&\\w+;|&#\\d+;)", " "));
}
JiebaSegmenter segmenter = new JiebaSegmenter();
		// run the segmentation
List<SegToken> tokens = segmenter.process(content.toString(), SegMode.INDEX);
for (SegToken token : tokens) {
String word = token.word;
			// keep only tokens consisting of Chinese characters, letters or digits
if (word.matches("[\\u3400-\\u9FA5a-zA-Z0-9]+")) {
keys.append(word).append(" ");
}
}
		// remove the trailing space
if (keys.length() > 0) {
keys.delete(keys.length() - 1, keys.length());
}
return keys.toString();
}
public static void main(String[] args) throws IOException {
KeysExtracter extracter = new KeysExtracter();
extracter.extract();
}
}
<file_sep>#pragma once
#include <iterator>
template<typename T>
class miterator :public std::iterator<std::forward_iterator_tag, T>
{
private: // the iterator here is simply a pointer
T* t;
public:
miterator()
{
t = NULL;
}
miterator(T* t_)
{
t = t_;
}
miterator(const miterator& bbb)
{
t = bbb.t;
}
miterator& operator=(const miterator& bbb)
{
t = bbb.t;
return *this;
}
miterator& operator++()
{
t++;
return *this;
}
miterator operator++(int)
{
miterator iter = *this;
t++;
return iter;
}
miterator operator+(int count)
{
miterator iter = *this;
iter.t += count;
return iter;
}
miterator& operator--()
{
t--;
return *this;
}
miterator operator--(int)
{
miterator iter = *this;
t--;
return iter;
}
miterator operator-(int count)
{
miterator iter = *this;
iter.t -= count;
return iter;
}
int operator-(const miterator& bbb)
{
return t - bbb.t;
}
int operator-(const miterator& bbb)const
{
return t - bbb.t;
}
	/* pointer-related operators */
/************************************************************************/
T& operator*()
{
return *t;
}
const T& operator*() const
{
return *t;
}
T* operator->()
{
return t;
}
const T* operator->() const
{
return t;
}
/************************************************************************/
bool operator!=(const miterator& bbb)const
{
return t != bbb.t;
}
bool operator==(const miterator& bbb)const
{
return t == bbb.t;
}
bool operator<(const miterator& bbb)const
{
return t<bbb.t;
}
bool operator<=(const miterator& bbb)const
{
return t <= bbb.t;
}
bool operator>(const miterator& bbb)const
{
return t > bbb.t;
}
bool operator>=(const miterator& bbb)const
{
return t >= bbb.t;
}
};<file_sep>#include <iostream>
using std::istream; using std::ostream; using std::endl;
#include <fstream>
#include <sstream>
using std::istringstream;
#include <map>
using std::map;
//#include <unordered_map>
// #include <vector>
#include "vector.h"
using xmh::vector;
#include <string>
using std::string;
#include <algorithm>
using std::for_each;
#include "inverted-index.h"
static map<string, vector<Posting>> inverted_index;
void create_inverted_index(istream &fin)
{
string line;
while (getline(fin, line))
{
int page_no = stoi(line);
getline(fin, line);
istringstream words(line);
string word;
// <word, position list>
map <string, vector<int>> temp_index;
int position = 0;
while (words >> word)
{
temp_index[word].push_back(position++);
}
// merge temp_index into our main index
for (const auto &e : temp_index)
{
Posting temp_posting(page_no, e.second);
inverted_index[e.first].push_back(temp_posting);
}
}
}
void print_inverted_index(ostream &out)
{
// format:
// term|docID1:pos1,pos2;docID2:pos3,pos4,pos5;бн
for (const auto &entry : inverted_index)
{
out << entry.first << '|';
for (const auto &posting : entry.second)
{
out << posting.docId << ':';
for_each(posting.positions.begin(), posting.positions.end() - 1,
[&out](int p) { out << p << ','; });
//out << posting.positions.back() << ';';
out << *(--posting.positions.end()) << ';';
}
out << endl;
}
}
// read the inverted index saved on disk into memory
void read_inverted_index(istream &in)
{
// format:
	// term|docID1:pos1,pos2;docID2:pos3,pos4,pos5;...
string line;
while (getline(in, line))
{
istringstream line_stream(line);
// read term
string term;
getline(line_stream, term, '|');
// Postings are separated by ';'
string posting_str;
vector<Posting> posting_list;
while (getline(line_stream, posting_str, ';'))
{
istringstream posting_stream(posting_str);
string docId;
getline(posting_stream, docId, ':');
vector<int> positions;
string pos_str;
// position numbers are separated by comma
while (getline(posting_stream, pos_str, ','))
{
positions.push_back(stoi(pos_str));
}
//posting_list.emplace_back(stoi(docId), positions);
Posting temp_posting(stoi(docId), positions);
posting_list.push_back(temp_posting);
}
inverted_index[term] = posting_list;
}
}
vector<Posting> search_inverted_index(string term)
{
auto it = inverted_index.find(term);
if (it != inverted_index.cend())
{
return it->second;
}
else
return vector<Posting>();
}
<file_sep>#pragma once
#include "vector_iterator.h"
#include <cassert>
namespace xmh {
template <class T>
class vector {
typedef miterator<T> iterator;
typedef const miterator<T> const_iterator;
public:
vector()
{
_buf = NULL;
_size=_capacity = 0;
}
explicit vector(int capa, const T& val = T())
{
init_through_capa(capa);
newroom(_capacity);
for (int i = 0; i < _size; i++)
_buf[i] = val;
}
vector(T* t_)
{
_buf = t_;
}
		/* copy constructor */
vector(const vector& bbb)
{
_size = bbb.size();
_capacity = bbb._capacity;
if (_capacity) {
_buf = new T[_capacity];
//memcpy(_buf, bbb._buf, sizeof(T)*_capacity);
for (int i = 0; i < _capacity; i++)
_buf[i] = bbb._buf[i];
}
}
		/* copy assignment operator */
		vector& operator=(const vector& bbb)
		{
			// self-assignment
			if (this == &bbb)
				return *this;
			// free the existing storage
delete[]_buf;
_buf = NULL;
_size = bbb.size();
_capacity = bbb._capacity;
if (_capacity) {
_buf = new T[_capacity];
assert(_buf);
//memcpy(_buf, bbb._buf, sizeof(T)*_capacity);
for (int i = 0; i < _capacity; i++)
_buf[i] = bbb._buf[i];
}
return *this;
}
~vector()
{
if (_buf)
{
delete[] _buf;
_buf = NULL;
}
_size = _capacity = 0;
}
void push_back(const T& t)
{
if (_size == _capacity)
{
int newcapa = calculate_highlevel_Capacity();
newroom(newcapa);
}
_buf[_size++] = t;
}
bool empty()
{
return _size==0;
}
void pop_back()
{
if (!empty())
erase(end() - 1);
}
iterator begin() const
{
return iterator(&_buf[0]);
}
iterator end() const
{
return iterator(&_buf[_size]);
}
iterator erase(const_iterator iter)
{
int index = iter - begin();
if (index < _size && _size>0)
{
memmove(_buf + index, _buf + index + 1, (_size - index)*sizeof(T));
_buf[--_size] = T();
}
return iterator(iter);
}
T& operator[](int index) const
{
assert(_size > 0 && index >= 0 && index < _size);
return _buf[index];
}
int size() const
{
return _size;
}
private:
int _size;
int _capacity;
T* _buf;
void newroom(int capa)
{
_capacity = capa;
T* newBuf = new T[_capacity];
assert(newBuf);
if (_buf)
{
//memcpy(newBuf, _buf, _size*sizeof(T));
for (int i = 0; i < _size; i++)
newBuf[i] = _buf[i];
delete[] _buf;
_buf = NULL;
}
_buf = newBuf;
}
		/* grow the capacity */
int calculate_highlevel_Capacity()
{
return _capacity * 2 + 1;
}
void init_through_capa(int capa)
{
_buf = NULL;
_size = _capacity = capa > 0 ? capa : 0;
}
};
};<file_sep>#include "inverted-index.h"
#include <iostream>
#include <fstream>
using std::ifstream; using std::ofstream; using std::endl;
using std::cin; using std::cout; using std::cerr;
using std::string;
//using namespace std;
#include "vector.h"
using xmh::vector;
// define CREATE to create inverted index
#define CREATE
// define READ to test the implementation of read_inverted_index()
//#define READ
#ifdef CREATE
int main()
{
ifstream fin("F:\\Data structures\\big-project\\result2.txt");
if (!fin.is_open()) { cerr << "Fail to open file!\n"; exit(-1); }
create_inverted_index(fin);
cout << "Succeed to create inverted-index" << endl;
fin.close();
ofstream fout("F:\\Data structures\\big-project\\inverted_index22.txt");
if (!fout.is_open()) { cerr << "Fail to open file!\n"; exit(-1); }
print_inverted_index(fout);
cout << "Output finish" << endl;
fout.close();
return 0;
}
#elif defined READ
// test read_inverted_index()
int main()
{
ifstream fin("F:\\Data structures\\big-project\\inverted_index2.txt");
if (!fin.is_open()) { cerr << "Fail to open file!\n"; exit(-1); }
read_inverted_index(fin);
cout << "Reading inverted index finished.\n";
vector<Posting> postings_list;
string buf;
while (cin >> buf)
{
postings_list = search_inverted_index(buf);
if (postings_list.size() != 0)
{
for (const auto &e : postings_list)
cout << e.docId << " ";
cout << endl;
}
else
cout << "Not found!\n";
}
//ofstream fout("output.txt");
//print_inverted_index(fout);
return 0;
}
#else
#error
#endif<file_sep>//#include <vector>
#include "vector.h"
#include <string>
#include <iostream>
struct Posting
{
int docId;
xmh::vector<int> positions;
Posting() = default;
// how to remove the overhead of copying the vector ?
Posting(int id, const xmh::vector<int> &ivec) { docId = id; positions = ivec; }
};
void create_inverted_index(std::istream &);
void print_inverted_index(std::ostream &);
void read_inverted_index(std::istream &);
xmh::vector<Posting> search_inverted_index(std::string);<file_sep># mini-search-engine
## Crawler
### Uses a multithreaded Thread-based crawler to fetch [Nanjing University](http://www.nju.edu.cn) and the pages it links to, about 9,000 pages in total.
## Word segmentation
### Uses [jieba](https://github.com/fxsjy/jieba) for Chinese word segmentation.
## Inverted index
An inverted index is a map from terms (words) to the web pages in which they appear. Each term is a key whose value is that term's postings list, i.e. the list of all pages containing the term. For every query, this data structure is used to return all relevant pages; a minimal sketch of the idea follows below.
Each page is identified by a unique integer, called its document ID.
The inverted index is stored in memory in a red-black tree.
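A minimal Python sketch of this mapping (illustration only; the project's real implementation is the C++ code above, with the index keyed by a red-black tree):

```python
from collections import defaultdict

def build_inverted_index(docs):
    # docs: {doc_id: list of tokens}, e.g. the output of jieba word segmentation
    index = defaultdict(list)
    for doc_id, tokens in docs.items():
        positions = defaultdict(list)
        for pos, term in enumerate(tokens):
            positions[term].append(pos)
        for term, pos_list in positions.items():
            index[term].append((doc_id, pos_list))  # one posting per (term, document)
    return index

index = build_inverted_index({1: ["search", "engine"], 2: ["search", "index"]})
print(index["search"])  # [(1, [0]), (2, [0])]
```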
## PageRank/DegreeRank
### PageRank is computed by iterating over a sparse link matrix (a rough sketch follows).
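A rough sketch of the power iteration on a sparse link matrix (the tiny example graph and the damping factor 0.85 are illustrative choices, not values from this project):

```python
import numpy as np
from scipy.sparse import csr_matrix

def pagerank(edges, n, d=0.85, iterations=50):
    # edges: list of (src, dst) links between n pages; dangling pages are ignored in this sketch
    out_deg = np.bincount([s for s, _ in edges], minlength=n)
    rows = [dst for _, dst in edges]
    cols = [src for src, _ in edges]
    vals = [1.0 / out_deg[src] for src, _ in edges]
    M = csr_matrix((vals, (rows, cols)), shape=(n, n))  # column-stochastic link matrix
    r = np.full(n, 1.0 / n)
    for _ in range(iterations):
        r = (1 - d) / n + d * (M @ r)  # damped power iteration
    return r

print(pagerank([(0, 1), (1, 2), (2, 0), (0, 2)], n=3))
```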
### Comparison of the two ranking methods
## Server
- A search web page (HTML) with a good user experience, built with Java
- Calls the C++ functions through the **JNI interface**
- Implements **Boolean retrieval** (a sketch of the postings-list intersection follows below)
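A small sketch of the AND step of Boolean retrieval, intersecting two postings lists that are sorted by document ID (illustration only, not the project's C++/JNI code):

```python
def intersect(p1, p2):
    # p1, p2: postings lists of docIds, each sorted in ascending order
    i = j = 0
    out = []
    while i < len(p1) and j < len(p2):
        if p1[i] == p2[j]:
            out.append(p1[i])
            i += 1
            j += 1
        elif p1[i] < p2[j]:
            i += 1
        else:
            j += 1
    return out

print(intersect([1, 3, 5, 9], [3, 4, 5]))  # [3, 5]
```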
|
67011ce9673ae53ed0d565827b51f04e2deda557
|
[
"Markdown",
"Java",
"Python",
"C++"
] | 10 |
C++
|
forkme7/mini-search-engine-1
|
48f09933cabcef76e26f9fc6b7030cdd37a4d3fc
|
5476ab826a4333e9db0e4b846f665363edf18fd5
|
refs/heads/master
|
<file_sep># Import the modules
import cv2
from sklearn.externals import joblib
from skimage.feature import hog
import numpy as np
# Load the classifier created in generateClassifier.py
clf = joblib.load("digits_cls.pkl")
# Read the input image
im = cv2.imread("photo_3.jpg")
im = cv2.resize(im, (1080, 1080), interpolation=cv2.INTER_AREA)
cv2.imshow("Original", im)
cv2.waitKey(0)
cv2.destroyAllWindows()
# Convert to grayscale and apply Gaussian filtering to remove noise
im_gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
im_gray = cv2.GaussianBlur(im_gray, (5, 5), 0)
# Threshold the image(aka select pixels that exceed a certain value) then convert it to a binary image
ret, im_th = cv2.threshold(im_gray, 60, 255, cv2.THRESH_BINARY_INV)
cv2.imshow("Threshold Image", im_th)
# Find contours (a curve joining all continuous points along a boundary of the same color and intensity) in the image
ctrs, hier = cv2.findContours(im_th.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# Create a list of bounding rectangles for every contour found
rects = [cv2.boundingRect(ctr) for ctr in ctrs]
# For each rectangular region, calculate HOG features and predict
# the digit using Linear SVM.
for rect in rects:
if ((rect[2] > 14) and (rect[3] > 14)):
# Draw the rectangles
cv2.rectangle(im, (rect[0], rect[1]), (rect[0] + rect[2], rect[1] + rect[3]), (0, 255, 0), 3)
# Make the rectangular region around the digit
leng = int(rect[3] * 1.6)
topBorder = (leng - rect[3]) // 2
leftBorder = (leng - rect[2]) // 2
#print('Length: {}'.format(leng))
#roi = im_th[pt1:pt1+leng, pt2:pt2+leng]
temp = im_th[rect[1]:rect[1]+rect[3], rect[0]:rect[0]+rect[2]]
roi = np.zeros((leng,leng),temp.dtype)
roi[topBorder:(temp.shape[0]+topBorder), leftBorder:(temp.shape[1]+leftBorder)] = temp
# Resize the image
roi = cv2.resize(roi, (28, 28), interpolation=cv2.INTER_AREA)
roi = cv2.dilate(roi, (3, 3))
# Calculate the HOG features
roi_hog_fd = hog(roi, orientations=9, pixels_per_cell=(14, 14), cells_per_block=(1, 1), visualise=False)
nbr = clf.predict(np.array([roi_hog_fd], 'float64'))
print('Prediction = {}'.format(nbr[0]))
cv2.putText(im, str(int(nbr[0])), (rect[0], rect[1]),cv2.FONT_HERSHEY_DUPLEX, 2, (0, 255, 255), 3)
cv2.imshow("Resulting Image with Rectangular ROIs", im)
cv2.waitKey(0)
cv2.destroyAllWindows()
<file_sep>import numpy as np
import cv2
from matplotlib import pyplot as plt
def findAccuracy(cells):
# Make it into a Numpy array. It size will be (50,100,20,20)
npArrayInput = np.array(cells)
# Now we prepare train_data and test_data.
train = npArrayInput[:,:50].reshape(-1,400).astype(np.float32) # Size = (2500,400)
test = npArrayInput[:,50:100].reshape(-1,400).astype(np.float32) # Size = (2500,400)
# Create labels for train and test data
k = np.arange(10)
train_labels = np.repeat(k,250)[:,np.newaxis]
test_labels = train_labels.copy()
# Initiate kNN, train the data, then test it with the test data for k=5
knn = cv2.ml.KNearest_create()
knn.train(train, cv2.ml.ROW_SAMPLE, train_labels)
ret,result,neighbours,dist = knn.findNearest(test, k=5)
# Now we check the accuracy of classification
# For that, compare the result with test_labels and check which are wrong
matches = result==test_labels
correct = np.count_nonzero(matches)
accuracy = correct*100.0/result.size
print(accuracy)
img1 = cv2.imread('digits.png')
gray1 = cv2.cvtColor(img1,cv2.COLOR_BGR2GRAY)
# Now we split the image to 5000 cells, each 20x20 size
cells1 = [np.hsplit(row,100) for row in np.vsplit(gray1,50)]
findAccuracy(cells1)
<file_sep>import cv2
import numpy as np
# In this test we will use SVM instead of kNN
size=20
bin_n = 16 # Number of bins
svm_params = dict( kernel_type = cv2.SVM_LINEAR,
svm_type = cv2.SVM_C_SVC,
C=2.67, gamma=5.383 )
thisFlags = cv2.WARP_INVERSE_MAP|cv2.INTER_LINEAR
# First deskew image
def deskew(image):
myMoments = cv2.moments(image)
if abs(myMoments['mu02']) < 1e-2:
return image.copy()
skew = myMoments['mu11']/myMoments['mu02']
M = np.float32([[1, skew, -0.5*size*skew], [0, 1, 0]])
image = cv2.warpAffine(image,M,(size, size),flags=thisFlags)
return image
# HOG function
def hog(image):
gx = cv2.Sobel(image, cv2.CV_32F, 1, 0)
gy = cv2.Sobel(image, cv2.CV_32F, 0, 1)
mag, ang = cv2.cartToPolar(gx, gy)
bins = np.int32(bin_n*ang/(2*np.pi)) # quantizing binvalues in (0...16)
bin_cells = bins[:10,:10], bins[10:,:10], bins[:10,10:], bins[10:,10:]
mag_cells = mag[:10,:10], mag[10:,:10], mag[:10,10:], mag[10:,10:]
hists = [np.bincount(b.ravel(), m.ravel(), bin_n) for b, m in zip(bin_cells, mag_cells)]
hist = np.hstack(hists) # hist is a 64 bit vector
return hist
def GaussianFilter(sigma):
halfSize = 3 * sigma
maskSize = 2 * halfSize + 1
mat = np.ones((maskSize,maskSize)) / (float)( 2 * np.pi * (sigma**2))
xyRange = np.arange(-halfSize, halfSize+1)
xx, yy = np.meshgrid(xyRange, xyRange)
x2y2 = (xx**2 + yy**2)
exp_part = np.exp(-(x2y2/(2.0*(sigma**2))))
mat = mat * exp_part
return mat
image = cv2.imread('digits.png',0)
# If desired, image can be previously smoothed
gaussianFilter = GaussianFilter(1)
gaussianGray1 = cv2.filter2D(image, -1, gaussianFilter)
cells = [np.hsplit(row,100) for row in np.vsplit(image,50)]
# First half is trainData, remaining is testData
train_cells = [ i[:50] for i in cells ]
test_cells = [ i[50:] for i in cells]
# Training data
deskewedImage = [map(deskew,row) for row in train_cells]
hogData = [map(hog,row) for row in deskewedImage]
trainData = np.float32(hogData).reshape(-1,64)
dataResponses = np.float32(np.repeat(np.arange(10),250)[:,np.newaxis])
svm = cv2.SVM()
svm.train(trainData,dataResponses, params=svm_params)
svm.save('svm_data.dat')
# Testing data
deskewedImage = [map(deskew,row) for row in test_cells]
hogData = [map(hog,row) for row in deskewedImage]
testData = np.float32(hogData).reshape(-1,bin_n*4)
result = svm.predict_all(testData)
# Checking accuracy
mask = result==dataResponses
correct = np.count_nonzero(mask)
print correct*100.0/result.size<file_sep># Handwriting-OCR
## kNN OCR
run `python kNN_OCR.py` to test its accuracy against digits.png. To test with a different image, change the image source path and adjust the cell size and number of cells.
## SVM OCR
run `python SVM_OCR.py` to test its accuracy against digits.png. To test with a different image, change the image source path and adjust the cell size and number of cells (see the sketch below for how the split relates to the image size).
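For orientation, digits.png is a 2000x1000 sheet of 20x20-pixel digit cells, which is where the 50x100 split in `kNN_OCR.py` and `SVM_OCR.py` comes from. A short sketch of how the split relates to the image size (the 20-pixel cell size is the only assumption to adjust for other images):

```python
import cv2
import numpy as np

gray = cv2.imread("digits.png", 0)      # assumes digits.png is in the working directory
rows, cols = gray.shape                 # (1000, 2000) for the stock digits sheet
cells = [np.hsplit(r, cols // 20) for r in np.vsplit(gray, rows // 20)]
print(len(cells), len(cells[0]))        # 50 rows x 100 columns of 20x20 cells
```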
## SVM OCR using MNIST Dataset
run `python generateClassifier.py` followed by `python performRecognition.py`.
The following images can be used for testing by changing the image source path in the code:
- photo_1.jpg
- photo_2.jpg
- photo_3.jpg
|
7ce026a15234d64eb278849a03a8f89548276373
|
[
"Markdown",
"Python"
] | 4 |
Python
|
pabloduque0/handwrittingOCR
|
e8c93d55ef24f56314092dcd70636db19d574702
|
ad97b2ff063b2f697dd2a2f8da5971ee42a0dd72
|
refs/heads/master
|
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>junit.sjk</groupId>
<artifactId>junit-test</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>pom</packaging>
<name>junit-test</name>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<spring.version>4.0.1.RELEASE</spring.version>
<java.version>1.7</java.version>
<spring-data-jpa.version>1.7.2.RELEASE</spring-data-jpa.version>
<hibernate.version>4.3.8.Final</hibernate.version>
<mysql.version>5.1.26</mysql.version>
</properties>
<modules>
<module>junit-test-model</module>
<module>junit-test-dao</module>
<module>junit-test-core</module>
<module>junit-test-web</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>junit.sjk</groupId>
<artifactId>junit-test-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>junit.sjk</groupId>
<artifactId>junit-test-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>junit.sjk</groupId>
<artifactId>junit-test-model</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>junit.sjk</groupId>
<artifactId>junit-test-web</artifactId>
<version>${project.version}</version>
</dependency>
<!-- Spring dependencies -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- JPA Provider (Hibernate) -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>4.3.7.Final</version>
</dependency>
<!-- Spring Data JPA -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId>
<version>1.7.1.RELEASE</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.0.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>jstl</groupId>
<artifactId>jstl</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2.2</version>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>2.2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.4.185</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>1.9.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
</project>
<file_sep>package org.junit.test.dao;
import org.junit.test.model.entity.Employee;
import org.springframework.data.jpa.repository.JpaRepository;
public interface EmployeeRepository extends JpaRepository<Employee, Long> {
public Employee findById(long id);
}
<file_sep># junit-test
A project to help learn JUnit testing.
<file_sep>databaseUrl=jdbc:h2:tcp://localhost/~/junitTest
user=root
password=<PASSWORD>
<file_sep>package org.junit.test.core.service;
import org.junit.test.core.api.EmployeeService;
import org.junit.test.dao.EmployeeRepository;
import org.junit.test.model.entity.Employee;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Transactional
@Service("employeeService")
public class EmployeeServiceImpl implements EmployeeService {
@Autowired
private EmployeeRepository employeeRepository;
public boolean removeEmployee(Class<?>type, long id) {
for(Employee empl: employeeRepository.findAll()){
System.out.println(empl);
}
return false;
}
}
<file_sep>package org.junit.test.dao;
import org.junit.test.model.entity.Department;
import org.springframework.data.jpa.repository.JpaRepository;
public interface DepartmentRepository extends JpaRepository<Department, Long> {
public Department findById(long id);
}
<file_sep>package org.junit.test.dao;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import junit.framework.TestCase;
@RunWith(MockitoJUnitRunner.class)
public class EmployeeTest extends TestCase {
}
<file_sep>databaseUrl=jdbc:mysql://localhost:3306/junit-test
user=root
password=<PASSWORD>
|
8eeaf7ea15cfddd8d9946fbb935d1d18c642b330
|
[
"Markdown",
"Java",
"Maven POM",
"INI"
] | 8 |
Maven POM
|
SebJak/junit-test
|
de984ed5e110d5a8edf16bb6702353595bb02b1e
|
2f7f8cf1250788c176e747b94e2a453c8ade43bf
|
refs/heads/main
|
<repo_name>quarz12/simple-Discord-downloader<file_sep>/simple-discord-downloader.py
from __future__ import annotations
import sys
import time
from dataclasses import dataclass
from pathlib import Path
from time import mktime
import datetime
from typing import List
from tqdm import tqdm
import requests
from PIL import Image
from io import BytesIO
from json import loads
token=""
def timestampToSnowflake(timestamp):
# Multiply the timestamp by 1000 to approximate the UNIX epoch to the nearest millisecond.
timestamp *= 1000
# Subtract the UNIX epoch of January 1, 2015 from the timestamp as this is the minimum timestamp that Discord supports because Discord has only been around since May 2015.
timestamp -= 1420070400000
# Return the timestamp value bitshifted to the left by 22 bits.
return int(timestamp) << 22
def process_json(json) -> List[Message]:
messages=[]
try:
for message in json:
if len(message["attachments"])!=0:
filename=message["attachments"][0]["filename"]
img_url=message["attachments"][0]["url"]
else :
filename= None
img_url=None
messages.append(Message(message["id"],message["content"],filename if filename else None,img_url if img_url else None))
return messages
except Exception as error:
print(json)
print(error)
return []  # return an empty list so callers do not receive None after a parse error
class Scraper:
def __init__(self,token=token):
self.session = requests.Session()
self.download_session=requests.Session()
self.session.headers = {
'User-Agent': "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) discord/0.0.309 Chrome/83.0.4103.122 Electron/9.3.5 Safari/537.36",
'Authorization': f"{token}"
}
self.snowflake_first_message= timestampToSnowflake(mktime((2015, 1, 2, 0, 0, 0, -1, -1, -1))) #Make snowflake of first possible discord message
self.snowflake_now= timestampToSnowflake(time.time()) #Make snowflake off current time
self.apiversion="v9"
self.query="has=image"
self.channels=[]
def get_messages(self,channelid):
return self.get_Messages(self.snowflake_first_message,channelid)
def get_Messages(self, snowflake,channelid)->List[Message]:
response = self.session.get(f'https://discord.com/api/{self.apiversion}/channels/{channelid}/messages?after={snowflake}&limit=25&{self.query}')
if response.status_code==429:
print(response.content)
timeout=loads(response.content).get("retry_after",1)  # Discord's 429 body uses the retry_after key
print(f"too many requests, retrying after {timeout}")
time.sleep(timeout+2)
return self.get_Messages(snowflake,channelid)
json=response.content
json=loads(json)
messages=process_json(json)
messages.reverse()
if len(messages)==0:
return messages
messages.extend(self.get_Messages(messages[-1].snowflake,channelid))
return messages
def download_images(self):
for channel in self.channels:
c=self.get_channel(channel)
s=self.get_server(c.server_id)
path=f"downloads/{s.name}/{c.name}"
make_folder(path)
dl_id=1
for message in tqdm(self.get_messages(channel),desc=f"processing channel: {c.name} of server: {s.name}"):
if not message.file_url is None:
with open(f"{path}/{dl_id} - {message.file_name}","wb") as file:
#print(f"url={message.file_url}",end="\r\r")
response=self.download_session.get(message.file_url)
if response.status_code==200:
file.write(response.content)
dl_id+=1
else:
print(response.content)
def queue_channel(self, channelids):
self.channels.append(channelids)
def get_channel(self, id):
tmp=loads(self.session.get(f'https://discord.com/api/v9/channels/{id}').content)
return Channel(id, tmp.get("guild_id"),tmp.get("name"))
def get_server(self, id):
tmp=loads(self.session.get(f'https://discord.com/api/v9/guilds/{id}').content)
return Server(id, tmp.get("name"))
@dataclass
class Channel:
id: int
server_id: int
name: str
@dataclass
class Server:
id: int
name: str
def make_folder(path):
p=Path(path)
if not p.exists():
p.mkdir(parents=True)
@dataclass
class Message:
snowflake: str
text : str
file_name: str = None
file_url :str = None
def main(args:List):
scraper = Scraper()
for channel in args:
scraper.queue_channel(channel)
scraper.download_images()
if __name__=="__main__":
main(sys.argv[1:])
<file_sep>/README.md
# simple-Discord-downloader
This is a simple Python script that downloads all images from the given Discord channels using Discord's HTTP API.
To use it, set the token variable at the top of the script to your Discord token.
Then run the script with all the channel ids you want to download from as parameters; an example invocation is shown below.
You can find a good explanation of how to get these values here: https://github.com/Dracovian/Discord-Scraper/blob/experimental/README.md
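Example (the channel ids below are placeholders): `python simple-discord-downloader.py 123456789012345678 234567890123456789`. Downloaded images are written to `downloads/<server name>/<channel name>/`.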
|
b91c4195ecfa53f28fddc0a538642813098300c4
|
[
"Markdown",
"Python"
] | 2 |
Python
|
quarz12/simple-Discord-downloader
|
e29a8b90fcb21c00f29b4b0a8ff120ed40ac52f8
|
506f34cabc8a973398e0e5eed7c46c10c36b696d
|
refs/heads/master
|
<file_sep>import { Router } from "https://deno.land/x/oak/mod.ts";
import db from './db/database.js';
const getAllProducts = async ({ response }) => {
response.body = await db.query("SELECT * FROM products ORDER BY id");
};
const router = new Router();
router.get("/", (response) => {
response.json({success: true});
})
router.get("/products", getAllProducts)
export default router;
<file_sep>import { Application } from "https://deno.land/x/oak/mod.ts";
import { config } from "https://deno.land/x/dotenv/mod.ts";
import * as log from "https://deno.land/std/log/mod.ts";
import router from "./routes.js";
const { APP_HOST, APP_PORT } = config({ safe: true });
const app = new Application();
await log.setup({
handlers: {
console: new log.handlers.ConsoleHandler("DEBUG"),
file: new log.handlers.FileHandler("WARNING", {
filename: "./log.txt",
formatter: "{levelName} {msg}",
}),
},
loggers: {
default: {
level: "DEBUG",
handlers: ["console", "file"],
},
tasks: {
level: "ERROR",
handlers: ["console"],
},
},
});
let logger;
logger = log.getLogger();
logger.debug("fizz");
logger.warning("buzz");
logger = log.getLogger("tasks");
logger.debug("fizz");
logger.error("buzz");
const unknownLogger = log.getLogger("mystery");
unknownLogger.info("foobar");
app.use(async (ctx, next) => {
await next();
const rt = ctx.response.headers.get("X-Response-Time");
console.log(`${ctx.request.method} ${ctx.request.url} - ${rt}`);
});
app.use(async (ctx, next) => {
const start = Date.now();
await next();
const ms = Date.now() - start;
ctx.response.headers.set("X-Response-Time", `${ms}ms`);
});
app.use(router.routes());
console.log(`Listening on ${APP_HOST}:${APP_PORT}...`);
await app.listen(`${APP_HOST}:${APP_PORT}`);
<file_sep>import { Client } from "https://deno.land/x/[email protected]/mod.ts";
class Database {
constructor() {
this.connect();
}
async connect() {
const config = 'postgres://postgres:postgres@localhost:5432/app-starter-deno';
this.client = new Client(config);
await this.client.connect();
}
}
export default new Database().client;
|
627573f372d72b3854d46e0143bd8877f740e17c
|
[
"JavaScript"
] | 3 |
JavaScript
|
rajaishwary/app-starter-deno
|
3117bb6fb4a9b9606c382d87bfd5c82ae488ccf7
|
3c94f97cbc6bb98d5143a86888daa321de8c5b7b
|
refs/heads/master
|
<file_sep>package pattern.p04_TemplateMethodPattern.common;
/**
* 抽象模板类
*
* @author Administrator
*
*/
public abstract class AbstractClass {
/*
* 基本方法
* 抽象模板中的方法尽量设计成为protected类型,符合迪米特发则,不需要暴露的属性和方法尽量不要设置为protected类型
* 实现类若非必要,尽量不要扩大父类中的访问权限。
*/
protected abstract void doSomething();
//基本方法
protected abstract void doAnything();
/*
* 模板方法
* 为了防止恶意操作,一般模板方法都加上final关键字,不允许被复写
*/
public final void templateMethod() {
/**
* 调用基本方法,完成相关的逻辑
*/
this.doAnything();
this.doSomething();
}
}
<file_sep>package pattern.P13_AdapterPattern.common;
/**
* 适配器模式:场景类
*
* 定义:将一个类的接口变成为客户端所期待的另一种接口,从而使原本因接口不匹配而无法在一起工作的2个类能够在一起工作
* 适配器模式又叫变压器模式,也叫包装模式。
*
* 适配器模式的3个角色:
* 1、Target目标角色:该角色定义把其他类转换成何种接口,也就是我们的期望接口。
* 2、Adaptee源角色:你想把谁转换成目标角色,这个“谁”就是目标角色,他是已经存在的运行良好的类或对象,经过适配器角色
* 的包装,它会成为一个崭新、靓丽的角色。
* 3、Adapter适配器角色:适配器模式的核心角色,其他2个角色是已经存在的,而适配器角色是需要建立的,他的职责非常简单:
* 通过继承或者是类关联的方式,把源角色转化为目标角色。
*
* 优点:
* 1、增加了类的透明性
* 2、提高了类的复用度
* 3、灵活性高
*
* 适用场景:
* 有动机修改一个已经投产中的接口时,就使用适配器模式。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
// 原有的业务逻辑
Target target = new ConcreteTarget();
target.request();
// 现在增加了适配器角色后的逻辑
Target target2 = new Adapter();
target2.request();
}
}
<file_sep>package pattern.P23_BridgePattern;
/**
* 桥梁模式:场景类
*
* 定义:
* 也叫做桥接模式,将抽象和实现解耦,使得两者可以独立的变化。
*
* 角色:
* 1、Abstraction抽象化角色
* 它的主要职责是定义出该角色的行为,同时保存一个对实现化角色的引用,该角色一般是抽象类。
* 2、Implementor实现化角色
* 它是接口或者抽象类,定义角色必须的行为和属性。
* 3、ConcreteImplementor具体实现化角色
* 它实现接口或抽象类定义的方法和属性。
*
* 优点:
* 1、抽象和现实分离
* 也是桥梁模式的主要特点,它完全是为了解决继承的缺点而提出的设计模式,在该模式下,实现可
* 以不受抽象的约束,不用再绑定在一个固定的抽象层次上。
* 2、优秀的扩展能力
* 3、实现细节对客户透明
* 客户不用关心细节的实现, 它已经由抽象层通过聚合关系完成了封装。
*
* 使用场景:
* 1、不希望或不适用使用的场景
* 例如继承层次过度,无法更细化设计颗粒等场景,需要考虑使用桥梁模式。
* 2、接口或者抽象类不稳定的场景
* 明知道接口不稳定还想通过实现或继承来实现业务需求,那就是得不偿失,也是比较失败的做法。
* 3、重用性要求较高的场景
* 设计的颗粒度越细,则重用的可能性就越大,而采用继承则受父类的限制,不可能出现太细的颗粒度。
*
* 注意事项:
* 桥梁模式是非常简单的,使用该模式时主要考虑如何拆分抽象和实现,并不是一涉及继承就要考虑使用
* 该模式,桥梁模式的意图还是对变化的封装,尽量将可能变化的因素封装到最细、最小的逻辑单元中,
* 避免风险扩散,因此在进行系统设计时,发现类继承有N层时,可以考虑使用桥梁模式。
*
*
* @author Administrator
*
*/
public class CLient {
public static void main(String[] args) {
// 定义一个实现化角色
Implementor implementor = new ConcreteImplementor1();
// 定义一个抽象化角色
Abstraction abstraction = new RefinedAbstraction(implementor);
// 执行流程
abstraction.request();
}
}
<file_sep>package pattern.P22_FlyweightPattern;
/**
* 享元模式:场景类
*
* 定义:
* 使用共享对可有效的支持大量的细粒度的对象。
* 享元模式定义为我们提出了两个要求:细粒度的对象和共享对象。这些对象的信息分为两个部分:内部状态和外部状态。
*
* 内部状态:
* 内部转态是对象可共享出来的信息,存储在享元对象内部并且不会随环境改变而改变,它们可以作为一个对象的动态附加信息,
* 不必直接存储在具体某个对象中,属于可以共享的部分。
*
* 外部状态:
* 外部状态是对象得以依赖的一个标记,是随环境改变而改变的,不可以共享的状态,它是一批对象的标识,是唯一的一个索引值。
*
* 角色:
* 1、Flyweight抽象享元角色
* 它简单的说就是一个产品的抽像类,同时定义出对象的外部状态和内部状态的接口或实现。
* 2、ConcreteFlyweight具体享元角色
* 具体的一个产品类,实现抽象角色定义的业务,该角色中需要注意的是内部状态处理应该和环境无关,不应该出现一个操作改变了
* 内部状态,同时修改了外部状态,这是绝对不允许的。
* 3、unshareConcreteFlyweight不可共享的享元角色
* 不存在外部状态或者安全要求(如线程安全)不能够使用共享技术的对象。
* 4、FlyweightFactory享元工厂
* 职责非常简单,就是构造一个池容器,同时提供从池中获得对象的方法。
*
* 优点:
* 享元模式是一个非常简单的模式,它可以大大减少应用程序创建的对象,降低程序内存的占用,增强程序的性能。
*
* 缺点:
* 提高了程序的复杂度,需要分离出外部状态和内部状态,而且外部状态具有固化特性,不应该随内部状态改变而改变,否则导致系统
* 逻辑混乱。
*
* 使用场景:
* 1、系统中存在大量的相似的对象。
* 2、细粒度的对象都具备较接近的外部状态,而且内部状态与环境无关,也就是说对象没有特定的身份。
* 3、需要缓冲池的场景。
*
* 注意事项:
* 虽然可以使用享元模式可以实现对象池,但是这两者还是有比较大的差异,对象池着重在对象的复用上,池中的每个对象是可以替换的
* 从同一个池中获取A对象和B对象对客户端来说是完全相同的,它主要解决复用,而享元模式在主要解决的对象的共享问题,如何建立多
* 个可共享的细粒度对象则是其关注的重点。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
//初始化池中数据
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 10; j++) {
String key = "第" + i + "行,第" + j + "列";
FlyweightFactory.getFlyweight(key);
}
}
//从池中取数据
@SuppressWarnings("unused")
Flyweight concreteFlyweight1 = FlyweightFactory.getFlyweight("第2行,第3列");
}
}
<file_sep>package pattern.P19_VisitorPattern.extend2;
/**
* 统计接口
*
* @author Administrator
*
*/
public interface ITotalVisitor extends IVisitor {
// 统计
public void getTotal();
}
<file_sep>package pattern.P11_DecoratorPattern;
/**
* 具体的装饰类
*
* @author Administrator
*
*/
public class ConcreteDecorator1 extends Decorator {
// 定义被修饰者
public ConcreteDecorator1(Component c) {
super(c);
}
// 定义自己的修饰方法
private void method1() {
System.out.println("method1修饰");
}
// 重写父类的operation方法
@Override
public void operate() {
this.method1();
super.operate();
}
}
<file_sep>package pattern.P07_PrototypePattern.extend;
/**
* 深拷贝:场景类
*
* 使用原型模式时,引用的成员变量必须满足2个条件才不会被拷贝,一是类的成员变量,而不是方法内变量
* 二是必须是一个可变的引用变量,而不是一个原始原型或不可变对象。
*
* @author Administrator
*
*/
public class DeepClient {
public static void main(String[] args) {
DeepCopy DeepCopy = new DeepCopy();
DeepCopy.setValue("张三");
DeepCopy copy = DeepCopy.clone();
copy.setValue("李四");
System.out.println(DeepCopy.getValue());
}
}
<file_sep>package pattern.P06_ProxyPattern.extend04;
import java.lang.reflect.InvocationHandler;
/**
* 具体业务动态代理类
*
* @author Administrator
*
*/
public class SubjectDynamicProxy extends DynamicProxy<Object> {
public static <T> T newProxyInstance(Subject subject) {
// 获得Classloader
ClassLoader loader = subject.getClass().getClassLoader();
// 活动接口数据
Class<?>[] classes = subject.getClass().getInterfaces();
// 获得handler
InvocationHandler handler = new MyInvocationHandler(subject);
return newProxyInstance(loader, classes, handler);
}
}
<file_sep>package pattern.P08_MediatorPattern;
/**
* 抽象中介者
*
* @author Administrator
*
*/
public abstract class Mediator {
//定义同事类
protected ConcreteColleague1 colleague1;
protected ConcreteColleague2 colleague2;
//通过get和set方法把同事类注入进来。
public ConcreteColleague1 getColleague1() {
return colleague1;
}
public void setColleague1(ConcreteColleague1 colleague1) {
this.colleague1 = colleague1;
}
public ConcreteColleague2 getColleague2() {
return colleague2;
}
public void setColleague2(ConcreteColleague2 colleague2) {
this.colleague2 = colleague2;
}
//中介者模式的业务逻辑
public abstract void doSomething1();
public abstract void doSomething2();
}
<file_sep>package pattern.P19_VisitorPattern.extend;
/**
* 抽象访问者
*
* @author Administrator
*
*/
public interface IVisitor {
// 可以访问哪些对象
public void visit(ConcreteElement1 concreteElement1);
public void visit(ConcreteElement2 concreteElement2);
// 统计功能
public int getTotal();
}
<file_sep>package pattern.P02_FactoryPattern.extend02;
/**
* 具体的工厂实现类:生产一个具体的对象
*
* @author Administrator
*
*/
public class ConcreteCreator1 extends Creator {
@Override
public Product createProduct() {
return new ConcreteProduct1();
}
}
<file_sep>package pattern.p14_IteratorPattern;
/**
* 抽象容器
*
* @author Administrator
*
*/
public interface Aggregate {
// 元素增加方法
public void add(Object obj);
// 元素删除方法
public void remove(Object obj);
// 由迭代器来遍历所有的元素
public Iterator iterator();
}
<file_sep>package pattern.P20_StatePattern;
/**
* 抽象状态角色
*
* @author Administrator
*
*/
public abstract class State {
// 定义一个环境角色,提供子类访问
protected Context context;
// 设置环境角色
public void setContext(Context context) {
this.context = context;
}
// 行为1
public abstract void handle1();
// 行为2
public abstract void handle2();
}
<file_sep>package pattern.P02_FactoryPattern.extend04;
import java.util.HashMap;
import java.util.Map;
/**
* 延迟初始化:一个对象被使用完后,并不立即释放,工厂类保持其初始化状态,等待再次被使用。
*
* 通过map变量产生一个缓存,对需要再次被重用的对象保留。
*
* @author Administrator
*
*/
public class ProductFactory {
private static final Map<String, Product> map = new HashMap<>();
public synchronized static Product createProduct(String type) {
Product product = null;
//如果map中已经有这个对象,则直接取出来用
if(map.containsKey(type)) {
product = map.get(type);
}else {
if(type.equals("product1")) {
product = new ConcreteProduct1();
}else {
product = new ConcreteProduct2();
}
//同时把对象放入到缓存容器中。
map.put(type, product);
}
return product;
}
}
<file_sep>package pattern.P10_ResponsibilityChainPattern;
/**
* 具体的处理者
*
* @author Administrator
*
*/
public class ConcreteHandler3 extends Handler {
private Level level;
public ConcreteHandler3(int l) {
if(level==null) {
this.level = new Level();
}
this.level.setLevel(l);
}
// 自己的处理级别
@Override
protected Level getHandlerLevel() {
return this.level;
}
// 定义自己的处理逻辑
@Override
protected Response echo(Request request) {
System.out.println("ConcreteHandler3正在处理中...");
return null;
}
}
<file_sep>package pattern.P18_MementoPattern.extend02;
/**
* 多状态的备忘录模式:场景类
*
* 为什么要使用HashMap,直接使用Originator对象的拷贝不是一个很好的方法吗?,可以这样做,这样做就破坏了发起人的通用性,
* 在做恢复动作的时候需要对该对象进行对称赋值操作,也容易产生错误。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
Originator originator = new Originator();
Caretaker caretaker = new Caretaker();
originator.setState1("中国");
originator.setState2("强势");
originator.setState3("繁荣");
System.out.println("======初始化状态======\n"+originator);
caretaker.setMemento(originator.createMemnto());
originator.setState1("软件");
originator.setState2("架构");
originator.setState3("优秀");
System.out.println("======修改后状态======\n"+originator);
originator.restoreMemento(caretaker.getMemento());
System.out.println("======恢复后状态======\n"+originator);
}
}
<file_sep>package pattern.P23_BridgePattern;
/**
* 具体实现化角色
*
* @author Administrator
*
*/
public class ConcreteImplementor2 implements Implementor {
@Override
public void doSomething() {
// 业务逻辑处理
}
@Override
public void doAnything() {
// 业务逻辑处理
}
}
<file_sep>package pattern.P08_MediatorPattern;
/**
* 同事类
*
* @author Administrator
*
*/
public class ConcreteColleague2 extends Colleague {
// 通过构造函数传递中介者
public ConcreteColleague2(Mediator m) {
super(m);
}
// 自有方法
public void selfMethod2() {
}
// 依赖方法
public void depMethod2() {
// 处理自己的业务逻辑
// 自己不能处理的业务逻辑,委托给中介者处理
super.mediator.doSomething2();
}
}
<file_sep>package pattern.p14_IteratorPattern;
import java.util.Vector;
/**
* 具体的容器
*
* @author Administrator
*
*/
public class ConcreteAggregate implements Aggregate {
//容纳对象的容器
@SuppressWarnings("rawtypes")
private Vector v = new Vector<>();
@SuppressWarnings("unchecked")
@Override
public void add(Object obj) {
this.v.add(obj);
}
@Override
public void remove(Object obj) {
this.v.remove(obj);
}
@Override
public Iterator iterator() {
return new ConcreteIterator(this.v);
}
}
<file_sep>package pattern.P21_InterpreterPattern;
/**
* 环境角色
*
* @author Administrator
*
*/
public class Context {
// 具体到例子中使用HashMap代替
}
<file_sep>package pattern.P19_VisitorPattern.common;
/**
* 具体元素
* @author Administrator
*
*/
public class ConcreteElement2 extends Element {
//完善业务逻辑
@Override
public void doSomething() {
System.out.println("元素2处理业务逻辑中...");
}
//允许哪个访问者访问
@Override
public void accept(IVisitor visitor) {
visitor.visit(this);
}
}
<file_sep>package pattern.P21_InterpreterPattern;
import java.util.Stack;
/**
* 解析模式:场景类(使用较少)
*
* 定义: 是一种按照规定语法进行解析的方案,在现在的项目中使用较少。给定一门语言,定义他的文法的一种表示,并定义一个解析器,
* 该解析器使用该表达式来解析语言中的句子。
*
* 角色:
* 1、Expression:抽象表达式
* 具体的解释任务由各个实现类完成
* 2、TerminalExpression:终结符表达式
* 实现与文法中的元素相关联的解释操作,通常一个解释器模式中只有一个终结符表达式,但有多个实例,对应不同的终结符。
* 3、NonterminalExpression:非终结符表达式
* 文法中的每条规则对应于一个非终结表示。非终结表达式根据逻辑的复杂程度而增加,原则上每个文法规则都对应一个非终结表达式。
* 4、Context:环境角色
* 具体到例子中就是采用HashMap代替。
*
* 优点:
* 解析器是一个简单语法分析工具,它最显著的有点就是扩展性,修改语法规则只要修改相应的非终结表达式就可以了,若扩展语法,则只要增加
* 非终结表达式类就可以了。
*
* 缺点:
* 1、解析器模式会引起类膨胀
* 每个语法都要产生一个非终结表达式,语法规则比较复杂的时候,就可能产生大量的类文件,为维护带来了很大麻烦。
* 2、解析器模式采用递归调用方法
* 每个非终结表达式只关心自己有关的表达式,每个表达式都需要知道最终的结果,必须一层一层的剥离,无论是面向过程语言还是面向对象的语言,
* 递归都是在必要条件下使用的,它导致调试非常复杂。
* 3、效率问题
* 解析器模式由于使用了大量的循环递归,效率是一个不容忽视的问题,特别是一用于解析复杂、冗长的语法时,效率是难以忍受的。
*
* 使用场景
* 1、重复发生的问题。
* 2、一个简单语法需要解析的场景。
*
* 注意事项
* 尽量不要在重要的模块中使用解析器模式,否则维护是一个很大的问题,在项目中,可以使用shell、JRuby、Groovy等脚本语言来代替解析器模式。
*
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
Context context = new Context();
// 通常定义一个语法容器,容纳一个具体的表达式,通常为ListArrays,Linkedlist,Stack等类型
Stack<Expression> stack = new Stack<>();
// for(;;) {
// 进行语法判断,并产生递归调用
// }
// 产生一个完整的语法树,由各个具体的语法分析进行解析。
Expression exp = stack.pop();
// 具体元素进入场景
exp.Interpreter(context);
}
}
<file_sep>package pattern.p15_CompositePattern.common;
/**
* 组合模式:场景类
*
* 定义:也叫做合成模式,或者部分-整体模式,定义如下:
* 将对象组合成树形结构以表示“部分-整体”的层次结构,使得用户单个对象和组合对象的使用具有一致性。
*
* 组合模式的角色:
* 1、Component抽象构件角色:定义参加组合对象的共有方法和属性,可以定义一些默认的行为和属性。
* 2、Leaf叶子构件:叶子对象,其下再也没有其他的分支,也就是遍历的最小单位。
* 3、Composite树枝构件:树枝对象,它的作用是组合树枝结点和叶子结点形成一个树形结构
*
*
* 优点:
* 1、高层模块调用简单:一颗树形结构的所有结点都是Component,局部和整体对调用者来说没有任何区别,
* 也就是说高层模块不必关心自己处理的是单个对象还是整个组合结构,简化了高层模块的代码。
* 2、节点自由增加:使用了组合模式后,想增加一个树枝节点或者树叶节点都很容易,只要找到他的父节点就
* 非常容易扩展,符合开闭原则,对以后的维护非常有利。
*
* 缺点:
* 树枝和树叶的定义直接使用了实现类,与依赖倒置原则冲突,限制了接口的影响范围。
*
* 使用场景:
* 1、维护和展现部分-整体关系的场景,如树新菜单,文件和文件夹管理。
* 2、从一个整体中能够独立出部分模块或功能的场景。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
// 创建一个根节点
Composite root = new Composite();
root.doSomething();
// 创建一个树枝结点
Composite branch = new Composite();
// 创建一个叶子结点
Leaf leaf = new Leaf();
// 组件整体
root.add(branch);
branch.add(leaf);
// 打印
display(root);
}
public static void display(Composite composite) {
for (Component c : composite.getChildren()) {
if (c instanceof Leaf) {// 叶子结点
c.doSomething();
} else {// 树枝结点···
display((Composite) c);
}
}
}
}
<file_sep>package pattern.P09_CommandPattern;
/**
* 具体的命令类
*
* @author Administrator
*
*/
public class ConcreteCommand1 extends Command {
//声明自己的默认接受者
public ConcreteCommand1() {
super(new ConcreteReceiver1());
}
//设置新的接受者
public ConcreteCommand1(Receiver r) {
super(r);
}
//每个具体的命令都必须实现一个命令
@Override
public void execute() {
super.receiver.doSomething();
}
}
<file_sep>package pattern.p16_ObserverPattern;
public class ConcreteSubject extends Subject {
// 具体的业务
public void doSomething() {
/*
* 一些业务逻辑
*/
super.notifyObserver();
}
}
<file_sep>package pattern.P11_DecoratorPattern;
/**
* 抽象装饰类
*
* @author Administrator
*
*/
public abstract class Decorator extends Component {
private Component component = null;
// 通过构造函数传递被修饰者
public Decorator(Component c) {
this.component = c;
}
// 委托给被修饰者执行
@Override
public void operate() {
this.component.operate();
}
}
<file_sep>package pattern.p15_CompositePattern.common;
import java.util.ArrayList;
/**
* Composite (branch) component: holds child components and exposes add/remove/getChildren.
*
* @author Administrator
*
*/
public class Composite extends Component {
private ArrayList<Component> arrayList = new ArrayList<>();
public void add(Component component) {
this.arrayList.add(component);
}
public void remove(Component component) {
this.arrayList.remove(component);
}
public ArrayList<Component> getChildren() {
return this.arrayList;
}
}
<file_sep>package pattern.P02_FactoryPattern.extend02;
/**
* 多工厂模式的抽象工厂
*
* @author Administrator
*
*/
public abstract class Creator {
/**
* 抽象方法中已经不再需要传递相关参数了,
* 因为每一个具体的工厂都已经非常明确自己的职责:创建自己负责的产品对象
*
* @return
*/
public abstract Product createProduct();
}
<file_sep>package pattern.P19_VisitorPattern.extend2;
/**
* 具体的统计接口访问者
*
* @author Administrator
*
*/
public class TotalVisitor implements ITotalVisitor {
// 乘法系数
private final static int E1 = 5;
private final static int E2 = 2;
// e1统计值
private int E1Total = 0;
// e2统计值
private int E2Total = 0;
// 访问concreteElement1的元素
@Override
public void visit(ConcreteElement1 concreteElement1) {
getE1Total(concreteElement1.getNum());
}
// 访问concreteElement2的元素
@Override
public void visit(ConcreteElement2 concreteElement2) {
getE1Total(concreteElement2.getNum());
}
public void getE1Total(int num) {
this.E1Total = this.E1Total + num * E1;
}
public void getE2Total(int num) {
this.E2Total = this.E2Total + num * E2;
}
@Override
public void getTotal() {
System.out.println("统计值为:" + this.E1Total + this.E2Total);
}
}
<file_sep>package pattern.P06_ProxyPattern.extend04;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
/**
* 动态代理类的Handler类
*
* @author Administrator
*
*/
public class MyInvocationHandler implements InvocationHandler {
//被代理的对象
private Object target;
//通过构造函数传递一个对象
public MyInvocationHandler(Object object) {
this.target = object;
}
//代理方法
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
//执行被代理的方法
return method.invoke(this.target, args);
}
}
<file_sep>package pattern.P05_BuilderPattern;
/**
* 具体建造者
*
* @author Administrator
*
*/
public class ConcreteProduct extends Builder {
private Product product = new Product();
/*
* 设置产品零件
*/
@Override
public void setPart() {
System.out.println("产品类内的逻辑处理");
}
/*
* 组建一个产品
*/
@Override
public Product buildProduct() {
return product;
}
}
<file_sep>package pattern.P06_ProxyPattern.extend03;
import pattern.P06_ProxyPattern.common.RealSubject;
import pattern.P06_ProxyPattern.common.Subject;
/**
* 虚拟代理的代理类
*
* 在需要的时候才初始化真实角色,可以避免被代理对象较多而引起的初始化缓慢的问题,
* 其缺点是需要在每个方法(实现接口中的方法)中判断真实角色是否被创建。
*
* @author Administrator
*
*/
public class Proxy implements Subject {
// 要代理哪个实现类
private Subject subject = null;
// 实现接口中的方法
@Override
public void request() {
this.before();
/*
* 判断一下真实角色是否被初始化
*/
if (this.subject == null) {
subject = new RealSubject();
}
this.subject.request();
this.after();
}
// 预处理
private void before() {
System.out.println("before do something");
}
// 善后处理
private void after() {
System.out.println("after do something.");
}
}
<file_sep>package pattern.P06_ProxyPattern.common;
/**
* 抽象主题类
*
* @author Administrator
*
*/
public interface Subject {
//定义一个方法
public void request();
}
<file_sep>package pattern.P01_SingletonPattern.common;
/**
* 单例模式通用代码(饿汉式单例模式)
*
* @author Administrator
*
*/
public class Singleton_hungry {
/*
* 饿汉式单例模式 final修饰基本类型数据时,数据是无法改变的, final修饰引用类型数据是,对象的引用无法改变,但对象的内容是可以改变的。
*/
private static final Singleton_hungry singleton = new Singleton_hungry();
// 限制产生多个对象
private Singleton_hungry() {
}
// 获取实例对象方法
public static Singleton_hungry getSingleton() {
return singleton;
}
// 类中的其他方法。尽量使用static。
public static void otherMethod() {
}
}
<file_sep>package pattern.p16_ObserverPattern;
/**
* 具体的观察者
*
* @author Administrator
*
*/
public class ConcreteObserver implements Observer {
// 实现更新方法
@Override
public void update() {
System.out.println("接收到信息,并进行处理");
}
}
<file_sep>package pattern.P01_SingletonPattern.common;
/**
* 单例模式通用代码(懒汉式单例模式)
*
* @author Administrator
*
*/
public class Singleton_lazy {
// 懒汉式单例模式
// volatile is required for double-checked locking to be safe under the Java memory model
private static volatile Singleton_lazy singleton;
// 限制产生多个对象
private Singleton_lazy() {
}
// 获取实例对象方法(线程安全的)
public static Singleton_lazy getSingleton() {
if (singleton == null) {
synchronized (Singleton_lazy.class) {
if (singleton == null) {
singleton = new Singleton_lazy();
}
}
}
return singleton;
}
// 类中的其他方法。尽量使用static。
public static void otherMethod() {
}
}
<file_sep>package pattern.P17_FacadePattern;
/**
* 门面模式:场景类
*
* 定义:
* 也叫外观模式,是一种比较常见的封装模式,其定义如下:要求一个子系统的外部与其内部的通讯必须通过一个统一的对象进行,门面模式提供一个高层次的接口使得子系统更易于使用。
*
*
* 角色:
* 1、Facade门面角色:
* 客户端可以调用这个角色的方法,此角色知晓子系统的所有功能和责任。一般情况下本角色会将所有从客户端发来的请求委派到响应的子系统去,也就是说该角色没有实际的业务逻辑,只是一个委托类。
* 2、subsystem子系统角色:
* 可以同时拥有一个或者多个子系统,每个子系统都不是一个单独的类,而是一个类的集合,子系统并不知道门面的存在,对于子系统而言,门面仅仅是另外一个客户端而已。
*
* 有点:
* 1、减少系统的相互依赖
* 2、提高灵活性
* 3、提高安全性
*
* 缺点:
* 最大缺点是不符合开闭原则,对修改关闭,对扩展开放。
*
* 适用场景:
* 1、为一个复杂的模块或者子系统提供一个外界访问的即口。
* 2、子系统相对独立——外界对子系统的访问只要黑箱操作即可。
* 3、预防低水平的人员带来的风险扩散。.
*
* 注意事项:
* 1、一个子系统可以有多个门面。
* 2、门面不参与子系统内的业务逻辑。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
// 定义门面
Facade facade = new Facade();
// 调用子系统
facade.methodA();
facade.methodB();
facade.methodC();
}
}
<file_sep>package pattern.P22_FlyweightPattern;
/**
* 具体享元角色
*
* @author Administrator
*
*/
public class ConcreteFlyweight1 extends Flyweight {
// 接受外部状态
public ConcreteFlyweight1(String extrinsic) {
super(extrinsic);
}
// 根据外部状态进行逻辑处理
@Override
public void operate() {
// 业务逻辑
}
}
<file_sep>package pattern.p04_TemplateMethodPattern.extend;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
* 模板模式:场景类
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) throws Exception {
ConcreteClass1 class1 = new ConcreteClass1();
ConcreteClass2 class2 = new ConcreteClass2();
String type = (new BufferedReader(new InputStreamReader(System.in))).readLine();
if("0".equals(type)) {
class1.setCan(false);
}
class1.templateMethod();
class2.templateMethod();
}
}
<file_sep>package pattern.P19_VisitorPattern.common;
/**
* 访问者模式 :场景类
*
* 定义:
* 封装一些作用于某种数据结构中的各个元素的操作,它可以在不改变数据结构的前提下定义作用于这些元素的新操作。
*
* 角色:
* 1、Visitor抽象访问者
* 抽象类或者接口声明访问者可以访问哪些元素,具体到程序中就是visit方法的参数定义哪些对象是可以访问的。
* 2、ConcreteVisitor具体访问者
* 他影响访问者访问到一个类后该怎么干,要做什么事情。
* 3、Element抽象元素
* 接口或者抽像类,声明接受哪一类访问者访问,程序上是通过accept方法中的参数定义的。
* 4、ConcreteElement具体元素
* 实现accept方法,通常是visitor.visit(this),基本上都形成了一种模式了。
* 5、ObjectStruture结构对象
* 元素生产者,一般容纳在多个不同类、不同接口的容器中,如list,map,set等,在项目中,一般很少抽象出这个角色。
*
* 优点:
* 1、符合单一职责原则
* 具体元素角色也就是Element抽象类的两个子类负责数据的加载,而Visitor类则负责报表的展现,两个不同的职责非
* 常明确的分离开来,各自演绎变化。
* 2、优秀的扩展性
* 由于职责分开,继续增加对数据的操作是非常快捷的。
* 3、灵活性非常高
*
*
* 缺点:
* 1、具体元素对访问者提供细节
* 访问者要访问一个类就必然要求这个类公布一些方法和数据,也就是说访问者关注了其他类的内部细节,这是迪米特发则不建议的。
* 2、具体元素变更比较困难
* 具体元素角色的增加,删除,修改是比较困难的。
* 3、违背了依赖倒置原则
* 访问者依赖的是具体元素,而不是抽象元素,这破坏了依赖倒置的原则,特别是在面向对象编程中,抛弃了对接口的依赖,而直接
* 依赖实现类,扩展比较困难。
*
*
* 适用场景:
* 1、一个对象结构包含很多类的对象,它们有不同的接口,而你想对这些对象实施一些依赖于其具体类的操作,也就是说用迭代器模
* 式已经不能胜任的情景。
* 2、需要对一个对象结构中的对象进行很多不同并且不相关的操作,而你想避免让这些操作“污染”这些对象的类。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
for (int i = 0; i < 10; i++) {
// 获得对象元素
Element e = ObjectStruture.createElement();
// 接受访问者访问
e.accept(new Visitor());
}
}
}
<file_sep>package pattern.P22_FlyweightPattern;
/**
* 具体享元角色
*
* @author Administrator
*
*/
public class ConcreteFlyweight2 extends Flyweight {
// 接受外部状态
public ConcreteFlyweight2(String extrinsic) {
super(extrinsic);
}
// 根据外部状态进行逻辑处理
@Override
public void operate() {
// 业务逻辑
}
}
<file_sep>package pattern.P03_AbstractFactoryPattern;
/**
* 抽象工厂模式 ,场景类
*
* 抽象工厂的最大缺点就是扩展麻烦。
*
* @author Administrator
*
*/
public class Client {
@SuppressWarnings("unused")
public static void main(String[] args) {
//定义2个工厂
AbstractCreator a = new Creator1();
AbstractCreator b = new Creator2();
//产生产品A1对象
AbstractProductA A1 = a.createProductA();
//产生产品A2对象
AbstractProductA A2 = b.createProductA();
//产生产品B1对象
AbstractProductB B1 = a.createProductB();
//产生产品B2对象
AbstractProductB B2 = b.createProductB();
}
}
<file_sep>package pattern.P07_PrototypePattern.extend;
import java.util.ArrayList;
import java.util.List;
/**
* 深拷贝
*
* @author Administrator
*
*/
public class DeepCopy implements Cloneable {
// 定义一个私有变量
private ArrayList<String> list = new ArrayList<String>();
@SuppressWarnings("unchecked")
@Override
public DeepCopy clone() {
DeepCopy thing = null;
try {
thing = (DeepCopy) super.clone();
thing.list = (ArrayList<String>) this.list.clone();
} catch (CloneNotSupportedException e) {
e.printStackTrace();
}
return thing;
}
//赋值
public void setValue(String e) {
this.list.add(e);
}
//取值
public List<String> getValue() {
return this.list;
}
}
<file_sep>package pattern.P06_ProxyPattern.extend04;
/**
* 动态代理模式
*
* 场景类
*
* 要实现动态代理的首要条件是 :被代理类必须实现一个接口。
* (并不一定是绝对的,比如CGLIB技术可以实现不需要接口也可以实现动态代理)
*
* 什么是动态代理?
* 动态代理是在实现阶段不用关心代理谁,而在运行阶段才指定代理哪一个对象。
*
* AOP切面编程技术,核心就是采用了动态代理机制。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
// 定义一个主题
Subject subject = new RealSubject();
// 定义主题的代理
Subject proxy = SubjectDynamicProxy.newProxyInstance(subject);
// 代理的行为
proxy.doSomething("Finish");
}
}
<file_sep>package pattern.P06_ProxyPattern.common;
/**
* 场景类
*
* 代理模式:也叫做委托模式
* 主要有三个角色定义:
* 1、Subject抽象主题角色:抽象主题角色可以是抽像类也可以是接口,是一个最普通的业务类型定义,无特殊要求。
* 2、RealSubject具体主题角色:也被称为委托角色,被代理角色,是业务逻辑的具体执行者。
* 3、Proxy代理主题角色:也被称为委托类或者代理类,它负责对真实角色的应用,把所有抽象主题类定义的方法限制委托给真实主题角色实现
* 并且在真实主题角色处理完毕前后,做预处理和善后处理的工作。
*
* 例如:
* Struts2的Form元素映射就采用了代理模式。(动态代理)
*
* Spring AOP。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
//定义一个主题角色
Subject s1 = new RealSubject();
//定义一个代理角色
Proxy proxy = new Proxy(s1);
//代理角色处理主题角色的业务逻辑
proxy.request();
}
}
<file_sep>package pattern.p16_ObserverPattern;
/**
* 观察者模式:场景类
*
* 定义:
* 观察者模式也叫做发布订阅模式,他是一个在项目中经常使用的模式,定义如下,
* 定义对象间一种一对多的依赖关系,使得每当一个对象改变状态,则所有依赖于它的对象都会得到通知并自动更新。
*
* 角色模块:
* 1、Subject被观察者
* 定义被观察者必须实现的职责,它必须能够动态的增加,删除观察者,它一般是抽象类或者是实现类,仅仅完成作为
* 被观察者必选实现的职责:管理观察者并通知观察者。
* 2、Observer观察者
* 观察者接受到消息后,进行update(更新)操作,对接受到的信息进行处理。
* 3、ConcreteSubject具体的被观察者
* 定义被观察者自己的业务逻辑,同时定义对那些事件的通知。
* 4、ConcreteObserver具体的观察者
* 每个观察者在接受到消息后的处理反应是不同的,各个观察者有自己的处理逻辑。
*
* 优点:
* 1、观察者和被观察者之间是抽象耦合
* 如此设计,则不管是增加观察者,还是被观察者都非常容易扩展,而且在java中都已经实现的抽象层级定义,在系统
* 扩展方面都得心应手。
* 2、建立一套触发机制
* 根据单一职责原则,每个类的职责是单一的,比如,猎人打猎,死了一只母鹿,母鹿有三个幼崽,因没有母鹿在照顾而
* 饿死,尸体被两只秃鹰争夺,因分配不均开始斗殴,然后输了的秃鹰死掉,生存下来的因此扩大了地盘,这就形成了一
* 个触发机制,形成了一个触发链,观察者 模式可以完美的实现这里的链条形式。
*
*
* 缺点:
* 需要考虑一下开发效率和运行效率问题,一个被观察者,多个观察者,在开发和调试的时候就比较复杂,而且在java中
* 消息的通知默认是顺序执行的,一个观察者卡壳会影响整体的执行效率,这种情况下,一般考虑使用异步的方式。
*
*
* 适用场景:
* 1、关联行为场景。需要注意的是,关联行为是可拆分的,而不是组合的关系。
* 2、事件多级触发场景。
* 3、跨系统的消息交换场景,如消息队列的处理机制。
*
* 注意事项:
* 1、广播链的问题。
* 2、异步处理问题。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
// 创建一个被观察者
ConcreteSubject subject = new ConcreteSubject();
// 创建一个观察者
Observer observer = new ConcreteObserver();
// 观察者观察被观察者
subject.addObserver(observer);
// 观察者开始行动了
subject.doSomething();
}
}
<file_sep>package pattern.P13_AdapterPattern.common;
/**
* 目标角色
*
* @author Administrator
*
*/
public interface Target {
// 目标角色有自己的方法
public void request();
}
<file_sep>package pattern.P07_PrototypePattern.common;
/**
* 原型模式:
*
* 不通过new关键字来产生一个对象,而是通过对象复制来实现的模式叫做原型模式。
*
* 优点:
* 原型模式是在内存二进制流的拷贝,要比直接new一个对象性能好的多,特别是要在一个循环内产生大量对象时,原型模式可以更好的体现他的有点。
*
* 缺点:
* 通过原型模式产生的对象,不会执行构造函数。不受构造函数的约束。
*
* 使用原型模式时,类的成员变量上不要加上final关键字。
*
* @author Administrator
*
*/
public class PrototypeClass implements Cloneable {
@Override
public PrototypeClass clone() {
PrototypeClass prototypeClass = null;
try {
prototypeClass = (PrototypeClass) super.clone();
} catch (CloneNotSupportedException e) {
e.printStackTrace();
}
return prototypeClass;
}
}
<file_sep>package pattern.P06_ProxyPattern.extend01;
/**
* 真实主题类
*
* @author Administrator
*
*/
public class RealSubject implements Subject {
@SuppressWarnings("unused")
private String name = "";
/*
* 构造函数限制谁能创建对象,并同时传递名称
*
* 在构造函数中,传递进来一个RealSubject对象,检查谁能创建真实的角色,当前还可以有其他限制,比如类名必须是Proxy等等
* 可以自行扩展。
*/
public RealSubject(Subject sub,String name) throws Exception{
if(sub==null) {
throw new Exception("不能创建真实角色");
}else {
this.name = name;
}
}
// 实现方法
@Override
public void request() {
System.out.println("业务逻辑处理");
}
}
<file_sep>package pattern.P02_FactoryPattern.extend02;
/**
* 具体的工厂实现类:生产一个具体的对象
*
* @author Administrator
*
*/
public class ConcreteCreator2 extends Creator {
@Override
public Product createProduct() {
return new ConcreteProduct2();
}
}
<file_sep>package pattern.P02_FactoryPattern.extend03;
/**
* ³¡¾°Àà
*
* @author Administrator
*
*/
public class Client {
@SuppressWarnings("unused")
public static void main(String[] args) {
Singleton singleton = SingletonCreator.getSingleton();
}
}
<file_sep>package pattern.P09_CommandPattern;
/**
* 抽象接受者
*
* @author Administrator
*
*/
public abstract class Receiver {
// 抽象接受者,定义每个接受者都必须完成任务
public abstract void doSomething();
//每个接受者都要对直接执行的任务可以回滚
public void rollback() {
//具体的回滚操作
System.out.println("正在回退命令...");
}
}
<file_sep>package pattern.P09_CommandPattern;
/**
* 抽象命令类
*
* @author Administrator
*
*/
public abstract class Command {
//定义一个子类的全局共享变量
protected final Receiver receiver;
//实现类必须定义一个接受者
public Command(Receiver r) {
this.receiver = r;
}
//每个命令类都必须有一个执行命令的方法
public abstract void execute();
}
<file_sep>package pattern.p15_CompositePattern.common;
/**
* 抽象构件
*
* @author Administrator
*
*/
public abstract class Component {
// 个体和整体都具有的共享
public void doSomething() {
// 业务逻辑
}
}
<file_sep>package pattern.P19_VisitorPattern.extend2;
/**
* 具体元素
*
* @author Administrator
*
*/
public class ConcreteElement2 extends Element {
// private int num = new Random().nextInt(100);
private int num = 20;
public int getNum() {
return num;
}
// 完善业务逻辑
@Override
public void doSomething() {
System.out.println("元素2处理业务逻辑中...");
}
// 允许哪个访问者访问
@Override
public void accept(IVisitor visitor) {
visitor.visit(this);
}
}
<file_sep>package pattern.P21_InterpreterPattern;
/**
* 抽象表达式
*
* 抽象表达式是生成语法集合(也叫语法树)的关键,每个语法集合完成指定语法解析任务,
* 它是通过递归调用的方式,最终由最小的语法单元进行解析完成。
*
* @author Administrator
*
*/
public abstract class Expression {
// 必须有一个解析任务
public abstract Object Interpreter(Context c);
}
<file_sep>package pattern.P02_FactoryPattern.extend04;
/**
* 具体的产品类:可以有多个,都继承于抽象产品类
*
* @author Administrator
*
*/
public class ConcreteProduct1 extends Product {
@Override
public void method2() {
// TODO 业务逻辑实现。
}
}
<file_sep>package pattern.P19_VisitorPattern.common;
/**
* 具体的访问者
*
* @author Administrator
*
*/
public class Visitor implements IVisitor {
// 访问concreteElement1的元素
@Override
public void visit(ConcreteElement1 concreteElement1) {
concreteElement1.doSomething();
}
// 访问concreteElement2的元素
@Override
public void visit(ConcreteElement2 concreteElement2) {
concreteElement2.doSomething();
}
}
<file_sep>package pattern.P19_VisitorPattern.common;
/**
* 抽象访问者
* @author Administrator
*
*/
public interface IVisitor {
//可以访问哪些对象
public void visit(ConcreteElement1 concreteElement1);
public void visit(ConcreteElement2 concreteElement2);
}
<file_sep>package pattern.P13_AdapterPattern.extend;
/**
* 目标角色实现类
*
* @author Administrator
*
*/
public class ConcreteTarget implements Target {
@Override
public int request() {
System.out.println("if you need any help,please call me!---->1");
return 1;
}
@Override
public int request2() {
System.out.println("if you need any help,please call me!---->2");
return 2;
}
}
<file_sep>package pattern.P11_DecoratorPattern;
/**
* 装饰模式:场景类
*
* 装饰模式定义:动态的给一个对象添加一些额外的职责,就增加功能来说,装饰模式相比生成子类更加灵活。
*
* 主要分为4个角色:
* 1、Component是一个接口或者是抽象类,就是定义我们最核心的对象,也就是最原始的对象,
* 2、ConcreteComponent是最核心、最原始、最基本的接口或者抽象类的实现,你要装饰的就是他。
* 3、Decorator抽象装饰类,它里面不一定有抽象的方法,在它的属性里必然有一个private变量执行Component抽象构件。
* 3、ConcreteDecorator是具体的装饰类,你要把最核心、最原始、最基本的东西装饰成其他东西。
*
* 优点:
* 1、装饰类和被装饰类可以独立的发展,而不会相互耦合。换句话说,Component类无需知道Decorator类,Decorator类是从外部来扩展Component类的功能,而Decorator也不用知道具体的构件。
* 2、装饰模式是继承关系的一个替代方案,,我们看装饰类Decorator,不管装饰多杀层,返回的对象还是Component,实现的还是is-a的关系。
* 3、装饰模式可以动态的扩展一个实现类的功能。
*
* 缺点:
* 多层的装饰是比较复杂的,需要尽量减少装饰类的数量,以便降低系统的复杂度。
*
* 使用场景:
* 1、需要扩展一个类的功能,或者给一个类增加附加功能。
* 2、需要动态的给一个对象增加功能,这些功能可以再动态的撤销。
* 3、需要为一批的兄弟类进行改装或加装功能。
*
* @author Administrator
*
*/
public class Client {
public static void main(String[] args) {
//定义一个构件
Component component = new ConcreteComponent();
//第一次修饰
component = new ConcreteDecorator1(component);
//第二次修饰
component = new ConcreteDecorator2(component);
//修饰后运行
component.operate();
}
}
|
e11c941d7245ef93c66d449822161f60982e8434
|
[
"Java"
] | 61 |
Java
|
qujunyao/DesignPattern
|
d8df4f2a72ffd8147e1ebb7f1e401b79eb4dd2bd
|
4670134c2d18db55b80f4c68baa992001c168455
|
refs/heads/master
|
<file_sep>var modal = document.getElementById("myModal");
var create=document.getElementById("create");
var displaycreate=false;
var modalcontent=document.getElementById("modal-content");
// When the user clicks on the button, open the modal
function onClick(data){
data=JSON.parse(data);
var container=document.createElement("DIV");
var heading=document.createElement("h1");
heading.innerText="EDIT"
heading.classList.add("form-heading");
container.classList.add("container");
var form=document.createElement("FORM");
form.setAttribute("action",`/edit/${data.id}?_method=PUT`);
form.setAttribute("method","POST");
var labelfirstname=document.createElement("LABEL");
labelfirstname.setAttribute("for","firstname");
labelfirstname.innerText="FIRST NAME";
var labellastname=document.createElement("LABEL");
labellastname.setAttribute("for","lastname");
labellastname.innerText="LAST NAME";
var firstname=document.createElement("INPUT");
firstname.setAttribute("type","text");
firstname.setAttribute("name","firstname");
firstname.setAttribute("value",data.firstname);
firstname.setAttribute("id","firstname");
var lastname=document.createElement("INPUT");
lastname.setAttribute("type","text");
lastname.setAttribute("name","lastname");
lastname.setAttribute("value",data.lastname);
lastname.setAttribute("id","lastname");
form.appendChild(labelfirstname);
form.appendChild(firstname);
form.appendChild(labellastname);
form.appendChild(lastname);
var close=document.createElement("DIV");
close.classList.add("close");
close.innerHTML="×"
var button=document.createElement("BUTTON");
button.setAttribute("type","submit");
button.innerText="SUBMIT";
form.appendChild(button);
modalcontent.innerHTML="";
modalcontent.appendChild(close);
modalcontent.appendChild(heading)
container.appendChild(form);
modalcontent.appendChild(container);
modal.style.display="block";
var div = document.getElementsByClassName("close")[0];
div.onclick = function() {
modal.style.display = "none";
}
}
window.onclick = function(event) {
if (event.target == modal) {
modal.style.display = "none";
}
}
function displayCreate(){
if(!displaycreate){
create.style.display="block"
}else{
console.log("helo")
create.style.display="none";
}
displaycreate=!displaycreate;
}
|
b2118351cf0cc4acbc3516871a9edb2a90a934ba
|
[
"JavaScript"
] | 1 |
JavaScript
|
urvashigupta7/nodejs_sql
|
078166572fabd610fe82d0fe43ec5bcd642f7aa3
|
f8bed188c4841f72f6d488eb274d052044bd877f
|
refs/heads/main
|
<repo_name>JacekGrochowina/Users-Overview<file_sep>/src/app/modules/users/dialogs/dialog-add-single/dialog-add-single.component.ts
import { Component, OnInit } from '@angular/core';
import { FormBuilder, FormGroup } from '@angular/forms';
import { Genders } from 'src/app/resources/data/genders.data';
import { Nationalities } from 'src/app/resources/data/nationalities.data';
import { UsersService } from 'src/app/services/users.service';
import { UsersFacade } from '../../+state/users.facade';
@Component({
selector: 'app-dialog-add-single',
templateUrl: './dialog-add-single.component.html',
styleUrls: ['./dialog-add-single.component.scss'],
})
export class DialogAddSingleComponent implements OnInit {
formGroup!: FormGroup;
nationalities = Nationalities;
genders = Genders;
constructor(
private fb: FormBuilder,
private usersService: UsersService,
private usersFacade: UsersFacade
) {}
ngOnInit(): void {
this.initFormGroup();
}
onSubmit(): void {
// console.log(this.formGroup.value);
// this.usersService
// .getUsers(this.formGroup.value)
// .subscribe((users) => console.log(users));
this.usersFacade.addUsers(this.formGroup.value);
}
private initFormGroup(): void {
this.formGroup = this.fb.group({
gender: [this.genders[0].value],
nationality: [this.nationalities[0].value],
});
}
}
<file_sep>/src/app/resources/data/genders.data.ts
import { Gender } from '../interfaces/gender.interface';
export const Genders: Gender[] = [
{ name: 'Kobieta', value: 'female' },
{ name: 'Mężczyzna', value: 'male' },
];
<file_sep>/src/app/modules/users/modules/user/user.module.ts
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { UserRoutingModule } from './user-routing.module';
import { UserComponent } from './user.component';
import { MatButtonModule } from '@angular/material/button';
import { MatIconModule } from '@angular/material/icon';
@NgModule({
declarations: [UserComponent],
imports: [CommonModule, UserRoutingModule, MatButtonModule, MatIconModule],
})
export class UserModule {}
<file_sep>/src/app/modules/users/+state/users.reducers.ts
import { UsersState } from 'src/app/resources/interfaces/state/users-state.interface';
import { UsersAction, UsersActionTypes } from './users.actions';
const usersInitialState: UsersState = {
collection: [],
};
export function UsersReducer(
state = usersInitialState,
action: UsersAction
): UsersState {
switch (action.type) {
case UsersActionTypes.addUsers: {
return {
...state,
};
}
case UsersActionTypes.addUsersSuccess: {
return {
...state,
collection: state.collection.concat(action.payload.results),
};
}
case UsersActionTypes.addUsersFail: {
return {
...state,
};
}
default:
return {
...state,
};
}
}
<file_sep>/src/app/modules/users/components/users-table/users-table.component.ts
import { Component, AfterViewInit, OnInit, ViewChild } from '@angular/core';
import { FormBuilder, FormControl } from '@angular/forms';
import { MatPaginator } from '@angular/material/paginator';
import { MatTableDataSource } from '@angular/material/table';
import { merge, Observable, Subject } from 'rxjs';
import { takeUntil } from 'rxjs/operators';
import { ColumnDefinition } from 'src/app/resources/interfaces/column-definition.interface';
import { User } from 'src/app/resources/interfaces/user.interface';
import { UsersFacade } from '../../+state/users.facade';
@Component({
selector: 'app-users-table',
templateUrl: './users-table.component.html',
styleUrls: ['./users-table.component.scss'],
})
export class UsersTableComponent implements OnInit, AfterViewInit {
isEditMode: boolean = true;
formDisplayedColumns = this.fb.group({
nameFirst: [false],
nameLast: [false],
gender: [false],
age: [false],
email: [false],
phone: [false],
nat: [false],
});
users$ = this.usersFacade.users$;
usersCollection!: User[];
columnsDefinitions!: ColumnDefinition[];
columns: string[] = ['nameFirst', 'nameLast', 'gender', 'nat'];
dataSource!: MatTableDataSource<User>;
private unsubscribe$ = new Subject<void>();
@ViewChild(MatPaginator) paginator!: MatPaginator;
constructor(private fb: FormBuilder, private usersFacade: UsersFacade) {}
ngOnInit(): void {
this.handleUsers();
this.setTableColumns();
this.getDisplayedColumns();
}
ngAfterViewInit(): void {
this.dataSource.paginator = this.paginator;
const observables: Observable<boolean>[] = [
this.controlNameFirst.valueChanges,
this.controlNameLast.valueChanges,
this.controlGender.valueChanges,
this.controlAge.valueChanges,
this.controlEmail.valueChanges,
this.controlPhone.valueChanges,
this.controlNat.valueChanges,
];
// merge all column-visibility toggles into a single stream
merge(...observables)
.pipe(takeUntil(this.unsubscribe$))
.subscribe(() => {
this.columnsDefinitions[0].isVisible = this.controlNameFirst.value;
this.columnsDefinitions[1].isVisible = this.controlNameLast.value;
this.columnsDefinitions[2].isVisible = this.controlGender.value;
this.columnsDefinitions[3].isVisible = this.controlAge.value;
this.columnsDefinitions[4].isVisible = this.controlEmail.value;
this.columnsDefinitions[5].isVisible = this.controlPhone.value;
this.columnsDefinitions[6].isVisible = this.controlNat.value;
this.getDisplayedColumns();
});
}
ngOnDestroy(): void {
// complete the takeUntil subject so the valueChanges and users$ subscriptions are released
this.unsubscribe$.next();
this.unsubscribe$.complete();
}
searchFilter(event: Event): void {
const filterValue = (event.target as HTMLInputElement).value;
this.dataSource.filter = filterValue.trim().toLowerCase();
if (this.dataSource.paginator) {
this.dataSource.paginator.firstPage();
}
}
toggleEditMode(): void {
this.isEditMode = !this.isEditMode;
}
private getDisplayedColumns(): void {
this.columns = this.columnsDefinitions
.filter((cd: ColumnDefinition) => cd.isVisible)
.map((cd: ColumnDefinition) => cd.def);
}
private handleUsers(): void {
this.users$
.pipe(takeUntil(this.unsubscribe$))
.subscribe((users: User[]) => {
this.usersCollection = users;
this.dataSource = new MatTableDataSource<User>(this.usersCollection);
this.dataSource.paginator = this.paginator;
});
}
private setTableColumns(): void {
this.columns.forEach((tableColumn) => {
this.formDisplayedColumns.get(tableColumn)?.patchValue(true);
});
this.columnsDefinitions = [
{
def: 'nameFirst',
label: 'Imię',
isVisible: this.controlNameFirst.value,
},
{
def: 'nameLast',
label: 'Nazwisko',
isVisible: this.controlNameLast.value,
},
{ def: 'gender', label: 'Płeć', isVisible: this.controlGender.value },
{ def: 'age', label: 'Wiek', isVisible: this.controlAge.value },
{ def: 'email', label: 'Email', isVisible: this.controlEmail.value },
{
def: 'phone',
label: 'Telefon',
isVisible: this.controlPhone.value,
},
{ def: 'nat', label: 'Kraj', isVisible: this.controlNat.value },
];
this.getDisplayedColumns();
}
get controlNameFirst(): FormControl {
return this.formDisplayedColumns.get('nameFirst') as FormControl;
}
get controlNameLast(): FormControl {
return this.formDisplayedColumns.get('nameLast') as FormControl;
}
get controlGender(): FormControl {
return this.formDisplayedColumns.get('gender') as FormControl;
}
get controlAge(): FormControl {
return this.formDisplayedColumns.get('age') as FormControl;
}
get controlEmail(): FormControl {
return this.formDisplayedColumns.get('email') as FormControl;
}
get controlPhone(): FormControl {
return this.formDisplayedColumns.get('phone') as FormControl;
}
get controlNat(): FormControl {
return this.formDisplayedColumns.get('nat') as FormControl;
}
}
<file_sep>/src/app/modules/shared/shared.module.ts
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { DialogConfirmComponent } from 'src/app/components/dialogs/dialog-confirm/dialog-confirm.component';
import { ContentComponent } from 'src/app/components/content/content.component';
import { MatDialogModule } from '@angular/material/dialog';
import { MatSidenavModule } from '@angular/material/sidenav';
import { MatIconModule } from '@angular/material/icon';
import { MatButtonModule } from '@angular/material/button';
import { MatListModule } from '@angular/material/list';
import { MatCardModule } from '@angular/material/card';
const SharedComponents = [DialogConfirmComponent, ContentComponent];
@NgModule({
declarations: [SharedComponents],
entryComponents: [DialogConfirmComponent],
imports: [
CommonModule,
MatDialogModule,
MatSidenavModule,
MatIconModule,
MatButtonModule,
MatListModule,
MatCardModule,
],
exports: [SharedComponents],
})
export class SharedModule {}
<file_sep>/src/app/modules/users/users.module.ts
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ReactiveFormsModule } from '@angular/forms';
import { UsersRoutingModule } from './users-routing.module';
import { UsersComponent } from './users.component';
import { DialogAddSingleComponent } from './dialogs/dialog-add-single/dialog-add-single.component';
import { DialogAddManyComponent } from './dialogs/dialog-add-many/dialog-add-many.component';
import { UsersTableComponent } from './components/users-table/users-table.component';
import { UsersAddComponent } from './components/users-add/users-add.component';
import { DialogsService } from 'src/app/services/dialogs.service';
import { MatDialogModule } from '@angular/material/dialog';
import { MatButtonModule } from '@angular/material/button';
import { MatIconModule } from '@angular/material/icon';
import { MatFormFieldModule } from '@angular/material/form-field';
import { MatInputModule } from '@angular/material/input';
import { MatSelectModule } from '@angular/material/select';
import { MatCheckboxModule } from '@angular/material/checkbox';
import { MatTableModule } from '@angular/material/table';
import { MatPaginatorModule } from '@angular/material/paginator';
import { MatMenuModule } from '@angular/material/menu';
import { MatTooltipModule } from '@angular/material/tooltip';
import { StoreModule } from '@ngrx/store';
import { UsersReducer } from './+state/users.reducers';
import { UsersFacade } from './+state/users.facade';
@NgModule({
declarations: [
UsersComponent,
DialogAddSingleComponent,
DialogAddManyComponent,
UsersTableComponent,
UsersAddComponent,
],
entryComponents: [DialogAddSingleComponent, DialogAddManyComponent],
imports: [
CommonModule,
UsersRoutingModule,
ReactiveFormsModule,
MatDialogModule,
MatButtonModule,
MatIconModule,
MatFormFieldModule,
MatInputModule,
MatSelectModule,
MatCheckboxModule,
MatTableModule,
MatPaginatorModule,
MatMenuModule,
MatTooltipModule,
StoreModule.forFeature('users', UsersReducer),
],
providers: [DialogsService, UsersFacade],
})
export class UsersModule {}
<file_sep>/src/app/resources/interfaces/column-definition.interface.ts
export interface ColumnDefinition {
def: string;
label: string;
isVisible: boolean;
}
<file_sep>/src/app/modules/users/+state/users.effects.ts
import { Injectable } from '@angular/core';
import { Actions, createEffect, ofType } from '@ngrx/effects';
import { map, switchMap } from 'rxjs/operators';
import { AddUsersSuccessPayload } from 'src/app/resources/interfaces/payloads/add-users-success.payload';
import { UsersService } from 'src/app/services/users.service';
import { AddUsers, AddUsersSuccess, UsersActionTypes } from './users.actions';
@Injectable()
export class UsersEffects {
constructor(private actions$: Actions, private usersService: UsersService) {}
getUsers$ = createEffect(() =>
this.actions$.pipe(
ofType(UsersActionTypes.addUsers),
switchMap((action: AddUsers) => {
return this.usersService
.addUsers(action.payload)
.pipe(
map(
(response: AddUsersSuccessPayload) =>
new AddUsersSuccess(response)
)
);
})
)
);
}
<file_sep>/src/app/services/users.service.ts
import { HttpClient, HttpParams } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs';
import { environment } from 'src/environments/environment.prod';
import { AddUsersSuccessPayload } from '../resources/interfaces/payloads/add-users-success.payload';
import { UsersParameters } from '../resources/interfaces/users-parameters.interface';
@Injectable({
providedIn: 'root',
})
export class UsersService {
readonly endpoints = {
user: environment.apiUrl,
};
constructor(private http: HttpClient) {}
addUsers(parameters: any): Observable<AddUsersSuccessPayload> {
let params = new HttpParams({
fromObject: {
...(parameters.amount ? { results: parameters.amount } : null),
...(parameters.gender ? { gender: parameters.gender } : null),
...(parameters.nationality ? { nat: [parameters.nationality] } : null),
},
});
return this.http.get<AddUsersSuccessPayload>(this.endpoints.user, {
params,
});
}
}
<file_sep>/src/app/services/dialogs.service.ts
import { ComponentType } from '@angular/cdk/portal';
import { Injectable } from '@angular/core';
import { MatDialog } from '@angular/material/dialog';
@Injectable({
providedIn: 'root',
})
export class DialogsService {
private dialogsWidth: string = '80%';
private dialogsMaxWidth: string = '400px';
constructor(private dialog: MatDialog) {}
openInfo(component: ComponentType<unknown>): void {
this.dialog.open(component, {
width: this.dialogsWidth,
maxWidth: this.dialogsMaxWidth,
});
}
}
<file_sep>/src/app/modules/users/dialogs/dialog-add-many/dialog-add-many.component.ts
import { Component, OnInit } from '@angular/core';
import {
FormBuilder,
FormControl,
FormGroup,
Validators,
} from '@angular/forms';
import { Gender } from 'src/app/resources/interfaces/gender.interface';
import { Genders } from 'src/app/resources/data/genders.data';
import { Nationality } from 'src/app/resources/interfaces/nationality.interface';
import { Nationalities } from 'src/app/resources/data/nationalities.data';
import { UsersService } from 'src/app/services/users.service';
import { UsersFacade } from '../../+state/users.facade';
@Component({
selector: 'app-dialog-add-many',
templateUrl: './dialog-add-many.component.html',
styleUrls: ['./dialog-add-many.component.scss'],
})
export class DialogAddManyComponent implements OnInit {
formGroup!: FormGroup;
nationalities: Nationality[] = Nationalities;
genders: Gender[] = Genders;
constructor(
private fb: FormBuilder,
private usersService: UsersService,
private usersFacade: UsersFacade
) {}
ngOnInit(): void {
this.initFormGroup();
}
onSubmit(): void {
// console.log(this.formGroup.value);
// this.usersService
// .getUsers(this.formGroup.value)
// .subscribe((users) => console.log(users));
this.usersFacade.addUsers(this.formGroup.value);
}
getNatsNameArray(): string[] {
const natsValues = this.formGroup.controls.nationality.value;
let natsNames: string[] = [];
natsValues.forEach((natValue: string) => {
this.nationalities.forEach((nat: Nationality) => {
if (nat.value === natValue) natsNames.push(nat.name);
});
});
return natsNames;
}
get isRandomGender(): FormControl {
return this.formGroup.get('isRandomGender') as FormControl;
}
get gender(): FormControl {
return this.formGroup.get('gender') as FormControl;
}
get isRandomNat(): FormControl {
return this.formGroup.get('isRandomNat') as FormControl;
}
get nationality(): FormControl {
return this.formGroup.get('nationality') as FormControl;
}
private initFormGroup(): void {
this.formGroup = this.fb.group({
amount: [
2,
[Validators.required, Validators.min(2), Validators.max(100)],
],
isRandomGender: [true],
gender: [{ value: null, disabled: true }],
isRandomNat: [true],
nationality: [{ value: [], disabled: true }],
});
this.isRandomGender.valueChanges.subscribe((checked) => {
this.toggleRandomGender(checked);
});
this.isRandomNat.valueChanges.subscribe((checked) => {
this.toggleRandomNat(checked);
});
}
private toggleRandomGender(checked: boolean): void {
if (!checked) {
this.gender.setValidators([Validators.required]);
this.gender.enable();
} else {
this.gender.setValidators(null);
this.gender.patchValue(null);
this.gender.disable();
}
this.gender.updateValueAndValidity();
}
private toggleRandomNat(checked: boolean): void {
if (!checked) {
this.nationality.setValidators([Validators.required]);
this.nationality.enable();
} else {
this.nationality.setValidators(null);
this.nationality.patchValue([]);
this.nationality.disable();
}
this.nationality.updateValueAndValidity();
}
}
<file_sep>/src/app/modules/users/+state/users.actions.ts
import { HttpErrorResponse } from '@angular/common/http';
import { Action } from '@ngrx/store';
import { AddUsersSuccessPayload } from 'src/app/resources/interfaces/payloads/add-users-success.payload';
import { UsersParameters } from 'src/app/resources/interfaces/users-parameters.interface';
export enum UsersActionTypes {
addUsers = '[Users] Add Users',
addUsersSuccess = '[Users] Add Users Success',
addUsersFail = '[Users] Add Users Fail',
}
export class AddUsers implements Action {
readonly type = UsersActionTypes.addUsers;
constructor(public payload: UsersParameters) {}
}
export class AddUsersSuccess implements Action {
readonly type = UsersActionTypes.addUsersSuccess;
constructor(public payload: AddUsersSuccessPayload) {}
}
export class AddUsersFail implements Action {
readonly type = UsersActionTypes.addUsersFail;
constructor(public payload: HttpErrorResponse) {}
}
export type UsersAction = AddUsers | AddUsersSuccess | AddUsersFail;
<file_sep>/src/app/resources/interfaces/state/users-state.interface.ts
import { User } from '../user.interface';
export interface UsersState {
collection: User[];
}
<file_sep>/src/app/resources/interfaces/users-parameters.interface.ts
export interface UsersParameters {
amount?: number;
gender?: string;
nationality?: string | string[];
isRandomGender?: boolean;
isRandomNat?: boolean;
}
<file_sep>/src/app/resources/data/nationalities.data.ts
import { Nationality } from '../interfaces/nationality.interface';
export const Nationalities: Nationality[] = [
{ name: 'Australia', value: 'au' },
{ name: 'Brazylia', value: 'br' },
{ name: 'Kanada', value: 'ca' },
{ name: 'Szwajcaria', value: 'ch' },
{ name: 'Niemcy', value: 'de' },
{ name: 'Dania', value: 'dk' },
{ name: 'Hiszpania', value: 'es' },
{ name: 'Finlandia', value: 'fi' },
{ name: 'Francja', value: 'fr' },
{ name: 'Wielka Brytania', value: 'gb' },
{ name: 'Irlandia', value: 'ie' },
{ name: 'Iran', value: 'ir' },
{ name: 'Norwegia', value: 'no' },
{ name: 'Holandia', value: 'nl' },
{ name: 'Nowa Zelandia', value: 'nz' },
{ name: 'Turcja', value: 'tr' },
{ name: 'Stany Zjednoczone', value: 'us' },
];
<file_sep>/src/app/modules/users/+state/users.facade.ts
import { Injectable } from '@angular/core';
import { Store } from '@ngrx/store';
import { AppState } from 'src/app/+state/app-state.model';
import { UsersParameters } from 'src/app/resources/interfaces/users-parameters.interface';
import { AddUsers } from './users.actions';
import { selectUsersCollection } from './users.selectors';
@Injectable()
export class UsersFacade {
users$ = this.store.select(selectUsersCollection);
constructor(private store: Store<AppState>) {}
addUsers(params: UsersParameters): void {
this.store.dispatch(new AddUsers(params));
}
}
<file_sep>/src/app/+state/app-state.model.ts
import { UsersState } from '../resources/interfaces/state/users-state.interface';
export interface AppState {
users: UsersState;
}
<file_sep>/src/app/components/dialogs/dialog-confirm/dialog-confirm.component.ts
import { Component, Inject } from '@angular/core';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
@Component({
selector: 'app-dialog-confirm',
templateUrl: './dialog-confirm.component.html',
styleUrls: ['./dialog-confirm.component.scss'],
})
export class DialogConfirmComponent {
title!: string;
message!: string;
constructor(
public dialogRef: MatDialogRef<DialogConfirmComponent>,
@Inject(MAT_DIALOG_DATA) public data: DialogConfirmComponent
) {
this.title = data.title;
this.message = data.message;
}
onConfirm(): void {
this.dialogRef.close(true);
}
onDismiss(): void {
this.dialogRef.close(false);
}
}
<file_sep>/src/app/modules/users/+state/users.selectors.ts
import { createSelector } from '@ngrx/store';
import { AppState } from 'src/app/+state/app-state.model';
import { UsersState } from 'src/app/resources/interfaces/state/users-state.interface';
export const selectUsers = (state: AppState) => state.users;
export const selectUsersCollection = createSelector(
selectUsers,
(state: UsersState) => state.collection
);
<file_sep>/src/app/modules/users/components/users-add/users-add.component.ts
import { Component } from '@angular/core';
import { DialogsService } from 'src/app/services/dialogs.service';
import { DialogAddManyComponent } from '../../dialogs/dialog-add-many/dialog-add-many.component';
import { DialogAddSingleComponent } from '../../dialogs/dialog-add-single/dialog-add-single.component';
@Component({
selector: 'app-users-add',
templateUrl: './users-add.component.html',
styleUrls: ['./users-add.component.scss'],
})
export class UsersAddComponent {
constructor(private dialog: DialogsService) {}
openDialogAddSingle(): void {
this.dialog.openInfo(DialogAddSingleComponent);
}
openDialogAddMany(): void {
this.dialog.openInfo(DialogAddManyComponent);
}
}
|
dd9388db17d80382c00472823f9588fcac0f1b66
|
[
"TypeScript"
] | 21 |
TypeScript
|
JacekGrochowina/Users-Overview
|
ac709c0101f8e2569ff017eb02025bde9aa7b72c
|
cb475a012402a9c22e249af4a803985985a2958d
|
refs/heads/master
|
<repo_name>amosbaranes/finalproject<file_sep>/raz/main/forms.py
from django import forms
from django.contrib.auth.models import User
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email')
class SignupForm(forms.Form):
username = forms.CharField(
max_length=10,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'username'
})
)
first_name = forms.CharField(
max_length=100,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'First name'
})
)
last_name = forms.CharField(
max_length=200,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'Last name'
})
)
email = forms.CharField(
max_length=200,
widget=forms.TextInput({
'class': 'form-control',
'placeholder': 'Email'
})
)
password = forms.CharField(
min_length=6,
max_length=10,
widget=forms.PasswordInput({
'class': 'form-control',
'placeholder': 'Password'
})
)
repeat_password = forms.CharField(
min_length=6,
max_length=10,
widget=forms.PasswordInput({
'class': 'form-control',
'placeholder': 'Repeat password'
})
)
def clean_username(self):
username = self.cleaned_data['username']
validate_unique_user(
error_message='* Username already in use',
username=username)
return username
def clean_email(self):
email = self.cleaned_data['email']
validate_unique_user(
error_message='* Email already in use',
email=email)
return email
def clean_repeat_password(self):
password1 = self.cleaned_data['password']
password2 = self.cleaned_data['repeat_password']
if password1 != password2:
raise forms.ValidationError('* Passwords did not match')
return password1
def validate_unique_user(error_message, **criteria):
existent_user = User.objects.filter(**criteria)
if existent_user:
raise forms.ValidationError(error_message)<file_sep>/raz/main/admin.py
from django.contrib import admin
from .models import TeamMembers
admin.site.register(TeamMembers)<file_sep>/raz/app/views.py
from django.shortcuts import render
def index(request):
args = {'title': 'Home Page for App'}
return render(request, 'app/app_home.html', args)
<file_sep>/raz/main/models.py
from django.db import models
from django.urls import reverse
class TeamMembers(models.Model):
FullName = models.CharField(max_length=50)
IdentityNumber = models.IntegerField()
Image = models.ImageField(upload_to='static/teammembers/')
Bio = models.TextField()
def get_absolute_url(self):
return reverse('main:dteammember', kwargs={'pk': self.pk})
def __str__(self):
return self.FullName
<file_sep>/raz/main/views.py
from django.shortcuts import render
from django.views.decorators.csrf import csrf_protect
from .forms import SignupForm
from django.contrib.auth.models import User
from .models import TeamMembers
from django.views.generic import ListView, DetailView
def index(request):
teammembers = TeamMembers.objects.all()
args = {'title': 'Home Page', 'teammembers': teammembers }
return render(request, 'main/home.html', args)
@csrf_protect
def signup(request):
if request.method == 'POST':
form = SignupForm(request.POST)
if form.is_valid():
user = User.objects.create_user(
username=form.cleaned_data['username'],
first_name=form.cleaned_data['first_name'],
last_name=form.cleaned_data['last_name'],
email=form.cleaned_data['email'],
password=form.cleaned_data['password']
)
user.save()
return render(request, 'registration/create_account_success.html', {})
else:
form = SignupForm()
return render(request, 'registration/signup.html', {'form': form})
###########
class TeamMemberView(DetailView):
model = TeamMembers
fields = ['FullName', 'IdentityNumber', 'Image', 'Bio']
template_name = 'teammembers_detail.html'
<file_sep>/README.MD
# My Project
This is a project of Amos (055083729)
....<file_sep>/raz/templates/Registration/login.html
{% extends 'base.html' %}
{% block head %}
<title>Login</title>
{% endblock %}
{% block body %}
<div>
<form action="{% url 'login' %}" method="post" class="form-signin">
{% csrf_token %}
{{ form.as_p }}
<input class="btn btn-lg btn-primary btn-block"
type="Submit" value="Login">
</form>
</div>
{% endblock %}<file_sep>/raz/main/urls.py
from django.conf.urls import url
from .views import index, signup, TeamMemberView
app_name = "main"
urlpatterns = [
url(r'^$', index, name='index'),
url(r'^signup/', signup, name='signup'),
url(r'^dteammember/(?P<pk>\d+)/', TeamMemberView.as_view(), name='dteammember'),
]
<file_sep>/raz/elearning/views.py
# https://micropyramid.com/blog/how-to-use-nested-formsets-in-django/
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from django.urls import reverse
from django.db import transaction
from django.views.generic import CreateView, DetailView, ListView
from django.shortcuts import render, redirect
from .models import (Course, Section, UserAnswer, Question)
#from .forms import SectionForm
def index(request):
context = {
'title': 'E-Learning',
}
return render(request, 'elearning/index.html', context)
class CourseDetailView(DetailView):
model = Course
course_detail = CourseDetailView.as_view()
class CourseListView(ListView):
model = Course
queryset = Course.objects.prefetch_related('students')
course_list = CourseListView.as_view()
class CourseAddView(CreateView):
model = Course
fields = '__all__'
course_add = CourseAddView.as_view()
# def section_add(request):
# if request.POST:
# form = SectionForm(request.POST)
# if form.is_valid():
# new_section = form.save()
# return HttpResponseRedirect(new_section.get_absolute_url())
# else:
# form = SectionForm()
# return render(request, 'elearning/section_form.html', {
# 'form': form,
# })
def do_section(request, section_id):
section = Section.objects.get(id=section_id)
return render(request, 'elearning/do_section.html', {
'section': section,
})
def do_test(request, section_id):
if not request.user.is_authenticated:
raise PermissionDenied
section = Section.objects.get(id=section_id)
if request.method == 'POST':
data = {}
for key, value in request.POST.items():
if key == 'csrfmiddlewaretoken':
continue
# {'question-1': '2'}
question_id = key.split('-')[1]
answer_id = request.POST.get(key)
data[question_id] = answer_id
perform_test(request.user, data, section)
return redirect(reverse('elearning:show_results', args=(section.id,)))
return render(request, 'elearning/do_test.html', {
'section': section,
})
def perform_test(user, data, section):
with transaction.atomic():
UserAnswer.objects.filter(user=user,
question__section=section).delete()
for question_id, answer_id in data.items():
question = Question.objects.get(id=question_id)
answer_id = int(answer_id)
if answer_id not in question.answer_set.values_list('id', flat=True):
raise SuspiciousOperation('Answer is not valid for this question')
user_answer = UserAnswer.objects.create(
user=user,
question=question,
answer_id=answer_id,
)
def calculate_score(user, section):
questions = Question.objects.filter(section=section)
correct_answers = UserAnswer.objects.filter(
user=user,
question__section=section,
answer__correct=True
)
return (correct_answers.count() / questions.count()) * 100
def show_results(request, section_id):
if not request.user.is_authenticated:
raise PermissionDenied
section = Section.objects.get(id=section_id)
return render(request, 'elearning/show_results.html', {
'section': section,
'score': calculate_score(request.user, section)
})
def show_results_all_sections(request):
if not request.user.is_authenticated:
raise PermissionDenied
courses = Course.objects.all()
scores = {}
for c in courses:
cc = []
for s in c.section_set.all():
score = calculate_score(request.user, s)
ss = s.title + ' ' + str(score)
cc.append(ss)
scores[c.name] = cc
return render(request, 'elearning/show_results_all_sections.html', {
# 'courses': courses,
'scores': scores
})
<file_sep>/raz/main/migrations/0001_initial.py
# Generated by Django 2.1.7 on 2019-02-28 07:59
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='TeamMembers',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('FullName', models.CharField(max_length=50)),
('IdentityNumber', models.IntegerField()),
('Image', models.ImageField(upload_to='static/teammembers/')),
('Bio', models.TextField()),
],
),
]
<file_sep>/raz/elearning/urls.py
from django.conf.urls import url
from .views import (course_detail, course_add, do_section, do_test,
show_results, course_list, index, show_results_all_sections)
app_name = "elearning"
urlpatterns = [
url(r'^$', index, name='index'),
url(r'^course_detail/(?P<pk>\d+)/$', course_detail, name='course_detail'),
url(r'^course_list/$', course_list, name='course_list'),
url(r'^course_add/$', course_add, name='course_add'),
url(r'^section/(?P<section_id>\d+)/$', do_section, name='do_section'),
url(r'^section/(?P<section_id>\d+)/test/$', do_test, name='do_test'),
url(r'^section/(?P<section_id>\d+)/results/$', show_results, name='show_results'),
url(r'^section/results_all/$', show_results_all_sections, name='show_results_all_sections'),
]
|
8375eb81eff1a6f62a1b8731dacade97c19a0d51
|
[
"Markdown",
"Python",
"HTML"
] | 11 |
Python
|
amosbaranes/finalproject
|
0e6afdadb3d690557cd0be41381295262b7cb757
|
664ba402544e454f0ea995588d8b939cd612a1e7
|
refs/heads/master
|
<file_sep>#!/bin/bash
apt-get install -y nginx
rm -rf /var/www/html/*
|
9130170cfb2fb8ba93b9188029c7ebc57c6ab96a
|
[
"Shell"
] | 1 |
Shell
|
aseefahmed/aws-codepipeline-application-s3
|
7e6a4d25ab2be11e091a9761a982d232deba2603
|
114ad08406c743711f4d6e5eff2ea5c6c94a0049
|
refs/heads/master
|
<repo_name>jloutz/badmuthatagga<file_sep>/README.md
# badmuthatagga
Simple Annotation Tool for NER. Built with python
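
A minimal way to launch the annotator (a sketch mirroring the `__main__` block of `tagga/tagga.py`; it assumes Python 3 with tkinter available and the `jsonpickle` dependency installed):

```python
# Hypothetical quickstart -- mirrors the __main__ block in tagga/tagga.py.
from tagga.tagga import Tagga

app = Tagga()    # opens the annotation window
app.mainloop()   # then use the Import/Export and Project menus to load documents
```

Imported JSON is expected to be a list of objects whose text lives under the `content` key (see `TaggaConf.json_text_key`).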
<file_sep>/tagga/tagga.py
import datetime
import tkinter as tk
import tkinter.font as tkfont
from tkinter.filedialog import askopenfilename, asksaveasfilename
import json
import jsonpickle
import uuid
import os
import re
class TaggaDoc:
def __init__(self, doc: dict, textkey: str = "text"):
if textkey not in doc:
raise Exception("Doc must be dict with text key ", textkey)
self.text = "\r\n".join([l.strip() for l in doc[textkey].splitlines() if l.strip()])
## dict of tuples of form (start,stop,name)
if "entities" in doc:
self.entities = doc["entities"]
else:
self.entities = dict()
if "id" in doc:
self.id = doc["id"]
else:
self.id = uuid.uuid4()
def add_entity_annotaton(self, start: int, end: int, name: str):
key = self.make_key(start, end, name)
text = self.text[start:end]
annot = (start, end, name, text)
self.entities[key] = annot
print("Added ", str(annot))
def remove_entity_annotation(self, start: int, end: int, name: str):
key = self.make_key(start, end, name)
del self.entities[key]
@classmethod
def make_key(cls, start: int, end: int, name: str):
return str(start) + "_" + str(end) + "_" + name
class TaggaConf:
def __init__(self):
## config
self.json_text_key = "content"
self.tagga_home = os.path.join("C:\\Anwendungen", "badmuthatagga")
self.tag_config = {"SKILL": dict(foreground='green', borderwidth=2, relief=tk.RIDGE),
"activity": dict(foreground='blue', background='gray')}
class TaggaProject:
def __init__(self, json=None, tagga_conf=TaggaConf()):
self.tagga_conf = tagga_conf
## creation timestamp
self.ts = str(datetime.datetime.now()).split(".")[0].replace(" ", "_").replace(":", "_")
if json is not None:
## new project from json
self.tagga_docs = [TaggaDoc(doc, textkey=self.tagga_conf.json_text_key) for doc in json]
else:
## new project
self.tagga_docs = list()
self.tagga_conf = tagga_conf
self.autotagga = AutoTagga()
@classmethod
def _load(cls, path):
with open(path) as f:
project_raw_json = f.read()
proj = jsonpickle.decode(project_raw_json)
return proj
def save(self, path=None):
if path is None:
path = os.path.join(self.tagga_conf.tagga_home, "tagga_project-" + self.ts + ".tagga")
project_json = jsonpickle.dumps(self)
with open(path, 'w+') as f:
f.write(project_json)
print("Wrote project to ", path)
return path
class AutoTagga:
def __init__(self):
## set of tuples of form (<entityvalue>,<entityname>) e.g. ("Teamplayer","SKILL")
self.vocab = set()
def add_to_vocab(self, entity_val, entity_type):
self.vocab.add((entity_val, entity_type))
def remove_from_vocab(self, entity_val, entity_type):
## TODO check this global removing behavior!
## for now, removing once from one doc removes from entire vocab
## until it is added again - this can mess stuff up
## TODO perhaps doc-level blacklist?
self.vocab.remove((entity_val, entity_type))
def autotag(self, doc: TaggaDoc):
new_entities = list()
for entry in list(self.vocab):
# escape each vocab entry so it is matched literally rather than as a regex pattern
matched_entities = [(m.start(), m.end(), entry[1], m.group(0)) for m in re.finditer(re.escape(entry[0]), doc.text)]
new_entities.extend([(ent[0], ent[1], ent[2]) for ent in matched_entities if
not TaggaDoc.make_key(ent[0], ent[1], ent[2]) in doc.entities])
print("Autotagger added {} new entities from vocab".format(len(new_entities)))
for ent in new_entities:
doc.add_entity_annotaton(ent[0], ent[1], ent[2])
class Tagga(tk.Tk):
def __init__(self, config=TaggaConf()):
tk.Tk.__init__(self)
self.title("Bad Mutha Tagga")
self.tagga_config = config
os.makedirs(self.tagga_config.tagga_home, exist_ok=True)
self.active_project:TaggaProject = None
self.active_project_path = None
## Toolbar
self.toolbar = tk.Frame(self)
self.toolbar.pack(side="top", fill="x")
menubar = tk.Menu(self.toolbar)
filemenu = tk.Menu(menubar, tearoff=0)
filemenu.add_command(label="Import JSON", command=self.import_json)
filemenu.add_command(label="Export Annotations", command=None)
projmenu = tk.Menu(menubar, tearoff=0)
projmenu.add_command(label="Load Tagga Project (Ctrl-l)", command=self.load_tagga_project, accelerator="Ctrl+L")
projmenu.add_command(label="Save Tagga Project (Ctrl-s)", command=self.save_tagga_project, accelerator="Ctrl+S")
projmenu.add_command(label="Save Tagga Project As (Ctrl-a)", command=self.save_tagga_project_as,
accelerator="Ctrl+A")
menubar.add_cascade(label="Import/Export", menu=filemenu)
menubar.add_cascade(label="Project", menu=projmenu)
self.config(menu=menubar)
## Button bar
# buttonbar = tk.Frame(self,bd=1,relief="sunken",background="white",padx=20)
# img = tk.Image(file="C:\Projects/badmuddatagga/tagga/resources\ml_train.png")
# train_button = tk.Button(buttonbar,text="Train",image=img,command=self.train_ml_annotator)
# train_button.pack(side="right")
# buttonbar.pack(fill="x")
## Main part of the GUI
main_pane = tk.PanedWindow(self)
main_pane.pack(fill=tk.BOTH, expand=1)
# I'll use a frame to contain the widget and
# scrollbar; it looks a little nicer that way...
text_frame = tk.Frame(borderwidth=1, relief="sunken")
self.text = tk.Text(wrap="word", background="white",
borderwidth=0, highlightthickness=0)
asb = tk.Scrollbar(orient="vertical", borderwidth=1,
command=self.text.yview)
self.text.configure(yscrollcommand=asb.set)
asb.pack(in_=text_frame, side="right", fill="y", expand=False)
self.text.pack(in_=text_frame, side="left", fill="both", expand=True)
self.document_listbox = tk.Listbox(exportselection=False)
self.document_listbox.bind('<<ListboxSelect>>', self.on_content_select)
self.text.configure(yscrollcommand=asb.set)
asb.pack(in_=text_frame, side="right", fill="y", expand=False)
main_pane.add(self.document_listbox)
main_pane.add(text_frame)
main_pane.pack()
self.bind_all("<Control-s>", self.save_tagga_project)
self.bind_all("<Control-a>", self.save_tagga_project_as)
self.bind_all("<Control-l>", self.load_tagga_project)
# tagga tags
self.tagga_tags = self.tagga_config.tag_config
for name, conf in self.tagga_tags.items():
self.text.tag_configure(name, **conf)
# set up a binding to tag selected text.
self.text.bind("<Control-Shift-:>", self.tag_add)
self.text.bind("<Control-Shift-_>", self.tag_remove)
def train_ml_annotator(self,event):
print("training...")
def tag_add(self, event):
## handles tag event from gui
start = int(self.text.count(1.0, tk.SEL_FIRST)[0])
end = int(self.text.count(1.0, tk.SEL_LAST)[0])
print("Start: {}, End: {}".format(start, end))
# add tag to doc backing object
self.current_doc.add_entity_annotaton(start, end, "SKILL")
# add tag to autotagger vocab
text = self.text.selection_get()
self.active_project.autotagga.add_to_vocab(text,"SKILL")
#visualize
self._tag_add(start, end)
def _tag_add(self, start, end, name="SKILL"):
## visualizes the tag in text area
print("{} {} {}".format(start, end, name))
index1 = "1.0+" + str(start) + "c"
index2 = "1.0+" + str(end) + "c"
self.text.tag_add(name, index1, index2)
def tag_remove(self, event):
print(event)
start = int(self.text.count(1.0, tk.SEL_FIRST)[0])
end = int(self.text.count(1.0, tk.SEL_LAST)[0])
# remove tag from doc backing object
self.current_doc.remove_entity_annotation(start, end, "SKILL")
# remove from autotag vocab
text = self.text.selection_get()
self.active_project.autotagga.remove_from_vocab(text, "SKILL")
self.text.tag_remove("SKILL", tk.SEL_FIRST, tk.SEL_LAST)
def autotag(self,doc):
self.active_project.autotagga.autotag(doc)
def on_content_select(self, event):
print(str(self.document_listbox.curselection()))
if not self.document_listbox.curselection():
return
index = int(self.document_listbox.curselection()[0])
print("selected index: ", index)
self.current_doc: TaggaDoc = self.active_project.tagga_docs[index]
## Autotag! (this makes it a bad mutha tagga
self.autotag(self.current_doc)
## update text in main window
self.text.delete(1.0, tk.END)
self.text.insert(1.0, self.current_doc.text)
## visualize tags in window
for key in self.current_doc.entities:
ent = self.current_doc.entities[key]
print(str(ent))
self._tag_add(ent[0], ent[1], ent[2])
def load_tagga_project(self, event=None):
fname = askopenfilename(filetypes=[("Tagga Project", "*.tagga")])
self.active_project = TaggaProject._load(fname)
self.active_project_path = fname
self.init_content_panel()
def save_tagga_project(self, event=None):
return self.active_project.save(self.active_project_path)
def save_tagga_project_as(self, event=None):
fname = asksaveasfilename(filetypes=[("Tagga Project", "*.tagga")])
self.active_project_path = self.active_project.save(path=fname)
def import_json(self):
fname = askopenfilename(filetypes=[("JSON Files", "*.json")])
if fname:
with open(fname) as f:
docs = json.load(f)
if not isinstance(docs, list):
raise Exception("bad json.. bad!")
if len(docs) == 0:
raise Exception("no docs to load..")
## save active project if exists
if self.active_project is not None:
self.active_project.save(self.active_project_path)
## create new project with json docs and save right away
self.active_project = TaggaProject(json=docs, tagga_conf=self.tagga_config)
self.active_project_path = self.active_project.save()
self.init_content_panel()
def init_content_panel(self):
# clear any previously listed documents (Listbox items are not child widgets,
# so winfo_children() would not remove them)
self.document_listbox.delete(0, tk.END)
for doc in self.active_project.tagga_docs:
self.document_listbox.insert(tk.END, doc.text.split()[:3])
if __name__ == "__main__":
app = Tagga()
app.mainloop()
<file_sep>/test_data/dataturks_to_spacy.py
############################################ NOTE ########################################################
#
# Creates NER training data in Spacy format from JSON downloaded from Dataturks.
#
# Outputs the Spacy training data which can be used for Spacy training.
#
############################################################################################################
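# Note (a sketch inferred from the conversion code below, not verified against a
# real Dataturks export): each returned training example has the shape
#   (text, {"entities": [(start, end, label), ...]})
# where `end` is exclusive, matching spaCy's NER training format.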
import json
import logging
def load_dataturks_resumes(path="C:/Projects/badmuddatagga/test_data/Entity Recognition in Resumes.json"):
with open(path, 'r', encoding="utf-8") as f:
lines = f.readlines()
datalist = []
for line in lines:
data = json.loads(line)
datalist.append(data)
return datalist
def convert_dataturks_to_spacy(dataturks_JSON_FilePath):
try:
training_data = []
lines=[]
with open(dataturks_JSON_FilePath, 'r',encoding="utf-8") as f:
lines = f.readlines()
for line in lines:
data = json.loads(line)
text = data['content']
entities = []
if data['annotation'] is None:
continue
for annotation in data['annotation']:
#only a single point in text annotation.
point = annotation['points'][0]
labels = annotation['label']
# handle both list of labels or a single label.
if not isinstance(labels, list):
labels = [labels]
for label in labels:
#dataturks indices are both inclusive [start, end] but spacy is not [start, end)
entities.append((point['start'], point['end'] + 1 ,label))
training_data.append((text, {"entities" : entities}))
return training_data
except Exception as e:
logging.exception("Unable to process " + dataturks_JSON_FilePath + "\n" + "error = " + str(e))
return None<file_sep>/tagga/annotators.py
import random
from pathlib import Path
import spacy
from spacy.util import minibatch, compounding
class SpacyAnnotator():
def __init__(self,model=None,labels=()):
self.is_blank = False
# load spaCy model
if model is not None:
self.nlp = spacy.load(model)
print("Loaded existing model: ",model)
else:
self.nlp = spacy.blank("en") # create blank Language class
self.is_blank=True
ner = self.nlp.create_pipe("ner")
self.nlp.add_pipe(ner, last=True)
print("Created blank 'en' model")
ner = self.nlp.get_pipe("ner")
for label in labels:
ner.add_label(label)
def train(self,X,n_iter):
# get names of other pipes to disable them during training
other_pipes = [pipe for pipe in self.nlp.pipe_names if pipe != "ner"]
with self.nlp.disable_pipes(*other_pipes):
# only train NER
# reset and initialize the weights randomly – but only if we're
# training a new model
if self.is_blank:
self.nlp.vocab.vectors.name = 'spacy_pretrained_vectors'
self.nlp.begin_training()
for itn in range(n_iter):
random.shuffle(X)
losses = {}
# batch up the examples using spaCy's minibatch
batches = minibatch(X, size=compounding(4.0, 32.0, 1.001))
for batch in batches:
texts, annotations = zip(*batch)
self.nlp.update(
texts, # batch of texts
annotations, # batch of annotations
drop=0.5, # dropout - make it harder to memorise data
losses=losses,
)
print("Losses", losses)
def test(self,X):
import numpy as np
# test on test data
scores = list()
for doc in X:
spacy_doc = self.nlp(doc[0])
true = [(ent.text, ent.label_) for ent in spacy_doc.ents]
print("FOUND: ",true)
actual = []
for x in doc[1]["entities"]:
actual_entity = (doc[0][x[0]:x[1]],x[2])
actual.append(actual_entity)
print("ACTUAL:",actual)
score = len([x for x in actual if x in true])/len(true)
print("Acuracy: ",score)
scores.append(score)
print("Final Score: ",np.mean(scores))
def eval(self,X):
# eval a list of texts
for doc in X:
spacy_doc = self.nlp(doc)
print("Entities", [(ent.text, ent.label_) for ent in spacy_doc.ents])
#print("Tokens", [(t.text, t.ent_type_, t.ent_iob) for t in spacy_doc])
def persist(self, output_dir):
# save model to output directory
if output_dir is not None:
output_dir = Path(output_dir)
if not output_dir.exists():
output_dir.mkdir()
self.nlp.to_disk(output_dir)
print("Saved model to", output_dir)
def load(self,path):
# test the saved model
print("Loading from", path)
self.nlp = spacy.load(path)
def spacy_test_data():
import test_data.dataturks_to_spacy as dtconv
docs = dtconv.convert_dataturks_to_spacy("C:\Projects/badmuddatagga/test_data\Entity Recognition in Resumes.json")
def convert(x):
return (x[0],x[1],x[2].upper().replace(" ","_"))
labels = set()
for doc in docs:
new_entities = []
for entity in doc[1]['entities']:
new_ent = convert(entity)
new_entities.append(new_ent)
labels.add(new_ent[2])
doc[1]["entities"]=new_entities
return docs,labels
def spacy_ner_test():
import random
spacy_resumes,labels = spacy_test_data()
random.shuffle(spacy_resumes)
train_data=spacy_resumes[:200]
annotator = SpacyAnnotator(labels=labels)
annotator.train(train_data,5)
test_data = spacy_resumes[::-1][:20]
annotator.test(test_data)
spacy_ner_test()
|
e89cdd3831afde1dcf70783e983ec2d9417912d2
|
[
"Markdown",
"Python"
] | 4 |
Markdown
|
jloutz/badmuthatagga
|
7b205129ecd770a94cdda8e491528e57b16e5f1b
|
7b997b973ad9550c440e01e4e498885b13057627
|
refs/heads/master
|
<file_sep>using Windows.UI.Xaml.Controls;
using Newtonsoft.Json;
using System.Net.Http;
using Windows.UI.Xaml.Navigation;
namespace aulauwp
{
public sealed partial class EditarPessoa : Page
{
private Pessoa pessoa = new Pessoa();
public EditarPessoa()
{
this.InitializeComponent();
}
protected override void OnNavigatedTo(NavigationEventArgs e)
{
pessoa = e.Parameter as Pessoa;
pessoaNomeTextBox.Text = pessoa.Nome;
pessoaSobreNomeTextBox.Text = pessoa.Sobrenome;
}
private async void alterarButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
{
var client = new HttpClient();
pessoa.Nome = pessoaNomeTextBox.Text;
pessoa.Sobrenome = pessoaSobreNomeTextBox.Text;
var pessoaJson = JsonConvert.SerializeObject(pessoa);
var HttpContent = new StringContent(pessoaJson);
HttpContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
await client.PutAsync("http://fcvwebapi.azurewebsites.net/api/Pessoas" + pessoa.Id, HttpContent);
Frame.GoBack();
}
private async void excluirButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
{
var client = new HttpClient();
await client.DeleteAsync("http://fcvwebapi.azurewebsites.net/api/Pessoas" + pessoa.Id);
Frame.GoBack();
}
}
}
<file_sep>using Newtonsoft.Json;
using System.Collections.Generic;
using System.Net.Http;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;
namespace aulauwp
{
public sealed partial class MainPage : Page
{
public MainPage()
{
this.InitializeComponent();
}
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
HttpClient client = new HttpClient();
var RespostaJson = await client.GetStringAsync
("http://fcvwebapi.azurewebsites.net/api/Pessoas");
var pessoasResultado = JsonConvert.DeserializeObject<List<Pessoa>>
(RespostaJson);
pessoasLista.ItemsSource = pessoasResultado;
}
private void adicionarButton_Click(object sender,
Windows.UI.Xaml.RoutedEventArgs e)
{
Frame.Navigate(typeof(AdicionarPessoa));
}
}
}
<file_sep>using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Newtonsoft.Json;
using System.Net.Http;
namespace aulauwp
{
public sealed partial class AdicionarPessoa : Page
{
public AdicionarPessoa()
{
this.InitializeComponent();
}
private async void adicionarButton_Click(object sender, RoutedEventArgs e)
{
var pessoa = new Pessoa()
{
Nome = pessoaNomeTextBox.Text,
Sobrenome = pessoaSobreNomeTextBox.Text
};
var pessoaJson = JsonConvert.SerializeObject(pessoa);
var client = new HttpClient();
var HttpContent = new StringContent(pessoaJson);
HttpContent.Headers.ContentType =
new System.Net.Http.Headers.MediaTypeHeaderValue
("application/json");
await client.PostAsync
("http://fcvwebapi.azurewebsites.net/api/Pessoas", HttpContent);
}
private void cancelarButton_Click(object sender, RoutedEventArgs e)
{
Frame.GoBack();
}
}
}
|
0e430f0a1f2d81b642e863c05c97b2d1e11f908c
|
[
"C#"
] | 3 |
C#
|
williamyamamoto/UWP
|
93ff58646889eac09ae8c8fe45ae499602798017
|
31a88ac8d99a81700c7136c7f1ec34434f7b37c9
|
refs/heads/master
|
<repo_name>AnfernyReloded/Lab3<file_sep>/Assignment 1 Descision Making/Program.cs
using System;
namespace Assignment_1_Descision_Making
{
class Program
{
static void Main(string[] args)
{ //Application prompts the user for an integer between 1 - 100 (1 point)
//Application displays the associated result based on the integer value range entered
//Use if/else statements to take different actions depending on the user input
//if the integer is odd, print the number entered and "odd"
//if the integer entered is even and in the inclusive range 2 to 25, print "even and less than 25"
//if the integer entered is even and in the inclusive range of 26 to 60, print "even"
//if the integer entered is even and greater than 60, print "even"
//if the integer is odd and greater than 60, print the number entered and "odd"
//remember to answer the LAB question Anthony
//Bonus Points; Include a set of parameters so that the program ends officially
//Ask for user name and use it in the outputs
Console.WriteLine("Hello, what is your name?");
string userName = (Console.ReadLine());
while (true)
{
Console.WriteLine(userName + " please enter a number between 1 - 100");
int userValue = int.Parse(Console.ReadLine());
int leftOvers = userValue % 2;
string choice;
if (leftOvers == 1)
{
Console.WriteLine(userName + " your number " + userValue + " and it is odd");
}
else if (userValue >= 2 && userValue <= 25 && leftOvers == 0)
{
Console.WriteLine(userName + " your number is even and less than 25");
}
else if (userValue >= 26 && userValue <= 60 && leftOvers == 0)
{
Console.WriteLine(userName + " your number is even");
}
else if (userValue >= 60 && leftOvers == 0)
{
Console.WriteLine(userName + " your number is even");
}
else if (userValue >= 60 && leftOvers == 1)
{
Console.WriteLine(userName + " your number is " + userValue + " and it is odd");
}
Console.WriteLine("Would you like to input another number " + userName + "? Y or N");
choice = Console.ReadLine().ToLower(); //for input validation
if (choice == "n")
{
Console.WriteLine("Goodbye " + userName +" thank you for playing!");
break;
}
}
}
}
}
|
fc84d7fe0e04275c528e2420aec4e491b2b19d4f
|
[
"C#"
] | 1 |
C#
|
AnfernyReloded/Lab3
|
cf3c56e71d3d0e10a7cea2b51a262d2da3dcd508
|
4742d336b5f03cb5fa79a343e887eb0b516b9865
|
refs/heads/master
|
<file_sep>import { Duck } from "./Duck";
import { FlyWithWings } from "../behaviours/Fly/FlyWithWings";
import { Quack } from "../behaviours/Quack/Quack";
export class MallardDuck extends Duck {
constructor() {
super();
this.quackBehaviour = new Quack();
this.flyBehaviour = new FlyWithWings();
}
public display(): void {
console.log("I`m a real Mallard duck");
}
}<file_sep>import { Command } from "./Command";
export class MacroCommand implements Command {
commands: Command[];
public constructor(commands: Command[]) {
this.commands = commands;
}
public execute() {
for (let i = 0; i < this.commands.length; i++) {
this.commands[i].execute();
}
}
public undo() {
for (let i = this.commands.length - 1; i >= 0; i--) {
this.commands[i].undo();
}
}
}<file_sep>import { FlyBehaviour } from "./FlyBehaviour";
export class FlyNoWay implements FlyBehaviour {
public fly(): void {
console.log("I`m can`t fly");
}
}<file_sep>import { Cheese } from "./Cheese";
export class ParmesanCheese implements Cheese {
public toString(): string {
return "Shredded Parmesan";
}
}<file_sep>import CurrentConditionsDisplay from "./CurrentConditionsDisplay";
import WeatherData from "./WeatherData";
export const ExecuteObserverPattern = () => {
console.log('');
console.log('Observer pattern:');
let weatherData: WeatherData = new WeatherData();
let currentConditionDisplay: CurrentConditionsDisplay = new CurrentConditionsDisplay(weatherData);
weatherData.setMeasurements(80, 65, 30.4);
weatherData.setMeasurements(82, 70, 29.2);
weatherData.setMeasurements(78, 90, 29.2);
currentConditionDisplay.display();
}<file_sep>import { Cheese } from "./Cheese";
export class MozzarellaCheese implements Cheese {
public toString(): string {
return "Shredded Mozzarella";
}
}<file_sep>import { PizzaStore } from "../FactoryMethodPattern/PizzaStore/PizzaStore";
import { Pizza } from "../FactoryMethodPattern/Pizzas/Pizza";
import { NYPizzaStore } from "../FactoryMethodPattern/PizzaStore/NYPizzaStore";
import { ChicagoPizzaStore } from "../FactoryMethodPattern/PizzaStore/ChicagoPizzaStore";
export const ExecuteAbstractFactoryPattern = () => {
console.log('');
console.log('Abstract factory pattern:');
let nyStore: PizzaStore = new NYPizzaStore();
let chicagoStore: PizzaStore = new ChicagoPizzaStore();
let pizza: Pizza = nyStore.orderPizza("cheese");
console.info("Ethan ordered a " + pizza + "\n");
pizza = chicagoStore.orderPizza("cheese");
console.info("Joel ordered a " + pizza + "\n");
pizza = nyStore.orderPizza("clam");
console.info("Ethan ordered a " + pizza + "\n");
pizza = chicagoStore.orderPizza("clam");
console.info("Joel ordered a " + pizza + "\n");
pizza = nyStore.orderPizza("pepperoni");
console.info("Ethan ordered a " + pizza + "\n");
pizza = chicagoStore.orderPizza("pepperoni");
console.info("Joel ordered a " + pizza + "\n");
pizza = nyStore.orderPizza("veggie");
console.info("Ethan ordered a " + pizza + "\n");
pizza = chicagoStore.orderPizza("veggie");
console.info("Joel ordered a " + pizza + "\n");
}<file_sep>import { Dough } from "../Ingredients/dough/Dough";
import { Sauce } from "../Ingredients/sauce/Sauce";
import { Veggies } from "../Ingredients/veggies/Veggies";
import { Cheese } from "../Ingredients/cheese/Cheese";
import { Pepperoni } from "../Ingredients/pepperoni/Pepperoni";
import { Clams } from "../Ingredients/clams/Clams";
export abstract class Pizza {
name!: string;
dough!: Dough;
sauce!: Sauce;
veggies!: Veggies[];
cheese!: Cheese;
pepperoni!: Pepperoni;
clam!: Clams;
public abstract prepare(): any;
public bake() {
console.info("Bake for 25 minutes at 350");
}
public cut() {
console.info("Cutting the pizza into diagonal slices");
}
public box() {
console.info("Place pizza in official PizzaStore box");
}
public getName(): string {
return this.name;
}
public setName(name: string) {
this.name = name;
}
public toString(): string {
// String.prototype.concat returns a new string, so the original calls discarded
// their results; accumulate with += instead.
let result = `
---- ${this.name} ----
`;
if (this.dough != null) {
result += this.dough.toString() + "\n";
}
if (this.sauce != null) {
result += this.sauce.toString() + "\n";
}
if (this.cheese != null) {
result += this.cheese.toString() + "\n";
}
if (this.veggies != null) {
for (let i = 0; i < this.veggies.length; i++) {
result += this.veggies[i].toString();
if (i < this.veggies.length - 1) {
result += ", ";
}
}
}
if (this.clam != null) {
result += this.clam.toString() + "\n";
}
if (this.pepperoni != null) {
result += this.pepperoni.toString() + "\n";
}
return result;
}
}<file_sep>import { Singleton } from "./Singleton";
import { CoolerSingleton } from "./CoolerSingleton";
import { HotterSingleton } from "./HotterSingleton";
export const ExecuteClassicSingletonPattern = () => {
let foo: Singleton = CoolerSingleton.getInstance();
let bar: Singleton = HotterSingleton.getInstance();
console.info(foo);
console.info(bar);
}<file_sep>export class Singleton {
static uniqueInstance: Singleton;
public static getInstance(): Singleton {
if (Singleton.uniqueInstance == null) {
Singleton.uniqueInstance = new Singleton();
}
return Singleton.uniqueInstance;
}
}<file_sep>import { Veggies } from "./Veggies";
export class RedPepper implements Veggies {
public toString(): string {
return "Red Pepper";
}
}<file_sep>import { Sauce } from "./Sauce";
export class MarinaraSauce implements Sauce {
public toString(): string {
return "Marinara Sauce";
}
}<file_sep>import { Veggies } from "./Veggies";
export class Onion implements Veggies {
public toString(): string {
return "Onion";
}
}<file_sep>import { Veggies } from "./Veggies";
export class Eggplant implements Veggies {
public toString(): string {
return "Eggplant";
}
}<file_sep>import { Command } from "../Command";
import { TV } from "./TV";
export class TVOffCommand implements Command {
tv: TV;
public constructor(tv: TV) {
this.tv = tv;
}
public execute() {
this.tv.off();
}
public undo() {
this.tv.on();
}
}<file_sep>import { Beverage } from "./models/Beverage";
import { Expresso } from "./beverages/Espresso";
import { HouseBlend } from "./beverages/HouseBlend";
import { Mocha } from "./beverages/Mocha";
import { Whip } from "./beverages/Whip";
import { Soy } from "./beverages/Soy";
export const ExecuteDecoratorPattern = () => {
console.log('');
console.log('Decorator pattern:');
const beverage: Beverage = new Expresso();
console.log(beverage.getDescription() + " $" + beverage.cost());
let beverage1: Beverage;
beverage1 = new HouseBlend();
beverage1 = new Mocha(beverage1);
beverage1 = new Whip(beverage1);
beverage1 = new Whip(beverage1);
console.log(beverage1.getDescription() + " $" + beverage1.cost());
let beverage2: Beverage = new Expresso();
beverage2.setSize(1);
// beverage2 = new Mocha(beverage2);
beverage2 = new Soy(beverage2);
console.log(beverage2.getDescription() + " $" + beverage2.cost());
}<file_sep>import { Clams } from "./Clams";
export class FreshClams implements Clams {
public toString(): string {
return "Fresh Clams from Long Island Sound";
}
}<file_sep>import { NoCommand } from "./NoCommand";
import { Command } from "./Command";
export class RemoteControl {
onCommands: Command[];
offCommands: Command[];
undoCommand: Command;
public constructor() {
// the slot arrays must exist before they are filled with the null-object command
this.onCommands = [];
this.offCommands = [];
const noCommand: Command = new NoCommand();
for (let i: number = 0; i < 7; i++) {
this.onCommands[i] = noCommand;
this.offCommands[i] = noCommand;
}
this.undoCommand = noCommand;
}
public setCommand(slot: number, onCommand: Command, offCommand: Command) {
this.onCommands[slot] = onCommand;
this.offCommands[slot] = offCommand;
}
public onButtonWasPushed(slot: number) {
this.onCommands[slot].execute();
this.undoCommand = this.onCommands[slot];
}
public offButtonWasPushed(slot: number) {
this.offCommands[slot].execute();
this.undoCommand = this.offCommands[slot];
}
public undoButtonWasPushed() {
this.undoCommand.undo();
}
public toString(): string {
// accumulate with += (String.prototype.concat returns a new string, so its
// result must not be discarded)
let stringBuff = `
------ Remote Control -------
`;
for (let i: number = 0; i < this.onCommands.length; i++) {
stringBuff +=
"[slot " + i.toString() + "] " +
this.onCommands[i].constructor.name + " " +
this.offCommands[i].constructor.name +
"\n";
}
stringBuff += "[undo] " + this.undoCommand.constructor.name + "\n";
return stringBuff;
}
}<file_sep>import { Observer } from "./interfaces/Observer";
import { DisplayElement } from "./interfaces/DisplayElement";
import { Subject } from "./interfaces/Subject";
class CurrentConditionsDisplay implements Observer, DisplayElement {
private temperature: number = 0;
private humidity: number = 0;
private weatherData: Subject;
constructor(weatherData: Subject) {
this.weatherData = weatherData;
weatherData.registerObserver(this);
}
update(temperature: number, humidity: number, pressure: number): void {
this.temperature = temperature;
this.humidity = humidity;
this.display();
}
display(): void {
console.log("Current conditions: " + this.temperature + "F degrees and " + this.humidity + "% humidity");
}
}
export default CurrentConditionsDisplay;<file_sep>import { Veggies } from "./Veggies";
export class BlackOlives implements Veggies {
public toString(): string {
return "Black Olives";
}
}<file_sep>export class Light {
public on() {
console.info("Light is on");
}
public off() {
console.info("Light is off");
}
}<file_sep>import { FlyBehaviour } from "./FlyBehaviour";
export class FlyWithWings implements FlyBehaviour {
public fly(): void {
console.log("I`m flying");
}
}<file_sep>import { Clams } from "./Clams";
export class FrozenClams implements Clams {
public toString(): string {
return "Frozen Clams from Chesapeake Bay";
}
}<file_sep>import { FlyBehaviour } from "../behaviours/Fly/FlyBehaviour";
import { QuackBehaviour } from "../behaviours/Quack/QuackBehaviour";
export abstract class Duck {
public flyBehaviour!: FlyBehaviour;
public quackBehaviour!: QuackBehaviour;
public abstract display(): void;
public performFly(): void {
this.flyBehaviour.fly();
}
public performQuack(): void {
this.quackBehaviour.quack();
}
public setFlyBehaviour(fb: FlyBehaviour): void {
this.flyBehaviour = fb;
}
public setQuackBehaviour(qb: QuackBehaviour): void {
this.quackBehaviour = qb;
}
public swim(): void {
console.log("All ducks float, even decoys");
}
}<file_sep>import { Veggies } from "./Veggies";
export class Mushroom implements Veggies {
public toString(): string {
return "Mushrooms";
}
}<file_sep>import { PizzaIngredientFactory } from "./PizzaIngredientFactory";
import { Dough } from "../Ingredients/dough/Dough";
import { Sauce } from "../Ingredients/sauce/Sauce";
import { Cheese } from "../Ingredients/cheese/Cheese";
import { Veggies } from "../Ingredients/veggies/Veggies";
import { Pepperoni } from "../Ingredients/pepperoni/Pepperoni";
import { Clams } from "../Ingredients/clams/Clams";
import { ThickCrustDough } from "../Ingredients/dough/ThickCrustDough";
import { PlumTomatoSauce } from "../Ingredients/sauce/PlumTomatoSauce";
import { MozzarellaCheese } from "../Ingredients/cheese/MozzarellaCheese";
import { BlackOlives } from "../Ingredients/veggies/BlackOlives";
import { Spinach } from "../Ingredients/veggies/Spinach";
import { Eggplant } from "../Ingredients/veggies/Eggplant";
import { SlicedPepperoni } from "../Ingredients/pepperoni/SlicedPepperoni";
import { FrozenClams } from "../Ingredients/clams/FrozenClams";
export class ChicagoPizzaIngredientFactory implements PizzaIngredientFactory {
public createDough(): Dough {
return new ThickCrustDough();
}
public createSauce(): Sauce {
return new PlumTomatoSauce();
}
public createCheese(): Cheese {
return new MozzarellaCheese();
}
public createVeggies(): Veggies[] {
let veggies: Veggies[] = [new BlackOlives(), new Spinach(), new Eggplant()];
return veggies;
}
public createPepperoni(): Pepperoni {
return new SlicedPepperoni();
}
public createClam(): Clams {
return new FrozenClams();
}
}<file_sep>import { QuackBehaviour } from "./QuackBehaviour";
export class MuteQuack implements QuackBehaviour {
public quack(): void {
console.log("<< Silence >>");
}
}<file_sep>export abstract class Pizza {
public name!: string;
public dough!: string;
public sauce!: string;
public toppings: Array<any> = [];
public prepare() {
console.info("Preparing " + this.name);
console.info("Tossing dough...");
console.info("Adding sauce...");
console.info("Adding toppings: ");
for (let i = 0; i < this.toppings.length; i++) {
console.info(" " + this.toppings[i]);
}
}
public bake() {
console.info("Bake for 25 minutes at 350");
}
public cut() {
console.info("Cutting the pizza into diagonal slices");
}
public box() {
console.info("Place pizza in official PizzaStore box");
}
public getName(): string {
return this.name;
}
public toString(): string {
let display = `
---- ${this.name} ----
${this.dough}
${this.sauce}
`;
for (let i: number = 0; i < this.toppings.length; i++) {
// concat returns a new string; append with += so the toppings actually appear in the output
display += this.toppings[i] + '\n';
}
return display;
}
}<file_sep>import { Duck } from "./Duck";
import { FlyNoWay } from "../behaviours/Fly/FlyNoWay";
import { Quack } from "../behaviours/Quack/Quack";
export class ModelDuck extends Duck {
public constructor() {
super();
this.flyBehaviour = new FlyNoWay();
this.quackBehaviour = new Quack();
}
public display(): void {
console.log("I`m a model duck");
}
}<file_sep>import { ExecuteStrategyPattern } from "./StrategyPattern";
import { ExecuteObserverPattern } from "./ObserverPattern";
import { ExecuteDecoratorPattern } from "./DecoratorPattern";
import { ExecuteFactoryMethodPattern } from "./FactoryMethodPattern";
import { ExecuteAbstractFactoryPattern } from "./AbstractFactoryPattern";
import { ExecuteClassicSingletonPattern } from "./SingletonPattern/Classic";
import { ExecuteDoubleCheckedLockingSingletonPattern } from "./SingletonPattern/DoubleCheckedLocking";
import { ExecuteSimpleCommandPattern } from "./CommandPattern/SimpleCommand";
import { ExecuteRemoteWithPartyModeCommandPattern } from "./CommandPattern/RemoteWithPartyMode";
ExecuteStrategyPattern();
ExecuteObserverPattern();
ExecuteDecoratorPattern();
ExecuteFactoryMethodPattern();
ExecuteAbstractFactoryPattern();
ExecuteClassicSingletonPattern();
ExecuteDoubleCheckedLockingSingletonPattern();
ExecuteSimpleCommandPattern();
ExecuteRemoteWithPartyModeCommandPattern();<file_sep>import { QuackBehaviour } from "./QuackBehaviour";
export class Quack implements QuackBehaviour {
public quack(): void {
console.log("Quack");
}
}<file_sep>/* eslint-disable @typescript-eslint/no-useless-constructor */
import { Singleton } from "./Singleton";
export class HotterSingleton extends Singleton {
constructor() {
super();
}
}<file_sep>import { SimpleRemoteControl } from "./SimpleRemoteControl";
import { Light } from "./Light";
import { LightOnCommand } from "./LightOnCommand";
export const ExecuteSimpleCommandPattern = () => {
let remote: SimpleRemoteControl = new SimpleRemoteControl();
let light: Light = new Light();
let lightOn: LightOnCommand = new LightOnCommand(light);
remote.setCommand(lightOn);
remote.buttonWasPressed();
}<file_sep>import { Sauce } from "./Sauce";
export class PlumTomatoSauce implements Sauce {
public toString(): string {
return "Tomato sauce with plum tomatoes";
}
}<file_sep>import { Dough } from "../Ingredients/dough/Dough";
import { Sauce } from "../Ingredients/sauce/Sauce";
import { Cheese } from "../Ingredients/cheese/Cheese";
import { Veggies } from "../Ingredients/veggies/Veggies";
import { Pepperoni } from "../Ingredients/pepperoni/Pepperoni";
import { Clams } from "../Ingredients/clams/Clams";
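// Abstract Factory: one interface for creating a whole family of ingredient products; each regional factory supplies its own concrete set.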
export interface PizzaIngredientFactory {
createDough(): Dough;
createSauce(): Sauce;
createCheese(): Cheese;
createVeggies(): Veggies[];
createPepperoni(): Pepperoni;
createClam(): Clams;
}<file_sep>/* eslint-disable @typescript-eslint/no-useless-constructor */
import { Singleton } from "./Singleton";
export class CoolerSingleton extends Singleton {
static uniqueInstance: Singleton;
constructor() {
super();
}
}<file_sep>export class TV {
location: string;
channel!: number;
public constructor(location: string) {
this.location = location;
}
public on() {
console.info(this.location + " TV is on");
}
public off() {
console.info(this.location + " TV is off");
}
public setInputChannel() {
this.channel = 3;
console.info(this.location + " TV channel is set for DVD");
}
}<file_sep>import { Command } from "../Command";
import { Hottub } from "./Hottub";
export class HottubOnCommand implements Command {
hottub: Hottub;
public constructor(hottub: Hottub) {
this.hottub = hottub;
}
public execute() {
this.hottub.on();
this.hottub.setTemperature(104);
this.hottub.circulate();
}
public undo() {
this.hottub.off();
}
}<file_sep><file_sep>export class Hottub {
__on!: boolean;
temperature!: number;
public on() {
this.__on = true;
}
public off() {
this.__on = false;
}
public circulate() {
if (this.__on) {
console.info("Hottub is bubbling!");
}
}
public jetsOn() {
if (this.__on) {
console.info("Hottub jets are on");
}
}
public jetsOff() {
if (this.__on) {
console.info("Hottub jets are off");
}
}
public setTemperature(temperature: number) {
if (temperature > this.temperature) {
console.info("Hottub is heating to a steaming " + temperature + " degrees");
} else {
console.info("Hottub is cooling to " + temperature + " degrees");
}
this.temperature = temperature;
}
}<file_sep>import { RemoteControl } from "./RemoteControl";
import { Light } from "./Light/Light";
import { TV } from "./TV/TV";
import { Stereo } from "./Stereo/Stereo";
import { Hottub } from "./Hottube/Hottub";
import { LightOnCommand } from "./Light/Light/LightOnCommand";
import { LightOffCommand } from "./Light/Light/LightOffCommand";
import { StereoOnCommand } from "./Stereo/StereoOnCommand";
import { StereoOffCommand } from "./Stereo/StereoOffCommand";
import { TVOnCommand } from "./TV/TVOnCommand";
import { TVOffCommand } from "./TV/TVOffCommand";
import { HottubOnCommand } from "./Hottube/HottubOnCommand";
import { HottubOffCommand } from "./Hottube/HottubOffCommand";
import { Command } from "./Command";
import { MacroCommand } from "./MacroCommand";
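// Demo: a MacroCommand wraps an array of Commands so a single remote slot ("party mode") runs them all with one button press.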
export const ExecuteRemoteWithPartyModeCommandPattern = () => {
let remoteControl: RemoteControl = new RemoteControl();
let light: Light = new Light("Living Room");
let tv: TV = new TV("Living Room");
let stereo: Stereo = new Stereo("Living Room");
let hottub: Hottub = new Hottub();
let lightOn: LightOnCommand = new LightOnCommand(light);
let lightOff: LightOffCommand = new LightOffCommand(light);
let stereoOn: StereoOnCommand = new StereoOnCommand(stereo);
let stereoOff: StereoOffCommand = new StereoOffCommand(stereo);
let tvOn: TVOnCommand = new TVOnCommand(tv);
let tvOff: TVOffCommand = new TVOffCommand(tv);
let hottubOn: HottubOnCommand = new HottubOnCommand(hottub);
let hottubOff: HottubOffCommand = new HottubOffCommand(hottub);
let partyOn: Command[] = [lightOn, stereoOn, tvOn, hottubOn];
let partyOff: Command[] = [lightOff, stereoOff, tvOff, hottubOff];
let partyOnMacro: MacroCommand = new MacroCommand(partyOn);
let partyOffMacro: MacroCommand = new MacroCommand(partyOff);
remoteControl.setCommand(0, partyOnMacro, partyOffMacro);
console.info(remoteControl);
console.info("--- Pushing Macro On---");
remoteControl.onButtonWasPushed(0);
console.info("--- Pushing Macro Off---");
remoteControl.offButtonWasPushed(0);
}<file_sep>export abstract class Beverage {
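// Decorator pattern component: condiment decorators wrap a Beverage and add to its cost() and getDescription().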
description: string = "Unknown Beverage";
static SMALL: number = 1;
static LARGE: number = 2;
static EXTREA_LARGE: number = 3;
size: number = 0;
getDescription(): string {
return this.description;
}
setSize(size: number): void {
this.size = size;
}
getSize(): number {
return this.size;
}
abstract cost(): number;
}<file_sep>import { PizzaIngredientFactory } from "./PizzaIngredientFactory";
import { Dough } from "../Ingredients/dough/Dough";
import { Sauce } from "../Ingredients/sauce/Sauce";
import { Cheese } from "../Ingredients/cheese/Cheese";
import { Veggies } from "../Ingredients/veggies/Veggies";
import { Pepperoni } from "../Ingredients/pepperoni/Pepperoni";
import { Clams } from "../Ingredients/clams/Clams";
import { ThinCrustDough } from "../Ingredients/dough/ThinCrustDough";
import { MarinaraSauce } from "../Ingredients/sauce/MarinaraSauce";
import { ReggianoCheese } from "../Ingredients/cheese/ReggianoCheese";
import { Garlic } from "../Ingredients/veggies/Garlic";
import { Onion } from "../Ingredients/veggies/Onion";
import { Mushroom } from "../Ingredients/veggies/Mushroom";
import { RedPepper } from "../Ingredients/veggies/RedPepper";
import { SlicedPepperoni } from "../Ingredients/pepperoni/SlicedPepperoni";
import { FreshClams } from "../Ingredients/clams/FreshClams";
export class NYPizzaIngredientFactory implements PizzaIngredientFactory {
public createDough(): Dough {
return new ThinCrustDough();
}
public createSauce(): Sauce {
return new MarinaraSauce();
}
public createCheese(): Cheese {
return new ReggianoCheese();
}
public createVeggies(): Veggies[] {
let veggies: Veggies[] = [new Garlic(), new Onion(), new Mushroom(), new RedPepper()];
return veggies;
}
public createPepperoni(): Pepperoni {
return new SlicedPepperoni();
}
public createClam(): Clams {
return new FreshClams();
}
}<file_sep>import { Veggies } from "./Veggies";
export class Spinach implements Veggies {
public toString(): string {
return "Spinach";
}
}<file_sep>import { Beverage } from "../models/Beverage";
import { CondimentDecorator } from "../models/CondimentDecorator";
export class Whip extends CondimentDecorator {
beverage: Beverage;
constructor(beverage: Beverage) {
super();
this.beverage = beverage;
}
getDescription(): string {
return this.beverage.getDescription() + ", Whip";
}
cost(): number {
return 0.50 + this.beverage.cost();
}
}<file_sep>import { Pizza } from "../Pizza";
export class ChicagoStyleCheesePizza extends Pizza {
public constructor() {
super();
this.name = "Chicago Style Deep Dish Cheese Pizza";
this.dough = "Extra Thick Crust Dough";
this.sauce = "Plum Tomato Sauce";
this.toppings.push("Shredded Mozzarella Cheese");
}
public cut() {
console.info("Cutting the pizza into square slices");
}
}<file_sep>import { Subject } from "./interfaces/Subject";
import { Observer } from "./interfaces/Observer";
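// Concrete Subject (Observer pattern): keeps the observer list and notifies every registered observer when measurements change.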
class WeatherData implements Subject {
private observers: Observer[];
private temperature: number = 0;
private humidity: number = 0;
private pressure: number = 0;
constructor() {
this.observers = [];
}
registerObserver(observer: Observer): void {
this.observers.push(observer);
}
removeObserver(observer: Observer): void {
let i: number = this.observers.indexOf(observer);
if (i >= 0) {
this.observers.splice(i, 1); // splice (not slice) actually removes the observer in place
}
}
notifyObservers(): void {
for (let observer of this.observers) {
observer.update(this.temperature, this.humidity, this.pressure);
}
}
measurementsChanged(): void {
this.notifyObservers();
}
setMeasurements(temperature: number, humidity: number, pressure: number) {
this.temperature = temperature;
this.humidity = humidity;
this.pressure = pressure;
this.measurementsChanged();
}
}
export default WeatherData;<file_sep>import { Duck } from "./Duck";
import { MallardDuck } from "./MallardDuck";
import { ModelDuck } from "./ModelDuck";
export { Duck, MallardDuck, ModelDuck };<file_sep>import { Command } from "./Command";
import { Light } from "./Light";
export class LightOnCommand implements Command {
light: Light;
public constructor(light: Light) {
this.light = light;
}
public execute() {
this.light.on();
}
}<file_sep>import { Veggies } from "./Veggies";
export class Garlic implements Veggies {
public toString(): string {
return "Garlic";
}
}<file_sep>import { Beverage } from "../models/Beverage";
import { CondimentDecorator } from "../models/CondimentDecorator";
export class Soy extends CondimentDecorator {
beverage: Beverage;
constructor(beverage: Beverage) {
super();
this.beverage = beverage;
}
getDescription(): string {
return this.beverage.getDescription() + ", Soy";
}
cost(): number {
let cost: number = this.beverage.cost();
const size: number = this.beverage.getSize();
if (size === Beverage.SMALL) {
cost += 0.10;
} else if (size === Beverage.LARGE) {
cost += 0.15;
} else if (size === Beverage.EXTREA_LARGE) {
cost += 0.20;
}
return cost;
}
}<file_sep>import { Command } from "../Command";
import { CeilingFan } from "./CeilingFan";
export class CeilingFanMediumCommand implements Command {
ceilingFan: CeilingFan;
prevSpeed!: number;
public constructor(ceilingFan: CeilingFan) {
this.ceilingFan = ceilingFan;
}
public execute() {
this.prevSpeed = this.ceilingFan.getSpeed();
this.ceilingFan.medium();
}
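// undo() restores whatever speed the fan was running at before this command executed.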
public undo() {
switch (this.prevSpeed) {
case CeilingFan.HIGH:
this.ceilingFan.high();
break;
case CeilingFan.MEDIUM:
this.ceilingFan.medium();
break;
case CeilingFan.LOW:
this.ceilingFan.low();
break;
default:
this.ceilingFan.off();
break;
}
}
}<file_sep>import { Pizza } from "../Pizzas/Pizza";
import { NYStyleCheesePizza } from "../Pizzas/NewYork/NYStyleCheesePizza";
import { NYStyleVeggiePizza } from "../Pizzas/NewYork/NYStyleVeggiePizza";
import { NYStyleClamPizza } from "../Pizzas/NewYork/NYStyleClamPizza";
import { NYStylePepperoniPizza } from "../Pizzas/NewYork/NYStylePepperoniPizza";
import { ChicagoStyleCheesePizza } from "../Pizzas/Chicago/ChicagoStyleCheesePizza";
import { ChicagoStyleVeggiePizza } from "../Pizzas/Chicago/ChicagoStyleVeggiePizza";
import { ChicagoStyleClamPizza } from "../Pizzas/Chicago/ChicagoStyleClamPizza";
import { ChicagoStylePepperoniPizza } from "../Pizzas/Chicago/ChicagoStylePepperoniPizza";
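// For contrast with the Factory Method stores: this version depends directly on every concrete Pizza class it might instantiate.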
export class DependentPizzaStore {
public createPizza(style: string, type: string): Pizza | null {
let pizza!: Pizza;
if (style === "NY") {
if (type === "cheese") {
pizza = new NYStyleCheesePizza();
} else if (type === "veggie") {
pizza = new NYStyleVeggiePizza();
} else if (type === "clam") {
pizza = new NYStyleClamPizza();
} else if (type === "pepperoni") {
pizza = new NYStylePepperoniPizza();
}
}
else if (style === "Chicago") {
if (type === "cheese") {
pizza = new ChicagoStyleCheesePizza();
} else if (type === "veggie") {
pizza = new ChicagoStyleVeggiePizza();
} else if (type === "clam") {
pizza = new ChicagoStyleClamPizza();
} else if (type === "pepperoni") {
pizza = new ChicagoStylePepperoniPizza();
}
} else {
console.info("Error: invalid type of pizza");
return null;
}
pizza.prepare();
pizza.bake();
pizza.cut();
pizza.box();
return pizza;
}
}<file_sep>import { Singleton } from "./Singleton";
export const ExecuteDoubleCheckedLockingSingletonPattern = () => {
let foo: Singleton = Singleton.getInstance();
console.info(foo);
}<file_sep>import { Pepperoni } from "./Pepperoni";
export class SlicedPepperoni implements Pepperoni {
public toString(): string {
return "Sliced Pepperoni";
}
}<file_sep>import { QuackBehaviour } from "./QuackBehaviour";
export class Squeak implements QuackBehaviour {
public quack(): void {
console.log("Squeak");
}
}<file_sep>import { PizzaStore } from "./PizzaStore";
import { Pizza } from "../Pizzas/Pizza";
import { NYStyleCheesePizza } from "../Pizzas/NewYork/NYStyleCheesePizza";
import { NYStyleVeggiePizza } from "../Pizzas/NewYork/NYStyleVeggiePizza";
import { NYStyleClamPizza } from "../Pizzas/NewYork/NYStyleClamPizza";
import { NYStylePepperoniPizza } from "../Pizzas/NewYork/NYStylePepperoniPizza";
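// Concrete Creator (Factory Method): overrides createPizza() to decide which NY-style Pizza subclass to instantiate.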
export class NYPizzaStore extends PizzaStore {
createPizza(item: string): Pizza {
if (item === "cheese") {
return new NYStyleCheesePizza();
} else if (item === "veggie") {
return new NYStyleVeggiePizza();
} else if (item === "clam") {
return new NYStyleClamPizza();
} else if (item === "pepperoni") {
return new NYStylePepperoniPizza();
}
return new NYStylePepperoniPizza();
}
}<file_sep>import { Command } from "./Command";
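// Null object: a do-nothing Command, handy for filling unassigned remote slots so callers never have to null-check.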
export class NoCommand implements Command {
public execute() {
}
public undo() {
}
}<file_sep>export class Light {
location: string;
level!: number;
public constructor(location: string) {
this.location = location;
}
public on() {
this.level = 100;
console.info("Light is on");
}
public off() {
this.level = 0;
console.info("Light is off");
}
public dim(level: number) {
this.level = level;
if (level === 0) {
this.off();
} else {
console.info("Light is dimmed to " + level + "%");
}
}
public getLevel(): number {
return this.level;
}
}<file_sep>import { Duck, MallardDuck, ModelDuck } from "./models";
import { FlyRocketPowered } from "./behaviours/Fly/FlyRocketPowered";
export const ExecuteStrategyPattern = () => {
console.log('');
console.log('Strategy pattern:');
const mallard: Duck = new MallardDuck();
mallard.performQuack();
mallard.performFly();
const model: Duck = new ModelDuck();
model.performFly();
model.setFlyBehaviour(new FlyRocketPowered());
model.performFly();
}<file_sep>import { FlyBehaviour } from "./FlyBehaviour";
export class FlyRocketPowered implements FlyBehaviour {
public fly(): void {
console.log("I`m flying with a rocket!");
}
}<file_sep>import { PizzaStore } from "./PizzaStore";
import { Pizza } from "../Pizzas/Pizza";
import { ChicagoStyleCheesePizza } from "../Pizzas/Chicago/ChicagoStyleCheesePizza";
import { ChicagoStyleVeggiePizza } from "../Pizzas/Chicago/ChicagoStyleVeggiePizza";
import { ChicagoStyleClamPizza } from "../Pizzas/Chicago/ChicagoStyleClamPizza";
import { ChicagoStylePepperoniPizza } from "../Pizzas/Chicago/ChicagoStylePepperoniPizza";
export class ChicagoPizzaStore extends PizzaStore {
createPizza(item: string): Pizza {
if (item === "cheese") {
return new ChicagoStyleCheesePizza();
} else if (item === "veggie") {
return new ChicagoStyleVeggiePizza();
} else if (item === "clam") {
return new ChicagoStyleClamPizza();
} else if (item === "pepperoni") {
return new ChicagoStylePepperoniPizza();
}
return new ChicagoStylePepperoniPizza();
}
}
|
d2d1adcc0bf936b43ee36e5167bb3a10d609afee
|
[
"Markdown",
"TypeScript"
] | 62 |
TypeScript
|
oleg331/roadmap-patterns
|
08bd608b54029266e0469351139d1e8a21a63d39
|
645905d8a6f537f1eae863b9639f00a24511ef7a
|
refs/heads/master
|
<repo_name>94kazakov/CSCI4502<file_sep>/Kazakov_Denis_Homework2.py
#<NAME>, CSCI 4502, 102298967
import numpy as np
import matplotlib.pyplot as plt
import sys
import re
arguments = sys.argv
data = open(arguments[1], "r")
data_rows = []
for i,dataLine in enumerate(data):
if (i>2):
data_rows.append(dataLine.split(','))
print data_rows[2]
#regex = re.compile(r"^\"[0-9]*\.[0-9]*\"",re.IGNORECASE)
for line in data_rows:
    for j, word in enumerate(line):
        # strip the surrounding quotes from numeric fields and write the result back into the row
        line[j] = re.sub(r"^\"([0-9]*\.[0-9]*)\"", r'\1', word)
        print line[j]
print data_rows[2]
columns = zip(*data_rows) #{1,2,3} + {4,5,6} => {(1,4),(2,5),(3,6)}
##1
#print normalized values of Volume: v(i) = (v(i) - v_min) / (v_max - v_min)
|
a70c131007ad38f66f4f0a356c3b33b17cdb349d
|
[
"Python"
] | 1 |
Python
|
94kazakov/CSCI4502
|
05f45099271f167f39c4506f88f8fe46ac085a43
|
a31ceab584b94d6b93e2adad726cf7eb7b730bbe
|
refs/heads/master
|
<file_sep>
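-- Card prototype (PICO-8): card_def lists each card's count, cost and effect; new_game builds the deck from it and shuffles.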
card_def = {
{
name = 'water',
count = 3,
cost = 1,
desc = 'water plant for 3 turns',
turns = 3,
effect = function(g) g.water += 1 end
},
{
name = 'sun',
count = 3,
cost = 1,
desc = 'put the flower in a sunny place',
turns = 1,
effect = function(g) g.sun = true end,
},
{
name = 'shade',
count = 3,
cost = 1,
desc = 'put the flower in the shade',
effect = function(g) g.sun = false end,
},
}
function shuffle(t)
for i = 1,#t do
j = #t - rnd(#t-i)\1
if i != j then
t[i], t[j] = t[j], t[i]
end
end
end
function new_game()
deck = {}
for i = 1,#card_def do
for j = 1,card_def[i].count do
add(deck, card_def[i])
end
end
shuffle(deck)
return {
hand = {},
deck = deck,
turn = 1,
sun = false,
water = 0,
light = 0,
size = 0,
sel = -1,
}
end
function _init()
game = new_game()
end
function _update60()
-- draw new cards if not enough
while #game.hand < 4 do
add(game.hand, game.deck[#game.deck])
game.deck[#game.deck] = nil
end
-- browse cards in hand
if #game.hand > 0 then
if game.sel < 0 then
game.sel = 1
end
if btnp(2) then game.sel = 1 + (game.sel - 2) % #game.hand end
if btnp(3) then game.sel = 1 + (game.sel) % #game.hand end
end
end
function _draw()
cls(0)
color(7)
print('turn '..game.turn, 1,1)
color(9)
print(' water '..game.water, 10, 20)
print(' light '..game.light, 10, 27)
print(' size '..game.size, 10, 34)
print(' sun '..tostr(game.sun), 10, 41)
color(12)
for i=1,#game.hand do
local prefix = i == game.sel and '* ' or ' '
print(prefix..game.hand[i].name..' ['..game.hand[i].cost..']', 3, 50 + 7 * i)
end
color(7)
if game.sel > 0 then
print('description:', 3, 100)
print(game.hand[game.sel].desc, 3, 107)
end
end
<file_sep>
-- print small version of map
cls(0)
for y=0,63 do
for x=0,63 do
local n=mget(x,y)
if n>0 then
sspr(n%16*8,n\16*8,8,8,x*2,y*2,2,2)
end
end
end
-- pick two random points
sx,sy = crnd(1,63)\1, crnd(1,63)\1
ex,ey = crnd(1,63)\1, crnd(1,63)\1
pset(sx*2,sy*2,8)
pset(ex*2,ey*2,8)
function encode(x,y)
n = x + y/256
return n
end
function size(t)
local s=0
for k,v in pairs(t) do
s+=1
end
return s
end
visited = {}
todo={}
todo[encode(sx,sy)]=1
next = {}
finish = encode(ex,ey)
light = finish
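-- Rough Dijkstra-style flood fill: expand the 'todo' frontier, keeping the cheapest known cost in 'visited',
-- until the finish tile gets a cost (grass costs 8 per step, other walkable tiles cost 1).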
while visited[finish]==nil do
for k,v in pairs(todo) do
local m={}
m.x = flr(k)
m.y = k%1*256
function addtonext(x,y)
if x < 0 or x > 63 or y < 0 or y > 63 then -- bounds-check the neighbour being added (map tiles are 0..63), not the current cell
return
end
local val = 0
if mget(x,y)==1 then -- mountain
return
elseif mget(x,y)==5 then -- water
return
elseif mget(x,y)==7 then -- grass
val=v+8
else
val=v+1
end
if not visited[encode(x,y)] then
next[encode(x,y)]=val
else
if visited[encode(x,y)] > val then
next[encode(x,y)]=val
end
end
end
addtonext(m.x+1,m.y)
addtonext(m.x-1,m.y)
addtonext(m.x,m.y+1)
addtonext(m.x,m.y-1)
visited[k] = v
end
todo = next
next = {}
end
dist = visited[finish]
while dist>1 do
local a={}
local m={}
m.x = flr(light)
m.y = light%1*256
function addtolight(x0,y0)
if visited[encode(x0,y0)]!=nil then
if visited[encode(x0,y0)] < dist then
dist=visited[encode(x0,y0)]
a={x=x0, y=y0}
end
end
end
addtolight(m.x+1,m.y)
addtolight(m.x-1,m.y)
addtolight(m.x,m.y+1)
addtolight(m.x,m.y-1)
pset(a.x*2,a.y*2,7)
light = encode(a.x,a.y)
end
pset(sx*2,sy*2,12)
pset(ex*2,ey*2,12)
|
26ffec9aad934ae678ccff11b5e4368cce286539
|
[
"Lua"
] | 2 |
Lua
|
escargames/LD46
|
c506dccfea1f929261772cb80cb40a5e0285279b
|
c4d22999295c3415f95c44dc7184b61058ef1411
|
refs/heads/master
|
<file_sep>package usernameavailable
import (
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
)
// usernameAvailableReq includes the username to query the user collection
type usernameAvailableReq struct {
Username string `json:"username"`
}
// usernameAvailableResp includes a result of true or false
type usernameAvailableResp struct {
Result bool `json:"result"`
}
var firestoreClient *firestore.Client
var logger sawmill.Logger
func init() {
log.Println("UsernameAvailable intialized")
}
// UsernameAvailable checks to see if the given username is available to use
func UsernameAvailable(writer http.ResponseWriter, request *http.Request) {
// Initialize
isUsernameAvailable := true
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnv", initWithEnvErr, nil)
return
}
// Get Username
var reqData usernameAvailableReq
reqDataErr := json.NewDecoder(request.Body).Decode(&reqData)
if reqDataErr != nil {
http.Error(writer, reqDataErr.Error(), http.StatusInternalServerError)
logger.LogErr("ReqData Decoder", reqDataErr, request)
return
}
// Make a Firebase request to see if a user document has already been created with the given uid
snapshots := firestoreClient.Collection("users").Where("username", "==", reqData.Username).Snapshots(context.Background())
documents, documentsErr := snapshots.Query.Documents(context.Background()).GetAll()
if documentsErr != nil {
http.Error(writer, documentsErr.Error(), http.StatusInternalServerError)
logger.LogErr("Firebase GetDocumentsQuery", documentsErr, request)
return
}
if len(documents) > 0 {
log.Printf("[UsernameAvailable] %s has already been taken", reqData.Username)
isUsernameAvailable = false
}
// Create result object
result := usernameAvailableResp{
Result: isUsernameAvailable,
}
// Send response
writer.Header().Set("Content-Type", "application/json")
json.NewEncoder(writer).Encode(result)
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("UsernameAvailable [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "UsernameAvailable")
if err != nil {
log.Printf("UsernameAvailable [Init Sawmill]: %v", err)
}
firestoreClient = client
logger = sawmillLogger
return nil
}
<file_sep>package tokengen
// Remaiten - “Maybe now is the time to switch to Vue Native Kappa” (02/22/20)
// BackeyM - "It's go time 🙃" (02/22/20)
// TheDkbay - "It's double go time" (02/22/20)
// LilCazza - "I can't see what's going on here, it may be because I can only hear" (02/24/20)
// Tensei_c - "rust > go :P" (02/24/20)
// pahnev - "swift > rust" (02/24/20)
// jackconceprio - "I like pineapple juice any line" (02/25/20)
// OnePocketPimp - "TODO: Create Dog Treat API to properly reward all good doggos on streaming platforms" (03/08/20)
import (
"context"
"encoding/json"
"log"
"net/http"
"firebase.google.com/go"
"firebase.google.com/go/auth"
"github.com/pixelogicdev/gruveebackend/pkg/social"
)
var client *auth.Client
func init() {
// Init Firebase App
app, err := firebase.NewApp(context.Background(), nil)
if err != nil {
log.Printf("firebase.NewApp: %v", err)
return
}
log.Println("Firebase NewApp initialized")
// Init Firebase Auth Admin
client, err = app.Auth(context.Background())
if err != nil {
log.Printf("auth.Client: %v", err)
return
}
log.Println("Firebase AuthAdmin initialized")
}
// GenerateCustomToken generates a CustomToken for Firebase Login
func GenerateCustomToken(writer http.ResponseWriter, request *http.Request) {
// If this is getting called, we have already authorized the user by verifying their API token is valid and pulling back their data
var tokenRequest social.GenerateTokenRequest
// Decode json from request
err := json.NewDecoder(request.Body).Decode(&tokenRequest)
if err != nil {
log.Printf("json.NewDecoder: %v", err)
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
// Garahorn - "We need to generate the quantum GUID once the flux capacitor reaches terminal velocity." (02/24/20)
token, err := client.CustomToken(context.Background(), tokenRequest.UID)
if err != nil {
log.Printf("client.CustomToken: %v", err)
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
// Create response object and pass it along
tokenResponse := social.GenerateTokenResponse{Token: token}
writer.Header().Set("Content-Type", "application/json")
json.NewEncoder(writer).Encode(tokenResponse)
}
<file_sep>package spotifyauth
// InukApp - "Todo: add Plex auth support" (03/22/20)
// DaedTTV - "32 Font Size Kinda THICC" (03/23/20)
// thoastyk "X O X" (02/26/20)
// thoastyk "_ X O" (02/26/20)
// pheonix_d123 "O O X I wanna interrupt the tic-tac-toe." (03/08/20)
// Belonix97 "X O O I want to interrupt the interrupted tic-tac-toe line." (03/08/20)
// ItsAstrix "O O X I wanna interrupt the tic-tac-toe." (03/08/20)
// thoastyk "X _ O" (02/26/20)
// creativenobu - "Have you flutter tried?" (02/26/20)
// TheDkbay - "If this were made in Flutter Alec would already be done but he loves to pain himself and us by using inferior technology maybe he will learn in the future." (03/02/20)
// OnePocketPimp - "Alec had an Idea at this moment in time 9:53 am 3-1-2020" (03/01/20)
// ZenonLoL - "go mod vendor - it just works" (03/08/20)
// gamma7869 - "Maybe if I get Corona, I could finally get friends. Corona Friends?" (03/12/20)
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"time"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
"github.com/pixelogicdev/gruveebackend/pkg/social"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
var spotifyMeURI = "https://api.spotify.com/v1/me"
var httpClient *http.Client
var firestoreClient *firestore.Client
var logger sawmill.Logger
var hostname string
func init() {
// Initialize client
httpClient = &http.Client{}
log.Println("AuthorizeWithSpotify initialized")
}
// AuthorizeWithSpotify will verify Spotify creds are valid and return any associated Firebase user or create a new Firebase user
func AuthorizeWithSpotify(writer http.ResponseWriter, request *http.Request) {
// Initialize
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnvErr", initWithEnvErr, nil)
return
}
var spotifyAuthRequest social.SpotifyAuthRequest
authResponseErr := json.NewDecoder(request.Body).Decode(&spotifyAuthRequest)
if authResponseErr != nil {
http.Error(writer, authResponseErr.Error(), http.StatusInternalServerError)
logger.LogErr("SpotifyAuthRequest Decoder", authResponseErr, request)
return
}
if len(spotifyAuthRequest.APIToken) == 0 {
http.Error(writer, "AuthorizeWithSpotify: ApiToken was empty.", http.StatusBadRequest)
logger.LogErr("SpotifyAuthRequest Decoder", fmt.Errorf("ApiToken was empty"), request)
return
}
spotifyMeReq, spotifyMeReqErr := http.NewRequest("GET", spotifyMeURI, nil)
if spotifyMeReqErr != nil {
http.Error(writer, spotifyMeReqErr.Error(), http.StatusInternalServerError)
logger.LogErr("Request", spotifyMeReqErr, spotifyMeReq)
return
}
// pheonix_d123 - "Client's gotta do what the Client's gotta do!" (02/26/20)
spotifyMeReq.Header.Add("Authorization", "Bearer "+spotifyAuthRequest.APIToken)
resp, httpErr := httpClient.Do(spotifyMeReq)
if httpErr != nil {
http.Error(writer, httpErr.Error(), http.StatusBadRequest)
logger.LogErr("GET Request", httpErr, spotifyMeReq)
return
}
// Check to see if request was valid
if resp.StatusCode != http.StatusOK {
// Convert Spotify Error Object
var spotifyErrorObj social.SpotifyRequestError
err := json.NewDecoder(resp.Body).Decode(&spotifyErrorObj)
if err != nil {
http.Error(writer, err.Error(), http.StatusBadRequest)
logger.LogErr("Spotify Request Decoder", err, spotifyMeReq)
return
}
http.Error(writer, spotifyErrorObj.Error.Message, spotifyErrorObj.Error.Status)
logger.LogErr("Spotify Request Decoder", fmt.Errorf(spotifyErrorObj.Error.Message), spotifyMeReq)
return
}
var spotifyMeResponse social.SpotifyMeResponse
// syszen - "wait that it? #easyGo"(02/27/20)
// LilCazza - "Why the fuck doesn't this shit work" (02/27/20)
respDecodeErr := json.NewDecoder(resp.Body).Decode(&spotifyMeResponse)
if respDecodeErr != nil {
http.Error(writer, respDecodeErr.Error(), http.StatusBadRequest)
logger.LogErr("Spotify Request Decoder", respDecodeErr, spotifyMeReq)
return
}
// Check DB for the user; if found, return the user object
authorizeWithSpotifyResp, userErr := getUser(spotifyMeResponse.ID)
if userErr != nil {
http.Error(writer, userErr.Error(), http.StatusBadRequest)
logger.LogErr("GetUser", userErr, nil)
return
}
// We do not have our user
if authorizeWithSpotifyResp == nil && userErr == nil {
// First, generate & write social platform object
socialPlatDocRef, socialPlatData, socialPlatErr := createSocialPlatform(spotifyMeResponse, spotifyAuthRequest)
if socialPlatErr != nil {
http.Error(writer, socialPlatErr.Error(), http.StatusBadRequest)
logger.LogErr("CreateSocialPlatform", socialPlatErr, nil)
return
}
// Then, generate & write Firestore User object
var firestoreUser, firestoreUserErr = createUser(spotifyMeResponse, socialPlatDocRef)
if firestoreUserErr != nil {
http.Error(writer, firestoreUserErr.Error(), http.StatusBadRequest)
log.Printf("AuthorizeWithSpotify [createUser]: %v", firestoreUserErr)
return
}
// Finally, get custom JWT
var customToken, customTokenErr = getCustomToken(firestoreUser.ID)
if customTokenErr != nil {
http.Error(writer, customTokenErr.Error(), http.StatusBadRequest)
logger.LogErr("CustomToken", customTokenErr, nil)
return
}
// sillyonly: "path.addLine(to: CGPoint(x: rect.width, y: rect.height))" (03/13/20)
writer.WriteHeader(http.StatusOK)
writer.Header().Set("Content-Type", "application/json")
var spotifyAuthResp = social.AuthorizeWithSpotifyResponse{
Email: firestoreUser.Email,
ID: firestoreUser.ID,
Playlists: []firebase.FirestorePlaylist{},
PreferredSocialPlatform: *socialPlatData,
SocialPlatforms: []firebase.FirestoreSocialPlatform{*socialPlatData},
Username: firestoreUser.Username,
JWT: customToken.Token,
}
json.NewEncoder(writer).Encode(spotifyAuthResp)
return
}
// We have our user
if authorizeWithSpotifyResp != nil {
// Still need to get our custom token here
var customToken, customTokenErr = getCustomToken(authorizeWithSpotifyResp.ID)
if customTokenErr != nil {
http.Error(writer, customTokenErr.Error(), http.StatusBadRequest)
logger.LogErr("CustomToken", customTokenErr, nil)
return
}
authorizeWithSpotifyResp.JWT = customToken.Token
writer.WriteHeader(http.StatusOK)
writer.Header().Set("Content-Type", "application/json")
json.NewEncoder(writer).Encode(authorizeWithSpotifyResp)
}
return
}
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this is main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
hostname = os.Getenv("HOSTNAME_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
hostname = os.Getenv("HOSTNAME_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("AuthorizeWithSpotify [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "AuthorizeWithSpotify")
if err != nil {
log.Printf("AuthorizeWithSpotify [Init Sawmill]: %v", err)
}
log.Println(currentProject)
log.Println(hostname)
firestoreClient = client
logger = sawmillLogger
return nil
}
// sillyonly - "So 140 char? is this twitter or a coding stream!" (03/02/20)
func getUser(uid string) (*social.AuthorizeWithSpotifyResponse, error) {
// Go to firestore and check for uid
fbID := "spotify:" + uid
userRef := firestoreClient.Doc("users/" + fbID)
if userRef == nil {
return nil, fmt.Errorf("doesUserExist: users/%s is an odd path", fbID)
}
// If uid does not exist return nil
userSnap, err := userRef.Get(context.Background())
if status.Code(err) == codes.NotFound {
log.Printf("User with id %s was not found", fbID)
return nil, nil
}
// UID does exist, return firestore user
var firestoreUser firebase.FirestoreUser
dataErr := userSnap.DataTo(&firestoreUser)
if dataErr != nil {
return nil, fmt.Errorf("doesUserExist: %v", dataErr)
}
// Get references from socialPlatforms
socialPlatformSnaps, socialPlatformSnapsErr := fetchSnapshots(firestoreUser.SocialPlatforms)
if socialPlatformSnapsErr != nil {
return nil, fmt.Errorf("FetchSnapshots: %v", socialPlatformSnapsErr)
}
// Convert socialPlatforms to data
socialPlatforms, preferredPlatform := snapsToSocialPlatformData(socialPlatformSnaps)
// Get references from playlists
playlistsSnaps, playlistSnapsErr := fetchSnapshots(firestoreUser.Playlists)
if playlistSnapsErr != nil {
return nil, fmt.Errorf("FetchSnapshots: %v", playlistSnapsErr)
}
// Convert playlists to data
playlists := snapsToPlaylistData(playlistsSnaps)
// Convert user to response object
authorizeWithSpotifyResponse := social.AuthorizeWithSpotifyResponse{
Email: firestoreUser.Email,
ID: firestoreUser.ID,
Playlists: playlists,
PreferredSocialPlatform: preferredPlatform,
SocialPlatforms: socialPlatforms,
Username: firestoreUser.Username,
}
return &authorizeWithSpotifyResponse, nil
}
// fetchSnapshots takes in an array of Firestore Document references and returns their DocumentSnapshots
func fetchSnapshots(refs []*firestore.DocumentRef) ([]*firestore.DocumentSnapshot, error) {
docsnaps, err := firestoreClient.GetAll(context.Background(), refs)
if err != nil {
return nil, fmt.Errorf("fetchChildRefs: %v", err)
}
return docsnaps, nil
}
// snapsToPlaylistData takes in an array of Firestore DocumentSnapshots and returns an array of FirestorePlaylists
func snapsToPlaylistData(snaps []*firestore.DocumentSnapshot) []firebase.FirestorePlaylist {
var playlists []firebase.FirestorePlaylist
for _, playlistSnap := range snaps {
var playlist firebase.FirestorePlaylist
dataErr := playlistSnap.DataTo(&playlist)
if dataErr != nil {
log.Printf("Encountered error while parsing playlist snapshot.")
log.Printf("%v", dataErr)
continue
}
playlists = append(playlists, playlist)
}
return playlists
}
// snapsToSocialPlatformData takes in an array of Firestore DocumentSnapshots and returns an array of FirestoreSocialPlatforms & the PreferredPlatform
func snapsToSocialPlatformData(snaps []*firestore.DocumentSnapshot) ([]firebase.FirestoreSocialPlatform, firebase.FirestoreSocialPlatform) {
var socialPlatforms []firebase.FirestoreSocialPlatform
var preferredService firebase.FirestoreSocialPlatform
for _, socialSnaps := range snaps {
var socialPlatform firebase.FirestoreSocialPlatform
dataErr := socialSnaps.DataTo(&socialPlatform)
if dataErr != nil {
log.Printf("Encountered error while parsing socialSnaps.")
log.Printf("%v", dataErr)
continue
}
socialPlatforms = append(socialPlatforms, socialPlatform)
if socialPlatform.IsPreferredService {
preferredService = socialPlatform
}
}
return socialPlatforms, preferredService
}
// createUser takes in the spotify response and returns a new firebase user
func createUser(spotifyResp social.SpotifyMeResponse,
socialPlatDocRef *firestore.DocumentRef) (*firebase.FirestoreUser, error) {
var createUserURI = hostname + "/createUser"
// Get profile image
var profileImage firebase.SpotifyImage
if len(spotifyResp.Images) > 0 {
profileImage = spotifyResp.Images[0]
} else {
profileImage = firebase.SpotifyImage{}
}
log.Println(socialPlatDocRef)
// Create, CreateUser Request object
var createUserReq = social.CreateUserReq{
Email: spotifyResp.Email,
ID: "spotify:" + spotifyResp.ID,
SocialPlatformPath: "social_platforms/" + socialPlatDocRef.ID,
ProfileImage: &profileImage,
Username: spotifyResp.DisplayName,
}
// Create jsonBody
jsonPlatform, jsonErr := json.Marshal(createUserReq)
if jsonErr != nil {
return nil, fmt.Errorf(jsonErr.Error())
}
// Create Request
createUser, createUserErr := http.NewRequest("POST", createUserURI, bytes.NewBuffer(jsonPlatform))
if createUserErr != nil {
return nil, fmt.Errorf(createUserErr.Error())
}
createUser.Header.Add("Content-Type", "application/json")
createUserResp, httpErr := httpClient.Do(createUser)
if httpErr != nil {
return nil, fmt.Errorf(httpErr.Error())
}
if createUserResp.StatusCode != http.StatusOK {
// Get error from body
var body []byte
body, _ = ioutil.ReadAll(createUserResp.Body)
return nil, fmt.Errorf((string(body)))
}
var firestoreUser firebase.FirestoreUser
respDecodeErr := json.NewDecoder(createUserResp.Body).Decode(&firestoreUser)
if respDecodeErr != nil {
return nil, fmt.Errorf(respDecodeErr.Error())
}
return &firestoreUser, nil
}
// createSocialPlatform calls our CreateSocialPlatform Firebase Function to create & write new platform to DB
func createSocialPlatform(spotifyResp social.SpotifyMeResponse,
authReq social.SpotifyAuthRequest) (*firestore.DocumentRef, *firebase.FirestoreSocialPlatform, error) {
var createSocialPlatformURI = hostname + "/createSocialPlatform"
// Create request body
var isPremium = false
if spotifyResp.Product == "premium" {
isPremium = true
}
var profileImage firebase.SpotifyImage
if len(spotifyResp.Images) > 0 {
profileImage = spotifyResp.Images[0]
} else {
profileImage = firebase.SpotifyImage{}
}
// Adds the expiresIn time to current time
var expiredAtStr = time.Now().Add(time.Second * time.Duration(authReq.ExpiresIn))
var apiToken = firebase.APIToken{
CreatedAt: time.Now().Format(time.RFC3339),
ExpiredAt: expiredAtStr.Format(time.RFC3339),
ExpiresIn: authReq.ExpiresIn,
Token: authReq.APIToken,
}
// Object that we will write to Firestore
var platform = firebase.FirestoreSocialPlatform{
APIToken: apiToken,
RefreshToken: authReq.RefreshToken,
Email: spotifyResp.Email,
ID: spotifyResp.ID,
IsPreferredService: true, // If creating a new user, this is the first platform which should be the default
IsPremium: isPremium,
PlatformName: "spotify",
ProfileImage: profileImage,
Username: spotifyResp.DisplayName,
}
// Create jsonBody
jsonPlatform, jsonErr := json.Marshal(platform)
if jsonErr != nil {
return nil, nil, fmt.Errorf(jsonErr.Error())
}
// Create Request
socialPlatformReq, newReqErr := http.NewRequest("POST", createSocialPlatformURI, bytes.NewBuffer(jsonPlatform))
if newReqErr != nil {
return nil, nil, fmt.Errorf(newReqErr.Error())
}
// Run firebase function to write platform to database
socialPlatformReq.Header.Add("Content-Type", "application/json")
socialPlatformResp, httpErr := httpClient.Do(socialPlatformReq)
if httpErr != nil {
return nil, nil, fmt.Errorf(httpErr.Error())
}
if socialPlatformResp.StatusCode != http.StatusOK {
// Get error from body
var body, _ = ioutil.ReadAll(socialPlatformResp.Body)
return nil, nil, fmt.Errorf(string(body))
}
// Get Document reference
platformRef := firestoreClient.Doc("social_platforms/" + platform.ID)
if platformRef == nil {
return nil, nil, fmt.Errorf("Odd number of IDs or the ID was empty")
}
return platformRef, &platform, nil
}
// getCustomToken calls our GenerateToken Firebase Function to create & return a custom JWT
func getCustomToken(uid string) (*social.GenerateTokenResponse, error) {
var generateTokenURI = hostname + "/generateCustomToken"
var tokenRequest = social.GenerateTokenRequest{
UID: uid,
}
// Create jsonBody
jsonPlatform, jsonErr := json.Marshal(tokenRequest)
if jsonErr != nil {
return nil, fmt.Errorf(jsonErr.Error())
}
// Create Request
customTokenReq, customTokenReqErr := http.NewRequest("POST", generateTokenURI, bytes.NewBuffer(jsonPlatform))
if customTokenReqErr != nil {
return nil, fmt.Errorf(customTokenReqErr.Error())
}
customTokenReq.Header.Add("Content-Type", "application/json")
customTokenResp, httpErr := httpClient.Do(customTokenReq)
if httpErr != nil {
return nil, fmt.Errorf(httpErr.Error())
}
// Decode the token to send back
var tokenResponse social.GenerateTokenResponse
customTokenDecodeErr := json.NewDecoder(customTokenResp.Body).Decode(&tokenResponse)
if customTokenDecodeErr != nil {
return nil, fmt.Errorf(customTokenDecodeErr.Error())
}
return &tokenResponse, nil
}
// no_neon_one - "BACKEND as a service" (02/29/20)
// sillyonly - "still waiting on alecc to give me a discount" (02/29/20)
// jackconceprio - "Baby lock the door and turn the lights down low Put some music on that's soft and slow
// Baby we ain't got no place to go I hope you understand I've been thinking 'bout this all day long Never
// felt a feeling quite this strong I can't believe how much it turns me on Just to be your man There's
// no hurry Don't you worry We can take our time Come a little closer Lets go over What I had in mind
// Baby lock the door and turn the lights down low Put some music on that's soft and slow Baby we ain't
// got no place to go I hope you understand I've been thinking 'bout this all day long Never felt a
// feeling quite this strong I can't believe how much it turns me on Just to be your man Ain't nobody ever
// love nobody The way that I love you We're alone now You don't know how Long I've wanted to Lock the door
// and turn the lights down low Put some music on that's soft and slow Baby we ain't got no place to go I
// hope you understand I've been thinking 'bout this all day long Never felt a feeling that was quite this
// strong I can't believe how much it turns me on Just to be your man I can't believe how much it turns me
// on Just to be your own" (03/01/20)
<file_sep>package doesuserdocexist
import (
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
// doesUserDocExistReq includes the uid of the user we are checking
type doesUserDocExistReq struct {
UID string `json:"uid"`
}
// doesUserDocExistResp includes a result of true or false
type doesUserDocExistResp struct {
Result bool `json:"result"`
}
var firestoreClient *firestore.Client
var logger sawmill.Logger
func init() {
log.Println("DoesUserDocExist intialized")
}
// DoesUserDocExist checks to see if there is already a Firebase user document for someone right before they sign in
func DoesUserDocExist(writer http.ResponseWriter, request *http.Request) {
// Initialize
doesUserDocExist := false
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnv", initWithEnvErr, nil)
return
}
// Get UserId
var reqData doesUserDocExistReq
reqDataErr := json.NewDecoder(request.Body).Decode(&reqData)
if reqDataErr != nil {
http.Error(writer, reqDataErr.Error(), http.StatusInternalServerError)
logger.LogErr("ReqData Decoder", reqDataErr, request)
return
}
// Make a Firebase request to see if a user document has already been created with the given uid
snapshot, snapshotErr := firestoreClient.Collection("users").Doc(reqData.UID).Get(context.Background())
if status.Code(snapshotErr) != codes.NotFound && snapshot.Exists() {
doesUserDocExist = true
}
// Create result object
result := doesUserDocExistResp{
Result: doesUserDocExist,
}
// Send response
writer.Header().Set("Content-Type", "application/json")
json.NewEncoder(writer).Encode(result)
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("DoesUserDocExist [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "DoesUserDocExist")
if err != nil {
log.Printf("DoesUserDocExist [Init Sawmill]: %v", err)
}
firestoreClient = client
logger = sawmillLogger
return nil
}
<file_sep>package createsocialplaylist
// Dragonfleas - "bobby drop tables wuz here pog - Dragonfleas - Relevant XKCD" (03/23/20)
// HMigo - "<NAME>" (03/26/20)
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
"github.com/pixelogicdev/gruveebackend/pkg/social"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
// createSocialPlaylistRequest includes the socialPlatform and playlist that will be added
type createSocialPlaylistRequest struct {
SocialPlatform firebase.FirestoreSocialPlatform `json:"socialPlatform"`
PlaylistName string `json:"playlistName"`
}
// createSocialPlaylistResponse includes the refreshToken for the platform if there is one
type createSocialPlaylistResponse struct {
PlatformName string `json:"platformName"`
RefreshToken firebase.APIToken `json:"refreshToken"`
}
// appleMusicPlaylistRequest includes the payload needed to create an Apple Music Playlist
type appleMusicPlaylistRequest struct {
Attributes struct {
Name string `json:"name"`
Description string `json:"description"`
} `json:"attributes"`
}
// spotifyPlaylistRequest includes the payload needed to create a Spotify Playlist
type spotifyPlaylistRequest struct {
Name string `json:"name"`
Public bool `json:"public"`
Collaborative bool `json:"collaborative"`
Description string `json:"description"`
}
var firestoreClient *firestore.Client
var logger sawmill.Logger
var httpClient *http.Client
var hostname string
// ywnklme - "At least something in my life is social 😞" (03/23/20)
func init() {
// Set httpClient
httpClient = &http.Client{}
log.Println("CreateSocialPlaylist Initialized")
}
// CreateSocialPlaylist will take in a SocialPlatform and will go create a playlist on the social account itself
func CreateSocialPlaylist(writer http.ResponseWriter, request *http.Request) {
// Initialize paths
err := initWithEnv()
if err != nil {
http.Error(writer, err.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnv", err, nil)
return
}
var socialPlaylistReq createSocialPlaylistRequest
// Decode our object
jsonDecodeErr := json.NewDecoder(request.Body).Decode(&socialPlaylistReq)
if jsonDecodeErr != nil {
http.Error(writer, jsonDecodeErr.Error(), http.StatusInternalServerError)
logger.LogErr("SocialPlaylistReq Decoder", jsonDecodeErr, request)
return
}
// Figure out what service we are going to create a playlist in
var platformEndpoint string
var socialRefreshTokens *social.RefreshTokensResponse
var socialRefreshTokenErr error
if socialPlaylistReq.SocialPlatform.PlatformName == "spotify" {
log.Printf("Creating playlist for Spotify")
platformEndpoint = "https://api.spotify.com/v1/users/" + socialPlaylistReq.SocialPlatform.ID + "/playlists"
// This is sort of weird, but I haven't been able to find any resources on Apple Music tokens expiring
// Therefore, this check should only be done on Spotify at the moment
socialRefreshTokens, socialRefreshTokenErr = refreshToken(socialPlaylistReq.SocialPlatform)
if socialRefreshTokenErr != nil {
http.Error(writer, socialRefreshTokenErr.Error(), http.StatusBadRequest)
logger.LogErr("RefreshToken", socialRefreshTokenErr, request)
return
}
} else if socialPlaylistReq.SocialPlatform.PlatformName == "apple" {
log.Printf("Creating playlist for Apple Music")
platformEndpoint = "https://api.music.apple.com/v1/me/library/playlists"
}
// fr3fou - "i fixed this Kappa" (04/10/20)
// Set up the response if we have a token to return
var response *createSocialPlaylistResponse
// Again, this is solely for Spotify at the moment
if socialPlaylistReq.SocialPlatform.PlatformName == "spotify" && socialRefreshTokens != nil {
// Get token for specified platform
platformRefreshToken, doesExist := socialRefreshTokens.RefreshTokens[socialPlaylistReq.SocialPlatform.PlatformName]
if doesExist {
log.Println("Setting new APIToken on socialPlatform")
socialPlaylistReq.SocialPlatform.APIToken.Token = platformRefreshToken.Token
// Write new apiToken as response
response = &createSocialPlaylistResponse{
PlatformName: socialPlaylistReq.SocialPlatform.PlatformName,
RefreshToken: platformRefreshToken,
}
} else {
// Another token needed refresh, but not the one we were looking for
log.Printf("%s was not refreshed", socialPlaylistReq.SocialPlatform.PlatformName)
}
}
// Call API to create playlist with data
createReqErr := createPlaylist(platformEndpoint, socialPlaylistReq.SocialPlatform, socialPlaylistReq.PlaylistName)
if createReqErr != nil {
http.Error(writer, createReqErr.Error(), http.StatusBadRequest)
logger.LogErr("CreatePlaylist", createReqErr, request)
return
}
if response != nil {
json.NewEncoder(writer).Encode(response)
} else {
writer.WriteHeader(http.StatusNoContent)
}
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
hostname = os.Getenv("HOSTNAME_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
hostname = os.Getenv("HOSTNAME_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("SocialTokenRefresh [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "CreateSocialPlaylist")
if err != nil {
log.Printf("CreateSocial Playlist [Init Sawmill]: %v", err)
}
// DR_DinoMight - "Note to self! Welcome, Dr_DinoMight, Otherwise he'll spit his dummy out!" (06.15.20)
firestoreClient = client
logger = sawmillLogger
return nil
}
// createPlaylist takes the social platform and playlist information and creates a playlist on the user's preferred platform
func createPlaylist(endpoint string, platform firebase.FirestoreSocialPlatform,
playlistName string) error {
var request *http.Request
var requestErr error
// Check for platform
if platform.PlatformName == "spotify" {
request, requestErr = createSpotifyPlaylistRequest(playlistName, endpoint, platform.APIToken.Token)
} else if platform.PlatformName == "apple" {
request, requestErr = createAppleMusicPlaylistRequest(playlistName, endpoint, platform.APIToken.Token)
}
if requestErr != nil {
log.Printf("[createPlaylist] %v", requestErr.Error())
return requestErr
}
createPlaylistResp, httpErr := httpClient.Do(request)
if httpErr != nil {
log.Printf("[createPlaylist] %v", httpErr.Error())
return httpErr
}
// If we have errors, lets parse 'em out
if createPlaylistResp.StatusCode != http.StatusOK && createPlaylistResp.StatusCode != http.StatusCreated {
if platform.PlatformName == "spotify" {
var spotifyErrorObj social.SpotifyRequestError
err := json.NewDecoder(createPlaylistResp.Body).Decode(&spotifyErrorObj)
if err != nil {
log.Printf("[createPlaylist] %v", err.Error())
return err
}
return fmt.Errorf("Status Code %v: "+spotifyErrorObj.Error.Message, spotifyErrorObj.Error.Status)
} else if platform.PlatformName == "apple" {
var appleMusicReqErr social.AppleMusicRequestError
err := json.NewDecoder(createPlaylistResp.Body).Decode(&appleMusicReqErr)
if err != nil {
log.Printf("[createPlaylist] %v", err.Error())
return err
}
// The first error is the most important so for now let's just grab that
return fmt.Errorf("Status Code %v: "+appleMusicReqErr.Errors[0].Detail, appleMusicReqErr.Errors[0].Status)
}
}
return nil
}
// getAppleDevToken will check our DB for the appleDevJWT and return it if present
func getAppleDevToken() (*firebase.FirestoreAppleDevJWT, error) {
// Go to Firebase and see if appleDevToken exists
snapshot, snapshotErr := firestoreClient.Collection("internal_tokens").Doc("appleDevToken").Get(context.Background())
if status.Code(snapshotErr) == codes.NotFound {
log.Println("[getAppleDevToken] AppleDevToken not found in DB.")
return nil, nil
}
if snapshotErr != nil {
log.Printf("[getAppleDevToken] %v", snapshotErr.Error())
return nil, snapshotErr
}
var appleDevToken firebase.FirestoreAppleDevJWT
dataToErr := snapshot.DataTo(&appleDevToken)
if dataToErr != nil {
log.Printf("[getAppleDevToken] %v", dataToErr.Error())
return nil, dataToErr
}
return &appleDevToken, nil
}
// createAppleMusicPlaylistRequest will generate the proper request needed for adding a playlist to Apple Music account
func createAppleMusicPlaylistRequest(playlistName string, endpoint string, apiToken string) (*http.Request, error) {
// Create playlist data
var appleMusicPlaylistReq appleMusicPlaylistRequest
appleMusicPlaylistReq.Attributes.Name = "Grüvee: " + playlistName
appleMusicPlaylistReq.Attributes.Description = "Created with love from Grüvee ❤️"
// Create json body
jsonPlaylist, jsonErr := json.Marshal(appleMusicPlaylistReq)
if jsonErr != nil {
log.Printf("[createAppleMusicPlaylistRequest] %v", jsonErr.Error())
return nil, jsonErr
}
// Create request object
createPlaylistReq, createPlaylistReqErr := http.NewRequest("POST", endpoint, bytes.NewBuffer(jsonPlaylist))
if createPlaylistReqErr != nil {
log.Printf("[createAppleMusicPlaylistRequest] %v", createPlaylistReqErr.Error())
return nil, createPlaylistReqErr
}
// Get Apple Developer Token
	devJWT, err := getAppleDevToken()
	if err != nil {
		return nil, err
	}
	// getAppleDevToken returns (nil, nil) when no token document exists, so guard against a nil dereference below
	if devJWT == nil {
		return nil, fmt.Errorf("createAppleMusicPlaylistRequest: Apple developer token not found in DB")
	}
// Add headers
createPlaylistReq.Header.Add("Content-Type", "application/json")
createPlaylistReq.Header.Add("Music-User-Token", apiToken)
createPlaylistReq.Header.Add("Authorization", "Bearer "+devJWT.Token)
return createPlaylistReq, nil
}
// createSpotifyPlaylistRequest will generate the proper request needed for adding a playlist to Spotify account
func createSpotifyPlaylistRequest(playlistName string, endpoint string, apiToken string) (*http.Request, error) {
// Create playlist data
var spotifyPlaylistRequest = spotifyPlaylistRequest{
Name: "Grüvee: " + playlistName,
Public: true,
Collaborative: false,
Description: "Created with love from Grüvee ❤️",
}
// Create json body
jsonPlaylist, jsonErr := json.Marshal(spotifyPlaylistRequest)
if jsonErr != nil {
return nil, jsonErr
}
// Create request object
createPlaylistReq, createPlaylistReqErr := http.NewRequest("POST", endpoint, bytes.NewBuffer(jsonPlaylist))
if createPlaylistReqErr != nil {
return nil, createPlaylistReqErr
}
// Add headers
createPlaylistReq.Header.Add("Content-Type", "application/json")
createPlaylistReq.Header.Add("Authorization", "Bearer "+apiToken)
return createPlaylistReq, nil
}
// refreshToken takes all socialPlatforms and checks to see if their tokens need to be refreshed
func refreshToken(platform firebase.FirestoreSocialPlatform) (*social.RefreshTokensResponse, error) {
var refreshReq = social.TokenRefreshRequest{
UID: platform.PlatformName + ":" + platform.ID,
}
var tokenRefreshURI = hostname + "/socialTokenRefresh"
jsonTokenRefresh, jsonErr := json.Marshal(refreshReq)
if jsonErr != nil {
return nil, fmt.Errorf(jsonErr.Error())
}
tokenRefreshReq, tokenRefreshReqErr := http.NewRequest("POST", tokenRefreshURI, bytes.NewBuffer(jsonTokenRefresh))
if tokenRefreshReqErr != nil {
return nil, fmt.Errorf(tokenRefreshReqErr.Error())
}
tokenRefreshReq.Header.Add("Content-Type", "application/json")
tokenRefreshReq.Header.Add("User-Type", "Gruvee-Backend")
refreshedTokensResp, httpErr := httpClient.Do(tokenRefreshReq)
if httpErr != nil {
return nil, fmt.Errorf(httpErr.Error())
}
if refreshedTokensResp.StatusCode == http.StatusNoContent {
log.Println("Tokens did not need refresh")
return nil, nil
}
// Receive payload that includes uid
var refreshedTokens social.RefreshTokensResponse
// Decode payload
refreshedTokensErr := json.NewDecoder(refreshedTokensResp.Body).Decode(&refreshedTokens)
if refreshedTokensErr != nil {
return nil, fmt.Errorf(refreshedTokensErr.Error())
}
return &refreshedTokens, nil
}
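// Example usage sketch: how refreshToken and createPlaylist might be wired together.
// The endpoint below is only a placeholder for illustration; in production the endpoint
// is built by the caller from the user's platform profile.
func examplePlaylistFlow(platform firebase.FirestoreSocialPlatform) error {
	// Refresh the platform token first, in case it has expired
	refreshed, refreshErr := refreshToken(platform)
	if refreshErr != nil {
		return refreshErr
	}
	if refreshed != nil {
		if token, ok := refreshed.RefreshTokens[platform.PlatformName]; ok {
			platform.APIToken = token
		}
	}
	// Placeholder endpoint (assumed shape of the Spotify "create playlist" route)
	endpoint := "https://api.spotify.com/v1/users/" + platform.ID + "/playlists"
	return createPlaylist(endpoint, platform, "Road Trip Mix")
}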
<file_sep>module github.com/pixelogicdev/gruveebackend/cmd/fetchallmedia
go 1.13
<file_sep>package appleauth
import (
"context"
"fmt"
"log"
"net/http"
"os"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
"github.com/unrolled/render"
)
// zebcode - "Zebcode Rules 🦸♂️" (04/29/20)
type appleDevTokenResp struct {
Token string
}
var firestoreClient *firestore.Client
var logger sawmill.Logger
var appleDevToken firebase.FirestoreAppleDevJWT
var httpClient *http.Client
var hostname string
var templatePath string
func init() {
httpClient = &http.Client{}
log.Println("AuthorizeWithApple initialized.")
}
// AuthorizeWithApple will render an HTML page to get the AppleMusic credentials for the user
func AuthorizeWithApple(writer http.ResponseWriter, request *http.Request) {
// Initialize
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnv", initWithEnvErr, nil)
return
}
// Get Apple Dev Token
// DR_DinoMight: <PASSWORD>, Apple Really?!?!?! (08/11/20)
appleDevToken, appleDevTokenErr := firebase.GetAppleDeveloperToken()
if appleDevTokenErr != nil {
http.Error(writer, appleDevTokenErr.Error(), http.StatusInternalServerError)
log.Printf("[GetAppleMusicMedia]: %v", appleDevTokenErr)
return
}
// Render template
render := render.New(render.Options{
Directory: templatePath,
})
renderErr := render.HTML(writer, http.StatusOK, "auth", appleDevToken)
if renderErr != nil {
http.Error(writer, renderErr.Error(), http.StatusInternalServerError)
logger.LogErr("Render", renderErr, nil)
return
}
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
if os.Getenv("APPLE_TEAM_ID") == "" {
return fmt.Errorf("authorizeWithApple - APPLE_TEAM_ID does not exist")
}
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
hostname = os.Getenv("HOSTNAME_DEV")
templatePath = os.Getenv("APPLE_AUTH_TEMPLATE_PATH_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
hostname = os.Getenv("HOSTNAME_PROD")
templatePath = os.Getenv("APPLE_AUTH_TEMPLATE_PATH_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("AuthorizeWithApple [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "AuthorizeWithApple")
if err != nil {
log.Printf("AuthorizeWithApple [Init Sawmill]: %v", err)
}
firestoreClient = client
logger = sawmillLogger
return nil
}
<file_sep>package socialplatform
// eminyilmazz - "If I got corona, this line is my legacy." (03/12/20)
import (
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
)
var firestoreClient *firestore.Client
var logger sawmill.Logger
// JackGamesFTW - "TriHard 7" (03/18/20)
func init() {
log.Println("CreateSocialPlatform intialized")
}
// CreateSocialPlatform will write a new social platform to firestore
func CreateSocialPlatform(writer http.ResponseWriter, request *http.Request) {
// Initialize
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnvErr", initWithEnvErr, nil)
return
}
var socialPlatform firebase.FirestoreSocialPlatform
socialPlatformErr := json.NewDecoder(request.Body).Decode(&socialPlatform)
if socialPlatformErr != nil {
http.Error(writer, socialPlatformErr.Error(), http.StatusInternalServerError)
logger.LogErr("SocialPlatform Decoder", socialPlatformErr, request)
return
}
// Write SocialPlatform to Firestore
_, writeErr := firestoreClient.Collection("social_platforms").Doc(socialPlatform.ID).Set(context.Background(), socialPlatform)
if writeErr != nil {
http.Error(writer, writeErr.Error(), http.StatusInternalServerError)
logger.LogErr("FireStore Set", writeErr, nil)
return
}
writer.WriteHeader(http.StatusOK)
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("SocialTokenRefresh [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "CreateSocialPlatform")
if err != nil {
log.Printf("CreateSocialPlatform [Init Sawmill]: %v", err)
}
firestoreClient = client
logger = sawmillLogger
return nil
}
<file_sep>package createprovideruser
import (
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
)
// createProviderUserReq takes in the Firebase Provider UID and the platform provider UID to map
type createProviderUserReq struct {
FirebaseProviderUID string `json:"firebaseProviderUID"`
PlatformProviderUID string `json:"platformProviderUID"`
}
// providerUser takes the platformUser document reference and stores in new collection
type providerUser struct {
PlatformUserRef *firestore.DocumentRef `firestore:"platformUserReference"`
}
var firestoreClient *firestore.Client
var logger sawmill.Logger
// CreateProviderUser will check to see if the newly created user needs to be added to the providers_users collection
func CreateProviderUser(writer http.ResponseWriter, request *http.Request) {
// Initialize
initErr := initWithEnv()
if initErr != nil {
http.Error(writer, initErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnv", initErr, nil)
return
}
// Decode
var reqData createProviderUserReq
reqDataErr := json.NewDecoder(request.Body).Decode(&reqData)
if reqDataErr != nil {
http.Error(writer, reqDataErr.Error(), http.StatusInternalServerError)
logger.LogErr("ReqData Decoder", reqDataErr, request)
return
}
// Create document references
firebaseProviderDocRef := firestoreClient.Doc("provider_users/" + reqData.FirebaseProviderUID)
platformProviderDocRef := firestoreClient.Doc("users/" + reqData.PlatformProviderUID)
// Create ProviderUser Object
providerUserData := providerUser{
PlatformUserRef: platformProviderDocRef,
}
// Write to Firestore
_, writeErr := firebaseProviderDocRef.Set(context.Background(), providerUserData)
if writeErr != nil {
http.Error(writer, writeErr.Error(), http.StatusInternalServerError)
logger.LogErr("FireStore Set", writeErr, request)
return
}
writer.WriteHeader(http.StatusOK)
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
// Get Project ID
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("CreateProviderUser [Init Firestore]: %v", err)
}
// Initialize Sawmill
sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "CreateProviderUser")
if err != nil {
log.Printf("CreateAppleDevToken [Init Sawmill]: %v", err)
}
firestoreClient = client
logger = sawmillLogger
return nil
}
<file_sep>package updatealgolia
import (
"context"
"fmt"
"log"
"os"
"cloud.google.com/go/functions/metadata"
"github.com/algolia/algoliasearch-client-go/v3/algolia/search"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
)
// algoliaUser implements a partial amount of data from firestoreUser to use for indexing
type algoliaUser struct {
ObjectID string `json:"objectID"`
ID string `json:"id"`
Email string `json:"email"`
ProfileImageURI string `json:"profileImage"`
DisplayName string `json:"displayName"`
Username string `json:"username"`
}
// UpdateAlgolia sends new data to Algolia service for indexing
func UpdateAlgolia(ctx context.Context, event firebase.FirestoreEvent) error {
log.Println("[UpdateAlgolia] Starting update...")
// Get IDs
algoliaAppID := os.Getenv("ALGOLIA_APP_ID")
if algoliaAppID == "" {
log.Println("Algolia App ID was empty in yaml file")
return fmt.Errorf("Algolia App ID was empty in yaml file")
}
algoliaSecretID := os.Getenv("ALGOLIA_SECRET_ID")
if algoliaSecretID == "" {
log.Println("Algolia Secret ID was empty in yaml file")
return fmt.Errorf("Algolia Secret ID was empty in yaml file")
}
var algoliaIndexName string
if os.Getenv("ENVIRONMENT") == "DEV" {
algoliaIndexName = os.Getenv("ALGOLIA_INDEX_NAME_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
algoliaIndexName = os.Getenv("ALGOLIA_INDEX_NAME_PROD")
}
if algoliaIndexName == "" {
log.Println("Algolia Index Name was empty in yaml file")
return fmt.Errorf("Algolia Index Name was empty in yaml file")
}
// Init our client
client := search.NewClient(algoliaAppID, algoliaSecretID)
index := client.InitIndex(algoliaIndexName)
meta, err := metadata.FromContext(ctx)
if err != nil {
return fmt.Errorf("metadata.FromContext: %v", err)
}
// Print out our trigger data
log.Printf("Function triggered by change to: %v", meta.Resource)
log.Printf("Event Trigger: %v", event)
// Write objects to Algolia
res, err := index.SaveObject(algoliaUser{
ObjectID: event.Value.Fields.ID.StringValue,
ID: event.Value.Fields.ID.StringValue,
Email: event.Value.Fields.Email.StringValue,
ProfileImageURI: event.Value.Fields.ProfileImage.MapValue.Fields.URL.StringValue,
DisplayName: event.Value.Fields.DisplayName.StringValue,
Username: event.Value.Fields.Username.StringValue,
})
log.Printf("[UpdateAlgolia] SaveObject Res: %v", res)
if err != nil {
log.Printf("UpdateAlgolia [index.SaveObject]: %v", err)
return fmt.Errorf(err.Error())
}
return nil
}
<file_sep>package getapplemusicmedia
import (
"encoding/json"
"log"
"net/http"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
"github.com/pixelogicdev/gruveebackend/pkg/mediahelpers"
"github.com/pixelogicdev/gruveebackend/pkg/social"
)
var httpClient *http.Client
var firestoreClient *firestore.Client
// -- Apple Music Endpoints --/
var catalogHostname = "https://api.music.apple.com/v1/catalog"
func init() {
log.Println("GetAppleMusicMedia Initialized")
}
// GetAppleMusicMedia will take in Apple media data and get the exact media from Apple Music API
func GetAppleMusicMedia(writer http.ResponseWriter, request *http.Request) {
// Initialize
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
log.Printf("GetAppleMusicMedia [initWithEnv]: %v", initWithEnvErr)
return
}
// Decode Request body to get media data
var appleMusicMediaReq social.GetMediaReq
appleMusicMediaReqErr := json.NewDecoder(request.Body).Decode(&appleMusicMediaReq)
if appleMusicMediaReqErr != nil {
http.Error(writer, appleMusicMediaReqErr.Error(), http.StatusInternalServerError)
log.Printf("GetAppleMusicMedia [Request Decoder]: %v", appleMusicMediaReqErr)
return
}
// Check to see if media is already part of collection, if so, just return that
mediaData, mediaDataErr := mediahelpers.GetMediaFromFirestore(*firestoreClient, appleMusicMediaReq.Provider, appleMusicMediaReq.MediaID)
if mediaDataErr != nil {
http.Error(writer, mediaDataErr.Error(), http.StatusInternalServerError)
log.Printf("[GetAppleMusicMedia]: %v", mediaDataErr)
return
}
// MediaData exists, return it to the client
if mediaData != nil {
log.Printf("Media already exists, returning")
		// Set the Content-Type header before writing the status code, otherwise it is ignored
		writer.Header().Set("Content-Type", "application/json")
		writer.WriteHeader(http.StatusOK)
json.NewEncoder(writer).Encode(mediaData)
return
}
// MediaData does not exist, call Apple Music Endpoint
// We need to get the developer token from firebase
appleDevToken, appleDevTokeErr := firebase.GetAppleDeveloperToken()
if appleDevTokeErr != nil {
http.Error(writer, appleDevTokeErr.Error(), http.StatusInternalServerError)
log.Printf("[GetAppleMusicMedia]: %v", appleDevTokeErr)
return
}
var (
firestoreMediaData interface{}
firestoreMediaDataErr error
)
// Time to make our request to Apple Music API
switch appleMusicMediaReq.MediaType {
case "track":
firestoreMediaData, firestoreMediaDataErr = getAppleMusicTrack(appleMusicMediaReq.MediaID, appleMusicMediaReq.Storefront, *appleDevToken)
if firestoreMediaDataErr != nil {
http.Error(writer, firestoreMediaDataErr.Error(), http.StatusInternalServerError)
log.Printf("GetAppleMusicMedia [GetAppleMusicTrack Switch]: %v", firestoreMediaDataErr)
return
}
case "playlist":
firestoreMediaData, firestoreMediaDataErr = getAppleMusicPlaylist(appleMusicMediaReq.MediaID, appleMusicMediaReq.Storefront, *appleDevToken)
if firestoreMediaDataErr != nil {
http.Error(writer, firestoreMediaDataErr.Error(), http.StatusInternalServerError)
log.Printf("GetAppleMusicMedia [GetAppleMusicPlaylist Switch]: %v", firestoreMediaDataErr)
return
}
case "album":
firestoreMediaData, firestoreMediaDataErr = getAppleMusicAlbum(appleMusicMediaReq.MediaID, appleMusicMediaReq.Storefront, *appleDevToken)
if firestoreMediaDataErr != nil {
http.Error(writer, firestoreMediaDataErr.Error(), http.StatusInternalServerError)
log.Printf("GetAppleMusicMedia [GetAppleMusicAlbum Switch]: %v", firestoreMediaDataErr)
return
}
default:
http.Error(writer, appleMusicMediaReq.MediaType+" media type does not exist", http.StatusInternalServerError)
log.Printf("GetAppleMusicMedia [MediaTypeSwitch]: %v media type does not exist", appleMusicMediaReq.MediaType)
return
}
	// Set the Content-Type header before writing the status code, otherwise it is ignored
	writer.Header().Set("Content-Type", "application/json")
	writer.WriteHeader(http.StatusOK)
json.NewEncoder(writer).Encode(firestoreMediaData)
}
<file_sep>package socialtokenrefresh
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"log"
"net/http"
"net/url"
"os"
"strings"
"time"
"cloud.google.com/go/firestore"
"github.com/pixelogicdev/gruveebackend/pkg/firebase"
"github.com/pixelogicdev/gruveebackend/pkg/sawmill"
"github.com/pixelogicdev/gruveebackend/pkg/social"
)
var httpClient *http.Client
var firestoreClient *firestore.Client
var logger sawmill.Logger
var spotifyRefreshTokenURI = "https://accounts.spotify.com/api/token"
func init() {
// Set httpClient
httpClient = &http.Client{}
log.Println("SocialTokenRefresh initialized")
}
// spotifyRefreshTokenRes contains the response from Spotify when trying to refresh the access token
type spotifyRefreshTokenRes struct {
PlatformName string `json:"platformName"`
AccessToken string `json:"access_token"`
TokenType string `json:"token_type"`
Scope string `json:"scope"`
ExpiresIn int `json:"expires_in"`
}
// SocialTokenRefresh checks to see if we need to refresh current API tokens for social platforms
func SocialTokenRefresh(writer http.ResponseWriter, request *http.Request) {
// Check to see if we have env variables
if os.Getenv("SPOTIFY_CLIENTID") == "" || os.Getenv("SPOTIFY_SECRET") == "" {
log.Fatalln("SocialTokenRefresh [Check Env Props]: PROPS NOT HERE.")
return
}
// Initialize
initWithEnvErr := initWithEnv()
if initWithEnvErr != nil {
http.Error(writer, initWithEnvErr.Error(), http.StatusInternalServerError)
logger.LogErr("InitWithEnv", initWithEnvErr, nil)
return
}
// Receive payload that includes uid
var socialTokenReq social.TokenRefreshRequest
// Decode payload
socialTokenErr := json.NewDecoder(request.Body).Decode(&socialTokenReq)
if socialTokenErr != nil {
http.Error(writer, socialTokenErr.Error(), http.StatusInternalServerError)
logger.LogErr("SocialTokenReq Decoder", socialTokenErr, request)
return
}
// Go to Firestore and get the platforms for user
platsToRefresh, platformErr := getUserPlatformsToRefresh(socialTokenReq.UID)
if platformErr != nil {
http.Error(writer, platformErr.Error(), http.StatusInternalServerError)
logger.LogErr("GetUserPlatforms", platformErr, request)
return
}
if platsToRefresh != nil && len(*platsToRefresh) == 0 {
// No refresh needed, lets return this with no content
writer.WriteHeader(http.StatusNoContent)
return
}
// Run refresh token logic
refreshTokenResp := refreshTokens(*platsToRefresh)
	// Set the Content-Type header before writing the status code, otherwise it is ignored
	writer.Header().Set("Content-Type", "application/json")
	writer.WriteHeader(http.StatusOK)
json.NewEncoder(writer).Encode(refreshTokenResp)
}
// Helpers
// initWithEnv takes our yaml env variables and maps them properly.
// Unfortunately, we had to do this in main because in init we weren't able to access env variables
func initWithEnv() error {
// Get paths
var currentProject string
if os.Getenv("ENVIRONMENT") == "DEV" {
currentProject = os.Getenv("FIREBASE_PROJECTID_DEV")
} else if os.Getenv("ENVIRONMENT") == "PROD" {
currentProject = os.Getenv("FIREBASE_PROJECTID_PROD")
}
// Initialize Firestore
client, err := firestore.NewClient(context.Background(), currentProject)
if err != nil {
return fmt.Errorf("SocialTokenRefresh [Init Firestore]: %v", err)
}
// Initialize Sawmill
	sawmillLogger, err := sawmill.InitClient(currentProject, os.Getenv("GCLOUD_CONFIG"), os.Getenv("ENVIRONMENT"), "SocialTokenRefresh")
if err != nil {
log.Printf("SocialTokenRefresh [Init Sawmill]: %v", err)
}
firestoreClient = client
logger = sawmillLogger
return nil
}
func getUserPlatformsToRefresh(uid string) (*[]firebase.FirestoreSocialPlatform, error) {
// Go to Firebase and get document references for all social platforms
snapshot, snapshotErr := firestoreClient.Collection("users").Doc(uid).Get(context.Background())
if snapshotErr != nil {
return nil, fmt.Errorf(snapshotErr.Error())
}
// Grab socialPlatforms array
var firestoreUser firebase.FirestoreUser
dataToErr := snapshot.DataTo(&firestoreUser)
if dataToErr != nil {
return nil, fmt.Errorf(dataToErr.Error())
}
// WE SHOULD BE: checking to see which platforms need to be refreshed
// Currently we are not, just returning all platforms
socialPlatforms, fetchRefErr := fetchChildRefs(firestoreUser.SocialPlatforms)
if fetchRefErr != nil {
return nil, fmt.Errorf(fetchRefErr.Error())
}
// Return those platforms to main
return socialPlatforms, nil
}
// fetchChildRefs will convert document references to FirestoreSocialPlatform objects
func fetchChildRefs(refs []*firestore.DocumentRef) (*[]firebase.FirestoreSocialPlatform, error) {
docsnaps, err := firestoreClient.GetAll(context.Background(), refs)
if err != nil {
return nil, fmt.Errorf("fetchChildRefs: %v", err)
}
var socialPlatforms []firebase.FirestoreSocialPlatform
for _, userSnap := range docsnaps {
var socialPlatform firebase.FirestoreSocialPlatform
dataErr := userSnap.DataTo(&socialPlatform)
if dataErr != nil {
log.Printf("Encountered error while parsing userSnapshot.")
log.Printf("%v", dataErr)
continue
}
socialPlatforms = append(socialPlatforms, socialPlatform)
}
return &socialPlatforms, nil
}
// refreshTokens goes through social platform objects and refreshes tokens as necessary
func refreshTokens(socialPlatforms []firebase.FirestoreSocialPlatform) social.RefreshTokensResponse {
// Get current time
var currentTime = time.Now()
var refreshTokensResp = social.RefreshTokensResponse{
RefreshTokens: map[string]firebase.APIToken{},
}
for _, platform := range socialPlatforms {
fmt.Printf("Expires In: %d seconds\n", platform.APIToken.ExpiresIn)
fmt.Printf("Expired At: %s\n", platform.APIToken.ExpiredAt)
fmt.Printf("Created At: %s\n", platform.APIToken.CreatedAt)
expiredAtTime, expiredAtTimeErr := time.Parse(time.RFC3339, platform.APIToken.ExpiredAt)
if expiredAtTimeErr != nil {
fmt.Println(expiredAtTimeErr.Error())
continue
}
if currentTime.After(expiredAtTime) {
// Call API refresh
fmt.Printf("%s access token is expired. Calling Refresh...\n", platform.PlatformName)
refreshToken, tokenActionErr := refreshTokenAction(platform)
if tokenActionErr != nil {
fmt.Println(tokenActionErr.Error())
continue
}
var expiredAtStr = time.Now().Add(time.Second * time.Duration(refreshToken.ExpiresIn))
var refreshedAPIToken = firebase.APIToken{
CreatedAt: time.Now().Format(time.RFC3339),
ExpiredAt: expiredAtStr.Format(time.RFC3339),
ExpiresIn: refreshToken.ExpiresIn,
Token: refreshToken.AccessToken,
}
// Set refresh token in map
refreshTokensResp.RefreshTokens[platform.PlatformName] = refreshedAPIToken
// Set new token data in database
writeTokenErr := writeToken(platform.ID, refreshedAPIToken)
if writeTokenErr != nil {
fmt.Println(writeTokenErr.Error())
continue
}
}
}
return refreshTokensResp
}
// refreshTokenAction will call the API per platform and return the data needed
func refreshTokenAction(platform firebase.FirestoreSocialPlatform) (*spotifyRefreshTokenRes, error) {
var authStr = os.Getenv("SPOTIFY_CLIENTID") + ":" + os.Getenv("SPOTIFY_SECRET")
// Create Request
data := url.Values{}
data.Set("grant_type", "refresh_token")
data.Set("refresh_token", platform.RefreshToken)
refreshTokenReq, refreshTokenReqErr := http.NewRequest("POST", spotifyRefreshTokenURI,
strings.NewReader(data.Encode()))
if refreshTokenReqErr != nil {
return nil, fmt.Errorf(refreshTokenReqErr.Error())
}
refreshTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
refreshTokenReq.Header.Add("Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(authStr)))
customTokenResp, httpErr := httpClient.Do(refreshTokenReq)
if httpErr != nil {
return nil, fmt.Errorf(httpErr.Error())
}
// Decode the token to send back
var spotifyRefreshRes spotifyRefreshTokenRes
refreshTokenDecodeErr := json.NewDecoder(customTokenResp.Body).Decode(&spotifyRefreshRes)
if refreshTokenDecodeErr != nil {
return nil, fmt.Errorf(refreshTokenDecodeErr.Error())
}
// Make sure to add platform name here before continuing
spotifyRefreshRes.PlatformName = platform.PlatformName
return &spotifyRefreshRes, nil
}
// writeToken will write the new APIToken object to the social platform document
func writeToken(platformID string, token firebase.APIToken) error {
// Write new APIToken, ExpiredAt, ExpiresIn, CreatedAt
platformDoc := firestoreClient.Collection("social_platforms").Doc(platformID)
if platformDoc == nil {
return fmt.Errorf("platformId %s could not be found", platformID)
}
// Time to update
_, writeErr := platformDoc.Update(context.Background(), []firestore.Update{{Path: "apiToken", Value: token}})
if writeErr != nil {
return fmt.Errorf(writeErr.Error())
}
return nil
}
Repo: LeviHarrison/gruveebackend | Languages: Go Module, Go | Files: 12 | Branch: refs/heads/master
<file_sep>from tkinter import *
from tkinter import filedialog
from tkinter import font
import subprocess
root = Tk()
root.title("Wage IDE")
# Add main.iconbitmap()
root.geometry("1200x680")
# Global variables
global open_file_name, selected
open_file_name, selected = False, False
# New file function
def new_file(e):
# Delete previous text
my_text.delete("1.0", END)
# Update status bar
root.title("New File - Wage IDE")
status_bar.config(text="New file created ")
global open_file_name
open_file_name = False
# Open file function
def open_file(e):
# Grab filename
text_file = filedialog.askopenfilename(initialdir="C:/", title="Open File", filetypes=(("Text Files", "*.txt"), ("Python Files", "*.py"), ("Wage Files", "*.wage"), ("All Files", "*.*")))
# Check to see if there is a file name
if text_file:
global open_file_name
open_file_name = text_file
# Delete previous text
my_text.delete("1.0", END)
# Update status bar
name = text_file
status_bar.config(text="File opened ")
root.title(f'{name} - Wage IDE')
        # Open file
text_file = open(text_file, 'r')
text = text_file.read()
# Add file to text box
my_text.insert(END, text)
# Close the opened file
text_file.close()
# Save as file function
def save_as_file(e):
text_file = filedialog.asksaveasfilename(defaultextension=".*", initialdir="C:/", title="Save File", filetypes=(("Text Files", "*.txt"), ("Python Files", "*.py"), ("Wage Files", "*.wage"), ("All Files", "*.*")))
name = text_file
if text_file:
# Save the file
text_file = open(text_file, 'w')
text_file.write(my_text.get(1.0, END))
# Close the file
text_file.close()
# Update status bar
status_bar.config(text="File saved ")
root.title(f'{name} - Wage IDE')
# Save file function
def save_file(e):
global open_file_name
if open_file_name:
# Save the file
text_file = open(open_file_name, 'w')
text_file.write(my_text.get(1.0, END))
# Close the file
text_file.close()
# Update status bar
status_bar.config(text="File saved ")
root.title(f'{open_file_name} - Wage IDE')
else:
        save_as_file(False)
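# Run file function
# run_file shells out to the system "python" interpreter via subprocess and prints
# the captured stdout/stderr (returned as bytes) into the output pane.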
def run_file():
global open_file_name
if open_file_name:
command = f'python {open_file_name}'
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, error = process.communicate()
code_output.insert('1.0', output)
code_output.insert('1.0', error)
else:
        open_file(False)
# Cut text function
def cut_text(e):
global selected
    # Check to see if a keyboard shortcut was used
if e:
selected = root.clipboard_get()
else:
if my_text.selection_get():
            # Grab selected text from text box
selected = my_text.selection_get()
# Delete selected text from text box
my_text.delete("sel.first", "sel.last")
            # Clear the clipboard, then append
root.clipboard_clear()
root.clipboard_append(selected)
# Copy text function
def copy_text(e):
global selected
    # Check to see if a keyboard shortcut was used
if e:
selected = root.clipboard_get()
if my_text.selection_get():
        # Grab selected text from text box
selected = my_text.selection_get()
        # Clear the clipboard, then append
root.clipboard_clear()
root.clipboard_append(selected)
# Paste text function
def paste_text(e):
global selected
    # Check to see if a keyboard shortcut was used
if e:
selected = root.clipboard_get()
else:
if selected:
position = my_text.index(INSERT)
my_text.insert(position, selected)
# Create main frame
my_frame = Frame(root)
my_frame.pack(pady=5)
# Create scrollbar for the text box
text_scroll = Scrollbar(my_frame)
text_scroll.pack(side=RIGHT, fill=Y)
# Horizontal scrollbar
hor_scroll = Scrollbar(my_frame, orient='horizontal')
hor_scroll.pack(side=BOTTOM, fill=X)
# Create text box
my_text = Text(my_frame, width=97, height=25, font=("Helvetica", 16), selectbackground="yellow", selectforeground="black", undo=True, yscrollcommand=text_scroll.set, wrap="none", xscrollcommand=hor_scroll.set)
my_text.pack()
# Configure scrollbar
text_scroll.config(command=my_text.yview)
hor_scroll.config(command=my_text.xview)
# Create menu
my_menu = Menu(root)
root.config(menu=my_menu)
# Add file menu
file_menu = Menu(my_menu, tearoff=False)
my_menu.add_cascade(label="File", menu=file_menu)
file_menu.add_command(label="New", command=lambda: new_file(False), accelerator="Ctrl+N")
file_menu.add_command(label="Open", command=lambda: open_file(False), accelerator="Ctrl+O")
file_menu.add_command(label="Save", command=lambda: save_file(False), accelerator="Ctrl+S")
file_menu.add_command(label="Save As", command=lambda: save_as_file(False), accelerator="Ctrl+Shift+S")
file_menu.add_separator()
file_menu.add_command(label="Exit", command=root.quit)
# Add edit menu
edit_menu = Menu(my_menu, tearoff=False)
my_menu.add_cascade(label="Edit", menu=edit_menu)
edit_menu.add_command(label="Cut", command=lambda: cut_text(False), accelerator="Ctrl+X")
edit_menu.add_command(label="Copy", command=lambda: copy_text(False), accelerator="Ctrl+C")
edit_menu.add_command(label="Paste", command=lambda: paste_text(False), accelerator="Ctrl+V")
edit_menu.add_separator()
edit_menu.add_command(label="Undo", command=my_text.edit_undo, accelerator="Ctrl+Z")
edit_menu.add_command(label="Redo", command=my_text.edit_redo, accelerator="Ctrl+Y")
# Add run menu
run_menu = Menu(my_menu, tearoff=False)
my_menu.add_cascade(label="Run", menu=run_menu)
run_menu.add_command(label="Run", command=run_file)
# Add status bar to bottom of app
status_bar = Label(root, text="Ready ", anchor=E)
status_bar.pack(fill=X, side=BOTTOM, ipady=15)
#Edit bindings
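# Menu entries call the handlers above with a literal False, while the key bindings
# below pass a Tkinter event object; that is why every handler takes a single `e` argument.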
root.bind('<Control-n>', new_file)
root.bind('<Control-o>', open_file)
root.bind('<Control-s>', save_file)
root.bind('<Control-Shift-S>', save_as_file)
root.bind('<Escape>', quit)
root.bind('<Control-x>', cut_text)
root.bind('<Control-c>', copy_text)
root.bind('<Control-v>', paste_text)
code_output = Text(height=10)
code_output.pack()
root.mainloop()<file_sep># ------------------------------------------------------------
# calclex.py
#
# tokenizer for a simple expression evaluator for
# numbers and +,-,*,/
# ------------------------------------------------------------
from tokenize import Token
import ply.lex as lex
import ply.yacc as yacc
from ply.ctokens import tokens
# List of token names. This is always required
tokens = [
'INTEGER',
'FLOAT',
'BOOLEAN',
'PLUS',
'MINUS',
'MULTIPLICATION',
'POW',
'DIVIDE',
'LEFTOVER',
'INT_DIVISION',
#'OPERATOR',
'COMPARATOR',
'L_PAREN',
'R_PAREN',
'L_SQUARE_PAREN',
'R_SQUARE_PAREN',
'UP_SCOPE',
'DOWN_SCOPE',
'IDENTIFIER',
'COMMENT',
'EOL',
'INSTANCE'
]
reserved = {
'if' : 'IF',
'else' : 'ELSE',
'for' : 'FOR',
}
tokens=list(reserved.values())+tokens
# Regular expression rules for simple tokens
t_PLUS = r'\+'
t_MINUS = r'-'
t_MULTIPLICATION = r'\*'
t_DIVIDE = r'/'
t_INT_DIVISION = r'//'
t_LEFTOVER = r'%'
t_L_PAREN = r'\('
t_R_PAREN = r'\)'
t_L_SQUARE_PAREN = r'\['
t_R_SQUARE_PAREN = r'\]'
t_UP_SCOPE=r'{'
t_DOWN_SCOPE=r'}'
t_POW=r'\*\*'
t_ignore = ' \t'
t_EOL=r';'
t_INSTANCE=r'='
#Some complex regex
digit = r'([0-9])'
nonzerodigit = r'([1-9])'
nondigit = r'([_A-Za-z])'
floatnumber =r'( '+r'(' +nonzerodigit+ r'\.'+r'(' +digit+r')+'+ r')' + r'|' r'(' + r'(' +digit+r')+'+r'\.'+r'(' +digit+r')+'+r')' +r')'
identifier = r'(' + nondigit + r'(' + digit + r'|' + nondigit + r')*)'
boolean=r'TRUE'+r'|'+r'FALSE'
multiplication=r'\*'
division=r'/'
plus=r'\+'
minus=r'-'
int_division=r'//'
leftover=r'%'
equals=r'=='
lower=r'<'
higher=r'>'
lower_or_equal=r'<='
higher_or_equal=r'>='
pow=r'\*\*'
operators=r'('+pow+r'|'+multiplication+r'|'+division+r'|'+int_division+r'|'+leftover+r'|'+plus+r'|'+minus+r')'
comparator=r'('+equals+r'|'+lower+r'|'+lower_or_equal+r'|'+higher+r'|'+higher_or_equal+r')'
@lex.Token(comparator)
def t_COMPARATOR(t):
return t
'''
@lex.Token(floatnumber)
def t_OPERATOR(t):
return t
'''
@lex.Token(boolean)
def t_BOOLEAN(t):
return t
@lex.Token(floatnumber)
def t_FLOAT(t):
t.value=float(t.value)
return t
def t_INTEGER(t):
r'\d+'
t.value = int(t.value)
return t
def t_COMMENT(t):
r'\#\#.*'
pass
@lex.TOKEN(identifier)
def t_IDENTIFIER(t):
t.type = reserved.get(t.value,'IDENTIFIER') # Check for reserved words
return t
def t_newline(t):
r'\n+'
t.lexer.lineno += len(t.value)
# Error handling rule
def t_error(t):
print("Illegal character '%s'" % t.value[0])
t.lexer.skip(1)
# Build the lexer
data = '''
{var=12;var1=2**5;}
'''
lexer = lex.lex()
#lexer.input(data)
# Tokenize
'''
while True:
tok = lexer.token()
if not tok:
break # No more input
print(tok.type,tok.value)
'''
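# Parser rules
# The grammar encodes precedence structurally: a statement is either a braced scope
# or an assignment/expression terminated by ';'. expression handles + and -, term
# handles *, / and %, term2 handles ** and //, and factor/factor2 are numbers or
# parenthesised sub-expressions, so tighter-binding operators live deeper in the chain.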
def p_statement_scope(p):
'statement : scope'
p[0]=p[1]
def p_statement_assign_math(p):
'statement : IDENTIFIER INSTANCE expression EOL'
p[0]=["ASSIGMENT",p[1],p[3]]
def p_statement_assign_boolean(p):
'statement : IDENTIFIER INSTANCE BOOLEAN EOL'
p[0]=["ASSIGMENT",p[1],p[3]]
def p_statement_expression(p):
'statement : expression EOL'
p[0]=p[1]
def p_scope(p):
'scope : UP_SCOPE group DOWN_SCOPE'
p[0]=["Scope",p[2]]
def p_group(p):
'group : nombre'
p[0]=p[1]
def p_nombre_final(p):
'nombre : statement'
p[0]=p[1]
def p_nombre_init(p):
'nombre : statement nombre'
p[0]=[p[1],p[2]]
def p_expression_plus(p):
'expression : expression PLUS term'
p[0] = p[1] + p[3]
def p_expression_minus(p):
'expression : expression MINUS term'
p[0] = p[1] - p[3]
def p_expression_term(p):
'expression : term'
p[0] = p[1]
def p_term_times(p):
'term : term MULTIPLICATION term2'
p[0] = p[1] * p[3]
def p_term_leftover(p):
'term : term LEFTOVER term2'
p[0]=p[1]%p[3]
def p_term_div(p):
'term : term DIVIDE term2'
p[0] = p[1] / p[3]
def p_term_factor(p):
'term : factor'
p[0] = p[1]
def p_factor_integer(p):
'factor : INTEGER'
p[0] = p[1]
def p_factor_float(p):
'factor : FLOAT'
p[0] = p[1]
def p_factor_expr(p):
'factor : L_PAREN expression R_PAREN'
p[0] = p[2]
def p_term_term2(p):
'term : term2'
p[0]=p[1]
def p_term2_pow(p):
'term2 : term2 POW factor2'
p[0]=p[1]**p[3]
def p_term2_integer_division(p):
'term2 : term2 INT_DIVISION factor2'
p[0]=p[1]//p[3]
def p_term2_factor2(p):
'term2 : factor2'
p[0] = p[1]
def p_factor2_integer(p):
'factor2 : INTEGER'
p[0] = p[1]
def p_factor2_float(p):
'factor2 : FLOAT'
p[0] = p[1]
def p_factor2_expr(p):
'factor2 : L_PAREN expression R_PAREN'
p[0] = p[2]
def p_term2_factor(p):
'term2 : factor'
p[0]=p[1]
# Error rule for syntax errors
def p_error(p):
print(p)
print("Syntax error in input!")
# Build the parser
parser = yacc.yacc()
while True:
try:
file=open("fuente.pn",'r')
#s =file.read()
#s="{var=12;var1=2**5;}"
s=input('calc> ')
print(s)
except EOFError:
break
if not s: continue
result = parser.parse(s,lexer=lexer)
print(result)
Repo: lazhero/LedAnimator | Languages: Python | Files: 2 | Branch: refs/heads/master
<repo_name>herrkris/wordpress-capistrano<file_sep>/content/mu-plugins/index.php
<?php
register_theme_directory( ABSPATH . 'wp-content/themes' );<file_sep>/content/plugins/WP-CLI-Migrate-master/README.md
WP-CLI-Migrate
==============
A database migration command for WP-CLI. Installs like an ordinary plugin.<file_sep>/config/config.SAMPLE.rb
set :application, 'APPLICATION-NAME'
set :repo_url, 'REPO URL'
set :local_url, "LOCAL WORDPRESS URL"
set :deploy_via, :remote_cache
set :copy_exclude, [".git", ".DS_Store", ".gitignore", ".gitmodules"]
set :scm, :git
set :use_sudo, false
# Database
# Set the values for host, user, pass, and name for both production and staging.
set :wpdb, {
:production => {
:host => 'PRODUCTION DB HOST',
:user => 'PRODUCTION DB USER',
:password => '<PASSWORD>',
:name => 'PRODUCTION DB NAME',
},
:staging => {
:host => 'STAGING DB HOST',
:user => 'STAGING DB USER',
:password => '<PASSWORD>',
:name => 'STAGING DB NAME',
},
:local => {
:host => 'LOCAL DB HOST',
:user => 'LOCAL DB USER',
:password => '<PASSWORD>',
:name => 'LOCAL DB NAME',
},
}<file_sep>/README.md
# WordPress Capistrano (3) Deploy
Easy deployment for WordPress sites including your database.
## Requirements
1. Your production and staging servers have to be accessible via ssh
2. Git has to be installed in every environment
3. The same applies to wp-cli (http://wp-cli.org/#install)
## Setup
For the capistrano deployment to run properly you have to make some preparations to your local environment:
1. Get the latest WordPress version with `git submodule init` and `git submodule update`
2. capistrano must be installed: `bundler install`
3. Rename local-config.SAMPLE.php to local-config.php and replace the placeholders with your database credentials
4. Rename config.SAMPLE.rb to config.rb (located in the config directory). For every stage you have to edit your database credentials. Pay attention to the :local_url variable!
5. Rename the files in config/deploy accordingly and edit the variables so they match the environment
6. Activate the WP Migrate DB and WP Migrate DB CLI plugin
## Tasks
### `bundle exec cap staging deploy`
Deploy your current code base to the staging server
### `bundle exec cap staging wordpress:db:pull`
Copy the staging database to your local database.
### `bundle exec cap staging wordpress:db:push`
Copy the local database to the staging server
The tasks `wordpress:db:pull` and `wordpress:db:push` will find the remote url and replace it with your local url, so keeping your database in sync is a no-brainer.
## Todo
* WordPress Multisite Deployment
* Keeping upload folders in sync
* ~~WordPress as a submodule~~ it's ugly, but it gets the job done ...
<file_sep>/content/plugins/WP-CLI-Migrate-master/lib/migrate.php
<?php
/**
* Run database migrations using the search and replace powers of wp-migrate-db
*
* @package wp-cli
* @subpackage commands/community
* @author <NAME> <<EMAIL>>
*/
class WP_Migrate_DB_Command extends WP_CLI_Command {
/**
* Run database migrations using the search and replace powers of wp-migrate-db.
*
* ## OPTIONS
*
* <outputfolder>
* : Destination folder on new host.
*
* <outputhost>
* : Destination url on new host.
*
* <filename>
* : Output filename for SQL dump
* Use STDOUT as filename to write to STDOUT instead.
*
* ## EXAMPLES
*
* wp migrate to /var/www/example.com http://example.com filename.sql
* wp migrate to /var/www/example.com http://example.com STDOUT
*
* @synopsis <outputfolder> <outputhost> <filename>
*/
function to( $args , $assoc_args ) {
$wpmdb = new CLI_Migrate( $args );
$wpmdb->migrate();
WP_CLI::success( $wpmdb->get_filename() );
}
/**
* Help function
*/
public static function help() {
WP_CLI::line( <<<EOB
usage: wp migrate to [output folder] [output host] [filename]
For example, to migrate your db to example.com, where the files are stored in /var/www/example.com:
wp migrate to /var/www/example.com http://example.com filename.sql
To write to STDOUT pass that as the filename, eg:
wp migrate to /var/www/example.com http://example.com STDOUT
EOB
);
}
}
/**
* WP-CLI API Adapter for WP_Migrate_DB
*/
class CLI_Migrate extends WP_Migrate_DB {
private $new_url;
private $new_path;
private $old_path;
private $old_url;
public $filename;
function __construct( $args ) {
parent::__construct();
if ( ! isset( $args[0] ) || ! isset( $args[1] ) ) {
WP_Migrate_DB_Command::help();
die();
}
$this->new_path = trailingslashit( $args[0] );
$this->new_url = $args[1];
$this->old_path = ABSPATH;
$this->old_url = get_bloginfo( 'url' );
// WP-Migrate-DB relies on the $_POST global for its settings
// so we have to spoof it here.
$_POST = array(
'old_path' => $this->old_path,
'new_path' => $this->new_path,
'old_url' => $this->old_url,
'new_url' => $this->new_url
);
if ( isset( $args[2] ) ) {
$this->filename = $args[2];
}
}
function migrate() {
if ( $this->filename == 'STDOUT' )
$this->fp = fopen( 'php://stdout', 'w' );
else
$this->fp = $this->open( $this->filename );
$this->db_backup_header();
$this->db_backup();
$this->close( $this->fp );
}
function get_filename() {
return $this->filename;
}
function db_backup() {
global $table_prefix, $wpdb;
$tables = $wpdb->get_results("SHOW TABLES", ARRAY_N);
$tables = array_map(create_function('$a', 'return $a[0];'), $tables);
foreach ($tables as $table) {
WP_CLI::line( "Migrating ${table}..." );
// Increase script execution time-limit to 15 min for every table.
if ( !ini_get('safe_mode')) @set_time_limit(15*60);
// Create the SQL statements
$this->stow("# --------------------------------------------------------\n");
$this->stow("# " . sprintf(__('Table: %s','wp-migrate-db'),$this->backquote($table)) . "\n");
$this->stow("# --------------------------------------------------------\n");
$this->backup_table($table);
}
if (count($this->errors)) {
return false;
} else {
return true;
}
}
function stow($query_line, $replace = true) {
if ($this->gzip()) {
if(! @gzwrite($this->fp, $query_line))
$this->errors['file_write'] = __('There was an error writing a line to the backup script:','wp-db-backup') . ' ' . $query_line . ' ' . $php_errormsg;
} else {
if(false === @fwrite($this->fp, $query_line))
				$this->errors['file_write'] = __('There was an error writing a line to the backup script:','wp-db-backup') . ' ' . $query_line . ' ' . $php_errormsg;
}
}
function db_backup_header() {
$this->stow("# " . __('WordPress MySQL database migration','wp-migrate-db') . "\n", false);
$this->stow("# " . sprintf(__('From %s to %s','wp-migrate-db'), $this->old_url, $this->new_url) . "\n", false);
$this->stow("#\n", false);
$this->stow("# " . sprintf(__('Generated: %s','wp-migrate-db'),date("l j. F Y H:i T")) . "\n", false);
$this->stow("# " . sprintf(__('Hostname: %s','wp-migrate-db'),DB_HOST) . "\n", false);
$this->stow("# " . sprintf(__('Database: %s','wp-migrate-db'),$this->backquote(DB_NAME)) . "\n", false);
$this->stow("# --------------------------------------------------------\n\n", false);
}
}
WP_CLI::add_command( 'migrate', 'WP_Migrate_DB_Command' );<file_sep>/Gemfile
gem 'capistrano', '~> 3.0.1'<file_sep>/content/plugins/WP-CLI-Migrate-master/wp-cli-migrate.php
<?php
/*
Plugin Name: WP Migrate DB CLI Interface
Description: Adds a migrate command to wp-cli. Depends on WP Migrate DB.
Author: <NAME>
Version: 0.1
Author URI: http://duncanjbrown.com
*/
function wp_migrate_db_cli_init() {
if ( !class_exists( 'WP_Migrate_DB' ) ) {
return;
}
if ( defined('WP_CLI') && WP_CLI ) {
include dirname(__FILE__) . '/lib/migrate.php';
}
}
add_action( 'plugins_loaded', 'wp_migrate_db_cli_init' );
Repo: herrkris/wordpress-capistrano | Languages: Markdown, Ruby, PHP | Files: 7 | Branch: refs/heads/master
<file_sep>using System.Collections.Generic;
namespace TechRecruiting.Models
{
public sealed class Recruiter : Person
{
public List<Candidate> Candidates { get; set; }
}
}<file_sep>using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Microsoft.Azure.Documents.Linq;
using Microsoft.Azure.Graphs;
using Microsoft.Azure.Graphs.Elements;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace TechRecruiting.Web.Data
{
public sealed class ReportData : GraphData
{
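        // GetPoachers: from the given recruiter vertex, follow outgoing 'acquaintance' edges to that
        // recruiter's candidates, then collect the incoming 'likes' edges on those candidates and
        // group them by the id of the vertex the like comes from, mapping to the liked candidate ids.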
public async Task<ILookup<string, string>> GetPoachers(string recruiterId)
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> query = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V('{recruiterId}').outE('acquaintance').inV().inE('likes')"
);
List<Edge> edges = new List<Edge>();
while (query.HasMoreResults)
{
edges.AddRange(await query.ExecuteNextAsync<Edge>());
}
return edges.ToLookup(e => e.OutVertexId.ToString(), e => e.InVertexId.ToString());
}
}
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using System.Web.Mvc;
using TechRecruiting.Models;
using TechRecruiting.Web.Data;
namespace TechRecruiting.Web.Controllers
{
[HandleError]
public class RecruitersController : Controller
{
private RecruiterData _recruiterData;
public RecruitersController(RecruiterData recruiterData)
{
_recruiterData = recruiterData;
}
[HttpGet]
[Route("~/recruiters", Name = "ListRecruiters")]
public async Task<ActionResult> Index()
{
IEnumerable<Recruiter> model = await _recruiterData.GetRecruiters();
return View(model);
}
[HttpGet]
[Route("~/recruiters/{id}", Name = "GetRecruiters")]
public async Task<ActionResult> Get(string id)
{
Recruiter model = await _recruiterData.GetRecruiterWithCandidates(id);
return View(model);
}
[HttpGet]
[Route("~/recruiters/create", Name = "CreateRecruiter")]
public ActionResult Add()
{
return View(new Recruiter());
}
[HttpPost]
[Route("~/recruiters/create", Name = "PersistRecruiter")]
public async Task<ActionResult> Add(Recruiter model)
{
await _recruiterData.PersistRecruiter(model);
return RedirectToRoute("ListRecruiters");
}
}
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using System.Web.Mvc;
using TechRecruiting.Models;
using TechRecruiting.Web.Data;
namespace TechRecruiting.Web.Controllers
{
[HandleError]
public class CandidatesController : Controller
{
private CandidateData _candidateData;
public CandidatesController(CandidateData candidateData)
{
_candidateData = candidateData;
}
[HttpGet]
[Route("~/candidates", Name = "ListCandidates")]
public async Task<ActionResult> Index()
{
IList<Candidate> model = await _candidateData.GetCandidates();
return View(model);
}
[HttpGet]
[Route("~/candidates/{id}", Name = "GetCandidate")]
public async Task<ActionResult> Get(string id)
{
Candidate model = await _candidateData.GetCandidateWithFriendships(id);
return View(model);
}
[HttpGet]
[Route("~/candidates/create", Name = "CreateCandidate")]
public ActionResult Add()
{
return View(new Candidate());
}
[HttpPost]
[Route("~/candidates/create", Name = "PersistCandidate")]
public async Task<ActionResult> Add(Candidate model)
{
await _candidateData.PersistCandidate(model);
return RedirectToRoute("ListCandidates");
}
}
}<file_sep>using Microsoft.Azure.Graphs.Elements;
using System.Collections.Generic;
using System.Reflection;
namespace TechRecruiting.Models
{
public static class GraphExtensions
{
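        // ToStrongType copies each Gremlin vertex onto a new T: the vertex id becomes Id, and every
        // vertex property is assigned to the C# property of the same name via reflection, so the
        // property keys stored in the graph must match the model's property names exactly.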
public static List<T> ToStrongType<T>(this IList<Vertex> vertices) where T : IEntity, new()
{
List<T> results = new List<T>();
foreach (Vertex vertex in vertices)
{
T result = new T
{
Id = vertex.Id.ToString()
};
foreach (VertexProperty property in vertex.GetVertexProperties())
{
PropertyInfo propertyInfo = typeof(T).GetProperty(property.Key);
propertyInfo.SetValue(result, property.Value);
}
results.Add(result);
}
return results;
}
}
}<file_sep>using System.Collections.Generic;
namespace TechRecruiting.Models
{
public sealed class Candidate : Person
{
public string SkillDescription { get; set; }
public List<Candidate> Friends { get; set; }
}
}<file_sep>namespace TechRecruiting.Models
{
public class Acquaintance
{
public string SourcePersonId { get; set; }
public string DestinationPersonId { get; set; }
}
}<file_sep>using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Microsoft.Azure.Documents.Linq;
using Microsoft.Azure.Graphs;
using Microsoft.Azure.Graphs.Elements;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using TechRecruiting.Models;
namespace TechRecruiting.Web.Data
{
public sealed class CandidateData : GraphData
{
public async Task<IList<Candidate>> GetCandidates()
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> query = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V().hasLabel('candidate')"
);
List<Vertex> vertices = new List<Vertex>();
while (query.HasMoreResults)
{
vertices.AddRange(await query.ExecuteNextAsync<Vertex>());
}
return vertices.Select(vertex =>
{
IEnumerable<VertexProperty> props = vertex.GetVertexProperties();
return new Candidate
{
Id = vertex.Id.ToString(),
FirstName = props.SingleOrDefault(p => p.Key == "firstName")?.Value?.ToString(),
LastName = props.SingleOrDefault(p => p.Key == "lastName")?.Value?.ToString(),
SkillDescription = props.SingleOrDefault(p => p.Key == "skillDescription")?.Value?.ToString()
};
}
).ToList<Candidate>();
}
public async Task<Candidate> GetCandidateWithFriendships(string candidateId)
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> candidateQuery = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V('{candidateId}').hasLabel('candidate')"
);
Vertex vertex = null;
while (candidateQuery.HasMoreResults)
{
vertex = (await candidateQuery.ExecuteNextAsync<Vertex>()).First();
}
IEnumerable<VertexProperty> props = vertex.GetVertexProperties();
Candidate candidate = new Candidate
{
Id = vertex.Id.ToString(),
FirstName = props.SingleOrDefault(p => p.Key == "firstName")?.Value?.ToString(),
LastName = props.SingleOrDefault(p => p.Key == "lastName")?.Value?.ToString(),
SkillDescription = props.SingleOrDefault(p => p.Key == "skillDescription")?.Value?.ToString(),
Friends = new List<Candidate>()
};
IDocumentQuery<Vertex> friendsQuery = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V('{candidateId}').hasLabel('candidate').outE('acquaintance').inV().hasLabel('candidate')"
);
List<Vertex> vertices = new List<Vertex>();
while (friendsQuery.HasMoreResults)
{
vertices.AddRange(await friendsQuery.ExecuteNextAsync<Vertex>());
}
foreach(var item in vertices)
{
IEnumerable<VertexProperty> itemProps = item.GetVertexProperties();
candidate.Friends.Add(new Candidate
{
Id = item.Id.ToString(),
FirstName = itemProps.SingleOrDefault(p => p.Key == "firstName")?.Value?.ToString(),
LastName = itemProps.SingleOrDefault(p => p.Key == "lastName")?.Value?.ToString(),
SkillDescription = itemProps.SingleOrDefault(p => p.Key == "skillDescription")?.Value?.ToString()
}
);
}
return candidate;
}
public async Task<Candidate> PersistCandidate(Candidate candidate)
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> query = client.CreateGremlinQuery<Vertex>(
collection,
$"g.addV('candidate').property('id', '{candidate.Id}').property('firstName', '{candidate.FirstName}').property('lastName', '{candidate.LastName}').property('skillDescription', '{candidate.SkillDescription}')"
);
Vertex vertex = null;
while (query.HasMoreResults)
{
vertex = (await query.ExecuteNextAsync<Vertex>()).First();
}
candidate.Id = vertex.Id.ToString();
return candidate;
}
}
}<file_sep>using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using System;
using System.Configuration;
using System.Threading.Tasks;
namespace TechRecruiting.Web.Data
{
public abstract class GraphData
{
private readonly string _endpointUrl = ConfigurationManager.AppSettings["CosmosEndpointUrl"];
private readonly string _accountKey = ConfigurationManager.AppSettings["CosmosAccountKey"];
protected readonly string _databaseName = ConfigurationManager.AppSettings["CosmosDatabaseName"];
protected readonly string _graphName = ConfigurationManager.AppSettings["CosmosGraphName"];
protected async Task<DocumentCollection> GetDocumentCollectionAsync(DocumentClient client)
{
Database database = await client.CreateDatabaseIfNotExistsAsync(new Database { Id = _databaseName });
DocumentCollection collection = await client.CreateDocumentCollectionIfNotExistsAsync(database.SelfLink, new DocumentCollection { Id = _graphName }, new RequestOptions { OfferThroughput = 400 });
return collection;
}
protected DocumentClient GetDocumentClient()
{
return new DocumentClient(new Uri(_endpointUrl), _accountKey);
}
}
}<file_sep>using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Microsoft.Azure.Documents.Linq;
using Microsoft.Azure.Graphs;
using Microsoft.Azure.Graphs.Elements;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using TechRecruiting.Models;
namespace TechRecruiting.Web.Data
{
public sealed class RecruiterData : GraphData
{
public async Task<IEnumerable<Recruiter>> GetRecruiters()
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> query = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V().hasLabel('recruiter')"
);
List<Vertex> vertices = new List<Vertex>();
while (query.HasMoreResults)
{
vertices.AddRange(await query.ExecuteNextAsync<Vertex>());
}
return vertices.Select(vertex =>
{
IEnumerable<VertexProperty> props = vertex.GetVertexProperties();
return new Recruiter
{
Id = vertex.Id.ToString(),
FirstName = props.SingleOrDefault(p => p.Key == "firstName")?.Value?.ToString(),
LastName = props.SingleOrDefault(p => p.Key == "lastName")?.Value?.ToString()
};
});
}
public async Task<Recruiter> GetRecruiterWithCandidates(string recruiterId)
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> candidateQuery = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V('{recruiterId}').hasLabel('recruiter')"
);
Vertex vertex = null;
while (candidateQuery.HasMoreResults)
{
vertex = (await candidateQuery.ExecuteNextAsync<Vertex>()).First();
}
IEnumerable<VertexProperty> props = vertex.GetVertexProperties();
Recruiter candidate = new Recruiter
{
Id = vertex.Id.ToString(),
FirstName = props.SingleOrDefault(p => p.Key == "firstName")?.Value?.ToString(),
LastName = props.SingleOrDefault(p => p.Key == "lastName")?.Value?.ToString(),
Candidates = new List<Candidate>()
};
IDocumentQuery<Vertex> friendsQuery = client.CreateGremlinQuery<Vertex>(
collection,
$"g.V('{recruiterId}').hasLabel('recruiter').outE('acquaintance').inV().hasLabel('candidate')"
);
List<Vertex> vertices = new List<Vertex>();
while (friendsQuery.HasMoreResults)
{
vertices.AddRange(await friendsQuery.ExecuteNextAsync<Vertex>());
}
foreach (var item in vertices)
{
IEnumerable<VertexProperty> itemProps = item.GetVertexProperties();
candidate.Candidates.Add(new Candidate
{
Id = item.Id.ToString(),
FirstName = itemProps.SingleOrDefault(p => p.Key == "firstName")?.Value?.ToString(),
LastName = itemProps.SingleOrDefault(p => p.Key == "lastName")?.Value?.ToString(),
SkillDescription = itemProps.SingleOrDefault(p => p.Key == "skillDescription")?.Value?.ToString()
}
);
}
return candidate;
}
public async Task<Recruiter> PersistRecruiter(Recruiter recruiter)
{
DocumentClient client = GetDocumentClient();
DocumentCollection collection = await GetDocumentCollectionAsync(client);
IDocumentQuery<Vertex> query = client.CreateGremlinQuery<Vertex>(
collection,
$"g.addV('candidate').property('id', '{recruiter.Id}').property('firstName', '{recruiter.FirstName}').property('lastName', '{recruiter.LastName}')"
);
Vertex vertex = null;
while (query.HasMoreResults)
{
vertex = (await query.ExecuteNextAsync<Vertex>()).First();
}
recruiter.Id = vertex.Id.ToString();
return recruiter;
}
}
}<file_sep>using Microsoft.Practices.Unity;
using Microsoft.Practices.Unity.Mvc;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;
namespace TechRecruiting.Web
{
public class MvcApplication : HttpApplication
{
protected void Application_Start()
{
AreaRegistration.RegisterAllAreas();
RegisterGlobalFilters(GlobalFilters.Filters);
RegisterRoutes(RouteTable.Routes);
ConfigureUnity();
void RegisterGlobalFilters(GlobalFilterCollection filters)
{
filters.Add(new HandleErrorAttribute());
}
void RegisterRoutes(RouteCollection routes)
{
routes.IgnoreRoute("{resource}.axd/{*pathInfo}");
routes.MapMvcAttributeRoutes();
}
void ConfigureUnity()
{
IUnityContainer container = new UnityContainer();
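// No type registrations are made yet; controller dependencies (e.g. the graph data services) would be registered on the container here.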
DependencyResolver.SetResolver(new UnityDependencyResolver(container));
}
}
}
}<file_sep>namespace TechRecruiting.Models
{
public class Portrait : IEntity
{
public string Id { get; set; }
public string ImageUrl { get; set; }
public string ImageAuthorName { get; set; }
public string ImageAuthorId { get; set; }
public string ImageSourceId { get; set; }
}
}
|
2f0f06d6d6331e4edd9b368c5823e3fcca70a475
|
[
"C#"
] | 12 |
C#
|
edx-labs/cosmos-graph-web
|
54863c51977fb08f014b76b25c1fc350c621e4f3
|
2114ea293433d7c579521b60bce357b33f16c67a
|
refs/heads/master
|
<repo_name>nyuuke/SHA1-hash_Algorithm<file_sep>/main.js
var btnhash = document.querySelector('.btn');
let getValue = () => {
var hashInput = document.querySelector('.input').value;
if (hashInput.length == 0) {
console.log('error');
} else {
console.log(hashInput);
// sha1() is expected to be provided by the hashing script loaded alongside this file
var crypted = sha1(hashInput);
alert(crypted);
}
return hashInput;
};
btnhash.addEventListener('click', getValue);
|
699ac176983072652a1fcf4dc54cfc87f1fd4884
|
[
"JavaScript"
] | 1 |
JavaScript
|
nyuuke/SHA1-hash_Algorithm
|
9a0b3d301c0eb24fd9cc4969e9e239df362e81bf
|
d59a7a7adde0f4cbead039f0fd1f64b7cb31c7e5
|
refs/heads/master
|
<repo_name>ricardoantonello/autonomous-car-raspiberry-opencv-python<file_sep>/README.md
# Autonomous Vehicle with OpenCV, Raspberry Pi 3, Arduino and Python!
Authors: <NAME>, <NAME>, <NAME> and <NAME>.
Contact: <EMAIL>
Acknowledgements: Call (Edital) 20/2017 PIBIC-EM IFC. Instituto Federal Catarinense Campus Luzerna.
<file_sep>/carro_autonomo.ino
//Author: Antonello
/**
This sketch receives commands over the serial link from the Raspberry Pi and drives the motors through the AF_Motor shield on the Arduino.
*/
#include <AFMotor.h>
AF_DCMotor m1(1); //Selects motor 1
AF_DCMotor m2(2); //Selects motor 2
AF_DCMotor m3(3); //Selects motor 3
AF_DCMotor m4(4); //Selects motor 4
//Global variables
int comando_serial=0; // -1=reverse 0=stop 1=forward 2=right 3=left
int tempo_acionamento=10; // how long a movement is applied before the next serial read
/** tipo_mov = 0/stop 1/forward -1/reverse */
void mov(int motor, int tipo_mov){
mov(motor, tipo_mov, 150); //default speed = 150
}
void mov(int motor, int tipo_mov, int vel){
if(tipo_mov==1){
if(motor==1) {
m1.setSpeed(vel); //Set the maximum speed
m1.run(FORWARD); //Run the motor forward
} else if(motor==2) {
m2.setSpeed(vel); //Set the maximum speed
m2.run(FORWARD); //Run the motor forward
} else if(motor==3) {
m3.setSpeed(vel); //Set the maximum speed
m3.run(FORWARD); //Run the motor forward
} else if(motor==4) {
m4.setSpeed(vel); //Set the maximum speed
m4.run(FORWARD); //Run the motor forward
}
} else if(tipo_mov==-1) {
if(motor==1) {
m1.setSpeed(vel); //Set the maximum speed
m1.run(BACKWARD); //Run the motor in reverse
} else if(motor==2) {
m2.setSpeed(vel); //Set the maximum speed
m2.run(BACKWARD); //Run the motor in reverse
} else if(motor==3) {
m3.setSpeed(vel); //Set the maximum speed
m3.run(BACKWARD); //Run the motor in reverse
} else if(motor==4) {
m4.setSpeed(vel); //Set the maximum speed
m4.run(BACKWARD); //Run the motor in reverse
}
} else {
if(motor==1) {
m1.run(RELEASE); //Stop the motor
} else if(motor==2) {
m2.run(RELEASE); //Stop the motor
} else if(motor==3) {
m3.run(RELEASE); //Stop the motor
} else if(motor==4) {
m4.run(RELEASE); //Stop the motor
}
}
}
void parar(){
mov(1,0); mov(2,0); mov(3,0); mov(4,0);
}
void frente(){
mov(1,1); mov(2,1); mov(3,1); mov(4,1);
}
void re(){
mov(1,-1); mov(2,-1); mov(3,-1); mov(4,-1);
}
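// Turning is tank-style: the motors on one side run forward while the other side runs in reverse.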
void direita(){
mov(1,-1); mov(2,-1); mov(3,1); mov(4,1);
}
void esquerda(){
mov(1,1); mov(2,1); mov(3,-1); mov(4,-1);
}
void setup(){
Serial.begin(9600);
Serial.setTimeout(1); // in ms; essential so Serial.parseInt() does not delay the loop
pinMode(LED_BUILTIN, OUTPUT);
}
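// Main loop: read an integer command from the serial link and keep applying the last one until a new command arrives.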
void loop(){
// Leitura da Serial
if (Serial.available() > 0) {
//digitalWrite(LED_BUILTIN, HIGH); //liga led
comando_serial = Serial.parseInt();
//Serial.println(i);
}
if(comando_serial==-1){
//Serial.println("Parar");
re();
}else if(comando_serial==0){
//Serial.println("Parar");
parar();
}else if(comando_serial==1){
//Serial.println("Frente");
frente();
}else if(comando_serial==2){
//Serial.println("Direita");
direita();
}else if(comando_serial==3){
//Serial.println("Esquerda");
esquerda();
}else{
Serial.print("ERRO: comando serial desconhecido! Recebido: ");
Serial.println(comando_serial);
}
}
<file_sep>/autonomous_car_v2.py
# coding: utf-8
# Autor: <NAME>
# Site: cv.antonello.com.br
# E-mail: <EMAIL>
# import the necessary packages
import time
import cv2
import serial
#Imports para a funcao preprocessamento1()
import numpy as np
from skimage import morphology
from skimage.morphology import skeletonize
from skimage.morphology import medial_axis
#Imports para a funcao calcula_angulo
from math import atan, pi
# Definições globais
ser = serial.Serial('/dev/ttyUSB0', 9600)
global_threshold=20
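# Each motor command below is written to the Arduino sketch (carro_autonomo.ino) as a single integer over the serial link.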
def velocidade(v):
ser.write(str(v).encode())
ser.flush()
def re():
ser.write(str('-1').encode())
ser.flush()
def parar():
ser.write(str('0').encode())
ser.flush()
def frente():
ser.write(str('1').encode())
ser.flush()
def direita():
ser.write(str('2').encode())
ser.flush()
def esquerda():
ser.write(str('3').encode())
ser.flush()
def texto(img, texto, coord, fonte = cv2.FONT_HERSHEY_SIMPLEX, cor=(0,0,255), tamanho=0.5, thickness=2):
textSize, baseline = cv2.getTextSize(texto, fonte, tamanho, thickness);
cor_background = 0
if type(cor)==int: # se não for colorida a imagem
cor_background=255-cor
else:
cor_background=(0,255,255)
#print(cor_background)
cv2.rectangle(img, (coord[0], coord[1]-textSize[1]-3), (coord[0]+textSize[0], coord[1]+textSize[1]-baseline), cor_background, -1)
#cv2.putText(img, texto, coord, fonte, tamanho, cor_background, thickness+1, cv2.LINE_AA)
cv2.putText(img, texto, coord, fonte, tamanho, cor, thickness, cv2.LINE_AA)
return img
def preprocessamento1(im):
#Aplicação do Skeleton na imagem
s = cv2.GaussianBlur(im, (15, 15), 0)
im = cv2.Canny(s, 50, 150)
#print(canny)
#Retira 30 pixels das bordas para evitar distorção
im=im[30:-30,30:-30] #Retira da imagem uma borda de 30 pixels
#im=im.astype(np.int32) #Converte imagem para 'int32'
return im
def calculo_angular_artigo_2016(im):
#Varre a imagem em sentido vertical em 10 pontos equidistantes
h, w = im.shape;
print("Imagem reduzida para processamento: Largura:", w, " Altura:", h, "Tipo:", im.dtype, type(im))
pontos = []; #Vetor que armazena os pontos encontrados na imagem
#Percorre a imagem na horizontal em 10 pontos distintos
intervalo = h / 9; #divide a altura em 9 partes para percorrer a imagem em 10 pontos diferentes
#print('Intervalo=',intervalo)
for i in range(0,10):
for j in range(w):
temp = 0
temp = i*intervalo if i*intervalo < h else h-1
if im[int(temp),int(j)]==1:
pontos.append([i*intervalo,j])
#print i*intervalo, j#imprime pontos, útil durante depuração do código
im[int(i*intervalo):int(i*intervalo)+4,j:j+4]=120; #mark the detected point on the image (slice indices must be ints)
#Percorre a imagem na vertical
intervalo = w / 9; #divide a largura em 9 partes para percorrer a imagem em 10 pontos diferentes
#print('Intervalo=',intervalo)
for j in range(0,10):
for i in range(h):
temp = 0
temp = j*intervalo if j*intervalo < w else w-1
if im[i,temp]==1:
pontos.append([i,j*intervalo])
#print i, j*intervalo #imprime pontos, útil durante depuração do código
im[i:i+4,int(j*intervalo):int(j*intervalo)+4]=120; #mark the detected point on the image (slice indices must be ints)
#Verifica se encontrou ao menos 4 pontos para realizar calculo
angulo = []; #vetor para armazenar os calculos
if len(pontos)>=4:
#Realiza o cálculo do primeiro ponto com o ultimo
y = pontos[0][0]-pontos[len(pontos)-1][0];
x = pontos[0][1]-pontos[len(pontos)-1][1];
if x!=0: #Verifica se divisor é zero e se for considera angula de 90 graus
anguloRad= atan(y*(-1)/float(x));
else:
anguloRad = pi/2;
angulo.append((anguloRad*180) / pi);
print('***************\nCalculo 1/2: Radianos', anguloRad, 'Ângulo em graus', angulo[0])
#Realiza o cálculo do segundo ponto com o penultimo
y = pontos[1][0]-pontos[len(pontos)-2][0];
x = pontos[1][1]-pontos[len(pontos)-2][1];
if x!=0: #Verifica se divisor é zero e se for considera angula de 90 graus
anguloRad= atan(y*(-1)/float(x));
else:
anguloRad = pi/2;
angulo.append((anguloRad*180) / pi);
print('Calculo 2/2: Radianos', anguloRad, 'Ângulo em graus', angulo[1])
#Como trabalhos futuros podemos melhorar o calculo dos angulos
#em relação aos pontos visando melhorar a precisão.
#Calcula a média
angulo = (angulo[0] + angulo[1]) / 2.0;
print('***************\nÂngulo em graus', angulo,'\n***************')
else:
print('\n***************\nImpossível calcular o angulo\n***************')
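# angulo(): estimates the dominant line direction with the standard Hough transform; only cos(theta) (scaled by 1000) is used, averaged over the majority sign.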
def angulo(img):
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(gray,50,150,apertureSize = 3)
lines = cv2.HoughLines(edges,1,np.pi/180,100)
#quanto a linha esta quase reta (90 graus) o resultado é 0,99 ou -0,99
#entao contamos a maioria para fazer a média dividindo certo
cont_pos=0 # positivos
cont_neg=0 # negativos
a_acum_pos=0
a_acum_neg=0
a_final=0
if lines is not None:
for line in lines:
for rho,theta in line:
a = np.cos(theta) # só usa essa informação
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
if a>0:
a_acum_pos+=a
cont_pos+=1
else:
a_acum_neg+=a
cont_neg+=1
cv2.line(img,(x1,y1),(x2,y2),(0,0,255),2)
#print('n:',int(a*1000))
if cont_pos>=cont_neg:
a_final=int(a_acum_pos/cont_pos*1000)
else:
a_final=int(a_acum_neg/cont_neg*1000)
else:
a_final = 0
print('\n***************\nImpossível calcular o angulo2\n***************')
#print('np.cos(theta):',a_final)
return img, a_final
def angulo_completo(img):
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(gray,50,150,apertureSize = 3)
lines = cv2.HoughLines(edges,1,np.pi/180,30)
rho_acum=0
if lines is not None:
for line in lines:
for rho,theta in line:
a = np.cos(theta) # só usa essa informação
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
cv2.line(img,(x1,y1),(x2,y2),(0,0,255),2)
print('rho',rho)
print('theta',theta)
print('a',a)
print('b',b)
print('x0',x0)
print('y0',y0)
else:
print('\n***************\nImpossível calcular o angulo2\n***************')
return img
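# angulo2(): uses the probabilistic Hough transform (HoughLinesP), converts each segment to an angle in degrees and averages over the majority sign.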
def angulo2(img):
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
#edges = cv2.Canny(gray, 75, 150)
edges = cv2.Canny(gray, 50, 150)
lines = cv2.HoughLinesP(edges, 1, np.pi/180, 30, maxLineGap=250)
if lines is not None:
for line in lines:
x1, y1, x2, y2 = line[0]
cv2.line(img, (x1, y1), (x2, y2), (0, 255, 0), 3)
cv2.circle(img, (x1,y1), 5, (0, 255, 0), 1)
cv2.circle(img, (x2,y2), 5, (0, 255, 0), 1)
#cv2.imshow("Edges", edges)
#INICIO CALCULO DOS ANGULOS
#quanto a linha esta quase reta (90 graus) o resultado é 0,99 ou -0,99
#entao contamos a maioria para fazer a média dividindo certo
cont_pos=0 # positivos
cont_neg=0 # negativos
a_acum_pos=0
a_acum_neg=0
a_final=0
if lines is not None:
for line in lines:
x1, y1, x2, y2 = line[0]
#Realiza o cálculo do primeiro ponto com o ultimo
y = y2-y1
x = x2-x1
if x!=0: #Verifica se divisor é zero e se for considera angula de 90 graus
anguloRad= atan(y*(-1)/float(x));
else:
anguloRad = pi/2;
a = (anguloRad*180) / pi
if a>0:
a_acum_pos+=a
cont_pos+=1
else:
a_acum_neg+=a
cont_neg+=1
#print('***************\nCalculo: Radianos', anguloRad, 'Ângulo em graus', a)
if cont_pos>=cont_neg:
a_final=int(a_acum_pos/cont_pos)
else:
a_final=int(a_acum_neg/cont_neg)
print('***************\nÂngulo em graus', a_final,'\n***************')
else:
a_final=0
print('\n***************\nImpossível calcular o angulo\n***************')
#FIM DO CALCULOS DOS ANGULOS
return img, a_final
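# Main loop: grab frames from the PiCamera, estimate the track angle with angulo2() and steer the motors over serial.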
try:
from picamera.array import PiRGBArray
from picamera import PiCamera
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (320, 240) #camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(320, 240)) #rawCapture = PiRGBArray(camera, size=(640, 480))
# allow the camera to warmup
time.sleep(0.1)
velocidade(100)
# capture frames from the camera
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
# grab the raw NumPy array representing the image, then initialize the timestamp
# and occupied/unoccupied text
img = frame.array
img = img[::-1,::-1,::].copy()
#INICIO DO ALGORITMO DE RECONHECIMENTO
#i = preprocessamento1(i)
#calculo_angular(i)
#break
#FIM DO ALGORITMO DE RECONHECIMENTO
#t1=time.time()
#img1, a1 = angulo(img.copy())
#t2=time.time()
img2, a2 = angulo2(img)
#t3=time.time()
#print('metodo 1:', a1, 'tempo', t2-t1, ' | metodo 2:', a2,'tempo',t3-t2)
#cv2.imshow("Frame1", img1)
a=a2 #usando algortimo 2
#COMANDO OS MOTORES VIA SERIAL
if a>0:
direita() if a < 88 else frente()
else:
esquerda() if a > -88 else frente()
# show the frame
cv2.imshow("Frame2", img2)
key = cv2.waitKey(1) & 0xFF
# clear the stream in preparation for the next frame
rawCapture.truncate(0)
# if the `q` key was pressed, break from the loop
if key == ord("q"):
cv2.destroyAllWindows()
break
except ImportError:
print('Não esta rodando em um Raspberry')
#para os motores
parar()
<file_sep>/serial_test.py
#!/usr/bin/python3
import serial
import time
ser = serial.Serial('/dev/ttyUSB0', 9600)
# read from Arduino
#i = ser.read()
#print ("Read input " + i.decode("utf-8") + " from Arduino")
#print ("Read input " + str(i) + " from Arduino\n")
#ser.write("b".encode())
for i in range(10):
print('Teste', i)
ser.write(str(i).encode())
ser.flush()
time.sleep(3)
#ser.close()
|
a63ecd277ebf8b33a1dc2c49e1cb12a7a51d58bb
|
[
"Markdown",
"Python",
"C++"
] | 4 |
Markdown
|
ricardoantonello/autonomous-car-raspiberry-opencv-python
|
907f5389fb041c3ba3acb93baabe9981dbac0923
|
b39b140a10c76e74084071e5a18fffb7cc2db35b
|
refs/heads/master
|
<file_sep><?php
namespace Jeux;
use Core\Database;
use Model\Jeu;
require_once __DIR__.'/../Model/JeuModel.php';
require_once 'Validator.php';
Class Jeux
{
private $db;
public function __construct(Database $db)
{
$this->setDb($db);
}
public function indexAction($params = null)
{
$jeu = new Jeu();
echo 'index jeux<br>';
echo $jeu->getXMLString();
}
public function getAction($id)
{
var_dump($id);
if(!empty($id)){
$db = $this->db->getConnection();
$stmt = $db->prepare("SELECT * FROM jeu WHERE id = :id");
$stmt->bindParam(':id', $id[0], \PDO::PARAM_INT);
$stmt->execute();
$result = $stmt->fetchAll();
var_dump($result);
}
else {
$db = $this->db->getConnection();
$stmt = $db->prepare("SELECT * FROM jeu");
$stmt->execute();
$result = $stmt->fetchAll();
var_dump($result);
}
}
public function postAction()
{
$db = $this->db->getConnection();
$stmt = $db->prepare("INSERT INTO jeu (nom, description, classification, dateSortie, deleted) VALUES (:nom, :description, :classification, :dateSortie, 0)");
$stmt->bindParam(':nom', $_POST["nom"]);
$stmt->bindParam(':description',$_POST["informations"]["descriptionInfo"]);
$stmt->bindParam(':classification', $_POST["informations"]["classification"]);
$stmt->bindParam(':dateSortie', $_POST["informations"]["date_sortie"]);
$stmt->execute();
$lastId = $db->lastInsertId();
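// Note: bundling several INSERTs in one prepare() only runs as a multi-statement batch when PDO's emulated prepares (the PDO_MySQL default) are enabled.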
$stmt = $db->prepare("INSERT INTO constructeurs (libelle) VALUES (:libelleconstruct);
INSERT INTO astuces (nom, description, idjeu, deleted) VALUES (:nomastuce, :descriptionastuce, :idjeu, 0);
INSERT INTO auteurnews (nom, site) VALUES (:nomaut, :siteaut);
INSERT INTO avis (auteur, dateavis, description, note, idjeu, deleted) VALUES (:auteuravis, :dateavis, :descriptionavis, :noteavis, :idjeu, 0);
INSERT INTO imagenews (source, dateimgnews, pathimgnews, idjeu, deleted) VALUES (:sourceimgnews, :dateimg, :pathimg, :idjeu, 0);
INSERT INTO langues (libelle) VALUES (:libellelangues);
INSERT INTO modes (libelle) VALUES (:libellemode);
INSERT INTO test (title, description, datetest, auteur, deleted) VALUES (:titletest, :descriptiontest, :datetest, :auteurtest, 0);
INSERT INTO wiki (nom, description, nomdivers, descriptiondivers, deleted) VALUES (:nomwiki, :descriptionwiki, :nomDivers, :descriptionDivers, 0);
INSERT INTO imageinfo (source, dateimginfo, pathimginfo, idjeu, deleted) VALUES (:sourceImageInfo, :dateImageInfo, :pathImageInfo, :idjeu, 0);
INSERT INTO imagetest (source, dateimgtest, pathimgtest, idjeu, deleted) VALUES (:sourceImageTest, :dateImageTest, :pathImageTest, :idjeu, 0);
INSERT INTO themes (libelle) VALUES (:libelleTheme);
INSERT INTO videoinfo (source, dateinfo, pathinfo, idjeu, deleted) VALUES (:sourceVideoInfo, :dateVideoInfo, :pathVideoInfo, :idjeu, 0);
INSERT INTO videotest (source, datevideotest, pathvideotest, idjeu, deleted) VALUES (:sourceVideoTest, :dateVideoTest, :pathVideoTest, :idjeu, 0);
INSERT INTO videonews (source, datevideonews, pathvideonews, idjeu, deleted) VALUES (:sourceVideoNew, :dateVideoNew, :pathVideoNew, :idjeu, 0);
");
$stmt->bindParam(':libelleconstruct', $_POST["informations"]["constructeur"]);
$stmt->bindParam(':nomastuce', $_POST["wiki"]["astuce"]["nomAstuce"]);
$stmt->bindParam(':descriptionastuce', $_POST["wiki"]["astuce"]["descriptionAstuce"]);
$stmt->bindParam(':nomaut', $_POST["news"]["auteurNew"]["nomAuteurNew"]);
$stmt->bindParam(':siteaut', $_POST["news"]["auteurNew"]["siteAuteurNew"]);
$stmt->bindParam(':auteuravis', $_POST["tests"]["avis"]["pseudonyme"]);
$stmt->bindParam(':dateavis', $_POST["tests"]["avis"]["dateAvis"]);
$stmt->bindParam(':descriptionavis', $_POST["tests"]["avis"]["descriptionAvis"]);
$stmt->bindParam(':noteavis', $_POST["tests"]["avis"]["noteAvis"]);
$stmt->bindParam(':idjeu', $lastId);
$stmt->bindParam(':sourceimgnews', $_POST["news"]["imagesNew"]["sourceImgNew"]);
$stmt->bindParam(':dateimg', $_POST["news"]["imagesNew"]["dateImgNew"]);
$stmt->bindParam(':pathimg', $_POST["news"]["imagesNew"]["nomImgNew"]);
$stmt->bindParam(':libellelangues', $_POST["langues"]["nomLangue"]);
$stmt->bindParam(':libellemode', $_POST["modes"]["libelleMode"]);
$stmt->bindParam(':titletest', $_POST["tests"]["nomTest"]);
$stmt->bindParam(':descriptiontest', $_POST["tests"]["descriptionTest"]);
$stmt->bindParam(':datetest', $_POST["tests"]["dateTest"]);
$stmt->bindParam(':auteurtest', $_POST["tests"]["auteurTest"]);
$stmt->bindParam(':nomwiki', $_POST["wiki"]["solution_complete"]["nomWikiComp"]);
$stmt->bindParam(':descriptionwiki', $_POST["wiki"]["solution_complete"]["descriptionWikiComp"]);
$stmt->bindParam(':nomDivers', $_POST["wiki"]["divers"]["nomDivers"]);
$stmt->bindParam(':descriptionDivers', $_POST["wiki"]["divers"]["descriptionDivers"]);
$stmt->bindParam(':sourceImageInfo', $_POST["informations"]["imagesInfo"]["nomImgInfo"]);
$stmt->bindParam(':dateImageInfo', $_POST["informations"]["imagesInfo"]["dateImgInfo"]);
$stmt->bindParam(':pathImageInfo', $_POST["informations"]["imagesInfo"]["sourceImgInfo"]);
$stmt->bindParam(':sourceImageTest', $_POST["tests"]["imagesTest"]["nomImgTest"]);
$stmt->bindParam(':dateImageTest', $_POST["tests"]["imagesTest"]["dateImgTest"]);
$stmt->bindParam(':pathImageTest', $_POST["tests"]["imagesTest"]["sourceImgTest"]);
$stmt->bindParam(':libelleTheme', $_POST["informations"]["theme"]["libelleTheme"]["libelleTheme1"]);
$stmt->bindParam(':sourceVideoInfo', $_POST["informations"]["videosInfo"]["nomVideoInfo"]);
$stmt->bindParam(':dateVideoInfo', $_POST["informations"]["videosInfo"]["sourceVideoInfo"]);
$stmt->bindParam(':pathVideoInfo', $_POST["informations"]["videosInfo"]["dateVideoInfo"]);
$stmt->bindParam(':sourceVideoTest', $_POST["tests"]["videosTest"]["nomVideoTest"]);
$stmt->bindParam(':dateVideoTest', $_POST["tests"]["videosTest"]["sourceVideoTest"]);
$stmt->bindParam(':pathVideoTest', $_POST["tests"]["videosTest"]["dateVideoTest"]);
$stmt->bindParam(':sourceVideoNew', $_POST["news"]["videosNew"]["nomVideoNew"]);
$stmt->bindParam(':dateVideoNew', $_POST["news"]["videosNew"]["sourceVideoNew"]);
$stmt->bindParam(':pathVideoNew', $_POST["news"]["videosNew"]["dateVideoNew"]);
$stmt->execute();
}
public function putAction($id, $champ, $valeur)
{
$db = $this->db->getConnection();
$update = \Validator::validate($champ, $valeur); // Validator lives in the global namespace
if($update === true){
$stmt = $db->prepare("UPDATE jeu SET ".$champ." = :nom WHERE id = :id");
$stmt->execute(["nom"=>$valeur , "id"=> $id]);
}
}
public function deleteAction($params)
{
$db = $this->db->getConnection();
$stmt = $db->prepare("UPDATE jeu SET deleted = 1 WHERE id = :idjeu;
UPDATE astuces SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE avis SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE imagenews SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE imageinfo SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE imagetest SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE videoinfo SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE videotest SET deleted = 1 WHERE idjeu = :idjeu;
UPDATE videonews SET deleted = 1 WHERE idjeu = :idjeu;");
$stmt->bindParam(':idjeu', $params[0], \PDO::PARAM_INT);
$stmt->execute();
}
public function setDb(Database $db)
{
$this->db = $db;
}
public function getDb()
{
return $this->db;
}
}
<file_sep><?php
namespace Model;
class Jeu
{
private $id;
private $schema;
private $xmlString;
private $nom;
private $constructeur;
private $date_sortie;
private $libelleGenre;
private $libelleMode;
private $descriptionInfo;
private $classification;
private $libelleTheme;
private $nomLangue;
private $nomImgInfo;
private $sourceImgInfo;
private $dateImgInfo;
private $nomVideoInfo;
private $sourceVideoInfo;
private $dateVideoInfo;
private $nomEditeur;
private $nomTest;
private $descriptionTest;
private $dateTest;
private $auteurTest;
private $nomImgTest;
private $sourceImgTest;
private $dateImgTest;
private $nomVideoTest;
private $sourceVideoTest;
private $dateVideoTest;
private $points_positif;
private $points_negatif;
private $pseudonyme;
private $dateAvis;
private $descriptionAvis;
private $noteAvis;
private $positif;
private $negatif;
private $nomNew;
private $descriptionNew;
private $nomImgNew;
private $sourceImgNew;
private $dateImgNew;
private $nomVideoNew;
private $sourceVideoNew;
private $dateVideoNew;
private $dateNew;
private $nomAuteurNew;
private $siteAuteurNew;
private $nomTag;
private $nomWikiComp;
private $descriptionWikiComp;
private $nomDivers;
private $descriptionDivers;
private $nomAstuce;
private $descriptionAstuce;
public function setNom($nom) { $this->nom = $nom; }
public function getNom() { return $this->nom; }
public function setConstructeur($constructeur) { $this->constructeur = $constructeur; }
public function getConstructeur() { return $this->constructeur; }
public function setDate_sortie($date_sortie) { $this->date_sortie = $date_sortie; }
public function getDate_sortie() { return $this->date_sortie; }
public function setLibelleGenre($libelleGenre) { $this->libelleGenre = $libelleGenre; }
public function getLibelleGenre() { return $this->libelleGenre; }
public function setLibelleMode($libelleMode) { $this->libelleMode = $libelleMode; }
public function getLibelleMode() { return $this->libelleMode; }
public function setDescriptionInfo($descriptionInfo) { $this->descriptionInfo = $descriptionInfo; }
public function getDescriptionInfo() { return $this->descriptionInfo; }
public function setClassification($classification) { $this->classification = $classification; }
public function getClassification() { return $this->classification; }
public function setLibelleTheme($libelleTheme) { $this->libelleTheme = $libelleTheme; }
public function getLibelleTheme() { return $this->libelleTheme; }
public function setNomLangue($nomLangue) { $this->nomLangue = $nomLangue; }
public function getNomLangue() { return $this->nomLangue; }
public function setNomImgInfo($nomImgInfo) { $this->nomImgInfo = $nomImgInfo; }
public function getNomImgInfo() { return $this->nomImgInfo; }
public function setSourceImgInfo($sourceImgInfo) { $this->sourceImgInfo = $sourceImgInfo; }
public function getSourceImgInfo() { return $this->sourceImgInfo; }
public function setDateImgInfo($dateImgInfo) { $this->dateImgInfo = $dateImgInfo; }
public function getDateImgInfo() { return $this->dateImgInfo; }
public function setNomVideoInfo($nomVideoInfo) { $this->nomVideoInfo = $nomVideoInfo; }
public function getNomVideoInfo() { return $this->nomVideoInfo; }
public function setSourceVideoInfo($sourceVideoInfo) { $this->sourceVideoInfo = $sourceVideoInfo; }
public function getSourceVideoInfo() { return $this->sourceVideoInfo; }
public function setDateVideoInfo($dateVideoInfo) { $this->dateVideoInfo = $dateVideoInfo; }
public function getDateVideoInfo() { return $this->dateVideoInfo; }
public function setNomEditeur($nomEditeur) { $this->nomEditeur = $nomEditeur; }
public function getNomEditeur() { return $this->nomEditeur; }
public function setNomTest($nomTest) { $this->nomTest = $nomTest; }
public function getNomTest() { return $this->nomTest; }
public function setDescriptionTest($descriptionTest) { $this->descriptionTest = $descriptionTest; }
public function getDescriptionTest() { return $this->descriptionTest; }
public function setDateTest($dateTest) { $this->dateTest = $dateTest; }
public function getDateTest() { return $this->dateTest; }
public function setAuteurTest($auteurTest) { $this->auteurTest = $auteurTest; }
public function getAuteurTest() { return $this->auteurTest; }
public function setNomImgTest($nomImgTest) { $this->nomImgTest = $nomImgTest; }
public function getNomImgTest() { return $this->nomImgTest; }
public function setSourceImgTest($sourceImgTest) { $this->sourceImgTest = $sourceImgTest; }
public function getSourceImgTest() { return $this->sourceImgTest; }
public function setDateImgTest($dateImgTest) { $this->dateImgTest = $dateImgTest; }
public function getDateImgTest() { return $this->dateImgTest; }
public function setNomVideoTest($nomVideoTest) { $this->nomVideoTest = $nomVideoTest; }
public function getNomVideoTest() { return $this->nomVideoTest; }
public function setSourceVideoTest($sourceVideoTest) { $this->sourceVideoTest = $sourceVideoTest; }
public function getSourceVideoTest() { return $this->sourceVideoTest; }
public function setDateVideoTest($dateVideoTest) { $this->dateVideoTest = $dateVideoTest; }
public function getDateVideoTest() { return $this->dateVideoTest; }
public function setPoints_positif($points_positif) { $this->points_positif = $points_positif; }
public function getPoints_positif() { return $this->points_positif; }
public function setPoints_negatif($points_negatif) { $this->points_negatif = $points_negatif; }
public function getPoints_negatif() { return $this->points_negatif; }
public function setPseudonyme($pseudonyme) { $this->pseudonyme = $pseudonyme; }
public function getPseudonyme() { return $this->pseudonyme; }
public function setDateAvis($dateAvis) { $this->dateAvis = $dateAvis; }
public function getDateAvis() { return $this->dateAvis; }
public function setDescriptionAvis($descriptionAvis) { $this->descriptionAvis = $descriptionAvis; }
public function getDescriptionAvis() { return $this->descriptionAvis; }
public function setNoteAvis($noteAvis) { $this->noteAvis = $noteAvis; }
public function getNoteAvis() { return $this->noteAvis; }
public function setPositif($positif) { $this->positif = $positif; }
public function getPositif() { return $this->positif; }
public function setNegatif($negatif) { $this->negatif = $negatif; }
public function getNegatif() { return $this->negatif; }
public function setNomNew($nomNew) { $this->nomNew = $nomNew; }
public function getNomNew() { return $this->nomNew; }
public function setDescriptionNew($descriptionNew) { $this->descriptionNew = $descriptionNew; }
public function getDescriptionNew() { return $this->descriptionNew; }
public function setNomImgNew($nomImgNew) { $this->nomImgNew = $nomImgNew; }
public function getNomImgNew() { return $this->nomImgNew; }
public function setSourceImgNew($sourceImgNew) { $this->sourceImgNew = $sourceImgNew; }
public function getSourceImgNew() { return $this->sourceImgNew; }
public function setDateImgNew($dateImgNew) { $this->dateImgNew = $dateImgNew; }
public function getDateImgNew() { return $this->dateImgNew; }
public function setNomVideoNew($nomVideoNew) { $this->nomVideoNew = $nomVideoNew; }
public function getNomVideoNew() { return $this->nomVideoNew; }
public function setSourceVideoNew($sourceVideoNew) { $this->sourceVideoNew = $sourceVideoNew; }
public function getSourceVideoNew() { return $this->sourceVideoNew; }
public function setDateVideoNew($dateVideoNew) { $this->dateVideoNew = $dateVideoNew; }
public function getDateVideoNew() { return $this->dateVideoNew; }
public function setDateNew($dateNew) { $this->dateNew = $dateNew; }
public function getDateNew() { return $this->dateNew; }
public function setNomAuteurNew($nomAuteurNew) { $this->nomAuteurNew = $nomAuteurNew; }
public function getNomAuteurNew() { return $this->nomAuteurNew; }
public function setSiteAuteurNew($siteAuteurNew) { $this->siteAuteurNew = $siteAuteurNew; }
public function getSiteAuteurNew() { return $this->siteAuteurNew; }
public function setNomTag($nomTag) { $this->nomTag = $nomTag; }
public function getNomTag() { return $this->nomTag; }
public function setNomWikiComp($nomWikiComp) { $this->nomWikiComp = $nomWikiComp; }
public function getNomWikiComp() { return $this->nomWikiComp; }
public function setDescriptionWikiComp($descriptionWikiComp) { $this->descriptionWikiComp = $descriptionWikiComp; }
public function getDescriptionWikiComp() { return $this->descriptionWikiComp; }
public function setNomDivers($nomDivers) { $this->nomDivers = $nomDivers; }
public function getNomDivers() { return $this->nomDivers; }
public function setDescriptionDivers($descriptionDivers) { $this->descriptionDivers = $descriptionDivers; }
public function getDescriptionDivers() { return $this->descriptionDivers; }
public function setNomAstuce($nomAstuce) { $this->nomAstuce = $nomAstuce; }
public function getNomAstuce() { return $this->nomAstuce; }
public function setDescriptionAstuce($descriptionAstuce) { $this->descriptionAstuce = $descriptionAstuce; }
public function getDescriptionAstuce() { return $this->descriptionAstuce; }
public function __construct()
{
$schema = array(
'nom' => 'nom',
'informations' => array(
'constructeur' => 'constructeur',
'date_sortie' => 'date_sortie',
'genres' => array(
'libelleGenre' => array()
),
'modes' => array(
'libelleMode' => array()
),
'descriptionInfo' => 'descriptionInfo',
'classification' => 'classification',
'theme' => array(
'libelleTheme' => array()
),
'langues' => array(
'nomLangue' => 'nomLangue'
),
'imagesInfo' => array(
'nomImgInfo' => 'nomImgInfo',
'sourceImgInfo' => 'sourceImgInfo',
'dateImgInfo' => 'dateImgInfo'
),
'videosInfo' => array(
'nomVideoInfo' => 'nomVideoInfo',
'sourceVideoInfo' => 'sourceVideoInfo',
'dateVideoInfo' => 'dateVideoInfo'
),
'editeurs' => array(
'nomEditeur' => array()
)
),
'tests' => array(
'nomTest' => 'nomTest',
'descriptionTest' => 'descriptionTest',
'dateTest' => 'dateTest',
'auteurTest' => 'auteurTest',
'imagesTest' => array(
'nomImgTest' => 'nomImgTest',
'sourceImgTest' => 'sourceImgTest',
'dateImgTest' => 'dateImgTest'
),
'videosTest' => array(
'nomVideoTest' => 'nomVideoTest',
'sourceVideoTest' => 'sourceVideoTest',
'dateVideoTest' => 'dateVideoTest'
),
'notes' => array(
'points_positif' => array(),
'points_negatif' => array()
),
'avis' => array(
'pseudonyme' => 'pseudonyme',
'dateAvis' => 'dateAvis',
'descriptionAvis' => 'descriptionAvis',
'noteAvis' => 'noteAvis',
'votes' => array(
'positif' => 'positif',
'negatif' => 'negatif'
),
)
),
'news' => array(
'nomNew' => 'nomNew',
'descriptionNew' => 'descriptionNew',
'imagesNew' => array(
'nomImgNew' => 'nomImgNew',
'sourceImgNew' => 'sourceImgNew',
'dateImgNew' => 'dateImgNew'
),
'videosNew' => array(
'nomVideoNew' => 'nomVideoNew',
'sourceVideoNew' => 'sourceVideoNew',
'dateVideoNew' => 'dateVideoNew'
),
'dateNew' => 'dateNew',
'auteurNew' => array(
'nomAuteurNew' => 'nomAuteurNew',
'siteAuteurNew' => 'siteAuteurNew'
),
'tag' => array(
'nomTag' => array()
)
),
'wiki' => array(
'solution_complete' => array(
'nomWikiComp' => 'nomWikiComp',
'descriptionWikiComp' => 'descriptionWikiComp'
),
'divers' => array(
'nomDivers' => 'nomDivers',
'descriptionDivers' => 'descriptionDivers'
),
'astuce' => array(
'nomAstuce' => 'nomAstuce',
'descriptionAstuce' => 'descriptionAstuce'
)
)
);
$this->setSchema($schema);
}
public function setId($id)
{
$this->id = $id;
}
public function getId()
{
return $this->id;
}
public function setSchema($schema)
{
$this->schema = $schema;
}
public function getSchema()
{
return $this->schema;
}
public function setXMLString($xmlString)
{
$this->xmlString = $xmlString;
}
public function getTmpXMLString()
{
return $this->xmlString;
}
public function getXMLString()
{
$schema = $this->getSchema();
$this->xmlConstruction($schema);
$xmlString = htmlentities('<jeu>').'<br>'.$this->xmlString.htmlentities('</jeu>');
return $xmlString;
}
public function xmlConstruction($schema)
{
if(is_array($schema)) {
foreach($schema as $key => $value) {
if(empty($value)) {
$get = 'get'.ucfirst($key);
$set = 'set'.ucfirst($key);
$this->$set(['1', '2','3']);
$array = $this->$get();
foreach($array as $v) {
$this->setXMLString($this->getTmpXMLString().htmlentities('<'.$key.'>').'<br>');
$this->setXMLString($this->getTmpXMLString().$v.'<br>');
$this->setXMLString($this->getTmpXMLString().htmlentities('</'.$key.'>').'<br>');
}
} else {
$this->setXMLString($this->getTmpXMLString().htmlentities('<'.$key.'>').'<br>');
$this->xmlConstruction($value);
$this->setXMLString($this->getTmpXMLString().htmlentities('</'.$key.'>').'<br>');
}
}
} else {
$get = 'get'.ucfirst($schema);
$set = 'set'.ucfirst($schema);
$this->$set($this->generateRandomString());
$this->setXMLString($this->getTmpXMLString().$this->$get().'<br>');
}
}
public function generateRandomString($length = 10) {
$characters = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
$charactersLength = strlen($characters);
$randomString = '';
for ($i = 0; $i < $length; $i++) {
$randomString .= $characters[rand(0, $charactersLength - 1)];
}
return $randomString;
}
}
<file_sep><?php
$url = 'http://localhost/restapixml/jeux/post';
$fields = array(
'nom' => 'nomm',
'informations' => array(
'constructeur' => 'constructeur',
'date_sortie' => 'date_sortie',
'genres' => array(
'libelleGenre' => array("libelleGenre1", "libelleGenre2")
),
'modes' => array(
'libelleMode' => array("libelleMode1", "libelleMode2")
),
'descriptionInfo' => 'decriptionInfo',
'classification' => 'classification',
'theme' => array(
'libelleTheme' => array("libelleTheme1","libelleTheme2")
),
'langues' => array(
'nomLangue' => 'nomLangue'
),
'imagesInfo' => array(
'nomImgInfo' => 'nomImgInfo',
'sourceImgInfo' => 'sourceImgInfo',
'dateImgInfo' => 'dateImgInfo'
),
'videosInfo' => array(
'nomVideoInfo' => 'nomVideoInfo',
'sourceVideoInfo' => 'sourceVideoInfo',
'dateVideoInfo' => 'dateVideoInfo'
),
'editeurs' => array(
'nomEditeur' => array("nomEditeur1", "nomEditeur2")
)
),
'tests' => array(
'nomTest' => 'nomTest',
'descriptionTest' => 'descriptionTest',
'dateTest' => 'dateTest',
'auteurTest' => 'auteurTest',
'imagesTest' => array(
'nomImgTest' => 'nomImgTest',
'sourceImgTest' => 'sourceImgTest',
'dateImgTest' => 'dateImgTest'
),
'videosTest' => array(
'nomVideoTest' => 'nomVideoTest',
'sourceVideoTest' => 'sourceVideoTest',
'dateVideoTest' => 'dateVideoTest'
),
'notes' => array(
'points_positif' => array("points_positif1", "points_positif2"),
'points_negatif' => array("points_negatif1", "points_negatif2")
),
'avis' => array(
'pseudonyme' => 'pseudonyme',
'dateAvis' => 'dateAvis',
'descriptionAvis' => 'descriptionAvis',
'noteAvis' => 'noteAvis',
'votes' => array(
'positif' => 'positif',
'negatif' => 'negatif'
),
)
),
'news' => array(
'nomNew' => 'nomNew',
'descriptionNew' => 'descriptionNew',
'imagesNew' => array(
'nomImgNew' => 'nomImgNew',
'sourceImgNew' => 'sourceImgNew',
'dateImgNew' => 'dateImgNew'
),
'videosNew' => array(
'nomVideoNew' => 'nomVideoNew',
'sourceVideoNew' => 'sourceVideoNew',
'dateVideoNew' => 'dateVideoNew'
),
'dateNew' => 'dateNew',
'auteurNew' => array(
'nomAuteurNew' => 'nomAuteurNew',
'siteAuteurNew' => 'siteAuteurNew'
),
'tag' => array(
'nomTag' => array(
'nomTag1', 'nomTag2'
)
)
),
'wiki' => array(
'solution_complete' => array(
'nomWikiComp' => 'nomWikiComp',
'descriptionWikiComp' => 'descriptionWikiComp'
),
'divers' => array(
'nomDivers' => 'nomDivers',
'descriptionDivers' => 'descriptionDivers'
),
'astuce' => array(
'nomAstuce' => 'nomAstuce',
'descriptionAstuce' => 'descriptionAstuce'
)
)
);
//url-ify the data for the POST
$fields_string = http_build_query($fields);
//open connection
$ch = curl_init();
//set the url, number of POST vars, POST data
curl_setopt($ch,CURLOPT_URL, $url);
curl_setopt($ch,CURLOPT_POST, 1);
curl_setopt($ch,CURLOPT_POSTFIELDS, $fields_string);
//execute post
$result = curl_exec($ch);
var_dump($result);
die();
//close connection
curl_close($ch);<file_sep>var restapixmlApp = angular.module('restapixmlApp', []);
restapixmlApp.controller('MainCtrl', function ($scope, $http){
$http.get('phones/phones.json').success(function(data) {
$scope.phones = data;
});
});<file_sep>-- phpMyAdmin SQL Dump
-- version 4.2.5
-- http://www.phpmyadmin.net
--
-- Client : localhost:8889
-- Généré le : Ven 12 Juin 2015 à 14:27
-- Version du serveur : 5.5.38
-- Version de PHP : 5.5.14
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Base de données : `restapixml`
--
-- --------------------------------------------------------
--
-- Structure de la table `astuces`
--
CREATE TABLE `astuces` (
`id` int(11) NOT NULL,
`nom` tinytext COLLATE utf8_bin NOT NULL,
`description` text COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=8 ;
--
-- Contenu de la table `astuces`
--
INSERT INTO `astuces` (`id`, `nom`, `description`, `idjeu`) VALUES
(1, 'nomAstuce', 'descriptionAstuce', 28),
(2, 'nomAstuce', 'descriptionAstuce', 29),
(3, 'nomAstuce', 'descriptionAstuce', 30),
(4, 'nomAstuce', 'descriptionAstuce', 31),
(5, 'nomAstuce', 'descriptionAstuce', 32),
(6, 'nomAstuce', 'descriptionAstuce', 33),
(7, 'nomAstuce', 'descriptionAstuce', 34);
-- --------------------------------------------------------
--
-- Structure de la table `auteurnews`
--
CREATE TABLE `auteurnews` (
`id` int(11) NOT NULL,
`nom` tinytext COLLATE utf8_bin NOT NULL,
`site` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=10 ;
--
-- Contenu de la table `auteurnews`
--
INSERT INTO `auteurnews` (`id`, `nom`, `site`) VALUES
(1, 'nomAuteurNew', 'siteAuteurNew'),
(2, 'nomAuteurNew', 'siteAuteurNew'),
(3, 'nomAuteurNew', 'siteAuteurNew'),
(4, 'nomAuteurNew', 'siteAuteurNew'),
(5, 'nomAuteurNew', 'siteAuteurNew'),
(6, 'nomAuteurNew', 'siteAuteurNew'),
(7, 'nomAuteurNew', 'siteAuteurNew'),
(8, 'nomAuteurNew', 'siteAuteurNew'),
(9, 'nomAuteurNew', 'siteAuteurNew');
-- --------------------------------------------------------
--
-- Structure de la table `avis`
--
CREATE TABLE `avis` (
`id` int(11) NOT NULL,
`auteur` tinytext NOT NULL,
`dateavis` date NOT NULL,
`description` tinytext NOT NULL,
`note` tinyint(4) NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=3 ;
--
-- Contenu de la table `avis`
--
INSERT INTO `avis` (`id`, `auteur`, `dateavis`, `description`, `note`, `idjeu`) VALUES
(1, 'pseudonyme', '0000-00-00', 'descriptionAvis', 0, 33),
(2, 'pseudonyme', '0000-00-00', 'descriptionAvis', 0, 34);
-- --------------------------------------------------------
--
-- Structure de la table `constructeurs`
--
CREATE TABLE `constructeurs` (
`id` int(11) NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=8 ;
--
-- Contenu de la table `constructeurs`
--
INSERT INTO `constructeurs` (`id`, `libelle`) VALUES
(1, 'constructeur'),
(2, 'constructeur'),
(3, 'constructeur'),
(4, 'constructeur'),
(5, 'constructeur'),
(6, 'constructeur'),
(7, 'constructeur');
-- --------------------------------------------------------
--
-- Structure de la table `editeurjeu`
--
CREATE TABLE `editeurjeu` (
`idjeu` int(11) NOT NULL,
`idediteur` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Structure de la table `editeurs`
--
CREATE TABLE `editeurs` (
`id` int(11) NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `genrejeu`
--
CREATE TABLE `genrejeu` (
`idjeu` int(11) NOT NULL,
`idgenre` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Structure de la table `genres`
--
CREATE TABLE `genres` (
`id` int(11) NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `imageinfo`
--
CREATE TABLE `imageinfo` (
`id` int(11) NOT NULL,
`source` tinytext COLLATE utf8_bin NOT NULL,
`dateimginfo` date NOT NULL,
`pathimginfo` tinytext COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `imagenews`
--
CREATE TABLE `imagenews` (
`id` int(11) NOT NULL,
`source` tinytext COLLATE utf8_bin NOT NULL,
`dateimgnews` date NOT NULL,
`pathimgnews` tinytext COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=2 ;
--
-- Contenu de la table `imagenews`
--
INSERT INTO `imagenews` (`id`, `source`, `dateimgnews`, `pathimgnews`, `idjeu`) VALUES
(1, 'sourceImgNew', '0000-00-00', 'nomImgNew', 34);
-- --------------------------------------------------------
--
-- Structure de la table `imagetest`
--
CREATE TABLE `imagetest` (
`id` int(11) NOT NULL,
`source` tinytext COLLATE utf8_bin NOT NULL,
`dateimgtest` date NOT NULL,
`pathimgtest` tinytext COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `jeu`
--
CREATE TABLE `jeu` (
`id` int(11) NOT NULL,
`nom` tinytext COLLATE utf8_bin NOT NULL,
`description` text COLLATE utf8_bin NOT NULL,
`classification` tinytext COLLATE utf8_bin NOT NULL,
`dateSortie` date NOT NULL,
`deleted` tinyint(1) NOT NULL,
`idconstructeur` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=35 ;
--
-- Contenu de la table `jeu`
--
INSERT INTO `jeu` (`id`, `nom`, `description`, `classification`, `dateSortie`, `deleted`, `idconstructeur`) VALUES
(1, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(2, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(3, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(4, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(5, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(6, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(7, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(8, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(9, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(10, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(11, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(12, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(13, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(14, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(15, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(16, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(17, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(18, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(19, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(20, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(21, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(22, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(23, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(24, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(25, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(26, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(27, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(28, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(29, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(30, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(31, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(32, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(33, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0),
(34, 'nomm', 'decriptionInfo', 'classification', '0000-00-00', 0, 0);
-- --------------------------------------------------------
--
-- Structure de la table `languejeu`
--
CREATE TABLE `languejeu` (
`idjeu` int(11) NOT NULL,
`idlangue` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Structure de la table `langues`
--
CREATE TABLE `langues` (
`id` int(11) NOT NULL,
`libelle` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `modejeu`
--
CREATE TABLE `modejeu` (
`idjeu` int(11) NOT NULL,
`idmode` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Structure de la table `modes`
--
CREATE TABLE `modes` (
`id` int(11) NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `news`
--
CREATE TABLE `news` (
`id` int(11) NOT NULL,
`nom` tinytext COLLATE utf8_bin NOT NULL,
`description` text COLLATE utf8_bin NOT NULL,
`date` date NOT NULL,
`idauteurnews` int(11) NOT NULL,
`idjeu` int(11) NOT NULL,
`deleted` tinyint(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `notestest`
--
CREATE TABLE `notestest` (
`id` int(11) NOT NULL,
`type` tinytext COLLATE utf8_bin NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL,
`idtest` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `tags`
--
CREATE TABLE `tags` (
`id` int(11) NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL,
`idnews` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `test`
--
CREATE TABLE `test` (
`id` int(11) NOT NULL,
`title` tinytext COLLATE utf8_bin NOT NULL,
`description` text COLLATE utf8_bin NOT NULL,
`datetest` date NOT NULL,
`auteur` tinytext COLLATE utf8_bin NOT NULL,
`deleted` tinyint(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `themejeu`
--
CREATE TABLE `themejeu` (
`idjeu` int(11) NOT NULL,
`idtheme` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Structure de la table `themes`
--
CREATE TABLE `themes` (
`id` int(11) NOT NULL,
`libelle` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `users`
--
CREATE TABLE `users` (
`id` int(11) NOT NULL,
`name` tinytext COLLATE utf8_bin NOT NULL,
`token` tinytext COLLATE utf8_bin NOT NULL,
`nomdomaine` tinytext COLLATE utf8_bin NOT NULL,
`secretkey` tinytext COLLATE utf8_bin NOT NULL,
`apikey` tinytext COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `videoinfo`
--
CREATE TABLE `videoinfo` (
`id` int(11) NOT NULL,
`source` tinytext COLLATE utf8_bin NOT NULL,
`dateinfo` date NOT NULL,
`pathinfo` tinytext COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `videonews`
--
CREATE TABLE `videonews` (
`id` int(11) NOT NULL,
`source` tinytext COLLATE utf8_bin NOT NULL,
`datevideonews` date NOT NULL,
`pathvideonews` tinytext COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `videotest`
--
CREATE TABLE `videotest` (
`id` int(11) NOT NULL,
`source` tinytext COLLATE utf8_bin NOT NULL,
`datevideotest` date NOT NULL,
`pathvideotest` tinytext COLLATE utf8_bin NOT NULL,
`idjeu` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=1 ;
-- --------------------------------------------------------
--
-- Structure de la table `wiki`
--
CREATE TABLE `wiki` (
`id` int(11) NOT NULL,
`nom` tinytext COLLATE utf8_bin NOT NULL,
`description` text COLLATE utf8_bin NOT NULL,
`nomdivers` tinytext COLLATE utf8_bin NOT NULL,
`descriptiondivers` text COLLATE utf8_bin NOT NULL,
`deleted` tinyint(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin AUTO_INCREMENT=6 ;
--
-- Contenu de la table `wiki`
--
INSERT INTO `wiki` (`id`, `nom`, `description`, `nomdivers`, `descriptiondivers`, `deleted`) VALUES
(1, 'nomWikiComp', 'descriptionWikiComp', 'nomDivers', 'descriptionDivers', 0),
(2, 'nomWikiComp', 'descriptionWikiComp', 'nomDivers', 'descriptionDivers', 0),
(3, 'nomWikiComp', 'descriptionWikiComp', 'nomDivers', 'descriptionDivers', 0),
(4, 'nomWikiComp', 'descriptionWikiComp', 'nomDivers', 'descriptionDivers', 0),
(5, 'nomWikiComp', 'descriptionWikiComp', 'nomDivers', 'descriptionDivers', 0);
--
-- Index pour les tables exportées
--
--
-- Index pour la table `astuces`
--
ALTER TABLE `astuces`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `auteurnews`
--
ALTER TABLE `auteurnews`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `avis`
--
ALTER TABLE `avis`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `constructeurs`
--
ALTER TABLE `constructeurs`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `editeurs`
--
ALTER TABLE `editeurs`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `genres`
--
ALTER TABLE `genres`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `imageinfo`
--
ALTER TABLE `imageinfo`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `imagenews`
--
ALTER TABLE `imagenews`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `imagetest`
--
ALTER TABLE `imagetest`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `jeu`
--
ALTER TABLE `jeu`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `langues`
--
ALTER TABLE `langues`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `modes`
--
ALTER TABLE `modes`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `news`
--
ALTER TABLE `news`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `notestest`
--
ALTER TABLE `notestest`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `tags`
--
ALTER TABLE `tags`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `test`
--
ALTER TABLE `test`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `themes`
--
ALTER TABLE `themes`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `videoinfo`
--
ALTER TABLE `videoinfo`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `videonews`
--
ALTER TABLE `videonews`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `videotest`
--
ALTER TABLE `videotest`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `wiki`
--
ALTER TABLE `wiki`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT pour les tables exportées
--
--
-- AUTO_INCREMENT pour la table `astuces`
--
ALTER TABLE `astuces`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT pour la table `auteurnews`
--
ALTER TABLE `auteurnews`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT pour la table `avis`
--
ALTER TABLE `avis`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT pour la table `constructeurs`
--
ALTER TABLE `constructeurs`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT pour la table `editeurs`
--
ALTER TABLE `editeurs`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `genres`
--
ALTER TABLE `genres`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `imageinfo`
--
ALTER TABLE `imageinfo`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `imagenews`
--
ALTER TABLE `imagenews`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT pour la table `imagetest`
--
ALTER TABLE `imagetest`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `jeu`
--
ALTER TABLE `jeu`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=35;
--
-- AUTO_INCREMENT pour la table `langues`
--
ALTER TABLE `langues`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `modes`
--
ALTER TABLE `modes`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `news`
--
ALTER TABLE `news`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `notestest`
--
ALTER TABLE `notestest`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `tags`
--
ALTER TABLE `tags`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `test`
--
ALTER TABLE `test`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `themes`
--
ALTER TABLE `themes`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `users`
--
ALTER TABLE `users`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `videoinfo`
--
ALTER TABLE `videoinfo`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `videonews`
--
ALTER TABLE `videonews`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `videotest`
--
ALTER TABLE `videotest`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT pour la table `wiki`
--
ALTER TABLE `wiki`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=6;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php
class Validator{
public static function validate($champ, $valeur){
$update = false;
switch ($champ) {
case 'nom':
if(is_string($valeur) && strlen($valeur) > 0 && strlen($valeur) < 255){
$update = true;
}
else{
die();
}
break;
case 'description':
if(is_string($valeur) && strlen($valeur) > 0 && strlen($valeur) < 255){
$update = true;
}
else{
die();
}
break;
case 'classification':
if(is_string($valeur) && strlen($valeur) > 0 && strlen($valeur) < 10){
$update = true;
}
else{
die();
}
break;
case 'dateSortie':
// checkdate() needs month/day/year separately, so validate the expected Y-m-d string instead
$date = \DateTime::createFromFormat('Y-m-d', $valeur);
if($date !== false && $date->format('Y-m-d') === $valeur){
$update = true;
}
else{
die();
}
break;
case 'deleted':
if(is_int($valeur)){
$update = true;
}
else{
die();
}
break;
default:
echo "Oups";
}
return $update;
}
}<file_sep><?php
/**
* Class Autoloader
*/
class Autoloader{
private $dir;
public function __construct($dir) {
$this->dir = $dir;
}
/**
* Registers the autoloader
*/
static function register(){
spl_autoload_register(array(__CLASS__, 'autoload'));
}
/**
* Includes the file corresponding to the class
* @param $class string The name of the class to load
*/
static function autoload($class){
require 'class/' . $class . '.php';
}
}<file_sep><?php
require_once 'Core/Router.php';
require_once 'Core/Database.php';
require 'Autoloader.php';
Autoloader::register();
$router = new Core\Router();
$db = Core\Database::getInstance();
$array = $router->getRoute();
$default = $router->getDefault();
$require = 'Controller/'.$array['controller'].'.php';
if(file_exists($require)) {
require_once $require;
}
$objectCall = ucfirst($array['controllerName']).'\\'.ucfirst($array['controllerName']);
if(class_exists($objectCall)) {
$object = new $objectCall($db);
if(method_exists($object, $array['action']) === true) {
if(isset($array['params'])) {
// Braces make PHP call the method named in $array['action'] (avoids the PHP 7 change in variable-variable parsing)
$xml = $object->{$array['action']}($array['params']);
} else {
$xml = $object->{$array['action']}();
}
} else if(is_numeric(trim($array['action'], 'Action'))) {
// Note: trim() strips any of the characters "A", "c", "t", "i", "o", "n" from both ends rather
// than removing the literal suffix "Action"; it only works here because the remaining value must be numeric
array_unshift($array['params'], trim($array['action'], 'Action'));
$xml = $object->$default($array['params']);
} else {
$notFound = true;
}
} else {
$notFound = true;
}
if(isset($notFound) && $notFound === true) {
header('HTTP/1.0 404 Not Found');
echo "<h1>404 Not Found</h1>";
echo "The page that you have requested could not be found.";
exit;
}
if(isset($xml)) {
libxml_use_internal_errors(true);
$dom = new DOMDocument();
if(!$dom->loadXML($xml)) {
foreach (libxml_get_errors() as $err) {
error_log("[".date("Y/m/d H:i:s")."] ERROR.loading | Col: ".$err->column.", Line: ".$err->line." | Message: ".$err->message."\r", 3, "logs/xml.log");
}
libxml_clear_errors();
}
if (!$dom->schemaValidate('XML/TP1_Schema.xsd')) {
foreach(libxml_get_errors() as $err) {
error_log("[".date("Y/m/d H:i:s")."] ERROR.validation | Col: ".$err->column.", Line: ".$err->line." | Message: ".$err->message."\r", 3, "logs/xml.log");
}
echo 'Une erreur s\'est produite.';
} else {
var_dump($xml);
}
}
<file_sep><?php
namespace Core;
class Router
{
public function getRoute()
{
$URI = $_SERVER['REQUEST_URI'];
$URIArray = explode('/', trim($URI, '/'));
$array = [];
$array['api'] = $URIArray[0];
$array['controllerName'] = (isset($URIArray[1]) && $URIArray[1] != '') ? $URIArray[1] : 'index';
$array['controller'] = $array['controllerName'].'Controller';
$array['action'] = (isset($URIArray[2])) ? $URIArray[2] : 'index';
$array['action'] .= 'Action';
$array['params'] = [];
if(isset($URIArray[3])) {
for($i = 3; $i < count($URIArray); $i++) {
$array['params'][] = $URIArray[$i];
}
}
if(isset($array['params']) && isset($_POST)) {
$array['params'] = array_merge($array['params'], $_POST);
}
return $array;
}
public function getDefault()
{
return 'indexAction';
}
}
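/*
 * Illustration of getRoute() above (hypothetical request, assuming the app is reached
 * under a first path segment such as "api"): a GET to /api/jeu/show/12 yields
 *   'api'            => 'api'
 *   'controllerName' => 'jeu'
 *   'controller'     => 'jeuController'
 *   'action'         => 'showAction'
 *   'params'         => ['12'] (merged with any $_POST fields)
 * index.php then instantiates Jeu\Jeu from Controller/jeuController.php and calls showAction(),
 * falling back to getDefault() ('indexAction') when the action segment is numeric.
 */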
|
b189350fef35c3fd3220683ad6dfb812402028f8
|
[
"JavaScript",
"SQL",
"PHP"
] | 9 |
PHP
|
Sinuide/restapixml
|
a11fe6b36a30064562707a3e22e2c4ade00c5e88
|
cdbea69e5fe6f049cafe685e583ef3f8e7a4c2cd
|
refs/heads/main
|
<file_sep>const Animation = (props) => {
return (
<section className="animation-flex">
<p className="animation">V Scroll For More V</p>
</section>
)
}
export default Animation;
|
056ce7a1f5df7661437c8a39f91a0a48368ae29a
|
[
"JavaScript"
] | 1 |
JavaScript
|
mattwoodruff808/html-css-3-lab-exercise
|
71a18183e18767a7af744aaee99ce81f2c988e46
|
47c26b6b2988fa96e1ca4ea789e33c27ec31afba
|
refs/heads/master
|
<file_sep><?php
session_start();
class helper{
protected $errors;
protected $data;
function __construct(){
$this->errors=isset($_SESSION["form_error"]) && is_array($_SESSION["form_error"])?$_SESSION["form_error"]:NULL;
$this->data=isset($_SESSION["form_data"]) && is_array($_SESSION["form_data"])?$_SESSION["form_data"]:NULL;
if(is_array($this->errors)):unset($_SESSION["form_error"]);endif;
if(is_array($this->data)):unset($_SESSION["form_data"]);endif;
}
public function getE($key=NULL){
//Get filtered single error
if(is_array($this->errors) && is_string($key) && array_key_exists($key,$this->errors) && !empty(trim($this->errors[$key]," "))){
return $this->errors[$key];
}
return NULL;
}
public function getD($key=NULL){
//Get filtered Data
if(is_array($this->data) && is_string($key) && array_key_exists($key,$this->data) && !empty(trim($this->data[$key]," "))){
return $this->data[$key];
}
return NULL;
}
}
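/*
 * How the pieces fit together (summary of existing behaviour, nothing new): form_action.php
 * stores validation errors in $_SESSION["form_error"] and the submitted values in
 * $_SESSION["form_data"] before redirecting back here; the constructor above reads both arrays
 * once and unsets them, a simple "flash message" pattern. For example, after a failed submit
 * $sc->getE("email") might return "Invalid email" and $sc->getD("email") the value the user
 * typed, while on the following page load both return NULL again.
 */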
$sc=new helper();
?>
<!DOCTYPE html>
<html>
<head>
<title>Sample copy</title>
<link rel="stylesheet" href="bootstrap-3.3.7/css/bootstrap.min.css"/>
<script src="jquery-3.1.1.min.js"></script>
<script src="bootstrap-3.3.7/js/bootstrap.min.js"></script>
</head>
<body>
<div class="container" style="margin-top:50px;">
<div class="row">
<div class="col-md-6 col-md-offset-3">
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Sample signup</h3>
</div>
<div class="panel-body">
<form class="form-horizontal" action="form_action.php" method="post">
<div class="form-group">
<div class="col-md-12<?php echo $sc->getE("fname")?' has-error':NULL;?>">
<label class="control-label" for="fname">First name</label>
<input name="fname" id="fname" class="form-control" type="text" value="<?php echo $sc->getD("fname");?>"/>
<?php if($sc->getE("fname")):?>
<span class="help-block"><?php echo $sc->getE("fname");?></span>
<?php endif;?>
</div>
<div class="col-md-12<?php echo $sc->getE("lname")?' has-error':NULL;?>">
<label class="control-label" for="lname">Last name</label>
<input name="lname" id="lname" class="form-control" type="text" value="<?php echo $sc->getD("lname");?>"/>
<?php if($sc->getE("lname")):?>
<span class="help-block"><?php echo $sc->getE("lname");?></span>
<?php endif;?>
</div>
<div class="col-md-12<?php echo $sc->getE("email")?' has-error':NULL;?>">
<label class="control-label" for="email">Email address</label>
<input name="email" class="form-control" id="email" type="email" value="<?php echo $sc->getD("email");?>"/>
<?php if($sc->getE("email")):?>
<span class="help-block"><?php echo $sc->getE("email");?></span>
<?php endif;?>
</div>
<div class="col-md-12<?php echo $sc->getE("password")?' has-error':NULL;?>">
<label class="control-label" for="password">Password</label>
<input name="password" class="form-control" id="password" type="password" value="<?php echo $sc->getD("password");?>"/>
<?php if($sc->getE("password")):?>
<span class="help-block"><?php echo $sc->getE("password");?></span>
<?php endif;?>
</div>
<div class="col-md-12<?php echo $sc->getE("cpassword")?' has-error':NULL;?>">
<label class="control-label" for="cpassword">Confirm password</label>
<input name="cpassword" class="form-control" id="cpassword" type="password" value="<?php echo $sc->getD("cpassword");?>"/>'
<?php if($sc->getE("cpassword")):?>
<span class="help-block"><?php echo $sc->getE("cpassword");?></span>
<?php endif;?>
</div>
</div>
<input type="hidden" name="signup_form" value="1"/>
<div class="form-group">
<?php if($sc->getE("main")):?>
<div class="bg-primary" style="padding:15px"><?php echo $sc->getE("main");?></div>
<?php endif;?>
<div class="col-md-12">
<button class="btn btn-primary" type="submit">Signup</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</body>
</html>
<file_sep><?php
session_start();
if(!filter_input(INPUT_POST,"signup_form")){
header("location:index.php");
exit();
}
class form_action_helper{
protected $f_data;
protected $error;
function __construct(){
$this->error=new stdClass();//Define as an object variable
$this->f_data=(object)filter_input_array(INPUT_POST);//Load form post data and convert to object
$this->check_form();//Run checking function
$this->insert_to_database();//Run Insert to database function
}
private function FPAE($key=NULL){//Form Post array key exists check function
return array_key_exists($key,(array)$this->f_data)?1:0;
}
private function check_form(){
$form_fields_check_data=array_unique(array($this->FPAE("fname"),$this->FPAE("lname"),$this->FPAE("email"),$this->FPAE("password"),$this->FPAE("cpassword")));
if(in_array(0, $form_fields_check_data, true)):// any single missing field should trigger the error, not only the first one
$this->error->main="HTML modified, please try again.";
else:
if(!$this->f_data->fname){$this->error->fname="Can't left empty";}//First name empty check
elseif(preg_match('/[^a-zA-Z]/',$this->f_data->fname)){$this->error->fname="Invalid first name";}
if(!$this->f_data->lname){$this->error->lname="Can't left empty";}
elseif(preg_match('/[^a-zA-Z]/',$this->f_data->lname)){$this->error->lname="Invalid last name";}
if(!$this->f_data->email){$this->error->email="Can't left empty";}
elseif(!filter_var($this->f_data->email,FILTER_VALIDATE_EMAIL)){$this->error->email="Invalid email";}
if(!$this->f_data->password){$this->error->password="Can't left empty";}
elseif(strlen($this->f_data->password)<5){$this->error->password="<PASSWORD>";}
elseif(strlen($this->f_data->password)>16){$this->error->password="Maximum 16 characters allowed";}
if(!$this->f_data->cpassword){$this->error->cpassword="Can't left empty";}
elseif($this->f_data->password && $this->f_data->cpassword && !isset($this->error->password) && $this->f_data->password!== $this->f_data->cpassword){$this->error->cpassword="Password not matched";}
endif;
}
private function insert_to_database(){
if(empty(implode("",(array)$this->error))){
/* --- Insert data to database by help of ( $this->f_data ) ----*/
echo "No error Found";
}
}
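/*
 * A minimal sketch of what insert_to_database() could do once validation passes — illustrative
 * only; the $pdo connection and the table/column names below are assumptions, not part of this repo:
 *
 *   $stmt = $pdo->prepare("INSERT INTO users (fname, lname, email, password) VALUES (?, ?, ?, ?)");
 *   $stmt->execute([
 *       $this->f_data->fname,
 *       $this->f_data->lname,
 *       $this->f_data->email,
 *       password_hash($this->f_data->password, PASSWORD_DEFAULT),
 *   ]);
 */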
public function get_error(){
return (array)$this->error;//Convert THIS CLASS error VARIABLE from object to array
}
public function get_data(){
return (array)$this->f_data;//Convert THIS CLASS f_data VARIABLE from object to array
}
}
$self_form_action_helper=new form_action_helper();
$error=$self_form_action_helper->get_error();
$data=$self_form_action_helper->get_data();
if(!empty(implode("",$error))){
// print_r($error); // debug output before header() would prevent the redirect below ("headers already sent")
$_SESSION["form_error"]=$error;
$_SESSION["form_data"]=$data;
header("Location:index.php");
exit();
}
|
7f7b4ab19efa5ad9c8d80437506feb1e314b395e
|
[
"PHP"
] | 2 |
PHP
|
NilGems/Sample-signup-by-PHP-oops
|
ed4574068f7c972cee0b98334f967cc3f9fd9c45
|
9d0df60c6ed9ca2b52da6ceb7f820c9d55e3a788
|
refs/heads/main
|
<repo_name>youssefatti/TodoList<file_sep>/src/types/TodoTypes.ts
import { ADD_TODO, EDIT_TODO, DELETE_TODO } from "../store/actions/todoActionsConstantes"
export type TodoItem = {
text: string,
key: string,
}
export type TodoTypesState = Array<TodoItem>
export interface AddTodoAction {
type: typeof ADD_TODO,
payload: TodoItem
}
export interface EditTodoAction {
type: typeof EDIT_TODO,
payload: TodoItem
}
export interface DeleteTodoAction {
type: typeof DELETE_TODO,
key: string
}
export type TodoActionsType = AddTodoAction | EditTodoAction | DeleteTodoAction<file_sep>/src/store/reducers/todosReducer.ts
import { TodoActionsType, TodoTypesState } from "../../types/TodoTypes";
import { ADD_TODO, EDIT_TODO, DELETE_TODO } from "../actions/todoActionsConstantes";
const initialState:TodoTypesState = [
{
text:'Faire le test technique',
key:'1'
},
{
text:'Sauvegarder le test technique',
key:'2'
},
{
text:'Envoyer le test technique',
key:'3'
},
{
text:`Aller à l'entretien`,
key:'4'
},
]
const todosReducer = (state = initialState, action:TodoActionsType) => {
switch (action.type) {
case ADD_TODO: {
// Add the new todo to the list
return [
...state, action.payload
]
}
case EDIT_TODO: {
// Looking for the current todo to update and edit it
return state.map((todo:any) => {
if (todo.key === action.payload.key) {
return {
...todo,
...action.payload,
};
} else {
return todo;
}
});
}
case DELETE_TODO: {
// Looking for the current todo to delete and remove it
return state.filter((todo:any) => todo.key !== action.key);
}
default: {
return state;
}
}
};
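// Illustrative only: dispatching addTodo({ text: 'Review the code', key: '5' }) (the action
// creators live in ../actions/todoAction.ts) hits the ADD_TODO case above and returns a new
// array with the extra item appended; the previous state array is never mutated.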
export default todosReducer;<file_sep>/README.md
# TodoList
# Install Dependencies :
yarn install
# run ios :
yarn ios
# run android :
yarn android
<file_sep>/src/assets/Colors.ts
export default {
primary: '#1292B4',
white: '#FFF',
lighter: '#F3F3F3',
dark: '#444',
black: '#000',
red: '#DA7465',
blue:'#88B5DD',
};<file_sep>/src/store/actions/todoAction.ts
import { ActionCreator } from 'redux';
import { AddTodoAction, DeleteTodoAction, EditTodoAction, TodoItem } from './../../types/TodoTypes';
import { ADD_TODO, EDIT_TODO, DELETE_TODO } from "./todoActionsConstantes";
export const addTodo :ActionCreator<AddTodoAction> = (payload:TodoItem)=> {
return {
type: ADD_TODO,
payload,
};
}
export const editTodo :ActionCreator<EditTodoAction> = (payload:TodoItem)=> {
return {
type: EDIT_TODO,
payload,
};
}
export const deleteTodo :ActionCreator<DeleteTodoAction> = (key:string)=> {
return {
type: DELETE_TODO,
key,
};
}<file_sep>/src/store/reducers/rootReducer.ts
import { combineReducers } from "redux";
import type { StateType } from 'typesafe-actions'
import todosReducer from "./todosReducer"
export type RootState = StateType<typeof rootReducer>
const rootReducer = combineReducers({
todos : todosReducer
})
export default rootReducer<file_sep>/src/store/actions/rootAction.ts
import { ActionType } from 'typesafe-actions';
import {addTodo, editTodo, deleteTodo} from './todoAction';
export type RootAction = ActionType<typeof rootActions>
const rootActions = {
addTodo, editTodo, deleteTodo,
}
export default rootActions<file_sep>/src/types/StackParamList.ts
export type StackParamList = {
Home: undefined;
Details: { typeAction:'add' | 'edit', data?: any };
};
|
6ee48584bd35b85380e3a9a5daad94927710cab8
|
[
"Markdown",
"TypeScript"
] | 8 |
TypeScript
|
youssefatti/TodoList
|
8ffab45224c8563b5c70a1b51d47b96260c3a4dc
|
b95a1686e704aa13b1d7a51fd557804fddaf5394
|
refs/heads/master
|
<repo_name>Fashad-Ahmed/Python-Practices<file_sep>/join function.py
sample_list2 = ["chalk", "duster", "board", "chair", "table", "benches"]
print("We need", ", ".join(sample_list2), "in class.")
print(",".join("python"))
<file_sep>/1b.py
import t1a
t1a.mostimportant()<file_sep>/concatenation of dictionaries.py
#d1={"a":1,"b":2}
#d2={"c":3,"d":4}
d1={}
x=int(input("enter number of records: "))
for i in range(x):
key=input("enter key: ")
value = input("enter value: ")
d1[key] = value
print(d1)
d2={}
y=int(input("enter number of records: "))
for i in range(y):
key=input("enter key: ")
value = input("enter value: ")
d2[key] = value
print(d2)
# d={}
# for i in d1:
# d[i]=d1.values()
# for j in d2:
# d[j]=d2.values()
# print(d)
# ******************************** alternate way************************
# d={**d1,**d2}
# print(d)
d={}
for i in d1.items():
d[i[0]] = i[1]
for j in d2.items():
d[j[0]] = j[1]
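# On Python 3.9+ the same merge can be written with the union operator: d = d1 | d2
# (values from d2 win on duplicate keys, just like applying d2 last in the loops above)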
print(d)<file_sep>/map filter reduce.py
from functools import reduce
# map ( func. to apply, no. of inputs)
def cube(x):
return x**3
l1 = [1,2,3,4,5,6]
l2 = list(map(cube,l1))
# TYPE-CASTED IN LIST
print(l2)
def greater_than_2(n):
if n>2:
return True
else:
return False
h1 = [1,2,3,4,5,6,7,-2,-5]
greater_th_2 = list(filter(greater_than_2, h1))
print(greater_th_2)
def string_sum(a, b):
return a+b
newstring = reduce(string_sum, ["You", " are", " human"])
print(newstring)
sum_x = reduce(string_sum,h1)
print(sum_x)<file_sep>/format.py
pet_name = ['mox','tom','bruno']
pet_type = ['cat','kitten','bulldog']
for i in range(len(pet_name)):
out = "{} is a {}"
print(out.format(pet_name[i],pet_type[i]))<file_sep>/comprehension.py
list_1 = [1,32,4,5,45,4,4,3,3,3,5,6,3,5,6,343,343,5,4]
print("Using List comprehensions", [item for item in list_1 if item%3==0])
dict1 = {'a':45, 'b':65, 'A':5}
print({k.lower():dict1.get(k.lower(), 0)+dict1.get(k.upper(), 0) for k in dict1.keys()})
squared = {x**2 for x in [1,2,3,4, 4,4,4,5,5,5,5]}
print(squared)
gen = (i for i in range(56) if i%3==0)
for item in gen:
print(item)
<file_sep>/exceptions.py
try:
a=int(input("enter a number:"))
b=int(input("enter a number:"))
print(a/b)
except ZeroDivisionError:
    print("sahi num daluu na yr")
except ValueError:
    print("same data type dalu")
except Exception as e:
    # the generic handler must come last, otherwise the specific handlers above can never run
    print("sahi number daluu bhai!")
else:
    print("contact ur boss")
finally:
print("thank you")
<file_sep>/parser html.py
from urllib.request import urlopen
from bs4 import BeautifulSoup
base_URL = "http://olympus.realpython.org"
address = base_URL + "/profiles"
htmlPage = urlopen(address)
htmlText = htmlPage.read().decode("utf-8")
soup = BeautifulSoup(htmlText, "html.parser")
for anchor in soup.find_all("a"):
linkAdd = base_URL + anchor['href']
print(f"--- fetching {linkAdd}: ")
linkPage = urlopen(linkAdd)
linkText = linkPage.read().decode('utf-8')
linkSoup = BeautifulSoup(linkText, 'html.parser')
print(linkSoup.getText())<file_sep>/dictionary basic.py
#program to add key to dictionary
dictionary={"first name": "fashad","last name":"ahmed"}
dictionary["roll number"]="ct-043"
print(dictionary)<file_sep>/t1a.py
import os
def mostimportant():
print("LMAOOO !!!")
print(__name__)
def main():
print(os.listdir("/"))
print("LA MASIA")
if (__name__== "__main__"):
# print(os.listdir("/"))
# print("LA MASIA")
main();<file_sep>/args and kwargs.py
def fun(*args):
print("Hello ",args[0],",",args[1],"and",args[2])
print(type(args))
fun("jane","jake","john")
list_1 = ["sam","alex","cody"]
fun(*list_1)
def showMarks(**kwargs):
print(type(kwargs))
for key,value in kwargs.items():
print(key,value)
list_2 = {"adlas":20 , "adler":36 , "lolla":41 , "ellenberg":46}
showMarks(**list_2)
print("\t ****************************************")
def master(normal,*args,**kwargs):
print(normal)
for i in args:
print(i)
for key, value in kwargs.items():
print(key, value)
master(5,*list_1,**list_2)<file_sep>/html forms.py
import mechanicalsoup
browser = mechanicalsoup.Browser()
loginUrl = "http://olympus.realpython.org/login"
loginPage = browser.get(loginUrl)
loginHtml = loginPage.soup
print(loginPage, loginHtml)
form = loginHtml.form
form.select('input')[0]['value'] = 'zapata'
form.select('input')[1]['value'] = 'alexdarias'
subForm = browser.submit(form, loginPage.url)
print(subForm)
title = subForm.soup.title
print(f"Title: {title.text}")
loginPage = browser.get(loginUrl)
logTitle = loginPage.soup.title
print(f"title: {logTitle.text}")
form = loginHtml.form
form.select('input')[0]['value'] = 'wrong'
form.select('input')[1]['value'] = '<PASSWORD>'
errorPage = browser.submit(form, loginPage.url)
print("Login Failed.") if errorPage.text.find("Wrong username or password!") != 1 else print("Login Successful.")<file_sep>/dict call.py
#dict1={"name": "fashad","roll.no":43}
#name_of_std=dict1["name"]
#print(name_of_std)
list=[[1,2,3],[4,5,6],[7,8,9]]
print(list[0][0],list[1][0],list[2][0])
print(list[0][0]+list[1][0]+list[2][0])
<file_sep>/duplications.py
x=int(input("enter character: "))
list1=[]
for i in range(x):
a=int(input("ennter number: "))
if type(a) == int:
list1.append(a)
else:
continue
final=[]
for i in list1:
if i not in final:
final.append(i)
print(final)<file_sep>/asterik diagonal.py
#x = 1
'''
for i in range(1,6):
print('*')
for j in range(0,i):
print(" ",end="")
'''
#j = 6
#while x <
'''
x = 1
y = 6
space = " "
while x<=y:
print("*")
print(space * x, end="")
x += 1
'''
x =0
y = 6
#sp = " "
while x <=6:
print("*")
x+=1
# print(" ")
for i in range(0,x):
print(" ",end="")
# x+=1
<file_sep>/clean city.py
cleanest_cities = ["Karachi", "Peshawar", "Quetta"]
value = input("Enter city: ")
for city in cleanest_cities:
    if value == city:
        print("Cleanest city")
        break
else:
    # for/else: this branch runs only if the loop ends without hitting break
    print("Not Cleanest city")
<file_sep>/README.md
# Python-Practices
These files are basic practice exercises covering Python syntax, data structures, and common idioms, written to understand and learn the language.
<file_sep>/scrape and parse.py
from urllib.request import urlopen
import re
url = "http://olympus.realpython.org/profiles/dionysus"
html_page = urlopen(url)
html_text = html_page.read().decode("utf-8")
for tag in ["Name: ", "Favorite animal: "]:
tag_start = html_text.find(tag) + len(tag)
tag_end = html_text[tag_start:].find("<")
# Remove extra spaces and newline padding
print(html_text[tag_start : tag_start + tag_end].strip(" \n"))
for tag in ["Name: .*?[\n<]", "Favourite Color: .*?[\n<]"]:
match_results = re.search(tag, html_text)
# remove label from first result
result = re.sub(".*: ", "", match_results.group())
print(result)
print(result.strip(" \n<"))
<file_sep>/iterators iterables decoraters.py
def gen(n):
for i in range(n):
yield i
for i in gen(100000):
print(i)
def gen(n):
for i in range(n):
yield i
ob1 = gen(4)
print(next(ob1))
print(next(ob1))
print(next(ob1))
print(next(ob1))
# a fifth next(ob1) would raise StopIteration here, because gen(4) only yields four values
num = "abcd"
iter1 = iter(num)
print(next(iter1))
print(next(iter1))<file_sep>/bisect insort.py
import bisect
lis = [1,2,5,10,15,17,28,35,59,61]
print(bisect.bisect(lis, 55))
bisect.insort(lis, 55)
print(lis)
lis1 = ["a","e", "h", "m", "o", "s"]
print(bisect.bisect(lis1, "k"))
bisect.insort(lis1, "k")
print(lis1)
<file_sep>/continue break.py
'''
i = 1
while i <= 10:
if i%2==1:
print(i)
i = i + 1
else:
continue
'''
# note: starting from i = 0 the first i%2 test is False, so this version breaks immediately and prints nothing
i = 0
while i <= 10:
if i%2==1:
print(i)
i = i + 1
continue
else:
break
<file_sep>/enumerate.py
a = ["CHERRY", "PUMPKIN", "JELLY", "<NAME>"]
for i, item in enumerate(a):
print(i, item)
# i = 0
# for item in a:
# print(i, item)
# i += 1
|
64cc7f7a184ffc8b22744bbe68bce0949a1445ce
|
[
"Markdown",
"Python"
] | 22 |
Python
|
Fashad-Ahmed/Python-Practices
|
25bcaffb134017afc697c7b5c5adb0fc005d93e7
|
f57e86160c3f74f376b20d133a78973c458a0cb6
|
refs/heads/master
|
<file_sep>let sideEffectsChart;
const searchForm = () => {
const searchQueryElem = document.querySelector('.search-input');
const searchQuery = searchQueryElem.value;
if (!searchQuery) {
searchQueryElem.classList.add('is-invalid');
return false; // don't hit the API with an empty query
}
searchQueryElem.classList.remove('is-invalid');
searchAdverseEvents(searchQuery)
.then((response) => {
clearResults();
if (response.results && response.results.length > 0) {
showAdverseEvents(response.results, searchQuery);
} else {
displayText('There are no results.');
}
})
.catch((error) => {
console.error(error);
clearResults();
displayText('No connection. Please try again');
});
return false;
}
const clearResults = () => {
document.getElementById("results").innerHTML = "";
}
const displayText = (text) => {
const textElem = document.createElement("p");
textElem.classList.add("text-center");
textElem.classList.add("empty-results");
textElem.textContent = text;
document.getElementById("results").appendChild(textElem);
}
const convertObjToParams = (obj) => {
let str = "";
for (let key in obj) {
if (str != "") {
str += "&";
}
str += key + "=" + encodeURIComponent(obj[key]);
}
return str;
}
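// For instance, convertObjToParams({ search: 'aspirin pain', limit: 5 }) returns
// "search=aspirin%20pain&limit=5" — values are URI-encoded, keys are left as-is.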
const searchAdverseEvents = (searchQuery) => {
let url = "https://api.fda.gov/drug/event.json?"
url += convertObjToParams({
search: 'patient.drug.medicinalproduct:"' + searchQuery + '"',
count: 'patient.reaction.reactionmeddrapt.exact',
api_key:'<KEY>',
});
return fetch(url, {mode: 'cors'})
.then((response) => {
return response.json()
});
}
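// The count query above is expected to resolve to a body shaped roughly like
// { "results": [ { "term": "HEADACHE", "count": 1234 }, ... ] }; showAdverseEvents()
// below reads those term/count pairs, so treat the exact field names as what this code assumes.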
function changeData(chart, labels, data) {
chart.data.labels = labels;
chart.data.datasets.forEach((dataset) => {
dataset.data = data;
});
chart.update();
}
const showAdverseEvents = (results, drug) => {
let myChart = document.getElementById('mychart').getContext('2d');
const terms = [];
const counts = [];
// declare the loop index and guard against fewer than five results
for (let i = 0; i < Math.min(5, results.length); i++) {
terms.push(results[i].term)
counts.push(results[i].count)
}
console.log(terms);
console.log(counts);
if (sideEffectsChart) {
sideEffectsChart.options.title.text = drug; // set the title before changeData() calls chart.update()
changeData(sideEffectsChart, terms, counts);
}
else {
sideEffectsChart = new Chart(myChart, {
type: 'doughnut',//bar,horizontal bar,pie,radar,line,polararea
data: {
labels: terms,
datasets: [{
data: counts,
backgroundColor: [
'rgba(255, 99, 132, 0.2)',
'rgba(54, 162, 235, 0.2)',
'rgba(255, 206, 86, 0.2)',
'rgba(75, 192, 192, 0.2)',
'rgba(153, 102, 255, 0.2)',
],
borderColor: [
'rgba(255,99,132,1)',
'rgba(54, 162, 235, 1)',
'rgba(255, 206, 86, 1)',
'rgba(75, 192, 192, 1)',
'rgba(153, 102, 255, 1)',
],
borderWidth: 1,
hoverBorderColor:'#000'
}],
},
options: {
title: {
display: true,
text: drug
},
legend :{
position :'right'
}
}
});
}
}
|
2da10f70c3cbab5d216912283d881d56e6cd1057
|
[
"JavaScript"
] | 1 |
JavaScript
|
MahILUL/adverse-effects-web-app
|
1696e57dd38af9d836a1e3115ebc95d32751bca3
|
fc296284200aa2e416be98af877266c174ed1bd3
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using Microsoft.Win32;
namespace shittywinlock {
public partial class Form1 : Form {
string pravelniiCod = "73556087355608123";
bool joeBiden = false;
public Form1() {
this.FormBorderStyle = FormBorderStyle.None;
this.WindowState = FormWindowState.Maximized;
this.FormClosing += Form1_FormClosing;
TopMost = true;
// Registry.CurrentUser.CreateSubKey(@"");
InitializeComponent();
System.Diagnostics.Process.Start("taskkill.exe", "/f /im explorer.exe");
// System.Diagnostics.Process.Start("mountvol.exe", "C: /d");
}
private void typedPravCod() {
System.Diagnostics.Process.Start(System.IO.Path.Combine(Environment.GetEnvironmentVariable("windir"), "explorer.exe")); ;
joeBiden = true;
Application.Exit();
}
private void button1_Click(object sender, EventArgs e) {
label4.Text += "1";
}
private void button2_Click(object sender, EventArgs e) {
label4.Text += "2";
}
private void button3_Click(object sender, EventArgs e) {
label4.Text += "3";
}
private void button4_Click(object sender, EventArgs e) {
label4.Text += "4";
}
private void button5_Click(object sender, EventArgs e) {
label4.Text += "5";
}
private void button6_Click(object sender, EventArgs e) {
label4.Text += "6";
}
private void button7_Click(object sender, EventArgs e) {
label4.Text += "7";
}
private void button8_Click(object sender, EventArgs e) {
label4.Text += "8";
}
private void button9_Click(object sender, EventArgs e) {
label4.Text += "9";
}
private void button10_Click(object sender, EventArgs e) {
label4.Text += "0";
}
private void button11_Click(object sender, EventArgs e) { // clear
label4.Text = "";
}
private void button12_Click(object sender, EventArgs e) { // basically enter button
if (label4.Text == pravelniiCod) {
typedPravCod();
}
else {
label4.Text = "";
label3.Text = "Неправельный код! Not corect cod\nCod : ";
}
}
private void Form1_FormClosing(object sender, FormClosingEventArgs e) {
if(!joeBiden) e.Cancel = true;
}
private void button13_Click(object sender, EventArgs e) {
MessageBox.Show("If you see this running, most probably a forkie, a pornfag or an anydeskfag was here before. This VM is no longer usable, do a vote reset", "About", MessageBoxButtons.OK);
}
}
}
|
158c761c006811513d3905235c90fe4b949df905
|
[
"C#"
] | 1 |
C#
|
h-hh-hhh/shittywinlock
|
92f234dd73de2fd2bf84333872acbc0620af2e8a
|
e3b3cdf692beb3664ee6eae8c6c2d40c9e9d5c74
|
refs/heads/master
|
<file_sep>#include <Servo.h>
#define SERVO 2
Servo s; // Servo variable
int pos; // Servo position
void setup ()
{
s.attach(SERVO);
Serial.begin(9600);
s.write(0); // Start the motor at position zero
}
void loop()
{
s.write(0);
delay(1000);
s.write(180);
delay(1000);
}
<file_sep>package br.com.brunohpmarques.ewbc.adapters;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import java.util.List;
import br.com.brunohpmarques.ewbc.R;
import br.com.brunohpmarques.ewbc.holders.CommandOptionHolder;
import br.com.brunohpmarques.ewbc.models.Command;
/**
* Created by <NAME> on 08/08/2017.
*/
public class CommandOptionAdapter extends RecyclerView.Adapter<CommandOptionHolder> {
private List<Command> list;
public CommandOptionAdapter(List<Command> Data) {
list = Data;
}
@Override
public CommandOptionHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext())
.inflate(R.layout.item_command_option, parent, false);
CommandOptionHolder holder = new CommandOptionHolder(view);
return holder;
}
@Override
public void onBindViewHolder(CommandOptionHolder holder, int position) {
holder.commandTitle.setText(list.get(position).getTitle());
holder.commandPosition.setText(position+"");
holder.commandImage.setImageResource(list.get(position).getResourceId());
}
@Override
public int getItemCount() {
return list.size();
}
}
<file_sep>package br.com.brunohpmarques.ewbc;
import android.app.Activity;
import android.app.ProgressDialog;
import android.app.admin.DeviceAdminInfo;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothClass;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.content.res.ResourcesCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.app.AppCompatDelegate;
import android.support.v7.view.menu.ActionMenuItemView;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Logger;
import br.com.brunohpmarques.ewbc.adapters.CommandAdapter;
import br.com.brunohpmarques.ewbc.adapters.CommandOptionAdapter;
import br.com.brunohpmarques.ewbc.bluetooth.BluetoothHC;
import br.com.brunohpmarques.ewbc.bluetooth.BluetoothReceive;
import br.com.brunohpmarques.ewbc.models.Command;
import br.com.brunohpmarques.ewbc.models.ECommandCode;
/**
* Created by <NAME> on 08/08/2017.
*/
public class MainActivity extends AppCompatActivity {
public static BluetoothHC bt;
public static BluetoothReceive bluetoothReceive;
public static Menu menu;
public static final String TAG = Command.class.getSimpleName().toUpperCase() + "_TAG";
private static final int REQUEST_PERMISSIONS = 543;
/**
* List of available commands
*/
private static final List<Command> commandOptionList = new ArrayList<>();
/**
* List of commands to be sent
*/
private static final List<Command> mainList = new ArrayList<>();
private static boolean isSending;
private static MainActivity mainInstance;
private static LinearLayoutManager horizontalLayoutManager, verticalLayoutManager;
private static RecyclerView horizontalList;
private static RecyclerView verticalList;
private static ProgressDialog progressDialog;
private static Button btnStart;
private static RelativeLayout mainLayout;
private static LinearLayout emptyLayout;
//////////////////////////////////////////////
private static final Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case BluetoothHC.MESSAGE_STATE_CHANGE:
Log.d(TAG, "MESSAGE_STATE_CHANGE: " + msg.arg1);
break;
case BluetoothHC.MESSAGE_WRITE:
Log.d(TAG, "MESSAGE_WRITE ");
break;
case BluetoothHC.MESSAGE_READ:
Log.d(TAG, "MESSAGE_READ ");
break;
case BluetoothHC.MESSAGE_DEVICE_NAME:
Log.d(TAG, "MESSAGE_DEVICE_NAME " + msg);
break;
case BluetoothHC.MESSAGE_TOAST:
Log.d(TAG, "MESSAGE_TOAST " + msg);
break;
}
}
};
public static MainActivity getInstance() {
return mainInstance;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
AppCompatDelegate.setCompatVectorFromResourcesEnabled(true);
MainActivity.bt = new BluetoothHC(this, mHandler, BluetoothAdapter.getDefaultAdapter());
MainActivity.bluetoothReceive = new BluetoothReceive();
// Commands
if (commandOptionList != null) commandOptionList.clear();
if (mainList != null) mainList.clear();
ECommandCode commOps[] = ECommandCode.values();
Command commandOption;
for (int i = 0; i < commOps.length; i++) {
commandOption = new Command(commOps[i]);
commandOptionList.add(commandOption);
}
//
// Layout
this.horizontalLayoutManager = new LinearLayoutManager(this, LinearLayoutManager.HORIZONTAL, false);
this.horizontalList = (RecyclerView) findViewById(R.id.horizontalList);
if (commandOptionList.size() > 0 && horizontalList != null) {
horizontalList.setAdapter(new CommandOptionAdapter(commandOptionList));
}
horizontalList.setLayoutManager(horizontalLayoutManager);
this.verticalLayoutManager = new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false);
this.verticalList = (RecyclerView) findViewById(R.id.verticalList);
if (this.mainList.size() > 0 && verticalList != null) {
verticalList.setAdapter(new CommandAdapter(this.mainList));
}
verticalList.setLayoutManager(verticalLayoutManager);
this.mainLayout = (RelativeLayout) findViewById(R.id.mainLayout);
this.emptyLayout = (LinearLayout) findViewById(R.id.emptyLayout);
Drawable draw = ResourcesCompat.getDrawable(getResources(), R.drawable.ic_add_circle, null).mutate();
((ImageView) findViewById(R.id.btnEmptyLayout)).setImageDrawable(draw);
this.btnStart = (Button) findViewById(R.id.btnStart);
this.btnStart.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
sendCommands();
}
});
if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP){
Drawable roundDrawable = ResourcesCompat.getDrawable(getResources(), R.drawable.btnstart, null).mutate();
roundDrawable.setColorFilter(ResourcesCompat.getColor(getResources(), R.color.green, null), PorterDuff.Mode.SRC_ATOP);
this.btnStart.setBackground(roundDrawable);
}
//
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu_options, menu);
this.menu = menu;
MenuItem bltDisabled = this.menu.findItem(R.id.btnBltDisabled);
MenuItem bltActivated = this.menu.findItem(R.id.btnBltActivated);
MenuItem bltConnecting = this.menu.findItem(R.id.btnBltConnecting);
MenuItem bltConnected = this.menu.findItem(R.id.btnBltConnected);
Drawable draw = ResourcesCompat.getDrawable(getResources(), R.drawable.ic_bluetooth_disabled, null).mutate();
bltDisabled.setIcon(draw);
draw = ResourcesCompat.getDrawable(getResources(), R.drawable.ic_bluetooth, null).mutate();
bltActivated.setIcon(draw);
draw = ResourcesCompat.getDrawable(getResources(), R.drawable.ic_bluetooth_searching, null).mutate();
bltConnecting.setIcon(draw);
draw = ResourcesCompat.getDrawable(getResources(), R.drawable.ic_bluetooth_connected, null).mutate();
bltConnected.setIcon(draw);
if(bt != null && bt.isOn()){
setIcBluetooth(R.id.btnBltActivated);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle item selection
if (BluetoothAdapter.getDefaultAdapter() != null) {
switch (item.getItemId()) {
case R.id.btnBltDisabled:
// bluetooth disabled
bt.on();
return true;
case R.id.btnBltActivated:
// bluetooth enabled
bt.findDevices(MainActivity.this);
return true;
case R.id.btnBltConnecting:
// connecting to the robot
bt.off();
return true;
case R.id.btnBltConnected:
// bluetooth connected to the robot
bt.off();
return true;
default:
break;
}
} else {
Snackbar.make(verticalList, getString(R.string.bluetoothNotFound), Snackbar.LENGTH_SHORT).show();
}
return super.onOptionsItemSelected(item);
}
public static void setIcBluetooth(final int btnBltId){
if (menu != null) {
final MainActivity activity = MainActivity.getInstance();
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
MenuItem bltDisabled = menu.findItem(R.id.btnBltDisabled);
MenuItem bltActivated = menu.findItem(R.id.btnBltActivated);
MenuItem bltConnecting = menu.findItem(R.id.btnBltConnecting);
MenuItem bltConnected = menu.findItem(R.id.btnBltConnected);
if (bltDisabled != null && bltActivated != null && bltConnecting != null && bltConnected != null) {
switch (btnBltId) {
case R.id.btnBltDisabled:
// bluetooth desativado
bltDisabled.setVisible(true);
bltActivated.setVisible(false);
bltConnecting.setVisible(false);
bltConnected.setVisible(false);
Snackbar.make(verticalList, activity.getApplicationContext()
.getString(R.string.disabled), Snackbar.LENGTH_SHORT).show();
return;
case R.id.btnBltActivated:
// bluetooth ativado
bltDisabled.setVisible(false);
bltActivated.setVisible(true);
bltConnecting.setVisible(false);
bltConnected.setVisible(false);
Snackbar.make(verticalList, activity.getApplicationContext()
.getString(R.string.activated), Snackbar.LENGTH_SHORT).show();
return;
case R.id.btnBltConnecting:
// conectando com o robo
bltDisabled.setVisible(false);
bltActivated.setVisible(false);
bltConnecting.setVisible(true);
bltConnected.setVisible(false);
Snackbar.make(verticalList, activity.getApplicationContext()
.getString(R.string.connecting), Snackbar.LENGTH_SHORT).show();
return;
case R.id.btnBltConnected:
// bluetooth conectado com o robo
bltDisabled.setVisible(false);
bltActivated.setVisible(false);
bltConnecting.setVisible(false);
bltConnected.setVisible(true);
Snackbar.make(verticalList, activity.getApplicationContext()
.getString(R.string.connected), Snackbar.LENGTH_SHORT).show();
return;
default:
return;
}
}
}
});
}
}
public static void checkCoarsePermissions(Activity activity){
boolean isPermission = false;
if(Build.VERSION.SDK_INT >= 23){
if(ContextCompat.checkSelfPermission(activity, android.Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED){
// If the user has already denied the permission
if(ActivityCompat.shouldShowRequestPermissionRationale(activity, android.Manifest.permission.ACCESS_COARSE_LOCATION)){
// Explain the use of the permission with a dialog
}
ActivityCompat.requestPermissions(activity, new String[]{android.Manifest.permission.ACCESS_COARSE_LOCATION}, REQUEST_PERMISSIONS);
}else{
// permission already granted
isPermission = true;
}
}else{
// below API 23, i.e. the permission has been granted since install time
isPermission = true;
}
}
@Override
protected void onStart() {
super.onStart();
MainActivity.mainInstance = this;
bluetoothReceive.setActivity(this);
checkCoarsePermissions(this);
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onDestroy() {
bluetoothReceive.unregisterReceiver();
bt.off();
super.onDestroy();
}
public void showCommandInfo(int position, Command command){
// TODO replace with an activity
Context ctx = MainActivity.getInstance();
if(ctx != null){
ArrayAdapter<String> adapter = new ArrayAdapter<String>(ctx, R.layout.adapter_string);
adapter.add(command.getInfo(ctx));
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(ctx, R.style.DialogTheme);
alertDialogBuilder.setCancelable(true)
.setPositiveButton(ctx.getString(R.string.ok),
new DialogInterface.OnClickListener(){
public void onClick(DialogInterface dialog, int id){
dialog.dismiss();
/// TODO
}
})
.setOnCancelListener(new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
}
});
alertDialogBuilder.setAdapter(adapter, null);
alertDialogBuilder.setTitle(position+": "+command.getTitle());
AlertDialog alert = alertDialogBuilder.create();
alert.getListView().deferNotifyDataSetChanged();
alert.show();
}
}
/** Adds a command to the list to be sent to the robot **/
public static void addCommand(Command command){
if(command != null){
Log.i("addCommand",command.getTitle());
MainActivity.mainList.add(0, command);
MainActivity.verticalList.setAdapter(new CommandAdapter(MainActivity.mainList));
MainActivity.verticalList.scrollTo(0, 0);
if(MainActivity.mainList.size() == 1){
MainActivity.mainLayout.setVisibility(View.VISIBLE);
MainActivity.emptyLayout.setVisibility(View.GONE);
}
}
}
/** Removes a command from the list to be sent to the robot **/
public static void remCommand(int index){
if(index >= 0 && index <= MainActivity.mainList.size()){
index = MainActivity.mainList.size()-index;
Log.i("remCommand", index+"");
MainActivity.mainList.remove(index);
MainActivity.verticalList.setAdapter(new CommandAdapter(MainActivity.mainList));
if(MainActivity.mainList.isEmpty()){
MainActivity.mainLayout.setVisibility(View.GONE);
MainActivity.emptyLayout.setVisibility(View.VISIBLE);
}
}
}
/** Returns the command at the given index from the list of added commands **/
public static Command getCommandMain(int index){
Command command = null;
if(index >= 0 && index < MainActivity.mainList.size()){
command = MainActivity.mainList.get(index);
}
return command;
}
/** Returns the command at the given index from the list of available commands **/
public static Command getCommandOption(int index){
Command command = null;
if(index >= 0 && index < MainActivity.commandOptionList.size()){
command = MainActivity.commandOptionList.get(index);
}
return command;
}
/** Sends the queued commands to the robot over Bluetooth **/
public static void sendCommands(){
if(MainActivity.mainList != null && !MainActivity.mainList.isEmpty()) {
if(bt.isOn()) {
if(bt.isConected()) {
if(!MainActivity.isSending) {
MainActivity.isSending = true;
showProgress(R.string.sending);
String message;
int total = MainActivity.mainList.size();
// Send one at a time
// Command comm;
// for (int i = total-1; i>=0; i--) {
// comm = MainActivity.mainList.get(i);
// message = comm.getCodeFormatted();
// Log.i("sendCommands", (total-i)+"/"+total+": "+message);
// bt.sendMessage(message);
// MainActivity.progressDialog.setMessage((total-i)+"/"+total+": "+message);
// }
//
// Send everything at once
message = "$"+total;
for (int i = total - 1; i >= 0; i--) {
message += MainActivity.mainList.get(i).getCodeFormatted();
}
message = message + "#";
Log.i("sendCommands", total + ": " + message);
bt.sendMessage(message);
MainActivity.closeProgress();
MainActivity.isSending = false;
Snackbar.make(verticalList, verticalList.getContext().getString(R.string.sended), Snackbar.LENGTH_SHORT).show();
//
} else {
Snackbar.make(verticalList, verticalList.getContext().getString(R.string.alreadySending), Snackbar.LENGTH_SHORT).show();
}
} else {
Snackbar.make(verticalList, verticalList.getContext().getString(R.string.notConnected), Snackbar.LENGTH_SHORT).show();
}
}else{
Snackbar.make(verticalList, verticalList.getContext().getString(R.string.disabled), Snackbar.LENGTH_SHORT).show();
}
}else{
Snackbar.make(verticalList, verticalList.getContext().getString(R.string.listEmpty), Snackbar.LENGTH_SHORT).show();
}
}
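/* Illustrative example of the payload built above: with three queued commands
 * Acelerar (W), Esquerda (A) and Chutar (K), the string sent over Bluetooth is "$3WAK#"
 * (count first, '#' as terminator) — assuming Command.getCodeFormatted() returns the
 * single-letter label defined in ECommandCode, which is not shown in this dump. */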
public static void showProgress(int stringId){
if(MainActivity.progressDialog == null) {
MainActivity.progressDialog = ProgressDialog.show(MainActivity.getInstance(), verticalList.getContext().getString(stringId), "", true);
}
}
public static void closeProgress(){
if(MainActivity.progressDialog != null){
MainActivity.progressDialog.dismiss();
MainActivity.progressDialog = null;
}
}
}
<file_sep>package br.com.brunohpmarques.ewbc.adapters;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import java.util.List;
import br.com.brunohpmarques.ewbc.R;
import br.com.brunohpmarques.ewbc.holders.CommandHolder;
import br.com.brunohpmarques.ewbc.models.Command;
/**
* Created by <NAME> on 08/08/2017.
*/
public class CommandAdapter extends RecyclerView.Adapter<CommandHolder> {
private List<Command> list;
public CommandAdapter(List<Command> Data) {
list = Data;
}
@Override
public CommandHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext())
.inflate(R.layout.item_command, parent, false);
CommandHolder holder = new CommandHolder(view);
return holder;
}
@Override
public void onBindViewHolder(CommandHolder holder, int position) {
holder.commandTitle.setText(list.get(position).getTitle());
holder.commandImage.setImageResource(list.get(position).getResourceId());
holder.commandPos.setText(list.size()-position+"");
}
@Override
public int getItemCount() {
return list.size();
}
}
<file_sep>package br.com.brunohpmarques.ewbc.holders;
import android.graphics.drawable.Drawable;
import android.media.Image;
import android.support.design.widget.Snackbar;
import android.support.v4.content.res.ResourcesCompat;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import br.com.brunohpmarques.ewbc.MainActivity;
import br.com.brunohpmarques.ewbc.R;
import br.com.brunohpmarques.ewbc.models.Command;
/**
* Created by <NAME> on 08/08/2017.
*/
public class CommandHolder extends RecyclerView.ViewHolder {
private static Drawable infoDraw, removeDraw;
public ImageButton commandImage;
public TextView commandTitle;
public ImageView commandInfo;
public ImageView commandRem;
public TextView commandPos;
public CommandHolder(View view) {
super(view);
if(infoDraw == null){
infoDraw = ResourcesCompat.getDrawable(view.getResources(), R.drawable.ic_info, null).mutate();
}
if(removeDraw == null){
removeDraw = ResourcesCompat.getDrawable(view.getResources(), R.drawable.ic_cancel, null).mutate();
}
commandImage = (ImageButton) view.findViewById(R.id.btnComm);
commandTitle = (TextView) view.findViewById(R.id.lblComm);
commandInfo = (ImageView) view.findViewById(R.id.btnInfComm);
commandRem = (ImageView) view.findViewById(R.id.btnRemComm);
commandPos = (TextView) view.findViewById(R.id.lblCommPos);
commandImage.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View view) {
Snackbar.make(view, commandTitle.getText(), Snackbar.LENGTH_SHORT).show();
return false;
}
});
commandInfo.setImageDrawable(infoDraw);
commandInfo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
int position = Integer.parseInt(commandPos.getText().toString());
Log.e("POSICAO", ""+position);
Command command = MainActivity.getCommandMain(position-1);
MainActivity.getInstance().showCommandInfo(position, command);
}
});
commandRem.setImageDrawable(removeDraw);
commandRem.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Snackbar.make(view, commandTitle.getText()+" "+view.getContext().getString(R.string.removed), Snackbar.LENGTH_SHORT).show();
int index = Integer.parseInt(commandPos.getText().toString());
MainActivity.remCommand(index);
}
});
}
}
<file_sep>package br.com.brunohpmarques.ewbc.models;
import br.com.brunohpmarques.ewbc.R;
/**
* Created by <NAME> on 15/08/2017.
*/
public enum ECommandCode {
ACELERAR("W","Acelerar", R.drawable.ic_arrow_up, R.string.commandAC),
// RE("S","Ré",R.drawable.ic_arrow_down, R.string.commandRE),
DIREITA("D","Direita",R.drawable.ic_arrow_forward, R.string.commandDI),
ESQUERDA("A","Esquerda",R.drawable.ic_arrow_back, R.string.commandES),
CHUTAR("K","Chutar",R.drawable.ic_gavel, R.string.commandCH);
// SUGAR("SU","Sugar",R.drawable.ic_publish, R.string.commandSU),
// SOLTAR("SO","Soltar",R.drawable.ic_file_download, R.string.commandSO);
private String label;
private String title;
private int imageId;
private int infoId;
ECommandCode(String label, String title, int imageId, int infoId){
this.label = label;
this.title = title;
this.imageId = imageId;
this.infoId = infoId;
}
public String getLabel() {
return label;
}
public String getTitle() {
return title;
}
public int getImageId() {
return imageId;
}
public int getInfoId() {
return infoId;
}
}
<file_sep>/************************************************
* UNIVERSIDADE FEDERAL RURAL DE PERNAMBUCO *
* BACHARELADO EM CIÊNCIAS DA COMPUTAÇÃO *
* CURSO DE ARDUINO SECOMP *
* <NAME> *
* RECIFE, 24/08/2016 *
************************************************/
#define rodaDirPWM 5
#define rodaDirFrente 4
#define rodaEsqPWM 6
#define rodaEsqFrente 7
#define sensorDir A15
#define sensorEsq A14
int leituraBitsDir;
int leituraBitsEsq;
// speed in curves
#define POTL 125
// radius in curves (inner-wheel speed)
#define POTS 10
// straight-line acceleration
#define POTF 155
// sensor threshold for the LINE
#define LINHA 700
char movimento = 'f';
void acelerar(int aceleracaoRodaDir, int aceleracaoRodaEsq, int sentido){
analogWrite(rodaDirPWM, aceleracaoRodaDir);
analogWrite(rodaEsqPWM, aceleracaoRodaEsq);
if(sentido == 1){ // forward
digitalWrite(rodaDirFrente, HIGH);
digitalWrite(rodaEsqFrente, HIGH);
}else{ // backward
digitalWrite(rodaDirFrente, LOW);
digitalWrite(rodaEsqFrente, LOW);
}
}
void parar(){
acelerar(0, 0, 1);
}
char mover(char direcao){
switch(direcao){
//forward
case 'f':
acelerar(POTF, POTF, 1);
break;
//backward
case 't':
acelerar(POTF, POTF, 0);
break;
//right
case 'd':
acelerar(POTL, POTS, 1);
break;
//left
case 'e':
acelerar(POTS, POTL, 1);
break;
}
return direcao;
}
void setup() {
pinMode(rodaDirPWM, OUTPUT);
pinMode(rodaDirFrente, OUTPUT);
pinMode(rodaEsqPWM, OUTPUT);
pinMode(rodaEsqFrente, OUTPUT);
}
void loop() {
leituraBitsDir = analogRead(sensorDir);
leituraBitsEsq = analogRead(sensorEsq);
if(leituraBitsEsq < LINHA && leituraBitsDir < LINHA){
movimento = mover('f');
}else if(leituraBitsEsq > LINHA && leituraBitsDir < LINHA){
movimento = mover('d');
}else if(leituraBitsDir > LINHA && leituraBitsEsq < LINHA){
movimento = mover('e');
}else{
parar();
}
<file_sep>//Module connection pins
const int s0 = 9;
const int s1 = 8;
const int s2 = 11;
const int s3 = 10;
const int out = 12;
//Color variables
int red = 0;
int green = 0;
int blue = 0;
void setup()
{
pinMode(s0, OUTPUT);
pinMode(s1, OUTPUT);
pinMode(s2, OUTPUT);
pinMode(s3, OUTPUT);
pinMode(out, INPUT);
Serial.begin(9600);
digitalWrite(s0, HIGH);
digitalWrite(s1, LOW);
}
void loop()
{
//Detect the color
color();
//Show the values on the serial monitor
Serial.print("Vermelho: ");
Serial.print(red, DEC);
Serial.print(" Verde : ");
Serial.print(green, DEC);
Serial.print(" Azul : ");
Serial.print(blue, DEC);
Serial.println();
if (red > 400 && green > 400 && blue > 400) {
Serial.println("Preto");
}
else if (red < 200 && green < 150 && blue < 100) {
Serial.println("Azul claro");
}
else if (red < 200 && green < 200 && blue < 200) {
Serial.println("Branco");
}
else if (red < blue && red < green && red < 200 && green > 250 && blue > 150)
{
Serial.println("Vermelho");
}
//Check whether blue was detected
else if (blue < red && blue < green && red > 400)
{
Serial.println("Azul");
}
Serial.println();
delay(500);
}
void color()
{
//Routine that reads the color values
digitalWrite(s2, LOW);
digitalWrite(s3, LOW);
//count OUT, pRed, RED
red = pulseIn(out, digitalRead(out) == HIGH ? LOW : HIGH);
digitalWrite(s3, HIGH);
//count OUT, pBLUE, BLUE
blue = pulseIn(out, digitalRead(out) == HIGH ? LOW : HIGH);
digitalWrite(s2, HIGH);
//count OUT, pGreen, GREEN
green = pulseIn(out, digitalRead(out) == HIGH ? LOW : HIGH);
}
<file_sep>/*
* Interpretador de comandos BT-Arduino
*
*/
// received byte
char data = 0;
// queue of commands to be executed
char commandQueue[50];
// number of commands (expected value and value found so far)
int numCommandsReal = 0;
int numCommandsFound = 0;
// robot states
typedef enum { NONE, READ_NUM_COMMANDS, READ_COMMANDS, READY } states;
typedef enum { NO_COLOR, RED, GREEN, BLUE } colors;
// current state
states currentState = NONE;
// current color (target object)
colors currentColor = NO_COLOR;
void setup() {
// TODO: set up input and output pins
Serial1.begin(9600);
}
void moveForward() {
// TODO: drive forward until something is detected
Serial1.println("Andar para frente");
}
void turnLeft() {
// TODO
Serial1.println("Virar para esquerda");
}
void turnRight() {
// TODO
Serial1.println("Virar para direita");
}
void kickObject() {
// TODO: drive up to the object and kick it
Serial1.println("Chutar objeto");
}
void processCommandQueue() {
for (int i = 0; i < numCommandsReal; i++) {
if (commandQueue[i] == 'W') {
moveForward();
} else if (commandQueue[i] == 'A') {
turnLeft();
} else if (commandQueue[i] == 'D') {
turnRight();
} else if (commandQueue[i] == 'K') {
kickObject();
}
}
numCommandsReal = 0;
currentState = NONE;
}
void processIncomingByte(const char b) {
if (isdigit(b)) {
if (currentState == READ_NUM_COMMANDS) {
numCommandsReal *= 10;
numCommandsReal += b - 48;
}
// else {
// if (b == '1') {
// Serial1.print(b);
//
// currentColor = RED;
// } else if (b == '2') {
// Serial1.print(b);
//
// currentColor = GREEN;
// } else if (b == '3') {
// Serial1.print(b);
//
// currentColor = BLUE;
// }
// }
} else {
if (currentState == READ_NUM_COMMANDS && numCommandsReal != 0) {
currentState = READ_COMMANDS;
}
if (currentState == NONE) {
if (b == '$') {
Serial1.print(b);
currentState = READ_NUM_COMMANDS;
}
} else if (currentState == READ_COMMANDS) {
if (b == 'W' || b == 'A' || b == 'D' || b == 'K') {
Serial1.print(b);
commandQueue[numCommandsFound++] = b;
} else if (b == '#') {
if (numCommandsReal == numCommandsFound) {
Serial1.println(b);
currentState = READY;
} else {
// transmission error
// TODO: request a resend?
Serial1.println("E");
numCommandsReal = 0;
currentState = NONE;
}
numCommandsFound = 0;
}
}
}
}
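/*
 * Worked example of the protocol handled above (illustrative): the app sends "$3WAK#".
 * '$' switches the state to READ_NUM_COMMANDS, the digit '3' sets numCommandsReal = 3,
 * 'W', 'A' and 'K' are pushed onto commandQueue, and '#' closes the frame; since the
 * expected and found counts match, currentState becomes READY and processCommandQueue()
 * would run moveForward(), turnLeft() and kickObject() once the commented-out calls in
 * loop() are enabled.
 */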
void loop() {
if (Serial1.available() > 0) {
data = Serial1.read();
// processIncomingByte(data);
Serial.println(data);
}
// if (currentState == READY) {
// // executar fila de comandos
// processCommandQueue();
// }
}
|
7fa20c77b99d7b2a4156ff0902b4f6d642b49a58
|
[
"Java",
"C++"
] | 9 |
C++
|
brunohpmarques/e-wbc
|
c065079ac16cf728e18244c69cb0f2b35416e8a6
|
48a26960cddd55acb026147b008c210bf32132ad
|
refs/heads/master
|
<file_sep>//MODULE
//['ngRoute', 'ngResource'] are the app's dependencies
var weatherApp = angular.module('weatherApp', ['ngRoute', 'ngResource']);
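// Controllers, services and route configuration can then be registered on this module, e.g.
// weatherApp.controller('homeController', ['$scope', function ($scope) { /* ... */ }]);
// (illustrative name only — no controller is defined in this snippet)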
|
d23041d9c729ee202b0b640f75f667d89bdd1fb4
|
[
"JavaScript"
] | 1 |
JavaScript
|
Edento/weatherApp
|
348bcd09d0f758d2c4b5b5dc1409a6b0e1189403
|
c056b203f62f39ba710a77d03afbb058fb846af9
|
refs/heads/master
|
<repo_name>lewisbenge/Win81-DreamCheeky-Thunder-MissleLauncher<file_sep>/RocketLauncher/MainPage.xaml.cs
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Input;
// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238
namespace RocketLauncher
{
/// <summary>
/// An empty page that can be used on its own or navigated to within a Frame.
/// </summary>
public sealed partial class MainPage : Page
{
private MissleLauncher _launcher;
public MainPage()
{
InitializeComponent();
Loaded += MainPage_Loaded;
Unloaded += MainPage_Unloaded;
}
private void MainPage_Unloaded(object sender, RoutedEventArgs e)
{
}
private void MainPage_Loaded(object sender, RoutedEventArgs e)
{
MissleLauncher.MissleLauncherFound += async (o, args) =>
{
_launcher = args.MissleLauncher;
await _launcher.TurnLedOn();
};
MissleLauncher.SearchForMissleLauncher(Dispatcher);
}
private async void TapLeft(object sender, TappedRoutedEventArgs e)
{
// Guard against taps arriving before the launcher has been detected
if (_launcher != null) await _launcher.MoveLeft();
}
private async void TapRight(object sender, TappedRoutedEventArgs e)
{
if (_launcher != null) await _launcher.MoveRight();
}
private async void TapUp(object sender, TappedRoutedEventArgs e)
{
if (_launcher != null) await _launcher.MoveUp();
}
private async void TapDown(object sender, TappedRoutedEventArgs e)
{
if (_launcher != null) await _launcher.MoveDown();
}
private async void FireDown(object sender, TappedRoutedEventArgs e)
{
if (_launcher != null) await _launcher.Fire();
}
}
}
<file_sep>/RocketLauncher/MissleLauncherEventArgs.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace RocketLauncher
{
public class MissleLauncherEventArgs
{
public MissleLauncherEventArgs(MissleLauncher launcher)
{
MissleLauncher = launcher;
}
public MissleLauncher MissleLauncher { get; set; }
}
}
<file_sep>/README.md
Connecting Windows 8.1 to the Dream Cheeky Thunder Missle Launcher
==================================================================
A sample app demonstrating using the Dream Cheeky Thunder USB Missle Launcher with Windows 8.1.
Full details of the code can be found here: http://lewisbenge.azurewebsites.net/2013/09/26/connecting-the-dream-cheeky-thunder-missile-launcher-to-windows-8-1/
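A minimal usage sketch (it mirrors `MainPage.xaml.cs` in this repo — the launcher instance arrives via the `MissleLauncherFound` event once the HID device is detected):

```csharp
MissleLauncher.MissleLauncherFound += async (o, args) =>
{
    var launcher = args.MissleLauncher;
    await launcher.TurnLedOn();
    await launcher.MoveUp();
    await launcher.Fire();
};
MissleLauncher.SearchForMissleLauncher(Dispatcher);
```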
<file_sep>/RocketLauncher/MissleLauncher.cs
using System;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Devices.HumanInterfaceDevice;
using Windows.Storage;
using Windows.UI.Core;
namespace RocketLauncher
{
public class MissleLauncher
{
private static readonly byte[] CMD = {0, 0, 0, 0, 0, 0, 0, 0, 2};
private static readonly byte[] Up = {0, 2, 2, 0, 0, 0, 0, 0, 0};
private static readonly byte[] DOWN = {0, 2, 1, 0, 0, 0, 0, 0, 0};
private static readonly byte[] LEFT = {0, 2, 4, 0, 0, 0, 0, 0, 0};
private static readonly byte[] RIGHT = {0, 2, 8, 0, 0, 0, 0, 0, 0};
private static readonly byte[] FIRE = {0, 2, 16, 0, 0, 0, 0, 0, 0};
private static readonly byte[] Stop = {0, 2, 32, 0, 0, 0, 0, 0, 0};
private static readonly byte[] GetStatus = {0, 1, 0, 0, 0, 0, 0, 0, 0};
private static readonly byte[] LedOn = {0, 3, 1, 0, 0, 0, 0, 0, 0};
private static readonly byte[] LedOff = {0, 3, 0, 0, 0, 0, 0, 0, 0};
private const int delay = 1000;
private const ushort vid = 8483;
private const ushort pid = 4112;
private const ushort uid = 16;
private const ushort uPage = 1;
private MissleLauncher(HidDevice hidDevice)
{
_hidDevice = hidDevice;
}
public async Task Fire()
{
await SendOutputMessage(FIRE);
}
public async Task TurnLedOn()
{
await SendOutputMessage(LedOn);
}
public async Task TurnLedOff()
{
await SendOutputMessage(LedOff);
}
public async Task MoveUp()
{
await SendOutputMessage(Up);
await Task.Delay(delay);
await SendOutputMessage(Stop);
}
public async Task MoveDown()
{
await SendOutputMessage(DOWN);
await Task.Delay(delay);
await SendOutputMessage(Stop);
}
public async Task MoveLeft()
{
await SendOutputMessage(LEFT);
await Task.Delay(delay);
await SendOutputMessage(Stop);
}
public async Task MoveRight()
{
await SendOutputMessage(RIGHT);
await Task.Delay(delay);
await SendOutputMessage(Stop);
}
private async Task SendOutputMessage(byte[] message)
{
if (_hidDevice != null)
{
HidOutputReport report = _hidDevice.CreateOutputReport();
report.Data = message.AsBuffer();
await _hidDevice.SendOutputReportAsync(report);
}
}
public static EventHandler<MissleLauncherEventArgs> MissleLauncherFound;
private readonly HidDevice _hidDevice;
public static void SearchForMissleLauncher(CoreDispatcher dispatcher)
{
DeviceWatcher deviceWatcher =
DeviceInformation.CreateWatcher(HidDevice.GetDeviceSelector(uPage, uid, vid, pid));
deviceWatcher.Added += (s, a) => dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
{
HidDevice hidDevice = await HidDevice.FromIdAsync(a.Id, FileAccessMode.ReadWrite);
var launcher = new MissleLauncher(hidDevice);
if (MissleLauncherFound != null)
MissleLauncherFound(null, new MissleLauncherEventArgs(launcher));
deviceWatcher.Stop();
});
deviceWatcher.Start();
}
public void Dispose()
{
_hidDevice.Dispose();
}
~MissleLauncher()
{
Dispose();
}
}
}
<file_sep>/RocketLauncher/Commands.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace RocketLauncher
{
public class Commands
{
public static byte[] CMD = new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 2 };
public static byte[] UP1 = new byte[] { 0, 2, 2, 0, 0, 0, 0, 0, 0};
public static byte[] DOWN1 = new byte[] { 0, 2, 1, 0, 0, 0, 0, 0, 0 };
public static byte[] LEFT1 = new byte[] { 0, 2, 4, 0, 0, 0, 0, 0, 0};
public static byte[] RIGHT1 = new byte[] { 0, 2, 8, 0, 0, 0, 0, 0, 0 };
public static byte[] FIRE1 = new byte[] { 0, 2, 16, 0, 0, 0, 0, 0, 0 };
public static byte[] STOP1 = new byte[] { 0, 2, 32, 0, 0, 0, 0, 0, 0 };
public static byte[] GET_STATUS1 = new byte[] { 0, 1, 0, 0, 0, 0, 0, 0, 0 };
public static byte[] LED_ON = new byte[] { 0, 3, 1, 0, 0, 0, 0, 0, 0 };
public static byte[] LED_OFF = new byte[] { 0, 3, 0, 0, 0, 0, 0, 0, 0 };
}
}
|
f676ccfebd6678a6ed6134d0b466530ccf2aabb8
|
[
"Markdown",
"C#"
] | 5 |
C#
|
lewisbenge/Win81-DreamCheeky-Thunder-MissleLauncher
|
bc096cc80efa269318ca5f1dff5355d50961fd2c
|
1a02954c43e1a53fe9ae176d1abe7ce513042bba
|
refs/heads/master
|
<file_sep>#!/bin/sh
# Init edited and modified by tiad8
/bin/mount -o remount,rw / /
/bin/mount -t proc proc /proc
/bin/mount -t sysfs sys /sys
#internet speed tweaks,vm management tweaks, and battery tweaks
echo 1024 > /sys/devices/virtual/bdi/179:0/read_ahead_kb
echo 7 > /proc/sys/vm/laptop_mode
echo 90 > /proc/sys/vm/dirty_ratio
echo 60000 > /proc/sys/vm/dirty_expire_centisecs
echo 60000 > /proc/sys/vm/dirty_writeback_centisecs
card=/sdcard
mount -t vfat -o fmask=0000,dmask=0000,utf8,rw,flush,noatime,nodiratime /dev/block/mmcblk0p1 /sdcard
[ $? -eq 0 ] || fail "Failed to mount the SD card. Cannot continue."
if [ ! -d /sdcard/npkboot/tmp ] ; then
mkdir /sdcard/npkboot/tmp
fi
mount /sdcard/npkboot/tmp /tmp
[ $? -eq 0 ] || fail "Failed to mount TMP directory. Cannot continue."
mount -t ext2 -o noatime,nodiratime /dev/block/mmcblk0p2 /system
[ $? -eq 0 ] || fail "Failed to mount SYSTEM partition. Cannot continue."
mount -t ext2 -o noatime,nodiratime /dev/block/mmcblk0p3 /data
[ $? -eq 0 ] || fail "Failed to mount DATA partition. Cannot continue."
mount -t ext2 -o noatime,nodiratime /dev/block/mmcblk0p4 /cache
[ $? -eq 0 ] || fail "Failed to mount CACHE partition. Cannot continue."
clear
if [ ! -d /cache/recovery ]; then
mkdir /cache/recovery
fi
clear
if [ -d /sdcard/npkboot ]; then
echo " "
echo " "
echo " "
read -s -n3 -p "PRESS VOLUME DOWN **NOW** TO ENTER RECOVERY..." -t5 keypress
if [ "$keypress" = "[2" ] ; then
if [ ! -d /cache/recovery ] ; then
mkdir /cache/recovery
fi
rm -f /init.rc
rm -f /etc
ln -s /init.cfg/etc /etc
ln -s /init.cfg/etc/init.rc /init.rc
if cat /proc/cpuinfo|grep -q Kovsky ; then
echo "XPERIA detected"
mount --bind /sbin/recoveryx /sbin/recovery
fi
exec /init.recovery
else
echo "Booting Android..."
fi
fi
if [ -f /sdcard/npkboot/recovery.txt ]; then
if [ ! -d /cache/recovery ] ; then
mkdir /cache/recovery
fi
rm -f /init.rc
rm -f /etc
ln -s /init.cfg/etc /etc
ln -s /init.cfg/etc/init.rc /init.rc
if cat /proc/cpuinfo|grep -q Kovsky ; then
echo "XPERIA detected"
mount --bind /sbin/recoveryx /sbin/recovery
fi
exec /init.recovery
fi
if [ -d $card/npkboot/AndroidApps ] ; then
echo Copying Applications
if [ ! -d /data/app ] ; then
mkdir -m 0775 /data/app
chown -R 1000:1000 /data/app
fi
(cd $card/npkboot/AndroidApps; for i in *; do if [ ! -f "/data/app/$i" -o "$i" -nt "/data/app/$i" ]; then \
cp "$i" /data/app; fi done)
fi
if [ ! -d $card/npkboot/media ] ; then
echo You have no media folder, please extract the resources to your SD card NPKBOOT/MEDIA folder
fi
if [ ! -d $card/npkboot/mp3 ] ; then
echo You have no mp3 folder, please extract the resources to your SD card NPKBOOT/MP3 folder
fi
if [ ! -d /data/app ] ; then
mkdir -m 0775 /data/app
fi
chown -R 1000:1000 /data/app
if [ ! -d /data/app-private ] ; then
mkdir -m 0775 /data/app-private
fi
chown -R 1000:1000 /data/app-private
cp -ar /init.etc/* /system/etc
KAISER=0
DIAMOND=0
WIFI_TI=0
WIFI_BCM=0
XPERIA=0
WVGA=0
HW=`cat /proc/cpuinfo | grep Hardware`
case "$HW" in
*Diamond*)
echo "DIAMOND detected"
HW=DIAMOND
DIAMOND=1
WIFI_TI=1
;;
*Raphael*)
echo "RAPHAEL detected"
HW=RAPHAEL
DIAMOND=1
WIFI_TI=1
;;
*blackstone*)
echo "BLACKSTONE detected"
HW=BLACKSTONE
DIAMOND=1
WIFI_TI=1
WVGA=1
;;
*Topaz*)
echo "TOPAZ detected"
HW=TOPAZ
DIAMOND=1
WIFI_TI=1
WVGA=1
;;
*Kovsky*)
echo "XPERIA detected"
HW=KOVSKY
XPERIA=1
WIFI_TI=1
WVGA=1
;;
*Rhodium*)
echo "RHODIUM detected"
HW=RHODIUM
DIAMOND=1
WIFI_BCM=1
WVGA=1
;;
*)
echo "VOGUE detected"
HW=VOGUE
;;
esac
RCSCRIPT=""
RCCONFIG=""
echo "Checking for build type..."
if [ -f /system/TiaDroid.build ] ; then
echo "TiaDroid8 2.2.2 build detected"
RCSCRIPT="froyo"
RCCONFIG="froyo"
mount --bind /lib/froyo/hw /system/lib/hw
elif [ -f /system/TiaGB.build ] ; then
echo "TiaGB.2.3.7 build detected"
RCSCRIPT="gingerbread"
RCCONFIG="gingerbread"
elif [ -f /system/TiaCM7.build ] ; then
echo "TiaCM.2.3.7 build detected"
RCSCRIPT="cm7"
RCCONFIG="cm7"
elif [ -f /system/TiaSense.build ] ; then
echo "TiaSense 2.2 build detected"
RCSCRIPT="sensefroyo"
RCCONFIG="sensefroyo"
elif [ -f /system/TiaICS.build ] ; then
echo "TiaICS 4.0 build detected"
RCSCRIPT="ics"
RCCONFIG="ics"
elif [ -f /system/TiaMiui.build ] ; then
echo "TiaMiui 2.3.7 build detected"
RCSCRIPT="miui"
RCCONFIG="miui"
else
echo "Unknown Android build. Assuming Ion variant"
RCSCRIPT="ion"
RCCONFIG="ion"
# for the fake sensors library
mount /lib/hw /system/lib/hw -o loop
chmod 666 /dev/input/event0
if [ $DIAMOND -eq 1 ] ; then
RCCONFIG="ion.diamond"
fi
fi
LCDDENSITY=`/bin/grep -o "lcd.density=.*" /proc/cmdline | /bin/sed -e "s/.*lcd.density=//g" -e "s/ .*//g"`
DENS2=`grep lcd_density /system/etc/default.prop | sed -e 's/.*=//'`
if [ -z "$DENS2" ]; then
echo "ro.sf.lcd_density=$LCDDENSITY" >> /system/etc/default.prop
echo "qemu.sf.lcd_density=$LCDDENSITY" >> /system/etc/default.prop
else
sed -i /ro.sf.lcd_density/s/=.*/=$LCDDENSITY/ /system/etc/default.prop
sed -i /qemu.sf.lcd_density/s/=.*/=$LCDDENSITY/ /system/etc/default.prop
fi
DENS2a=`grep qemu.sf.lcd_density /system/build.prop | sed -e 's/.*=//'`
if [ -z "$DENS2a" ]; then
echo "ro.sf.lcd_density=$LCDDENSITY" >> /system/build.prop
echo "qemu.sf.lcd_density=$LCDDENSITY" >> /system/build.prop
else
sed -i /ro.sf.lcd_density/s/=.*/=$LCDDENSITY/ /system/build.prop
sed -i /qemu.sf.lcd_density/s/=.*/=$LCDDENSITY/ /system/build.prop
fi
if [ $XPERIA -eq 1 ] ; then
ROT=`grep hwrotation /system/etc/default.prop | sed -e 's/.*=//'`
if [ "$ROT" != 180 ]; then
echo Setting ro.sf.hwrotation=180
if [ -z "$ROT" ]; then
echo "ro.sf.hwrotation=180" >> /system/etc/default.prop
else
sed -i /hwrotation/s/=.*/=180/ /system/etc/default.prop
fi
fi
fi
if [ $XPERIA -eq 1 ] ; then
ROT1=`grep hwrotation /system/build.prop | sed -e 's/.*=//'`
if [ "$ROT1" != 180 ]; then
echo Setting ro.sf.hwrotation=180
if [ -z "$ROT" ]; then
echo "ro.sf.hwrotation=180" >> /system/build.prop
else
sed -i /hwrotation/s/=.*/=180/ /system/build.prop
fi
fi
fi
rm -f /init.rc
rm -f /etc
ln -s "/init.cfg/init.$RCSCRIPT.rc" /init.rc
ln -s /system/etc /etc
# echo /dev/block/mmcblk0p2 > /sys/devices/platform/usb_mass_storage/lun0/file
ifconfig usb0 192.168.20.1 up
/bin/busybox /bin/telnetd -b 192.168.20.1 -l /system/bin/sh
#chmod 4755 /bin/su
#mkdir -m 0777 /smodem
if [ -d /data/dropbear/ ] ; then
echo "Starting SSH"
mknod /dev/random c 1 8
mknod /dev/urandom c 1 9
/bin/dropbear -r /data/dropbear/dropbear_rsa_host_key -s
fi
if [ "$WIFI_TI" = "1" ] || [ "$WIFI_BCM" = "1" ];then
if [ -e "$card/npkboot/modules-$(uname -r).tar.gz" ] && ! `strings /lib/modules/mac80211.ko 2>/dev/null | grep -q "vermagic=$(uname -r)"`; then
echo "Installing $card/npkboot/modules-$(uname -r).tar.gz"
rm -rf /lib/modules/*
tar xzf $card/npkboot/modules-$(uname -r).tar.gz -C /lib/modules
ln -s /lib/modules /lib/modules/`uname -r`
else
echo "Modules already unpacked for this kernel version -- skipping installation of $card/modules-$(uname -r).tar.gz"
fi
if [ -f /system/etc/wifi/wlan.ko ]; then
cp -R /system/etc/wifi/* /lib/modules/
rm /system/etc/wifi/wlan.ko
depmod
fi
if [ "`grep -c ^wifi /system/etc/default.prop`" != "2" ]; then
echo "wifi.interface = wlan0" >> /system/etc/default.prop
echo "wifi.supplicant_scan_interval = 180" >> /system/etc/default.prop
fi
if [ "`grep -c ^wifi /system/build.prop`" != "2" ]; then
echo "wifi.interface = wlan0" >> /system/build.prop
echo "wifi.supplicant_scan_interval = 180" >> /system/build.prop
fi
fi
if [ -f /system/eclair.build ]
then
mount --bind /lib/eclair/libhardware_legacy.so /system/lib/libhardware_legacy.so
fi
if [ "$WIFI_BCM" = "1" ] ;then
# fyi: firmware is loaded from /data/etc/wifi/bcm432x/bcm4325-rtecdc.bin, nvram from /data/wifi-nvram.txt
# extract nvram (todo: clean this up/integrate with TI calibration)
if [ ! -e /data/wifi-nvram.txt ]; then
insmod /lib/modules/mtdchar.ko
mknod /dev/mtd0ro c 90 1
for SKIP in 5744 4736;do
mkdir /lib/modules/$(uname -r)
dd if=/dev/mtd0ro of=/tmp/tempcal bs=8192 count=1 skip=$SKIP
rmdir /lib/modules/$(uname -r)
dd if=/tmp/tempcal of=/data/wifi-nvram.txt bs=1 count=466
rm /tmp/tempcal
if grep nocrc= /data/wifi-nvram.txt 2>&1 > /dev/null;then
break
fi;
done
rm /dev/mtd0ro
rmmod mtdchar
if [ "$SKIP" = "5744" ];then
echo "CDMA Rhodium detected"
fi;
if [ "$SKIP" = "4736" ];then
echo "GSM Rhodium detected"
fi;
fi
fi
if [ -f "$system/etc/conf/$RCCONFIG.user.conf" ]; then
/bin/userinit.sh -c "$system/etc/conf/$RCCONFIG.user.conf"
else
echo "No user config files ($RCCONFIG) found on sdcard"
fi
for i in local.prop eri.xml serialno; do
if [ -f "$card/$i" ]; then
if [ ! -f /data/$i -o "$card/$i" -nt /data/$i ]; then
cp "$card/$i" /data/$i
fi
fi
done
mount -tdebugfs none /dbgfs
#Activate backlight control
echo 3 > /sys/class/htc_hw/test
#Smooth scroll
echo 40 >/sys/devices/system/cpu/cpu0/cpufreq/ondemand/up_threshold
#Activate full charging
echo 2 > /dbgfs/htc_battery/charger_state
[ -z $KBD ] && KBD=`sed 's/.*physkeyboard=\([0-9a-z_]*\).*/\1/' /proc/cmdline`
cp /system/etc/keymaps/default/*.kl /system/usr/keylayout/
cp /system/etc/keymaps/default/*.kcm* /system/usr/keychars/
if [ -d "/system/etc/keymaps/$KBD" ]
then
cp /system/etc/keymaps/"$KBD"/*.kl /system/usr/keylayout/
cp /system/etc/keymaps/"$KBD"/*.kcm* /system/usr/keychars/
fi
mount -o remount,ro /system
for i in /sys/class/input/input* ; do
if [ "`cat $i/name`" = "tssc-manager" ] ; then
touchscreendir=$i
echo "Touchscreen device directory is $i"
fi
done
## Ts Calibration
for i in /sys/class/input/input* ; do
if [ "`cat $i/name`" = "tssc-manager" ] ; then
touchscreendir=$i
echo "Touchscreen device directory is $i"
fi
done
if [ -f /data/ts-calibration ] ; then
echo "Using Saved Touchscreen Calibration"
echo 128,128,1903,128,128,1907,1903,1907,1024,1024 > $touchscreendir/calibration_screen
cat /data/ts-calibration > $touchscreendir/calibration_points
else
echo "Using Default Calibration Values"
echo 836,883,194,888,510,520,838,179,188,181 > /data/ts-calibration
cat /data/ts-calibration > $touchscreendir/calibration_points
fi
mkdir /dev/msm_camera
mknod /dev/msm_camera/control0 c 250 0
mknod /dev/msm_camera/config0 c 250 1
mknod /dev/msm_camera/frame0 c 250 2
cp /backup.rle /initlogo.rle
# Generate a "serial number" for the Android system to identify the device.
# Put it in /data so it survives reboots. Won't survive a factory reset, though.
[ -f "/data/serialno" ] || echo -e `cat /dev/urandom | tr -dc 'A-Z0-9' | fold -w 12 | head -n 1` >/data/serialno
SERIALNO=$(cat /data/serialno)
cp /proc/cmdline /tmp/cmdline
sed -i -e "s:$: androidboot.serialno=${SERIALNO}:" /tmp/cmdline
mount --bind /tmp/cmdline /proc/cmdline
# Transition sequence for FRX06->FRX07 (eth0/tiwlan0 renamed)
if [ -f "/data/misc/wifi/wpa_supplicant.conf" ]
then
sed -i -e 's:=\(tiwlan0\|eth0\):=wlan0:' /data/misc/wifi/wpa_supplicant.conf
chown 1000:1010 /data/misc/wifi/wpa_supplicant.conf
fi
# Bind-mount /sdcard/.android_secure to /mnt/secure/asec for native Apps2SD
[ -d /sdcard/.android_secure ] || mkdir /sdcard/.android_secure
mount --bind /sdcard/.android_secure /mnt/secure/asec
sleep 3
exec /init.android
|
a6da0d6fa81ebcb6934aac962aef6c7aaf02e915
|
[
"Shell"
] | 1 |
Shell
|
tiad8/TiaCWM-V6
|
904ad2b9bd7bfff69f1f7b8f8b67090ceaffd83a
|
6cedf16771c0f60a6dec3ac0d96997a3d6ec433e
|
refs/heads/master
|
<repo_name>istommao/tcpipnotes<file_sep>/demo/multiprocess/server.py
"""multiprocess server."""
import os
import sys
import socket
import signal
port = 6000
try:
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('127.0.0.1', port))
except Exception as error:
print('===', error)
server.listen()
print('Server ready')
def chldhanlder(signum, stackframe):
try:
pid, status = os.waitpid(-1, os.WNOHANG)
except OSError:
pass
signal.signal(signal.SIGCHLD, chldhanlder)
try:
while True:
conn, addr = server.accept()
print('Accept new conn from %s:%s' % addr)
pid = os.fork()
if pid == 0:
data = conn.recv(1024)
conn.send(data)
conn.close()
sys.exit()
else:
conn.close()
except KeyboardInterrupt:
print('\nexit with KeyboardInterrupt')
except Exception as error:
print(error)
server.close()
<file_sep>/demo/rawsocket/sniffer.py
import socket
import struct
import binascii
rawsocket = socket.socket(
socket.AF_PACKET, socket.SOCK_RAW, socket.htons(0x0800))
data, addr = rawsocket.recvfrom(2048)
# ethernet header
eth_header = data[0:14]
# 6-byte destination MAC address, 6-byte source MAC address, 2-byte protocol type
eth_hdr = struct.unpack('!6s6s2s', eth_header)
binascii.hexlify(eth_hdr[0])
binascii.hexlify(eth_hdr[1])
binascii.hexlify(eth_hdr[2])
ipheader = data[14:34]
# '!' marks network byte order; the first 12 bytes carry version, header length,
# type of service, total length, flags and related fields; the two 4-byte fields that follow are the source and destination IP addresses
ip_hdr = struct.unpack('!12s4s4s', ipheader)
print('Source IP Address: ' + socket.inet_ntoa(ip_hdr[1]))
print('Destination IP Address: ' + socket.inet_ntoa(ip_hdr[2]))
tcpheader = data[34:54]
tcp_hdr = struct.unpack("!HH16s", tcpheader)
print(tcp_hdr)
<file_sep>/book/interview.md
# Interview questions
- Why do we need TCP, and why do we also need UDP?
- What do TCP/UDP ports mean?
- What is the difference between TCP and UDP ports? Can the same TCP port and UDP port be used on one host at the same time?
- What is the TCP port range, and why are some ports reserved?
- What is the UDP datagram format, and why is it designed that way?
- What are the main functions of TCP?
- What is the difference and relationship between MSS and MTU?
- Why is a three-way handshake enough to establish a connection, while closing one takes four messages?
- What does the sequence number mean, and how is the ISN chosen?
- How do a TCP server and client exchange parameters?
- What is the TCP segment format, and why is it designed that way?
- (Key) TCP retransmission mechanism
- (Key) How TCP handles congestion
- (Key) Purpose and implementation of the sliding window
- (Key) The TCP state machine for connection setup and teardown
## MSS and MTU
MTU: the maximum frame length at the link layer
MSS: the maximum segment size
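For a typical Ethernet MTU the relationship is simple; a minimal illustrative sketch (not part of the original notes) that assumes plain IPv4 and TCP headers without options:

```python
# MSS = MTU - IP header - TCP header (20 bytes each without options)
ETHERNET_MTU = 1500
IP_HEADER = 20
TCP_HEADER = 20

mss = ETHERNET_MTU - IP_HEADER - TCP_HEADER
print(mss)  # 1460
```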
<file_sep>/demo/rawsocket/rawip.py
"""
raw socket ip proto
https://tools.ietf.org/html/rfc791.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|Version| IHL |Type of Service| Total Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Identification |Flags| Fragment Offset |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Time to Live | Protocol | Header Checksum |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Source Address |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Destination Address |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Options | Padding |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Example Internet Datagram Header
"""
import socket
import struct
class ByKeyOrValue(object):
_set_of_pairs = set()
@classmethod
def get(cls, key_or_value, default="Unknown"):
for pair in cls._set_of_pairs:
if pair[0] == key_or_value:
return pair[1]
elif pair[1] == key_or_value:
return pair[0]
return default
class EtherTypes(ByKeyOrValue):
_set_of_pairs = {
("IPv4", 0x0800),
("ARP", 0x0806),
("RARP", 0x8035),
("SNMP", 0x814c),
("IPv6", 0x86dd)
}
class IPVersions(ByKeyOrValue):
_set_of_pairs = {
("IPv4", 4),
("IPv6", 6)
}
class TransportProtocols(ByKeyOrValue):
_set_of_pairs = {
("ICMP", 1),
("TCP", 6),
("UDP", 17)
}
def format_field(field, field_type):
if field_type == "mac":
# Format a MAC address as XX:XX:XX:XX:XX:XX
byte_str = ["{:02x}".format(field[i])
for i in range(0, len(field))]
return ":".join(byte_str)
elif field_type == "ethertype":
return EtherTypes.get(field)
elif field_type == "ipver":
return IPVersions.get(field)
elif field_type == "transproto":
return TransportProtocols.get(field)
class IPFlags(object):
def __init__(self, flag_bits):
# Flags is an integer taking 3-bit
# The 1st bit is reserved and is of no use
# The 2nd bit:
        self.DF = (flag_bits >> 1) & 0b1
# The 3rd bit:
self.MF = flag_bits & 0b1
def __str__(self):
result = []
if self.DF:
result.append("DF, ")
if self.MF:
result.append("MF, ")
"".join(result)
if result:
return result[:-2]
else:
return "--"
def handler_ip_header(ip_header):
iph = struct.unpack('!BBHHHBBH4s4s', ip_header)
version_ihl = iph[0]
version = version_ihl >> 4
ihl = version_ihl & 0xF
total_len = iph[2]
ident = '0x{:04x}'.format(iph[3])
iph_length = ihl * 4
    flags = IPFlags(iph[4] >> 13)
frag_off = iph[4] & 0b1111111111111
ttl = iph[5]
protocol = iph[6]
check_sum = iph[7]
src_ip = socket.inet_ntoa(iph[8])
dst_ip = socket.inet_ntoa(iph[9])
print(
'Version: %s IHL: %s Total Length: %s\n'
'Identification: %s Flags: %s Fragment Offset: %s\n'
'TTL: %s Protocol: %s Header Checksum: %s\n'
'Source Address: %s\n'
'Destination Address: %s\n' %
(version, iph_length, total_len, ident, flags, frag_off,
ttl, format_field(protocol, 'transproto'), check_sum, src_ip, dst_ip)
)
def main():
# the public network interface
host = socket.gethostbyname(socket.gethostname())
# create a raw socket and bind it to the public interface
sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_IP)
sock.bind((host, 0))
while True:
data, addr = sock.recvfrom(65565)
print('Data len: %s addr: %s' % (len(data), addr))
handler_ip_header(data[:20])
if __name__ == '__main__':
main()
<file_sep>/demo/simple/tcpserver.py
"""tcp server."""
import socket
EOL1 = b'\n\n'
EOL2 = b'\n\r\n'
response = """HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\n\r\nHello world!"""
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
port = 8060
server.bind(('127.0.0.1', port))
server.listen()
print('Server listen http://127.0.0.1:{}/'.format(port))
try:
while True:
conn, addr = server.accept()
print('Accept new connection from %s:%s' % addr)
request = b''
while EOL1 not in request and EOL2 not in request:
request += conn.recv(2048)
print(request.decode())
conn.send(response.encode())
conn.close()
except KeyboardInterrupt:
pass
finally:
server.close()
<file_sep>/book/dns/index.md
# DNS servers
> It is hard for people to remember an IP address such as 192.168.127.12.
> The DNS service exists to solve this memorisation problem:
> a name such as github.com is clearly easier to remember than 192.168.127.12.
> DNS provides exactly this mapping between names and addresses.
## What is DNS
> DNS is a distributed database implemented by a hierarchy of DNS servers.
> DNS runs on top of UDP and uses port `53`.
## Main uses
- Looking up the mapping between domain names and IP addresses
- Looking up mail server aliases
- Server lookup for load balancing
## Some commands
```shell
dig github.com
; <<>> DiG 9.8.3-P1 <<>> github.com
;; global options: +cmd
;; Got answer:
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 2864
;; flags: qr rd ra; QUERY: 1, ANSWER: 3, AUTHORITY: 0, ADDITIONAL: 0
;; QUESTION SECTION:
;github.com. IN A
;; ANSWER SECTION:
github.com. 600 IN A 172.16.58.3
github.com. 600 IN A 172.16.17.32
github.com. 600 IN A 192.168.127.12
;; Query time: 16 msec
;; SERVER: 192.168.1.1#53(192.168.1.1)
;; WHEN: Fri Apr 13 21:17:38 2018
;; MSG SIZE rcvd: 76
```
```shell
nslookup github.com
Server: 192.168.1.1
Address: 192.168.1.1#53
Non-authoritative answer:
Name: github.com
Address: 172.16.17.32
Name: github.com
Address: 192.168.127.12
Name: github.com
Address: 172.16.58.3
```
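The same lookup can also be done from Python's standard library; a minimal sketch (not part of the demo code in this repository):

```python
import socket

# Resolve github.com to its IPv4 addresses using the system resolver
for info in socket.getaddrinfo('github.com', 80, socket.AF_INET, socket.SOCK_STREAM):
    print(info[4][0])
```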
<file_sep>/demo/simple/tcpclient.py
"""tcp client."""
import socket
ipaddr = '127.0.0.1'
port = 8060
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect((ipaddr, port))
data = """GET / HTTP/1.0\r\n\r\nHello world!"""
client.send(data.encode('utf-8'))
resp = client.recvfrom(2048)
print(resp[0].decode())
client.close()
<file_sep>/book/tcp/rt.md
# Timeout and retransmission
## How is the retransmission timeout determined
The timeout interval must be larger than the round-trip time (RTT) of the connection.
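A rough sketch of how the retransmission timeout (RTO) can be derived from RTT samples using the usual smoothed-RTT estimator (illustrative only; the constants follow RFC 6298):

```python
def update_rto(srtt, rttvar, rtt_sample, alpha=0.125, beta=0.25):
    """Update smoothed RTT, RTT variance and RTO from a new RTT sample."""
    if srtt is None:  # first measurement
        srtt = rtt_sample
        rttvar = rtt_sample / 2
    else:
        rttvar = (1 - beta) * rttvar + beta * abs(srtt - rtt_sample)
        srtt = (1 - alpha) * srtt + alpha * rtt_sample
    rto = max(1.0, srtt + 4 * rttvar)  # lower bound of one second
    return srtt, rttvar, rto

srtt = rttvar = None
for sample in [0.10, 0.12, 0.25, 0.11]:  # RTT samples in seconds
    srtt, rttvar, rto = update_rto(srtt, rttvar, sample)
    print(round(rto, 3))
```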
<file_sep>/book/README.md
# Introduction
> This site records my notes while studying the TCP/IP protocol family. Do not republish without my permission.
## TCP/IP resources
- TCP/IP Illustrated, Volume 1: The Protocols http://www.52im.net/topic-tcpipvol1.html
- Study notes on "TCP/IP Illustrated" http://www.52im.net/forum.php?mod=collection&action=view&ctid=3
- All about TCP (part 1) https://coolshell.cn/articles/11564.html
- All about TCP (part 2) https://coolshell.cn/articles/11609.html
- Tech history: the TCP/IP protocols that changed the world http://www.52im.net/thread-520-1-1.html
## High-performance network programming
- How many concurrent TCP connections can a single server really support http://www.52im.net/thread-561-1-1.html
- The past decade: the famous C10K concurrent-connection problem http://www.52im.net/thread-566-1-1.html
- The next decade: time to think about the C10M concurrency problem http://www.52im.net/thread-568-1-1.html
- From C10K to C10M: exploring the theory of high-performance network applications http://www.52im.net/thread-578-1-1.html
- Understanding I/O models in high-performance network programming http://www.52im.net/thread-1935-1-1.html
- Understanding threading models in high-performance network programming http://www.52im.net/thread-1939-1-1.html
<file_sep>/demo/multiprocess/client.py
"""multiprocess client."""
import socket
ipaddr = '127.0.0.1'
port = 6000
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect((ipaddr, port))
client.send('Hello'.encode('utf-8'))
resp = client.recvfrom(1024)
print(resp[0].decode())
client.close()
<file_sep>/demo/simple/epoll_server.py
"""epoll server."""
import socket
import select
EOL1 = b'\n\n'
EOL2 = b'\n\r\n'
response = """HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Lenght:13\r\n\r\nHello world!"""
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
port = 8080
server.bind(('0.0.0.0', port))
server.listen()
server.setblocking(0)
epoll = select.epoll()
epoll.register(server.fileno(), select.EPOLLIN)
print('Server listen http://127.0.0.1:{}/'.format(port))
try:
conns = {}
requests = {}
responses = {}
while True:
events = epoll.poll(1)
for fileno, event in events:
if fileno == server.fileno():
conn, addr = server.accept()
conn.setblocking(0)
epoll.register(conn.fileno(), select.EPOLLIN)
conns[conn.fileno()] = conn
requests[conn.fileno()] = b''
responses[conn.fileno()] = response
            elif event & select.EPOLLIN:
                requests[fileno] += conns[fileno].recv(1024)
                if EOL1 in requests[fileno] or EOL2 in requests[fileno]:
                    epoll.modify(fileno, select.EPOLLOUT)
                    print('-' * 40 + '\n' + requests[fileno].decode()[:-2])
elif event & select.EPOLLOUT:
bytesdata = conns[fileno].send(responses[fileno])
responses[fileno] = responses[fileno][bytesdata:]
if len(responses[fileno]) == 0:
epoll.modify(fileno, 0)
conns[fileno].shutdown(socket.SHUT_RDWR)
elif event & select.EPOLLHUP:
epoll.unregister(fileno)
conns[fileno].close()
del conns[fileno]
except KeyboardInterrupt:
pass
finally:
epoll.unregister(server.fileno())
epoll.close()
server.close()
<file_sep>/book/protocol/ip.md
# The IP protocol
## IP address classes
> IPv4 addresses are divided into four classes: A, B, C and D
**Class A addresses**
`0.0.0.0 ~ 127.0.0.0`
**Class B addresses**
`128.0.0.0 ~ 191.255.0.0`
**Class C addresses**
`192.0.0.0 ~ 223.255.255.0`
**Class D addresses**
`224.0.0.0 ~ 239.255.255.255`
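A small sketch (not part of this repository's demos) that classifies a dotted-quad IPv4 address by its first octet:

```python
def ipv4_class(addr):
    """Return the class (A-E) of a dotted-quad IPv4 address."""
    first = int(addr.split('.')[0])
    if first < 128:
        return 'A'
    elif first < 192:
        return 'B'
    elif first < 224:
        return 'C'
    elif first < 240:
        return 'D'
    return 'E'  # reserved range

print(ipv4_class('10.0.0.1'))     # A
print(ipv4_class('192.168.1.1'))  # C
```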
## The ping command
> ping checks whether a host on the network is reachable
```bash
ping csdn.net
PING csdn.net (172.16.58.3): 56 data bytes
64 bytes from 172.16.58.3: icmp_seq=0 ttl=35 time=35.833 ms
64 bytes from 172.16.58.3: icmp_seq=1 ttl=35 time=37.213 ms
64 bytes from 172.16.58.3: icmp_seq=2 ttl=35 time=88.710 ms
64 bytes from 172.16.58.3: icmp_seq=3 ttl=35 time=33.389 ms
```
## The traceroute command
> traceroute traces the route packets take to a host
```bash
traceroute csdn.net
traceroute to csdn.net (172.16.58.3), 64 hops max, 52 byte packets
1 192.168.31.1 (192.168.31.1) 41.530 ms 1.268 ms 1.037 ms
2 192.168.1.1 (192.168.1.1) 1.672 ms 1.995 ms 2.720 ms
3 10.96.0.1 (10.96.0.1) 5.581 ms 5.305 ms 48.190 ms
4 192.168.127.12 (192.168.127.12) 7.064 ms 10.488 ms 6.935 ms
```<file_sep>/book/http/index.md
# The HTTP protocol
> HTTP, the HyperText Transfer Protocol, is an application-layer protocol built on top of TCP
## HTTP messages
### Request message
```
<method> <request-URL> <version>
<headers>
<body>
```
`Example`
```
GET / HTTP/1.1
Host: www.github.com
Connection: Keep-Alive
```
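A minimal sketch of sending such a request from Python over a plain TCP socket (it mirrors demo/simple/tcpclient.py in this repository; the host and port are only illustrative):

```python
import socket

request = (
    'GET / HTTP/1.1\r\n'
    'Host: www.github.com\r\n'
    'Connection: close\r\n'
    '\r\n'
)

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('www.github.com', 80))
sock.sendall(request.encode('ascii'))
print(sock.recv(4096).decode(errors='replace'))
sock.close()
```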
<file_sep>/demo/tcp/client.py
# https://pymotw.com/3/socket/tcp.html
# socket_echo_client.py
import socket
import sys
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect the socket to the port where the server is listening
server_address = ('localhost', 10000)
print('connecting to {} port {}'.format(*server_address))
sock.connect(server_address)
try:
# Send data
sock.sendall(b'hello world')
data = sock.recv(1024)
print('received {!r}'.format(data))
finally:
print('closing socket')
sock.close()
<file_sep>/book/tcp/congestion_control.md
# Congestion control
> Computer networks generally operate in a shared environment, so blasting out a large
> amount of data right at the start of a connection can easily cause congestion and
> bring the network down. To prevent this, TCP uses slow start at the beginning of a
> connection to control how much data is sent.
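A toy sketch of slow start (illustrative only): the congestion window starts at one segment and doubles every round trip until it reaches the slow-start threshold, after which it grows roughly linearly.

```python
def slow_start(ssthresh, rounds):
    """Yield the congestion window (in segments) for each round trip."""
    cwnd = 1
    for _ in range(rounds):
        yield cwnd
        if cwnd < ssthresh:
            cwnd *= 2   # exponential growth during slow start
        else:
            cwnd += 1   # rough congestion avoidance afterwards

print(list(slow_start(ssthresh=16, rounds=8)))  # [1, 2, 4, 8, 16, 17, 18, 19]
```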
<file_sep>/book/SUMMARY.md
# Summary
* [Introduction](README.md)
* [Data link](protocol/datalink.md)
* [The IP protocol](protocol/ip.md)
* [Routing protocols](protocol/route.md)
* [The TCP protocol](tcp/index.md)
    * [Window control](tcp/window.md)
    * [Timeout and retransmission](tcp/rt.md)
    * [Congestion control](tcp/congestion_control.md)
* [The HTTP protocol](http/index.md)
* [DNS](dns/index.md)
<file_sep>/book/app_protocol.md
# Application-layer protocols
- HTTP
- SSH
- SMTP
- POP3
- DNS
- DHCP
- NFS
<file_sep>/README.md
# tcpipnotes
TCP/IP notes
* [Introduction](README.md)
* [Data link](book/protocol/datalink.md)
* [The IP protocol](book/protocol/ip.md)
* [Routing protocols](book/protocol/route.md)
* [The TCP protocol](book/tcp/index.md)
* [Window control](book/tcp/window.md)
* [Timeout and retransmission](book/tcp/rt.md)
* [Application-layer protocols](book/app_protocol.md)
* [The SSH protocol](book/ssh.md)
* [The HTTP protocol](book/http/index.md)
## demo code
- [x] TCP simple server/client demo
- [x] UDP simple server/client demo
<file_sep>/book/protocol/datalink.md
# Data link
<file_sep>/demo/rawsocket/rawtcp.py
"""
https://tools.ietf.org/html/rfc793
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Source Port | Destination Port |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Sequence Number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Acknowledgment Number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Data | |U|A|P|R|S|F| |
| Offset| Reserved |R|C|S|S|Y|I| Window |
| | |G|K|H|T|N|N| |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Checksum | Urgent Pointer |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Options | Padding |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
"""
import socket
import struct
def handler_tcp_header(packet, iph_length):
"""Handler tcp header."""
tcp_header = packet[iph_length:iph_length + 20]
tcph = struct.unpack('!HHLLBBHHH', tcp_header)
src_port = tcph[0]
dst_port = tcph[1]
seq = tcph[2]
ack = tcph[3]
doff_reserved = tcph[4]
tcph_length = doff_reserved >> 4
print('Source Port: %s Dest Port: %s Seq: %s Ack: %s TCP header length: %s' %
(src_port, dst_port, seq, ack, tcph_length))
header_size = iph_length + tcph_length * 4
data_size = len(packet) - header_size
# get data from the packet
    data = packet[header_size:]
print('Data', data)
def main():
# create a raw socket and bind it to the public interface
sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_TCP)
host = socket.gethostbyname(socket.gethostname())
port = 0
sock.bind((host, port))
while True:
data, addr = sock.recvfrom(65565)
print('Data len: %s Addr: %s' % (len(data), addr))
handler_tcp_header(data, 20)
if __name__ == '__main__':
main()
<file_sep>/book/tcp/window.md
# Window control
## Sliding window
## Window size
## Slow start
<file_sep>/book/tcp/index.md
# The TCP protocol
Providing reliable transport on top of IP datagrams requires dealing with many problems,
for example:
- corrupted data
- packet loss
- duplicated segments
- fragments arriving out of order
TCP's design addresses these with:
- checksums
- sequence numbers
- acknowledgements
- retransmission control
- connection management
- window control
## TCP segment format

- Source Port: the port of the sending end
- Destination Port: the port of the receiving end
- Sequence Number: the position of the data being sent
- Acknowledgement Number: indicates that all data up to this number minus one has been received
- Data Offset: the length of the TCP header
- Reserved: kept mainly for future extensions
- Control flags
    - CWR: related to the congestion window
    - ECE
    - URG
    - ACK
    - PSH
    - RST
    - SYN
    - FIN
- Window Size: how much data may be sent at once
- Checksum: integrity check over the segment
- Urgent Pointer:
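The fixed 20-byte header described above can be unpacked directly with Python's struct module, the same way demo/rawsocket/rawtcp.py in this repository does (a minimal sketch):

```python
import struct

def parse_tcp_header(segment):
    """Unpack the fixed 20-byte TCP header at the start of `segment`."""
    (src_port, dst_port, seq, ack,
     offset_reserved, flags, window, checksum, urgent) = struct.unpack(
        '!HHLLBBHHH', segment[:20])
    data_offset = offset_reserved >> 4  # header length in 32-bit words
    return src_port, dst_port, seq, ack, data_offset, flags, window, checksum, urgent
```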
## TCP connection establishment and termination
- time sequence diagrams
- connection establishment
- connection establishment timeout
- maximum segment size (MSS)
- TCP half-close
- TCP state transitions
- the 2MSL wait state
- the quiet time concept
- reset segments (RST)
- simultaneous open
- simultaneous close
- TCP options
- TCP server design
## Three-way handshake, four-way teardown
- [Do you really understand the "three-way handshake, four-way teardown"?](https://www.cnblogs.com/qcrao-2018/p/10182185.html)
## TCP interactive data flow
## TCP bulk data flow
## TCP timeout and retransmission
## TCP timers
## The future and performance of TCP
<file_sep>/demo/rawsocket/rawsocket.py
"""raw socket demo."""
import sys
import socket
import struct
# def calc_checksum(data):
# """TCP checksum.
# https://tools.ietf.org/html/rfc1071
# """
# s = 0
# n = len(data) % 2
# for i in range(0, len(data) - n, 2):
# s += ord(data[i]) + (ord(data[i + 1]) << 8)
# if n:
# s += ord(data[i + 1])
# while (s >> 16):
# s = (s & 0xFFFF) + (s >> 16)
# s = ~s & 0xffff
# return s
def calc_checksum(source_string):
# I'm not too confident that this is right but testing seems to
# suggest that it gives the same answers as in_cksum in ping.c.
sum = 0
l = len(source_string)
    count_to = (l // 2) * 2
count = 0
while count < count_to:
this_val = source_string[count + 1] * 256 + source_string[count]
sum = sum + this_val
sum = sum & 0xffffffff # Necessary?
count = count + 2
if count_to < l:
sum = sum + source_string[l - 1]
sum = sum & 0xffffffff # Necessary?
sum = (sum >> 16) + (sum & 0xffff)
sum = sum + (sum >> 16)
answer = ~sum
answer = answer & 0xffff
# Swap bytes. Bugger me if I know why.
answer = answer >> 8 | (answer << 8 & 0xff00)
return answer
try:
    conn = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_TCP)
    # tell the kernel not to prepend its own IP header; this script builds it itself
    conn.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
except socket.error as msg:
print('Socket could not be created. Error %s' % msg)
sys.exit()
packet = b''
source_ip = '192.168.1.142'
dest_ip = '192.168.1.130'
def generate_ip_header():
ip_ihl = 5
ip_ver = 4
ip_tos = 0
ip_tot_len = 0
ip_id = 54321
ip_frag_off = 0
ip_ttl = 255
ip_proto = socket.IPPROTO_TCP
ip_check = 0
ip_saddr = socket.inet_aton(source_ip)
ip_daddr = socket.inet_aton(dest_ip)
ip_ihl_ver = (ip_ver << 4) + ip_ihl
ip_header = struct.pack('!BBHHHBBH4s4s', ip_ihl_ver, ip_tos, ip_tot_len,
ip_id, ip_frag_off, ip_ttl, ip_proto, ip_check, ip_saddr, ip_daddr)
return ip_header
def generate_tcp_header(source_port=None, destination_port=None, check_sum=None):
# tcp header fields
src_port = 34234 # source port
dst_port = 24830 # destination port
seq = 454
ack_seq = 0
doff = 5 # 4 bit field, size of tcp header, 5 * 4 = 20 bytes
# tcp flags
tcp_fin = 0
tcp_syn = 1
tcp_rst = 0
tcp_psh = 0
tcp_ack = 0
tcp_urg = 0
window_size = socket.htons(5840) # maximum allowed window size
    if check_sum is None:
        check_sum = 0
urg_ptr = 0
offset_res = (doff << 4) + 0
flags = tcp_fin + (tcp_syn << 1) + (tcp_rst << 2) + \
(tcp_psh << 3) + (tcp_ack << 4) + (tcp_urg << 5)
# the ! in the pack format string means network order
if check_sum:
tcp_header = struct.pack(
'!HHLLBBH', src_port, dst_port, seq, ack_seq,
offset_res, flags, window_size
) + struct.pack('H', check_sum) + struct.pack('!H', urg_ptr)
else:
tcp_header = struct.pack(
'!HHLLBBHHH', src_port, dst_port, seq, ack_seq,
offset_res, flags, window_size, check_sum, urg_ptr)
return tcp_header
user_data = b'Hello, how are you'
# pseudo header fields
src_addr = socket.inet_aton(source_ip)
dst_addr = socket.inet_aton(dest_ip)
placeholder = 0
tcp_header = generate_tcp_header()
tcp_length = len(tcp_header) + len(user_data)
psh = struct.pack('!4s4sBBH', src_addr, dst_addr,
placeholder, socket.IPPROTO_TCP, tcp_length)
psh = psh + tcp_header + user_data
check_sum = calc_checksum(psh)
# print tcp_checksum
# # make the tcp header again and fill the correct checksum - remember checksum is NOT in network byte order
tcp_header = generate_tcp_header(check_sum=check_sum)
ip_header = generate_ip_header()
# final full packet - syn packets dont have any data
packet = ip_header + tcp_header + user_data
# Send the packet finally - the port specified has no effect
# put this in a loop if you want to flood the target
result = conn.sendto(packet, (dest_ip, 0))
print(packet, result)
<file_sep>/demo/rawsocket/index.md
# rawsocket
- [Sending raw Ethernet frames from Python](https://csl.name/post/raw-ethernet-frames/)
- [Python socket.SOCK_RAW() Examples](https://www.programcreek.com/python/example/7887/socket.SOCK_RAW)
- [Raw socket communication on Linux in Python](http://www.kanadas.com/program-e/2014/08/raw_socket_communication_on_li.html)
- [A first look at Python sockets: a simple packet sniffer in Python 3](http://kissg.me/2016/05/01/python-socket-programming-and-packet-sniffer/)
- [Sniffing Ethernet frames with raw sockets in Python](https://blog.csdn.net/Jeanphorn/article/details/45482773)
|
0ca9af4250f2cbe970309afd48b5ad303915035c
|
[
"Markdown",
"Python"
] | 24 |
Python
|
istommao/tcpipnotes
|
2dfb8ad16709b4216e5de68206e3c4f222df1551
|
3fffbf85f581a1f9899224bde1b4bcefd3e2f141
|
refs/heads/master
|
<repo_name>yohe-kitamura/RssSampleAndroid<file_sep>/RssSmaple/app/src/main/java/jp/co/pockeps/rsssample/presentation/presenter/ArticleListPresenter.java
package jp.co.pockeps.rsssample.presentation.presenter;
import android.support.annotation.Nullable;
import javax.inject.Inject;
import jp.co.pockeps.rsssample.entity.Articles;
import jp.co.pockeps.rsssample.repository.NetworkListener;
import jp.co.pockeps.rsssample.repository.UxMilkRepository;
import jp.co.pockeps.rsssample.presentation.view.UxMilkListView;
public class ArticleListPresenter implements NetworkListener<Articles> {
@SuppressWarnings("WeakerAccess") final UxMilkRepository repository;
@Nullable private UxMilkListView view;
@Inject
ArticleListPresenter(UxMilkRepository repository) {
this.repository = repository;
}
public void setView(@Nullable UxMilkListView view) {
this.view = view;
}
/**
     * Fetch data
*/
public void loadDate() {
repository.getUxMilkRss(this);
}
@Override
public void onSuccess(Articles response) {
if (view != null) {
view.fetchData(response);
}
}
@Override
public void onFailure() {
if (view != null) {
view.loadError();
}
}
}
<file_sep>/RssSmaple/app/src/test/java/jp/co/pockeps/rsssample/presenter/ArticleListPresenterTest.java
package jp.co.pockeps.rsssample.presenter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.*;
import jp.co.pockeps.rsssample.entity.Articles;
import jp.co.pockeps.rsssample.presentation.presenter.ArticleListPresenter;
import jp.co.pockeps.rsssample.repository.NetworkListener;
import jp.co.pockeps.rsssample.repository.UxMilkRepository;
import rx.Scheduler;
import rx.android.plugins.RxAndroidPlugins;
import rx.android.plugins.RxAndroidSchedulersHook;
import rx.schedulers.Schedulers;
import static org.mockito.Matchers.*;
@SuppressWarnings("unchecked")
@RunWith(JUnit4.class)
public class ArticleListPresenterTest {
@Before
public void setUp() throws Exception {
RxAndroidPlugins.getInstance().registerSchedulersHook(new RxAndroidSchedulersHook() {
@Override
public Scheduler getMainThreadScheduler() {
return Schedulers.immediate();
}
});
}
@After
public void tearDown() {
RxAndroidPlugins.getInstance().reset();
}
@Test
public void loadDate() throws Exception {
UxMilkRepository repository = Mockito.spy(new UxMilkRepository());
ArticleListPresenter presenter = new ArticleListPresenter(repository);
presenter.loadDate();
Mockito.verify(repository, Mockito.times(1)).getUxMilkRss((NetworkListener<Articles>) any());
}
}<file_sep>/RssSmaple/app/src/main/java/jp/co/pockeps/rsssample/entity/uxmilk/Item.java
package jp.co.pockeps.rsssample.entity.uxmilk;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.Root;
import jp.co.pockeps.rsssample.util.DateUtil;
import jp.co.pockeps.rsssample.entity.Article;
@Root(strict = false)
public class Item {
private static final String DATE_FORMAT = "EEE, dd MMM yyyy HH:mm:ss Z";
@Element(required = false)
private String title;
@Element(required = false)
private String link;
@Element(required = false)
private String description;
@Element(required = false)
private String pubDate;
@SuppressWarnings("unused")
public Item() {
}
public Item(String title, String link, String pubDate, String description) {
this.title = title;
this.link = link;
this.pubDate = pubDate;
this.description = description;
}
public Article createArticle() {
return new Article(title, link, description, DateUtil.convertDateFromStr(DATE_FORMAT, pubDate), null);
}
}
<file_sep>/RssSmaple/app/build.gradle
apply plugin: 'com.android.application'
apply plugin: 'com.neenbedankt.android-apt'
android {
signingConfigs {
release {
keyAlias 'rsssample'
keyPassword 'opt634-15'
storeFile file('/Users/kitamurayouhei/Documents/dev/keys/rsssample/key.jks')
storePassword '<PASSWORD>'
}
}
compileSdkVersion 24
buildToolsVersion "24.0.2"
defaultConfig {
applicationId "jp.co.pockeps.rsssmaple"
minSdkVersion 21
targetSdkVersion 24
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled true
signingConfig signingConfigs.release
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
packagingOptions {
exclude 'META-INF/DEPENDENCIES'
exclude 'META-INF/LICENSE'
exclude 'META-INF/LICENSE.txt'
exclude 'META-INF/license.txt'
exclude 'META-INF/NOTICE'
exclude 'META-INF/NOTICE.txt'
exclude 'META-INF/notice.txt'
exclude 'META-INF/ASL2.0'
exclude 'META-INF/notice.txt'
}
}
dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
compile 'com.android.support:appcompat-v7:24.2.1'
//okhttp
compile 'com.squareup.okhttp:okhttp:2.4.0'
//Rx
compile 'io.reactivex:rxandroid:1.2.0'
//Retrofit
compile 'com.squareup.retrofit:retrofit:1.9.0'
compile 'com.squareup.retrofit2:adapter-rxjava:2.0.0'
compile('com.squareup.retrofit2:converter-simplexml:2.1.0') {
exclude module: 'stax'
exclude module: 'stax-api'
exclude module: 'xpp3'
}
//ButterKnife
compile 'com.jakewharton:butterknife:8.4.0'
apt 'com.jakewharton:butterknife-compiler:8.4.0'
//Dagger2
compile 'com.google.dagger:dagger:2.0.2'
androidTestApt 'com.google.dagger:dagger-compiler:2.0.2'
apt 'com.google.dagger:dagger-compiler:2.0.2'
provided 'javax.annotation:jsr250-api:1.0'
//Chrome Custom Tab
compile 'com.android.support:customtabs:24.2.1'
//Jackson
compile 'com.fasterxml.jackson.core:jackson-databind:2.3.4'
//realm
compile 'io.realm:realm-android:0.86.0'
//Espresso
androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
exclude group: 'com.android.support', module: 'support-annotations'
})
androidTestCompile('com.android.support.test.espresso:espresso-intents:2.2.2', {
exclude group: 'com.android.support', module: 'support-annotations'
})
androidTestCompile 'com.android.support:support-annotations:24.2.1'
androidTestCompile 'com.android.support.test:runner:0.5'
androidTestCompile 'com.android.support.test:rules:0.5'
androidTestCompile 'org.hamcrest:hamcrest-library:1.3'
androidTestCompile 'org.mockito:mockito-core:1.9.5'
androidTestCompile 'com.google.dexmaker:dexmaker:1.1'
androidTestCompile 'com.google.dexmaker:dexmaker-mockito:1.1'
//JUnit
testCompile 'junit:junit:4.12'
testCompile 'org.mockito:mockito-core:1.10.19'
//LeakCanary
debugCompile 'com.squareup.leakcanary:leakcanary-android:1.3'
releaseCompile 'com.squareup.leakcanary:leakcanary-android-no-op:1.3'
}
<file_sep>/RssSmaple/app/src/main/java/jp/co/pockeps/rsssample/presentation/adapter/ArticleAdapter.java
package jp.co.pockeps.rsssample.presentation.adapter;
import android.content.Context;
import android.support.annotation.NonNull;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import butterknife.BindView;
import jp.co.pockeps.rsssample.R;
import jp.co.pockeps.rsssample.entity.Article;
import jp.co.pockeps.rsssample.entity.Articles;
public class ArticleAdapter extends ArrayAdapter<Article> {
public ArticleAdapter(Context context, Articles articles) {
super(context, 0, articles.getArticles());
}
@NonNull
@Override
public View getView(int position, View convertView, @NonNull ViewGroup parent) {
ViewHolder viewHolder;
if (convertView == null) {
convertView = View.inflate(getContext(), R.layout.adater_item, null);
viewHolder = new ViewHolder(convertView);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
Article item = getItem(position);
if (item != null) {
viewHolder.title.setText(item.title);
viewHolder.pubDate.setText(item.getPubDateFormatString());
viewHolder.description.setText(item.description);
}
return convertView;
}
class ViewHolder {
@BindView(R.id.pub_date) TextView pubDate;
@BindView(R.id.title) TextView title;
@BindView(R.id.description) TextView description;
ViewHolder(View view) {
butterknife.ButterKnife.bind(this, view);
}
}
}
<file_sep>/RssSmaple/app/src/main/java/jp/co/pockeps/rsssample/entity/uxmilk/UxMilkRss.java
package jp.co.pockeps.rsssample.entity.uxmilk;
import android.support.annotation.NonNull;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.Root;
import java.util.ArrayList;
import java.util.List;
import jp.co.pockeps.rsssample.entity.Article;
@Root(name = "rss", strict = false)
public class UxMilkRss {
@Element
public Channel channel;
public List<Item> getItems() {
if (channel == null) {
return new ArrayList<>();
}
return channel.items;
}
@NonNull
public List<Article> getArticles() {
List<Article> list = new ArrayList<>();
for (Item item : getItems()) {
list.add(item.createArticle());
}
return list;
}
}
<file_sep>/RssSmaple/app/src/androidTest/java/jp/co/pockeps/rsssample/mock/di/module/MockInfraLayerModule.java
package jp.co.pockeps.rsssample.mock.di.module;
import android.support.annotation.NonNull;
import dagger.Module;
import dagger.Provides;
import jp.co.pockeps.rsssample.repository.UxMilkRepository;
@Module
public class MockInfraLayerModule {
private final UxMilkRepository repository;
public MockInfraLayerModule(@NonNull UxMilkRepository repository) {
this.repository = repository;
}
@Provides
public UxMilkRepository provideUxMilkRepository(){
return repository;
}
}
<file_sep>/README.md
# RssSampleAndroid
A sample that displays the RSS feed of a specific site.
Libraries I want to try:
- Realm
Personally I feel it is still a bit early to use Realm on Android, but I will give it a try.
- RxAndroid
It is popular these days, so I will try it, but only for the API communication part.
- Dagger2
Used to make testing go smoothly.
<file_sep>/RssSmaple/app/src/main/java/jp/co/pockeps/rsssample/network/UxMilkService.java
package jp.co.pockeps.rsssample.network;
import jp.co.pockeps.rsssample.entity.uxmilk.UxMilkRss;
import retrofit2.Response;
import retrofit2.http.GET;
import rx.Observable;
public interface UxMilkService {
/**
     * Fetch the RSS feed
     *
     * @return Observable used to fetch the RSS feed
*/
@GET("/feed")
Observable<Response<UxMilkRss>> get();
}
|
99e6a4f1ffbe6edf53b7c6cbf2e07cab4764164c
|
[
"Markdown",
"Java",
"Gradle"
] | 9 |
Java
|
yohe-kitamura/RssSampleAndroid
|
ee0f0798c2ef2a8be37655b8734e6efd446e331c
|
55b2cc1808799a34a17fda39dcfb9266f7e14944
|
refs/heads/master
|
<repo_name>philbarker/sharinglearning_theme<file_sep>/functions.php
<?php
/**
* Sharing and learning functions and definitions
*
* supplements or overrides functions in Twenty_Thirteen parent theme
* @package WordPress
* @subpackage Twenty_Thirteen
* @since sharinglearning 0.1
*/
defined( 'ABSPATH' ) or die( 'Be good. If you can\'t be good be careful' );
if ( ! function_exists( 'sharinglearning_entry_meta' ) ) :
/**
* Print HTML with meta information for current post: categories, tags, permalink, author, and date.
*
* Create your own twentythirteen_entry_meta() to override in a child theme.
*
* @since Twenty Thirteen 1.0
*/
function sharinglearning_entry_meta() {
if ( is_sticky() && is_home() && ! is_paged() )
echo '<span class="featured-post">' . __( 'Sticky', 'twentythirteen' ) . '</span>';
if ( ! has_post_format( 'link' ) && 'post' == get_post_type() )
sharinglearning_entry_date();
// Translators: used between list items, there is a space after the comma.
$categories_list = get_the_category_list( __( ', ', 'twentythirteen' ) );
if ( $categories_list ) {
echo '<span class="categories-links">' . $categories_list . '</span>';
}
// Translators: used between list items, there is a space after the comma.
$tag_list = get_the_tag_list( '', __( ', ', 'twentythirteen' ) );
if ( $tag_list ) {
echo '<span class="tags-links">' . $tag_list . '</span>';
}
// Post author
if ( 'post' == get_post_type() ) {
printf( '<span class="author vcard"><a class="url fn n" href="%1$s" title="%2$s" rel="author" itemprop="author">%3$s</a></span>',
esc_url( get_author_posts_url( get_the_author_meta( 'ID' ) ) ),
esc_attr( sprintf( __( 'View all posts by %s', 'twentythirteen' ), get_the_author() ) ),
get_the_author()
);
}
}
endif;
/* Create a custom post type for Week Notes
*
*/
add_action( 'init', 'sharinglearning_create_weeknote_type' );
function sharinglearning_create_weeknote_type() {
register_post_type( 'weeknote',
array(
'labels' => array(
'name' => __( 'Week Notes', 'sharinglearning' ),
'singular_name' => __( 'Week Note', 'sharinglearning' )
),
'public' => true,
'menu_position' => 5,
'has_archive' => true,
'rewrite' => array('slug' => 'weeknotes'),
'supports' => array('title', 'editor', 'author',
'revisions', 'thumbnail' )
)
);
}
if ( ! function_exists( 'sharinglearning_entry_date' ) ) :
/**
* Print HTML with date information for current post.
*
* Create your own twentythirteen_entry_date() to override in a child theme.
*
* @since Twenty Thirteen 1.0
*
* @param boolean $echo (optional) Whether to echo the date. Default true.
* @return string The HTML-formatted post date.
*/
function sharinglearning_entry_date( $echo = true ) {
if ( has_post_format( array( 'chat', 'status' ) ) )
$format_prefix = _x( '%1$s on %2$s', '1: post format name. 2: date', 'twentythirteen' );
else
$format_prefix = '%2$s';
$date = sprintf( '<span class="date"><a href="%1$s" title="%2$s" rel="bookmark"><time class="entry-date" datetime="%3$s"><span itemprop="dateCreated datePublished" >%4$s</span></time></a></span>',
esc_url( get_permalink() ),
esc_attr( sprintf( __( 'Permalink to %s', 'twentythirteen' ), the_title_attribute( 'echo=0' ) ) ),
esc_attr( get_the_date( 'c' ) ),
esc_html( sprintf( $format_prefix, get_post_format_string( get_post_format() ), get_the_date() ) )
);
if ( $echo )
echo $date;
return $date;
}
endif;
add_theme_support( 'post-thumbnails' );
function default_thumbnail() {
$img_html = '<img src="'
.get_stylesheet_directory_uri()
.'/images/'
.get_post_type()
.'.svg"'
.'alt="'
.get_the_title()
.'" class="pubwp-thumb"/>';
echo $img_html;
}
function jeherve_custom_image( $media, $post_id, $args ) {
if ( $media ) {
return $media;
} else {
$permalink = get_permalink( $post_id );
$url = apply_filters( 'jetpack_photon_url', 'http://blogs.pjjk.net/phil/content/uploads/me.jpg' );
return array( array(
'type' => 'image',
'from' => 'custom_fallback',
'src' => esc_url( $url ),
'href' => $permalink,
) );
}
}
add_filter( 'jetpack_images_get_images', 'jeherve_custom_image', 10, 3 );
<file_sep>/archive-publication.php
<?php
/**
* Template archive of publications
*
* @package WordPress
* @subpackage Sharing_and_learning/pubwp
* @since Sharing_and_learning 2.0
*/
?>
<?php
$cpargs = array('_builtin' => False,
'exclude_from_search' => False,
'pubwp_type' => 'publication');
$custom_post_types = get_post_types( $cpargs, 'names', 'and' );
$args = array(
'post_type' => $custom_post_types,
'posts_per_page' => -1
);
$publications = new WP_Query( $args );
?>
<div id="primary" class="content-area">
<div id="content" class="site-content" role="main">
<?php if ( $publications->have_posts() ) : ?>
<header class="archive-header">
<h1 class="archive-title"><NAME>'s publication list</h1>
<div class="entry-meta"></div>
</header><!-- .archive-header -->
<?php /* The loop */ ?>
<?php while ( $publications->have_posts() ) : $publications->the_post(); ?>
<article id="post-<?php the_ID(); ?>">
<header class="entry-header">
<div class="entry-meta">
<?php default_thumbnail(); ?>
<p class="pubwp-ref">
<?php echo pubwp_citation( $post ); ?>
</p>
</div>
</header><!-- .entry-header -->
<div class="entry-content" > </div>
<footer class="entry-meta" />
</article>
<?php endwhile; ?>
<?php /* end of the loop */ ?>
<?php endif; ?>
</div><!-- #content -->
</div><!-- #primary -->
<?php get_sidebar(); ?>
<?php get_footer(); ?>
<file_sep>/single-pubwp_confpaper.php
<?php
/**
* Template for displaying single journal papers
*
* @package WordPress
* @subpackage Sharing_and_learning/pubwp
* @since Sharing_and_learning 2.0
*/
get_header(); ?>
<div id="primary" class="content-area">
<header class="archive-header">
<h1 class="archive-title"><NAME>'s publications</h1>
</header><!-- .archive-header -->
<div id="content" class="site-content" role="main">
<?php /* The loop */ ?>
<?php while ( have_posts() ) : the_post(); ?>
<article id="post-<?php the_ID(); ?>"
<?php post_class(); ?>
typeof="http://schema.org/ScholarlyArticle">
<header class="entry-header">
<h1 property="name" class="pubwp-title"><?php the_title(); ?></h1>
<?php if ( has_post_thumbnail()
&& ! post_password_required()
&& ! is_attachment() ) : ?>
<div class="publication-thumbnail">
<?php the_post_thumbnail(); ?>
</div>
<?php elseif ( ! post_password_required()
&& ! is_attachment() ) : ?>
<div class="publication-thumbnail">
<?php default_thumbnail(); ?>
</div>
<?php endif; ?>
<div class="pubwp-entry-meta">
<p> <?php pubwp_print_authors( ); ?>
(<?php pubwp_print_date_published( ); ?>)<br />
Presented at:
<?php pubwp_print_meeting_info( ); ?><br />
<?php pubwp_print_isbn( $br=True ); ?>
<?php pubwp_print_uri( $br=True ); ?>
<?php pubwp_print_peer_reviewed($before='Peer reviewed: ',
$after='.' ,
$br = True); ?>
<?php pubwp_print_licence_info( $post );?>
</p>
</div> <!-- .pubwp-entry-meta -->
</header><!-- .entry-header -->
<div class="entry-content">
<h2 class="pubwp-abs-head">Abstract</h2>
<?php pubwp_print_abstract( ); ?>
<?php pubwp_print_local_info( ); ?>
</div><!-- .entry-content -->
<?php endwhile; ?>
<?php /* end of the loop */ ?>
</div><!-- #content -->
</div><!-- #primary -->
<?php get_sidebar(); ?>
<?php get_footer(); ?>
|
639ffeff2585ff664bcc92f3dc5c23042489d451
|
[
"PHP"
] | 3 |
PHP
|
philbarker/sharinglearning_theme
|
089b48ff7ec3a3639c0760baa611412914babfff
|
da6dd8b81e51669df39a19e262cef67ec87fc28e
|
refs/heads/master
|
<repo_name>robertpainsi/robertpainsi.github.io<file_sep>/templates/urls.js
"use strict";
var URLS = (function() {
var BASE;
BASE = 'https://raw.githubusercontent.com/robertpainsi/robertpainsi.github.data/master/';
return {
BASE: BASE,
PROGRAM_STATISTICS: BASE + 'catrobat/program-statistics.json'
};
}());
<file_sep>/templates/utils.js
"use strict";
var webcomponentsUtils = {
importScope: function(callback) {
callback(document._currentScript.ownerDocument);
},
registerElement: function(tagName, registerCallbacks) {
var proto = Object.create(HTMLElement.prototype);
registerCallbacks.call(proto);
document.registerElement(tagName, {prototype: proto});
},
addTemplate: function(element, template) {
element.appendChild(webcomponentsUtils.cloneTemplate(template));
return element.lastElementChild;
},
cloneTemplate: function(template) {
return document.importNode(template.content, true);
},
appendChildren: function(children, newParent) {
for (var i = 0; i < children.length; i++) {
newParent.appendChild(children[i]);
}
},
prependChildren: function(children, newParent) {
for (var i = children.length - 1; i >= 0; i--) {
newParent.insertBefore(children[i], newParent.firstElementChild);
}
},
/**
*
* @param from, either HTMLElement or an Array of Attributes
* @param to, either HTMLElement or an Array of HTMLElement
*/
passAllAttributes: function(from, to) {
if (to.constructor === HTMLCollection) {
for (var i = 0; i < to.length; i++) {
webcomponentsUtils.passAllAttributes(from, to[i]);
}
} else {
for (var i = 0; i < from.attributes.length; i++) {
var attribute = from.attributes[i];
to.setAttribute(attribute.name, attribute.value);
}
}
}
};
var htmlUtils = {
escapeHtml: function(htmlString) {
return htmlString
.replace(/&/g, "&")
.replace(/</g, "<")
.replace(/>/g, ">")
.replace(/"/g, """)
.replace(/'/g, "'");
},
getAttributeValue: function(e, attribute) {
var attr = e.attributes.getNamedItem(attribute);
if (attr) {
return attr.value;
}
return undefined;
},
scrollTo: function(id) {
if (!arguments.length) {
id = location.hash.substring(location.hash.indexOf('#') + 1);
}
if (typeof id === 'string') {
id = document.getElementById(id);
}
if (id) {
id.scrollIntoView(true);
}
},
toElement: function(htmlString) {
var template = document.createElement('template');
template.innerHTML = htmlString;
return template.content.firstChild;
},
clear: function(e) {
while (e.firstChild) {
e.removeChild(e.firstChild);
}
},
removeClassFromElements: function(clazz) {
var elements = document.getElementsByClassName(clazz);
while (elements.length) {
elements[0].classList.remove(clazz);
}
},
getParameterByName: function(name, url) {
if (!url) url = window.location.href;
name = name.replace(/[\[\]]/g, "\\$&");
var regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)"),
results = regex.exec(url);
if (!results) return null;
if (!results[2]) return '';
return decodeURIComponent(results[2].replace(/\+/g, " "));
},
prepend: function(e, container) {
container.insertBefore(e, container.firstChild);
}
};
var mathUtils = {
trimToRange: function(value, from, to) {
if (value < from) {
return from;
} else if (value > to) {
return to;
} else {
return value;
}
},
toRadians: function(degree) {
return degree * (Math.PI / 180);
},
toDegrees: function(radians) {
return radians * (180 / Math.PI);
},
distance: function(x1, y1, x2, y2) {
x2 = x2 || 0;
y2 = y2 || 0;
return Math.sqrt(Math.pow(x1 - x2, 2) + Math.pow(y1 - y2, 2))
}
};
var colorUtils = {
hexToRGB: function(hex) {
if (typeof hex === 'string') {
hex = hex.replace('#', '');
}
hex = parseInt(hex, 16);
var r = hex >> 16;
var g = hex >> 8 & 0xFF;
var b = hex & 0xFF;
return {r: r, g: g, b: b};
},
hexToRGBA: function(hex, a) {
var rgb = colorUtils.hexToRGB(hex);
rgb.a = a;
return rgb;
},
rgbToHex: function(rgb, addLeadingHash) {
var r = 0;
var g = 0;
var b = 0;
if (arguments.length < 3) {
r = rgb.r;
g = rgb.g;
b = rgb.b;
} else {
r = arguments[0];
g = arguments[1];
b = arguments[2];
}
r = numberUtils.pad(mathUtils.trimToRange(r, 0, 255).toString(16), 2);
g = numberUtils.pad(mathUtils.trimToRange(g, 0, 255).toString(16), 2);
b = numberUtils.pad(mathUtils.trimToRange(b, 0, 255).toString(16), 2);
return ((addLeadingHash) ? '#' : '') + r + g + b;
},
rgba: function(rgba) {
var r = 0;
var g = 0;
var b = 0;
var a = 0;
if (arguments.length === 1) {
r = rgba.r;
g = rgba.g;
b = rgba.b;
a = rgba.a;
} else {
r = arguments[0];
g = arguments[1];
b = arguments[2];
a = arguments[3];
}
return 'rgba(' + r + ', ' + g + ', ' + b + ', ' + a + ')';
},
increaseBrightnessColor: function(hex, by) {
var c = colorUtils.hexToRGB(hex);
c.r += by;
c.g += by;
c.b += by;
return colorUtils.rgbToHex(c, true);
}
};
var numberUtils = {
pad: function(num, size) {
var s = num + "";
while (s.length < size) s = "0" + s;
return s;
}
};
<file_sep>/web-nfc/NFC.js
/* global NDEFReader, NDEFWriter */
/* eslint no-undef: "error" */
/*
* TODO: Scan (read) only specific id
* TODO: Scan (read) only specific mediaType
* TODO: Handle different recordType read
* TODO: Handle different mediaType read
* TODO: Read id, etc
*
* TODO: Write multiple records
* TODO: Write id?, recordType, mediaType, etc
* TODO: ignoreRead while writing (default)
*
* TODO: Abort NFC operations https://web.dev/nfc/#abort-nfc-operations
*/
class NFC {
reader = null;
async getPermissionState() {
return (await navigator.permissions.query({name: 'nfc'})).state;
}
async initRead() {
if (await this.getPermissionState() === 'denied') {
this.reader = null;
throw new Error(`NFC permission denied`);
}
this.reader = new NDEFReader();
await this.reader.scan();
this.reader.onreading = (event) => {
const message = event.message;
for (const record of message.records) {
const data = new TextDecoder(record.encoding).decode(record.data); // TODO partially read record causes exception
document.dispatchEvent(new CustomEvent('nfc-read-text', {
bubbles: false,
cancelable: false,
detail: {
text: data,
},
}));
}
};
this.reader.onerror = (event) => {
console.log('error');
document.dispatchEvent(new CustomEvent('nfc-read-error', {
bubbles: false,
cancelable: false,
detail: event,
}));
};
}
async writeText(text) {
const writer = new NDEFWriter();
await writer.write({
records: [
{
recordType: 'text',
lang: 'en',
encoding: 'utf-8',
data: text,
}],
});
}
}
const nfc = new NFC();
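// Usage sketch (not from the original file; Web NFC needs a secure context and a user gesture, typically Chrome on Android).
// `scanButton` below is a hypothetical button element; the events and methods are the ones defined by this class.
// document.addEventListener('nfc-read-text', (event) => console.log('Tag text:', event.detail.text));
// document.addEventListener('nfc-read-error', (event) => console.warn('NFC error', event.detail));
// scanButton.addEventListener('click', async () => {
//   await nfc.initRead();          // starts scanning and dispatches the events above
//   await nfc.writeText('hello');  // writes a single text record to the next tag
// });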
<file_sep>/catrobat/statistics/scripts/chart-utils.js
"use strict";
var CHART_COLORS = [
'#1f8dd6',
'#ff3333',
'#3fca3f',
'#ff973b',
'#7e649a',
'#bb6434',
'#a6cee3',
'#fb9a99',
'#b2df8a',
'#fdbf6f',
'#cab2d6',
'#ffff99'
];
function sortData(data, sortFunction) {
data = data.slice();
if (data[data.length - 1].isOthers) {
var others = data.pop();
}
if (typeof sortFunction === "function") {
data.sort(sortFunction);
} else {
data.sort(function(a, b) {
return b.value - a.value;
});
}
if (others) {
data.push(others);
}
return data;
}
function getDataByLabel(data, label) {
var result = null;
data.some(function(d) {
if (d.label === label) {
result = d;
return true;
}
});
return result;
}
function topOccurrences(data, top, options) {
var other = 0;
var result = data.slice().sort(function(a, b) {
return b.value - a.value;
}).filter(function(e, i, a) {
if (i < top - 1 || e.value === a[top - 1].value) {
return true;
} else {
other += e.value;
return false;
}
});
if (options.others && other > 0) {
result.push({
isOthers: true,
label: 'Other',
value: other,
color: '#eaeaea'
});
}
return result;
}
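// Example (a sketch derived from the code above; entries tied with the value at the cut-off are kept,
// everything below it is summed into 'Other'):
// topOccurrences([{label: 'a', value: 5}, {label: 'b', value: 3}, {label: 'c', value: 1}], 2, {others: true})
// // -> [{label: 'a', value: 5}, {label: 'b', value: 3}, {isOthers: true, label: 'Other', value: 1, color: '#eaeaea'}]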
function toDiffHtml(value, previousValue, options) {
if (isNumber(value)) {
options = options || {};
var diff = toDiffNumber(value, previousValue);
var diffClass;
if (diff === 'discontinued' || diff === 'out') {
diffClass = 'diff diff-less';
} else if (diff === 'new') {
diffClass = 'diff diff-greater';
} else if (diff < 0) {
diffClass = 'diff diff-less';
} else if (diff > 0) {
diffClass = 'diff diff-greater';
} else {
diffClass = 'diff diff-equals';
}
return '<span class="' + diffClass + '"' + (options.style ? ' style="' + options.style + '"' : '') + '>' + toDiffText(value, previousValue) + '</span>';
} else {
return '';
}
}
function toDiffText(value, previousValue) {
if (isNumber(value)) {
var diff = toDiffNumber(value, previousValue);
var prefix = '';
if (diff > 0) {
prefix = '+';
}
if (isNumber(diff)) {
return prefix + diff.toFixed(2) + '%';
} else {
return prefix + diff;
}
} else {
return '';
}
}
function toDiffNumber(value, previousValue) {
if (isNumber(value) && isNumber(previousValue)) {
if (previousValue === 0) {
return 'new';
} else {
return (value - previousValue) * 100 / previousValue;
}
} else if (!isNumber(value) && isNumber(previousValue)) {
return 'discontinued';
} else if (isNumber(value) && !isNumber(previousValue)) {
return 'new';
} else {
return 'out';
}
}
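// Worked examples (derived from toDiffNumber/toDiffText above):
// toDiffNumber(110, 100)     // -> 10 (value grew by 10 %)
// toDiffNumber(90, 100)      // -> -10
// toDiffNumber(5, 0)         // -> 'new' (no previous value to compare against)
// toDiffNumber(undefined, 5) // -> 'discontinued'
// toDiffText(110, 100)       // -> '+10.00%'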
function objectToArray(o) {
if (!o) {
return null
}
return Object.keys(o).map(function(key) {
return {
label: key,
value: o[key]
};
});
}
var chartContainer = [];
function createChart(e, options) {
var chart = new Chart(e, options);
chartContainer.push(chart);
chart.generateLegend();
return chart;
}
function destroyChart(chart) {
chartContainer.splice(chartContainer.indexOf(chart), 1);
chart.destroy();
}
function destroyCharts() {
chartContainer.forEach(function(chart) {
destroyChart(chart);
});
}
window.addEventListener('resize', function() {
chartContainer.forEach(function(c) {
var chartjsIFrame = c.chart.canvas.parentElement.querySelector('.chartjs-hidden-iframe');
var newChartWidth = chartjsIFrame.clientWidth;
document.getElementById('new-and-remixed-programs-chart').style.width = newChartWidth - 24 + 'px';
})
});
<file_sep>/web-bluetooth-lego-train-controller/index.js
'use strict';
async function wait(millis) {
return new Promise((resolve) => {
setTimeout(resolve, millis);
});
}
function clamp(value, min, max) {
return Math.min(Math.max(value, min), max);
};
window.addEventListener('load', async () => {
const sliderContainer = document.getElementById('slider-container');
const reconnectContainer = document.getElementById('reconnect-container');
const reconnectAttemptElement = document.getElementById('reconnect-attempt');
const reconnectMessage = document.getElementById('reconnecting-message');
const connectContainer = document.getElementById('connect-container');
const connectButton = document.getElementById('connect-button');
const connectErrorMessage = document.getElementById('connect-error-message');
const legoTrain = document.getElementById('lego-train');
const legoWire = document.getElementById('lego-wire');
let wireDirection = parseInt(Cookies.get('wire-direction'));
if (isNaN(wireDirection)) {
wireDirection = -1;
}
let connectorPosition = parseInt(Cookies.get('connector-position'));
if (isNaN(connectorPosition)) {
connectorPosition = -1;
}
function updateLegoUi() {
Cookies.set('wire-direction', wireDirection, {expires: 365});
Cookies.set('connector-position', connectorPosition, {expires: 365});
let scale = 1;
let translateX = 0;
if (wireDirection === 1 && connectorPosition === 1) {
translateX = 100;
scale = 1;
} else if (wireDirection === 1 && connectorPosition === -1) {
translateX = -35;
scale = 1;
} else if (wireDirection === -1 && connectorPosition === 1) {
translateX = 35;
scale = -1;
} else if (wireDirection === -1 && connectorPosition === -1) {
translateX = -100;
scale = -1;
}
legoWire.style.setProperty('transform', `translateX(${translateX}px) scaleX(${scale})`);
legoTrain.style.setProperty('transform', `scaleX(${connectorPosition})`);
}
updateLegoUi();
legoTrain.classList.add('show');
legoWire.classList.add('show');
legoTrain.addEventListener('click', function(event) {
connectorPosition *= -1;
updateLegoUi();
});
legoWire.addEventListener('click', function(event) {
wireDirection *= -1;
updateLegoUi();
});
let slider = null;
let currentValue = null;
const bluetooth = new Bluetooth();
bluetooth.addEventListener('connect', () => {
if (slider) {
slider.setValue(currentValue, true);
}
sliderContainer.style.setProperty('display', '');
connectContainer.style.setProperty('display', 'none', 'important');
reconnectContainer.style.setProperty('display', 'none', 'important');
});
let reconnectAttempt;
bluetooth.addEventListener('disconnect', () => {
reconnectAttempt = 1;
reconnectMessage.innerText = '';
sliderContainer.style.setProperty('display', 'none', 'important');
reconnectContainer.style.setProperty('display', '');
});
bluetooth.addEventListener('reconnecting', (event) => {
reconnectAttemptElement.innerText = `#${reconnectAttempt}`;
reconnectAttempt++;
});
bluetooth.addEventListener('reconnect-failed', (event) => {
reconnectMessage.innerText = event.detail.message;
});
connectButton.addEventListener('click', async () => {
connectErrorMessage.innerHTML = '';
connectButton.disabled = true;
try {
await bluetooth.connect();
const values = 17; // positive, odd number
const factor = 1; // > 0 && <= 1
const lowerStart = 0.45;
const options = [];
const middle = (values - 1) / 2;
for (let i = 0; i < values; i++) {
let distance = -(i - middle);
if (distance === -0) {
distance = 0;
}
const percentage = Math.round(distance / middle * 10000) / 10000;
const absPercentage = Math.abs(percentage);
let value = (factor * percentage / (1 + lowerStart) + lowerStart * Math.sign(distance)) * -1;
if (wireDirection === 1 && connectorPosition === 1) {
value *= 1;
} else if (wireDirection === 1 && connectorPosition === -1) {
value *= 1;
} else if (wireDirection === -1 && connectorPosition === 1) {
value *= -1;
} else if (wireDirection === -1 && connectorPosition === -1) {
value *= -1;
}
let backgroundColor;
if (distance === 0) {
backgroundColor = ``;
} else if (distance > 0) {
backgroundColor = `rgba(${Math.round(absPercentage * 255)}, ${Math.round((1 - absPercentage) * 255)}, 0, 0.6)`;
} else {
backgroundColor = `rgba(0, ${Math.round((1 - absPercentage) * 255)}, ${Math.round(absPercentage * 255)}, 0.6)`;
}
options.push({
label: `${Math.round(percentage * 100)} %`,
value,
backgroundColor,
});
}
console.log(options);
currentValue = options[middle].value;
slider = new Slider(document.getElementById('slider'), {options});
slider.addEventListener('change', (event) => {
currentValue = event.detail.value;
bluetooth.writeValue(event.detail.value);
});
slider.setValue(currentValue);
} catch (e) {
console.error(e);
connectErrorMessage.innerText = e.message;
connectButton.disabled = false;
}
});
});
class Bluetooth {
#elementForListeners = document.createElement('div');
#device = null;
#characteristic = null;
#writePromise = Promise.resolve();
#currentValue = null;
#latestValueToWrite = null;
async connect() {
console.log('Pressed', navigator, navigator.bluetooth);
this.#device = await navigator.bluetooth.requestDevice({
optionalServices: ['0bd51666-e7cb-469b-8e4d-2742f1ba77cc'],
acceptAllDevices: true,
});
console.log('Device', this.#device);
this.#device.addEventListener('gattserverdisconnected', () => {
console.log('Disconnected');
this.#characteristic = null;
this.#elementForListeners.dispatchEvent(new CustomEvent('disconnect', {}));
this.#reconnect();
});
await this.#connect();
}
async #connect() {
console.log('Connecting');
const server = await this.#device.gatt.connect();
console.log('Server', server);
const service = await server.getPrimaryService('0bd51666-e7cb-469b-8e4d-2742f1ba77cc');
console.log('Service', service);
for (const characteristic of await service.getCharacteristics('e7add780-b042-4876-aae1-112855353cc1')) {
console.log('Characteristic', characteristic);
this.#characteristic = characteristic;
}
this.#elementForListeners.dispatchEvent(new CustomEvent('connect', {}));
}
async #reconnect() {
let reconnectWait = 1000;
while (true) {
this.#elementForListeners.dispatchEvent(new CustomEvent('reconnecting', {}));
try {
console.log('Reconnecting');
await this.#connect();
break;
} catch (e) {
console.error(e);
this.#elementForListeners.dispatchEvent(new CustomEvent('reconnect-failed', {detail: e}));
}
await wait(reconnectWait);
reconnectWait = Math.min(reconnectWait + 200, 5000);
}
}
isConnected() {
return !!this.#characteristic;
}
async writeValue(value) {
if (!this.isConnected()) {
throw Error(`Bluetooth not connected`);
}
this.#latestValueToWrite = value;
this.#writePromise = this.#writePromise
.then(async () => {
if (value === this.#latestValueToWrite && value !== this.#currentValue) {
// if (this.#currentValue === 0) {
// let startUpBoostValue = clamp(value * 1.1, -1, 1);
// console.log('Sending start up boost: ' + startUpBoostValue);
// await this.#characteristic.writeValue(new TextEncoder('utf-8').encode(startUpBoostValue));
// await wait(10);
// }
console.log('Sending: ' + value);
await this.#characteristic.writeValue(new TextEncoder('utf-8').encode(value));
this.#currentValue = value;
await wait(20);
}
})
.catch(console.error);
return this.#writePromise;
}
addEventListener(type, listener) {
this.#elementForListeners.addEventListener(type, listener);
}
removeEventListener(type, listener) {
this.#elementForListeners.removeEventListener(type, listener);
}
}
class Slider {
#elementForListeners = document.createElement('div');
#lastTriggerTarget = null;
#steps = [];
#middleIndex = null;
constructor(element, {options, middle = null}) {
if (middle === null) {
this.#middleIndex = (options.length - 1) / 2;
}
for (const option of options) {
const stepElement = document.createElement('div');
stepElement.classList.add('step', 'd-flex', 'justify-content-center', 'align-items-center', 'flex-grow-1');
stepElement.setAttribute('slider-value', option.value);
stepElement.setAttribute('slider-background-color', option.backgroundColor);
stepElement.innerHTML = `<span class="label text-right">${option.label}</span>`;
element.append(stepElement);
this.#steps.push({
element: stepElement,
options: option,
});
}
const mouseDown = (event) => {
element.addEventListener('mousemove', mouseMove);
element.addEventListener('touchmove', touchMove);
element.addEventListener('mouseup', mouseUp);
element.addEventListener('touchend', mouseUp);
this.#triggerChangeEvent(event.target);
};
const mouseMove = event => {
this.#triggerChangeEvent(event.target);
};
const touchMove = event => {
const touch = event.touches[0];
const target = document.elementFromPoint(touch.pageX, touch.pageY);
this.#triggerChangeEvent(target);
};
const mouseUp = () => {
element.removeEventListener('mousemove', mouseMove);
element.removeEventListener('touchmove', touchMove);
element.removeEventListener('mouseup', mouseUp);
element.removeEventListener('touchend', mouseUp);
};
element.addEventListener('mousedown', mouseDown);
element.addEventListener('touchstart', mouseDown);
}
setValue(value, forceTrigger = false) {
for (const step of this.#steps) {
if (step.options.value === value) {
this.#triggerChangeEvent(step.element, forceTrigger);
break;
}
}
}
addEventListener(type, listener) {
this.#elementForListeners.addEventListener(type, listener);
}
removeEventListener(type, listener) {
this.#elementForListeners.removeEventListener(type, listener);
}
#triggerChangeEvent(target, forceTrigger = false) {
if (!target) {
return;
}
while (target && !target.classList.contains('step')) {
target = target.parentElement;
}
if (!target) {
return;
}
if (!forceTrigger && target === this.#lastTriggerTarget) {
return;
}
this.#lastTriggerTarget = target;
const stepElementIndex = this.#steps.findIndex((step) => step.element === target);
for (let i = 0; i < this.#steps.length; i++) {
const step = this.#steps[i];
if (i >= stepElementIndex && i <= this.#middleIndex || i <= stepElementIndex && i >= this.#middleIndex) {
step.element.classList.add('active');
step.element.style.setProperty('background-color', step.element.getAttribute('slider-background-color'));
} else {
step.element.classList.remove('active');
step.element.style.setProperty('background-color', '');
}
}
const rawValue = target.getAttribute('slider-value');
const event = new CustomEvent('change', {
detail: {
target,
rawValue,
value: isNaN(parseFloat(rawValue)) ? rawValue : parseFloat(rawValue),
},
});
this.#elementForListeners.dispatchEvent(event);
}
}
<file_sep>/catrobat/statistics/scripts/charts.js
"use strict";
function createStatistics(updated, overall, previousOverall, display) {
previousOverall = previousOverall || {};
setLastUpdated($('.last_updated'), new Date(updated));
createOverallStatistics(document.getElementById('quantities'), overall, previousOverall);
createNewProgramsStatistics(document.getElementById('new-and-remixed-programs-chart'), overall.type, overall.timeline);
createPieWithStatistics({
element: document.getElementById('programs-with-multiple-scenes-pie-chart'),
data: [{
label: 'Programs with a single Scene',
value: overall.programs - overall.programsWithMultipleScenes,
previousValue: previousOverall.programs - previousOverall.programsWithMultipleScenes
}, {
label: 'Programs with multiple Scenes',
value: overall.programsWithMultipleScenes,
previousValue: previousOverall.programsWithMultipleScenes
}],
sort: true,
reverseLegend: true,
createLegend: createPieChartLegendDualValues
});
createPieWithStatistics({
element: document.getElementById('programs-with-groups-pie-chart'),
data: [{
label: 'Programs without Groups',
value: overall.programs - overall.programsWithGroups,
previousValue: previousOverall.programs - previousOverall.programsWithGroups
}, {
label: 'Programs with Groups',
value: overall.programsWithGroups,
previousValue: previousOverall.programsWithGroups
}],
sort: true,
reverseLegend: true,
createLegend: createPieChartLegendDualValues
});
createPieWithStatistics({
element: document.getElementById('landscape-programs-pie-chart'),
data: [{
label: 'Programs in portrait',
value: overall.programs - overall.programsInLandscape,
previousValue: previousOverall.programs - previousOverall.programsInLandscape
}, {
label: 'Programs in landscape',
value: overall.programsInLandscape,
previousValue: previousOverall.programsInLandscape
}],
sort: true,
reverseLegend: true,
createLegend: createPieChartLegendDualValues
});
createPieWithStatistics({
element: document.getElementById('remixed-programs-pie-chart'),
data: [{
label: 'New Programs',
value: overall.programs - overall.remixes,
previousValue: previousOverall.programs - previousOverall.remixes
}, {
label: 'Remixed Programs',
value: overall.remixes,
previousValue: previousOverall.remixes
}],
sort: true,
reverseLegend: true,
createLegend: createPieChartLegendDualValues
});
createPieWithStatistics({
element: document.getElementById('versions-pie-chart'),
data: highlightLatestLanguage(objectToDataArray(overall.languages, previousOverall.languages)),
sort: true,
createLegend: createPieChartLegendVertical
});
createPieWithStatistics({
element: document.getElementById('screen-size-pie-chart'),
data: topOccurrences(objectToDataArray(overall.screenSizes, previousOverall.screenSizes), 10, {others: true}),
sort: true,
createLegend: createPieChartLegendVertical
});
createPieWithStatistics({
element: document.getElementById('platforms-pie-chart'),
data: topOccurrences(objectToDataArray(overall.platforms, previousOverall.platforms), 10, {others: true}),
sort: true,
createLegend: createPieChartLegendVertical
});
function createOverallFormulaUsage(overall) {
if (!overall) {
return overall;
}
const result = {};
Object.entries(overall).forEach(function(entry) {
var key = entry[0];
var value = entry[1];
switch (key) {
case 'NUMBER':
return;
}
result[key] = value;
});
return result;
}
function createOverallFeatureUsage(overall) {
if (!overall) {
return overall;
}
const result = {};
Object.entries(overall).forEach(function(entry) {
var key = entry[0];
var value = entry[1];
switch (key) {
case 'hardware':
case 'externalHardware':
case 'internalHardware':
case 'bluetooth':
case 'wlan':
return;
}
result[key] = value;
});
return result;
}
createEnhancedBlockUsageChart(document.getElementById('feature-usage-chart'), createOverallFeatureUsage(overall.featureUsage), createOverallFeatureUsage(previousOverall.featureUsage), display.features);
createEnhancedBlockUsageChart(document.getElementById('brick-usage-chart'), overall.brickUsage, previousOverall.brickUsage, display.bricks);
createEnhancedBlockUsageChart(document.getElementById('programs-with-bricks-chart'), overall.programsUsingBricks, previousOverall.programsUsingBricks, display.bricks, overall.programs, previousOverall.programs);
createEnhancedBlockUsageChart(document.getElementById('formula-usage-chart'),
createOverallFormulaUsage(overall.formulaUsage), createOverallFormulaUsage(previousOverall.formulaUsage), display.formulas);
createEnhancedBlockUsageChart(document.getElementById('programs-with-formulas-chart'),
createOverallFormulaUsage(overall.programsUsingFormula), createOverallFormulaUsage(previousOverall.programsUsingFormula), display.formulas, overall.programs, previousOverall.programs);
}
function objectToDataArray(object, previousObject) {
var currentData = objectToArray(object);
if (previousObject) {
var previousData = objectToArray(previousObject);
currentData.forEach(function(d) {
var previousD = getDataByLabel(previousData, d.label);
if (previousD) {
d.previousValue = previousD.value;
} else {
d.previousValue = 0;
}
});
}
return currentData;
}
function highlightLatestLanguage(languages) {
var latestLanguage = languages.reduce(function(currentLatestVersion, version) {
if (parseFloat(currentLatestVersion.label) < parseFloat(version.label)) {
return version;
} else {
return currentLatestVersion;
}
}, {label: "0.0"});
latestLanguage.label = latestLanguage.label + ' (latest)';
return languages;
}
function setLastUpdated($e, updated) {
var updatedString = updated.toLocaleDateString();
$e.each(function() {
$(this).html(updatedString);
});
}
function createOverallStatistics(e, overall, previously) {
previously = previously || {};
var rows = [
['Programs', overall.programs, previously.programs, {relative: false}],
['Scenes', overall.scenes, previously.scenes],
['Objects', overall.objects, previously.objects],
['Groups', overall.groups, previously.groups],
['Looks', overall.looks, previously.looks],
['Sounds', overall.sounds, previously.sounds],
['Bricks', overall.bricks, previously.bricks],
['Variables', overall.programVariables + overall.objectVariables, previously.programVariables + previously.objectVariables],
['Lists', overall.programLists + overall.objectLists, previously.programLists + previously.objectLists]
];
e.innerHTML = rows.map(function(row) {
var type = row[0];
var value = row[1];
var previousValue = row[2];
var options = row[3] || {};
var diff = '';
if (isNumber(previousValue)) {
var v = value;
var p = previousValue;
if (options.relative !== false) {
v /= overall.programs;
p /= previously.programs;
}
diff = toDiffHtml(v, p);
}
return '<tr><td>' + type + '</td><td class="text-right">' + value + '</td><td class="text-right">' + diff + '</td></tr>';
}).join('\n');
}
function createNewProgramsStatistics(e, type, timeline) {
const sortedTimeline = {};
if (type === 'month') {
Object.keys(timeline).sort(function(a, b) {
if (a.startsWith('01') && b.startsWith('12')) {
return 1;
} else if (a.startsWith('12') && b.startsWith('01')) {
return -1;
} else {
return a.localeCompare(b);
}
}).forEach(function(key) {
sortedTimeline[key] = timeline[key];
});
} else {
Object.keys(timeline).sort().forEach(function(key) {
sortedTimeline[key] = timeline[key];
});
}
createChart(e, {
type: 'line',
data: {
labels: Object.keys(sortedTimeline),
datasets: [{
label: 'New remixed Programs',
backgroundColor: CHART_COLORS[1],
data: Object.values(sortedTimeline).map(function(v) {
return v.remixes;
})
}, {
label: 'New origin Programs',
backgroundColor: CHART_COLORS[0],
data: Object.values(sortedTimeline).map(function(v) {
return v.new;
})
}]
},
options: {
scales: {
yAxes: [{
stacked: true,
display: true,
ticks: {
beginAtZero: true
}
}]
}
}
});
}
function createPieWithStatistics(options) {
var e = options.element;
if (options.sort) {
options.data = sortData(options.data, null);
}
createPieChart(e.querySelector('.chart'), options);
options.createLegend(e.querySelector('.legend'), options);
createPieChartStatistics(e.querySelector('.chart-statistics'), options);
}
function createPieChartLegendDualValues(legendElement, options) {
var output = [];
options.data.forEach(function(d) {
output.push('<div class="legend-item legend-item-horizontal">' +
'<div class="legend-block" style="background: ' + d.color + '"></div>' +
'<span class="legend-text">' + d.label + '</span></div>');
});
if (options && options.reverseLegend) {
output = output.reverse();
}
legendElement.innerHTML = output.join('\n');
}
function createPieChartLegendVertical(legendElement, options) {
var output = [];
options.data.forEach(function(d) {
output.push('<div class="legend-item legend-item-vertical">' +
'<div class="legend-block" style="background: ' + d.color + '"></div>' +
'<span class="legend-text">' + d.label + '</span></div>');
});
if (options && options.reverseLegend) {
output = output.reverse();
}
legendElement.innerHTML = output.join('\n');
}
function createPieChartStatistics(e, options) {
var total = 0;
var previousTotal = 0;
options.data.forEach(function(d) {
total += d.value;
previousTotal += d.previousValue || 0;
});
var statistics = [];
options.data.forEach(function(d, i) {
statistics.push(createPieChartStatisticsRow(d.color, d.label,
total, d.value, previousTotal, d.previousValue));
});
e.innerHTML = '<table class="table table-striped">'
+ statistics.join('\n') + '</table>';
}
function createPieChartStatisticsRow(color, label, total, value, previousTotal, previous) {
return '<tr class="statistics-row">' +
'<td><div class="legend-item">' +
'<div class="legend-block" style="background: ' + color + '"></div>' +
'<span>' + label + '</span></div></td>' +
'<td class="text-right">' + value + '</td>' +
'<td class="text-right">' + (value / total * 100).toFixed(2) + '%</td>' +
'<td class="text-right">' +
(isNumber(previousTotal) && isNumber(previous)
? toDiffHtml((value / total), (previous / previousTotal), {style: '1em'})
: '') +
'</td></tr>';
}
|
d30c63c9ba1220fde43c13f16038dd8b34286c91
|
[
"JavaScript"
] | 6 |
JavaScript
|
robertpainsi/robertpainsi.github.io
|
b49c606eaf37e3d2588fc3f912a8f2faa9789884
|
21563412bde48e25ad044866ec26421b1e9c81ea
|
refs/heads/master
|
<repo_name>mort237d/Exercise-Cars-REST-TypeScript<file_sep>/src/js/index.ts
import axios, {
AxiosResponse,
AxiosError
} from "../../node_modules/axios/index";
interface ICar {
id: number;
model: string;
vendor: string;
price: number;
}
interface IOwner {
id: number;
name: string;
cars: Array<ICar>;
}
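// Example payload shape returned by the service (illustrative values only, not taken from the live API):
// ICar:   { "id": 1, "model": "Model 3", "vendor": "Tesla", "price": 400000 }
// IOwner: { "id": 1, "name": "Jane", "cars": [ ...ICar objects... ] }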
let baseUri: string = "https://itemservice-mort237d.azurewebsites.net/api/Cars";
let buttonElement: HTMLButtonElement = <HTMLButtonElement>document.getElementById("getAllButton");
buttonElement.addEventListener("click", showAllCars);
let outputElement: HTMLDivElement = <HTMLDivElement>document.getElementById("content");
let buttonByIdElement: HTMLButtonElement = <HTMLButtonElement>document.getElementById("getByIdButton");
buttonByIdElement.addEventListener("click", showCarById);
let buttonSearchElement: HTMLButtonElement = <HTMLButtonElement>document.getElementById("searchButton");
buttonSearchElement.addEventListener("click", showCarBySearch);
let selectedElement: HTMLSelectElement = <HTMLSelectElement>document.getElementById("selector");
let buttonPriceRangeElement: HTMLButtonElement = <HTMLButtonElement>document.getElementById("searcByPricehButton");
buttonPriceRangeElement.addEventListener("click", priceRangeCar);
let buttonPutElement: HTMLButtonElement = <HTMLButtonElement>document.getElementById("changeCarButton");
buttonPutElement.addEventListener("click", putCar);
let putSelectedElement: HTMLSelectElement = <HTMLSelectElement>document.getElementById("putSelector");
let buttonDeleteElement: HTMLButtonElement = <HTMLButtonElement>document.getElementById("deleteButton");
buttonDeleteElement.addEventListener("click", deleteCar);
let addButton: HTMLButtonElement = <HTMLButtonElement>document.getElementById("addButton");
addButton.addEventListener("click", addCar);
let resetButton: HTMLButtonElement = <HTMLButtonElement>document.getElementById("resetButton");
resetButton.addEventListener("click", Reset);
function Reset(): void{
axios.get(baseUri + "/reset");
}
GetCarsToselector();
function GetCarsToselector(): void {
axios.get<ICar[]>(baseUri)
.then(function (response: AxiosResponse<ICar[]>): void {
response.data.forEach((car: ICar) => {
let i = document.createElement("option");
i.value = CarToString(car);
i.text = CarToString(car);
putSelectedElement.options.add(i);
});
})
.catch(function (error: AxiosError): void { // error in GET or in generateSuccess?
if (error.response) {
outputElement.innerHTML = error.message;
} else {
outputElement.innerHTML = error.message;
}
});
}
function CarToString(car: ICar): string{
return car.vendor + " " + car.model + ", kr. " + car.price + ",-";
}
let ownerButton: HTMLButtonElement = <HTMLButtonElement>document.getElementById("getAllOwnersButton");
ownerButton.addEventListener("click", showAllOwners);
/*
function showAllOwners(): void {
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("ownersContent");
axios.get<IOwner[]>("https://itemservice-mort237d.azurewebsites.net/api/carowners")
.then(function (response: AxiosResponse<IOwner[]>): void {
let result: string = "<ul id='ownerList'>";
response.data.forEach((owner: IOwner) => {
result += "<li>" + owner.id + " " + owner.name;
owner.cars.forEach(car => {
result += "<ul><li>" + CarToString(car) + "</ul></li>";
});
});
result += "</li></ul>";
output.innerHTML = result;
})
.catch(function (error: AxiosError): void {
if (error.response) {
} else {
output.innerHTML = error.message;
}
});
}
*/
function showAllOwners(): void {
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("ownersContent");
axios.get<IOwner[]>("https://itemservice-mort237d.azurewebsites.net/api/carowners")
.then(function (response: AxiosResponse<IOwner[]>): void {
let result: string = "<table class='table table-bordered'><thead><tr><th>ID</th><th>Name</th><th>Cars</th></tr></thead><tbody>";
response.data.forEach((owner: IOwner) => {
result += "<tr><td>" + owner.id + "</td><td>" + owner.name + "</td><td>";
owner.cars.forEach(car => {
result += "<ul><li>" + CarToString(car) + "</ul></li>";
});
result += "</td>"
});
result += "</tbody></ttable>";
output.innerHTML = result;
})
.catch(function (error: AxiosError): void {
if (error.response) {
} else {
output.innerHTML = error.message;
}
});
}
function showAllCars(): void {
axios.get<ICar[]>(baseUri)
.then(function (response: AxiosResponse<ICar[]>): void {
// element.innerHTML = generateSuccessHTMLOutput(response);
// outputHtmlElement.innerHTML = generateHtmlTable(response.data);
// outputHtmlElement.innerHTML = JSON.stringify(response.data);
let result: string = "<ul id='carList'>";
response.data.forEach((car: ICar) => {
result += "<li>" + car.id + " " + car.model + " " + car.vendor + " " + car.price + "</li>";
});
result += "</ul>";
outputElement.innerHTML = result;
})
.catch(function (error: AxiosError): void { // error in GET or in generateSuccess?
if (error.response) {
// the request was made and the server responded with a status code
// that falls out of the range of 2xx
// https://kapeli.com/cheat_sheets/Axios.docset/Contents/Resources/Documents/index
outputElement.innerHTML = error.message;
} else { // something went wrong in the .then block?
outputElement.innerHTML = error.message;
}
});
}
function showCarById(): void {
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("idContent");
let inputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("getInput");
let id: string = inputElement.value;
let uri: string = baseUri + "/" + id;
axios.get<ICar>(uri)
.then(function (response: AxiosResponse<ICar>): void {
output.innerHTML = "<ul id='idCar'><li>" + response.data.id + " " + response.data.model + " " + response.data.vendor + " " + response.data.price + "</li></ul>";
console.log(output.innerHTML);
})
.catch(function (error: AxiosError): void {
if (error.response) {
output.innerHTML = error.message;
} else {
output.innerHTML = error.message;
}
});
}
function showCarBySearch(): void {
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("searchContent");
let inputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("getSearchInput");
let id: string = inputElement.value;
let uri: string = baseUri;
let selector: string = selectedElement.value;
if (selector === "model") uri += "/Model/" + id;
else if (selector === "vendor") uri += "/Vendor/" + id;
else if (selector === "price") uri += "/Price/" + id;
axios.get<ICar[]>(uri)
.then(function (response: AxiosResponse<ICar[]>): void {
let result: string = "<ul>";
response.data.forEach((car: ICar) => {
result += "<li>" + car.id + " " + car.model + " " + car.vendor + " " + car.price + "</li>";
});
result += "</ul>";
output.innerHTML = result;
/*
let selector: string = selectedElement.value;
let result: string = "<ul>";
console.log(result);
if (selector === "model") {
response.data.forEach((car: ICar) => {
console.log("model");
if (car.model === id) {
result += "<li>" + car.id + " " + car.model + " " + car.vendor + " " + car.price + "</li>";
}
});
}
else if (selector === "vendor") {
response.data.forEach((car: ICar) => {
console.log("vendor");
if (car.vendor === id) {
result += "<li>" + car.id + " " + car.model + " " + car.vendor + " " + car.price + "</li>";
}
});
}
else if (selector === "price") {
response.data.forEach((car: ICar) => {
console.log("price");
if (car.price === +id) {
result += "<li>" + car.id + " " + car.model + " " + car.vendor + " " + car.price + "</li>";
}
});
}
console.log(result);
if (result === "<ul>") {
console.log("result");
result += "<li>No data</li>"
}
result += "</ul>";
output.innerHTML = result;
*/
})
.catch(function (error: AxiosError): void {
if (error.response) {
output.innerHTML = error.message;
} else {
output.innerHTML = error.message;
}
});
}
function priceRangeCar(): void{
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("searchByPriceContent");
let lowInputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("lowPriceInput");
let highInputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("highPriceInput");
let lowPrice: string = lowInputElement.value;
let highPrice: string = highInputElement.value;
let uri: string = baseUri + "/Search?LowPrice=" + lowPrice + "&HighPrice=" + highPrice;
axios.get<ICar[]>(uri).then(function(response: AxiosResponse<ICar[]>): void{
let result: string = "<ul>";
response.data.forEach((car: ICar) => {
result += "<li>" + car.id + " " + car.model + " " + car.vendor + " " + car.price + "</li>";
});
result += "</ul>";
output.innerHTML = result;
})
.catch(function (error: AxiosError): void {
if (error.response) {
output.innerHTML = error.message;
} else {
output.innerHTML = error.message;
}
});
}
function putCar(): void{
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("putContent");
let idInputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("idInput");
let vendorInputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("vendorInput");
let modelInputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("modelInput");
let priceInputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("priceInput");
let myId: string = idInputElement.value;
let myVendor: string = vendorInputElement.value;
let myModel: string = modelInputElement.value;
let myPrice: string = priceInputElement.value;
let uri: string = baseUri + "/" + myId;
axios.put<ICar>(uri, {id: myId, model: myModel, vendor: myVendor, price: myPrice})
.then((response: AxiosResponse) => {
let message: string = "response " + response.status + " " + response.statusText;
output.innerHTML = message;
console.log(message);
})
.catch((error: AxiosError) => {
output.innerHTML = error.message;
console.log(error);
})
}
function deleteCar(): void {
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("contentDelete");
let inputElement: HTMLInputElement = <HTMLInputElement>document.getElementById("deleteInput");
let id: string = inputElement.value;
let uri: string = baseUri + "/" + id;
axios.delete<ICar>(uri)
.then(function (response: AxiosResponse<ICar>): void {
// element.innerHTML = generateSuccessHTMLOutput(response);
// outputHtmlElement.innerHTML = generateHtmlTable(response.data);
console.log(JSON.stringify(response));
output.innerHTML = response.status + " " + response.statusText;
console.log(output.innerText);
})
.catch(function (error: AxiosError): void { // error in GET or in generateSuccess?
if (error.response) {
// the request was made and the server responded with a status code
// that falls out of the range of 2xx
// https://kapeli.com/cheat_sheets/Axios.docset/Contents/Resources/Documents/index
output.innerHTML = error.message;
} else { // something went wrong in the .then block?
output.innerHTML = error.message;
}
});
}
function addCar(): void {
let addModelElement: HTMLInputElement = <HTMLInputElement>document.getElementById("addModel");
let addVendorElement: HTMLInputElement = <HTMLInputElement>document.getElementById("addVendor");
let addPriceElement: HTMLInputElement = <HTMLInputElement>document.getElementById("addPrice");
let myModel: string = addModelElement.value;
let myVendor: string = addVendorElement.value;
let myPrice: number = Number(addPriceElement.value);
let output: HTMLDivElement = <HTMLDivElement>document.getElementById("contentAdd");
// id is generated by the back-end (REST service)
axios.post<ICar>(baseUri, { model: myModel, vendor: myVendor, price: myPrice })
.then((response: AxiosResponse) => {
let message: string = "response " + response.status + " " + response.statusText;
output.innerHTML = message;
console.log(message);
})
.catch((error: AxiosError) => {
output.innerHTML = error.message;
console.log(error);
});
}
|
fc5c1383005cc6bbd00c669c74e408b5a7e09d29
|
[
"TypeScript"
] | 1 |
TypeScript
|
mort237d/Exercise-Cars-REST-TypeScript
|
c505afaf87ae5a3c6c77f2970f3ad2472fa82671
|
30187280ecc37cc07db6b1bf5f4f8b095880f313
|
refs/heads/master
|
<file_sep>apply plugin:'maven'
apply plugin: 'signing'
apply plugin:'groovy'
ext {
isBuildSnapshot = version.endsWith( "BUILD-SNAPSHOT" )
}
task sourcesJar(type: Jar) {
classifier = 'sources'
from sourceSets.main.allSource
}
task javadocJar(type: Jar, dependsOn:javadoc) {
classifier = 'javadoc'
from javadoc.destinationDir
}
artifacts {
archives jar
archives sourcesJar
archives javadocJar
}
signing {
sign configurations.archives
required { !isBuildSnapshot }
}
uploadArchives {
repositories {
mavenDeployer {
beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") {
authentication(userName: project.hasProperty("sonatypeUsername") ? project.sonatypeUsername : null,
password: project.hasProperty("sonatypePassword") ? project.sonatypePassword : null)
}
snapshotRepository(url: "http://repo.grails.org/grails/libs-snapshots-local") {
authentication(userName: project.hasProperty("artifactoryPublishUsername") ? project.artifactoryPublishUsername : null,
password: project.hasProperty("artifactoryPublishPassword") ? project.artifactoryPublishPassword : null)
}
pom.project {
name "Grails for Spring Boot - $project.name"
packaging 'jar'
description "Grails for Spring Boot - $project.name"
delegate.url 'http://www.github.com/grails/grails-boot/'
licenses {
license {
name 'The Apache Software License, Version 2.0'
url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
distribution 'repo'
}
}
scm {
delegate.url 'scm:<EMAIL>:grails/grails-boot.git'
connection 'scm:<EMAIL>:grails/grails-boot.git'
developerConnection 'scm:<EMAIL>:grails/grails-boot.git'
}
licenses {
license {
name 'The Apache Software License, Version 2.0'
delegate.url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
distribution 'repo'
}
}
developers {
developer {
id 'graemerocher'
name '<NAME>'
}
}
}
}
}
}
<file_sep>version = "1.0.0.BUILD-SNAPSHOT"
configurations {
grails
}
dependencies {
compile "org.springframework:spring-orm:$springVersion", {
exclude group:'org.springframework', module:'spring-jdbc'
exclude group:'org.springframework', module:'spring-core'
}
compile "org.springframework:spring-jdbc:$springVersion", {
exclude group:'org.springframework', module:'spring-tx'
exclude group:'org.springframework', module:'spring-core'
exclude group:'org.springframework', module:'spring-beans'
}
compile "org.springframework:spring-tx:$springVersion", {
exclude group:'org.springframework', module:'spring-core'
exclude group:'org.springframework', module:'spring-beans'
}
compile project(":grails-datastore-core")
grails("org.grails:grails-core:$grailsVersion")
grails("org.grails:grails-bootstrap:$grailsVersion") {
transitive = false
}
}
sourceSets {
main {
compileClasspath += configurations.grails
}
test {
compileClasspath += configurations.grails
}
}
<file_sep>package org.grails.datastore.gorm.neo4j;
/**
* Created by stefan on 10.04.14.
*/
public interface IdGenerator {
public long nextId();
}
<file_sep>package org.grails.datastore.mapping.jcr;
import org.grails.datastore.mapping.transactions.Transaction;
import org.springframework.extensions.jcr.JcrSessionFactory;
import org.springframework.extensions.jcr.jackrabbit.support.UserTxSessionHolder;
import org.springframework.transaction.IllegalTransactionStateException;
import org.springframework.transaction.TransactionSystemException;
import org.springframework.transaction.UnexpectedRollbackException;
/**
* @author <NAME>
* @since 1.0
*/
public class JcrTransaction implements Transaction<UserTxSessionHolder> {
private UserTxSessionHolder transaction;
private boolean rollbackCalled;
private boolean commitCalled;
public JcrTransaction(JcrSessionFactory sessionFactory) {
try {
transaction = new UserTxSessionHolder(sessionFactory.getSession());
transaction.getTransaction().begin();
}
catch (Exception e) {
throw new TransactionSystemException("Exception occurred beginning JackRabbit transaction: " + e.getMessage());
}
}
public void commit() {
if (rollbackCalled) {
throw new IllegalTransactionStateException(
"Cannot call commit after rollback. Start another transaction first!");
}
try {
transaction.getTransaction().commit();
commitCalled = true;
}
catch (Exception e) {
throw new TransactionSystemException(
"Exception occurred committing back JackRabbit transaction: " + e.getMessage());
}
}
public boolean isActive() {
return !commitCalled && !rollbackCalled;
}
public void setTimeout(int timeout) {
try {
transaction.getTransaction().setTransactionTimeout(timeout);
}
catch (Exception e) {
throw new TransactionSystemException(
"Exception occurred setting timeout JackRabbit transaction: " + e.getMessage());
}
}
public void rollback() {
if (rollbackCalled) {
throw new UnexpectedRollbackException(
"Cannot rollback JackRabbit transaction. Transaction already rolled back!");
}
try {
transaction.getTransaction().rollback();
rollbackCalled = true;
}
catch (Exception e) {
throw new TransactionSystemException(
"Exception occurred rolling back JackRabbit transaction: " + e.getMessage());
}
}
public UserTxSessionHolder getNativeTransaction() {
return transaction;
}
}<file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.simpledb;
import static org.grails.datastore.mapping.config.utils.ConfigUtils.read;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.grails.datastore.mapping.cache.TPCacheAdapterRepository;
import org.grails.datastore.mapping.core.AbstractDatastore;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.types.Association;
import org.grails.datastore.mapping.model.types.OneToMany;
import org.grails.datastore.mapping.simpledb.config.SimpleDBMappingContext;
import org.grails.datastore.mapping.simpledb.engine.AssociationKey;
import org.grails.datastore.mapping.simpledb.engine.SimpleDBAssociationInfo;
import org.grails.datastore.mapping.simpledb.engine.SimpleDBDomainResolver;
import org.grails.datastore.mapping.simpledb.engine.SimpleDBDomainResolverFactory;
import org.grails.datastore.mapping.simpledb.engine.SimpleDBIdGenerator;
import org.grails.datastore.mapping.simpledb.engine.SimpleDBIdGeneratorFactory;
import org.grails.datastore.mapping.simpledb.engine.SimpleDBNativeItem;
import org.grails.datastore.mapping.simpledb.model.types.SimpleDBTypeConverterRegistrar;
import org.grails.datastore.mapping.simpledb.util.DelayAfterWriteSimpleDBTemplateDecorator;
import org.grails.datastore.mapping.simpledb.util.SimpleDBTemplate;
import org.grails.datastore.mapping.simpledb.util.SimpleDBTemplateImpl;
import org.grails.datastore.mapping.simpledb.util.SimpleDBUtil;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.convert.converter.ConverterRegistry;
/**
* A Datastore implementation for the AWS SimpleDB document store.
*
* @author <NAME> based on Graeme Rocher code for MongoDb and Redis
* @since 0.1
*/
public class SimpleDBDatastore extends AbstractDatastore implements InitializingBean, MappingContext.Listener {
public static final String SECRET_KEY = "secretKey";
public static final String ACCESS_KEY = "accessKey";
public static final String ENDPOINT = "endpoint"; //optional, if specified will be used to explicitly set AWS endpoint. See http://docs.aws.amazon.com/general/latest/gr/rande.html#sdb_region
public static final String DOMAIN_PREFIX_KEY = "domainNamePrefix";
public static final String DELAY_AFTER_WRITES_MS = "delayAfterWritesMS"; //used for testing - to fight eventual consistency if this flag value is 'true' it will add specified pause after writes
// private Map<PersistentEntity, SimpleDBTemplate> simpleDBTemplates = new ConcurrentHashMap<PersistentEntity, SimpleDBTemplate>();
private SimpleDBTemplate simpleDBTemplate; //currently there is no need to create template per entity, we can share same instance
protected Map<AssociationKey, SimpleDBAssociationInfo> associationInfoMap = new HashMap<AssociationKey, SimpleDBAssociationInfo>(); //contains entries only for those associations that need a dedicated domain
protected Map<PersistentEntity, SimpleDBDomainResolver> entityDomainResolverMap = new HashMap<PersistentEntity, SimpleDBDomainResolver>();
protected Map<PersistentEntity, SimpleDBIdGenerator> entityIdGeneratorMap = new HashMap<PersistentEntity, SimpleDBIdGenerator>();
private String domainNamePrefix;
public SimpleDBDatastore() {
this(new SimpleDBMappingContext(), Collections.<String, String>emptyMap(), null, null);
}
/**
* Constructs a SimpleDBDatastore using the given MappingContext and connection details map.
*
* @param mappingContext The SimpleDBMappingContext
* @param connectionDetails The connection details containing the {@link #ACCESS_KEY} and {@link #SECRET_KEY} settings
*/
public SimpleDBDatastore(MappingContext mappingContext,
Map<String, String> connectionDetails, ConfigurableApplicationContext ctx, TPCacheAdapterRepository<SimpleDBNativeItem> adapterRepository) {
super(mappingContext, connectionDetails, ctx, adapterRepository);
if (mappingContext != null) {
mappingContext.addMappingContextListener(this);
}
initializeConverters(mappingContext);
domainNamePrefix = read(String.class, DOMAIN_PREFIX_KEY, connectionDetails, null);
}
public SimpleDBDatastore(MappingContext mappingContext, Map<String, String> connectionDetails) {
this(mappingContext, connectionDetails, null, null);
}
public SimpleDBDatastore(MappingContext mappingContext) {
this(mappingContext, Collections.<String, String>emptyMap(), null, null);
}
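// Construction sketch (not from the original source; the key values are placeholders):
//
//   Map<String, String> connectionDetails = new HashMap<String, String>();
//   connectionDetails.put(SimpleDBDatastore.ACCESS_KEY, "<aws-access-key>");
//   connectionDetails.put(SimpleDBDatastore.SECRET_KEY, "<aws-secret-key>");
//   connectionDetails.put(SimpleDBDatastore.DOMAIN_PREFIX_KEY, "DEV_"); // optional, see getDomainNamePrefix()
//   SimpleDBDatastore datastore = new SimpleDBDatastore(new SimpleDBMappingContext(), connectionDetails);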
public SimpleDBTemplate getSimpleDBTemplate(PersistentEntity entity) {
// return simpleDBTemplates.get(entity);
return simpleDBTemplate;
}
public SimpleDBTemplate getSimpleDBTemplate() {
return simpleDBTemplate;
}
@Override
protected Session createSession(Map<String, String> connDetails) {
String delayAfterWrite = read(String.class, DELAY_AFTER_WRITES_MS, connectionDetails, null);
if (delayAfterWrite != null && !"".equals(delayAfterWrite)) {
return new DelayAfterWriteSimpleDBSession(this, getMappingContext(), getApplicationEventPublisher(), Integer.parseInt(delayAfterWrite), cacheAdapterRepository);
}
return new SimpleDBSession(this, getMappingContext(), getApplicationEventPublisher(), cacheAdapterRepository);
}
public void afterPropertiesSet() throws Exception {
for (PersistentEntity entity : mappingContext.getPersistentEntities()) {
// Only create SimpleDB templates for entities that are mapped with SimpleDB
if (!entity.isExternal()) {
createSimpleDBTemplate(entity);
}
}
}
protected void createSimpleDBTemplate(PersistentEntity entity) {
if (simpleDBTemplate != null) {
return;
}
String accessKey = read(String.class, ACCESS_KEY, connectionDetails, null);
String secretKey = read(String.class, SECRET_KEY, connectionDetails, null);
String delayAfterWrite = read(String.class, DELAY_AFTER_WRITES_MS, connectionDetails, null);
String endpoint = read(String.class, ENDPOINT, connectionDetails, null);
simpleDBTemplate = new SimpleDBTemplateImpl(accessKey, secretKey, endpoint);
if (delayAfterWrite != null && !"".equals(delayAfterWrite)) {
simpleDBTemplate = new DelayAfterWriteSimpleDBTemplateDecorator(simpleDBTemplate, Integer.parseInt(delayAfterWrite));
}
}
/**
* If specified, returns domain name prefix so that same AWS account can be used for more than one environment (DEV/TEST/PROD etc).
* @return null if name was not specified in the configuration
*/
public String getDomainNamePrefix() {
return domainNamePrefix;
}
public void persistentEntityAdded(PersistentEntity entity) {
createSimpleDBTemplate(entity);
analyzeAssociations(entity);
createEntityDomainResolver(entity);
createEntityIdGenerator(entity);
}
/**
* If the specified association has a dedicated AWS domains, returns info for that association,
* otherwise returns null.
*/
public SimpleDBAssociationInfo getAssociationInfo(Association<?> association) {
return associationInfoMap.get(generateAssociationKey(association));
}
/**
* Returns domain resolver for the specified entity.
* @param entity
* @return
*/
public SimpleDBDomainResolver getEntityDomainResolver(PersistentEntity entity) {
return entityDomainResolverMap.get(entity);
}
/**
* Returns id generator for the specified entity.
* @param entity
* @return
*/
public SimpleDBIdGenerator getEntityIdGenerator(PersistentEntity entity) {
return entityIdGeneratorMap.get(entity);
}
protected void createEntityDomainResolver(PersistentEntity entity) {
SimpleDBDomainResolverFactory resolverFactory = new SimpleDBDomainResolverFactory();
SimpleDBDomainResolver domainResolver = resolverFactory.buildResolver(entity, this);
entityDomainResolverMap.put(entity, domainResolver);
}
protected void createEntityIdGenerator(PersistentEntity entity) {
SimpleDBIdGeneratorFactory factory = new SimpleDBIdGeneratorFactory();
SimpleDBIdGenerator generator = factory.buildIdGenerator(entity, this);
entityIdGeneratorMap.put(entity, generator);
}
@Override
protected void initializeConverters(MappingContext mappingContext) {
final ConverterRegistry conversionService = mappingContext.getConverterRegistry();
new SimpleDBTypeConverterRegistrar().register(conversionService);
}
/**
* Analyzes associations and for those associations that need to be stored
* in a dedicated AWS domain, creates info object with details for that association.
*/
protected void analyzeAssociations(PersistentEntity entity) {
for (Association<?> association : entity.getAssociations()) {
if (association instanceof OneToMany && !association.isBidirectional()) {
String associationDomainName = generateAssociationDomainName(association);
associationInfoMap.put(generateAssociationKey(association), new SimpleDBAssociationInfo(associationDomainName));
}
}
}
protected AssociationKey generateAssociationKey(Association<?> association) {
return new AssociationKey(association.getOwner(), association.getName());
}
protected String generateAssociationDomainName(Association<?> association) {
String ownerDomainName = SimpleDBUtil.getMappedDomainName(association.getOwner());
return SimpleDBUtil.getPrefixedDomainName(domainNamePrefix, ownerDomainName.toUpperCase()+"_"+association.getName().toUpperCase());
}
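// Example (a sketch, assuming the mapped domain name is simply the owning class name and the prefix is
// prepended as-is): a unidirectional one-to-many from Person to Pet named "pets" with domain prefix "DEV_"
// would resolve to the association domain "DEV_PERSON_PETS", i.e. prefix + owner domain (upper-cased) + "_" +
// association name (upper-cased).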
}
<file_sep>buildscript {
repositories {
mavenCentral()
mavenRepo urls: "http://repo.grails.org/grails/core"
}
dependencies {
classpath "org.grails:grails-gradle-plugin:1.0",
"org.grails:grails-bootstrap:1.3.4"
}
}
apply plugin: "grails"
grailsVersion = "1.3.4"
repositories {
flatDir dirs: "lib"
}
configurations {
compile.exclude module: "commons-logging"
compile.exclude module: 'xml-apis'
}
repositories {
mavenRepo urls:"http://repository.codehaus.org/"
}
dependencies {
compile project(":grails-datastore-core"),
project(":grails-datastore-redis"),
project(":grails-datastore-gorm"),
project(":grails-datastore-gorm-redis")
runtime "org.aspectj:aspectjrt:1.6.8"
runtime "org.slf4j:slf4j-simple:1.5.8",
"hsqldb:hsqldb:1.8.0.5",
"net.sf.ehcache:ehcache-core:1.7.1"
compile "org.grails:grails-crud:$grailsVersion",
"org.grails:grails-gorm:$grailsVersion"
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.keyvalue.mapping.config;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import org.springframework.core.annotation.AnnotationUtils;
import org.grails.datastore.mapping.annotation.Index;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.reflect.ClassPropertyFetcher;
import org.springframework.util.ReflectionUtils;
/**
* Uses annotations to configure entity mappings
*
* @author <NAME>
* @since 1.1
*/
@SuppressWarnings("rawtypes")
public class AnnotationKeyValueMappingFactory extends KeyValueMappingFactory {
public AnnotationKeyValueMappingFactory(String keyspace) {
super(keyspace);
}
@Override
public KeyValue createMappedForm(PersistentProperty mpp) {
final Class javaClass = mpp.getOwner().getJavaClass();
final ClassPropertyFetcher cpf = ClassPropertyFetcher.forClass(javaClass);
final PropertyDescriptor pd = cpf.getPropertyDescriptor(mpp.getName());
final KeyValue kv = super.createMappedForm(mpp);
Index index = AnnotationUtils.getAnnotation(pd.getReadMethod(), Index.class);
if (index == null) {
final Field field = ReflectionUtils.findField(javaClass, mpp.getName());
if (field != null) {
ReflectionUtils.makeAccessible(field);
index = field.getAnnotation(Index.class);
}
}
if (index != null) {
kv.setIndex(true);
}
return kv;
}
}
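// Usage sketch (hypothetical entity, not part of this repository): the @Index annotation is looked up on the
// getter first and then on the backing field, and this factory flips kv.setIndex(true) when it is present:
//
//   class Person {
//       Long id;
//       @Index
//       String email;   // field-level annotation is found via ReflectionUtils.findField
//   }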
<file_sep>package org.grails.datastore.mapping.query;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.jcr.Node;
import javax.jcr.NodeIterator;
import javax.jcr.Property;
import javax.jcr.RepositoryException;
import javax.jcr.query.QueryResult;
import org.springframework.beans.SimpleTypeConverter;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.jcr.JcrSession;
import org.grails.datastore.mapping.jcr.engine.JcrEntityPersister;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.query.projections.ManualProjections;
import org.springframework.extensions.jcr.JcrTemplate;
/**
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings("hiding")
public class JcrQuery extends Query {
private JcrEntityPersister entityPersister;
private JcrTemplate jcrTemplate;
private SimpleTypeConverter typeConverter;
private ManualProjections manualProjections;
public static final String ROOT_NODE = "//";
public static final String GREATER_THAN_EQUALS = " >= ";
public static final String LESS_THAN_EQUALS = " <= ";
public static final String LOGICAL_AND = " and ";
public static final String GREATER_THAN = " > ";
public static final char SPACE = ' ';
public static final char AT_SIGN = '@';
public static final String LESS_THAN = " < ";
public static final String EQUALS = " = ";
public static final String NOT_EQUALS = " != ";
public static final String ASCENDING = "ascending";
public static final String DESCENDING = "descending";
public static final String LOGICAL_OR = " or ";
public static final String XS_DATE = "xs:date";
public JcrQuery(JcrSession session, JcrTemplate jcrTemplate, PersistentEntity persistentEntity, JcrEntityPersister entityPersister) {
super(session, persistentEntity);
this.entityPersister = entityPersister;
this.jcrTemplate = jcrTemplate;
this.manualProjections = new ManualProjections(entity);
typeConverter = new SimpleTypeConverter();
}
protected JcrQuery(Session session, PersistentEntity entity) {
super(session, entity);
}
@Override
protected List executeQuery(PersistentEntity entity, Junction criteria) {
final ProjectionList projectionList = projections();
List<String> uuids = new ArrayList<String>();
List finalResults = null;
if (criteria.isEmpty() && max == -1) {
final String queryString = getQueryString();
QueryResult qr = jcrTemplate.query(queryString, javax.jcr.query.Query.XPATH);
try {
NodeIterator itr = qr.getNodes();
while (itr.hasNext()) {
Node node = itr.nextNode();
uuids.add(node.getUUID());
}
} catch (RepositoryException e) {
throw new InvalidDataAccessResourceUsageException("Cannot execute query. Entity [" + getEntity() + "] does not exist in the repository");
}
finalResults = getSession().retrieveAll(getEntity().getJavaClass(), uuids);
if (projectionList.isEmpty()) {
return finalResults;
}
List results = new ArrayList();
for (Projection projection : projectionList.getProjectionList()) {
if (projection instanceof CountProjection) {
results.add(finalResults.size());
} else if (projection instanceof MinProjection) {
MinProjection min = (MinProjection) projection;
results.add(manualProjections.min(finalResults, min.getPropertyName()));
} else if (projection instanceof MaxProjection) {
MaxProjection max = (MaxProjection) projection;
results.add(manualProjections.max(finalResults, max.getPropertyName()));
} else if (projection instanceof IdProjection) {
results.add(uuids);
} else if (projection.getClass() == PropertyProjection.class) {
PropertyProjection propertyProjection = (PropertyProjection) projection;
final String propName = propertyProjection.getPropertyName();
PersistentProperty prop = entityPersister.getPersistentEntity().getPropertyByName(propName);
Class type = prop.getType();
List values = new ArrayList();
for (String uuid : uuids) {
Node node = jcrTemplate.getNodeByUUID(uuid);
try {
if (node.hasProperty(propName)) {
Property nodeProp = node.getProperty(propName);
values.add(nodeProp.getString());
}
} catch (RepositoryException e) {
throw new InvalidDataAccessResourceUsageException("Cannot execute PropertyProjection criterion on non-existent property: name[" + prop + "]");
}
}
final PersistentEntity associatedEntity = getSession().getMappingContext().getPersistentEntity(type.getName());
final boolean isEntityType = associatedEntity != null;
if (isEntityType) {
return getSession().retrieveAll(type, values);
}
for (Object value : values) {
results.add(typeConverter.convertIfNecessary(value, type));
}
}
}
finalResults = results;
} else {
final List params = new ArrayList();
final String queryString = getQueryString(params, true);
QueryResult qr = jcrTemplate.query(queryString, javax.jcr.query.Query.XPATH);
try {
NodeIterator itr = qr.getNodes();
while (itr.hasNext()) {
Node node = itr.nextNode();
uuids.add(node.getUUID());
}
} catch (RepositoryException e) {
throw new InvalidDataAccessResourceUsageException("Cannot execute query. Entity [" + getEntity() + "] does not exist in the repository");
}
if (uuids.isEmpty()) {
return Collections.emptyList();
}
finalResults = getSession().retrieveAll(getEntity().getJavaClass(), uuids);
IdProjection idProjection = null;
if (!projectionList.isEmpty()) {
List projectionResults = new ArrayList();
for (Projection projection : projectionList.getProjectionList()) {
final String projectionType = projection.getClass().getSimpleName();
if (projection instanceof CountProjection) {
projectionResults.add(finalResults.size());
} else if (projection instanceof MaxProjection) {
MaxProjection max = (MaxProjection) projection;
projectionResults.add(manualProjections.max(finalResults, max.getPropertyName()));
} else if (projection instanceof MinProjection) {
MinProjection min = (MinProjection) projection;
projectionResults.add(manualProjections.min(finalResults, min.getPropertyName()));
} else {
if (projection instanceof SumProjection) {
return unsupportedProjection(projectionType);
}
if (projection instanceof AvgProjection) {
return unsupportedProjection(projectionType);
//} else if (projection instanceof PropertyProjection) {
// PropertyProjection propertyProjection = (PropertyProjection) projection;
// final String propName = propertyProjection.getPropertyName();
// PersistentProperty prop = entityPersister.getPersistentEntity().getPropertyByName(propName);
// return unsupportedProjection(projectionType);
}
if (projection instanceof IdProjection) {
idProjection = (IdProjection) projection;
}
}
}
if (!projectionResults.isEmpty()) {
return projectionResults;
}
if (idProjection != null) {
return uuids;
}
}
final int total = finalResults.size();
if (offset > total) {
finalResults = Collections.emptyList();
} else {
int from = offset;
// use an exclusive upper bound for pagination, capped at the total number of results
int to = max == -1 ? total : Math.min(offset + max, total);
if (max != -1) {
finalResults = finalResults.subList(from, to);
}
}
}
return finalResults;
}
@SuppressWarnings("unused")
private List applyProjections(List results, ProjectionList projections) {
List projectedResults = new ArrayList();
for (Projection projection : projections.getProjectionList()) {
if (projection instanceof CountProjection) {
projectedResults.add(results.size());
} else if (projection instanceof MinProjection) {
MinProjection min = (MinProjection) projection;
projectedResults.add(manualProjections.min(results, min.getPropertyName()));
} else if (projection instanceof MaxProjection) {
MaxProjection min = (MaxProjection) projection;
projectedResults.add(manualProjections.max(results, min.getPropertyName()));
}
}
if (projectedResults.isEmpty()) {
return results;
}
return projectedResults;
}
protected String getQueryString(List params, @SuppressWarnings("unused") boolean distinct) {
final StringBuilder q = new StringBuilder();
q.append(ROOT_NODE);
q.append(getEntity().getJavaClass().getSimpleName());
if (!criteria.isEmpty()) {
q.append("[");
buildCondition(entity, criteria, q, 0, params);
q.append("]");
}
validateQuery(q);
for (Order order : orderBy) {
String direction = null;
if (order.getDirection().equals(Order.Direction.ASC)) {
direction = ASCENDING;
}
else {
direction = DESCENDING;
}
q.append(SPACE);
q.append("order by @");
q.append(order.getProperty());
q.append(SPACE);
q.append(direction);
}
return q.toString();
}
private StringBuilder validateQuery(StringBuilder q) {
String tmp = q.toString();
int length = tmp.length();
Character c = tmp.charAt(length - 2);
if (c.equals('[')) {
// strip a trailing empty criteria block ("[]") left when no criterion was rendered
q.delete(length - 2, length);
}
return q;
}
private static int buildCondition(PersistentEntity entity, Junction criteria, StringBuilder q, int index, List params) {
final List<Criterion> criterionList = criteria.getCriteria();
for (Iterator<Criterion> iterator = criterionList.iterator(); iterator.hasNext();) {
Criterion criterion = iterator.next();
final String operator = criteria instanceof Conjunction ? LOGICAL_AND : LOGICAL_OR;
CriterionHandler qh = criterionHandlers.get(criterion.getClass());
if (qh != null) {
qh.handle(entity, criterion, q, params);
}
if (iterator.hasNext()) {
q.append(operator);
}
}
return index;
}
private static interface CriterionHandler<T> {
void handle(PersistentEntity entity, T criterion, StringBuilder q, List params);
}
private static final Map<Class, CriterionHandler> criterionHandlers = new HashMap() {{
put(Like.class, new CriterionHandler<Like>() {
public void handle(PersistentEntity entity, Like criterion, StringBuilder q, List params) {
String property = criterion.getProperty();
String pattern = criterion.getPattern();
validateProperty(entity, property, Like.class);
q.append("jcr:like(@")
.append(property)
.append(",")
.append("'")
.append(pattern)
.append("')");
}
});
put(Between.class, new CriterionHandler<Between>() {
public void handle(PersistentEntity entity, Between criterion, StringBuilder q, List params) {
final String name = criterion.getProperty();
final Object from = criterion.getFrom();
final Object to = criterion.getTo();
validateProperty(entity, name, Between.class);
// constrain both bounds of the property
q.append(AT_SIGN).append(name).append(GREATER_THAN_EQUALS).append(from)
.append(LOGICAL_AND)
.append(AT_SIGN).append(name).append(LESS_THAN_EQUALS).append(to);
}
});
put(GreaterThanEquals.class, new CriterionHandler<GreaterThanEquals>() {
public void handle(PersistentEntity entity, GreaterThanEquals criterion, StringBuilder q, List params) {
final String name = criterion.getProperty();
final Object value = criterion.getValue();
validateProperty(entity, name, GreaterThanEquals.class);
q.append(AT_SIGN)
.append(name)
.append(GREATER_THAN_EQUALS);
if (value instanceof Calendar || value instanceof Date) {
// xs:date expects the ISO-8601 lexical form (yyyy-MM-dd)
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date date = value instanceof Calendar ? ((Calendar) value).getTime() : (Date) value;
q.append(XS_DATE);
q.append("('");
q.append(sdf.format(date));
q.append("')");
} else q.append(value);
}
});
put(GreaterThan.class, new CriterionHandler<GreaterThan>() {
public void handle(PersistentEntity entity, GreaterThan criterion, StringBuilder q, List params) {
final String name = criterion.getProperty();
final Object value = criterion.getValue();
validateProperty(entity, name, GreaterThan.class);
q.append(AT_SIGN)
.append(name)
.append(GREATER_THAN);
if (value instanceof Calendar || value instanceof Date) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date date = value instanceof Calendar ? ((Calendar) value).getTime() : (Date) value;
q.append(XS_DATE);
q.append("('");
q.append(sdf.format(date));
q.append("')");
} else q.append(value);
}
});
put(LessThanEquals.class, new CriterionHandler<LessThanEquals>() {
public void handle(PersistentEntity entity, LessThanEquals criterion, StringBuilder q, List params) {
final String name = criterion.getProperty();
final Object value = criterion.getValue();
validateProperty(entity, name, LessThanEquals.class);
q.append(AT_SIGN)
.append(name)
.append(LESS_THAN_EQUALS);
if (value instanceof Calendar || value instanceof Date) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date date = value instanceof Calendar ? ((Calendar) value).getTime() : (Date) value;
q.append(XS_DATE);
q.append("('");
q.append(sdf.format(date));
q.append("')");
} else q.append(value);
}
});
put(LessThan.class, new CriterionHandler<LessThan>() {
public void handle(PersistentEntity entity, LessThan criterion, StringBuilder q, List params) {
final String name = criterion.getProperty();
final Object value = criterion.getValue();
validateProperty(entity, name, LessThan.class);
q.append(AT_SIGN)
.append(name)
.append(LESS_THAN);
if (value instanceof Calendar || value instanceof Date) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date date = value instanceof Calendar ? ((Calendar) value).getTime() : (Date) value;
q.append(XS_DATE);
q.append("('");
q.append(sdf.format(date));
q.append("')");
} else q.append(value);
}
});
put(Equals.class, new CriterionHandler<Equals>() {
public void handle(PersistentEntity entity, Equals eq, StringBuilder q, List params) {
final String name = eq.getProperty();
final Object value = eq.getValue();
validateProperty(entity, name, Equals.class);
q.append(AT_SIGN)
.append(name)
.append(EQUALS);
if (value instanceof String || value instanceof Boolean) {
q.append("'")
.append(value)
.append("'");
} else q.append(value);
}
});
put(NotEquals.class, new CriterionHandler<NotEquals>() {
public void handle(PersistentEntity entity, NotEquals nqe, StringBuilder q, List params) {
final String name = nqe.getProperty();
final Object value = nqe.getValue();
validateProperty(entity, name, Equals.class);
q.append(AT_SIGN)
.append(name)
.append(NOT_EQUALS);
if (value instanceof String || value instanceof Boolean) {
q.append("'")
.append(value)
.append("'");
} else q.append(value);
}
});
put(In.class, new CriterionHandler<In>() {
public void handle(PersistentEntity entity, In criterion, StringBuilder q, List params) {
final String name = criterion.getName();
validateProperty(entity, name, In.class);
Disjunction dis = new Disjunction();
for (Object value : criterion.getValues()) {
dis.add(Restrictions.eq(name, value));
}
buildCondition(entity, dis, q, 0, params);
}
});
put(Conjunction.class, new CriterionHandler<Junction>() {
public void handle(PersistentEntity entity, Junction criterion, StringBuilder q, List params) {
buildCondition(entity, criterion, q, 0, params);
}
});
put(Disjunction.class, new CriterionHandler<Junction>() {
public void handle(PersistentEntity entity, Junction criterion, StringBuilder q, List params) {
buildCondition(entity, criterion, q, 0, params);
}
});
put(Negation.class, new CriterionHandler<Negation>() {
public void handle(PersistentEntity entity, Negation criterion, StringBuilder q, List params) {
List<Criterion> cris = criterion.getCriteria();
Conjunction con = new Conjunction();
for (Criterion c : cris) {
if (c instanceof Equals) {
con.add(Restrictions.ne(((Equals) c).getProperty(), ((Equals) c).getValue()));
}
if (c instanceof Conjunction) {
con.add(c);
}
}
buildCondition(entity, con, q, 0, params);
}
});
}};
/**
* Obtains the query string with variables embedded within the Query
*
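* <p>For example (a hedged illustration, not output captured from this project), a query built
* for a {@code Person} entity restricted on age and ordered by name might look like:</p>
* <pre>
* //Person[@age &gt;= 18] order by @name ascending
* </pre>
*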
* @return The query string
*/
public String getQueryString() {
return getQueryString(null, false);
}
private static void validateProperty(PersistentEntity entity, String name, Class criterionType) {
if (entity.getIdentity().getName().equals(name)) return;
PersistentProperty prop = entity.getPropertyByName(name);
if (prop == null) {
throw new InvalidDataAccessResourceUsageException("Cannot use [" + criterionType.getSimpleName() + "] criterion on non-existent property: " + name);
}
}
private List unsupportedProjection(String projectionType) {
throw new InvalidDataAccessResourceUsageException("Cannot use [" + projectionType + "] projection. [" + projectionType + "] projections are not currently supported.");
}
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.gemfire.engine;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import javax.persistence.CascadeType;
import org.grails.datastore.mapping.core.OptimisticLockingException;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.engine.LockableEntityPersister;
import org.grails.datastore.mapping.engine.event.AbstractPersistenceEvent;
import org.grails.datastore.mapping.engine.event.PreDeleteEvent;
import org.grails.datastore.mapping.engine.event.PreInsertEvent;
import org.grails.datastore.mapping.engine.event.PreUpdateEvent;
import org.grails.datastore.mapping.gemfire.GemfireDatastore;
import org.grails.datastore.mapping.gemfire.GemfireSession;
import org.grails.datastore.mapping.gemfire.query.GemfireQuery;
import org.grails.datastore.mapping.keyvalue.mapping.config.KeyValue;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.types.Association;
import org.grails.datastore.mapping.model.types.OneToMany;
import org.grails.datastore.mapping.model.types.ToOne;
import org.grails.datastore.mapping.query.Query;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.CannotAcquireLockException;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.data.gemfire.GemfireCallback;
import org.springframework.data.gemfire.GemfireTemplate;
import com.gemstone.gemfire.GemFireCheckedException;
import com.gemstone.gemfire.GemFireException;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
/**
* A persister capable of storing objects in a Gemfire region.
*
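* <p>A minimal sketch of the pessimistic locking contract exposed by this persister
* (hypothetical usage, not taken from this codebase):</p>
* <pre>
* Object person = persister.lock(id, 10);   // waits up to 10 seconds for the distributed lock
* try {
*     // mutate and persist the locked instance
* } finally {
*     persister.unlock(person);              // releases the region's distributed lock
* }
* </pre>
*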
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public class GemfireEntityPersister extends LockableEntityPersister {
private GemfireDatastore gemfireDatastore;
private Map<Object, Lock> distributedLocksHeld = new ConcurrentHashMap<Object, Lock>();
private static AtomicInteger identifierGenerator = new AtomicInteger(0);
public GemfireEntityPersister(MappingContext mappingContext, PersistentEntity entity,
Session session, ApplicationEventPublisher publisher) {
super(mappingContext, entity, session, publisher);
this.gemfireDatastore = (GemfireDatastore) session.getDatastore();
}
@Override
public Object lock(final Serializable id) throws CannotAcquireLockException {
final GemfireTemplate template = gemfireDatastore.getTemplate(getPersistentEntity());
return template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
final Lock lock = region.getDistributedLock(id);
lock.lock();
final Object o = region.get(id);
distributedLocksHeld.put(o, lock);
return o;
}
});
}
@Override
public Object lock(final Serializable id, final int timeout) throws CannotAcquireLockException {
final GemfireTemplate template = gemfireDatastore.getTemplate(getPersistentEntity());
return template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
final Lock lock = region.getDistributedLock(id);
try {
if (lock.tryLock(timeout, TimeUnit.SECONDS)) {
final Object o = region.get(id);
distributedLocksHeld.put(o, lock);
return o;
}
throw new CannotAcquireLockException("Timeout acquiring Gemfire lock on object type ["+getPersistentEntity()+"] with identifier ["+id+"]");
} catch (InterruptedException e) {
throw new CannotAcquireLockException("Cannot acquire Gemfire lock on object type ["+getPersistentEntity()+"] with identifier ["+id+"]: " + e.getMessage(), e);
}
}
});
}
@Override
public boolean isLocked(Object o) {
return distributedLocksHeld.containsKey(o);
}
@Override
public void unlock(Object o) {
final Lock lock = distributedLocksHeld.get(o);
if (lock != null) {
lock.unlock();
}
}
@Override
protected List<Object> retrieveAllEntities(PersistentEntity persistentEntity, final Serializable[] keys) {
return retrieveAllEntities(persistentEntity, Arrays.asList(keys));
}
@Override
protected List<Object> retrieveAllEntities(final PersistentEntity persistentEntity, final Iterable<Serializable> keys) {
final GemfireTemplate template = gemfireDatastore.getTemplate(persistentEntity);
return (List<Object>) template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
if (keys instanceof Collection) {
return getListOfValues(region.getAll((Collection) keys));
}
Collection keyList = new ArrayList();
for (Serializable key : keys) {
keyList.add(key);
}
return getListOfValues(region.getAll(keyList));
}
List getListOfValues(final Map all) {
if (all != null) {
Collection nativeEntries = all.values();
List values = new ArrayList(nativeEntries.size());
for (Object entry : nativeEntries) {
values.add(handleDatastoreLoad(persistentEntity, entry));
}
return values;
}
return Collections.emptyList();
}
});
}
@Override
protected List<Serializable> persistEntities(final PersistentEntity persistentEntity, Iterable objs) {
final GemfireTemplate template = gemfireDatastore.getTemplate(persistentEntity);
final Map putMap = new HashMap();
List<Serializable> identifiers = new ArrayList<Serializable>();
final Map<Object, EntityAccess> entityAccessObjects = new HashMap<Object, EntityAccess>();
final Map<Object, Boolean> updates = new HashMap<Object, Boolean>();
for (Object obj : objs) {
final EntityAccess access = createEntityAccess(persistentEntity,obj);
entityAccessObjects.put(obj, access);
Object identifier = access.getIdentifier();
boolean isUpdate = true;
if (identifier == null) {
identifier = generateIdentifier(persistentEntity, access);
isUpdate = false;
}
AbstractPersistenceEvent event = isUpdate ?
new PreUpdateEvent(session.getDatastore(), persistentEntity, access) :
new PreInsertEvent(session.getDatastore(), persistentEntity, access);
updates.put(obj, isUpdate);
publisher.publishEvent(event);
if (event.isCancelled()) {
break;
}
putMap.put(identifier, obj);
identifiers.add((Serializable) identifier);
}
template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
region.putAll(putMap);
if (!persistentEntity.isRoot()) {
doWithParents(persistentEntity, new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
region.putAll(putMap);
return null;
}
});
}
for (Object id : putMap.keySet()) {
Object obj = putMap.get(id);
final EntityAccess access = entityAccessObjects.get(obj);
if (access != null) {
cascadeSaveOrUpdate(persistentEntity, obj, access);
if (updates.get(obj)) {
firePostUpdateEvent(persistentEntity, access);
}
else {
firePostInsertEvent(persistentEntity, access);
}
}
}
return null;
}
});
return identifiers;
}
@Override
protected Object retrieveEntity(final PersistentEntity persistentEntity, final Serializable key) {
final GemfireTemplate template = gemfireDatastore.getTemplate(persistentEntity);
return template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
final Class idType = persistentEntity.getIdentity().getType();
Object lookupKey = getMappingContext().getConversionService().convert(key, idType);
final Object entry = region.get(lookupKey);
if (entry != null) {
return handleDatastoreLoad(persistentEntity, entry);
}
return null;
}
});
}
/**
* Handles loading an entity from the store, including firing the necessary events.
* @param persistentEntity The entity type
* @param entry The native entry taken from the store.
* @return The domain entity
*/
public Object handleDatastoreLoad(PersistentEntity persistentEntity, Object entry) {
final EntityAccess ea = createEntityAccess(persistentEntity, entry);
firePreLoadEvent(persistentEntity, ea);
for (Association association : persistentEntity.getAssociations()) {
if (association instanceof OneToMany) {
final String propertyName = association.getName();
final Object currentState = ea.getProperty(propertyName);
if (currentState == null) {
initializeCollectionState(association, ea, propertyName);
}
}
}
firePostLoadEvent(persistentEntity, ea);
return entry;
}
private Object initializeCollectionState(Association association, EntityAccess ea, String propertyName) {
if (Set.class.isAssignableFrom(association.getType())) {
final HashSet set = new HashSet();
ea.setProperty(propertyName, set);
return set;
}
if (List.class.isAssignableFrom(association.getType())) {
final ArrayList list = new ArrayList();
ea.setProperty(propertyName, list);
return list;
}
if (Map.class.isAssignableFrom(association.getType())) {
final HashMap map = new HashMap();
ea.setProperty(propertyName, map);
return map;
}
return null;
}
@Override
protected Serializable persistEntity(final PersistentEntity persistentEntity, final Object obj) {
final EntityAccess access = createEntityAccess(persistentEntity,obj);
Object identifier = access.getIdentifier();
boolean isUpdate = true;
if (identifier == null) {
identifier = generateIdentifier(persistentEntity, access);
isUpdate = false;
}
final Object finalId = identifier;
final GemfireTemplate template = gemfireDatastore.getTemplate(persistentEntity);
final boolean finalIsUpdate = isUpdate;
template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
AbstractPersistenceEvent event = finalIsUpdate ?
new PreUpdateEvent(session.getDatastore(), persistentEntity, access) :
new PreInsertEvent(session.getDatastore(), persistentEntity, access);
publisher.publishEvent(event);
if (event.isCancelled()) {
return finalId;
}
if (finalIsUpdate && isVersioned(access)) {
// TODO this should be done with a CAS approach if possible
checkVersion(region, access, persistentEntity, finalId);
}
region.put(finalId, obj);
if (!persistentEntity.isRoot()) {
doWithParents(persistentEntity, new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
region.put(finalId, obj);
return null;
}
});
}
cascadeSaveOrUpdate(persistentEntity, obj, access);
if (finalIsUpdate) {
firePostUpdateEvent(persistentEntity, access);
}
else {
firePostInsertEvent(persistentEntity, access);
}
return null;
}
});
return (Serializable) identifier;
}
protected void checkVersion(Region region, EntityAccess access,
PersistentEntity persistentEntity, Object id) {
final Class idType = persistentEntity.getIdentity().getType();
Object lookupKey = getMappingContext().getConversionService().convert(id, idType);
Object previous = region.get(lookupKey);
Object oldVersion = new EntityAccess(persistentEntity, previous).getProperty("version");
Object currentVersion = access.getProperty("version");
if (Number.class.isAssignableFrom(access.getPropertyType("version"))) {
oldVersion = ((Number)oldVersion).longValue();
currentVersion = ((Number)currentVersion).longValue();
}
if (oldVersion != null && currentVersion != null && !oldVersion.equals(currentVersion)) {
throw new OptimisticLockingException(persistentEntity, id);
}
incrementVersion(access);
}
private void cascadeSaveOrUpdate(PersistentEntity persistentEntity, Object obj, EntityAccess access) {
final List<Association> associations = persistentEntity.getAssociations();
for (Association association : associations) {
if (association.doesCascade(CascadeType.PERSIST)) {
final Session session = getSession();
String processKey = association + ">" + obj;
if (association instanceof ToOne) {
final Object associatedObject = access.getProperty(association.getName());
if (associatedObject != null && !associatedObject.equals(obj)) {
if (session.getAttribute(obj, processKey) == null) {
session.setAttribute(obj, processKey, true);
this.session.persist(associatedObject);
autoAssociateInverseSide(obj, association, associatedObject);
}
}
else {
session.setAttribute(obj, processKey, false);
}
}
else if (association instanceof OneToMany) {
// cascade only once per object/association pair, mirroring the ToOne branch above
if (session.getAttribute(obj, processKey) == null) {
session.setAttribute(obj, processKey, Boolean.TRUE);
Object associatedObjects = access.getProperty(association.getName());
if (associatedObjects instanceof Iterable) {
final Iterable iterable = (Iterable) associatedObjects;
for (Object associatedObject : iterable) {
autoAssociateInverseSide(obj, association, associatedObject);
}
session.persist(iterable);
}
}
else {
session.setAttribute(obj, processKey, false);
}
}
}
}
}
private void autoAssociateInverseSide(Object obj, Association association, Object associatedObject) {
if (association.isBidirectional()) {
final Association inverseSide = association.getInverseSide();
if (inverseSide instanceof ToOne) {
final EntityAccess associationAccess = createEntityAccess(association.getAssociatedEntity(), associatedObject);
associationAccess.setProperty(inverseSide.getName(), obj);
}
else if (inverseSide instanceof OneToMany) {
final EntityAccess associationAccess = createEntityAccess(association.getAssociatedEntity(), associatedObject);
Object collectionObject = associationAccess.getProperty(inverseSide.getName());
if (collectionObject == null) {
collectionObject = initializeCollectionState(inverseSide, associationAccess, inverseSide.getName());
}
if (collectionObject instanceof Collection) {
final Collection collection = (Collection) collectionObject;
if (!collection.contains(obj))
collection.add(obj);
}
}
}
}
private void cascadeDelete(PersistentEntity persistentEntity, Object obj, EntityAccess access) {
final List<Association> associations = persistentEntity.getAssociations();
for (Association association : associations) {
if (association.doesCascade(CascadeType.REMOVE)) {
if (association instanceof ToOne) {
ToOne toOne = (ToOne) association;
final Object associatedObject = access.getProperty(toOne.getName());
if (associatedObject != null && !associatedObject.equals(obj)) {
session.delete(associatedObject);
}
}
else if (association instanceof OneToMany) {
Object associatedObjects = access.getProperty(association.getName());
if (associatedObjects instanceof Iterable) {
session.delete((Iterable)associatedObjects);
}
}
}
}
}
private void doWithParents(PersistentEntity persistentEntity, GemfireCallback gemfireCallback) {
if (!persistentEntity.isRoot()) {
PersistentEntity parentEntity = persistentEntity.getParentEntity();
do {
GemfireTemplate parentTemplate = gemfireDatastore.getTemplate(parentEntity);
parentTemplate.execute(gemfireCallback);
parentEntity = parentEntity.getParentEntity();
}
while(parentEntity != null && !(parentEntity.isRoot()));
}
}
private Object generateIdentifier(final PersistentEntity persistentEntity, final EntityAccess access) {
final GemfireTemplate template = gemfireDatastore.getTemplate(persistentEntity);
return template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
KeyValue mf = (KeyValue)gemfireDatastore.getMappingContext().getMappingFactory().createMappedForm(persistentEntity.getIdentity());
if ("uuid".equals(mf.getGenerator())) {
String uuid = UUID.randomUUID().toString();
access.setIdentifier(uuid);
return uuid;
}
Cache cache = CacheFactory.getAnyInstance();
final int uuid = PartitionedRegion.generatePRId(
(InternalDistributedSystem)cache.getDistributedSystem(),cache);
if (uuid == 0) {
throw new DataAccessResourceFailureException("Unable to generate Gemfire UUID");
}
long finalId = identifierGenerator.getAndIncrement() + uuid;
access.setIdentifier(finalId);
return finalId;
}
});
}
@Override
protected void deleteEntity(final PersistentEntity persistentEntity, final Object obj) {
final EntityAccess access = createEntityAccess(persistentEntity, obj);
final Object identifier = access.getIdentifier();
final GemfireTemplate template = gemfireDatastore.getTemplate(persistentEntity);
template.execute(new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
PreDeleteEvent event = new PreDeleteEvent(session.getDatastore(), persistentEntity, access);
publisher.publishEvent(event);
if (event.isCancelled()) {
return null;
}
region.remove(identifier);
if (!persistentEntity.isRoot()) {
doWithParents(persistentEntity, new GemfireCallback() {
public Object doInGemfire(Region region) throws GemFireCheckedException, GemFireException {
region.remove(identifier);
return null;
}
});
}
cascadeDelete(persistentEntity, obj, access);
firePostDeleteEvent(persistentEntity, access);
return null;
}
});
}
@Override
protected void deleteEntities(PersistentEntity persistentEntity, Iterable objects) {
for (Object object : objects) {
deleteEntity(persistentEntity, object);
}
}
public Query createQuery() {
return new GemfireQuery((GemfireSession) getSession(), getPersistentEntity());
}
public Serializable refresh(Object o) {
return (Serializable)createEntityAccess(getPersistentEntity(), o).getIdentifier();
}
}
<file_sep>/* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.orm.hibernate.cfg;
import java.util.Arrays;
import java.util.List;
import org.codehaus.groovy.grails.commons.GrailsDomainClass;
import org.codehaus.groovy.grails.commons.GrailsDomainClassProperty;
import org.codehaus.groovy.grails.orm.hibernate.persister.entity.GroovyAwareJoinedSubclassEntityPersister;
import org.codehaus.groovy.grails.orm.hibernate.persister.entity.GroovyAwareSingleTableEntityPersister;
import org.codehaus.groovy.grails.orm.hibernate.validation.UniqueConstraint;
import org.codehaus.groovy.grails.validation.ConstrainedProperty;
import org.codehaus.groovy.grails.validation.Constraint;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Table;
/**
* Handles binding Grails domain classes and properties to the Hibernate runtime meta model.
* Based on the HbmBinder code in Hibernate core and influenced by AnnotationsBinder.
*
* @author <NAME>
* @since 0.1
*/
public class GrailsDomainBinder extends AbstractGrailsDomainBinder {
protected Class<?> getGroovyAwareJoinedSubclassEntityPersisterClass() {
return GroovyAwareJoinedSubclassEntityPersister.class;
}
protected Class<?> getGroovyAwareSingleTableEntityPersisterClass() {
return GroovyAwareSingleTableEntityPersister.class;
}
protected void handleLazyProxy(GrailsDomainClass domainClass, GrailsDomainClassProperty grailsProperty) {
HibernateUtils.handleLazyProxy(domainClass, grailsProperty);
}
protected void handleUniqueConstraint(GrailsDomainClassProperty property, Column column, String path, Table table, String columnName, String sessionFactoryBeanName) {
ConstrainedProperty cp = getConstrainedProperty(property);
if (cp != null && cp.hasAppliedConstraint(UniqueConstraint.UNIQUE_CONSTRAINT)) {
Constraint appliedConstraint = cp.getAppliedConstraint(UniqueConstraint.UNIQUE_CONSTRAINT);
if (appliedConstraint instanceof UniqueConstraint) {
UniqueConstraint uc = (UniqueConstraint) appliedConstraint;
if (uc != null && uc.isUnique()) {
if (!uc.isUniqueWithinGroup()) {
column.setUnique(true);
}
else if (uc.getUniquenessGroup().size() > 0) {
createKeyForProps(property, path, table, columnName, uc.getUniquenessGroup(), sessionFactoryBeanName);
}
}
}
}
else {
Object val = cp != null ? cp.getMetaConstraintValue(UniqueConstraint.UNIQUE_CONSTRAINT) : null;
if (val instanceof Boolean) {
column.setUnique((Boolean)val);
}
else if (val instanceof String) {
createKeyForProps(property, path, table, columnName, Arrays.asList((String) val), sessionFactoryBeanName);
}
else if (val instanceof List<?> && ((List<?>)val).size() > 0) {
createKeyForProps(property, path, table, columnName, (List<?>)val, sessionFactoryBeanName);
}
}
}
protected boolean identityEnumTypeSupports(Class<?> propertyType) {
return IdentityEnumType.supports(propertyType);
}
protected boolean isNotEmpty(String s) {
return GrailsHibernateUtil.isNotEmpty(s);
}
protected String qualify(String prefix, String name) {
return GrailsHibernateUtil.qualify(prefix, name);
}
protected String unqualify(String qualifiedName) {
return GrailsHibernateUtil.unqualify(qualifiedName);
}
}
<file_sep>package org.grails.datastore.mapping.orient.query;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.orient.OrientSession;
import org.grails.datastore.mapping.query.Query;
import org.grails.datastore.mapping.query.Restrictions;
import org.grails.datastore.mapping.query.api.QueryArgumentsAware;
import org.grails.datastore.mapping.query.jpa.JpaQueryBuilder;
import org.grails.datastore.mapping.query.jpa.JpaQueryInfo;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceException;
import java.util.List;
import java.util.Map;
public class OrientQuery extends Query implements QueryArgumentsAware {
private static final Log LOG = LogFactory.getLog(OrientQuery.class);
public OrientQuery(OrientSession session, PersistentEntity entity) {
super(session, entity);
if (session == null) {
throw new InvalidDataAccessApiUsageException("Argument session cannot be null");
}
if (entity == null) {
throw new InvalidDataAccessApiUsageException("No persistent entity specified");
}
}
@Override
public OrientSession getSession() {
return (OrientSession) super.getSession();
}
@Override
public void add(Criterion criterion) {
if (criterion instanceof Equals) {
final Equals eq = (Equals) criterion;
Object resolved = resolveIdIfEntity(eq.getValue());
if (resolved != eq.getValue()) {
criterion = Restrictions.idEq(resolved);
}
}
criteria.add(criterion);
}
@Override
protected List executeQuery(final PersistentEntity entity, final Junction criteria) {
/*
final JpaTemplate jpaTemplate = getSession().getJpaTemplate();
if (!OrientSession.hasTransaction()) {
jpaTemplate.setFlushEager(false);
}
return (List)jpaTemplate.execute(new JpaCallback<Object>() {
public Object doInJpa(EntityManager em) throws PersistenceException {
return executeQuery(entity, criteria, em, false);
}
});
*/
return null;
}
@Override
public Object singleResult() {
/*
final JpaTemplate jpaTemplate = getSession().getJpaTemplate();
if (!JpaSession.hasTransaction()) {
jpaTemplate.setFlushEager(false);
}
try {
return jpaTemplate.execute(new JpaCallback<Object>() {
public Object doInJpa(EntityManager em) throws PersistenceException {
return executeQuery(entity, criteria, em, true);
}
});
} catch (EmptyResultDataAccessException e) {
return null;
}
*/
return null;
}
Object executeQuery(final PersistentEntity entity, final Junction criteria, EntityManager em, boolean singleResult) {
JpaQueryBuilder queryBuilder = new JpaQueryBuilder(entity, criteria, projections, orderBy);
queryBuilder.setConversionService(session.getDatastore().getMappingContext().getConversionService());
JpaQueryInfo jpaQueryInfo = queryBuilder.buildSelect();
List parameters = jpaQueryInfo.getParameters();
final String queryToString = jpaQueryInfo.getQuery();
if (LOG.isDebugEnabled()) {
LOG.debug("Built JPQL to execute: " + queryToString);
}
final javax.persistence.Query q = em.createQuery(queryToString);
if (parameters != null) {
for (int i = 0, count = parameters.size(); i < count; i++) {
q.setParameter(i + 1, parameters.get(i));
}
}
q.setFirstResult(offset);
if (max > -1) {
q.setMaxResults(max);
}
if (!singleResult) {
return q.getResultList();
}
return q.getSingleResult();
}
@Override
public void setArguments(@SuppressWarnings("rawtypes") Map arguments) {
// query arguments are not currently used by this implementation
}
}
<file_sep>version = "3.0.2.BUILD-SNAPSHOT"
configurations {
optional
}
dependencies {
compile "org.mongodb:mongo-java-driver:2.12.0"
compile("org.springframework.data:spring-data-mongodb:1.4.1.RELEASE") {
exclude group:'org.mongodb',module:'mongo-java-driver'
}
compile("com.gmongo:gmongo:1.2") {
exclude group:'org.mongodb',module:'mongo-java-driver'
}
compile("org.grails:grails-async:$grailsVersion")
compile("org.grails:grails-core:$grailsVersion") {
exclude group:'aopalliance',module:'aopalliance'
exclude group:'commons-logging',module:'commons-logging'
exclude group:'commons-lang',module:'commons-lang'
exclude group:'commons-collections',module:'commons-collections'
exclude group:'commons-io',module:'commons-io'
exclude group:'org.grails',module:'grails-spring'
exclude group:'org.grails',module:'grails-bootstrap'
exclude group:'org.aspectj',module:'aspectjweaver'
exclude group:'org.aspectj',module:'aspectjrt'
exclude group:'oro',module:'oro'
exclude group:'asm',module:'asm'
exclude group:'cglib',module:'cglib'
exclude group:'xalan',module:'serializer'
exclude group:'org.springframework',module:'spring-core'
exclude group:'org.springframework',module:'spring-tx'
exclude group:'org.springframework',module:'spring-aspects'
exclude group:'org.springframework',module:'spring-beans'
exclude group:'org.springframework',module:'spring-context'
exclude group:'org.springframework',module:'spring-expression'
exclude group:'org.springframework',module:'spring-web'
exclude group:'org.springframework',module:'spring-aop'
exclude group:'org.springframework',module:'spring-context-support'
exclude group:'commons-validator',module:'commons-validator'
exclude group:'com.googlecode.concurrentlinkedhashmap',module:'concurrentlinkedhashmap-lru'
}
compile("org.grails:grails-bootstrap:$grailsVersion") {
exclude group:'org.apache.ant',module:'ant'
exclude group:'org.apache.ant',module:'ant-launcher'
exclude group:'org.apache.ant',module:'ant-trax'
exclude group:'org.apache.ant',module:'ant-junit'
exclude group:'org.apache.ivy',module:'ivy'
exclude group:'org.codehaus.gant',module:'gant_groovy1.8'
exclude group:'jline',module:'jline'
exclude group:'org.fusesource.jansi',module:'jansi'
exclude group:'net.java.dev.jna',module:'jna'
}
compile project(":grails-datastore-gorm"),
project(":grails-datastore-gorm-plugin-support"),
project(":grails-datastore-core")
testCompile project(":grails-datastore-gorm-test"),
project(":grails-datastore-gorm-tck")
optional "javax.servlet:servlet-api:2.5"
def excludes = {
exclude group:"org.grails",module: "grails-plugin-url-mappings"
exclude group:"org.grails",module: "grails-plugin-servlets"
exclude group:"org.grails",module: "grails-plugin-controllers"
exclude group:"org.grails",module: "grails-plugin-domain-class"
exclude group:"org.grails",module: "grails-plugin-gsp"
exclude group:"org.grails",module: "grails-plugin-filters"
exclude group:"org.grails",module: "grails-plugin-mimetypes"
exclude group:"org.grails",module: "grails-plugin-converters"
exclude group:"org.grails",module: "grails-logging"
exclude group:"org.grails",module: "grails-test"
exclude group:"org.grails",module: "grails-datastore-gorm"
exclude group:"org.grails",module: "grails-datastore-core"
exclude group:"org.grails",module: "grails-datastore-simple"
}
optional "org.grails:grails-test:$grailsVersion", excludes
optional "org.grails:grails-plugin-testing:$grailsVersion", excludes
}
tasks.withType(GroovyCompile) {
configure(groovyOptions.forkOptions) {
memoryMaximumSize = '1g'
jvmArgs = ['-XX:MaxPermSize=512m', '-Xms512m', '-Xmx1g']
}
}
sourceSets {
main {
compileClasspath += configurations.optional
}
}
test {
testLogging {
exceptionFormat ='full'
}
forkEvery = 30
maxParallelForks = 4
jvmArgs '-server','-Xmx1024M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m'
}
//test {
// if (System.getProperty('debug', 'false') == 'true') {
// jvmArgs '-server','-Xmx2048M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m', '-Xdebug',
// '-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005'
// }
// else {
// jvmArgs '-server','-Xmx2048M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m'
// }
//}<file_sep>version = "0.0.1.BUILD-SNAPSHOT"
ext.orientVersion = "1.7.7"
ext.gdmVersion = "2.0.7.RELEASE"
repositories {
maven { url "https://oss.sonatype.org/content/repositories/releases/" }
//maven { url "https://oss.sonatype.org/content/repositories/snapshots/" }
maven { url "http://repo.springsource.org/libs-snapshot" }
}
dependencies {
compile "com.orientechnologies:orient-commons:$orientVersion",
"com.orientechnologies:orientdb-client:$orientVersion",
"com.orientechnologies:orientdb-core:$orientVersion",
"com.orientechnologies:orientdb-enterprise:$orientVersion"
if (version =~ /SNAPSHOT/) {
compile project(":grails-datastore-gorm"),
project(":grails-datastore-web"),
project(":grails-datastore-gorm-plugin-support")
testCompile project(":grails-datastore-gorm-test"),
project(":grails-datastore-gorm-tck")
} else {
compile "org.grails:grails-datastore-gorm:$gdmVersion",
"org.grails:grails-datastore-web:$gdmVersion",
"org.grails:grails-datastore-gorm-plugin-support:$gdmVersion"
testCompile "org.grails:grails-datastore-gorm-test:$gdmVersion",
"org.grails:grails-datastore-gorm-tck:$gdmVersion"
}
}
<file_sep>version = "1.0.0.BUILD-SNAPSHOT"
dependencies {
compile project(":grails-datastore-gorm"),
project(":grails-datastore-orient"),
project(":grails-datastore-gorm-plugin-support"),
project(":grails-datastore-core")
testCompile project(':grails-datastore-gorm-test'),
project(':grails-datastore-gorm-tck'),
project(":grails-datastore-gorm"),
project(":grails-datastore-core")
testRuntime 'javax.servlet:servlet-api:2.5'
testRuntime "org.grails:grails-web:$grailsVersion"
}<file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.orm.hibernate;
import groovy.util.ConfigObject;
import org.grails.datastore.mapping.core.AbstractDatastore;
import org.grails.datastore.mapping.model.MappingContext;
import org.hibernate.SessionFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
/**
* Datastore implementation that uses a Hibernate SessionFactory underneath.
*
* @author <NAME>
* @since 2.0
*/
public abstract class AbstractHibernateDatastore extends AbstractDatastore implements ApplicationContextAware {
protected SessionFactory sessionFactory;
protected ConfigObject config;
protected AbstractEventTriggeringInterceptor eventTriggeringInterceptor;
protected AbstractHibernateDatastore(MappingContext mappingContext, SessionFactory sessionFactory, ConfigObject config) {
super(mappingContext);
this.sessionFactory = sessionFactory;
this.config = config;
initializeConverters(mappingContext);
}
public AbstractHibernateDatastore(MappingContext mappingContext, SessionFactory sessionFactory, ConfigObject config, ApplicationContext applicationContext) {
this(mappingContext, sessionFactory, config);
setApplicationContext(applicationContext);
}
/**
* @return The Hibernate {@link SessionFactory} being used by this datastore instance
*/
public SessionFactory getSessionFactory() {
return sessionFactory;
}
@Override
protected boolean registerValidationListener() {
return false;
}
// for testing
public AbstractEventTriggeringInterceptor getEventTriggeringInterceptor() {
return eventTriggeringInterceptor;
}
}
<file_sep>package org.grails.datastore.gorm.neo4j;
import org.grails.datastore.gorm.neo4j.engine.CypherEngine;
import org.grails.datastore.mapping.core.impl.PendingInsertAdapter;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
* Created by stefan on 15.02.14.
*/
class RelationshipPendingDelete extends PendingInsertAdapter<Object, Long> {
private static Logger log = LoggerFactory.getLogger(RelationshipPendingDelete.class);
private String relType;
private CypherEngine cypherEngine;
private EntityAccess target;
RelationshipPendingDelete(EntityAccess source, String relType, EntityAccess target, CypherEngine cypherEngine) {
super(source.getPersistentEntity(), -1l, source.getEntity(), source);
this.target = target;
this.cypherEngine = cypherEngine;
this.relType = relType;
}
@Override
public void run() {
String labelsFrom = ((GraphPersistentEntity)getEntity()).getLabelsAsString();
String labelsTo = null;
String cypher;
List params = new ArrayList(2);
params.add(getEntityAccess().getIdentifier());
if (target!=null) {
params.add(target.getIdentifier());
labelsTo = ((GraphPersistentEntity)target.getPersistentEntity()).getLabelsAsString();
cypher = String.format("MATCH (from%s {__id__: {1}})-[r:%s]->(to%s {__id__: {2}}) DELETE r", labelsFrom, relType, labelsTo);
} else {
cypher = String.format("MATCH (from%s {__id__: {1}})-[r:%s]->() DELETE r", labelsFrom, relType);
}
cypherEngine.execute(cypher, params);
}
}
<file_sep>/* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.orm.hibernate;
import grails.util.GrailsNameUtils;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import org.codehaus.groovy.grails.commons.GrailsDomainClass;
import org.codehaus.groovy.grails.commons.GrailsDomainClassProperty;
import org.springframework.beans.BeanUtils;
import org.springframework.util.ReflectionUtils;
/**
* An implementation of the GrailsDomainClassProperty interface that allows classes mapped in
* Hibernate to integrate seamlessly with Grails' validation, dynamic methods, etc.
*
* @author <NAME>
* @since 0.1
*/
public class GrailsHibernateDomainClassProperty implements GrailsDomainClassProperty {
protected AbstractGrailsHibernateDomainClass domainClass;
protected String name;
protected String naturalName;
protected Class<?> type;
protected boolean identity;
protected boolean oneToOne;
protected boolean manyToOne;
protected boolean association;
protected boolean oneToMany;
protected boolean manyToMany;
protected boolean bidirectional;
protected boolean optional;
protected Class<?> relatedClassType;
protected GrailsDomainClass referencedDomainClass;
protected GrailsDomainClassProperty otherSide;
protected boolean owingSide;
protected String columnName;
protected boolean explicitSaveUpdateCascade;
public GrailsHibernateDomainClassProperty(AbstractGrailsHibernateDomainClass domainClass, String propertyName) {
this.domainClass = domainClass;
name = propertyName;
naturalName = GrailsNameUtils.getNaturalName(propertyName);
}
public String getName() {
return name;
}
public Class<?> getType() {
if (type == null) {
attemptResolveType();
}
return type;
}
protected void attemptResolveType() {
PropertyDescriptor propertyDescriptor = BeanUtils.getPropertyDescriptor(domainClass.getClazz(), name);
type = propertyDescriptor == null ? null : propertyDescriptor.getPropertyType();
if (type == null) {
Field field = ReflectionUtils.findField(domainClass.getClazz(), name);
if (field != null) {
type = field.getType();
}
}
}
public void setType(Class<?> type) {
this.type = type;
}
public String getTypePropertyName() {
return GrailsNameUtils.getPropertyNameRepresentation(type);
}
public GrailsDomainClass getDomainClass() {
return domainClass;
}
public boolean isPersistent() {
return true;
}
public String getNaturalName() {
return naturalName;
}
public void setReferencedDomainClass(GrailsDomainClass referencedGrailsDomainClass) {
this.referencedDomainClass = referencedGrailsDomainClass;
}
public void setOtherSide(GrailsDomainClassProperty referencedProperty) {
otherSide = referencedProperty;
}
public boolean isExplicitSaveUpdateCascade() {
return explicitSaveUpdateCascade;
}
public void setExplicitSaveUpdateCascade(boolean explicitSaveUpdateCascade) {
this.explicitSaveUpdateCascade = explicitSaveUpdateCascade;
}
public GrailsDomainClassProperty getOtherSide() {
return otherSide;
}
public Class<?> getReferencedPropertyType() {
return relatedClassType;
}
public boolean isIdentity() {
return identity;
}
public void setIdentity(boolean identity) {
this.identity = identity;
}
public boolean isOptional() {
return optional;
}
public void setOptional(boolean optional) {
this.optional = optional;
}
public boolean isOneToOne() {
return oneToOne;
}
public void setOneToOne(boolean oneToOne) {
this.oneToOne = oneToOne;
}
public boolean isManyToOne() {
return manyToOne;
}
public void setManyToOne(boolean manyToOne) {
this.manyToOne = manyToOne;
}
public boolean isAssociation() {
return association;
}
public boolean isEnum() {
return getType().isEnum();
}
public void setAssociation(boolean association) {
this.association = association;
}
public boolean isOneToMany() {
return oneToMany;
}
public void setOneToMany(boolean oneToMany) {
this.oneToMany = oneToMany;
}
public boolean isManyToMany() {
return manyToMany;
}
public void setManyToMany(boolean manyToMany) {
this.manyToMany = manyToMany;
}
public boolean isBidirectional() {
return bidirectional;
}
public String getFieldName() {
return getName().toUpperCase();
}
public void setBidirectional(boolean bidirectional) {
this.bidirectional = bidirectional;
}
public GrailsDomainClass getReferencedDomainClass() {
return referencedDomainClass;
}
public void setRelatedClassType(Class<?> relatedType) {
relatedClassType = relatedType;
}
public boolean isInherited() {
return false;
}
public int getFetchMode() {
return FETCH_LAZY;
}
public boolean isOwningSide() {
return owingSide;
}
public boolean isCircular() {
return false;
}
public String getReferencedPropertyName() {
return null;
}
public boolean isEmbedded() {
return false;
}
public GrailsDomainClass getComponent() {
return null;
}
public void setOwningSide(boolean b) {
owingSide = b;
}
public boolean isBasicCollectionType() {
return false;
}
public boolean isHasOne() {
return false;
}
public void setColumnName(String columnName) {
this.columnName = columnName;
}
public String getColumnName() {
return columnName;
}
public void setDerived(boolean derived) {
// ignored
}
public boolean isDerived() {
return false;
}
}
<file_sep>title=GORM for Mongo
version=3.0.0
authors=<NAME>, <NAME><file_sep>package org.grails.datastore.mapping.orient.config;
import org.grails.datastore.mapping.config.Entity;
import org.grails.datastore.mapping.document.config.Collection;
/**
* Configures how an entity is mapped onto an OrientDB class
*
* @author <NAME>
*/
public class OrientClass extends Collection {
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
protected String className;
}
<file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.orm.hibernate.validation;
import java.util.Map;
import org.codehaus.groovy.grails.commons.GrailsDomainClassProperty;
import org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsDomainBinder;
import org.codehaus.groovy.grails.orm.hibernate.cfg.PropertyConfig;
import org.codehaus.groovy.grails.validation.ConstrainedProperty;
import org.codehaus.groovy.grails.validation.DefaultConstraintEvaluator;
/**
* Extends the default implementation to add Hibernate-specific exceptions.
*
* @author <NAME>
* @since 2.0
*/
public class HibernateConstraintsEvaluator extends DefaultConstraintEvaluator {
public HibernateConstraintsEvaluator(Map<String, Object> defaultConstraints) {
super(defaultConstraints);
}
public HibernateConstraintsEvaluator() {
// default
}
@Override
protected void applyDefaultNullableConstraint(GrailsDomainClassProperty p, ConstrainedProperty cp) {
final PropertyConfig propertyConfig = new GrailsDomainBinder().getPropertyConfig(p);
boolean insertable = propertyConfig != null ? propertyConfig.isInsertable() : true;
if (!insertable) {
cp.applyConstraint(ConstrainedProperty.NULLABLE_CONSTRAINT,true);
}
else {
super.applyDefaultNullableConstraint(p, cp);
}
}
}
<file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.simpledb.engine;
import java.util.LinkedList;
import java.util.List;
/**
* An implementation of the domain resolver which assumes there is no sharding -
* i.e. always the same domain name for all the primary keys (for the same type
 * of {@link org.grails.datastore.mapping.model.PersistentEntity}).
*/
public class ConstSimpleDBDomainResolver extends AbstractSimpleDBDomainResolver {
private List<String> domains;
public ConstSimpleDBDomainResolver(String entityFamily, String domainNamePrefix) {
super(entityFamily, domainNamePrefix); //parent contains the logic for figuring out the final entityFamily
domains = new LinkedList<String>();
domains.add(getEntityFamily()); // without sharding there is just one domain
}
public String resolveDomain(String id) {
return entityFamily; // without sharding it is always the same one per PersistentEntity
}
public List<String> getAllDomainsForEntity() {
return domains;
}
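    /*
     * Illustrative sketch of the no-sharding behaviour (the "Person" entity family and
     * "myapp_" prefix are made-up values):
     *
     *   ConstSimpleDBDomainResolver resolver =
     *       new ConstSimpleDBDomainResolver("Person", "myapp_");
     *   String domain = resolver.resolveDomain("1234");        // same domain for every id
     *   List<String> all = resolver.getAllDomainsForEntity();  // single-element list
     */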
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.gorm.neo4j;
import org.grails.datastore.gorm.neo4j.converters.*;
import org.grails.datastore.mapping.document.config.Attribute;
import org.grails.datastore.mapping.model.AbstractMappingContext;
import org.grails.datastore.mapping.model.MappingConfigurationStrategy;
import org.grails.datastore.mapping.model.MappingFactory;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.config.GormMappingConfigurationStrategy;
/**
* @author <NAME> <<EMAIL>>
*/
public class Neo4jMappingContext extends AbstractMappingContext {
MappingFactory mappingFactory = new GraphGormMappingFactory();
MappingConfigurationStrategy mappingSyntaxStrategy = new GormMappingConfigurationStrategy(mappingFactory);
// Neo4jMappingContext() {
// mappingFactory = new GraphGormMappingFactory()
// syntaxStrategy =
// }
public Neo4jMappingContext() {
super();
addTypeConverter(new LazyEntitySetToSetConverter());
}
@Override
public MappingConfigurationStrategy getMappingSyntaxStrategy() {
return mappingSyntaxStrategy;
}
@Override
public MappingFactory getMappingFactory() {
return mappingFactory;
}
@Override
protected PersistentEntity createPersistentEntity(Class javaClass) {
PersistentEntity persistentEntity = new GraphPersistentEntity(javaClass, this);
// mappingFactory.createMappedForm(persistentEntity) // populates mappingFactory.entityToPropertyMap as a side effect
return persistentEntity;
}
// MappingFactory getMappingFactory() {
// mappingFactory
// }
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.cassandra;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.DataAccessResourceFailureException;
import org.grails.datastore.mapping.cassandra.engine.CassandraEntityPersister;
import org.grails.datastore.mapping.core.AbstractSession;
import org.grails.datastore.mapping.core.Datastore;
import org.grails.datastore.mapping.engine.Persister;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.transactions.Transaction;
import org.springframework.transaction.TransactionSystemException;
/**
* @author <NAME>
* @since 1.0
*/
public class CassandraSession extends AbstractSession<Session> {
Logger log = LoggerFactory.getLogger(CassandraSession.class);
private Session session;
private ApplicationEventPublisher applicationEventPublisher;
public CassandraSession(Datastore ds, MappingContext context, Session session, ApplicationEventPublisher applicationEventPublisher, boolean stateless) {
super(ds, context, applicationEventPublisher, stateless);
this.applicationEventPublisher = applicationEventPublisher;
this.session = session;
}
@Override
protected Persister createPersister(Class cls, MappingContext mappingContext) {
PersistentEntity entity = mappingContext.getPersistentEntity(cls.getName());
if (entity != null) {
return new CassandraEntityPersister(mappingContext,entity,this,session,applicationEventPublisher);
}
return null;
}
@Override
public void disconnect() {
super.disconnect();
}
@Override
protected Transaction beginTransactionInternal() {
throw new TransactionSystemException("Transactions are not supported by Cassandra");
}
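    /*
     * Illustrative caller-side sketch (the cassandraSession variable is assumed):
     * because beginTransactionInternal() always throws, any attempt to start a
     * transaction on this session must be guarded.
     *
     *   try {
     *       cassandraSession.beginTransaction();
     *   } catch (TransactionSystemException e) {
     *       // expected: this Cassandra session does not support transactions
     *   }
     */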
public Session getNativeInterface() {
return session;
}
}
<file_sep>package org.grails.datastore.mapping.jcr.engine;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import javax.jcr.ItemNotFoundException;
import javax.jcr.Node;
import javax.jcr.PathNotFoundException;
import javax.jcr.Property;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import org.springframework.beans.SimpleTypeConverter;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.dao.DataRetrievalFailureException;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.jcr.JcrSession;
import org.grails.datastore.mapping.jcr.util.JcrConstants;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.node.engine.AbstractNodeEntityPersister;
import org.grails.datastore.mapping.query.JcrQuery;
import org.springframework.extensions.jcr.JcrCallback;
import org.springframework.extensions.jcr.JcrTemplate;
/**
 * JCR implementation of {@link AbstractNodeEntityPersister} that persists entities
 * as JCR nodes via a {@link JcrTemplate}.
*
* @author <NAME>
* @since 1.0
*/
public class JcrEntityPersister extends AbstractNodeEntityPersister<Node, String> {
private JcrTemplate jcrTemplate;
private SimpleTypeConverter typeConverter;
public JcrEntityPersister(MappingContext context, PersistentEntity entity, Session session,
JcrTemplate jcrTemplate, ApplicationEventPublisher publisher) {
super(context, entity, session, publisher);
typeConverter = new SimpleTypeConverter();
this.jcrTemplate = jcrTemplate;
this.jcrTemplate.setAllowCreate(true);
}
public JcrEntityPersister(MappingContext mappingContext, PersistentEntity entity,
Session session, ApplicationEventPublisher publisher) {
super(mappingContext, entity, session, publisher);
}
/**
     * @param persistentEntity The PersistentEntity instance
     * @param id The identifier
* @param timeout The lock timeout in seconds
*/
@Override
protected void lockEntry(PersistentEntity persistentEntity, Serializable id, int timeout) {
//TODO: Implement lock timeout
Node node = jcrTemplate.getNodeByUUID(getString(id));
try {
node.lock(true, true);
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred cannot lock entity: " + e.getMessage(), e);
}
}
/**
* @param o The object
* @return True if the object is locked
*/
@Override
public boolean isLocked(Object o) {
String uuid = getString(createEntityAccess(getPersistentEntity(), o).getIdentifier());
if (uuid == null) return false;
Node node = jcrTemplate.getNodeByUUID(uuid);
try {
return node.isLocked();
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred cannot unlock entity: " + e.getMessage(), e);
}
}
/**
* @param persistentEntity The persistent entity
     * @param id The identifier
*/
@Override
protected void unlockEntry(PersistentEntity persistentEntity, Serializable id) {
Node node = jcrTemplate.getNodeByUUID(getString(id));
try {
node.unlock();
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred cannot unlock entity: " + e.getMessage(), e);
}
}
@Override
protected String generateIdentifier(PersistentEntity persistentEntity, Node tmp) {
try {
return tmp.getUUID();
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred cannot generateIdentifier by getting UUID from Node: " + e.getMessage(), e);
}
}
@Override
protected void deleteEntry(final String key) {
Node node = jcrTemplate.getNodeByUUID(key);
try {
node.remove();
jcrTemplate.save();
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred cannot delete Node: " + e.getMessage(), e);
}
}
@Override
protected Object getEntryValue(Node nativeEntry, String property) {
try {
Property prop = nativeEntry.getProperty(property);
if (prop.getType() == PropertyType.REFERENCE) {
String nodeUUID = prop.getString();
return jcrTemplate.getNodeByUUID(nodeUUID);
}
switch (prop.getType()) {
case PropertyType.BINARY:
// TODO - add lazyinputstream
return prop.getString();
case PropertyType.BOOLEAN:
return prop.getBoolean();
case PropertyType.DATE:
return prop.getDate();
case PropertyType.DOUBLE:
return prop.getDouble();
case PropertyType.LONG:
return prop.getLong();
case PropertyType.NAME: // fall through
case PropertyType.PATH: // fall through
case PropertyType.REFERENCE: // fall through
case PropertyType.STRING: // fall through
case PropertyType.UNDEFINED: // not actually expected
default: // not actually expected
return prop.getString();
}
} catch(PathNotFoundException e) {
return null;
} catch (Exception e) {
throw new DataRetrievalFailureException("Exception occurred cannot getProperty from Node: " + e.getMessage(), e);
}
}
@Override
protected Node retrieveEntry(final PersistentEntity persistentEntity, final Serializable key) {
if (key == null) {
return null;
}
return (Node) jcrTemplate.execute(new JcrCallback() {
public Object doInJcr(javax.jcr.Session s) throws IOException, RepositoryException {
try {
return s.getNodeByUUID(getString(key));
} catch (ItemNotFoundException ex) {
//Force to return null when ItemNotFoundException occurred
return null;
}
}
});
}
private String getString(Object key) {
return typeConverter.convertIfNecessary(key, String.class);
}
private Long getLong(Object value) {
return typeConverter.convertIfNecessary(value, Long.class);
}
@Override
protected Node createNewEntry(final PersistentEntity persistentEntity) {
try {
Node rootNode = jcrTemplate.getRootNode();
Node node = rootNode.addNode(persistentEntity.getJavaClass().getSimpleName(), JcrConstants.DEFAULT_JCR_TYPE);
node.addMixin(JcrConstants.MIXIN_REFERENCEABLE);
node.addMixin(JcrConstants.MIXIN_VERSIONABLE);
node.addMixin(JcrConstants.MIXIN_LOCKABLE);
return node;
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred cannot create Node: " + e.getMessage(), e);
}
}
@Override
protected void setEntryValue(Node nativeEntry, String propertyName, Object value) {
        // Supported property types are String, Boolean, Calendar, Double, InputStream and Long;
        // anything else is marshalled to a String below
if (value != null) {
try {
if (value instanceof String)
nativeEntry.setProperty(propertyName, (String) value);
else if (value instanceof Boolean)
nativeEntry.setProperty(propertyName, (Boolean) value);
else if (value instanceof Calendar) {
nativeEntry.setProperty(propertyName, (Calendar) value);
}else if (value instanceof Double)
nativeEntry.setProperty(propertyName, (Double) value);
else if (value instanceof InputStream)
nativeEntry.setProperty(propertyName, (InputStream) value);
else if (value instanceof Long)
nativeEntry.setProperty(propertyName, getLong(value));
else if (value instanceof Integer)
nativeEntry.setProperty(propertyName, getLong(value));
else if (value instanceof Date) {
//TODO: Solve date type problem.
nativeEntry.setProperty(propertyName, ((Date)value).getTime());
}else{
//Marshaling all unsupported data types into String
value = value.toString();
nativeEntry.setProperty(propertyName, (String)value);
}
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred set a property value to Node: " + e.getMessage(), e);
}
}
}
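    /*
     * Illustrative examples of the marshalling rules above (property names and values
     * are invented):
     *
     *   setEntryValue(node, "title", "GORM");                  // stored as a STRING property
     *   setEntryValue(node, "count", 42);                      // Integer widened to a LONG
     *   setEntryValue(node, "created", new Date());            // stored as epoch millis (LONG)
     *   setEntryValue(node, "price", new BigDecimal("9.99"));  // unsupported type -> toString()
     */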
@Override
protected String storeEntry(PersistentEntity persistentEntity, String id, Node nativeEntry) {
jcrTemplate.save();
return id;
}
@Override
protected void updateEntry(final PersistentEntity persistentEntity, final String id, final Node entry) {
if (id != null) {
List<String> propNames = persistentEntity.getPersistentPropertyNames();
Node node = jcrTemplate.getNodeByUUID(id);
try {
node.checkout();
for (String propName : propNames) {
if (node.hasProperty(propName)) {
node.setProperty(propName, entry.getProperty(propName).getValue());
}
}
node.save();
node.checkin();
} catch (RepositoryException e) {
throw new DataAccessResourceFailureException("Exception occurred when updating Node: " + e.getMessage(), e);
}
}
}
public org.grails.datastore.mapping.query.Query createQuery() {
return new JcrQuery((JcrSession) getSession(), getJcrTemplate(), getPersistentEntity(), this);
}
public JcrTemplate getJcrTemplate() {
return jcrTemplate;
}
}
<file_sep>apply from:'../gradle/maven-central-build.gradle'
group = 'org.grails'
version = "1.1.0.RELEASE"
sourceCompatibility = 1.6
targetCompatibility = 1.6
repositories {
mavenCentral()
maven { url "http://repo.spring.io/libs-milestone" }
mavenLocal()
}
dependencies {
def springBootVersion = '1.1.1.RELEASE'
compile "org.springframework.boot:spring-boot-cli:$springBootVersion", {
exclude group:'org.codehaus.groovy', module:'groovy'
}
compile "org.springframework.boot:spring-boot-autoconfigure:$springBootVersion"
compile 'org.codehaus.groovy:groovy-all:2.3.2'
compile "org.grails:grails-datastore-gorm-mongodb:3.0.1.RELEASE", {
exclude group:'org.grails', module:'grails-datastore-gorm-plugin-support'
exclude group:"org.springframework.data", module:"spring-data-mongodb"
}
compile "org.mongodb:mongo-java-driver:2.12.2"
compile "org.springframework.data:spring-data-mongodb:1.5.0.RELEASE", {
exclude group:"org.mongodb", module:"mongo-java-driver"
}
compile "org.springframework:spring-tx:4.0.5.RELEASE"
testCompile "org.spockframework:spock-core:0.7-groovy-2.0"
}
<file_sep>package org.grails.datastore.mapping.orient.config;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.OSerializableStream;
import groovy.lang.Closure;
import org.grails.datastore.mapping.document.config.DocumentMappingContext;
import org.grails.datastore.mapping.model.*;
import java.math.BigDecimal;
import java.util.*;
import org.grails.datastore.mapping.document.config.Collection;
/**
* Models a {@link org.grails.datastore.mapping.model.MappingContext} for Orient.
*
* @author <NAME>
*/
@SuppressWarnings("rawtypes")
public class OrientMappingContext extends DocumentMappingContext {
/**
* Java types supported as orient property types.
* See: https://github.com/orientechnologies/orientdb/wiki/Types
*/
private static final Set<String> ORIENT_NATIVE_TYPES = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(
String.class.getName(),
Byte.class.getName(),
Short.class.getName(),
Integer.class.getName(),
Long.class.getName(),
Float.class.getName(),
Double.class.getName(),
BigDecimal.class.getName(),
Date.class.getName(),
byte[].class.getName(),
ORecordId.class.getName(),
ORecord.class.getName(),
ODocument.class.getName()
)));
/**
* Check whether a type is a native orient type that can be stored without conversion.
* @param clazz The class to check.
* @return true if no conversion is required and the type can be stored natively.
*/
public static boolean isOrientNativeType(Class clazz) {
return ( OrientMappingContext.ORIENT_NATIVE_TYPES.contains(clazz.getName())
                || ORecord.class.isAssignableFrom(clazz)
                || OSerializableStream.class.isAssignableFrom(clazz)
);
}
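    /*
     * Quick illustrative checks for the method above (java.net.URL is just an example
     * of a type that is not native to OrientDB):
     *
     *   OrientMappingContext.isOrientNativeType(String.class);       // true - listed native type
     *   OrientMappingContext.isOrientNativeType(ODocument.class);    // true - listed native type
     *   OrientMappingContext.isOrientNativeType(java.net.URL.class); // false - requires conversion
     */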
public OrientMappingContext(String defaultDatabaseName) {
super(defaultDatabaseName);
}
public OrientMappingContext(String defaultDatabaseName, Closure defaultMapping) {
super(defaultDatabaseName, defaultMapping);
}
@Override
protected MappingFactory createDocumentMappingFactory(Closure defaultMapping) {
OrientDocumentMappingFactory orientDocumentMappingFactory = new OrientDocumentMappingFactory();
orientDocumentMappingFactory.setDefaultMapping(defaultMapping);
return orientDocumentMappingFactory;
}
@Override
public PersistentEntity createEmbeddedEntity(Class type) {
return new DocumentEmbeddedPersistentEntity(type, this);
}
class DocumentEmbeddedPersistentEntity extends EmbeddedPersistentEntity {
private DocumentCollectionMapping classMapping ;
public DocumentEmbeddedPersistentEntity(Class type, MappingContext ctx) {
super(type, ctx);
classMapping = new DocumentCollectionMapping(this, ctx);
}
@Override
public ClassMapping getMapping() {
return classMapping;
}
public class DocumentCollectionMapping extends AbstractClassMapping<Collection> {
private Collection mappedForm;
public DocumentCollectionMapping(PersistentEntity entity, MappingContext context) {
super(entity, context);
this.mappedForm = (Collection) context.getMappingFactory().createMappedForm(DocumentEmbeddedPersistentEntity.this);
}
@Override
public Collection getMappedForm() {
return mappedForm ;
}
}
}
}
<file_sep>title=GORM for Neo4j
version=1.0.0
authors=<NAME><file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.jpa.query;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.grails.datastore.mapping.jpa.JpaSession;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.query.Query;
import org.grails.datastore.mapping.query.Restrictions;
import org.grails.datastore.mapping.query.jpa.JpaQueryBuilder;
import org.grails.datastore.mapping.query.jpa.JpaQueryInfo;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.orm.jpa.JpaCallback;
import org.springframework.orm.jpa.JpaTemplate;
/**
* Query implementation for JPA.
*
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings("rawtypes")
public class JpaQuery extends Query {
private static final Log LOG = LogFactory.getLog(JpaQuery.class);
public JpaQuery(JpaSession session, PersistentEntity entity) {
super(session, entity);
if (session == null) {
throw new InvalidDataAccessApiUsageException("Argument session cannot be null");
}
if (entity == null) {
throw new InvalidDataAccessApiUsageException("No persistent entity specified");
}
}
@Override
public JpaSession getSession() {
return (JpaSession) super.getSession();
}
@Override
public void add(Criterion criterion) {
if (criterion instanceof Equals) {
final Equals eq = (Equals) criterion;
Object resolved = resolveIdIfEntity(eq.getValue());
if (resolved != eq.getValue()) {
criterion = Restrictions.idEq(resolved);
}
}
criteria.add(criterion);
}
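    /*
     * Illustrative effect of add() above (the "author" property and authorInstance are
     * assumed): an Equals criterion whose value is itself a persistent entity is replaced
     * by an idEq criterion on that entity's resolved identifier before the JPQL is built.
     *
     *   query.add(Restrictions.eq("author", authorInstance));
     *   // added as Restrictions.idEq(<resolved identifier of authorInstance>)
     */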
@Override
protected List executeQuery(final PersistentEntity entity, final Junction criteria) {
final JpaTemplate jpaTemplate = getSession().getJpaTemplate();
if (!JpaSession.hasTransaction()) {
jpaTemplate.setFlushEager(false);
}
return (List)jpaTemplate.execute(new JpaCallback<Object>() {
public Object doInJpa(EntityManager em) throws PersistenceException {
return executeQuery(entity, criteria, em, false);
}
});
}
@Override
public Object singleResult() {
final JpaTemplate jpaTemplate = getSession().getJpaTemplate();
if (!JpaSession.hasTransaction()) {
jpaTemplate.setFlushEager(false);
}
try {
return jpaTemplate.execute(new JpaCallback<Object>() {
public Object doInJpa(EntityManager em) throws PersistenceException {
return executeQuery(entity, criteria, em, true);
}
});
} catch (EmptyResultDataAccessException e) {
return null;
}
}
Object executeQuery(final PersistentEntity entity, final Junction criteria, EntityManager em, boolean singleResult) {
JpaQueryBuilder queryBuilder = new JpaQueryBuilder(entity, criteria, projections, orderBy);
queryBuilder.setConversionService(session.getDatastore().getMappingContext().getConversionService());
JpaQueryInfo jpaQueryInfo = queryBuilder.buildSelect();
List parameters = jpaQueryInfo.getParameters();
final String queryToString = jpaQueryInfo.getQuery();
if (LOG.isDebugEnabled()) {
LOG.debug("Built JPQL to execute: " + queryToString);
}
final javax.persistence.Query q = em.createQuery(queryToString);
if (parameters != null) {
for (int i = 0, count = parameters.size(); i < count; i++) {
q.setParameter(i + 1, parameters.get(i));
}
}
q.setFirstResult(offset);
if (max > -1) {
q.setMaxResults(max);
}
if (!singleResult) {
return q.getResultList();
}
return q.getSingleResult();
}
}
<file_sep>package org.grails.datastore.mapping.jcr.util;
/**
* @author <NAME>
* @since 1.0
*/
public interface JcrConstants {
String DEFAULT_JCR_TYPE = "nt:unstructured";
String MIXIN_REFERENCEABLE = "mix:referenceable";
String MIXIN_VERSIONABLE = "mix:versionable";
String MIXIN_LOCKABLE = "mix:lockable";
}
<file_sep>title=GORM Datastore API
version=1.0.0
authors=<NAME><file_sep>package org.grails.datastore.mapping.orient.engine;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseComplex;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.record.impl.ODocument;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.core.SessionImplementor;
import org.grails.datastore.mapping.dirty.checking.DirtyCheckable;
import org.grails.datastore.mapping.engine.AssociationIndexer;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.engine.NativeEntryEntityPersister;
import org.grails.datastore.mapping.engine.PropertyValueIndexer;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.model.types.*;
import org.grails.datastore.mapping.orient.OrientSession;
import org.grails.datastore.mapping.query.Query;
import org.springframework.context.ApplicationEventPublisher;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
/**
 * NativeEntryEntityPersister implementation for OrientDB that persists entities
 * as {@link ODocument} records.
 *
 * @author emrul
*/
public class OrientEntityPersister extends NativeEntryEntityPersister<ODocument, Object> {
public OrientEntityPersister(MappingContext mappingContext, PersistentEntity entity, Session session, ApplicationEventPublisher publisher) {
super(mappingContext, entity, session, publisher);
}
public OrientSession getOrientSession() {
return (OrientSession)getSession();
}
protected ODatabaseDocumentTx getOrientDb() {
return (ODatabaseDocumentTx)getOrientSession().getNativeInterface();
}
protected ORecordId createRecordIdWithKey(Object key) {
ORecordId recId = null;
if ( key instanceof ORecordId ) {
recId = (ORecordId)key;
}
else if ( key instanceof String ) {
recId = new ORecordId((String)key);
}
return recId;
}
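    /*
     * Illustrative behaviour of createRecordIdWithKey (the record id values are made up):
     *
     *   createRecordIdWithKey(new ORecordId("#9:1")); // returned as-is
     *   createRecordIdWithKey("#9:1");                // parsed into a new ORecordId
     *   createRecordIdWithKey(42L);                   // unsupported key type -> null
     */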
@Override
public String getEntityFamily() {
return this.getPersistentEntity().getName();
}
@Override
protected void deleteEntry(String family, Object key, Object entry) {
getOrientDb().delete(createRecordIdWithKey(key));
}
@Override
protected Object generateIdentifier(PersistentEntity persistentEntity, ODocument entry) {
//PersistentEntity root = persistentEntity.getRootEntity();
//return persistentEntity.getName() + ":" + Integer.toString(entry.hashCode()); //To change body of implemented methods use File | Settings | File Templates.
return entry.hashCode();
}
@Override
public PropertyValueIndexer getPropertyIndexer(PersistentProperty property) {
        return null; // property value indexing is not supported for OrientDB
}
@Override
public AssociationIndexer getAssociationIndexer(ODocument nativeEntry, Association association) {
return new OrientAssociationIndexer(nativeEntry, association, (OrientSession) session);
}
@Override
protected ODocument createNewEntry(String family) {
return new ODocument(family);
}
@Override
protected Object getEntryValue(ODocument nativeEntry, String property) {
return nativeEntry.field(property);
}
@Override
protected void setEntryValue(ODocument nativeEntry, String key, Object value) {
nativeEntry.field(key, value);
}
@Override
protected ODocument retrieveEntry(PersistentEntity persistentEntity, String family, Serializable key) {
return getOrientDb().load(createRecordIdWithKey(key));
}
@Override
protected Object storeEntry(PersistentEntity persistentEntity, EntityAccess entityAccess, Object storeId, ODocument nativeEntry) {
nativeEntry.setClassName(persistentEntity.getName());
return getOrientDb().save(nativeEntry).getIdentity();
}
@Override
protected void updateEntry(PersistentEntity persistentEntity, EntityAccess entityAccess, Object key, ODocument entry) {
getOrientDb().save(entry);
}
@Override
protected void deleteEntries(String family, List<Object> keys) {
        // bulk deletion of entries is not implemented for OrientDB
}
@Override
public Query createQuery() {
        return null; // query support is not implemented for OrientDB yet
}
/**
* Formulates a database reference for the given entity, association and association id
*
* @param persistentEntity The entity being persisted
* @param association The association
* @param associationId The association id
* @return A database reference
*/
@Override
protected Object formulateDatabaseReference(PersistentEntity persistentEntity, Association association, Serializable associationId) {
SessionImplementor<Object> si = (SessionImplementor<Object>) session;
if ( associationId instanceof Integer) {
Object assocObj = si.getCachedEntry( association.getAssociatedEntity().getRootEntity(), associationId);
if (assocObj != null) {
return assocObj;
}
}
/*
if ( associationId instanceof String) {
String[] assocRef = ((String)associationId).split(":");
if ( assocRef.length == 2 ) {
Object assocObj = si.getCachedEntry( getMappingContext().getPersistentEntity(assocRef[0]), associationId);
if (assocObj != null) {
return assocObj;
}
}
}
*/
return associationId;
}
/*
@Override
protected Serializable persistEntity(PersistentEntity pe, Object obj) {
if (obj == null) {
//log.error("obj is null");
throw new IllegalStateException("obj is null");
}
return persistEntity(pe, obj, new HashSet());
}
protected Serializable persistEntity(PersistentEntity pe, Object obj, Collection persistingColl ) {
if (persistingColl.contains(obj)) {
return null;
} else {
persistingColl.add(obj);
}
boolean isDirty = obj instanceof DirtyCheckable ? ((DirtyCheckable)obj).hasChanged() : true;
if (getSession().containsPersistingInstance(obj) && (!isDirty)) {
return null;
}
EntityAccess entityAccess = createEntityAccess(pe, obj);
if (getMappingContext().getProxyFactory().isProxy(obj)) {
return (Serializable) entityAccess.getIdentifier();
}
getSession().addPersistingInstance(obj);
// cancel operation if vetoed
boolean isUpdate = entityAccess.getIdentifier() != null;
if (isUpdate) {
if (cancelUpdate(pe, entityAccess)) {
return null;
}
getSession().addPendingUpdate(new NodePendingUpdate(entityAccess, getCypherEngine(), getMappingContext()));
persistAssociationsOfEntity(pe, entityAccess, true, persistingColl);
firePostUpdateEvent(pe, entityAccess);
} else {
if (cancelInsert(pe, entityAccess)) {
return null;
}
getSession().addPendingInsert(new NodePendingInsert(getSession().getDatastore().nextIdForType(pe), entityAccess, getCypherEngine(), getMappingContext()));
persistAssociationsOfEntity(pe, entityAccess, false, persistingColl);
firePostInsertEvent(pe, entityAccess);
}
return (Serializable) entityAccess.getIdentifier();
}
private void persistAssociationsOfEntity(PersistentEntity pe, EntityAccess entityAccess, boolean isUpdate, Collection persistingColl) {
Object obj = entityAccess.getEntity();
DirtyCheckable dirtyCheckable = null;
if (obj instanceof DirtyCheckable) {
dirtyCheckable = (DirtyCheckable)obj;
}
for (PersistentProperty pp: pe.getAssociations()) {
if ((!isUpdate) || ((dirtyCheckable!=null) && dirtyCheckable.hasChanged(pp.getName()))) {
Object propertyValue = entityAccess.getProperty(pp.getName());
if ((pp instanceof OneToMany) || (pp instanceof ManyToMany)) {
Association association = (Association) pp;
if (propertyValue!= null) {
if (association.isBidirectional()) { // Populate other side of bidi
for (Object associatedObject: (Iterable)propertyValue) {
EntityAccess assocEntityAccess = createEntityAccess(association.getAssociatedEntity(), associatedObject);
assocEntityAccess.setProperty(association.getReferencedPropertyName(), obj);
}
}
Iterable targets = (Iterable) propertyValue;
persistEntities(association.getAssociatedEntity(), targets, persistingColl);
boolean reversed = RelationshipUtils.useReversedMappingFor(association);
if (!reversed) {
if (!(propertyValue instanceof LazyEnititySet)) {
LazyEnititySet les = new LazyEnititySet(entityAccess, association, getMappingContext().getProxyFactory(), getSession());
les.addAll(targets);
entityAccess.setProperty(association.getName(), les);
}
}
}
} else if (pp instanceof ToOne) {
if (propertyValue != null) {
ToOne to = (ToOne) pp;
if (to.isBidirectional()) { // Populate other side of bidi
EntityAccess assocEntityAccess = createEntityAccess(to.getAssociatedEntity(), propertyValue);
if (to instanceof OneToOne) {
assocEntityAccess.setProperty(to.getReferencedPropertyName(), obj);
} else {
Collection collection = (Collection) assocEntityAccess.getProperty(to.getReferencedPropertyName());
if (collection == null ) {
collection = new ArrayList();
assocEntityAccess.setProperty(to.getReferencedPropertyName(), collection);
}
if (!collection.contains(obj)) {
collection.add(obj);
}
}
}
persistEntity(to.getAssociatedEntity(), propertyValue, persistingColl);
boolean reversed = RelationshipUtils.useReversedMappingFor(to);
String relType = RelationshipUtils.relationshipTypeUsedFor(to);
if (!reversed) {
getSession().addPendingInsert(new RelationshipPendingDelete(entityAccess, relType, null , getCypherEngine()));
getSession().addPendingInsert(new RelationshipPendingInsert(entityAccess, relType,
new EntityAccess(to.getAssociatedEntity(), propertyValue),
getCypherEngine()));
}
}
} else {
throw new IllegalArgumentException("wtf don't know how to handle " + pp + "(" + pp.getClass() +")" );
}
}
}
}
*/
}
<file_sep>package org.grails.datastore.mapping.node.engine;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.CascadeType;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.CannotAcquireLockException;
import org.springframework.dao.DataIntegrityViolationException;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.core.SessionImplementor;
import org.grails.datastore.mapping.core.impl.PendingInsert;
import org.grails.datastore.mapping.core.impl.PendingInsertAdapter;
import org.grails.datastore.mapping.core.impl.PendingOperation;
import org.grails.datastore.mapping.core.impl.PendingOperationAdapter;
import org.grails.datastore.mapping.core.impl.PendingOperationExecution;
import org.grails.datastore.mapping.core.impl.PendingUpdate;
import org.grails.datastore.mapping.core.impl.PendingUpdateAdapter;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.engine.LockableEntityPersister;
import org.grails.datastore.mapping.engine.event.PreDeleteEvent;
import org.grails.datastore.mapping.model.ClassMapping;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.model.PropertyMapping;
import org.grails.datastore.mapping.model.types.Association;
import org.grails.datastore.mapping.model.types.OneToMany;
import org.grails.datastore.mapping.model.types.Simple;
import org.grails.datastore.mapping.model.types.ToOne;
import org.grails.datastore.mapping.node.mapping.Node;
import org.grails.datastore.mapping.node.mapping.NodeProperty;
import org.grails.datastore.mapping.proxy.ProxyFactory;
/**
 * Abstract EntityPersister implementation for node-style stores such as JCR
 * nodes or graph database nodes.
*
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings("unused")
public abstract class AbstractNodeEntityPersister<T, K> extends LockableEntityPersister {
protected String nodeEntity;
protected Session session;
protected ClassMapping classMapping;
public AbstractNodeEntityPersister(MappingContext mappingContext, PersistentEntity entity,
Session session, ApplicationEventPublisher publisher) {
super(mappingContext, entity, session, publisher);
this.session = session;
this.classMapping = entity.getMapping();
this.nodeEntity = getEntity(entity, classMapping);
}
public String getNodeEntity() {
return nodeEntity;
}
public void setNodeEntity(String nodeEntity) {
this.nodeEntity = nodeEntity;
}
protected String getEntity(PersistentEntity persistentEntity, ClassMapping<Node> cm) {
String enName = null;
if (cm.getMappedForm() != null) {
enName = cm.getMappedForm().getEntityName();
}
if (enName == null) enName = persistentEntity.getJavaClass().getSimpleName();
return enName;
}
@Override
protected List<Object> retrieveAllEntities(PersistentEntity persistentEntity, Serializable[] keys) {
List<Object> results = new ArrayList<Object>();
for (Serializable key : keys) {
results.add(retrieveEntity(persistentEntity, key));
}
return results;
}
@Override
protected List<Object> retrieveAllEntities(PersistentEntity persistentEntity, Iterable<Serializable> keys) {
List<Object> results = new ArrayList<Object>();
for (Serializable key : keys) {
results.add(retrieveEntity(persistentEntity, key));
}
return results;
}
/**
* This is a rather simplistic and unoptimized implementation. Subclasses can override to provide
* batch insert capabilities to optimize the insertion of multiple entities in one go
*
* @param persistentEntity The persistent entity
* @param objs The objects to persist
* @return A list of keys
*/
@Override
protected List<Serializable> persistEntities(PersistentEntity persistentEntity, Iterable objs) {
List<Serializable> keys = new ArrayList<Serializable>();
for (Object obj : objs) {
keys.add(persist(obj));
}
return keys;
}
@Override
protected Object retrieveEntity(PersistentEntity persistentEntity, Serializable key) {
T nativeEntry = retrieveEntry(persistentEntity, key);
if (nativeEntry != null) {
return createObjectFromNativeEntry(persistentEntity, key, nativeEntry);
}
return null;
}
protected Object createObjectFromNativeEntry(PersistentEntity persistentEntity, Serializable nativeKey, T nativeEntry) {
Object obj = newEntityInstance(persistentEntity);
refreshObjectStateFromNativeEntry(persistentEntity, obj, nativeKey, nativeEntry);
return obj;
}
protected void refreshObjectStateFromNativeEntry(PersistentEntity persistentEntity, Object obj, Serializable nativeKey, T nativeEntry) {
EntityAccess ea = new EntityAccess(persistentEntity, obj);
ea.setConversionService(getMappingContext().getConversionService());
String idName = ea.getIdentifierName();
ea.setProperty(idName, nativeKey);
final List<PersistentProperty> props = persistentEntity.getPersistentProperties();
for (final PersistentProperty prop : props) {
PropertyMapping<NodeProperty> pm = prop.getMapping();
String propKey = null;
if (pm.getMappedForm() != null) {
propKey = pm.getMappedForm().getName();
}
if (propKey == null) {
propKey = prop.getName();
}
if (prop instanceof Simple) {
ea.setProperty(prop.getName(), getEntryValue(nativeEntry, propKey));
} else if (prop instanceof ToOne) {
Serializable tmp = (Serializable) getEntryValue(nativeEntry, propKey);
PersistentEntity associatedEntity = prop.getOwner();
final Serializable associationKey = (Serializable) getMappingContext().getConversionService().convert(tmp, associatedEntity.getIdentity().getType());
if (associationKey != null) {
PropertyMapping<NodeProperty> associationPropertyMapping = prop.getMapping();
//boolean isLazy = isLazyAssociation(associationPropertyMapping);
final Class propType = prop.getType();
//if (isLazy) {
// Object proxy = getProxyFactory().createProxy(session, propType, associationKey);
// ea.setProperty(prop.getName(), proxy);
//} else {
ea.setProperty(prop.getName(), session.retrieve(propType, associationKey));
//}
}
} else if (prop instanceof OneToMany) {
Association association = (Association) prop;
PropertyMapping<NodeProperty> associationPropertyMapping = association.getMapping();
//boolean isLazy = isLazyAssociation(associationPropertyMapping);
//AssociationIndexer indexer = getAssociationIndexer(association);
nativeKey = (Serializable) getMappingContext().getConversionService().convert(nativeKey, getPersistentEntity().getIdentity().getType());
//if (isLazy) {
// if (List.class.isAssignableFrom(association.getType())) {
// ea.setPropertyNoConversion(association.getName(), new PersistentList(nativeKey, session));
// } else if (Set.class.isAssignableFrom(association.getType())) {
// ea.setPropertyNoConversion(association.getName(), new PersistentSet(nativeKey, session, indexer));
// }
// } else {
//if (indexer != null) {
// List keys = indexer.query(nativeKey);
ea.setProperty(association.getName(), session.retrieveAll(association.getAssociatedEntity().getJavaClass(), nativeKey));
// }
//}
}
}
}
/*private boolean isLazyAssociation(PropertyMapping<NodeProperty> associationPropertyMapping) {
if (associationPropertyMapping != null) {
NodeProperty np = associationPropertyMapping.getMappedForm();
if (np.getFetchStrategy() != FetchType.LAZY) {
return false;
}
}
return true;
}*/
@Override
protected Serializable persistEntity(final PersistentEntity persistentEntity, Object obj) {
ClassMapping<Node> cm = persistentEntity.getMapping();
T tmp = null;
final EntityAccess entityAccess = new EntityAccess(persistentEntity, obj);
K k = readObjectIdentifier(entityAccess, cm);
boolean isUpdate = k != null;
/* if (!isUpdate) {
tmp = createNewEntry(persistentEntity);
k = generateIdentifier(persistentEntity, tmp);
String id = entityAccess.getIdentifierName();
entityAccess.setProperty(id, k);
} else {
SessionImplementor<T> si = (SessionImplementor<T>) session;
tmp = si.getCachedEntry(persistentEntity, (Serializable) k);
if (tmp == null) {
tmp = retrieveEntry(persistentEntity, (Serializable) k);
}
if (tmp == null) {
tmp = createNewEntry(persistentEntity);
}
}*/
PendingOperation<T, K> pendingOperation;
SessionImplementor<Object> si = (SessionImplementor<Object>) session;
if (!isUpdate) {
tmp = createNewEntry(persistentEntity);
k = generateIdentifier(persistentEntity, tmp);
pendingOperation = new PendingInsertAdapter<T, K>(persistentEntity, k, tmp, entityAccess) {
public void run() {
executeInsert(persistentEntity, entityAccess, getNativeKey(), getNativeEntry());
}
};
String id = entityAccess.getIdentifierName();
entityAccess.setProperty(id, k);
}
else {
tmp = (T) si.getCachedEntry(persistentEntity, (Serializable) k);
if (tmp == null) {
tmp = retrieveEntry(persistentEntity, (Serializable) k);
}
if (tmp == null) {
tmp = createNewEntry(persistentEntity);
}
pendingOperation = new PendingUpdateAdapter<T, K>(persistentEntity, k, tmp, entityAccess) {
public void run() {
if (cancelUpdate(persistentEntity, entityAccess)) return;
updateEntry(persistentEntity, getNativeKey(), getNativeEntry());
firePostUpdateEvent(persistentEntity, entityAccess);
}
};
}
final T e = tmp;
final List<PersistentProperty> props = persistentEntity.getPersistentProperties();
final Map<OneToMany, List<Serializable>> oneToManyKeys = new HashMap<OneToMany, List<Serializable>>();
final Map<OneToMany, Serializable> inverseCollectionUpdates = new HashMap<OneToMany, Serializable>();
// final Map<PersistentProperty, Object> toIndex = new HashMap<PersistentProperty, Object>();
//final Map<PersistentProperty, Object> toUnindex = new HashMap<PersistentProperty, Object>();
for (PersistentProperty prop : props) {
PropertyMapping<NodeProperty> pm = prop.getMapping();
final NodeProperty nodeProperty = pm.getMappedForm();
String propName = null;
if (nodeProperty != null) {
propName = nodeProperty.getName();
}
//final boolean indexed = nodeProperty != null && nodeProperty.isIndex();
if (propName == null) propName = prop.getName();
//Single Entity
if (prop instanceof Simple) {
Object propValue = entityAccess.getProperty(prop.getName());
/* if (indexed) {
if (isUpdate) {
final Object oldValue = getEntryValue(e, propName);
if (oldValue != null && !oldValue.equals(propValue))
toUnindex.put(prop, oldValue);
}
toIndex.put(prop, propValue);
}*/
setEntryValue(e, propName, propValue);
} else if (prop instanceof OneToMany) {
final OneToMany oneToMany = (OneToMany) prop;
Object propValue = entityAccess.getProperty(oneToMany.getName());
if (propValue instanceof Collection) {
Collection associatedObjects = (Collection) propValue;
List<Serializable> keys = session.persist(associatedObjects);
oneToManyKeys.put(oneToMany, keys);
}
} else if (prop instanceof ToOne) {
ToOne association = (ToOne) prop;
if (association.doesCascade(CascadeType.PERSIST)) {
if (!association.isForeignKeyInChild()) {
final Object associatedObject = entityAccess.getProperty(prop.getName());
if (associatedObject != null) {
@SuppressWarnings("hiding") ProxyFactory proxyFactory = getProxyFactory();
// never cascade to proxies
if (!proxyFactory.isProxy(associatedObject)) {
Serializable associationId;
AbstractNodeEntityPersister associationPersister = (AbstractNodeEntityPersister) session.getPersister(associatedObject);
if (!session.contains(associatedObject)) {
Serializable tempId = associationPersister.getObjectIdentifier(associatedObject);
if (tempId == null) tempId = session.persist(associatedObject);
associationId = tempId;
} else {
associationId = associationPersister.getObjectIdentifier(associatedObject);
}
/* if (indexed) {
toIndex.put(prop, associationId);
if (isUpdate) {
final Object oldValue = getEntryValue(e, propName);
if (oldValue != null && !oldValue.equals(associatedObject))
toUnindex.put(prop, oldValue);
}
}*/
setEntryValue(e, propName, associationId);
if (association.isBidirectional()) {
Association inverse = association.getInverseSide();
if (inverse instanceof OneToMany) {
inverseCollectionUpdates.put((OneToMany) inverse, associationId);
}
}
}
} else {
if (!association.isNullable() && !association.isCircular()) {
                                throw new DataIntegrityViolationException("Cannot save object [" + entityAccess.getEntity() + "] of type [" + persistentEntity + "]. The association [" + association + "] cannot be null.");
}
}
}
}
}// End
}
if (!isUpdate) {
            // if the identifier is null at this point it means the datastore could not generate an identifier up front
            // and the identifier is only generated upon insert of the entity
final K updateId = k;
PendingOperation postOperation = new PendingOperationAdapter<T, K>(persistentEntity, k, e) {
public void run() {
if (cancelInsert(persistentEntity, entityAccess)) {
return;
}
storeEntry(persistentEntity, updateId, e);
firePostInsertEvent(persistentEntity, entityAccess);
for (OneToMany inverseCollection : inverseCollectionUpdates.keySet()) {
final Serializable primaryKey = inverseCollectionUpdates.get(inverseCollection);
final AbstractNodeEntityPersister inversePersister = (AbstractNodeEntityPersister) session.getPersister(inverseCollection.getOwner());
//final AssociationIndexer associationIndexer = inversePersister.getAssociationIndexer(e, inverseCollection);
//associationIndexer.index(primaryKey, updateId );
}
}
};
pendingOperation.addCascadeOperation(postOperation);
            // If the key is still null at this point we have to execute the pending operation now to get the key
if (k == null) {
PendingOperationExecution.executePendingOperation(pendingOperation);
}
else {
si.addPendingInsert((PendingInsert) pendingOperation);
}
}
else {
final K updateId = k;
PendingOperation postOperation = new PendingOperationAdapter<T, K>(persistentEntity, k, e) {
public void run() {
// updateOneToManyIndices(e, updateId, oneToManyKeys);
// if (doesRequirePropertyIndexing())
// updatePropertyIndices(updateId, toIndex, toUnindex);
if (cancelUpdate(persistentEntity, entityAccess)) {
return;
}
updateEntry(persistentEntity, updateId, e);
firePostUpdateEvent(persistentEntity, entityAccess);
}
};
pendingOperation.addCascadeOperation(postOperation);
si.addPendingUpdate((PendingUpdate) pendingOperation);
}
/* if (!isUpdate) {
SessionImplementor si = (SessionImplementor) session;
final K updateId = k;
si.getPendingInserts().add(new Runnable() {
public void run() {
for (EntityInterceptor interceptor : interceptors) {
if (!interceptor.beforeInsert(persistentEntity, entityAccess)) return;
}
storeEntry(persistentEntity, updateId, e);
}
});
} else {
SessionImplementor si = (SessionImplementor) session;
final K updateId = k;
si.getPendingUpdates().add(new Runnable() {
public void run() {
for (EntityInterceptor interceptor : interceptors) {
if (!interceptor.beforeUpdate(persistentEntity, entityAccess)) return;
}
updateEntry(persistentEntity, updateId, e);
}
});
}*/
return (Serializable) k;
}
@Override
protected void deleteEntities(PersistentEntity persistentEntity, Iterable objects) {
}
@Override
protected void deleteEntity(PersistentEntity persistentEntity, Object obj) {
if (obj == null) {
return;
}
EntityAccess entityAccess = createEntityAccess(persistentEntity, obj);
PreDeleteEvent event = new PreDeleteEvent(session.getDatastore(), persistentEntity,
entityAccess);
publisher.publishEvent(event);
if (event.isCancelled()) {
return;
}
final K key = readIdentifierFromObject(obj);
if (key == null) {
return;
}
deleteEntry(key);
firePostDeleteEvent(persistentEntity, entityAccess);
}
private K readIdentifierFromObject(Object object) {
EntityAccess access = new EntityAccess(getPersistentEntity(), object);
access.setConversionService(getMappingContext().getConversionService());
final Object idValue = access.getIdentifier();
K key = null;
if (idValue != null) {
key = inferNativeKey(idValue);
}
return key;
}
@Override
public final Object lock(Serializable id) throws CannotAcquireLockException {
return lock(id, DEFAULT_TIMEOUT);
}
@Override
public final Object lock(Serializable id, int timeout) throws CannotAcquireLockException {
lockEntry(getPersistentEntity(), id, timeout);
return retrieve(id);
}
/**
* Subclasses can override to provide locking semantics
*
     * @param persistentEntity The PersistentEntity instance
     * @param id The identifier
* @param timeout The lock timeout in seconds
*/
protected void lockEntry(PersistentEntity persistentEntity, Serializable id, int timeout) {
// do nothing,
}
/**
* Subclasses can override to provide locking semantics
*
* @param o The object
* @return True if the object is locked
*/
@Override
public boolean isLocked(Object o) {
return false;
}
@Override
public final void unlock(Object o) {
unlockEntry(getPersistentEntity(), (Serializable) createEntityAccess(getPersistentEntity(), o).getIdentifier());
}
/**
     * Subclasses can override to provide locking semantics
*
* @param persistentEntity The persistent entity
     * @param id The identifier
*/
protected void unlockEntry(PersistentEntity persistentEntity, Serializable id) {
// do nothing
}
public Serializable refresh(Object o) {
final PersistentEntity entity = getPersistentEntity();
EntityAccess ea = createEntityAccess(entity, o);
Serializable identifier = (Serializable) ea.getIdentifier();
if (identifier != null) {
final T entry = retrieveEntry(entity, identifier);
refreshObjectStateFromNativeEntry(entity, o, identifier, entry);
return identifier;
}
return null;
}
protected K readObjectIdentifier(EntityAccess entityAccess, ClassMapping<Node> cm) {
return (K) entityAccess.getIdentifier();
}
protected abstract K generateIdentifier(PersistentEntity persistentEntity, T tmp);
/**
* Used to establish the native key to use from the identifier defined by the object
*
* @param identifier The identifier specified by the object
* @return The native key which may just be a cast from the identifier parameter to K
*/
protected K inferNativeKey(Object identifier) {
return (K) identifier;
}
/**
* Deletes a single entry
*
* @param key The identity
*/
protected abstract void deleteEntry(K key);
/**
* Reads a value for the given key from the native entry
*
* @param nativeEntry The native entry. Could be a JCR Node etc.
* @param property The property key
* @return The value
*/
protected abstract Object getEntryValue(T nativeEntry, String property);
/**
* Reads the native form of a Node datastore entry. This could be
     * a JCR Node, a graph node, etc.
*
* @param persistentEntity The persistent entity
* @param key The key
* @return The native form
*/
protected abstract T retrieveEntry(PersistentEntity persistentEntity, Serializable key);
/**
* Creates a new entry for the given Node.
*
     * @param persistentEntity The persistent entity
* @return An entry such as a JCR Node etc.
*/
protected abstract T createNewEntry(PersistentEntity persistentEntity);
/**
* Sets a value on an entry
*
* @param nativeEntry The native entry such as a JCR Node etc.
* @param propertyName The Property Name
* @param value The value
*/
protected abstract void setEntryValue(T nativeEntry, String propertyName, Object value);
/**
     * Stores the native form of a node-style entry to the actual data store
*
* @param persistentEntity The persistent entity
* @param id The id of the object to store
     * @param nativeEntry The native form. Could be a JCR Node, etc.
     * @return The native identity
*/
protected abstract K storeEntry(PersistentEntity persistentEntity, K id, T nativeEntry);
/**
     * Updates an existing entry in the actual datastore
*
* @param persistentEntity The PersistentEntity
* @param id The id of the object to update
* @param entry The entry
*/
protected abstract void updateEntry(PersistentEntity persistentEntity, K id, T entry);
/**
* Executes an insert for the given entity, entity access, identifier and native entry.
* Any before interceptors will be triggered
*
     * @param persistentEntity The persistent entity
     * @param entityAccess The entity access for the object being inserted
     * @param id The native identifier
     * @param e The native entry
* @return The key
*/
protected K executeInsert(final PersistentEntity persistentEntity,
final EntityAccess entityAccess,
final K id, final T e) {
if (cancelInsert(persistentEntity, entityAccess)) return null;
final K newId = storeEntry(persistentEntity, id, e);
entityAccess.setIdentifier(newId);
firePostInsertEvent(persistentEntity, entityAccess);
return newId;
}
}
<file_sep>version = "1.3.0.BUILD-SNAPSHOT"
configurations {
grails
}
dependencies {
compile("com.gmongo:gmongo:1.2") {
transitive = false
}
grails("org.grails:grails-core:$grailsVersion")
grails("org.grails:grails-bootstrap:$grailsVersion") {
transitive = false
}
compile project(":grails-datastore-gorm"),
project(":grails-datastore-gorm-plugin-support"),
project(":grails-datastore-mongo"),
project(":grails-datastore-core")
testCompile project(":grails-datastore-gorm-test"),
project(":grails-datastore-gorm-tck")
testRuntime "javax.servlet:servlet-api:2.5"
testRuntime "org.grails:grails-web:$grailsVersion", {
transitive = false
}
}
sourceSets {
main {
compileClasspath += configurations.grails
}
}
//test {
// if (System.getProperty('debug', 'false') == 'true') {
// jvmArgs '-server','-Xmx2048M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m', '-Xdebug',
// '-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005'
// }
// else {
// jvmArgs '-server','-Xmx2048M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m'
// }
//}<file_sep>package org.grails.datastore.mapping.appengine.testsupport;
import com.google.appengine.api.datastore.dev.LocalDatastoreService;
import com.google.appengine.tools.development.ApiProxyLocal;
import com.google.appengine.tools.development.ApiProxyLocalFactory;
import com.google.appengine.tools.development.LocalServerEnvironment;
import com.google.apphosting.api.ApiProxy;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.File;
/**
* Base test case class for datastore tests taken from the
* <a href="http://code.google.com/intl/fr/appengine/docs/java/howto/unittesting.html">Google App Engine testing documentation</a>.
*
* @author <NAME>
*/
public abstract class AppEngineDatastoreTestCase {
@BeforeClass
public static void setUp() throws Exception {
ApiProxy.setEnvironmentForCurrentThread(new TestEnvironment());
ApiProxyLocalFactory factory = new ApiProxyLocalFactory();
ApiProxyLocal proxyLocal = factory.create(new LocalServerEnvironment() {
public File getAppDir() {
return new File(".");
}
public String getAddress() {
return "localhost";
}
public int getPort() {
return 8080;
}
public void waitForServerToStart() throws InterruptedException {
                // no-op: the local test environment does not need to wait for startup
}
});
proxyLocal.setProperty(LocalDatastoreService.NO_STORAGE_PROPERTY, Boolean.TRUE.toString());
ApiProxy.setDelegate(proxyLocal);
}
@AfterClass
public static void tearDown() throws Exception {
ApiProxyLocal proxy = (ApiProxyLocal) ApiProxy.getDelegate();
LocalDatastoreService datastoreService = (LocalDatastoreService) proxy.getService(LocalDatastoreService.PACKAGE);
datastoreService.clearProfiles();
// not strictly necessary to null these out but there's no harm either
ApiProxy.setDelegate(null);
ApiProxy.setEnvironmentForCurrentThread(null);
}
}<file_sep>package org.grails.datastore.mapping.appengine;
import com.google.appengine.api.datastore.*;
import org.grails.datastore.mapping.appengine.engine.AppEngineEntityPersister;
import org.grails.datastore.mapping.core.AbstractSession;
import org.grails.datastore.mapping.core.Datastore;
import org.grails.datastore.mapping.engine.Persister;
import org.grails.datastore.mapping.keyvalue.KeyValueSession;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.transactions.Transaction;
import java.util.*;
/**
* Google App Engine session to the datastore
*
* @author <NAME>
* @author <NAME>
*
* @since 1.0
*/
public class AppEngineSession extends AbstractSession<DatastoreService> implements KeyValueSession<Key> {
protected DatastoreService datastoreService = DatastoreServiceFactory.getDatastoreService();
private AppEngineTransaction transaction;
/**
* Create a new Google App Engine session to the datastore.
*
     * @param ds The owning datastore
     * @param mappingContext The mapping context
*/
    public AppEngineSession(Datastore ds, MappingContext mappingContext) {
super(ds, mappingContext);
}
public Key store(String table, Map object) {
Entity entity = new Entity(table);
Set keySet = object.keySet();
for (Object aKeySet : keySet) {
String propertyName = (String) aKeySet;
Object value = object.get(propertyName);
entity.setProperty(propertyName, value);
}
return datastoreService.put(entity);
}
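    /*
     * Minimal usage sketch ("Book" and the property values are invented): store() copies
     * every map entry onto a new Entity of the given kind and returns the generated Key.
     *
     *   Map<String, Object> props = new HashMap<String, Object>();
     *   props.put("title", "GORM");
     *   props.put("pages", 120);
     *   Key key = session.store("Book", props);
     *   Map<String, Object> loaded = session.retrieve(key); // returns the same properties
     */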
public Map<String, Object> retrieve(Key key) {
try {
Entity entity = datastoreService.get(key);
return entity.getProperties();
} catch (EntityNotFoundException e) {
return null;
}
}
public List<Map<String, Object>> retrieve(Key... keys) {
        List<Key> keysList = new ArrayList<Key>();
        keysList.addAll(Arrays.asList(keys));
        List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();
        Map<Key, Entity> keyEntityMap = datastoreService.get(keysList);
        Set<Key> keySet = keyEntityMap.keySet();
        for (Key aKey : keySet) {
            Entity value = keyEntityMap.get(aKey);
results.add(value.getProperties());
}
return results;
}
public void delete(Key... keys) {
datastoreService.delete(Arrays.asList(keys));
}
/**
     * @return always true; a session is always connected to the Google App Engine datastore
*/
public boolean isConnected() {
return true;
}
/**
* Start a new transaction.
*
* @return a started transaction
*/
@Override
protected Transaction beginTransactionInternal() {
AppEngineTransaction engineTransaction = new AppEngineTransaction(DatastoreServiceFactory.getDatastoreService().beginTransaction());
this.transaction = engineTransaction;
return engineTransaction;
}
public DatastoreService getNativeInterface() {
return datastoreService;
}
@Override
protected Persister createPersister(Class cls, MappingContext mappingContext) {
PersistentEntity entity = mappingContext.getPersistentEntity(cls.getName());
if (entity != null) {
            return new AppEngineEntityPersister(mappingContext, entity, this, datastoreService);
}
return null;
}
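    // Illustrative usage sketch of the key/value API above (editorial assumption, not part of the
    // original source); the session would normally be obtained from an AppEngineDatastore:
    //
    //   AppEngineSession session = (AppEngineSession) datastore.connect();
    //   Key key = session.store("Person", Collections.singletonMap("name", "Bob"));
    //   Map<String, Object> row = session.retrieve(key);   // => {name=Bob}
    //   session.delete(key);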
}
<file_sep>app.grails.version=2.3.2
<file_sep>configurations {
optional
}
dependencies {
compile(project(":grails-datastore-gorm-hibernate-core"))
// Web dependencies optional
optional("org.grails:grails-web:$grailsVersion") {
transitive = false
}
optional("org.grails:grails-spring:$grailsVersion") {
transitive = false
}
optional("org.springframework:spring-webmvc:$springVersion") {
transitive = false
}
optional 'javax.servlet:servlet-api:2.5'
optional(project(":grails-datastore-web"))
testCompile project(":grails-datastore-gorm-test")
testCompile project(":grails-datastore-gorm-tck")
testCompile "com.h2database:h2:1.3.164"
testCompile "net.sf.ehcache:ehcache-core:2.4.6"
testCompile "org.hibernate:hibernate-ehcache:3.6.10.Final"
}
sourceSets {
main {
compileClasspath += configurations.optional
}
javadoc {
classpath = configurations.compile + configurations.optional
}
}
idea {
module {
scopes.PROVIDED.plus += configurations.optional
}
}
eclipse {
classpath {
plusConfigurations += configurations.optional
}
}
test {
testLogging {
exceptionFormat ='full'
}
forkEvery = 30
maxParallelForks = 4
jvmArgs '-server','-Xmx1024M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m','-XX:+CMSClassUnloadingEnabled','-XX:+HeapDumpOnOutOfMemoryError'
}
compileTestGroovy {
groovyOptions.fork(memoryMaximumSize: '1024m')
}
/*
test {
jvmArgs '-Xmx1024m', '-Xdebug', '-Xnoagent', '-Dgrails.full.stacktrace=true', '-Djava.compiler=NONE',
'-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005'
}
*/
<file_sep>title=GORM for AWS SimpleDB
version=0.3
authors=<NAME>
footer=Official production and development support for plugin is available via OSMoss: <a href="http://www.osmoss.com/project/grails-gorm-simpledb">http://www.osmoss.com/project/grails-gorm-simpledb</a><file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.mongo.config;
import com.mongodb.DBObject;
import groovy.lang.Closure;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Pattern;
import org.bson.BSONObject;
import org.bson.types.BSONTimestamp;
import org.bson.types.Code;
import org.bson.types.CodeWScope;
import org.bson.types.Symbol;
import org.grails.datastore.gorm.mongo.geo.*;
import org.grails.datastore.mapping.config.AbstractGormMappingFactory;
import org.grails.datastore.mapping.config.Property;
import org.grails.datastore.mapping.document.config.Collection;
import org.grails.datastore.mapping.document.config.DocumentMappingContext;
import org.grails.datastore.mapping.model.AbstractClassMapping;
import org.grails.datastore.mapping.model.ClassMapping;
import org.grails.datastore.mapping.model.EmbeddedPersistentEntity;
import org.grails.datastore.mapping.model.IdentityMapping;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.MappingFactory;
import org.grails.datastore.mapping.model.PersistentEntity;
import com.mongodb.DBRef;
/**
* Models a {@link org.grails.datastore.mapping.model.MappingContext} for Mongo.
*
* @author <NAME>
*/
@SuppressWarnings("rawtypes")
public class MongoMappingContext extends DocumentMappingContext {
/**
* Java types supported as mongo property types.
*/
private static final Set<String> MONGO_NATIVE_TYPES = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(
Double.class.getName(),
String.class.getName(),
com.mongodb.DBObject.class.getName(),
org.bson.types.Binary.class.getName(),
org.bson.types.ObjectId.class.getName(),
DBRef.class.getName(),
Boolean.class.getName(),
Date.class.getName(),
Pattern.class.getName(),
Symbol.class.getName(),
Integer.class.getName(),
BSONTimestamp.class.getName(),
Code.class.getName(),
CodeWScope.class.getName(),
Long.class.getName(),
UUID.class.getName(),
byte[].class.getName(),
Byte.class.getName()
)));
/**
* Check whether a type is a native mongo type that can be stored by the mongo driver without conversion.
* @param clazz The class to check.
* @return true if no conversion is required and the type can be stored natively.
*/
public static boolean isMongoNativeType(Class clazz) {
return MongoMappingContext.MONGO_NATIVE_TYPES.contains(clazz.getName()) ||
                DBObject.class.isAssignableFrom(clazz);
}
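    // For example (illustrative only, not part of the original source):
    //   isMongoNativeType(String.class)               // true  -> stored by the driver as-is
    //   isMongoNativeType(java.math.BigDecimal.class) // false -> converted before storage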
private final class MongoDocumentMappingFactory extends
AbstractGormMappingFactory<MongoCollection, MongoAttribute> {
@Override
protected Class<MongoAttribute> getPropertyMappedFormType() {
return MongoAttribute.class;
}
@Override
protected Class<MongoCollection> getEntityMappedFormType() {
return MongoCollection.class;
}
@Override
protected IdentityMapping getIdentityMappedForm(final ClassMapping classMapping, final MongoAttribute property) {
if (property == null) {
return super.getIdentityMappedForm(classMapping, property);
}
return new IdentityMapping() {
public String[] getIdentifierName() {
if (property.getName() == null) {
return new String[] { MappingFactory.IDENTITY_PROPERTY };
}
return new String[] { property.getName()};
}
public ClassMapping getClassMapping() {
return classMapping;
}
public Property getMappedForm() {
return property;
}
};
}
@Override
public boolean isSimpleType(Class propType) {
if (propType == null) return false;
if (propType.isArray()) {
return isSimpleType(propType.getComponentType()) || super.isSimpleType(propType);
}
return isMongoNativeType(propType) || super.isSimpleType(propType);
}
}
public MongoMappingContext(String defaultDatabaseName) {
super(defaultDatabaseName);
registerMongoTypes();
}
public MongoMappingContext(String defaultDatabaseName, Closure defaultMapping) {
super(defaultDatabaseName, defaultMapping);
registerMongoTypes();
}
protected void registerMongoTypes() {
MappingFactory.registerCustomType(new GeometryCollectionType());
MappingFactory.registerCustomType(new PointType());
MappingFactory.registerCustomType(new PolygonType());
MappingFactory.registerCustomType(new LineStringType());
MappingFactory.registerCustomType(new MultiLineStringType());
MappingFactory.registerCustomType(new MultiPointType());
MappingFactory.registerCustomType(new MultiPolygonType());
MappingFactory.registerCustomType(new ShapeType());
MappingFactory.registerCustomType(new BoxType());
MappingFactory.registerCustomType(new CircleType());
}
@Override
protected MappingFactory createDocumentMappingFactory(Closure defaultMapping) {
MongoDocumentMappingFactory mongoDocumentMappingFactory = new MongoDocumentMappingFactory();
mongoDocumentMappingFactory.setDefaultMapping(defaultMapping);
return mongoDocumentMappingFactory;
}
@Override
public PersistentEntity createEmbeddedEntity(Class type) {
return new DocumentEmbeddedPersistentEntity(type, this);
}
class DocumentEmbeddedPersistentEntity extends EmbeddedPersistentEntity {
        private DocumentCollectionMapping classMapping;
public DocumentEmbeddedPersistentEntity(Class type, MappingContext ctx) {
super(type, ctx);
classMapping = new DocumentCollectionMapping(this, ctx);
}
@Override
public ClassMapping getMapping() {
return classMapping;
}
public class DocumentCollectionMapping extends AbstractClassMapping<Collection> {
private Collection mappedForm;
public DocumentCollectionMapping(PersistentEntity entity, MappingContext context) {
super(entity, context);
this.mappedForm = (Collection) context.getMappingFactory().createMappedForm(DocumentEmbeddedPersistentEntity.this);
}
@Override
public Collection getMappedForm() {
                return mappedForm;
}
}
}
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.gemfire;
import org.springframework.context.ApplicationEventPublisher;
import org.grails.datastore.mapping.core.AbstractSession;
import org.grails.datastore.mapping.engine.Persister;
import org.grails.datastore.mapping.gemfire.engine.GemfireEntityPersister;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.transactions.Transaction;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheTransactionManager;
/**
* Implementation of the {@link org.grails.datastore.mapping.core.Session} interface
* that interacts with a Gemfire cache implementation
*
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings("rawtypes")
public class GemfireSession extends AbstractSession<Cache> {
public GemfireSession(GemfireDatastore datastore, MappingContext mappingContext,
ApplicationEventPublisher publisher) {
super(datastore, mappingContext, publisher);
}
@Override
protected Persister createPersister(Class cls, MappingContext mappingContext) {
final PersistentEntity entity = mappingContext.getPersistentEntity(cls.getName());
return entity == null ? null : new GemfireEntityPersister(mappingContext, entity, this, publisher);
}
@Override
protected Transaction<CacheTransactionManager> beginTransactionInternal() {
GemfireDatastore datastore = (GemfireDatastore) getDatastore();
final CacheTransactionManager tm = datastore.getGemfireCache().getCacheTransactionManager();
tm.begin();
return new GemfireTransaction(tm);
}
// TODO: Support Client Caches, here we assume a peer cache
// public boolean isConnected() {}
public Object getNativeInterface() {
return ((GemfireDatastore)getDatastore()).getGemfireCache();
}
private class GemfireTransaction implements Transaction<CacheTransactionManager> {
private CacheTransactionManager transactionManager;
public GemfireTransaction(CacheTransactionManager transactionManager) {
this.transactionManager = transactionManager;
}
public void commit() {
transactionManager.commit();
}
public void rollback() {
transactionManager.rollback();
}
public CacheTransactionManager getNativeTransaction() {
return transactionManager;
}
public boolean isActive() {
return transactionManager.exists();
}
public void setTimeout(int timeout) {
// noop
}
}
}
<file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.jpa.config;
import java.lang.reflect.Field;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Table;
import groovy.lang.Closure;
import org.grails.datastore.mapping.config.groovy.MappingConfigurationBuilder;
import org.grails.datastore.mapping.model.MappingFactory;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.model.config.GormProperties;
import org.grails.datastore.mapping.reflect.ClassPropertyFetcher;
import org.springframework.beans.BeanUtils;
/**
* MappingFactory for JPA that maps entities to {@link Table} instances and properties
* to {@link Column} instances.
*
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public class JpaMappingFactory extends MappingFactory<org.grails.datastore.mapping.jpa.config.Table, org.grails.datastore.mapping.jpa.config.Column> {
@Override
public org.grails.datastore.mapping.jpa.config.Table createMappedForm(PersistentEntity entity) {
org.grails.datastore.mapping.jpa.config.Table mappedForm = new org.grails.datastore.mapping.jpa.config.Table((Table) entity.getJavaClass().getAnnotation(Table.class));
ClassPropertyFetcher cpf = ClassPropertyFetcher.forClass(entity.getJavaClass());
MappingConfigurationBuilder builder = new MappingConfigurationBuilder(mappedForm, org.grails.datastore.mapping.jpa.config.Table.class);
List<Closure> values = cpf.getStaticPropertyValuesFromInheritanceHierarchy(GormProperties.MAPPING, Closure.class);
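        // iterate in reverse so that, assuming the returned list runs from subclass to superclass,
        // closures declared higher in the hierarchy are evaluated first and subclasses can override them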
for (int i = values.size(); i > 0; i--) {
Closure value = values.get(i - 1);
builder.evaluate(value);
}
values = cpf.getStaticPropertyValuesFromInheritanceHierarchy(GormProperties.CONSTRAINTS, Closure.class);
for (int i = values.size(); i > 0; i--) {
Closure value = values.get(i - 1);
builder.evaluate(value);
}
return mappedForm;
}
@Override
public org.grails.datastore.mapping.jpa.config.Column createMappedForm(PersistentProperty mpp) {
Field field;
try {
field = mpp.getOwner().getJavaClass().getDeclaredField(mpp.getName());
return new org.grails.datastore.mapping.jpa.config.Column(field.getAnnotation(Column.class));
} catch (SecurityException e) {
return null;
} catch (NoSuchFieldException e) {
return null;
}
}
}
<file_sep>export GRADLE_OPTS="-Xmx2048m -Xms256m -XX:MaxPermSize=768m -XX:+CMSClassUnloadingEnabled -XX:+HeapDumpOnOutOfMemoryError"
EXIT_STATUS=0
./gradlew grails-datastore-gorm-hibernate:test || EXIT_STATUS=$?
./gradlew grails-datastore-gorm-hibernate4:test || EXIT_STATUS=$?
./gradlew grails-datastore-gorm-mongodb:test || EXIT_STATUS=$?
./gradlew grails-datastore-gorm-redis:test || EXIT_STATUS=$?
./gradlew grails-datastore-gorm-test:test || EXIT_STATUS=$?
version=$(grep 'projectVersion =' build.gradle)
version=${version//[[:blank:]]/}
version="${version#*=}";
version=${version//\"/}
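# e.g. a build.gradle line of the form 'projectVersion = "3.0.0.BUILD-SNAPSHOT"' leaves
# version=3.0.0.BUILD-SNAPSHOT (illustrative value; the real version comes from build.gradle)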
releaseType=$(grep 'releaseType =' build.gradle | egrep -v ^[[:blank:]]*\/\/ | egrep -v ^[[:blank:]]*isBuildSnapshot)
releaseType=${releaseType//[[:blank:]]/}
releaseType="${releaseType#*=}";
releaseType=${releaseType//\"/}
echo "Project Version: $version $releaseType"
if [[ $releaseType != *-SNAPSHOT* ]]
then
./gradlew allDocs
base_dir=$(pwd)
echo "BASE DIR = $base_dir"
git config --global user.name "$GIT_NAME"
git config --global user.email "$GIT_EMAIL"
git config --global credential.helper "store --file=~/.git-credentials"
echo "https://$GH_TOKEN:@github.com" > ~/.git-credentials
git clone https://${GH_TOKEN}@github.com/grails/grails-data-mapping.git -b gh-pages gh-pages --single-branch > /dev/null
cd gh-pages
echo "Making directory for Version: $version"
mkdir -p "$version"
cd "$version"
current_dir=$(pwd)
git rm -rf .
mkdir -p "$current_dir"
echo "Current Directory: $current_dir"
cp -r "$base_dir/build/docs/." "$current_dir/"
cd ..
mkdir -p current
cd current
current_dir=$(pwd)
git rm -rf .
mkdir -p "$current_dir"
echo "Current Directory: $current_dir"
cp -r "$base_dir/build/docs/." "$current_dir/"
cd ..
cp -r "$base_dir/build/docs/." ./
git add .
git commit -a -m "Updating docs for Travis build: https://travis-ci.org/grails/grails-data-mapping/builds/$TRAVIS_BUILD_ID"
git push origin HEAD
cd ..
rm -rf gh-pages
fi
exit $EXIT_STATUS
<file_sep>repositories {
mavenRepo urls:'http://maven-gae-plugin.googlecode.com/svn/repository/'
}
dependencies {
compile project(":grails-datastore-core")
compile "com.google.appengine:appengine-api-1.0-sdk:1.3.4"
compile "com.google.appengine:appengine-api-stubs:1.3.4"
testCompile "com.google.appengine:appengine-tools-api:1.3.4"
testCompile "com.google.appengine:appengine-testing:1.3.4"
testCompile "com.google.appengine:appengine-local-runtime:1.3.4"
testRuntime "com.google.appengine:appengine-api-labs:1.3.4"
}
<file_sep>version = "1.0.0.BUILD-SNAPSHOT"
repositories {
maven { url "http://dist.gemstone.com/maven/release" }
}
dependencies {
compile project(":grails-datastore-core")
compile('org.springframework.data.gemfire:spring-gemfire:1.0.1.RELEASE') {
exclude module:'spring-context'
exclude module:'spring-tx'
}
compile('com.gemstone.gemfire:gemfire:6.5.1.4') {
exclude module:'antlr'
}
}
<file_sep>configurations {
optional
}
dependencies {
compile project(":grails-datastore-core")
compile("org.grails:grails-core:$grailsVersion") {
exclude group:'aopalliance',module:'aopalliance'
exclude group:'commons-logging',module:'commons-logging'
exclude group:'commons-lang',module:'commons-lang'
// TODO: When the commons-collection dependency on grails-core is removed, then uncomment this
// exclude group:'commons-collections',module:'commons-collections'
exclude group:'commons-io',module:'commons-io'
exclude group:'org.grails',module:'grails-spring'
exclude group:'org.grails',module:'grails-bootstrap'
exclude group:'org.aspectj',module:'aspectjweaver'
exclude group:'org.aspectj',module:'aspectjrt'
exclude group:'oro',module:'oro'
exclude group:'asm',module:'asm'
exclude group:'cglib',module:'cglib'
exclude group:'xalan',module:'serializer'
exclude group:'org.springframework',module:'spring-aspects'
exclude group:'org.springframework',module:'spring-beans'
exclude group:'org.springframework',module:'spring-context'
exclude group:'org.springframework',module:'spring-context-support'
exclude group:'org.springframework',module:'spring-expression'
exclude group:'org.springframework',module:'spring-web'
exclude group:'org.springframework',module:'spring-aop'
exclude group:'commons-logging',module:'commons-logging'
exclude group:'commons-validator',module:'commons-validator'
exclude group:'com.googlecode.concurrentlinkedhashmap',module:'concurrentlinkedhashmap-lru'
}
compile("org.grails:grails-bootstrap:$grailsVersion") {
exclude group:'org.apache.ant',module:'ant'
exclude group:'org.apache.ant',module:'ant-launcher'
exclude group:'org.apache.ant',module:'ant-trax'
exclude group:'org.apache.ant',module:'ant-junit'
exclude group:'org.apache.ant',module:'ant'
exclude group:'org.apache.ivy',module:'ivy'
exclude group:'org.codehaus.gant',module:'gant_groovy1.8'
exclude group:'jline',module:'jline'
exclude group:'org.fusesource.jansi',module:'jansi'
exclude group:'net.java.dev.jna',module:'jna'
}
compile "org.grails:grails-async:$grailsVersion"
compile "org.slf4j:slf4j-api:$slf4jVersion"
compile "org.slf4j:jcl-over-slf4j:$slf4jVersion"
optional "org.springframework:spring-web:$springVersion"
}
def astTransformPath
jar.doFirst {
def metaInfPath = "${compileGroovy.destinationDir}/META-INF/services"
ant.mkdir(dir:metaInfPath)
astTransformPath = new File(metaInfPath, "org.codehaus.groovy.transform.ASTTransformation")
astTransformPath.text = "org.grails.datastore.gorm.query.transform.GlobalDetachedCriteriaASTTransformation"
}
jar.doLast {
if(astTransformPath) {
ant.delete file:astTransformPath
}
}
eclipse {
classpath {
file {
whenMerged { classpath ->
def testSrc = classpath.entries.find { entry -> entry.kind == 'src' && entry.path == "src/test/groovy" }
if (testSrc) {
// greclipse cannot use ASTs that are in the same project, so exclude this from compilation
testSrc.excludes = ["**/DirtyCheckTransformationSpec.groovy"]
}
}
}
}
}
sourceSets {
main {
compileClasspath += configurations.optional
}
javadoc {
classpath += configurations.optional
}
}
idea {
module {
scopes.PROVIDED.plus += configurations.optional
}
}
eclipse {
classpath {
plusConfigurations += configurations.optional
}
}<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.mongo.engine;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.bson.types.ObjectId;
import org.grails.datastore.mapping.core.IdentityGenerationException;
import org.grails.datastore.mapping.core.OptimisticLockingException;
import org.grails.datastore.mapping.core.SessionImplementor;
import org.grails.datastore.mapping.engine.AssociationIndexer;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.engine.NativeEntryEntityPersister;
import org.grails.datastore.mapping.engine.Persister;
import org.grails.datastore.mapping.engine.PropertyValueIndexer;
import org.grails.datastore.mapping.engine.internal.MappingUtils;
import org.grails.datastore.mapping.model.EmbeddedPersistentEntity;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.model.PropertyMapping;
import org.grails.datastore.mapping.model.config.GormProperties;
import org.grails.datastore.mapping.model.types.Association;
import org.grails.datastore.mapping.model.types.EmbeddedCollection;
import org.grails.datastore.mapping.model.types.Identity;
import org.grails.datastore.mapping.model.types.ManyToMany;
import org.grails.datastore.mapping.mongo.MongoDatastore;
import org.grails.datastore.mapping.mongo.MongoSession;
import org.grails.datastore.mapping.mongo.config.MongoAttribute;
import org.grails.datastore.mapping.mongo.config.MongoMappingContext;
import org.grails.datastore.mapping.mongo.query.MongoQuery;
import org.grails.datastore.mapping.query.Query;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.TypeDescriptor;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.DbCallback;
import org.springframework.data.mongodb.core.MongoTemplate;
import com.mongodb.BasicDBObject;
import com.mongodb.CommandResult;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.DBRef;
import com.mongodb.MongoException;
import com.mongodb.WriteConcern;
import com.mongodb.WriteResult;
/**
* A {@link org.grails.datastore.mapping.engine.EntityPersister} implementation for the Mongo document store
*
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public class MongoEntityPersister extends NativeEntryEntityPersister<DBObject, Object> {
public static final String INSTANCE_PREFIX = "instance:";
static Logger log = LoggerFactory.getLogger(MongoEntityPersister.class);
private static final String NEXT_ID_SUFFIX = ".next_id";
private boolean hasNumericalIdentifier = false;
private boolean hasStringIdentifier = false;
public static final String MONGO_ID_FIELD = "_id";
public static final String MONGO_CLASS_FIELD = "_class";
public MongoEntityPersister(MappingContext mappingContext, PersistentEntity entity,
MongoSession mongoSession, ApplicationEventPublisher publisher) {
super(mappingContext, entity, mongoSession, publisher);
if (!(entity instanceof EmbeddedPersistentEntity)) {
PersistentProperty identity = entity.getIdentity();
if (identity != null) {
hasNumericalIdentifier = Long.class.isAssignableFrom(identity.getType());
hasStringIdentifier = String.class.isAssignableFrom(identity.getType());
}
}
}
@Override
protected void refreshObjectStateFromNativeEntry(PersistentEntity persistentEntity, Object obj, Serializable nativeKey, DBObject nativeEntry, boolean isEmbedded) {
if (isEmbedded) {
Object id = nativeEntry.get(MONGO_ID_FIELD);
super.refreshObjectStateFromNativeEntry(persistentEntity, obj, (Serializable) id, nativeEntry, isEmbedded);
}
else {
super.refreshObjectStateFromNativeEntry(persistentEntity, obj, nativeKey, nativeEntry, isEmbedded);
}
}
@Override
protected DBObject getEmbedded(DBObject nativeEntry, String key) {
final Object embeddedDocument = nativeEntry.get(key);
if (embeddedDocument instanceof DBObject) {
return (DBObject) embeddedDocument;
}
return null;
}
@Override
protected void setEmbedded(DBObject nativeEntry, String key, DBObject embeddedEntry) {
nativeEntry.put(key, embeddedEntry);
}
@Override
protected void setEmbeddedCollection(final DBObject nativeEntry, final String key, Collection<?> instances, List<DBObject> embeddedEntries) {
if (instances == null || instances.isEmpty()) {
nativeEntry.put(key, null);
return;
}
nativeEntry.put(key, embeddedEntries);
}
@Override
protected void setEmbeddedMap(DBObject nativeEntry, String key, Map instances, Map<Object, DBObject> embeddedEntries) {
if (instances == null || instances.isEmpty()) {
nativeEntry.put(key, null);
return;
}
nativeEntry.put(key, embeddedEntries);
}
    /**
     * Implementors who want to support embedded one-to-many associations should implement this method.
     *
     * @param association The association
     * @param ea The entity access for the owning entity
     * @param nativeEntry The native Mongo entry
     * @return A list of keys loaded from the embedded instance
     */
@Override
protected List loadEmbeddedCollectionKeys(Association association, EntityAccess ea, DBObject nativeEntry) {
if (nativeEntry == null) {
return super.loadEmbeddedCollectionKeys(association, ea, nativeEntry);
}
Object entry = nativeEntry.get(getPropertyKey(association));
List keys = new ArrayList();
if (entry instanceof List) {
List entries = (List) entry;
for (Object o : entries) {
if (o instanceof DBRef) {
DBRef dbref = (DBRef) o;
keys.add(dbref.getId());
}
else if (o != null) {
keys.add(o);
}
else {
keys.add(null);
}
}
}
return keys;
}
@Override
protected void setEmbeddedCollectionKeys(Association association, EntityAccess embeddedEntityAccess, DBObject embeddedEntry, List<Serializable> keys) {
List dbRefs = new ArrayList();
boolean reference = isReference(association);
for (Object foreignKey : keys) {
if (reference) {
dbRefs.add(new DBRef((DB) session.getNativeInterface(), getCollectionName(association.getAssociatedEntity()), foreignKey));
} else {
dbRefs.add(foreignKey);
}
}
embeddedEntry.put(association.getName(), dbRefs);
}
@Override
protected void loadEmbeddedCollection(EmbeddedCollection embeddedCollection,
EntityAccess ea, Object embeddedInstances, String propertyKey) {
if(Map.class.isAssignableFrom(embeddedCollection.getType())) {
if(embeddedInstances instanceof DBObject) {
Map instances = new HashMap();
DBObject embedded = (DBObject)embeddedInstances;
for (String key : embedded.keySet()) {
Object o = embedded.get(key);
if(o instanceof DBObject) {
DBObject nativeEntry = (DBObject) o;
Object instance =
createObjectFromEmbeddedNativeEntry(embeddedCollection.getAssociatedEntity(), nativeEntry);
SessionImplementor<DBObject> si = (SessionImplementor<DBObject>)getSession();
si.cacheEntry(embeddedCollection.getAssociatedEntity(), createEmbeddedCacheEntryKey(instance), nativeEntry);
instances.put(key, instance);
}
}
ea.setProperty(embeddedCollection.getName(), instances);
}
}
else {
Collection<Object> instances = MappingUtils.createConcreteCollection(embeddedCollection.getType());
if (embeddedInstances instanceof Collection) {
Collection coll = (Collection)embeddedInstances;
for (Object dbo : coll) {
if (dbo instanceof BasicDBObject) {
BasicDBObject nativeEntry = (BasicDBObject)dbo;
Object instance =
createObjectFromEmbeddedNativeEntry(embeddedCollection.getAssociatedEntity(), nativeEntry);
SessionImplementor<DBObject> si = (SessionImplementor<DBObject>)getSession();
si.cacheEntry(embeddedCollection.getAssociatedEntity(), createEmbeddedCacheEntryKey(instance), nativeEntry);
instances.add(instance);
}
}
}
ea.setProperty(embeddedCollection.getName(), instances);
}
}
@Override
protected boolean isEmbeddedEntry(Object entry) {
return entry instanceof DBObject;
}
public Query createQuery() {
return new MongoQuery((MongoSession) getSession(), getPersistentEntity());
}
@Override
protected boolean doesRequirePropertyIndexing() {
return false;
}
@Override
protected List<Object> retrieveAllEntities(PersistentEntity persistentEntity,
Iterable<Serializable> keys) {
Query query = session.createQuery(persistentEntity.getJavaClass());
PersistentProperty identity = persistentEntity.getIdentity();
if (keys instanceof List) {
List actualKeys = new ArrayList();
Iterator iterator = keys.iterator();
while (iterator.hasNext()) {
Object key = iterator.next();
Object id = getIdentifierForKey(key);
actualKeys.add(id);
}
query.in(identity.getName(), actualKeys);
}
else {
List<Serializable> keyList = new ArrayList<Serializable>();
for (Serializable key : keys) {
keyList.add(key);
}
query.in(identity.getName(), keyList);
}
List<Object> entityResults = new ArrayList<Object>();
Iterator<Serializable> keyIterator = keys.iterator();
Map<Serializable, Object> resultMap = new HashMap<Serializable, Object>();
for (Object o : query.list()) {
if (o instanceof DBObject) {
DBObject dbo = (DBObject) o;
o = createObjectFromNativeEntry(getPersistentEntity(), (Serializable) dbo.get(MONGO_ID_FIELD), dbo);
}
resultMap.put(getObjectIdentifier(o), o);
}
while (keyIterator.hasNext()) {
Object key = getIdentifierForKey(keyIterator.next());
ConversionService conversionService = getMappingContext().getConversionService();
key = conversionService.convert(key, identity.getType());
Object o = resultMap.get(key);
entityResults.add(o); // may add null, so entityResults list size matches input list size.
}
return entityResults;
}
private Object getIdentifierForKey(Object key) {
Object id = key;
if (key instanceof DBRef) {
DBRef ref = (DBRef) key;
id = ref.getId();
}
return id;
}
@Override
protected List<Object> retrieveAllEntities(PersistentEntity persistentEntity, Serializable[] keys) {
return retrieveAllEntities(persistentEntity, Arrays.asList(keys));
}
@Override
public String getEntityFamily() {
return getMongoSession().getCollectionName(getPersistentEntity());
}
@Override
protected void deleteEntry(String family, final Object key, final Object entry) {
getMongoTemplate().execute(new DbCallback<Object>() {
public Object doInDB(DB con) throws MongoException, DataAccessException {
DBCollection dbCollection = getCollection(con);
DBObject dbo = createDBObjectWithKey(key);
MongoSession mongoSession = (MongoSession) session;
WriteConcern writeConcern = mongoSession.getDeclaredWriteConcern(getPersistentEntity());
if (writeConcern != null) {
dbCollection.remove(dbo, writeConcern);
} else {
dbCollection.remove(dbo);
}
return null;
}
protected DBCollection getCollection(DB con) {
return con.getCollection(getCollectionName(getPersistentEntity()));
}
});
}
protected MongoTemplate getMongoTemplate() {
return getMongoSession().getMongoTemplate(getPersistentEntity());
}
@Override
protected Object generateIdentifier(final PersistentEntity persistentEntity, final DBObject nativeEntry) {
return getMongoTemplate().execute(new DbCallback<Object>() {
public Object doInDB(DB con) throws MongoException, DataAccessException {
String collectionName = getCollectionName(persistentEntity, nativeEntry);
DBCollection dbCollection = con.getCollection(collectionName + NEXT_ID_SUFFIX);
                // If there is a numeric identifier then we need to rely on optimistic concurrency controls to obtain a unique identifier
// sequence. If the identifier is not numeric then we assume BSON ObjectIds.
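                // The "<collection>.next_id" counter collection then holds documents shaped roughly like
                // { "_id": "<collection name>", "next_id": 42 }, incremented atomically via findAndModify below.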
if (hasNumericalIdentifier) {
int attempts = 0;
while (true) {
DBObject result = dbCollection.findAndModify(new BasicDBObject(MONGO_ID_FIELD, collectionName), null, null, false, new BasicDBObject("$inc", new BasicDBObject("next_id", 1)), true, true);
                        // result should never be null and we shouldn't come back with an error, but you never know. We should just retry if this happens...
if (result != null && con.getLastError().ok()) {
long nextId = getMappingContext().getConversionService().convert(result.get("next_id"), Long.class);
nativeEntry.put(MONGO_ID_FIELD, nextId);
break;
} else {
attempts++;
if (attempts > 3) {
throw new IdentityGenerationException("Unable to generate identity using findAndModify after 3 attempts: " + con.getLastError().getErrorMessage());
}
}
}
return nativeEntry.get(MONGO_ID_FIELD);
}
ObjectId objectId = ObjectId.get();
if (ObjectId.class.isAssignableFrom(persistentEntity.getIdentity().getType())) {
nativeEntry.put(MONGO_ID_FIELD, objectId);
return objectId;
}
String stringId = objectId.toString();
nativeEntry.put(MONGO_ID_FIELD, stringId);
return stringId;
}
});
}
@Override
public PropertyValueIndexer getPropertyIndexer(PersistentProperty property) {
// We don't need to implement this for Mongo since Mongo automatically creates indexes for us
return null;
}
@Override
public AssociationIndexer getAssociationIndexer(DBObject nativeEntry, Association association) {
return new MongoAssociationIndexer(nativeEntry, association, (MongoSession) session);
}
@Override
protected DBObject createNewEntry(String family, Object instance) {
SessionImplementor<DBObject> si = (SessionImplementor<DBObject>)getSession();
DBObject dbo = si.getCachedEntry(getPersistentEntity(), createInstanceCacheEntryKey(instance));
if(dbo != null) {
return dbo;
}
else {
return super.createNewEntry(family, instance);
}
}
public static String createInstanceCacheEntryKey(Object instance) {
return INSTANCE_PREFIX + System.identityHashCode(instance);
}
@Override
protected DBObject createNewEntry(String family) {
BasicDBObject dbo = new BasicDBObject();
PersistentEntity persistentEntity = getPersistentEntity();
if (!persistentEntity.isRoot()) {
dbo.put(MONGO_CLASS_FIELD, persistentEntity.getDiscriminator());
}
return dbo;
}
@Override
protected Object getEntryValue(DBObject nativeEntry, String property) {
Object value = nativeEntry.get(property);
if (value instanceof DBRef) {
return getIdentifierForKey(value);
}
return value;
}
@Override
protected Object formulateDatabaseReference(PersistentEntity persistentEntity, Association association, Serializable associationId) {
DB db = (DB) session.getNativeInterface();
boolean isReference = isReference(association);
if (isReference) {
return new DBRef(db, getCollectionName(association.getAssociatedEntity()), associationId);
}
return associationId;
}
private boolean isReference(Association association) {
PropertyMapping mapping = association.getMapping();
if (mapping != null) {
MongoAttribute attribute = (MongoAttribute) mapping.getMappedForm();
if (attribute != null) {
return attribute.isReference();
}
}
return true;
}
@Override
protected void setEntryValue(DBObject nativeEntry, String key, Object value) {
MappingContext mappingContext = getMappingContext();
setDBObjectValue(nativeEntry, key, value, mappingContext);
}
@Override
protected String getPropertyKey(PersistentProperty prop) {
if (prop instanceof Identity) {
return MONGO_ID_FIELD;
}
return super.getPropertyKey(prop);
}
public static void setDBObjectValue(DBObject nativeEntry, String key, Object value, MappingContext mappingContext) {
Object nativeValue = getSimpleNativePropertyValue(value, mappingContext);
nativeEntry.put(key, nativeValue);
}
/**
* Convert a value into a type suitable for use in Mongo. Collections and maps are converted recursively. The
* mapping context is used for the conversion if possible, otherwise toString() is the eventual fallback.
* @param value The value to convert (or null)
* @param mappingContext The mapping context.
* @return The converted value (or null)
*/
public static Object getSimpleNativePropertyValue(Object value, MappingContext mappingContext) {
Object nativeValue;
if (value == null || mappingContext.isPersistentEntity(value)) {
nativeValue = null;
} else if (MongoMappingContext.isMongoNativeType(value.getClass())) {
// easy case, no conversion required.
// Checked first in case any of these types (such as BasicDBObject) are instances of collections
// or arrays, etc.!
nativeValue = value;
} else if (value.getClass().isArray()) {
Object[] array = (Object[]) value;
List<Object> nativeColl = new ArrayList<Object>(array.length);
for (Object item : array) {
nativeColl.add(getSimpleNativePropertyValue(item, mappingContext));
}
nativeValue = nativeColl;
} else if (value instanceof Collection) {
Collection existingColl = (Collection)value;
List<Object> nativeColl = new ArrayList<Object>(existingColl.size());
for (Object item : existingColl) {
nativeColl.add(getSimpleNativePropertyValue(item, mappingContext));
}
nativeValue = nativeColl;
} else if (value instanceof Map) {
Map<String, Object> existingMap = (Map)value;
Map<String, Object> newMap = new LinkedHashMap<String, Object>();
for (Map.Entry<String, Object> entry :existingMap.entrySet()) {
newMap.put(entry.getKey(), getSimpleNativePropertyValue(entry.getValue(), mappingContext));
}
nativeValue = newMap;
} else {
nativeValue = convertPrimitiveToNative(value, mappingContext);
}
return nativeValue;
}
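    // Illustrative example of the recursive conversion above (not part of the original source):
    //   getSimpleNativePropertyValue(Arrays.asList(1, new java.net.URL("http://grails.org")), mappingContext)
    //   // => [1, "http://grails.org"]  - integers pass through, the URL ends up as a String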
private static Object convertPrimitiveToNative(Object item, MappingContext mappingContext) {
Object nativeValue;
if (item != null) {
ConversionService conversionService = mappingContext.getConversionService();
            // prefer an Integer conversion for integral types; otherwise fall back to a String conversion
TypeDescriptor itemTypeDescriptor = TypeDescriptor.forObject(item);
Class<?> itemTypeClass = itemTypeDescriptor.getObjectType();
if ((itemTypeClass.equals(Integer.class) || itemTypeClass.equals(Short.class)) && conversionService.canConvert(itemTypeDescriptor, TypeDescriptor.valueOf(Integer.class))) {
nativeValue = conversionService.convert(item, Integer.class);
} else if (conversionService.canConvert(itemTypeDescriptor, TypeDescriptor.valueOf(String.class))) {
nativeValue = conversionService.convert(item, String.class);
} else {
// fall back if no explicit converter is registered, good for URL, Locale, etc.
nativeValue = item.toString();
}
} else {
nativeValue = null;
}
return nativeValue;
}
@Override
protected PersistentEntity discriminatePersistentEntity(PersistentEntity persistentEntity, DBObject nativeEntry) {
final Object o = nativeEntry.get(MONGO_CLASS_FIELD);
if (o != null) {
final String className = o.toString();
final PersistentEntity childEntity = getMappingContext().getChildEntityByDiscriminator(persistentEntity.getRootEntity(), className);
if (childEntity != null) {
return childEntity;
}
}
return super.discriminatePersistentEntity(persistentEntity, nativeEntry);
}
@Override
protected DBObject retrieveEntry(final PersistentEntity persistentEntity,
String family, final Serializable key) {
return getMongoTemplate().execute(new DbCallback<DBObject>() {
public DBObject doInDB(DB con) throws MongoException, DataAccessException {
DBCollection dbCollection = con.getCollection(getCollectionName(persistentEntity));
return dbCollection.findOne(createDBObjectWithKey(key));
}
});
}
private DBObject removeNullEntries(DBObject nativeEntry) {
for (String key : new HashSet<String>(nativeEntry.keySet())) {
Object o = nativeEntry.get(key);
if (o == null) {
nativeEntry.removeField(key);
} else if (o instanceof Object[]) {
for (Object o2 : (Object[])o) {
if (o2 instanceof DBObject) {
removeNullEntries((DBObject)o2);
}
}
} else if (o instanceof List) {
for (Object o2 : (List)o) {
if (o2 instanceof DBObject) {
removeNullEntries((DBObject)o2);
}
}
} else if (o instanceof DBObject) {
removeNullEntries((DBObject)o);
}
}
return nativeEntry;
}
@Override
protected Object storeEntry(final PersistentEntity persistentEntity, final EntityAccess entityAccess,
final Object storeId, final DBObject nativeEntry) {
return getMongoTemplate().execute(new DbCallback<Object>() {
public Object doInDB(DB con) throws MongoException, DataAccessException {
removeNullEntries(nativeEntry);
nativeEntry.put(MONGO_ID_FIELD, storeId);
return nativeEntry.get(MONGO_ID_FIELD);
}
});
}
public String getCollectionName(PersistentEntity persistentEntity) {
return getCollectionName(persistentEntity, null);
}
private String getCollectionName(PersistentEntity persistentEntity, DBObject nativeEntry) {
String collectionName;
if (persistentEntity.isRoot()) {
MongoSession mongoSession = (MongoSession) getSession();
collectionName = mongoSession.getCollectionName(persistentEntity);
}
else {
MongoSession mongoSession = (MongoSession) getSession();
collectionName = mongoSession.getCollectionName(persistentEntity.getRootEntity());
}
return collectionName;
}
private DBObject modifyNullsToUnsets(DBObject nativeEntry) {
DBObject unsets = new BasicDBObject();
DBObject sets = new BasicDBObject();
for (String key : nativeEntry.keySet()) {
Object o = nativeEntry.get(key);
if (o == null) {
unsets.put(key, 1);
} else if ("_id".equals(key)) {
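                // skip the identifier: _id cannot be modified by a Mongo update, so it is neither $set nor $unset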
} else if (o instanceof Object[]) {
sets.put(key, o);
for (Object o2 : (Object[])o) {
if (o2 instanceof DBObject) {
removeNullEntries((DBObject)o2);
}
}
} else if (o instanceof List) {
sets.put(key, o);
for (Object o2 : (List)o) {
if (o2 instanceof DBObject) {
removeNullEntries((DBObject)o2);
}
}
} else if (o instanceof DBObject) {
sets.put(key, removeNullEntries((DBObject)o));
} else {
sets.put(key, o);
}
}
DBObject newEntry = new BasicDBObject();
newEntry.put("$set", sets);
if (!unsets.keySet().isEmpty()) {
newEntry.put("$unset", unsets);
}
return newEntry;
}
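    // For illustration (not part of the original source), an entry such as
    //   { "name": "Bob", "age": null, "_id": <id> }
    // becomes the update document
    //   { "$set": { "name": "Bob" }, "$unset": { "age": 1 } }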
@Override
public void updateEntry(final PersistentEntity persistentEntity, final EntityAccess ea,
final Object key, final DBObject entry) {
getMongoTemplate().execute(new DbCallback<Object>() {
public Object doInDB(DB con) throws MongoException, DataAccessException {
String collectionName = getCollectionName(persistentEntity, entry);
DBCollection dbCollection = con.getCollection(collectionName);
DBObject dbo = createDBObjectWithKey(key);
boolean versioned = isVersioned(ea);
Object currentVersion = null;
if (versioned) {
currentVersion = getCurrentVersion(ea);
incrementVersion(ea);
// query for old version to ensure atomicity
if (currentVersion != null) {
dbo.put(GormProperties.VERSION, currentVersion);
}
}
DBObject newEntry = modifyNullsToUnsets(entry);
MongoSession mongoSession = (MongoSession) session;
WriteConcern writeConcern = mongoSession.getDeclaredWriteConcern(getPersistentEntity());
WriteResult result;
if (writeConcern != null) {
result = dbCollection.update(dbo, newEntry, false, false, writeConcern);
}
else {
result = dbCollection.update(dbo, newEntry, false, false);
}
if (versioned && !((SessionImplementor)getSession()).isStateless(persistentEntity)) {
// ok, we need to check whether the write worked:
// note that this will use the standard write concern unless it wasn't at least ACKNOWLEDGE:
CommandResult error = result.getLastError(WriteConcern.ACKNOWLEDGED);
// may as well handle any networking errors:
error.throwOnError();
// if the document count "n" of the update was 0, the versioning check must have failed:
if (error.getInt("n") == 0) {
if(currentVersion != null) {
ea.setProperty(GormProperties.VERSION, currentVersion);
}
throw new OptimisticLockingException(persistentEntity, key);
}
}
return null;
}
});
}
@Override
protected void setManyToMany(PersistentEntity persistentEntity, Object obj,
DBObject nativeEntry, ManyToMany manyToMany, Collection associatedObjects,
Map<Association, List<Serializable>> toManyKeys) {
List ids = new ArrayList();
if (associatedObjects != null) {
for (Object o : associatedObjects) {
if (o == null) {
ids.add(null);
}
else {
PersistentEntity childPersistentEntity =
getMappingContext().getPersistentEntity(o.getClass().getName());
EntityAccess entityAccess = createEntityAccess(childPersistentEntity, o);
ids.add(entityAccess.getIdentifier());
}
}
}
nativeEntry.put(manyToMany.getName() + "_$$manyToManyIds", ids);
}
@Override
protected Collection getManyToManyKeys(PersistentEntity persistentEntity, Object object,
Serializable nativeKey, DBObject nativeEntry, ManyToMany manyToMany) {
return (Collection)nativeEntry.get(manyToMany.getName() + "_$$manyToManyIds");
}
protected Object getCurrentVersion(final EntityAccess ea) {
Object currentVersion = ea.getProperty(GormProperties.VERSION);
if (Number.class.isAssignableFrom(ea.getPropertyType(GormProperties.VERSION))) {
currentVersion = currentVersion != null ? ((Number)currentVersion).longValue() : currentVersion;
}
return currentVersion;
}
@Override
protected void deleteEntries(String family, final List<Object> keys) {
getMongoTemplate().execute(new DbCallback<Object>() {
public Object doInDB(DB con) throws MongoException, DataAccessException {
String collectionName = getCollectionName(getPersistentEntity());
DBCollection dbCollection = con.getCollection(collectionName);
MongoSession mongoSession = (MongoSession) getSession();
MongoQuery query = mongoSession.createQuery(getPersistentEntity().getJavaClass());
query.in(getPersistentEntity().getIdentity().getName(), keys);
dbCollection.remove(query.getMongoQuery());
return null;
}
});
}
@Override
protected void cascadeDeleteCollection(EntityAccess entityAccess, Association association) {
Object propValue = entityAccess.getProperty(association.getName());
if (!(propValue instanceof Collection)) {
return;
}
Collection collection = ((Collection) propValue);
Persister persister = null;
for (Iterator iter = collection.iterator(); iter.hasNext(); ) {
Object child = iter.next();
if (child == null) {
log.warn("Encountered a null associated reference while cascade-deleting '{}' as part of {} (ID {})",
association.getReferencedPropertyName(), entityAccess.getEntity().getClass().getName(), entityAccess.getIdentifier());
continue;
}
if(persister == null) {
persister = session.getPersister(child);
}
persister.delete(child);
iter.remove();
}
}
protected DBObject createDBObjectWithKey(Object key) {
DBObject dbo = new BasicDBObject();
if (hasNumericalIdentifier || hasStringIdentifier) {
dbo.put(MONGO_ID_FIELD, key);
}
else {
if (key instanceof ObjectId) {
dbo.put(MONGO_ID_FIELD, key);
}
else {
dbo.put(MONGO_ID_FIELD, new ObjectId(key.toString()));
}
}
return dbo;
}
@Override
public boolean isDirty(Object instance, Object entry) {
if (super.isDirty(instance, entry)) {
return true;
}
DBObject dbo = (DBObject)entry;
PersistentEntity entity = getPersistentEntity();
EntityAccess entityAccess = createEntityAccess(entity, instance, dbo);
DBObject cached = (DBObject)((SessionImplementor<?>)getSession()).getCachedEntry(
entity, (Serializable)entityAccess.getIdentifier(), true);
return !dbo.equals(cached);
}
public MongoSession getMongoSession() {
return (MongoSession) getSession();
}
private class MongoAssociationIndexer implements AssociationIndexer {
private DBObject nativeEntry;
private Association association;
private MongoSession session;
private boolean isReference = true;
public MongoAssociationIndexer(DBObject nativeEntry, Association association, MongoSession session) {
this.nativeEntry = nativeEntry;
this.association = association;
this.session = session;
this.isReference = isReference(association);
}
public void preIndex(final Object primaryKey, final List foreignKeys) {
// if the association is a unidirectional one-to-many we store the keys
// embedded in the owning entity, otherwise we use a foreign key
if (!association.isBidirectional()) {
DB db = session.getNativeInterface();
List dbRefs = new ArrayList();
for (Object foreignKey : foreignKeys) {
if (isReference) {
dbRefs.add(new DBRef(db, getCollectionName(association.getAssociatedEntity()), foreignKey));
}
else {
dbRefs.add(foreignKey);
}
}
// update the native entry directly.
nativeEntry.put(association.getName(), dbRefs);
}
}
public void index(final Object primaryKey, final List foreignKeys) {
// indexing is handled by putting the data in the native entry before it is persisted, see preIndex above.
}
public List query(Object primaryKey) {
// for a unidirectional one-to-many we use the embedded keys
if (!association.isBidirectional()) {
final Object indexed = nativeEntry.get(association.getName());
if (!(indexed instanceof Collection)) {
return Collections.emptyList();
}
List indexedList = getIndexedAssociationsAsList(indexed);
if (associationsAreDbRefs(indexedList)) {
return extractIdsFromDbRefs(indexedList);
}
return indexedList;
}
// for a bidirectional one-to-many we use the foreign key to query the inverse side of the association
Association inverseSide = association.getInverseSide();
Query query = session.createQuery(association.getAssociatedEntity().getJavaClass());
query.eq(inverseSide.getName(), primaryKey);
query.projections().id();
return query.list();
}
public PersistentEntity getIndexedEntity() {
return association.getAssociatedEntity();
}
public void index(Object primaryKey, Object foreignKey) {
// TODO: Implement indexing of individual entities
}
private List getIndexedAssociationsAsList(Object indexed) {
return (indexed instanceof List) ? (List) indexed : new ArrayList(((Collection) indexed));
}
private boolean associationsAreDbRefs(List indexedList) {
return !indexedList.isEmpty() && (indexedList.get(0) instanceof DBRef);
}
private List extractIdsFromDbRefs(List indexedList) {
List resolvedDbRefs = new ArrayList();
for (Object indexedAssociation : indexedList) {
resolvedDbRefs.add(((DBRef) indexedAssociation).getId());
}
return resolvedDbRefs;
}
}
}
<file_sep>package org.grails.datastore.gorm.neo4j.engine;
import org.neo4j.cypher.javacompat.ExecutionEngine;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Transaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
/**
* CypherEngine implementation backed by {@link ExecutionEngine}
 * @deprecated in favour of {@link org.grails.datastore.gorm.neo4j.engine.JdbcCypherEngine}
*/
@Deprecated
public class EmbeddedCypherEngine implements CypherEngine {
private static Logger log = LoggerFactory.getLogger(EmbeddedCypherEngine.class);
private ExecutionEngine executionEngine;
private GraphDatabaseService graphDatabaseService;
private static ThreadLocal<Stack<Transaction>> transactionStackThreadLocal = new ThreadLocal<Stack<Transaction>>() {
@Override
protected Stack<Transaction> initialValue() {
return new Stack<Transaction>();
}
};
public EmbeddedCypherEngine(GraphDatabaseService graphDatabaseService) {
this(graphDatabaseService, new ExecutionEngine(graphDatabaseService));
}
public EmbeddedCypherEngine(GraphDatabaseService graphDatabaseService, ExecutionEngine executionEngine) {
this.graphDatabaseService = graphDatabaseService;
this.executionEngine = executionEngine;
}
@Override
public CypherResult execute(String cypher, List params) {
Map paramsMap = null;
if (params!=null) {
paramsMap = new HashMap();
for (int i = 0; i < params.size(); i++) {
paramsMap.put(Integer.toString(i), params.get(i));
}
}
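        // e.g. a positional parameter list ["Bob", 42] becomes the map {"0": "Bob", "1": 42}
        // so that Cypher placeholders such as {0} and {1} can be bound by position (illustrative values)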
return new EmbeddedCypherResult(paramsMap == null ?
executionEngine.execute(cypher) :
executionEngine.execute(cypher, paramsMap));
}
@Override
public CypherResult execute(String cypher) {
return execute(cypher, null);
}
@Override
public void beginTx() {
Stack transactionStack = transactionStackThreadLocal.get();
transactionStack.push(graphDatabaseService.beginTx());
log.info("beginTx: " + transactionStack);
}
@Override
public void commit() {
try {
Stack<Transaction> transactionStack = transactionStackThreadLocal.get();
log.info("commit: " + transactionStack);
            if (!transactionStack.isEmpty()) { // in case session.disconnect() gets called manually
Transaction tx = transactionStack.pop();
log.info("commit after: " + transactionStack);
tx.success();
tx.close();
}
} catch (RuntimeException e) {
throw e;
}
}
@Override
public void rollback() {
Stack<Transaction> transactionStack = transactionStackThreadLocal.get();
log.info("rollback: " + transactionStack);
Transaction tx = transactionStack.pop();
log.info("rollback: " + transactionStack);
tx.failure();
tx.close();
}
}
<file_sep> version = "2.0.0-SNAPSHOT"
//version = "2.0.0-M02"
ext.neo4jVersion = "2.0.3"
ext.gdmVersion = "2.0.7.RELEASE"
sourceSets.main.java.srcDirs = []
sourceSets.main.groovy.srcDirs += ["src/main/java"]
repositories {
//mavenLocal()
//mavenCentral()
// some artifacts used are not available on mavenCentral
maven { url "http://m2.neo4j.org/content/repositories/releases/" }
// maven { url "http://m2.neo4j.org/content/repositories/snapshots/" }
// maven { url "http://repo.grails.org/grails/core" }
maven { url 'http://maven.restlet.com'}
}
dependencies {
compile "org.neo4j:neo4j-community:$neo4jVersion",
"org.neo4j:neo4j-graphviz:$neo4jVersion",
"org.codehaus.groovy:groovy-all:$groovyVersion",
"org.neo4j:neo4j-jdbc:2.0.2",
'org.apache.tomcat:tomcat-jdbc:8.0.8'
// during development (aka snapshot version), use project dependencies
// during release, use 'regular' dependencies
if (version =~ /SNAPSHOT/) {
compile project(":grails-datastore-gorm"),
project(":grails-datastore-web"),
project(":grails-datastore-gorm-plugin-support")
testCompile project(":grails-datastore-gorm-test"),
project(":grails-datastore-gorm-tck")
} else {
compile "org.grails:grails-datastore-gorm:$gdmVersion",
"org.grails:grails-datastore-web:$gdmVersion",
"org.grails:grails-datastore-gorm-plugin-support:$gdmVersion"
testCompile "org.grails:grails-datastore-gorm-test:$gdmVersion",
"org.grails:grails-datastore-gorm-tck:$gdmVersion"
}
testCompile 'org.codehaus.gpars:gpars:0.12',
[group: "org.neo4j", name: "neo4j-kernel", version: neo4jVersion, classifier: 'tests'],
[group: "org.neo4j.app", name: "neo4j-server", version: neo4jVersion, classifier: 'tests'],
[group: "org.neo4j.app", name: "neo4j-server", version: neo4jVersion],
"org.neo4j:neo4j-graphviz:$neo4jVersion",
"org.neo4j:neo4j-management:$neo4jVersion",
"org.neo4j:neo4j-ha:$neo4jVersion",
'com.sun.jersey:jersey-server:1.9',
'com.sun.jersey:jersey-core:1.9',
'com.sun.jersey:jersey-client:1.9',
'p6spy:p6spy:2.0.2'
testRuntime "org.grails:grails-web:$grailsVersion", {
transitive = false
}
testRuntime("org.grails:grails-plugin-domain-class:$grailsVersion") {
exclude group: 'org.grails', module:'grails-plugin-testing'
exclude group: 'org.grails', module:'grails-datastore-core'
exclude group: 'org.grails', module:'grails-datastore-gorm'
exclude group: 'org.grails', module:'grails-datastore-simple'
}
}
/*
test {
// jvmArgs "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"
// jvmArgs "-agentpath:/home/stefan/packages/yjp/bin/linux-x86-64/libyjpagent.so"
}*/
<file_sep>version = "1.0.0.BUILD-SNAPSHOT"
dependencies {
compile project(":grails-datastore-core"),
'org.codehaus.jackson:jackson-core-asl:1.6.1',
'org.codehaus.jackson:jackson-mapper-asl:1.6.1',
'org.springframework.data:spring-data-riak:1.0.0.M3'
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.gemfire.config;
import groovy.lang.Closure;
import org.grails.datastore.mapping.config.groovy.MappingConfigurationBuilder;
import org.grails.datastore.mapping.keyvalue.mapping.config.Family;
import org.grails.datastore.mapping.keyvalue.mapping.config.GormKeyValueMappingFactory;
import org.grails.datastore.mapping.keyvalue.mapping.config.KeyValue;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.config.GormProperties;
import org.grails.datastore.mapping.reflect.ClassPropertyFetcher;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.RegionAttributes;
/**
* Allows GORM-style configuration of how an entity maps to a
* Gemfire region.
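 *
 * <p>A minimal sketch of the kind of mapping closure this factory evaluates; the
 * {@code Book} domain class and the {@code "books"} region name are hypothetical:</p>
 * <pre>
 * class Book {
 *     String title
 *     static mapping = {
 *         region "books"
 *     }
 * }
 * </pre>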
*
* @author <NAME>
* @since 1.0
*/
public class GormGemfireMappingFactory extends GormKeyValueMappingFactory {
private DataPolicy defaultDataPolicy = DataPolicy.PARTITION;
public GormGemfireMappingFactory() {
super("Gemfire");
}
public void setDefaultDataPolicy(DataPolicy defaultDataPolicy) {
this.defaultDataPolicy = defaultDataPolicy;
}
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public Family createMappedForm(PersistentEntity entity) {
ClassPropertyFetcher cpf = ClassPropertyFetcher.forClass(entity.getJavaClass());
final Closure value = cpf.getStaticPropertyValue(GormProperties.MAPPING, Closure.class);
if (value == null) {
return new Region();
}
final Region family = new Region();
AttributesFactory factory = new AttributesFactory() {
@SuppressWarnings("unused")
public void setRegion(String name) {
family.setRegion(name);
}
};
factory.setDataPolicy(defaultDataPolicy);
MappingConfigurationBuilder builder = new MappingConfigurationBuilder(factory, KeyValue.class);
builder.evaluate(value);
entityToPropertyMap.put(entity, builder.getProperties());
final RegionAttributes regionAttributes = factory.create();
family.setRegionAttributes(regionAttributes);
family.setCacheListeners(regionAttributes.getCacheListeners());
family.setDataPolicy(regionAttributes.getDataPolicy());
family.setCacheLoader(regionAttributes.getCacheLoader());
family.setCacheWriter(regionAttributes.getCacheWriter());
builder = new MappingConfigurationBuilder(family, KeyValue.class);
builder.evaluate(value);
return family;
}
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.mongo;
import static org.grails.datastore.mapping.config.utils.ConfigUtils.read;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.bson.types.Binary;
import org.bson.types.ObjectId;
import org.grails.datastore.gorm.mongo.geo.*;
import org.grails.datastore.mapping.core.AbstractDatastore;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.core.StatelessDatastore;
import org.grails.datastore.mapping.document.config.DocumentMappingContext;
import org.grails.datastore.mapping.model.*;
import org.grails.datastore.mapping.mongo.config.MongoAttribute;
import org.grails.datastore.mapping.mongo.config.MongoCollection;
import org.grails.datastore.mapping.mongo.config.MongoMappingContext;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.convert.converter.Converter;
import org.springframework.dao.DataAccessException;
import org.springframework.data.authentication.UserCredentials;
import org.springframework.data.mongodb.core.DbCallback;
import org.springframework.data.mongodb.core.MongoFactoryBean;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.MongoException;
import com.mongodb.MongoOptions;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
/**
* A Datastore implementation for the Mongo document store.
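 *
 * <p>A minimal construction sketch; the database name, host and port values are
 * illustrative only:</p>
 * <pre>
 * Map connectionDetails = new HashMap();
 * connectionDetails.put(MongoDatastore.MONGO_HOST, "localhost");
 * connectionDetails.put(MongoDatastore.MONGO_PORT, "27017");
 * MongoDatastore datastore = new MongoDatastore(
 *         new MongoMappingContext("myDb"), connectionDetails, null);
 * datastore.afterPropertiesSet();
 * </pre>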
*
* @author <NAME>
* @since 1.0
*/
public class MongoDatastore extends AbstractDatastore implements InitializingBean, MappingContext.Listener, DisposableBean, StatelessDatastore {
public static final String PASSWORD = "password";
public static final String USERNAME = "username";
public static final String MONGO_PORT = "port";
public static final String MONGO_HOST = "host";
public static final String MONGO_STATELESS = "stateless";
public static final String INDEX_ATTRIBUTES = "indexAttributes";
protected Mongo mongo;
protected MongoOptions mongoOptions = new MongoOptions();
protected Map<PersistentEntity, MongoTemplate> mongoTemplates = new ConcurrentHashMap<PersistentEntity, MongoTemplate>();
protected Map<PersistentEntity, String> mongoCollections = new ConcurrentHashMap<PersistentEntity, String>();
protected boolean stateless = false;
protected UserCredentials userCrentials;
/**
* Constructs a MongoDatastore using the default database name of "test" and defaults for the host and port.
* Typically used during testing.
*/
public MongoDatastore() {
this(new MongoMappingContext("test"), Collections.<String, String>emptyMap(), null);
}
/**
* Constructs a MongoDatastore using the given MappingContext and connection details map.
*
* @param mappingContext The MongoMappingContext
* @param connectionDetails The connection details containing the {@link #MONGO_HOST} and {@link #MONGO_PORT} settings
*/
public MongoDatastore(MongoMappingContext mappingContext,
Map<String, String> connectionDetails, MongoOptions mongoOptions, ConfigurableApplicationContext ctx) {
this(mappingContext, connectionDetails, ctx);
if (mongoOptions != null) {
this.mongoOptions = mongoOptions;
}
}
/**
* Constructs a MongoDatastore using the given MappingContext and connection details map.
*
* @param mappingContext The MongoMappingContext
* @param connectionDetails The connection details containing the {@link #MONGO_HOST} and {@link #MONGO_PORT} settings
*/
public MongoDatastore(MongoMappingContext mappingContext,
Map<String, String> connectionDetails, ConfigurableApplicationContext ctx) {
super(mappingContext, connectionDetails, ctx);
if (mappingContext != null) {
mappingContext.addMappingContextListener(this);
}
initializeConverters(mappingContext);
mappingContext.getConverterRegistry().addConverter(new Converter<String, ObjectId>() {
public ObjectId convert(String source) {
return new ObjectId(source);
}
});
mappingContext.getConverterRegistry().addConverter(new Converter<ObjectId, String>() {
public String convert(ObjectId source) {
return source.toString();
}
});
mappingContext.getConverterRegistry().addConverter(new Converter<byte[], Binary>() {
public Binary convert(byte[] source) {
return new Binary(source);
}
});
mappingContext.getConverterRegistry().addConverter(new Converter<Binary,byte[] >() {
public byte[] convert(Binary source) {
return source.getData();
}
});
}
public MongoDatastore(MongoMappingContext mappingContext) {
this(mappingContext, Collections.<String, String>emptyMap(), null);
}
/**
* Constructor for creating a MongoDatastore using an existing Mongo instance
* @param mappingContext The MappingContext
* @param mongo The existing Mongo instance
*/
public MongoDatastore(MongoMappingContext mappingContext, Mongo mongo,
ConfigurableApplicationContext ctx) {
this(mappingContext, Collections.<String, String>emptyMap(), ctx);
this.mongo = mongo;
}
/**
* Constructor for creating a MongoDatastore using an existing Mongo instance. In this case
* the connection details are only used to supply a USERNAME and PASSWORD
*
* @param mappingContext The MappingContext
* @param mongo The existing Mongo instance
*/
public MongoDatastore(MongoMappingContext mappingContext, Mongo mongo,
Map<String, String> connectionDetails, ConfigurableApplicationContext ctx) {
this(mappingContext, connectionDetails, ctx);
this.mongo = mongo;
}
public Mongo getMongo() {
return mongo;
}
public MongoTemplate getMongoTemplate(PersistentEntity entity) {
return mongoTemplates.get(entity);
}
public String getCollectionName(PersistentEntity entity) {
return mongoCollections.get(entity);
}
public UserCredentials getUserCrentials() {
return userCrentials;
}
@Override
protected Session createSession(Map<String, String> connDetails) {
if(stateless) {
return createStatelessSession(connectionDetails);
}
else {
return new MongoSession(this, getMappingContext(), getApplicationEventPublisher(), false);
}
}
@Override
protected Session createStatelessSession(Map<String, String> connectionDetails) {
return new MongoSession(this, getMappingContext(), getApplicationEventPublisher(), true);
}
public void afterPropertiesSet() throws Exception {
if (mongo == null) {
ServerAddress defaults = new ServerAddress();
MongoFactoryBean dbFactory = new MongoFactoryBean();
dbFactory.setHost(read(String.class, MONGO_HOST, connectionDetails, defaults.getHost()));
dbFactory.setPort(read(Integer.class, MONGO_PORT, connectionDetails, defaults.getPort()));
this.stateless = read(Boolean.class, MONGO_STATELESS, connectionDetails, false);
if (mongoOptions != null) {
dbFactory.setMongoOptions(mongoOptions);
}
dbFactory.afterPropertiesSet();
mongo = dbFactory.getObject();
}
for (PersistentEntity entity : mappingContext.getPersistentEntities()) {
// Only create Mongo templates for entities that are mapped with Mongo
if (!entity.isExternal()) {
createMongoTemplate(entity, mongo);
}
}
}
protected void createMongoTemplate(PersistentEntity entity, Mongo mongoInstance) {
DocumentMappingContext dc = (DocumentMappingContext) getMappingContext();
String collectionName = entity.getDecapitalizedName();
String databaseName = dc.getDefaultDatabaseName();
@SuppressWarnings("unchecked") ClassMapping<MongoCollection> mapping = entity.getMapping();
final MongoCollection mongoCollection = mapping.getMappedForm() != null ? mapping.getMappedForm() : null;
if (mongoCollection != null) {
if (mongoCollection.getCollection() != null) {
collectionName = mongoCollection.getCollection();
}
if (mongoCollection.getDatabase() != null) {
databaseName = mongoCollection.getDatabase();
}
}
final SimpleMongoDbFactory dbf;
String username = read(String.class, USERNAME, connectionDetails, null);
String password = read(String.class, PASSWORD, connectionDetails, null);
if (username != null && password != null) {
this.userCrentials = new UserCredentials(username, password);
dbf = new SimpleMongoDbFactory(mongoInstance, databaseName, userCrentials);
}
else {
dbf = new SimpleMongoDbFactory(mongoInstance, databaseName);
}
final MongoTemplate mt = new MongoTemplate(dbf);
if (mongoCollection != null) {
final WriteConcern writeConcern = mongoCollection.getWriteConcern();
if (writeConcern != null) {
final String collectionNameToUse = collectionName;
mt.executeInSession(new DbCallback<Object>() {
public Object doInDB(DB db) throws MongoException, DataAccessException {
if (writeConcern != null) {
DBCollection collection = db.getCollection(collectionNameToUse);
collection.setWriteConcern(writeConcern);
}
return null;
}
});
}
}
mongoTemplates.put(entity, mt);
mongoCollections.put(entity, collectionName);
initializeIndices(entity, mt);
}
/**
* Indexes any properties that are mapped with index:true
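 *
 * <p>For example, a domain class mapped roughly as follows (a hypothetical sketch) gets an
 * index created for {@code title} on its collection:</p>
 * <pre>
 * static mapping = {
 *     title index: true
 * }
 * </pre>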
* @param entity The entity
* @param template The template
*/
protected void initializeIndices(final PersistentEntity entity, final MongoTemplate template) {
template.execute(new DbCallback<Object>() {
@SuppressWarnings({ "unchecked", "rawtypes" })
public Object doInDB(DB db) throws MongoException, DataAccessException {
final DBCollection collection = db.getCollection(getCollectionName(entity));
final ClassMapping<MongoCollection> classMapping = entity.getMapping();
if (classMapping != null) {
final MongoCollection mappedForm = classMapping.getMappedForm();
if (mappedForm != null) {
List<MongoCollection.Index> indices = mappedForm.getIndices();
for (MongoCollection.Index index : indices) {
collection.ensureIndex(new BasicDBObject(index.getDefinition()), new BasicDBObject(index.getOptions()));
}
for (Map compoundIndex : mappedForm.getCompoundIndices()) {
Map indexAttributes = null;
if(compoundIndex.containsKey(INDEX_ATTRIBUTES)) {
Object o = compoundIndex.remove(INDEX_ATTRIBUTES);
if(o instanceof Map) {
indexAttributes = (Map) o;
}
}
DBObject indexDef = new BasicDBObject(compoundIndex);
if(indexAttributes != null) {
collection.createIndex(indexDef, new BasicDBObject(indexAttributes));
}
else {
collection.createIndex(indexDef);
}
}
}
}
for (PersistentProperty<MongoAttribute> property : entity.getPersistentProperties()) {
final boolean indexed = isIndexed(property);
if (indexed) {
final MongoAttribute mongoAttributeMapping = property.getMapping().getMappedForm();
DBObject dbObject = new BasicDBObject();
final String fieldName = getMongoFieldNameForProperty(property);
dbObject.put(fieldName,1);
DBObject options = new BasicDBObject();
if (mongoAttributeMapping != null) {
Map attributes = mongoAttributeMapping.getIndexAttributes();
if (attributes != null) {
attributes = new HashMap(attributes);
if (attributes.containsKey(MongoAttribute.INDEX_TYPE)) {
dbObject.put(fieldName, attributes.remove(MongoAttribute.INDEX_TYPE));
}
options.putAll(attributes);
}
}
// continue using deprecated method to support older versions of MongoDB
if (options.toMap().isEmpty()) {
collection.ensureIndex(dbObject);
}
else {
collection.ensureIndex(dbObject, options);
}
}
}
return null;
}
String getMongoFieldNameForProperty(PersistentProperty<MongoAttribute> property) {
PropertyMapping<MongoAttribute> pm = property.getMapping();
String propKey = null;
if (pm.getMappedForm() != null) {
propKey = pm.getMappedForm().getField();
}
if (propKey == null) {
propKey = property.getName();
}
return propKey;
}
});
}
public void persistentEntityAdded(PersistentEntity entity) {
createMongoTemplate(entity, mongo);
}
public void destroy() throws Exception {
super.destroy();
if (mongo != null) {
mongo.close();
}
}
@Override
public boolean isSchemaless() {
return true;
}
}
<file_sep>version = "1.0.0.BUILD-SNAPSHOT"
configurations {
grails
}
dependencies {
grails("org.grails:grails-core:$grailsVersion") {
transitive = false
}
grails("org.grails:grails-bootstrap:$grailsVersion") {
transitive = false
}
compile project(":grails-datastore-gorm"), {
exclude group:"org.grails", module:"grails-datastore-core"
}
compile project(":grails-datastore-gorm-plugin-support"),
project(":grails-datastore-jpa"),
project(":grails-datastore-core"),
project(":grails-datastore-web")
testCompile 'org.hibernate:hibernate-entitymanager:3.4.0.GA',
'hsqldb:hsqldb:1.8.0.10'
testCompile('org.hibernate:hibernate-commons-annotations:3.2.0.Final'){
exclude group: 'org.slf4j', module:'slf4j-api'
exclude group: 'commons-logging', module:'commons-logging'
}
testCompile('org.hibernate:hibernate-validator:4.1.0.Final') {
exclude group:'commons-logging', module:'commons-logging'
exclude group:'commons-collections', module:'commons-collections'
exclude group:'org.slf4j', module:'slf4j-api'
}
testCompile('org.hibernate:hibernate-core:3.6.10.Final') {
exclude group:'commons-logging', module:'commons-logging'
exclude group:'commons-collections', module:'commons-collections'
exclude group:'org.slf4j', module:'slf4j-api'
exclude group:'xml-apis', module:'xml-apis'
exclude group:'dom4j', module:'dom4j'
exclude group:'antlr', module: 'antlr'
}
testCompile project(":grails-datastore-gorm-tck")
}
sourceSets {
main {
compileClasspath += configurations.grails
}
test {
compileClasspath += configurations.grails
}
}
<file_sep>version = "1.0.1.BUILD-SNAPSHOT"
dependencies {
compile('redis.clients:jedis:2.5.1')
compile project(":grails-datastore-gorm"),
project(":grails-datastore-gorm-plugin-support"),
project(":grails-datastore-core")
testCompile project(":grails-datastore-gorm-test"),
project(":grails-datastore-gorm-tck")
testRuntime "org.springframework:spring-expression:$springVersion"
}
/*
test {
jvmArgs '-Xmx1024m', '-Xdebug', '-Xnoagent', '-Dgrails.full.stacktrace=true', '-Djava.compiler=NONE',
'-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005'
}
*/
<file_sep>/*
* Copyright (c) 2010 by <NAME> <<EMAIL>>
* Portions (c) 2010 by NPC International, Inc. or the
* original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
buildscript {
repositories {
mavenCentral()
mavenRepo urls: "http://repository.jboss.org/maven2/"
}
dependencies {
classpath "org.grails:grails-gradle-plugin:1.0", "org.grails:grails-bootstrap:1.3.6"
}
}
apply plugin: "grails"
grailsVersion = "1.3.6"
repositories {
flatDir dirs: "lib"
}
configurations {
compile.exclude module: "commons-logging"
compile.exclude module: 'xml-apis'
}
repositories {
mavenRepo urls: "http://repository.codehaus.org/"
}
dependencies {
compile project(":grails-datastore-core"),
project(":grails-datastore-riak"),
project(":grails-datastore-gorm"),
project(":grails-datastore-gorm-riak")
runtime "org.aspectj:aspectjrt:1.6.8"
runtime "org.slf4j:slf4j-simple:1.5.8",
"hsqldb:hsqldb:1.8.0.5",
"net.sf.ehcache:ehcache-core:1.7.1"
runtime 'org.codehaus.jackson:jackson-core-asl:1.6.1',
'org.codehaus.jackson:jackson-mapper-asl:1.6.1',
'org.springframework.data:spring-data-riak:1.0.0.M2-SNAPSHOT'
compile "org.grails:grails-crud:1.3.6",
"org.grails:grails-gorm:1.3.6"
}<file_sep>package org.grails.datastore.mapping.simpledb.engine;
import org.grails.datastore.mapping.model.PersistentEntity;
import java.util.UUID;
/**
* Uses java UUID to generate a unique id.
* @author <NAME>
*/
public class SimpleDBUUIDIdGenerator implements SimpleDBIdGenerator {
public Object generateIdentifier(PersistentEntity persistentEntity, SimpleDBNativeItem nativeEntry) {
return UUID.randomUUID().toString();
}
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.gorm.neo4j;
import org.grails.datastore.gorm.neo4j.engine.CypherEngine;
import org.grails.datastore.mapping.config.Property;
import org.grails.datastore.mapping.core.AbstractDatastore;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.core.StatelessDatastore;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.model.types.Simple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Datastore implementation for Neo4j backend
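 *
 * <p>For an entity labelled {@code Person} with an indexed {@code name} property (a
 * hypothetical example), {@link #setupIndexing()} issues Cypher along the lines of:</p>
 * <pre>
 * CREATE INDEX ON :Person(__id__)
 * CREATE INDEX ON :Person(name)
 * </pre>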
* @author <NAME> <<EMAIL>>
*/
public class Neo4jDatastore extends AbstractDatastore implements InitializingBean, DisposableBean, StatelessDatastore {
private static Logger log = LoggerFactory.getLogger(Neo4jDatastore.class);
protected MappingContext mappingContext;
protected CypherEngine cypherEngine;
protected boolean skipIndexSetup = false;
protected IdGenerator idGenerator = new SnowflakeIdGenerator();
public Neo4jDatastore(MappingContext mappingContext, ApplicationContext applicationContext, CypherEngine cypherEngine) {
super(mappingContext);
this.mappingContext = mappingContext;
this.cypherEngine = cypherEngine;
setApplicationContext(applicationContext);
}
public void setSkipIndexSetup(boolean skipIndexSetup) {
this.skipIndexSetup = skipIndexSetup;
}
@Override
protected Session createSession(Map<String, String> connectionDetails) {
return new Neo4jSession(this, mappingContext, getApplicationContext(), false, cypherEngine);
}
@Override
public void afterPropertiesSet() throws Exception {
if (!skipIndexSetup) {
setupIndexing();
}
}
public long nextIdForType(PersistentEntity pe) {
return idGenerator.nextId();
}
public void setupIndexing() {
Set<String> schemaStrings = new HashSet<String>(); // using set to avoid duplicate index creation
for (PersistentEntity persistentEntity: mappingContext.getPersistentEntities()) {
for (String label: ((GraphPersistentEntity)persistentEntity).getLabels()) {
StringBuilder sb = new StringBuilder();
sb.append("CREATE INDEX ON :").append(label).append("(__id__)");
schemaStrings.add(sb.toString());
for (PersistentProperty persistentProperty : persistentEntity.getPersistentProperties()) {
Property mappedForm = persistentProperty.getMapping().getMappedForm();
if ((persistentProperty instanceof Simple) && (mappedForm != null) && (mappedForm.isIndex())) {
sb = new StringBuilder();
sb.append("CREATE INDEX ON :").append(label).append("(").append(persistentProperty.getName()).append(")");
schemaStrings.add(sb.toString());
log.debug("setting up indexing for " + label + " property " + persistentProperty.getName());
}
}
}
}
for (String cypher: schemaStrings) {
cypherEngine.execute(cypher);
}
cypherEngine.commit();
}
}<file_sep>dependencies {
compile 'commons-lang:commons-lang:2.6'
compile project(":grails-datastore-gorm")
compile 'junit:junit:4.8.2'
compile 'javax.servlet:javax.servlet-api:3.0.1'
}
<file_sep>configurations {
optional
}
dependencies {
compile(project(":grails-datastore-gorm-hibernate-core")) {
exclude group:'org.hibernate', module:'hibernate-core'
exclude group:'org.hibernate', module:'hibernate-validator'
exclude group:'org.hibernate', module:'hibernate-ehcache'
exclude group:'org.hibernate', module:'hibernate-commons-annotations'
}
// Web dependencies optional
optional("org.grails:grails-web:$grailsVersion") {
transitive = false
}
optional("org.grails:grails-spring:$grailsVersion") {
transitive = false
}
optional("org.springframework:spring-webmvc:$springVersion") {
transitive = false
}
optional 'javax.servlet:servlet-api:2.5'
optional(project(":grails-datastore-web"))
String hibernateVersion = '4.3.5.Final'
compile("org.hibernate:hibernate-core:$hibernateVersion") {
exclude group:'commons-logging', module:'commons-logging'
exclude group:'dom4j', module:'dom4j'
exclude group:'com.h2database', module:'h2'
exclude group:'commons-collections', module:'commons-collections'
exclude group:'org.slf4j', module:'jcl-over-slf4j'
exclude group:'org.slf4j', module:'slf4j-api'
exclude group:'org.slf4j', module:'slf4j-log4j12'
exclude group:'xml-apis', module:'xml-apis'
}
compile('org.hibernate.common:hibernate-commons-annotations:4.0.4.Final'){
exclude group: 'org.slf4j', module:'slf4j-api'
exclude group: 'commons-logging', module:'commons-logging'
}
compile('org.hibernate:hibernate-validator:5.0.3.Final') {
exclude group:'commons-logging', module:'commons-logging'
exclude group:'commons-collections', module:'commons-collections'
exclude group:'org.slf4j', module:'slf4j-api'
}
runtime('dom4j:dom4j:1.6.1') {
exclude group: 'xml-apis', module:'xml-apis'
}
optional ("net.sf.ehcache:ehcache-core:2.4.8") {
exclude group: 'commons-logging', module:'commons-logging'
}
optional ("org.hibernate:hibernate-ehcache:$hibernateVersion") {
exclude group:'commons-collections', module:'commons-collections'
exclude group:'commons-logging', module:'commons-logging'
exclude group:'com.h2database', module:'h2'
exclude group:'dom4j', module:'dom4j'
exclude group:'net.sf.ehcache', module:'ehcache'
exclude group:'net.sf.ehcache', module:'ehcache-core'
exclude group:'org.hibernate', module:'hibernate-core'
exclude group:'org.slf4j', module:'jcl-over-slf4j'
exclude group:'org.slf4j', module:'slf4j-api'
exclude group:'org.slf4j', module:'slf4j-log4j12'
exclude group:'xml-apis', module:'xml-apis'
}
testCompile project(":grails-datastore-gorm-test")
testCompile project(":grails-datastore-gorm-tck")
testCompile "com.h2database:h2:1.3.164"
testCompile "net.sf.ehcache:ehcache-core:2.4.6"
testCompile "org.hibernate:hibernate-ehcache:4.3.5.Final"
optional "javax.servlet:servlet-api:2.5"
def excludes = {
exclude group:"org.grails",module: "grails-plugin-url-mappings"
exclude group:"org.grails",module: "grails-plugin-servlets"
exclude group:"org.grails",module: "grails-plugin-controllers"
exclude group:"org.grails",module: "grails-plugin-domain-class"
exclude group:"org.grails",module: "grails-plugin-gsp"
exclude group:"org.grails",module: "grails-plugin-filters"
exclude group:"org.grails",module: "grails-plugin-mimetypes"
exclude group:"org.grails",module: "grails-plugin-converters"
exclude group:"org.grails",module: "grails-logging"
exclude group:"org.grails",module: "grails-test"
exclude group:"org.grails",module: "grails-datastore-gorm"
exclude group:"org.grails",module: "grails-datastore-core"
exclude group:"org.grails",module: "grails-datastore-simple"
exclude group:"org.grails",module: "grails-datastore-gorm"
}
optional "org.grails:grails-test:$grailsVersion", excludes
optional "org.grails:grails-plugin-testing:$grailsVersion", excludes
}
sourceSets {
main {
compileClasspath += configurations.optional
}
javadoc {
classpath = configurations.compile + configurations.optional
}
}
idea {
module {
scopes.PROVIDED.plus += configurations.optional
}
}
eclipse {
classpath {
plusConfigurations += configurations.optional
file {
whenMerged { classpath ->
// move grails-datastore-gorm-hibernate-core reference to end
def hibernateCoreReference = classpath.entries.find { entry -> entry.kind == 'src' && entry.path == "/grails-datastore-gorm-hibernate-core" }
if (hibernateCoreReference) {
classpath.entries.remove hibernateCoreReference
classpath.entries << hibernateCoreReference
}
}
}
}
}
test {
testLogging {
exceptionFormat ='full'
}
forkEvery = 30
maxParallelForks = 4
jvmArgs '-server','-Xmx1024M', '-Xms64M', '-XX:PermSize=32m','-XX:MaxPermSize=256m','-XX:+CMSClassUnloadingEnabled','-XX:+HeapDumpOnOutOfMemoryError'
}
/*
test {
jvmArgs '-Xmx1024m', '-Xdebug', '-Xnoagent', '-Dgrails.full.stacktrace=true', '-Djava.compiler=NONE',
'-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005'
}
*/
<file_sep>package org.grails.datastore.mapping.orient;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.grails.datastore.mapping.core.AbstractSession;
import org.grails.datastore.mapping.document.config.DocumentMappingContext;
import org.grails.datastore.mapping.engine.Persister;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.orient.engine.OrientEntityPersister;
import org.grails.datastore.mapping.transactions.SessionOnlyTransaction;
import org.grails.datastore.mapping.transactions.Transaction;
import org.springframework.context.ApplicationEventPublisher;
import java.io.Serializable;
/**
* A {@link org.grails.datastore.mapping.core.Session} implementation for the Orient document store.
*
* @author <NAME>
*/
public class OrientSession extends AbstractSession<ODatabaseDocumentTx> {
ODatabase orientDatabaseService;
public OrientSession(OrientDatastore datastore, MappingContext mappingContext, ODatabase orientDatabaseService, ApplicationEventPublisher publisher) {
super(datastore, mappingContext, publisher, false);
this.orientDatabaseService = orientDatabaseService;
}
public ODatabaseDocumentTx getNativeInterface() {
return (ODatabaseDocumentTx)orientDatabaseService;
}
@Override
protected Persister createPersister(Class cls, MappingContext mappingContext) {
final PersistentEntity entity = mappingContext.getPersistentEntity(cls.getName());
return entity == null ? null : new OrientEntityPersister(mappingContext, entity, this, publisher);
}
@Override
protected Transaction beginTransactionInternal() {
return new SessionOnlyTransaction<ODatabaseDocumentTx>(getNativeInterface(), this);
}
@Override
public void cacheEntry(PersistentEntity entity, Serializable key, Object entry) {
super.cacheEntry(entity.getRootEntity(), key, entry);
}
}
<file_sep>/*
* Copyright (c) 2010 by <NAME> <<EMAIL>>
* Portions (c) 2010 by NPC International, Inc. or the
* original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.riak;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.data.keyvalue.riak.core.RiakTemplate;
import org.grails.datastore.mapping.core.AbstractDatastore;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.keyvalue.mapping.config.KeyValueMappingContext;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.riak.util.Ignore404sErrorHandler;
/**
 * A {@link org.grails.datastore.mapping.core.Datastore} implementation for the Riak
* Key/Value store.
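 *
 * <p>A minimal configuration sketch; the URIs shown are simply the defaults and are
 * illustrative only:</p>
 * <pre>
 * Map connectionDetails = new HashMap();
 * connectionDetails.put(RiakDatastore.CONFIG_DEFAULT_URI, "http://localhost:8098/riak/{bucket}/{key}");
 * connectionDetails.put(RiakDatastore.CONFIG_MAPRED_URI, "http://localhost:8098/mapred");
 * RiakDatastore datastore = new RiakDatastore(new KeyValueMappingContext(""), connectionDetails, null);
 * </pre>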
*
* @author <NAME> <<EMAIL>>
*/
public class RiakDatastore extends AbstractDatastore implements InitializingBean, DisposableBean {
public static final String CONFIG_DEFAULT_URI = "defaultUri";
public static final String CONFIG_MAPRED_URI = "mapReduceUri";
public static final String CONFIG_USE_CACHE = "useCache";
public static final String DEFAULT_URI = "http://localhost:8098/riak/{bucket}/{key}";
public static final String DEFAULT_MAPRED_URI = "http://localhost:8098/mapred";
public static final boolean DEFAULT_USE_CACHE = true;
private final Logger log = LoggerFactory.getLogger(getClass());
/**
* The full URI to use on the {@link org.springframework.data.keyvalue.riak.core.RiakTemplate}.
*/
private String defaultUri = DEFAULT_URI;
/**
* The Map/Reduce URI to use on the {@link org.springframework.data.keyvalue.riak.core.RiakTemplate}.
*/
private String mapReduceUri = DEFAULT_MAPRED_URI;
/**
* Whether or not to use the internal, ETag-based object cache.
*/
private boolean useCache = DEFAULT_USE_CACHE;
public RiakDatastore() {
this(new KeyValueMappingContext(""));
}
public RiakDatastore(MappingContext mappingContext) {
this(mappingContext, null, null);
}
public RiakDatastore(MappingContext mappingContext, Map<String, String> connectionDetails,
ConfigurableApplicationContext ctx) {
super(mappingContext, connectionDetails, ctx);
initializeConverters(mappingContext);
if (connectionDetails != null) {
defaultUri = connectionDetails.containsKey(CONFIG_DEFAULT_URI) ? connectionDetails.get(
CONFIG_DEFAULT_URI) : DEFAULT_URI;
mapReduceUri = connectionDetails.containsKey(CONFIG_MAPRED_URI) ? connectionDetails.get(
CONFIG_MAPRED_URI) : DEFAULT_MAPRED_URI;
useCache = connectionDetails.containsKey(CONFIG_USE_CACHE) ? Boolean.parseBoolean(
connectionDetails.get(
CONFIG_USE_CACHE).toString()) : DEFAULT_USE_CACHE;
}
}
@Override
protected Session createSession(Map<String, String> connDetails) {
@SuppressWarnings("hiding") String defaultUri = this.defaultUri;
if (connDetails != null) {
defaultUri = connDetails.containsKey(CONFIG_DEFAULT_URI) ? connDetails.get(
CONFIG_DEFAULT_URI) : DEFAULT_URI;
mapReduceUri = connDetails.containsKey(CONFIG_MAPRED_URI) ? connDetails.get(
CONFIG_MAPRED_URI) : DEFAULT_MAPRED_URI;
useCache = connDetails.containsKey(CONFIG_USE_CACHE) ? Boolean.parseBoolean(
connDetails.get(
CONFIG_USE_CACHE).toString()) : DEFAULT_USE_CACHE;
}
RiakTemplate riak = new RiakTemplate(defaultUri, mapReduceUri);
riak.setUseCache(useCache);
riak.getRestTemplate().setErrorHandler(new Ignore404sErrorHandler());
try {
riak.afterPropertiesSet();
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return new RiakSession(this, mappingContext, riak, getApplicationEventPublisher());
}
public void destroy() throws Exception {
}
public void afterPropertiesSet() throws Exception {
}
}
<file_sep>// Datastore Project
include ("grails-datastore-core",
"grails-datastore-gemfire",
"grails-datastore-jpa",
/* "grails-datastore-riak",*/
"grails-datastore-simple",
"grails-datastore-web",
"grails-datastore-simpledb",
"grails-datastore-dynamodb",
'grails-datastore-rest-client',
'grails-datastore-orient',
/*
"grails-datastore-jcr",*/
// Documentation
'grails-documentation-core',
'grails-documentation-mongodb',
'grails-documentation-redis',
/* 'grails-documentation-riak',*/
'grails-documentation-neo4j',
'grails-documentation-simpledb',
'grails-documentation-dynamodb',
'grails-documentation-rest-client',
// Core GORM Implementation projects
'grails-datastore-gorm',
'grails-datastore-gorm-tck',
'grails-datastore-gorm-test',
'grails-datastore-gorm-plugin-support',
// Grails Plugins
/*
'grails-plugins:redis',
'grails-plugins:riak',
*/
// Boot Extensions
// 'boot-plugins/gorm-hibernate4-spring-boot',
// 'boot-plugins/gorm-mongodb-spring-boot',
// GORM Implementations
// 'grails-datastore-gorm-cassandra',
// 'grails-datastore-gorm-jpa',
// 'grails-datastore-gorm-neo4j',
'grails-datastore-gorm-redis',
'grails-datastore-gorm-rest-client',
'grails-datastore-gorm-orient',
/* 'grails-datastore-gorm-riak',*/
// 'grails-datastore-gorm-mongodb',
// 'grails-datastore-gorm-gemfire',
'grails-datastore-gorm-simpledb',
// 'grails-datastore-gorm-dynamodb',
'grails-datastore-gorm-hibernate-core',
'grails-datastore-gorm-hibernate',
'grails-datastore-gorm-hibernate4'
/*
'grails-datastore-gorm-jcr',
'grails-datastore-gorm-simpledb'
*/
)
<file_sep>/* Copyright (C) 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.simpledb.engine;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.services.simpledb.model.Attribute;
import com.amazonaws.services.simpledb.model.Item;
import com.amazonaws.services.simpledb.model.ReplaceableAttribute;
import com.amazonaws.services.simpledb.model.ReplaceableItem;
/**
* Logical representation of how information is loaded from and sent to AWS.
* <p/>
 * It stores all data in an internal Map and then creates the appropriate AWS objects ({@link com.amazonaws.services.simpledb.model.ReplaceableAttribute}).
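 *
 * <p>A minimal usage sketch; the attribute names and values are illustrative only:</p>
 * <pre>
 * SimpleDBNativeItem item = new SimpleDBNativeItem();
 * item.put("id", "1234");
 * item.put("title", "Some title");
 * ReplaceableItem replaceable = item.createReplaceableItem(); // "id" is excluded from the attributes
 * </pre>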
*
* @author <NAME>
* @since 0.1
*/
public class SimpleDBNativeItem {
private Map<String, String> data = Collections.synchronizedMap(new HashMap<String, String>());
public SimpleDBNativeItem() {}
public SimpleDBNativeItem(Item item) {
//populate map with the item attributes. //todo - handle multi-value attributes/long string etc
List<Attribute> attributes = item.getAttributes();
for (Attribute attribute : attributes) {
put(attribute.getName(), attribute.getValue());
}
}
public void put(String key, String value) {
data.put(key, value);
}
public String get(String key) {
return data.get(key);
}
public ReplaceableItem createReplaceableItem() {
ReplaceableItem replaceableItem = new ReplaceableItem();
for (Map.Entry<String, String> entry : data.entrySet()) {
//exclude id property because that will be specified as the item name
String key = entry.getKey();
if (!"id".equals(key)) {
String value = entry.getValue();
replaceableItem.withAttributes(new ReplaceableAttribute(key, value, true));
}
}
return replaceableItem;
}
@Override
public String toString() {
return "SimpleDBNativeItem{data=" + data + '}';
}
}
<file_sep>/* Copyright (C) 2010 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.core;
import java.util.Collections;
import java.util.Map;
import org.grails.datastore.mapping.cache.TPCacheAdapterRepository;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.convert.converter.ConverterRegistry;
import org.grails.datastore.mapping.config.Property;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.model.PropertyMapping;
import org.grails.datastore.mapping.model.types.BasicTypeConverterRegistrar;
import org.grails.datastore.mapping.transactions.SessionHolder;
import org.grails.datastore.mapping.validation.ValidatingEventListener;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.util.Assert;
import org.springframework.validation.Errors;
/**
 * Abstract Datastore implementation that deals with binding the Session to a thread local upon creation.
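 *
 * <p>Typical usage follows a connect-then-disconnect pattern (sketch only; {@code datastore}
 * stands for any concrete subclass):</p>
 * <pre>
 * Session session = datastore.connect();
 * try {
 *     // ... work with the session ...
 * } finally {
 *     session.disconnect();
 * }
 * </pre>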
*
* @author <NAME>
* @since 1.0
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public abstract class AbstractDatastore implements Datastore, StatelessDatastore, DisposableBean {
private ApplicationContext applicationContext;
private static final SoftThreadLocalMap ERRORS_MAP = new SoftThreadLocalMap();
private static final SoftThreadLocalMap VALIDATE_MAP = new SoftThreadLocalMap();
protected MappingContext mappingContext;
protected Map<String, String> connectionDetails = Collections.emptyMap();
protected TPCacheAdapterRepository cacheAdapterRepository;
public AbstractDatastore() {}
public AbstractDatastore(MappingContext mappingContext) {
this(mappingContext, null, null);
}
public AbstractDatastore(MappingContext mappingContext, Map<String, String> connectionDetails,
ConfigurableApplicationContext ctx) {
this(mappingContext, connectionDetails, ctx, null);
}
public AbstractDatastore(MappingContext mappingContext, Map<String, String> connectionDetails,
ConfigurableApplicationContext ctx, TPCacheAdapterRepository cacheAdapterRepository) {
this.mappingContext = mappingContext;
this.connectionDetails = connectionDetails != null ? connectionDetails : Collections.<String, String>emptyMap();
setApplicationContext(ctx);
this.cacheAdapterRepository = cacheAdapterRepository;
}
public void destroy() throws Exception {
ERRORS_MAP.remove();
VALIDATE_MAP.remove();
}
public void setApplicationContext(ApplicationContext ctx) {
applicationContext = ctx;
if (ctx != null && registerValidationListener()) {
Assert.isInstanceOf(ConfigurableApplicationContext.class, applicationContext,
"ApplicationContext must be an instanceof ConfigurableApplicationContext");
((ConfigurableApplicationContext)ctx).addApplicationListener(new ValidatingEventListener(this));
}
}
protected boolean registerValidationListener() {
return true;
}
public void setConnectionDetails(Map<String, String> connectionDetails) {
this.connectionDetails = connectionDetails;
}
public Session connect() {
return connect(connectionDetails);
}
public final Session connect(Map<String, String> connDetails) {
Session session = createSession(connDetails);
publishSessionCreationEvent(session);
return session;
}
private void publishSessionCreationEvent(Session session) {
ApplicationEventPublisher applicationEventPublisher = getApplicationEventPublisher();
if(applicationEventPublisher != null) {
applicationEventPublisher.publishEvent(new SessionCreationEvent(session));
}
}
@Override
public Session connectStateless() {
Session session = createStatelessSession(connectionDetails);
publishSessionCreationEvent(session);
return session;
}
/**
* Creates the native session
*
* @param connectionDetails The session details
* @return The session object
*/
protected abstract Session createSession(Map<String, String> connectionDetails);
/**
* Creates the native stateless session
*
* @param connectionDetails The session details
* @return The session object
*/
protected Session createStatelessSession(Map<String, String> connectionDetails) {
return createSession(connectionDetails);
}
public Session getCurrentSession() throws ConnectionNotFoundException {
return DatastoreUtils.doGetSession(this, false);
}
public boolean hasCurrentSession() {
return TransactionSynchronizationManager.hasResource(this);
}
/**
* Static way to retrieve the session
* @return The session instance
* @throws ConnectionNotFoundException If no session has been created
*/
public static Session retrieveSession() throws ConnectionNotFoundException {
return retrieveSession(Datastore.class);
}
/**
* Static way to retrieve the session
* @param datastoreClass The type of datastore
* @return The session instance
* @throws ConnectionNotFoundException If no session has been created
*/
public static Session retrieveSession(Class datastoreClass) throws ConnectionNotFoundException {
final Map<Object, Object> resourceMap = TransactionSynchronizationManager.getResourceMap();
Session session = null;
if (resourceMap != null && !resourceMap.isEmpty()) {
for (Object key : resourceMap.keySet()) {
if (datastoreClass.isInstance(key)) {
SessionHolder sessionHolder = (SessionHolder) resourceMap.get(key);
if (sessionHolder != null) {
session = sessionHolder.getSession();
}
}
}
}
if (session == null) {
throw new ConnectionNotFoundException("No datastore session found. Call Datastore.connect(..) before calling Datastore.getCurrentSession()");
}
return session;
}
public MappingContext getMappingContext() {
return mappingContext;
}
public ConfigurableApplicationContext getApplicationContext() {
return (ConfigurableApplicationContext)applicationContext;
}
public ApplicationEventPublisher getApplicationEventPublisher() {
return getApplicationContext();
}
public Errors getObjectErrors(final Object o) {
return getValidationErrorsMap().get(System.identityHashCode(o));
}
public void setObjectErrors(Object object, Errors errors) {
getValidationErrorsMap().put(System.identityHashCode(object), errors);
}
public void setSkipValidation(final Object o, final boolean skip) {
VALIDATE_MAP.get().put(System.identityHashCode(o), skip);
}
public boolean skipValidation(final Object o) {
final Object skipValidation = VALIDATE_MAP.get().get(System.identityHashCode(o));
return skipValidation instanceof Boolean && (Boolean) skipValidation;
}
public static Map<Object, Errors> getValidationErrorsMap() {
return ERRORS_MAP.get();
}
public static Map<Object, Boolean> getValidationSkipMap() {
return VALIDATE_MAP.get();
}
protected void initializeConverters(MappingContext mappingContext) {
final ConverterRegistry conversionService = mappingContext.getConverterRegistry();
BasicTypeConverterRegistrar registrar = new BasicTypeConverterRegistrar();
registrar.register(conversionService);
}
protected boolean isIndexed(PersistentProperty property) {
PropertyMapping<Property> pm = property.getMapping();
final Property keyValue = pm.getMappedForm();
return keyValue != null && keyValue.isIndex();
}
public boolean isSchemaless() {
return false;
}
}
<file_sep>package org.grails.datastore.mapping.orient.config;
import org.grails.datastore.mapping.config.AbstractGormMappingFactory;
import org.grails.datastore.mapping.model.ClassMapping;
import org.grails.datastore.mapping.model.IdentityMapping;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.types.Identity;
import java.beans.PropertyDescriptor;
/**
 * GORM mapping factory for the OrientDB document store: produces {@link OrientClass}
 * entity mappings and {@link OrientAttribute} property mappings for persistent entities.
 *
 * @author emrul
*/
public class OrientDocumentMappingFactory extends AbstractGormMappingFactory<OrientClass, OrientAttribute> {
@Override
protected Class<OrientAttribute> getPropertyMappedFormType() {
return OrientAttribute.class;
}
@Override
protected Class<OrientClass> getEntityMappedFormType() {
return OrientClass.class;
}
@Override
public OrientClass createMappedForm(PersistentEntity entity) {
OrientClass mappedForm = super.createMappedForm(entity);
mappedForm.setClassName(entity.getName());
return mappedForm;
}
@Override
public boolean isSimpleType(Class propType) {
if (propType == null) return false;
if (propType.isArray()) {
return isSimpleType(propType.getComponentType()) || super.isSimpleType(propType);
}
return OrientMappingContext.isOrientNativeType(propType) || super.isSimpleType(propType);
}
}
<file_sep>title=Redis GORM
version=1.0.0.M8
authors=<NAME><file_sep>title=Riak GORM
version=1.0.0.M4
authors=<NAME>, <NAME><file_sep>package org.grails.datastore.mapping.orient.engine;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ODocument;
import org.grails.datastore.mapping.core.SessionImplementor;
import org.grails.datastore.mapping.engine.AssociationIndexer;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.types.Association;
import org.grails.datastore.mapping.orient.OrientSession;
import org.grails.datastore.mapping.query.Query;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* Created by emrul on 21/08/14.
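 *
 * Indexes association endpoints for the Orient document store: for unidirectional
 * associations the foreign keys are embedded in the owning document (see preIndex),
 * while bidirectional one-to-many associations are resolved by querying the inverse side.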
*/
public class OrientAssociationIndexer implements AssociationIndexer {
private ODocument nativeEntry;
private Association association;
private OrientSession session;
private boolean isReference = true;
public OrientAssociationIndexer(ODocument nativeEntry, Association association, OrientSession session) {
this.nativeEntry = nativeEntry;
this.association = association;
this.session = session;
//this.isReference = isReference(association);
}
public void preIndex(final Object primaryKey, final List foreignKeys) {
// if the association is a unidirectional one-to-many we store the keys
// embedded in the owning entity, otherwise we use a foreign key
//if (!association.isBidirectional()) {
SessionImplementor<Object> si = (SessionImplementor<Object>) session;
ODatabaseDocumentTx db = session.getNativeInterface();
List refs = new ArrayList();
for (Object foreignKey : foreignKeys) {
if ( foreignKey instanceof Integer) {
Object assocObj = si.getCachedEntry( association.getAssociatedEntity().getRootEntity(), (Serializable)foreignKey);
if (assocObj != null) {
refs.add(assocObj);
}
}
//if (isReference) {
// dbRefs.add(new DBRef(db, getCollectionName(association.getAssociatedEntity()), foreignKey));
//}
//else {
//}
}
// update the native entry directly.
nativeEntry.field(association.getName(), refs);
//}
}
public void index(final Object primaryKey, final List foreignKeys) {
// indexing is handled by putting the data in the native entry before it is persisted, see preIndex above.
}
public List query(Object primaryKey) {
// for a unidirectional one-to-many we use the embedded keys
if (!association.isBidirectional()) {
final Object indexed = nativeEntry.field(association.getName());
if (!(indexed instanceof Collection)) {
return Collections.emptyList();
}
List indexedList = getIndexedAssociationsAsList(indexed);
if (associationsAreDbRefs(indexedList)) {
return extractIdsFromDbRefs(indexedList);
}
return indexedList;
}
// for a bidirectional one-to-many we use the foreign key to query the inverse side of the association
Association inverseSide = association.getInverseSide();
Query query = session.createQuery(association.getAssociatedEntity().getJavaClass());
query.eq(inverseSide.getName(), primaryKey);
query.projections().id();
return query.list();
}
public PersistentEntity getIndexedEntity() {
return association.getAssociatedEntity();
}
public void index(Object primaryKey, Object foreignKey) {
// TODO: Implement indexing of individual entities
}
private List getIndexedAssociationsAsList(Object indexed) {
return (indexed instanceof List) ? (List) indexed : new ArrayList(((Collection) indexed));
}
private boolean associationsAreDbRefs(List indexedList) {
return false;// !indexedList.isEmpty() && (indexedList.get(0) instanceof DBRef);
}
private List extractIdsFromDbRefs(List indexedList) {
List resolvedDbRefs = new ArrayList();
for (Object indexedAssociation : indexedList) {
//resolvedDbRefs.add(((DBRef) indexedAssociation).getId());
}
return resolvedDbRefs;
}
}
<file_sep>package org.grails.datastore.gorm.neo4j;
import org.grails.datastore.gorm.neo4j.engine.CypherResult;
import org.grails.datastore.mapping.dirty.checking.DirtyCheckable;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.model.types.Association;
import org.grails.datastore.mapping.model.types.ManyToMany;
import org.grails.datastore.mapping.proxy.ProxyFactory;
import java.util.*;
/**
* Created by stefan on 20.03.14.
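 *
 * A lazily-initialized Set backing Neo4j associations: relationship endpoints are only
 * fetched via Cypher on first access, and additions and removals are queued on the session
 * as pending relationship inserts and deletes.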
*/
public class LazyEnititySet<T> implements Set<T> {
private final ProxyFactory proxyFactory;
private final Neo4jSession session;
private final EntityAccess owner;
private final Set<T> delegate = new HashSet<T>();
private final Association association;
private boolean initialized = false;
private boolean reversed;
private String relType;
public LazyEnititySet(EntityAccess owner, Association association, ProxyFactory proxyFactory, Neo4jSession session) {
this.owner = owner;
this.association = association;
this.proxyFactory = proxyFactory;
this.session = session;
reversed = RelationshipUtils.useReversedMappingFor(association);
relType = RelationshipUtils.relationshipTypeUsedFor(association);
}
private void initialize() {
if (!initialized) {
initialized = true;
String cypher = CypherBuilder.findRelationshipEndpointIdsFor(association);
CypherResult result = session.getNativeInterface().execute(cypher, Collections.singletonList(owner.getIdentifier()));
Class<T> clazz = association.getAssociatedEntity().getJavaClass();
for (Map<String, Object> row : result) {
Long endpoint = (Long) row.get("id");
delegate.add( proxyFactory.createProxy(session, clazz, endpoint));
}
}
}
@Override
public int size() {
initialize();
return delegate.size();
}
@Override
public boolean isEmpty() {
initialize();
return delegate.isEmpty();
}
@Override
public boolean contains(Object o) {
initialize();
return delegate.contains(o);
}
@Override
public Iterator iterator() {
initialize();
return delegate.iterator();
}
@Override
public Object[] toArray() {
initialize();
return delegate.toArray();
}
@Override
public boolean add(Object o) {
initialize();
boolean isNew = delegate.add((T) o);
if (isNew) {
EntityAccess target = new EntityAccess(association.getAssociatedEntity(), o);
if (association.isBidirectional()) {
if (association instanceof ManyToMany) {
Collection coll = (Collection) target.getProperty(association.getReferencedPropertyName());
coll.add(owner.getEntity());
} else {
target.setProperty(association.getReferencedPropertyName(), owner.getEntity());
}
}
if (target.getIdentifier()==null) { // non-persistent instance
session.persist(o);
}
if (!reversed) { // prevent duplicated rels
session.addPendingInsert(new RelationshipPendingInsert(owner, relType, target, session.getNativeInterface()));
}
markDirty();
}
return isNew;
}
@Override
public boolean remove(Object o) {
boolean isDeleted = delegate.remove(o);
if (isDeleted && (!reversed)) {
session.addPendingInsert(new RelationshipPendingDelete(owner, relType,
new EntityAccess(association.getAssociatedEntity(), o),
session.getNativeInterface()));
}
if (isDeleted) {
markDirty();
}
return isDeleted;
}
@Override
public boolean addAll(Collection collection) {
return addAll((Iterable)collection);
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
@Override
public boolean removeAll(Collection collection) {
throw new UnsupportedOperationException();
// return false;
}
@Override
public boolean retainAll(Collection collection) {
throw new UnsupportedOperationException();
// return false;
}
@Override
public boolean containsAll(Collection collection) {
throw new UnsupportedOperationException();
// return false;
}
@Override
public T[] toArray(Object[] objects) {
throw new UnsupportedOperationException();
// return new T[0];
}
public boolean addAll(Iterable objects) {
boolean hasChanged = false;
for (Object object: objects) {
hasChanged |= add(object);
}
return hasChanged;
}
private void markDirty() {
Object entity = owner.getEntity();
if (entity instanceof DirtyCheckable) {
((DirtyCheckable) entity).markDirty(association.getName());
}
}
}
<file_sep>version = "0.6.BUILD-SNAPSHOT"
dependencies {
compile project(":grails-datastore-core")
compile('com.amazonaws:aws-java-sdk:1.6.10')
}
<file_sep>dependencies {
compile project(":grails-datastore-core")
compile 'javax.jcr:jcr:1.0'
compile 'org.springframework:se-jcr:1.0-SNAPSHOT'
compile 'org.apache.jackrabbit:jackrabbit-core:1.6.2'
compile 'org.apache.jackrabbit:jackrabbit-api:1.6.2'
compile 'org.apache.jackrabbit:jackrabbit-jcr-commons:1.6.2'
compile 'org.apache.jackrabbit:jackrabbit-spi-commons:1.6.2'
compile 'org.slf4j:slf4j-simple:1.5.8'
// compile 'org.apache.jackrabbit:jackrabbit-ocm:1.5.3'
//TODO: add commons-collections, concurrent, derby, jackrabbit-text, lucence core
//TODO: also add dependency libraries for spring-extensions including spring-webmvc, servlet api
testCompile 'junit:junit:4.8.2'
}
<file_sep>grails-orient
=============
Grails and GORM bindings for OrientDB
This is the work of <NAME> and was published on the [mailing list](https://groups.google.com/forum/#!topic/orient-database/HE1fQefw14c)
[GORM Datastore API - Reference Documentation](http://springsource.github.io/grails-data-mapping/manual/guide/index.html)
[Build Status](https://travis-ci.org/grails/grails-data-mapping)
The project is still under development, but you can also see another implementation that is already used in production:
https://github.com/eugene-kamenev/orientdb-groovy
Grails Datastore API (aka GORM)
===
[Grails][Grails] is a framework used to build web applications with the [Groovy][Groovy] programming language. This project provides the plumbing for the GORM API, both for Hibernate and for new implementations of GORM on top of NoSQL datastores.
[Grails]: http://grails.org/
[Groovy]: http://groovy.codehaus.org/
Getting Started
---
See the following links for documentation on the various implementations:
* [GORM for Hibernate](http://grails.org/doc/latest/guide/GORM.html)
* [GORM for MongoDB](http://grails.github.io/grails-data-mapping/current/mongodb/index.html)
For API documentation see:
* [Core API / GORM for Hibernate](http://grails.github.io/grails-data-mapping/current/api)
* [GORM for MongoDB API](http://grails.github.io/grails-data-mapping/current/mongodb/api/index.html)
For other implementations see the [following page](http://grails.github.io/grails-data-mapping/current).
Below is an example of using GORM for Hibernate in a Groovy script:
```groovy
@Grab("org.grails:grails-datastore-gorm-hibernate4:3.0.0.RELEASE")
@Grab("org.grails:grails-spring:2.3.6")
@Grab("com.h2database:h2:1.3.164")
import grails.orm.bootstrap.*
import grails.persistence.*
import org.springframework.jdbc.datasource.DriverManagerDataSource
import org.h2.Driver
init = new HibernateDatastoreSpringInitializer(Person)
def dataSource = new DriverManagerDataSource(Driver.name, "jdbc:h2:prodDb;MVCC=TRUE;LOCK_TIMEOUT=10000;DB_CLOSE_ON_EXIT=FALSE", 'sa', '')
init.configureForDataSource(dataSource)
println "Total people = " + Person.count()
@Entity
class Person {
String name
static constraints = {
name blank:false
}
}
```
Developing Implementations
---
For further information on the project see the comprehensive [developer guide][Developer Guide].
[Developer Guide]: http://projects.spring.io/grails-data-mapping/manual/index.html
License
---
Grails and Groovy are licensed under the terms of the [Apache License, Version 2.0][Apache License, Version 2.0].
[Apache License, Version 2.0]: http://www.apache.org/licenses/LICENSE-2.0.html
<file_sep>package org.grails.datastore.mapping.node.mapping;
import org.grails.datastore.mapping.model.AbstractMappingContext;
import org.grails.datastore.mapping.model.MappingConfigurationStrategy;
import org.grails.datastore.mapping.model.MappingFactory;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.config.GormMappingConfigurationStrategy;
/**
* A MappingContext implementation for node-oriented datastores.
*
* @author <NAME>
* @since 1.0
*/
public class NodeMappingContext extends AbstractMappingContext {
private MappingConfigurationStrategy syntaxStrategy;
private NodeMappingFactory mappingFactory;
public NodeMappingContext() {
this.mappingFactory = new NodeMappingFactory();
this.syntaxStrategy = new GormMappingConfigurationStrategy(mappingFactory);
}
@Override
protected PersistentEntity createPersistentEntity(Class javaClass) {
return new NodePersistentEntity(javaClass, this);
}
public MappingConfigurationStrategy getMappingSyntaxStrategy() {
return syntaxStrategy;
}
public MappingFactory getMappingFactory() {
return mappingFactory;
}
}
<file_sep>package org.codehaus.groovy.grails.orm.hibernate.metaclass;
/**
* @deprecated Here for backwards compatibility, use {@link org.grails.datastore.gorm.support.BeforeValidateHelper} instead
*/
public class BeforeValidateHelper extends org.grails.datastore.gorm.support.BeforeValidateHelper {
}
<file_sep>package org.grails.datastore.mapping.appengine;
import java.util.Map;
import org.grails.datastore.mapping.core.AbstractDatastore;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.keyvalue.mapping.config.KeyValueMappingContext;
/**
* @author <NAME>
* @author <NAME>
* @since 1.0
*/
public class AppEngineDatastore extends AbstractDatastore {
// hard coded value of "gae" used for the keyspace since GAE manages spaces automatically
public AppEngineDatastore() {
super(new KeyValueMappingContext("gae"));
}
@Override
protected Session createSession(@SuppressWarnings("hiding") Map<String, String> connectionDetails) {
return new AppEngineSession(this, getMappingContext());
}
}
|
90535baf46b98edfce93fe7f290bc320def33eb8
|
[
"Markdown",
"INI",
"Gradle",
"Java",
"Shell"
] | 74 |
Gradle
|
stokito/grails-data-mapping
|
96fc6f51a0372a095eafe1775988ad7d719d9e61
|
2d4bf4a1c8d95a5ded2a900738ab7097142ff2b0
|
refs/heads/main
|
<repo_name>mihir815/TechnicalAssessmentTest<file_sep>/TechnicalAssessmentTest/Classes/Constants.swift
//
// Constants.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import Foundation
import BFKit
import SwiftyUserDefaults
let appDelegate = UIApplication.shared.delegate as! AppDelegate
class Constants: NSObject {
struct messages {
static var unknownError = "Something went wrong, please try again later."
}
struct environment {
struct production
{
static var BASE_URL = "https://jsonplaceholder.typicode.com"
}
struct staging
{
static var BASE_URL = "https://jsonplaceholder.typicode.com"
}
struct current
{
static var BASE_URL = production.BASE_URL
}
}
struct API
{
static var posts = "/posts"
}
struct GoogleAnalytics {
static var Login = "Login"
static var Dashboard = "Dashboard"
}
struct images
{
static var posts = UIImage(named: "posts")
static var favourites = UIImage(named: "favourites")
}
struct color{
static var colorPrimary = UIColor(string: "#2e5902")
static var colorGreenLight = UIColor(string: "#3a7219")
static var colorGreenDark = UIColor(string: "#2a5713")
static var colorWhite = UIColor(string: "#ffffff")
static var colorToolbar = UIColor(string: "#f8f8f8")
static var colorToolbarText_Selected = UIColor(string: "#00511a")
static var colorToolbarText_NotSelected = UIColor(string: "#43860e")
static var colorButtonGreen = UIColor(string: "#43860e")
static var colorListingSeparator = UIColor(string: "#DBDBDB")
static var colorEditTextLine = UIColor(string: "#bababa")
}
struct customFont{
}
}
<file_sep>/README.md
# TechnicalAssessmentTest
TechnicalAssessmentTest using the MVVM design pattern, Alamofire & Reactive Programming.
<file_sep>/TechnicalAssessmentTest/Classes/Extensions.swift
//
// Extensions.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import Foundation
import UIKit
import MaterialComponents.MaterialButtons
import SwiftyUserDefaults
import SwiftyJSON
extension UINavigationBar{
func setNavigationBarProperties(navigationBar:UINavigationBar){
navigationBar.isTranslucent = false
navigationBar.isOpaque = true
navigationBar.barTintColor = Constants.color.colorPrimary
navigationBar.tintColor = UIColor.color(string: "#ffffff")
navigationBar.titleTextAttributes = [NSAttributedString.Key.foregroundColor : UIColor.color(string: "#ffffff") , NSAttributedString.Key.font : UIFont.boldSystemFont(ofSize: 18.0)]
//navigationBar.setValue(false, forKey: "hidesShadow")
}
}
extension UIDevice {
var iPhoneX: Bool {
return UIScreen.main.bounds.height >= 812
}
}
extension MDCButton{
func addButtonProperties(button:MDCButton , title:String , radius:CGFloat , bgColor:UIColor , titleColor:UIColor,fontSize:CGFloat){
button.setTitle(title, for: .normal)
button.setTitleColor(titleColor)
button.setBackgroundColor(bgColor)
button.setTitleFont(UIFont.systemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: fontSize)), for: .normal)
button.isUppercaseTitle = false
button.titleLabel?.adjustsFontSizeToFitWidth = true
button.titleLabel?.textAlignment = .justified
button.layer.cornerRadius = Utilities.getWidth(width: radius)
button.layer.shadowRadius = 4.0
button.layer.shadowColor = Constants.color.colorPrimary.cgColor
button.layer.shadowOpacity = 0.3
button.layer.shadowOffset = CGSize(width: 0, height: 0)
button.layer.masksToBounds = true
button.clipsToBounds = false
}
func underline() {
guard let text = self.titleLabel?.text else { return }
let attributedString = NSMutableAttributedString(string: text)
//NSAttributedStringKey.foregroundColor : UIColor.blue
attributedString.addAttribute(NSAttributedString.Key.underlineColor, value: self.titleColor(for: .normal)!, range: NSRange(location: 0, length: text.count))
attributedString.addAttribute(NSAttributedString.Key.foregroundColor, value: self.titleColor(for: .normal)!, range: NSRange(location: 0, length: text.count))
attributedString.addAttribute(NSAttributedString.Key.underlineStyle, value: NSUnderlineStyle.single.rawValue, range: NSRange(location: 0, length: text.count))
self.setAttributedTitle(attributedString, for: .normal)
}
}
extension UITextField {
func setLeftPadding(_ amount:CGFloat){
let paddingView = UIView(frame: CGRect(x: 0, y: 0, width: Utilities.getWidth(width: amount), height: self.frame.size.height))
self.leftView = paddingView
self.leftViewMode = .always
}
func setRightPadding(_ amount:CGFloat) {
let paddingView = UIView(frame: CGRect(x: 0, y: 0, width: Utilities.getWidth(width: amount), height: self.frame.size.height))
self.rightView = paddingView
self.rightViewMode = .always
}
func addBottomBorder(color : UIColor){
let bottomLine = CALayer()
bottomLine.frame = CGRect(x: 0, y: self.frame.size.height - 1, width: self.frame.size.width, height: 1)
bottomLine.backgroundColor = color.cgColor
borderStyle = .none
layer.addSublayer(bottomLine)
}
}
extension UITextView{
func addBottomBorder(color : UIColor){
let bottomLine = CALayer()
bottomLine.frame = CGRect(x: 0, y: self.frame.size.height - 1, width: self.frame.size.width, height: 1)
bottomLine.backgroundColor = color.cgColor
layer.addSublayer(bottomLine)
}
}
extension UIView{
func roundCorners(corners: UIRectCorner, radius: CGFloat) {
if #available(iOS 11.0, *) {
clipsToBounds = true
layer.cornerRadius = Utilities.getWidth(width:radius)
layer.maskedCorners = CACornerMask(rawValue: corners.rawValue)
} else {
let path = UIBezierPath(roundedRect: bounds, byRoundingCorners: corners, cornerRadii: CGSize(width: Utilities.getWidth(width:radius), height: Utilities.getHeight(height:radius)))
let mask = CAShapeLayer()
mask.path = path.cgPath
layer.mask = mask
}
}
func addShadowView(width:CGFloat=0.7, height:CGFloat=0.7, Opacidade:Float=0.5, maskToBounds:Bool=false, radius:CGFloat=0.5){
self.layer.shadowColor = UIColor.black.withAlphaComponent(0.8).cgColor
self.layer.shadowOffset = CGSize(width: width, height: height)
self.layer.shadowRadius = radius
self.layer.shadowOpacity = Opacidade
self.layer.masksToBounds = maskToBounds
}
}
extension String{
var length: Int {
return count
}
var validEmail:Bool {
let emailRegEx = "[A-Z0-9a-z._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,64}"
let emailPred = NSPredicate(format:"SELF MATCHES %@", emailRegEx)
return emailPred.evaluate(with: self)
}
var validatePhone:Bool {
let phone_RegEx = "^\\d{3}\\d{3}\\d{4}$"
let phoneTest = NSPredicate(format: "SELF MATCHES %@", phone_RegEx)
return phoneTest.evaluate(with: self)
}
var inValidName:Bool {
let name_RegEx = ".*[^A-Za-z ].*"
let nameTest = NSPredicate(format: "SELF MATCHES %@", name_RegEx)
return nameTest.evaluate(with: self)
}
}
extension UIApplication {
class func topViewController(controller: UIViewController? = UIApplication.shared.keyWindow?.rootViewController) -> UIViewController? {
if let navigationController = controller as? UINavigationController {
return topViewController(controller: navigationController.visibleViewController)
}
if let tabController = controller as? UITabBarController {
if let selected = tabController.selectedViewController {
return topViewController(controller: selected)
}
}
if let presented = controller?.presentedViewController {
return topViewController(controller: presented)
}
return controller
}
}
extension DefaultsKeys {
var isLogin: DefaultsKey<Bool?> { .init("isLogin") }
}
extension Notification.Name {
}
extension Sequence {
func group<GroupingType: Hashable>(by key: (Iterator.Element) -> GroupingType) -> [[Iterator.Element]] {
var groups: [GroupingType: [Iterator.Element]] = [:]
var groupsOrder: [GroupingType] = []
forEach { element in
let key = key(element)
if case nil = groups[key]?.append(element) {
groups[key] = [element]
groupsOrder.append(key)
}
}
return groupsOrder.map { groups[$0]! }
}
}
extension CGPoint {
func isInsidePolygon(vertices: [CGPoint]) -> Bool {
guard vertices.count > 0 else { return false }
var i = 0, j = vertices.count - 1, c = false, vi: CGPoint, vj: CGPoint
while true {
guard i < vertices.count else { break }
vi = vertices[i]
vj = vertices[j]
if (vi.y > y) != (vj.y > y) &&
x < (vj.x - vi.x) * (y - vi.y) / (vj.y - vi.y) + vi.x {
c = !c
}
j = i
i += 1
}
return c
}
}
extension Date {
// Convert local time to UTC (or GMT)
func toGlobalTime() -> Date {
let timezone = TimeZone.current
let seconds = -TimeInterval(timezone.secondsFromGMT(for: self))
return Date(timeInterval: seconds, since: self)
}
// Convert UTC (or GMT) to local time
func toLocalTime() -> Date {
let timezone = TimeZone.current
let seconds = TimeInterval(timezone.secondsFromGMT(for: self))
return Date(timeInterval: seconds, since: self)
}
}
<file_sep>/TechnicalAssessmentTest/ViewControllers/Dashboard/ViewModel/DashboardViewModel.swift
//
// Extensions.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import Foundation
import Alamofire
import SwiftyJSON
class DashboardViewModel
{
var items : NSMutableArray! = NSMutableArray()
var error: Error? = nil
var refreshing = false
func fetch(completion: @escaping () -> Void)
{
refreshing = true
let apiURL = Constants.environment.current.BASE_URL + Constants.API.posts
var headers: HTTPHeaders = []
headers.add(name: "Accept", value: "*/*")
AFWrapper.requestGET(apiURL, params: nil, headers: headers, success: { (statusCode, responseJson) in
self.items.removeAllObjects()
if(statusCode == 200)
{
var finalArray:[Any] = []
print("Here response \(responseJson)")
let responseArray = responseJson.array! as NSArray
for post in responseArray
{
let tempObject = JSON(post)
let tempPostModel : PostDataModel = PostDataModel()
tempPostModel.userId = tempObject["userId"].intValue
tempPostModel.id = tempObject["id"].intValue
tempPostModel.title = tempObject["title"].stringValue
tempPostModel.body = tempObject["body"].stringValue
finalArray.append(tempPostModel)
}
if (finalArray.count > 0)
{
let sortedArray = finalArray.sorted
{
($0 as! PostDataModel).id < ($1 as! PostDataModel).id
}
self.items.addObjects(from: sortedArray)
}
completion()
}
else
{
completion()
}
}) { (error) in
self.error = error
completion()
}
}
}
<file_sep>/TechnicalAssessmentTest/ViewControllers/Dashboard/ViewController/TabBarViewController.swift
//
// TabBarViewController.swift
// spiiomobile
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import UIKit
import AZTabBar
class TabBarViewController: UIViewController, AZTabBarDelegate {
var AZTabBarC:AZTabBarController!
override func viewDidLoad() {
super.viewDidLoad()
setAZTabbar()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.navigationController?.navigationBar.isHidden = true
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.navigationController?.navigationBar.isHidden = true
}
func setAZTabbar()
{
var icons = [UIImage]()
icons.append(Constants.images.posts!)
icons.append(Constants.images.favourites!)
AZTabBarC = .insert(into: self, withTabIcons: icons)
AZTabBarC.delegate = self
let v2 = appDelegate.myStoryboard.instantiateViewController(withIdentifier: "DashboardViewController") as! DashboardViewController
//let v3 = appDelegate.myStoryboard.instantiateViewController(withIdentifier: "InstallWebViewController") as! InstallWebViewController
let v3 = appDelegate.myStoryboard.instantiateViewController(withIdentifier: "FavoritesViewController") as! FavoritesViewController
let vc2 = UINavigationController(rootViewController: v2)
let vc3 = UINavigationController(rootViewController: v3)
AZTabBarC.setViewController(vc2, atIndex: 0)
AZTabBarC.setViewController(vc3, atIndex: 1)
AZTabBarC.setTitle("Posts".uppercased(), atIndex: 0)
AZTabBarC.setTitle("Favorites".uppercased(), atIndex: 1)
AZTabBarC.font = UIFont.systemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: 9.0))
//default color of the icons on the buttons
AZTabBarC.defaultColor = Constants.color.colorGreenLight
//the color of the icon when a menu is selected
AZTabBarC.selectedColor = Constants.color.colorGreenDark
//The color of the icon of a highlighted tab
AZTabBarC.highlightColor = Constants.color.colorGreenDark
//The background color of the tab bar
AZTabBarC.buttonsBackgroundColor = Constants.color.colorToolbar
//Tabbar height
AZTabBarC.tabBarHeight = Utilities.getHeight(height: 40)
// default is 3.0
AZTabBarC.selectionIndicatorHeight = 0
//hide or show the seperator line
AZTabBarC.separatorLineVisible = false
AZTabBarC.animateTabChange = false
}
}
<file_sep>/TechnicalAssessmentTest/ViewControllers/Login/ViewController/LoginViewController.swift
//
// ViewController.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 15/03/21.
//
import UIKit
import MaterialComponents.MaterialButtons
import SkyFloatingLabelTextField
import Alamofire
import SwiftyUserDefaults
import SwiftyJSON
import Firebase
class LoginViewController: UIViewController, UITextViewDelegate, UITextFieldDelegate
{
@IBOutlet weak var scrollView: UIScrollView!
var imgLogo: UIImageView!
var txtEmail: SkyFloatingLabelTextField!
var txtPassword: SkyFloatingLabelTextField!
var btnSubmit: MDCButton!
var btnForgotPassword: MDCButton!
var y_position : CGFloat = 0
// MARK: -
// MARK: Controller Lifecycle Methods
override func viewDidLoad()
{
super.viewDidLoad()
// Do any additional setup after loading the view.
y_position = Utilities.getHeight(height: 140)
addEmail()
addPassword()
addSubmitButton()
self.checkLoginValidations()
}
override func viewWillAppear(_ animated: Bool)
{
super.viewWillAppear(animated)
self.navigationController?.isNavigationBarHidden = false
UIApplication.shared.statusBarStyle = .lightContent
Utilities.trackScreenForGoogleAnalytics(screenName: Constants.GoogleAnalytics.Login)
let navigationBar = self.navigationController?.navigationBar
navigationBar?.tintColor = UIColor.white
navigationBar?.setNavigationBarProperties(navigationBar: navigationBar!)
self.navigationItem.title = "Login"
txtEmail.text = ""
txtPassword.text = ""
}
// MARK: -
// MARK: Screen UI Design
func addEmail()
{
let tempView = UIView(frame: CGRect(x: Utilities.getWidth(width: 20), y: y_position, width: Utilities.getWidth(width: 280), height: Utilities.getHeight(height: 40)))
let leftView1 = UIView(frame: CGRect(x: 0, y: 0, width: Utilities.getWidth(width: 30), height: Utilities.getHeight(height: 40)))
let imageView = UIImageView(frame: CGRect(x: 0, y: 0, width: Utilities.getWidth(width: 15), height: leftView1.frame.size.height));
imageView.contentMode = .scaleAspectFit
imageView.image = UIImage(named: "username");
leftView1.addSubview(imageView)
tempView.addSubview(leftView1)
txtEmail = SkyFloatingLabelTextField()
txtEmail.tintColor = Constants.color.colorPrimary // the color of the blinking cursor
txtEmail.textColor = Constants.color.colorPrimary
txtEmail.delegate = self
txtEmail.lineColor = Constants.color.colorEditTextLine
txtEmail.selectedTitleColor = Constants.color.colorPrimary
txtEmail.selectedLineColor = Constants.color.colorPrimary
txtEmail.lineHeight = 1.0 // bottom line height in points
txtEmail.selectedLineHeight = 1.0
txtEmail.keyboardType = .emailAddress
txtEmail.delegate = self
txtEmail.frame = CGRect(x: Utilities.getWidth(width: 30), y: 0, width: Utilities.getWidth(width: 250), height: Utilities.getHeight(height: 40))
txtEmail.placeholder = "Email"
txtEmail.title = "Email"
// txtEmail.addBottomBorder(color: Constants.color.colorEditTextLine)
txtEmail.textColor = Constants.color.colorPrimary
txtEmail.font = UIFont.systemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: 12))
tempView.addSubview(txtEmail)
scrollView.addSubview(tempView)
y_position = tempView.frame.origin.y + tempView.frame.size.height + Utilities.getHeight(height: 10)
scrollView.contentSize = CGSize(width: self.scrollView.frame.size.width, height: y_position)
}
func addPassword()
{
let tempView = UIView(frame: CGRect(x: Utilities.getWidth(width: 20), y: y_position, width: Utilities.getWidth(width: 280), height: Utilities.getHeight(height: 40)))
let leftView1 = UIView(frame: CGRect(x: 0, y: 0, width: Utilities.getWidth(width: 30), height: Utilities.getHeight(height: 40)))
let imageView = UIImageView(frame: CGRect(x: 0, y: 0, width: Utilities.getWidth(width: 15), height: leftView1.frame.size.height));
imageView.contentMode = .scaleAspectFit
imageView.image = UIImage(named: "password_icon");
leftView1.addSubview(imageView)
tempView.addSubview(leftView1)
txtPassword = SkyFloatingLabelTextField()
txtPassword.tintColor = Constants.color.colorPrimary // the color of the blinking cursor
txtPassword.textColor = Constants.color.colorPrimary
txtPassword.delegate = self
txtPassword.lineColor = Constants.color.colorEditTextLine
txtPassword.selectedTitleColor = Constants.color.colorPrimary
txtPassword.selectedLineColor = Constants.color.colorPrimary
txtPassword.lineHeight = 1.0 // bottom line height in points
txtPassword.selectedLineHeight = 1.0
txtPassword.isSecureTextEntry = true
txtPassword.delegate = self
txtPassword.frame = CGRect(x: Utilities.getWidth(width: 30), y: 0, width: Utilities.getWidth(width: 250), height: Utilities.getHeight(height: 40))
txtPassword.placeholder = "Password"
txtPassword.title = "Password"
//txtPassword.addBottomBorder(color: Constants.color.colorEditTextLine)
txtPassword.textColor = Constants.color.colorPrimary
txtPassword.font = UIFont.systemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: 12))
tempView.addSubview(txtPassword)
scrollView.addSubview(tempView)
y_position = tempView.frame.origin.y + tempView.frame.size.height + Utilities.getHeight(height: 30)
scrollView.contentSize = CGSize(width: self.scrollView.frame.size.width, height: y_position)
}
func addSubmitButton()
{
btnSubmit = MDCButton()
btnSubmit.frame = CGRect(x: Utilities.getWidth(width: 20), y: y_position, width: Utilities.getWidth(width: 280), height: Utilities.getHeight(height: 45))
btnSubmit.setTitle("LOGIN", for: .normal)
btnSubmit.setTitleColor(.white)
btnSubmit.setBackgroundColor(Constants.color.colorButtonGreen)
btnSubmit.setTitleFont(UIFont.boldSystemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: 16)), for: .normal)
btnSubmit.isUppercaseTitle = true
btnSubmit.addTarget(self, action: #selector(onClickSubmit), for: .touchUpInside)
btnSubmit.layer.cornerRadius = Utilities.getWidth(width: 4)
scrollView.addSubview(btnSubmit)
y_position = btnSubmit.frame.origin.y + btnSubmit.frame.size.height + Utilities.getHeight(height: 15)
scrollView.contentSize = CGSize(width: self.scrollView.frame.size.width, height: y_position)
}
func textField(_ textField: UITextField, shouldChangeCharactersIn range: NSRange, replacementString string: String) -> Bool
{
if let text = textField.text as NSString?
{
Utilities.run(after: 0.1) {
self.checkLoginValidations()
}
}
return true
}
func checkLoginValidations()
{
btnSubmit.isEnabled = false
let loginViewModel : LoginViewModel = LoginViewModel(email: txtEmail.text!, password: txtPassword.text!)
loginViewModel.validateFields { (isValidated) in
if(isValidated == true)
{
btnSubmit.isEnabled = true
}
else{
btnSubmit.isEnabled = false
}
}
}
func textFieldShouldReturn(_ textField: UITextField) -> Bool
{
textField.resignFirstResponder()
return true
}
@objc func onClickSubmit()
{
self.view.endEditing(true)
Defaults[\.isLogin] = true
let tabViewController = appDelegate.myStoryboard.instantiateViewController(withIdentifier: "TabBarViewController") as! TabBarViewController
self.navigationController?.pushViewController(tabViewController, animated: false)
}
}
<file_sep>/Podfile
# Uncomment the next line to define a global platform for your project
# platform :ios, '9.0'
target 'TechnicalAssessmentTest' do
# Comment the next line if you don't want to use dynamic frameworks
use_frameworks!
# Pods for TechnicalAssessmentTest
pod 'RxSwift', '6.1.0'
pod 'RxCocoa', '6.1.0'
pod 'BFKit-Swift'
pod 'RMUniversalAlert'
pod 'Toast'
pod 'SVProgressHUD'
pod 'IQKeyboardManager'
pod 'MaterialComponents/Buttons'
pod 'MaterialComponents/TextFields'
pod 'MaterialComponents/Dialogs'
pod 'Alamofire', '~> 5.2'
pod 'SwiftyJSON', '~> 4.0'
pod 'SwiftyUserDefaults', '~> 5.0'
pod 'Firebase/Crashlytics'
pod 'Firebase/Analytics'
pod 'DZNEmptyDataSet'
pod 'RMUniversalAlert'
pod 'SkyFloatingLabelTextField', '~> 3.0'
pod 'AZTabBar'
end
<file_sep>/TechnicalAssessmentTest/Classes/Utilities.swift
//
// Utilities.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import Foundation
import UIKit
import SVProgressHUD
import Toast
import RMUniversalAlert
import Firebase
import MaterialComponents.MaterialDialogs
class Utilities:NSObject{
class func size(forText text: String?, font: UIFont?, withinWidth width: CGFloat) -> CGSize {
let constraint = CGSize(width: width, height: 20000.0)
var size: CGSize
var boundingBox: CGSize? = nil
if let font = font {
boundingBox = text?.boundingRect(with: constraint, options: [.usesLineFragmentOrigin, .usesFontLeading], attributes: [
NSAttributedString.Key.font: font
], context: nil).size
}
size = CGSize(width: ceil(boundingBox!.width), height: ceil(boundingBox!.height))
return size
}
class func getAspectSizeOfImage(imageSize: CGSize, widthToFit: Float) -> CGSize {
var imageWidth = Float(imageSize.width)
var imageHeight = Float(imageSize.height)
var imgRatio = imageWidth / imageHeight
let width_1 = widthToFit
let height_1 = MAXFLOAT
let maxRatio = width_1 / height_1
if imgRatio != maxRatio {
if imgRatio < maxRatio {
imgRatio = height_1 / imageHeight
imageWidth = imgRatio * imageWidth
imageHeight = height_1
} else {
imgRatio = width_1 / imageWidth
imageHeight = imgRatio * imageHeight
imageWidth = width_1
}
}
return CGSize(width: CGFloat(imageWidth), height: CGFloat(imageHeight))
}
class func getWidth(width : CGFloat) -> CGFloat
{
var current_Size : CGFloat = 0.0
current_Size = (UIScreen.main.bounds.width/320)
let FinalWidth : CGFloat = width * current_Size
return FinalWidth
}
class func getHeight(height : CGFloat) -> CGFloat
{
var current_Size : CGFloat = 0.0
current_Size = (UIScreen.main.bounds.height/568)
let FinalHight : CGFloat = height * current_Size
return FinalHight
}
class func dynamicFontSizeForIphone(fontSize : CGFloat) -> CGFloat
{
var current_Size : CGFloat = 0.0
current_Size = (UIScreen.main.bounds.width/320)
let FinalSize : CGFloat = fontSize * current_Size
return FinalSize
}
class func isEmpty(_ str: String) -> Bool
{
// A non-optional String can never be nil or NSNull, so only the
// whitespace-trimmed length and the literal "<null>" need checking.
let trimmed = str.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
if trimmed.count == 0 {
return true
}
if (trimmed == "<null>") {
return true
}
return false
}
class func showToastMessage(_ strMessage: String) {
if !Utilities.isEmpty(strMessage) {
CSToastManager.setQueueEnabled(false)
appDelegate.window?.makeToast(strMessage)
/*var messageQueue: RGMessageQueue!
var presenter: RGMessageSnackBarPresenter?
if let window = UIApplication.shared.keyWindow {
presenter = RGMessageSnackBarPresenter(view: window,
animation: RGMessageSnackBarAnimation.slideUp,
bottomMargin: 0.0,
sideMargins: 0.0,
cornerRadius: 0.0)
presenter?.snackBarView.backgroundColor = Constants.color.colorPrimary
messageQueue = RGMessageQueue(presenter: presenter!)
} else {
messageQueue = RGMessageQueue(presenter: RGMessageSnackBarPresenter(view: (UIApplication.topViewController()?.view)!, animation: RGMessageSnackBarAnimation.slideUp))
}
if let rgmessage = RGMessage(text: strMessage, image: nil, actions: nil, priority: .verbose, duration: .long) {
messageQueue.push(rgmessage)
}
*/
}
}
class func showAlertMessage(_ strMessage: String) {
if !Utilities.isEmpty(strMessage) {
RMUniversalAlert.show(in: UIApplication.topViewController()!, withTitle: "Alert", message: strMessage, cancelButtonTitle: "OK", destructiveButtonTitle: nil, otherButtonTitles: nil) { (alert, index) in
}
}
}
class func showAlertMessage(_ title: String, message: String) {
if !Utilities.isEmpty(message)
{
RMUniversalAlert.show(in: UIApplication.topViewController()!, withTitle: title, message: message, cancelButtonTitle: "OK", destructiveButtonTitle: nil, otherButtonTitles: nil) { (alert, index) in
}
}
}
class func run(after wait: TimeInterval, closure: @escaping () -> Void) {
let queue = DispatchQueue.main
queue.asyncAfter(deadline: DispatchTime.now() + wait, execute: closure)
}
class func showActivityIndicator()
{
SVProgressHUD.show()
}
class func hideActivityIndicator()
{
SVProgressHUD.dismiss()
}
class func trackScreenForGoogleAnalytics(screenName : String)
{
//Analytics.setScreenName(screenName, screenClass: screenName)
}
}
<file_sep>/TechnicalAssessmentTest/Models/ObjectModels/PostDataModel.swift
//
// PostDataModel.swift
import Foundation
import SwiftyJSON
class PostDataModel : NSObject
{
var userId : Int = 0
var id : Int = 0
var title : String!
var body : String!
}
<file_sep>/TechnicalAssessmentTest/ViewControllers/Login/ViewModel/LoginViewModel.swift
//
// Extensions.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import Foundation
import Alamofire
import SwiftyJSON
class LoginViewModel
{
var email : String!
var password : String!
init(email: String, password: String)
{
self.email = email
self.password = <PASSWORD>
}
func validateFields(finished: (Bool) -> Void)
{
if(self.email?.isEmpty == false && self.password?.isEmpty == false)
{
if (self.email?.isEmail() == true)
{
let passwordString = self.password
if (passwordString!.length >= 8 && passwordString!.length <= 15)
{
finished(true)
return
}
}
}
finished(false)
}
}
<file_sep>/TechnicalAssessmentTest/Classes/AFWrapper.swift
//
// Extensions.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 23/10/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import Foundation
import Alamofire
import SwiftyJSON
class AFWrapper: NSObject
{
class func requestGET(_ strURL : String, params : Parameters?, headers : HTTPHeaders? , success:@escaping (Int, JSON) -> Void, failure:@escaping (Error) -> Void){
AF.request(strURL, method: .get, parameters: params, encoding: URLEncoding.httpBody, headers: headers).responseJSON { (responseObject) -> Void in
//print("Response Code",responseObject.response?.statusCode as Any)
switch responseObject.result {
case .success:
let resJson = JSON(responseObject.value!)
success(responseObject.response!.statusCode, resJson)
case .failure:
let error : Error = responseObject.error!
failure(error)
}
}
}
}
<file_sep>/TechnicalAssessmentTest/ViewControllers/Dashboard/ViewController/FavoritesViewController.swift
//
// ViewController.swift
// TechnicalAssessmentTest
//
// Created by <NAME> on 15/03/21.
//
import UIKit
import SwiftyUserDefaults
import Alamofire
import SwiftyJSON
import DZNEmptyDataSet
class FavoritesViewController: UIViewController, UITableViewDelegate, UITableViewDataSource, DZNEmptyDataSetSource, DZNEmptyDataSetDelegate
{
var postDataArray : NSMutableArray! = NSMutableArray()
// cell reuse id (cells that scroll out of view can be reused)
let cellReuseIdentifier = "cell"
private var viewModel: DashboardViewModel = DashboardViewModel()
// don't forget to hook this up from the storyboard
@IBOutlet var tableView: UITableView!
// MARK: -
// MARK: Controller Lifecycle Methods
override func viewDidLoad()
{
super.viewDidLoad()
// Do any additional setup after loading the view.
// Register the table view cell class and its reuse id
self.tableView.register(UITableViewCell.self, forCellReuseIdentifier: cellReuseIdentifier)
// (optional) include this line if you want to remove the extra empty cell divider lines
// self.tableView.tableFooterView = UIView()
// This view controller itself will provide the delegate methods and row data for the table view.
tableView.delegate = self
tableView.dataSource = self
tableView.separatorInset = UIEdgeInsets.zero
tableView.tableFooterView = UIView()
self.getPostDataFromAPI()
}
override func viewWillAppear(_ animated: Bool)
{
super.viewWillAppear(animated)
self.navigationController?.isNavigationBarHidden = false
UIApplication.shared.statusBarStyle = .lightContent
Utilities.trackScreenForGoogleAnalytics(screenName: Constants.GoogleAnalytics.Dashboard)
let navigationBar = self.navigationController?.navigationBar
navigationBar?.tintColor = UIColor.white
navigationBar?.setNavigationBarProperties(navigationBar: navigationBar!)
self.navigationItem.title = "Favorites"
let rightBarButton = UIBarButtonItem()
rightBarButton.target = self
rightBarButton.action = #selector(onClickLogout)
rightBarButton.title = "Logout"
rightBarButton.style = .done
rightBarButton.tintColor = Constants.color.colorWhite
self.navigationItem.rightBarButtonItem = rightBarButton
self.navigationItem.hidesBackButton = true
}
// MARK: -
// MARK: API Methods
func getPostDataFromAPI()
{
print("getPostDataFromAPI")
Utilities.showActivityIndicator()
viewModel.fetch { [weak self] in
Utilities.hideActivityIndicator()
if(self?.viewModel.error == nil)
{
if(self?.viewModel.items.count != 0)
{
self?.postDataArray.addObjects(from: self?.viewModel.items as! [Any])
}
self?.tableView.emptyDataSetSource = self
self?.tableView.emptyDataSetDelegate = self
self?.tableView.reloadData()
self?.tableView.reloadEmptyDataSet()
}
else{
Utilities.showAlertMessage(Constants.messages.unknownError)
}
}
}
private func tableView(tableView: UITableView, heightForRowAtIndexPath indexPath: NSIndexPath) -> CGFloat {
return UITableView.automaticDimension
}
// number of rows in table view
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return self.postDataArray.count
}
// create a cell for each table view row
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
// create a new cell if needed or reuse an old one
let cell:UITableViewCell = UITableViewCell(style: .subtitle,
reuseIdentifier: cellReuseIdentifier)
let tempPostModel : PostDataModel = self.postDataArray.object(at: indexPath.row) as! PostDataModel
let bgColorView = UIView()
bgColorView.backgroundColor = Constants.color.colorPrimary.withAlphaComponent(0.2)
cell.selectedBackgroundView = bgColorView
// set the text from the data model
cell.textLabel?.numberOfLines = 0
cell.textLabel?.lineBreakMode = .byWordWrapping
cell.textLabel?.text = tempPostModel.title
cell.detailTextLabel?.numberOfLines = 0
cell.detailTextLabel?.lineBreakMode = .byWordWrapping
cell.detailTextLabel?.text = tempPostModel.body
return cell
}
// method to run when table view cell is tapped
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath)
{
tableView.deselectRow(at: indexPath, animated: true)
}
func title(forEmptyDataSet scrollView: UIScrollView) -> NSAttributedString?
{
let txt = "Favorites"
let attrs1 = [NSAttributedString.Key.font : UIFont.boldSystemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: 16.0)) , NSAttributedString.Key.foregroundColor : Constants.color.colorPrimary]
let attributedString1 = NSMutableAttributedString(string:"\(txt)", attributes:attrs1 as [NSAttributedString.Key : Any])
return attributedString1
}
func description(forEmptyDataSet scrollView: UIScrollView!) -> NSAttributedString! {
let txt = "No record(s) found."
let attrs1 = [NSAttributedString.Key.font : UIFont.systemFont(ofSize: Utilities.dynamicFontSizeForIphone(fontSize: 12.0)) , NSAttributedString.Key.foregroundColor : UIColor.black]
let attributedString1 = NSMutableAttributedString(string:"\(txt)", attributes:attrs1 as [NSAttributedString.Key : Any])
return attributedString1
}
func emptyDataSetShouldDisplay(_ scrollView: UIScrollView) -> Bool
{
return true
}
// MARK: -
// MARK: Buttons Click Events
@objc func onClickLogout()
{
Defaults[\.isLogin] = false
let viewController = appDelegate.myStoryboard.instantiateViewController(withIdentifier: "LoginViewController") as! LoginViewController
let navigationController = UINavigationController(rootViewController: viewController)
appDelegate.window?.rootViewController = navigationController
}
}
|
e5d8465d273cf02ca7b4fdc613bf907d7ee1fe17
|
[
"Swift",
"Ruby",
"Markdown"
] | 12 |
Swift
|
mihir815/TechnicalAssessmentTest
|
4d36bf986992f9258e215b01f2c3f7c3440bf31f
|
41294eae947a517116838bc32bb728af1182f49c
|
refs/heads/master
|
<repo_name>silveiraSoft/googlecloud<file_sep>/projetotest/index.php
<?PHP
define("dir_server","C:/wamp/www/ProjetosWeb/projetotest");
define("host",$_SERVER['HTTP_HOST']);
define("url_server",$_SERVER['PHP_SELF']);
define("uri",rtrim(dirname($_SERVER['PHP_SELF']), '/\\'));
define("controler_inicial",'/controlers/controler_inicial.php');
//echo host.uri.controler_inicial;
// header("Location: http://$host$uri/$extra");
if(!isset($controler) && !isset($accion)){
$controler="atualizar_historico_diciplina";
include_once dir_server."/conecao.php";
include_once dir_server.controler_inicial;
}
?><file_sep>/projetotest/views/lst_historico_diciplina - Copia.php
<!DOCTYPE html>
<html lang="pt-br">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- The 3 meta tags above *must* come first inside the `head`; any other content must come *after* these tags -->
<title>Histórico de disciplinas</title>
<!-- Bootstrap
<link href="bootstrap/css/bootstrap.min.css" rel="stylesheet">
-->
<!-- Bootstrap core CSS -->
<link href="<?php echo "http://".host.uri; ?>/bootstrap/css/bootstrap.min.css" rel="stylesheet">
<!-- Bootstrap theme -->
<link href="<?php echo "http://".host.uri; ?>/bootstrap/css/bootstrap-theme.min.css" rel="stylesheet">
<!-- IE10 viewport hack for Surface/desktop Windows 8 bug -->
<link href="../../assets/css/ie10-viewport-bug-workaround.css" rel="stylesheet">
<!-- Custom styles for this template -->
<link href="<?php echo "http://".host.uri; ?>/bootstrap/theme.css" rel="stylesheet">
<!-- Just for debugging purposes. Don't actually copy these 2 lines! -->
<!--[if lt IE 9]><script src="../../assets/js/ie8-responsive-file-warning.js"></script><![endif]-->
<!-- <script src="../../assets/js/ie-emulation-modes-warning.js"></script> -->
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<!--
<h1>Disciplinas já cadatradas (Histórico de disciplinas)</h1>
-->
<div class="container">
<div class="panel panel-primary">
<!-- Default panel contents -->
<div class="panel-heading"><p class="text-center"> Disciplinas já cursadas (Histórico de disciplinas)</p> </div>
<div class="panel-body">
<!-- <button type="button" class="btn btn-sm btn-default">Default</button>-->
<div class="pull-right">
<!-- <button type="button" class="btn btn-primary">Atualizar</button> -->
<button type="button" class="btn btn-warning">Atualizar</button>
</div>
<table class="table table-bordered">
<thead>
<tr>
<th colspan="2">Ano / Módulo 1</th>
<!--
<th>First Name</th>
<th>Last Name</th>
<th>Username</th>
-->
</tr>
</thead>
<tbody>
<tr>
<td>Nome da disciplina</td>
<td>Detalhe</td>
</tr>
<tr rowspan="2">
<td> >> NOME DA DISCIPLINA 1 </br> >> NOME DA DISCIPLINA 2 </td>
<td> Adaptação </br> Adaptação</td>
</tr>
</tbody>
<thead>
<tr>
<th colspan="2">Ano / Módulo 2</th>
<!--
<th>First Name</th>
<th>Last Name</th>
<th>Username</th>
-->
</tr>
</thead>
<tbody>
<tr>
<td>Nome da disciplina</td>
<td>Detalhe</td>
</tr>
<tr rowspan="2">
<td> >> NOME DA DISCIPLINA 1 </br> >> NOME DA DISCIPLINA 1 </td>
<td> Adaptação </br> Adaptação</td>
</tr>
</tbody>
</table>
</div>
<!-- Table
<table class="table">
...
</table>
-->
</div>
</div>
<!-- jQuery (required for Bootstrap's JavaScript plugins) -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
<!-- Include all compiled plugins (below), or include individual files as needed -->
<script src="js/bootstrap.min.js"></script>
</body>
</html><file_sep>/projetotest/sql/disciplina.sql
-- phpMyAdmin SQL Dump
-- version 4.6.4
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Dec 03, 2016 at 23:18
-- Server version: 5.7.14
-- PHP Version: 5.6.25
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_test_unicesumar`
--
-- --------------------------------------------------------
--
-- Table structure for table `disciplina`
--
CREATE TABLE `disciplina` (
`iddisc` int(11) NOT NULL,
`aluno` varchar(250) NOT NULL,
`turma` int(11) NOT NULL,
`ano` year(4) NOT NULL,
`semestre` int(11) NOT NULL,
`sit_detalhe` varchar(20) NOT NULL,
`NOME_DISCIPLINA` varchar(250) NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE utf8_general_ci;
--
-- Dumping data for table `disciplina`
--
INSERT INTO `disciplina` (`iddisc`, `aluno`, `turma`, `ano`, `semestre`, `sit_detalhe`, `NOME_DISCIPLINA`) VALUES
(1, 'A', 1, 2016, 1, 'Adaptação', 'Disciplina 1'),
(2, 'A', 1, 2016, 1, 'Adaptação', 'Disciplina 2'),
(3, 'B', 1, 2016, 1, 'Adaptação', 'Disciplina 1'),
(4, 'B', 1, 2016, 1, 'Adaptação', 'Disciplina 2'),
(5, 'B', 2, 2016, 2, 'Adaptação', 'Disciplina 1'),
(6, 'B', 2, 2016, 2, 'Adaptação', 'Disciplina 2'),
(7, 'B', 2, 2016, 2, 'Curricular', 'Disciplina 3');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `disciplina`
--
ALTER TABLE `disciplina`
ADD PRIMARY KEY (`iddisc`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `disciplina`
--
ALTER TABLE `disciplina`
MODIFY `iddisc` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/projetotest/controlers/controler_inicial.php
<?php
if(isset($conn)){
if($controler=="atualizar_historico_diciplina"){
try{
exec("SET CHARACTER SET utf8");
$sql = 'SELECT DISTINCT ano,semestre,NOME_DISCIPLINA,sit_detalhe FROM disciplina order by ano,semestre,NOME_DISCIPLINA';
//$sql = 'SELECT DISTINCT ano,semestre FROM disciplina order by ano,semestre';
if($result = $conn->query($sql)){
if($result !== false)
{
include_once dir_server."/views/lst_historico_diciplina.php";
}
}
// Disconnect from the database
$conn = null;
}
catch(PDOException $e) {
// print our exception
echo $e->getMessage();
}
//$conn = null;
/*
foreach ($conn->query($sql) as $row) {
print $row['name'] . "\t";
print $row['color'] . "\t";
print $row['calories'] . "\n";
}
*/
}
}
?>
|
e5f30f3ea41bcf1f18de8d1e72971a77e3177254
|
[
"SQL",
"PHP"
] | 4 |
PHP
|
silveiraSoft/googlecloud
|
9c46e909dc3aafa76885d6c80b2709cc45dc77c3
|
6acde50a47106aa1c02f6ce32b70b2c26f004e17
|
refs/heads/main
|
<repo_name>NikolayStoyanovv/20210226<file_sep>/20210226_11.c
/* Task 11. Typedef a struct node containing one member of type int and one
pointer to the struct type itself. Dynamically allocate memory for this
structure with malloc in the way described above. */
#include<stdio.h>
#include<stdlib.h>
typedef struct node{
int x;
struct node* ptr;
}node;
int main() {
node a;
a.x = 3;
node* np = malloc(sizeof (node));
np->x = 5;
np->ptr = &a;
printf("%d\n",np->x);
printf("%d\n",np->ptr->x);
}<file_sep>/20210226_1.c
/* Task 1. Define a structure as a user-defined type.
Initialize the members of the structure using the new
user-defined type. Print them. */
#include<stdio.h>
#include<string.h>
typedef struct user{
char name[80];
int age;
}t_user;
int main(){
t_user u1;
strcpy(u1.name,"Sam");
u1.age=10;
printf( "The user's name is: %s\n", u1.name);
printf( "The user's age is : %d\n", u1.age);
return 0;
}<file_sep>/20210226_5.c
/* Task 5. Define a user-defined type for an array.
Initialize the array and print it to the console. */
#include <stdio.h>
typedef int twoInts[2];
typedef int List[10];
List x;
List y;
int main () {
typedef char Name[20];
Name first="Zack", middle="LZ", last="Miller";
printf("First name:%s\n",first);
printf("SN :%s\n",middle);
printf("Last name:%s\n",last);
return 0;
}
<file_sep>/20210226_9.c
/* Task 9. Print the result of dividing two integers of type int
as a double, with and without an explicit cast. */
#include<stdio.h>
int main(){
int iValue=5;
int iDiv=3;
double dfRes=0.0;
dfRes=(double)iValue/iDiv;
printf("The result is :%lf\n",dfRes);
short b=13;
int* ptr=(int*)&b;
*ptr=31;
return 0;
}<file_sep>/20210226_2.c
/* Task 2. Create a new user-defined type for long long int.
Use it in the printf function and print its size. */
#include<stdio.h>
#include<string.h>
typedef long long int t_user;
int main(){
t_user u1;
printf( "The size of long long int is: %ld\n",sizeof(t_user));
return 0;
}<file_sep>/20210226_3.c
/* Task 3. Define a user-defined type for a pointer.
Create a variable and point the pointer at it using
the new user-defined type. */
#include<stdio.h>
typedef struct user{
char name[80];
int age;
}t_user;
typedef int*t_pointer;
int main(){
int x=5;
t_pointer b=&x;
printf("The size of t_pointer:%ld\n",sizeof(*b));
return 0;
}<file_sep>/20210226_10.c
/* Task 10. Try to use as many implicit type conversions in C as possible. */
#include<stdio.h>
int main() {
short int sivalue = 13;
int* ptrValue = (int*) &sivalue;
*ptrValue = 31;
printf("%d, %d\n", sivalue, *ptrValue);
char chValue = 'a';
double* ptrdValue = (double*) &chValue;
*ptrdValue = 0.0;
printf("%c, %f\n", chValue, *ptrdValue);
}<file_sep>/20210226_4.c
/* Task 4. Define a function that adds two variables and call it
through a predefined (typedef'd) function pointer used as a value. */
#include<stdio.h>
typedef int (*t_pointer)(int *, int *);
int print_to_n(int *a,int *b){
return *a+*b;
}
int main(){
int a=5;
int b=6;
int *ptr1=&a;
int *ptr2=&b;
t_pointer t = print_to_n; /* point the typedef'd function pointer at the adder */
printf("The value returned via t_pointer: %d\n", t(ptr1, ptr2));
return 0;
}
|
64bab6cf7abf1f61f5177262a8b7df330b769354
|
[
"C"
] | 8 |
C
|
NikolayStoyanovv/20210226
|
2ba1eb5e1eee9beb17d44337e45e79172e2b2188
|
95227415050003e1d824334cc95449bd6967f075
|
refs/heads/master
|
<repo_name>mugukamil/ptf<file_sep>/projects/webpaint/js/main.js
$(function(){
$('#Grid').mixitup();
});
|
3a515e76ac6e7a9e559153b083abddb4f98e844b
|
[
"JavaScript"
] | 1 |
JavaScript
|
mugukamil/ptf
|
831504fac25a370b86ea179b4cd751f59e9fbe0d
|
5459abccc54a12b1e857ff167382a52d0a823eca
|
refs/heads/master
|
<file_sep>var category=["Rock","Asians","Pop"];
var htmlCategory='<button class="btnFilter" id="active" value="all">Show all</button>';
for(var i=0;i<category.length;i++){
htmlCategory = htmlCategory+'<button class="btnFilter" value="'+category[i]+'">'+category[i]+'</button>';
}
$('#filterCategory').html(htmlCategory);
filterSelection("all");
var htmlFilterContent="";
for(var i=0;i<category.length;i++){
for(var j=0;j<5;j++){
htmlFilterContent = htmlFilterContent+'<img src="./Images/Genre/'+category[i]+'/'+ category[i]+(j+1)+ '.jpg"class="filterImg '+category[i]+'">';
}
}
$('#filterContent').html(htmlFilterContent);
function filterSelection(c) {
console.log(c)
var x, i;
x = $(".filterImg");
x.hide();
if (c == "all"){
x.show();
}
else{
x = $('.'+c);
console.log(x);
x.show();
}
}
var btns = document.getElementsByClassName("btnFilter");
for (var i = 0; i < btns.length; i++) {
btns[i].addEventListener("click", function() {
var value=this.value;
filterSelection(value);
var current = document.getElementById("active");
current.id = current.id.replace("active", "");
this.id = this.id.replace("", "active");
});
}
|
9897b8209679a44fb278709b4d4c63977e811ae2
|
[
"JavaScript"
] | 1 |
JavaScript
|
PhantomZe/ProjectHCI
|
e960609ea3369697ee0cf5fbe712f307b425dc4d
|
c7c5146def7e38ec749635f8116d8a789288e492
|
refs/heads/main
|
<file_sep>This is a clone of the google-keep app , which is used to store your tasks and notes.
**Technologies Used: React JS, Firebase, SCSS**
For cloning the repo:
1. Go to the 'Code' dropdown button on my GitHub page; there you will find an HTTPS link. Copy that.
2. Open a terminal on your local machine and run the command below
**git clone https://github.com/Sushmita-Ghosh/google-keep-clone.git**
3. After the second step succeeds, make sure to run yarn install or npm install to install all the dependencies of the app (a consolidated command sketch is at the end of this README)
4. To log in to the website, either use the Sign Up option or use the credentials below
**Username: <EMAIL>**
**Password: <PASSWORD>**
**For the live demo, visit https://keepzz-google-keep.herokuapp.com/**
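A minimal, consolidated sketch of the setup steps above. The `npm start` command is an assumption (the standard Create React App start script); the README itself only covers cloning and installing dependencies:
```sh
# step 2: clone the repository
git clone https://github.com/Sushmita-Ghosh/google-keep-clone.git
cd google-keep-clone

# step 3: install dependencies
npm install    # or: yarn install

# assumed: run the local dev server with the standard CRA start script
npm start
```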
<file_sep>import "./App.css";
import React, { useState, useEffect } from "react";
import Header from "../src/components/header/header.component";
import Footer from "./components/footer/footer.component";
import Notes from "./components/notes/notes.component";
import Note from "./components/note/note.component";
import SearchBox from "./components/search-box/search-box.component";
import fire from "./firebase/firebase";
import Login from "./components/login/login.component";
function App() {
// for adding a note; notes holds all of the notes
const [notes, setNotes] = useState([]);
//for each note
const [note, setNote] = useState({
title: "",
content: "",
});
// toggle the button to edit or not based on the state
const [toggleSubmit, setToggleSubmit] = useState(true);
//to get the selected item when we click on edit button in notes
const [isEditItem, setIsEditItem] = useState(null);
// for the search functionality
const [searchField, setSearchField] = useState("");
const [searchResults, setSearchResults] = useState([]);
// for the firebase functionalities
const [user, setUser] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [emailError, setEmailError] = useState("");
const [passwordError, setPasswordError] = useState("");
const [hasAccount, sethasAccount] = useState(false); // for switching bw sign-in and sign-out
const clearInputs = () => {
setEmail("");
setPassword("");
};
const clearErrors = () => {
setEmailError("");
setPasswordError("");
};
const loginHandler = () => {
clearErrors("");
fire
.auth()
.signInWithEmailAndPassword(email, password)
.catch((err) => {
switch (err.code) {
case "auth/invalid-email":
case "auth/user-diabled":
case "auth/user-not-found":
setEmailError(err.message);
break;
case "auth/wrong-password":
setPasswordError(err.message);
break;
}
});
};
const signUpHandler = () => {
clearErrors("");
fire
.auth()
.createUserWithEmailAndPassword(email, password)
.catch((err) => {
switch (err.code) {
case "auth/email-already-in-use":
case "auth/invalid-email":
setEmailError(err.message);
break;
case "auth/weak-password":
setPasswordError(err.message);
break;
}
});
};
const handleLogout = () => {
fire.auth().signOut();
};
const authListener = () => {
fire.auth().onAuthStateChanged((user) => {
if (user) {
clearInputs();
setUser(user);
} else {
setUser("");
}
});
};
useEffect(() => {
authListener();
}, []);
// function to show all the notes
const addNotes = (note) => {
setNotes((prevData) => {
return [...prevData, note];
});
//console.log(note);
};
const onDeleteHandler = (id) => {
//console.log(id);
// console.log(addNote);
// the previous data is filtered according if the id and the index matches
setNotes((prevData) =>
prevData.filter((item, ind) => {
return ind !== id;
})
);
};
const changeEdittedItemsInNotesHandler = (note) => {
if (!toggleSubmit) {
setNotes((prevData) =>
prevData.map((item, ind) => {
if (ind === isEditItem) {
return { ...item, title: note.title, content: note.content };
}
return item;
})
);
console.log(notes);
setToggleSubmit(true);
}
};
// for editing the inputs
const onEditHandler = (id) => {
// find which item we are trying to edit
const newEditItem = notes.find((item, ind) => {
return ind === id;
});
//console.log(newEditItem);
//set the toggleSubmit to false to edit the note
setToggleSubmit(false);
// populate the note state with that particular note's data
setNote((prevState) => {
return {
title: newEditItem.title,
content: newEditItem.content,
};
});
// setting the id to setIsItem to compare and change
setIsEditItem(id);
};
// for searching the terms
const searchHandler = (searchField) => {
setSearchField(searchField);
// console.log(searchField);
// if our search term is not empty then
if (searchField !== "") {
const filteredSearchNotes = notes.filter((note) => {
//console.log(Object.values(note));
return Object.values(note)
.join("")
.toLowerCase()
.includes(searchField.toLowerCase());
});
// once we have filtered , set the searchResults to the newNotes
setSearchResults(filteredSearchNotes);
} else {
// if the search field is empty set it to notes array
setSearchResults(notes);
}
};
return (
<div className="App">
{user ? (
<>
<Header handleLogout={handleLogout} />
<SearchBox searchField={searchField} searchHandler={searchHandler} />
<Notes
passNote={addNotes}
setToggleSubmit={setToggleSubmit}
toggleSubmit={toggleSubmit}
note={note}
setNote={setNote}
editItemChange={changeEdittedItemsInNotesHandler}
/>
<div className="notes">
{(searchField.length < 1 ? notes : searchResults).map(
(item, index) => {
return (
<Note
title={item.title}
content={item.content}
id={index} // for deletion purpose we need the id
key={index}
deleteItem={onDeleteHandler}
editItem={onEditHandler}
/>
);
}
)}
</div>
<Footer />
</>
) : (
<Login
email={email}
setEmail={setEmail}
password={<PASSWORD>}
setPassword={set<PASSWORD>}
loginHandler={loginHandler}
signUpHandler={signUpHandler}
hasAccount={hasAccount}
sethasAccount={sethasAccount}
emailError={emailError}
passwordError={passwordError}
/>
)}
</div>
);
}
export default App;
<file_sep>// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git init
// Reinitialized existing Git repository in C:/Users/user/Desktop/Google Keep/google-keep/.git/
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git remote add origin https://github.com/Sushmita-Ghosh/google-keep.git
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git status
// On branch master
// Changes not staged for commit:
// (use "git add/rm <file>..." to update what will be committed)
// (use "git restore <file>..." to discard changes in working directory)
// modified: src/App.css
// modified: src/App.js
// deleted: src/App.test.js
// deleted: src/reportWebVitals.js
// no changes added to commit (use "git add" and/or "git commit -a")
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git add -A
// warning: LF will be replaced by CRLF in src/App.js.
// The file will have its original line endings in your working directory
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git commit -m "created readme"
// [master dc9a003] created readme
// 4 files changed, 8 insertions(+), 84 deletions(-)
// rewrite src/App.css (100%)
// rewrite src/App.js (92%)
// delete mode 100644 src/App.test.js
// delete mode 100644 src/reportWebVitals.js
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git fetch origin
// remote: Enumerating objects: 3, done.
// remote: Counting objects: 100% (3/3), done.
// remote: Total 3 (delta 0), reused 0 (delta 0), pack-reused 0
// Unpacking objects: 100% (3/3), 602 bytes | 2.00 KiB/s, done.
// From https://github.com/Sushmita-Ghosh/google-keep
// * [new branch] main -> origin/main
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git push -u origin main
// error: src refspec main does not match any
// error: failed to push some refs to 'https://github.com/Sushmita-Ghosh/google-keep.git'
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (master)
// $ git branch -m master main
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main)
// $ git push -u origin main
// To https://github.com/Sushmita-Ghosh/google-keep.git
// ! [rejected] main -> main (non-fast-forward)
// error: failed to push some refs to 'https://github.com/Sushmita-Ghosh/google-keep.git'
// hint: Updates were rejected because the tip of your current branch is behind
// hint: its remote counterpart. Integrate the remote changes (e.g.
// hint: 'git pull ...') before pushing again.
// hint: See the 'Note about fast-forwards' in 'git push --help' for details.
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main)
// $ git fetch
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main)
// $ git pull --rebase origin main
// From https://github.com/Sushmita-Ghosh/google-keep
// * branch main -> FETCH_HEAD
// error: could not apply 3a77b81... Initialize project using Create React App
// Resolve all conflicts manually, mark them as resolved with
// "git add/rm <conflicted_files>", then run "git rebase --continue".
// You can instead skip this commit: run "git rebase --skip".
// To abort and get back to the state before "git rebase", run "git rebase --abort".
// Could not apply 3a77b81... Initialize project using Create React App
// CONFLICT (add/add): Merge conflict in README.md
// Auto-merging README.md
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git push -u origin main
// To https://github.com/Sushmita-Ghosh/google-keep.git
// ! [rejected] main -> main (non-fast-forward)
// error: failed to push some refs to 'https://github.com/Sushmita-Ghosh/google-keep.git'
// hint: Updates were rejected because a pushed branch tip is behind its remote
// hint: counterpart. Check out this branch and integrate the remote changes
// hint: (e.g. 'git pull ...') before pushing again.
// hint: See the 'Note about fast-forwards' in 'git push --help' for details.
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git add -A
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git commit -m "my local changes"
// [detached HEAD 8b1b852] my local changes
// 18 files changed, 11732 insertions(+), 1 deletion(-)
// create mode 100644 .gitignore
// rewrite README.md (100%)
// create mode 100644 package.json
// create mode 100644 public/favicon.ico
// create mode 100644 public/index.html
// create mode 100644 public/logo192.png
// create mode 100644 public/logo512.png
// create mode 100644 public/manifest.json
// create mode 100644 public/robots.txt
// create mode 100644 src/App.css
// create mode 100644 src/App.js
// create mode 100644 src/App.test.js
// create mode 100644 src/index.css
// create mode 100644 src/index.js
// create mode 100644 src/logo.svg
// create mode 100644 src/reportWebVitals.js
// create mode 100644 src/setupTests.js
// create mode 100644 yarn.lock
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git pull
// You are not currently on a branch.
// Please specify which branch you want to merge with.
// See git-pull(1) for details.
// git pull <remote> <branch>
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git pull origin main
// From https://github.com/Sushmita-Ghosh/google-keep
// * branch main -> FETCH_HEAD
// Already up to date.
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git push -u origin main
// To https://github.com/Sushmita-Ghosh/google-keep.git
// ! [rejected] main -> main (non-fast-forward)
// error: failed to push some refs to 'https://github.com/Sushmita-Ghosh/google-keep.git'
// hint: Updates were rejected because a pushed branch tip is behind its remote
// hint: counterpart. Check out this branch and integrate the remote changes
// hint: (e.g. 'git pull ...') before pushing again.
// hint: See the 'Note about fast-forwards' in 'git push --help' for details.
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git push origin main
// To https://github.com/Sushmita-Ghosh/google-keep.git
// ! [rejected] main -> main (non-fast-forward)
// error: failed to push some refs to 'https://github.com/Sushmita-Ghosh/google-keep.git'
// hint: Updates were rejected because a pushed branch tip is behind its remote
// hint: counterpart. Check out this branch and integrate the remote changes
// hint: (e.g. 'git pull ...') before pushing again.
// hint: See the 'Note about fast-forwards' in 'git push --help' for details.
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
// $ git push -f origin main
// Enumerating objects: 27, done.
// Counting objects: 100% (27/27), done.
// Delta compression using up to 4 threads
// Compressing objects: 100% (26/26), done.
// Writing objects: 100% (27/27), 212.16 KiB | 7.86 MiB/s, done.
// Total 27 (delta 1), reused 0 (delta 0), pack-reused 0
// remote: Resolving deltas: 100% (1/1), done.
// To https://github.com/Sushmita-Ghosh/google-keep.git
// + f3ef2ce...dc9a003 main -> main (forced update)
// user@DESKTOP-L70T6QA MINGW64 ~/Desktop/Google Keep/google-keep (main|REBASE 1/2)
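// Note: a minimal sketch of a cleaner way out of the "non-fast-forward" rejection above
// (assuming the remote history on origin/main should be kept rather than overwritten):
// $ git fetch origin
// $ git rebase origin/main      # resolve any conflicts, then: git add <files> && git rebase --continue
// $ git push -u origin main
// The `git push -f origin main` used above does succeed, but it rewrites the history on origin.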
<file_sep>import React, { useState } from "react";
import "./notes.styles.scss";
import { MdAdd, MdModeEdit } from "react-icons/md";
const Notes = (props) => {
const formHandler = (event) => {
event.preventDefault();
};
const setInputHandler = (event) => {
const { name, value } = event.target;
props.setNote((prevState) => {
return {
...prevState,
[name]: value,
};
});
};
const addTriggerHandler = () => {
props.passNote(props.note);
props.setNote({
title: "",
content: "",
});
};
const editTriggerHandler = () => {
props.editItemChange(props.note);
props.setNote({
title: "",
content: "",
});
};
return (
<div className="notes-container">
<div className="form">
<input
type="text"
name="title"
value={props.note.title}
onChange={setInputHandler}
placeholder="Title"
autoComplete="off"
/>
<textarea
rows=""
cols=""
name="content"
value={props.note.content}
onChange={setInputHandler}
placeholder="Take a note.."
/>
{props.toggleSubmit ? (
<button className="button" onClick={addTriggerHandler}>
<MdAdd className="icon" />
</button>
) : (
<button className="button" onClick={editTriggerHandler}>
<MdModeEdit className="icon" />
</button>
)}
</div>
</div>
);
};
export default Notes;
<file_sep>import React from "react";
import "./header.styles.scss";
const Header = ({ handleLogout }) => {
return (
<div className="header">
<div className="logo">
{" "}
<h2>KEEPZZ</h2>{" "}
</div>
<div className="logout">
<button onClick={handleLogout} className="logout-button">
Logout
</button>
</div>
</div>
);
};
export default Header;
<file_sep>import React, { useState } from "react";
import { MdSearch } from "react-icons/md";
import "./search-box.component.scss";
const SearchBox = ({ searchField, searchHandler }) => {
const getSearchField = (e) => {
//console.log(e.target.value);
searchHandler(e.target.value);
};
return (
<div className="search">
<input
className="input"
type="text"
//value={searchField}
onChange={getSearchField}
placeholder="Search the notes"
/>
</div>
);
};
export default SearchBox;
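// Usage sketch (assumed parent component state):
//   const [searchText, setSearchText] = useState("");
//   <SearchBox searchHandler={setSearchText} />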
|
04f5481e2d1b7fbcfddb1f99de14b406b4617166
|
[
"Markdown",
"JavaScript"
] | 6 |
Markdown
|
Sushmita-Ghosh/google-keep-clone
|
c4292ffcd8d87a3eda2320db5c25edbf77522456
|
a59db4dff9ffc6898a4d7eafb2513fcd9850f37c
|
refs/heads/master
|
<file_sep><?php
$title = "Guaranteed job placements apply to get hired";
$description = "Finding you the right opportunity for your career growth";
?>
<?php require_once("templates/header.php") ?>
<!-- CONTENT WRAPPER
============================================= -->
<div id="content-wrapper">
<!-- INTRO
============================================= -->
<section id="intro" class="intro-parallax">
<div class="container">
<div class="row">
<!-- Intro Section Description -->
<div id="intro_description" class="col-sm-7 col-md-7">
<!-- Intro Section Title -->
<h1><strong>Guaranteed</strong> Job Placements: apply to <strong>get hired!</strong></h1>
<!-- Description #1 -->
<div class="intro_feature">
<h4><i class="fa fa-check"></i> Do not waste time on interviews</h4>
<p>The next call you get will be your office tour or a meeting with your employer. Your first
    three-month contract will pay your salary; afterwards, your employer will make your role a
    more permanent one!
</p>
</div>
<!-- Description #2 -->
<div class="intro_feature">
<h4><i class="fa fa-check"></i> Get a new job placement within 6 weeks!</h4>
<p>Nobody enjoys preparing for interviews; they can overwhelm you at times, and you often make
    mistakes and lose the job that was rightfully yours.
</p>
</div>
<!-- Description #3 -->
<div class="intro_feature">
<h4><i class="fa fa-check"></i> Send us a thank you review!</h4>
<p>Got a call from your new boss? It doesn't hurt to say thank you. We do not accept
    payments from candidates; a small review or referring us to your friends will do!
</p>
</div>
</div> <!-- End Intro Section Description -->
<!-- Intro Section Form -->
<div id="intro_form" class="col-sm-5 col-md-5">
<!--Register form -->
<div class="form_register" >
<h2> Register Now! </h2>
<!-- Begin Mailchimp Signup Form -->
<style type="text/css">
#mc_embed_signup{background:#fff; clear:left; font:14px Helvetica,Arial,sans-serif; }
/* Add your own Mailchimp form style overrides in your site stylesheet or in this style block.
We recommend moving this block and the preceding CSS link to the HEAD of your HTML file. */
</style>
<div id="mc_embed_signup">
<form style="padding:1em;" action="https://online.us4.list-manage.com/subscribe/post?u=8f295d775ea20e08ac099b7c1&id=63c5bd88cb" method="post" id="mc-embedded-subscribe-form" name="mc-embedded-subscribe-form" class="validate" target="_blank" novalidate>
<div id="mc_embed_signup_scroll">
<h2 style="color:black">Get Started</h2>
<br>
<div class="mc-field-group">
<label for="mce-EMAIL" class="text-dark" style="color:black">Email Address <span class="asterisk">*</span>
</label>
<input type="email" value="" name="EMAIL" required class="required email form-control" id="mce-EMAIL">
</div>
<div class="mc-field-group">
<label for="mce-FNAME" class="text-dark" style="color:black">First Name </label>
<input type="text" value="" name="FNAME" required class="form-control" id="mce-FNAME">
</div>
<div class="mc-field-group">
<label for="mce-LNAME" class="text-dark" style="color:black">Last Name </label>
<input type="text" value="" name="LNAME" required class="form-control" id="mce-LNAME">
</div>
<div class="mc-field-group size1of2">
</div> <div id="mce-responses" class="clear">
<div class="response" id="mce-error-response" style="display:none"></div>
<div class="response" id="mce-success-response" style="display:none"></div>
</div> <!-- real people should not fill this in and expect good things - do not remove this or risk form bot signups-->
<div style="position: absolute; left: -5000px;" aria-hidden="true"><input type="text" name="b_8f295d775ea20e08ac099b7c1_63c5bd88cb" tabindex="-1" value=""></div>
<div class="clear">
<input type="submit" value="Register Today" name="subscribe" id="mc-embedded-subscribe" class="btn btn-success btn-block my-5 p-2"></div>
</div>
</form>
</div>
<script type='text/javascript' src='//s3.amazonaws.com/downloads.mailchimp.com/js/mc-validate.js'></script><script type='text/javascript'>(function($) {window.fnames = new Array(); window.ftypes = new Array();fnames[0]='EMAIL';ftypes[0]='email';fnames[1]='FNAME';ftypes[1]='text';fnames[2]='LNAME';ftypes[2]='text';fnames[3]='ADDRESS';ftypes[3]='address';fnames[4]='PHONE';ftypes[4]='phone';fnames[5]='BIRTHDAY';ftypes[5]='birthday';}(jQuery));var $mcj = jQuery.noConflict(true);</script>
<!--End mc_embed_signup-->
</div>
</div> <!-- End Intro Section Form -->
</div> <!-- End row -->
</div> <!-- End container -->
</section> <!-- END INTRO -->
<?php require_once("templates/footer.php") ?>
<file_sep>use strict;
use warnings;
print "Content-Type:text/html\n\n";
print "This is a test perl script";
<file_sep><?php
$title = "Frequently asked questions from our candidates";
$description = "Understanding the requirements will help you speed up your applications read carefully";
?>
<?php require_once("templates/header.php") ?>
<div id="content-wrapper">
<section id="faq">
<div class="container">
<!-- Section Title -->
<div class="row">
<div class="col-md-12 titlebar">
<h1>Frequently <strong>asked questions</strong></h1>
<p>For any question not listed among the top 4 here, email <a
href="mailto:<EMAIL>"><EMAIL></a></p>
</div>
</div>
<div class="row">
<!-- Question #1-->
<div id="question_1" class="col-md-6">
<div class="question">
<h4>What is Employment Equity / Affirmative Action?</h4>
<p> The South African Employment Equity Act defines Affirmative Action candidates as
previously disadvantaged people. Currently it includes people of African, Coloured,
Indian or Chinese ethnicity as well as females and disabled people.</p>
</div>
</div>
<!-- Question #2-->
<div id="question_2" class="col-md-6">
<div class="question">
<h4>How safe is my information?</h4>
<p> We respect your privacy. That is why we belong to TRUSTe and abide by their rules. We
will never share or sell your information to a third party. All your information remains
confidential in terms of our privacy policy. </p>
</div>
</div>
</div> <!-- End row -->
<div class="row">
<!-- Question #3-->
<div id="question_3" class="col-md-6">
<div class="question">
<h4>What else do we do to protect and defend anonymous members?</h4>
<p>We take steps to alert our members and publicly warn our community if we believe an
employer may be inappropriately using the legal system to suppress free speech by
threatening or pursuing legal action against our anonymous members.
</p>
</div>
</div>
<!-- Question #4-->
<div id="question_4" class="col-md-6">
<div class="question">
<h4>How to get a Tax Number?</h4>
<p>
You can get it on eFiling if you are registered as an eFiler. All you need to do is log
on and check your number. You can request it verbally from the SARS Contact Centre on
0800 00 7277. Please have your ID number on hand as SARS will have to authenticate who
you are.
</p>
</div>
</div>
</div> <!-- End row -->
</div> <!-- End container -->
</section> <!-- END FAQs -->
<?php require_once("templates/footer.php") ?>
<file_sep><!DOCTYPE html>
<!-- Crossway - Startup Landing Page Template design by DSA79 (http://www.dsathemes.com) -->
<!--[if lt IE 7 ]><html class="ie ie6" lang="en"> <![endif]-->
<!--[if IE 7 ]><html class="ie ie7" lang="en"> <![endif]-->
<!--[if IE 8 ]><html class="ie ie8" lang="en"> <![endif]-->
<!--[if (gte IE 9)|!(IE)]><!-->
<html lang="en">
<!--<![endif]-->
<head>
<!-- Global site tag (gtag.js) - Google Analytics -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-157502961-1"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'UA-157502961-1');
</script>
<!-- Basic -->
<meta charset="utf-8">
<title>
<?php
echo $title
?>
</title>
<meta name="author" content="workplacements.online">
<link rel="shortcut icon" href="img/logo.png" type="image/*">
<meta name="description"
content="<?php
echo $description
?>">
<!-- Mobile Specific Metas -->
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<!-- Libs CSS -->
<link href="css/bootstrap.css" rel="stylesheet">
<link href="css/font-awesome.min.css" rel="stylesheet">
<link href="css/flexslider.css" rel="stylesheet">
<link href="css/owl.carousel.css" rel="stylesheet">
<!-- Template CSS -->
<link href="css/style.css" rel="stylesheet">
<!-- Responsive CSS -->
<link href="css/responsive.css" rel="stylesheet">
<!-- Favicons -->
<link rel="shortcut icon" href="img/icons/favicon.ico">
<link rel="apple-touch-icon" sizes="114x114" href="img/icons/apple-touch-icon-114x114.png">
<link rel="apple-touch-icon" sizes="72x72" href="img/icons/apple-touch-icon-72x72.png">
<link rel="apple-touch-icon" href="img/icons/apple-touch-icon.png">
<!-- Google Fonts -->
<link
href='https://fonts.googleapis.com/css?family=Lato:400,900italic,900,700italic,400italic,300italic,300,100italic,100'
rel='stylesheet' type='text/css'>
<link href="https://stackpath.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" rel="stylesheet"
integrity="<KEY>" crossorigin="anonymous">
<style>
.content-adjust{
margin-top: 10em;
margin-bottom: 5em;
}
</style>
<script>
(function () {
if (location.protocol !== "https:") {
location.replace("https://workplacements.online")
}
}())
</script>
<!-- The core Firebase JS SDK is always required and must be listed first -->
<script src="https://www.gstatic.com/firebasejs/7.8.0/firebase-app.js"></script>
<script src="https://www.gstatic.com/firebasejs/7.8.0/firebase-analytics.js"></script>
<script src="https://www.gstatic.com/firebasejs/7.8.0/firebase-auth.js"></script>
<script src="https://www.gstatic.com/firebasejs/7.8.0/firebase-firestore.js"></script>
<script>
// Your web app's Firebase configuration
var firebaseConfig = {
apiKey: "<KEY>",
authDomain: "workplacements-299fd.firebaseapp.com",
databaseURL: "https://workplacements-299fd.firebaseio.com",
projectId: "workplacements-299fd",
storageBucket: "workplacements-299fd.appspot.com",
messagingSenderId: "58006187682",
appId: "1:58006187682:web:e5154e52ee90139e1bf1fb",
measurementId: "G-KLF24TXSHG"
};
// Initialize Firebase
firebase.initializeApp(firebaseConfig);
firebase.analytics();
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/sweetalert/2.1.2/sweetalert.min.js" integrity="<KEY> crossorigin="anonymous"></script>
</head>
<body>
<!-- PRELOADER
============================================= -->
<div id="preloader">
<div id="status"></div>
</div>
<!-- HEADER
============================================= -->
<header id="header">
<div class="navbar navbar-fixed-top">
<div class="container">
<!-- Logo & Responsive Menu -->
<div class="navbar-header">
<button type="button" id="nav-toggle" class="navbar-toggle" data-toggle="collapse"
data-target="#navigation-menu">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="#top-page-banner"><img src="img/logo.png" alt="logo" width="100"
role="banner"></a>
</div> <!-- /.navbar-header -->
<!-- Navigation -->
<nav id="navigation-menu" class="collapse navbar-collapse" role="navigation">
<ul class="nav navbar-nav navbar-right">
<li><a id="GoToHome" href="/">Home</a></li>
<li><a id="GoToAbout" href="about.php">About</a></li>
<li><a id="GoToFeatures" href="process.php">Recruitment Process</a></li>
<li><a id="GoToFaq" href="faq.php">FAQ</a></li>
<li><a id="GoToClients" href="clients.php">Clients</a></li>
</ul>
</nav> <!-- /.navbar-collapse -->
</div> <!-- /.container -->
</div> <!-- /.navbar -->
</header> <!-- END HEADER -->
|
9320aa80508d25c2d666ae12ae184d0141ef0ff2
|
[
"C",
"PHP"
] | 4 |
PHP
|
MatyilaGoodwish/workdev
|
4a874da490300ee233190f835423b6cf49bcc961
|
eefbece960ddeb6524ffea616bfa3f4b63ddd7b6
|
refs/heads/master
|
<file_sep>ffmpeg -f concat -i list.txt -c copy playlist.mp3
<file_sep>ssh [email protected]
<file_sep>ffplay -fflags nobuffer http://192.168.127.12:8080/hls/1234.m3u8
<file_sep>ssh [email protected]
<file_sep>ffplay -fflags nobuffer rtmp://172.16.31.10:1935/play/1234
<file_sep>tmux attach-session -t 0
|
0f3ac6a10c7b6eeaa63fa5343fdd10ea78746c9e
|
[
"Shell"
] | 6 |
Shell
|
khacpv/playlist-api
|
b39f0e97649eae2eaa6464ca2eda5666e889fb7d
|
4b4f4ab561e5c59c5d48add33de1c0eafa2c248e
|
refs/heads/master
|
<repo_name>Twynzen/Apicon-nodejs-json-y-express<file_sep>/routes/index.js
module.exports = (app)=> {
app.get('/', (req, res) =>{
res.status(200).json({message: 'Estoy desde la carpeta routes en home'});
})
app.post('/formulario', (req, res) =>{
const nombre = req.body.nombre;
const apellido = req.body.apellido
const ciudad = req.body.ciudad;
console.log(req.body);
res.status(200).json({message: `Yo soy ${nombre} ${apellido} y vivo en ${ciudad}`});
})
app.get('/formulario', (req, res) =>{
res.status(404).json({message: 'El formulario esta en post'});
})
}
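// A minimal usage sketch for the routes above (assumes the app listens on config.SERVER_PORT,
// 3000 by default, with JSON body parsing enabled):
//   curl -X POST http://localhost:3000/formulario \
//     -H "Content-Type: application/json" \
//     -d '{"nombre":"Ana","apellido":"Diaz","ciudad":"Bogota"}'
//   -> {"message":"Yo soy Ana Diaz y vivo en Bogota"}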
<file_sep>/config/index.js
module.exports = {
    // Use an available port: process.env.PORT if set, otherwise 3000
SERVER_PORT: process.env.PORT || 3000
}
|
571d21f1dcf59a61f88d522762ceedbbdb7ba413
|
[
"JavaScript"
] | 2 |
JavaScript
|
Twynzen/Apicon-nodejs-json-y-express
|
74d7d7262860d795721bc9d40e579900bb413802
|
374c75c67a9158b4daa9df708b9ac099392e5255
|
refs/heads/main
|
<file_sep>name = 'Bob'
age = 3000
if name == 'Alice':
print('Hi Alice')
elif age < 12:
print('You are not Alice!')
elif age > 2000:
print('Unlike you, Alice is not a vampire!')
<file_sep>def plusOne(num):
return num + 1
newNum = plusOne(3)
print(newNum)
<file_sep>def div2by(divideBy):
try:
return 2 / divideBy
except ZeroDivisionError:
print('Error: Cannot divide by zero.')
print(div2by(2))
print(div2by(0))
print(div2by(1))
<file_sep>def spam():
eggs = 'Hello'
print(eggs)
eggs = 43
spam()
print(eggs)
<file_sep># Learning Python
My journey - learning python, following along - [Automate the boring stuff with Python](https://automatetheboringstuff.com./)
<file_sep># argparse_example.py
import argparse
if __name__ == '__main__':
# Initialize
parser = argparse.ArgumentParser(description="Simple calculator")
# Adding optional parameters
parser.add_argument('-n1',
'--num1',
help="Number 1",
type=float)
parser.add_argument('-n2',
'--num2',
help="Number 2",
type=float)
parser.add_argument('-op',
'--operation',
help="operator",
default="*")
# Parsing the argument
args = parser.parse_args()
print(args)
# Initialize result to None
result = None
# Simple calculator operations
if args.operation == '+':
result = args.num1 + args.num2
if args.operation == '-':
result = args.num1 - args.num2
if args.operation == '/':
result = args.num1 / args.num2
if args.operation == '*':
result = args.num1 * args.num2
if args.operation == 'pow':
result = pow(args.num1, args.num2)
# Print the result
print("Result = ", result)
|
81c884e603bc56cda70027d94582fc88e1f271d4
|
[
"Markdown",
"Python"
] | 6 |
Python
|
rishabkumar7/LearningPython
|
145575c19c3a0f217590cc350df82aef86486f8e
|
17cb9303e55a973ede81cfff320b93b224ec19b5
|
refs/heads/master
|
<file_sep>
def hitung_periode(waktu, jumlah):
    periode = waktu / jumlah
    print(f'waktu = {waktu} s dalam jumlah = {jumlah}')
    print(f'sehingga periode = {periode} s')
    return periode

#waktu = 30
#jumlah = 60
periode = hitung_periode(30, 60)


def hitung_frekuensi(jumlah, waktu):
    frekuensi = jumlah / waktu
    print(f'jumlah = {jumlah} dalam waktu = {waktu} s')
    print(f'sehingga frekuensi = {frekuensi} Hz')
    return frekuensi

#jumlah = 500
#waktu = 100
frekuensi = hitung_frekuensi(500, 100)

#waktu = 5
#jumlah = 20
periode = hitung_periode(5, 20)
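# Worked check of the formulas above (T = waktu / jumlah, f = jumlah / waktu, and f = 1 / T):
#   hitung_periode(30, 60)     -> 30 / 60   = 0.5 s
#   hitung_frekuensi(500, 100) -> 500 / 100 = 5.0 Hz
#   hitung_periode(5, 20)      -> 5 / 20    = 0.25 s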
|
b6548628a651f38bcfc649956165d8352babbcc1
|
[
"Python"
] | 1 |
Python
|
silvadamaa/uin_modularization_using_class_and_package
|
0d86a8b85a2ecc91523b5a09db0f8d329870c63d
|
d61d749a01c2ec10fc9faa42bd84dad90231db9f
|
refs/heads/main
|
<repo_name>techieshouvik/Large-Small-ML<file_sep>/Main.cpp
//CODE written by <NAME>
//Header Files
#include<iostream>
#include<conio.h>
using namespace std;
//Main function
int main() {
//Heading
cout << " ----MACHINE LEARNING :: ---- "<<endl;
cout << "\n This Program calculates whether a number is LARGE or SMALL based on 10 datasets \n used in training - CODE by Techieshouvik\n";
//Variables
string a, b;
int inp,trained_data=1,i=1,n;
//Labels
a = "LARGE"; //Classification Label 1
b = "SMALL"; //Classification Label 2
cout << "\n***--TRAINING SESSION---[10 Gen Trainer]-***"<<endl;
//Loop 10 times
while (i <= 10) {
cout << "\n------------------------GEN :: " << i<<">>>>";
//Input DATA
cout << "\nEnter a number :: ";
cin >> inp;
cout << "\nIs it Large [1] / Small [0] :: ";
cin >> n;
//Define the Rule
if (inp >= trained_data) {
cout <<endl<<"AI says :: "<< a; //LARGE NUMBER
if (n == 0) {
trained_data++; //Tweaking
cout << " [Tweaked] ";
}
}
else {
cout <<endl<<"AI says :: "<< b; //SMALL NUMBER
if (n == 1) {
trained_data--; //Tweaking
cout << " [Tweaked] ";
}
}
i++; //COUNTER VARIABLE INCREMENT
}
cout << endl << "----------------------------------------";
cout << endl << "TRAINED DATA FINAL :: Anything above " << trained_data<<" is LARGE";
cout << endl << "----------------------------------------";
_getch();
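	// Illustrative trace of the threshold rule above (hypothetical inputs):
	//   threshold starts at 1; enter 5 and label it SMALL (0) -> AI said LARGE, so the threshold is tweaked to 2
	//   then enter 0 and label it LARGE (1)                   -> AI said SMALL, so the threshold is tweaked back to 1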
}<file_sep>/README.md
# LargeOrSmallNumber-ML-Cpp
This C++ Program calculates whether a number is LARGE or SMALL based on 10 datasets used in training - CODE by <NAME>
|
7cf987b62fe3df80a80384a022ea6c3a46f5c4e9
|
[
"Markdown",
"C++"
] | 2 |
C++
|
techieshouvik/Large-Small-ML
|
4d9b892709a9dc3156cf3927864f4f1c7e19273e
|
5cd2aee87f38a585c722ee65650e1d3c13a13617
|
refs/heads/master
|
<file_sep>package fire.alarm.server;
import java.rmi.Remote;
import java.rmi.RemoteException;
import javax.management.monitor.Monitor;
import fire.monitor.FireSensorMonitor;
import fire.alarm.server.FireAlarmServer;
public interface IRmiServer extends Remote{
	//public void bindToRegistry(ServerInstance serverInstance);
	// Every method of a java.rmi.Remote interface must declare RemoteException.
	public void addMonitor(FireSensorMonitor monitor) throws RemoteException;
	public void removeMonitor(FireSensorMonitor monitor) throws RemoteException;
	public void notifyMonitors(FireSensorData fireSensorData) throws RemoteException; // we don't need to pass the sensorId here since the fireSensorData obj already has it inside it.
	public void notifyMonitors(String error) throws RemoteException;
}
<file_sep>TODO: complete the readme file.
|
978ff72fc0688dcc60ba30959a2019d410204e40
|
[
"Markdown",
"Java"
] | 2 |
Java
|
yohanAnush/DS_Assignment
|
97c375f5c3ecf035383d088699cfe51a8468f3d9
|
64ecbceae3d151b21066ebe71170135ec90f6f54
|
refs/heads/master
|
<file_sep>from tkinter import *
from tkinter import messagebox
m = Tk()
m.title("Phone Database")
m.configure(background="yellow")
a1=StringVar()
a2=StringVar()
a3=StringVar()
a4=StringVar()
a5=StringVar()
a6=StringVar()
count=0
def add():
f=open("abc.txt",'a')
a1=t1.get()
a2=t2.get()
a3=t3.get()
a4=t4.get()
a5=t5.get()
if(a1=='' or a2=='' or a3=='' or a4=='' or a5==''):
print("Invalid Entry")
exit()
f.writelines(a1.ljust(10)+a2.ljust(10)+a3.ljust(10)+a4.ljust(10)+a5.ljust(10)+"\n")
print("Your record has been added")
f.close()
def srch():
fname = "abc.txt"
aid=input("Enter the Name of the Model of the phone you want to search:")
with open(fname, 'r') as f:
for line in f:
words = line.split()
if words[0]==aid:
print(line)
def updt():
fname = "abc.txt"
aid=input("Enter the Name of the Model of the phone you want to search: ")
with open(fname, 'r') as f:
for line in f:
words = line.split()
if words[0]==aid:
print(line)
asd=int(input("Enter the attribute you want to update 0: Name of the model 1: RAM 2: Storage Memory 3: Camera Quality(in pixels) 4: Price in India"))
twitch = (input("Enter the new value : "))
words[asd]=twitch
line1 = ""
for k in words:
line1 += k
line1 += " "
fname = "abc.txt"
f2=open("xyz.txt",'w')
with open(fname, 'r') as f:
for line in f:
words = line.split()
if words[0]!=aid:
f2.writelines(line)
f2.close()
f2=open("xyz.txt",'a')
f2.writelines(line1)
f2.close()
'''def dele():
# f2=open("abc.txt",'r')
# ctr=0
# for line in f2:
# ctr=ctr+1
# print(ctr)
# f2.seek(0)
# rl=f2.readlines()
# l=list(rl)
# print(l)
# f2.close()
# f2=open("abc.txt",'a')
# i=0
# aid=input("Enter the Name of the Model of the phone you want to delete:")
# while(rl!="" and i<ctr):
# print(l[i][0])
# if(l[i][0]!=aid):
# f2.writelines(l[i][0].ljust(20)+l[i][1].ljust(20)+l[i][2].ljust(20)+l[i][3].ljust(20)+l[i][4].ljust(3)+"\n")
# print("Record has been deleted")
# i=i+1
# f2.close()
fname = "abc.txt"
f2=open("xyz.txt",'w')
aid=input("Enter the Name of the Model of the phone you want to delete:")
with open(fname, 'r') as f:
for line in f:
words = line.split()
if words[0]!=aid:
f2.writelines(line)
f2.close()
'''
def Del() :
str = input("Enter the Name of the Model of the phone you want to delete:")
with open("abc.txt",'r+') as f :
f1=f.readlines()
f.seek(0)
for line in f1:
if str not in line :
f.write(line)
f.truncate()
print("Your record has been deleted")
def firstrecord():
f3=open("abc.txt",'r')
line = f3.readline()
print(line)
lp=line.split()
a1.set(lp[0])
a2.set(lp[1])
a3.set(lp[2])
a4.set(lp[3])
a5.set(lp[4])
f3.close()
def lastrecord():
f4=open("abc.txt","r")
lp=f4.readlines()
leng=lp[len(lp)-1]
print(leng)
l12=leng.split()
a1.set(l12[0])
a2.set(l12[1])
a3.set(l12[2])
a4.set(l12[3])
a5.set(l12[4])
f4.close()
def Exit() :
Exit = messagebox.askyesno("Quit System", "Do you want to quit?")
if Exit > 0:
m.destroy()
return
def Next():
global count
f=open("abc.txt",'r')
i=0
while(i<=count):
l=f.readline()
i=i+1
list1=l.split()
    if len(list1) != 0:
a1.set(list1[0])
a2.set(list1[1])
a3.set(list1[2])
a4.set(list1[3])
a5.set(list1[4])
count = count + 1
f.close()
def prev():
global count
if count!=1:
f=open("abc.txt",'r')
i=0
count = count - 1
while(i<count):
l=f.readline()
i=i+1
list1=l.split()
a1.set(list1[0])
a2.set(list1[1])
a3.set(list1[2])
a4.set(list1[3])
a5.set(list1[4])
f.close()
l0=Label(m,text="",bg="yellow")
l0.grid(row=1,column=1)
l1=Label(m,text="Name of the model",bg="yellow",fg="blue")
l1.grid(row=2,column=1)
l2=Label(m,text="RAM",bg="yellow",fg="blue")
l2.grid(row=3,column=1)
l3=Label(m,text="Storage Memory",bg="yellow",fg="blue")
l3.grid(row=4,column=1)
l4=Label(m,text="Camera Quality(in pixels)",bg="yellow",fg="blue")
l4.grid(row=5,column=1)
l5=Label(m,text="Price in India",bg="yellow",fg="blue")
l5.grid(row=6,column=1)
l6=Label(m,text="",bg="yellow")
l6.grid(row=7,column=1)
t1=Entry(m,textvariable=a1)
t1.grid(row=2,column=3)
t2=Entry(m,textvariable=a2)
t2.grid(row=3,column=3)
t3=Entry(m,textvariable=a3)
t3.grid(row=4,column=3)
t4=Entry(m,textvariable=a4)
t4.grid(row=5,column=3)
t5=Entry(m,textvariable=a5)
t5.grid(row=6,column=3)
b1=Button(m,text="Save",fg="black",bg="red",width=20,command=add)
b1.grid(row=10,column=1)
b2=Button(m,text="Delete",fg="black",bg="red",width=20,command=Del)
b2.grid(row=10,column=2)
b3=Button(m,text="Search",fg="black",bg="red",width=20,command=srch)
b3.grid(row=10,column=3)
b4=Button(m,text="Update",fg="black",bg="red",width=20,command=updt)
b4.grid(row=10,column=4)
b5=Button(m,text="First record",fg="black",bg="red",width=20,command=firstrecord)
b5.grid(row=9,column=1)
b6=Button(m,text="|<",fg="black",bg="red",width=20,command=prev)
b6.grid(row=9,column=2)
b7=Button(m,text=">|",fg="black",bg="red",width=20,command=Next)
b7.grid(row=9,column=3)
b8=Button(m,text="Last Record",fg="black",bg="red",width=20,command=lastrecord)
b8.grid(row=9,column=4)
b9=Button(m,text="Exit",fg="black",bg="red",width=20,command=Exit)
b9.grid(row=11,column=3)
m.mainloop()
|
b57ffa405edd330b291c3627cf84e224871a5821
|
[
"Python"
] | 1 |
Python
|
rajatbajaj1998/pythonprpoject
|
71838ea7595a97f16f8e228b34645b95499b2b20
|
dc1987242caaef84cf893e9d27966ab73b298c48
|
refs/heads/master
|
<repo_name>Utah-CS-4540-Fall-2018/cs4540-fall-2018-assignment-1-richardlynnchild<file_sep>/web_server.rb
# Assignment 1
# <NAME>
# uID: u0581030
# Github: richardlynnchild
# This is a simple server class that can be instantiated in other files.
# It will open a socket on the localhost at a specified port. Then it can
# listen for incoming clients on the specified port. The server will identify
# which browser the client is using and send a message back to the user.
require 'socket'
class WebServer
# Constructs new TCP Server on local host with
# specified port. Default port is '1234'.
def initialize(port=1234)
@server = TCPServer.new('localhost',port)
end
# Continuously istens for incoming clients, handles the request, constructs
# a response and sends the response. No parameters needed.
def listen()
loop do
collect_request()
create_response()
write_response()
end
end
# Creates a socket client by accepting the new server connection.
# Collects the incoming request from the client and puts each header
# and line into an array. No parameters needed.
def collect_request()
@socket = @server.accept
@request_lines = []
line = @socket.gets.strip
while line != ''
@request_lines << line
line = @socket.gets.strip
end
end
# Parses the incoming request information and pulls out the User-Agent
# information. Then creates a HTTP OK response with a greeting message.
def create_response()
agent = ''
@request_lines.each do |line|
puts line
words = line.split
if words[0] == "User-Agent:"
agent = words[1]
end
end
greeting = "You are using #{agent}\n"
@response = "HTTP/1.1 200 OK\r\n" +
"Content-Type: text/plain\r\n" +
"Content-Length: #{greeting.bytesize}\r\n" +
"Connection: close\r\n\r\n" + greeting
end
# Sends the response string back to the client and closes the socket.
def write_response()
@socket.print @response
@socket.close
end
end
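# Usage sketch (assuming this file is required from a small runner script):
#   server = WebServer.new(8080)
#   server.listen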
|
f7f0bcbbc31ad07836cb6237a46867722fe336e7
|
[
"Ruby"
] | 1 |
Ruby
|
Utah-CS-4540-Fall-2018/cs4540-fall-2018-assignment-1-richardlynnchild
|
2d7aaca0826bd96c9068339dd4907b7bbb2bac14
|
49607cfd12c9e624755901166d57085932a4b469
|
refs/heads/main
|
<file_sep>import { UserType } from '../TypeDef/User';
import { MessageType } from '../TypeDef/Message';
import { GraphQLString, GraphQLID } from 'graphql';
import { Users } from '../../Entities/Users';
export const CREATE_USER = {
type: UserType,
args: {
name: { type: GraphQLString },
username: { type: GraphQLString },
password: { type: GraphQLString },
},
async resolve(parent: any, args: any) {
const { name, username, password } = args;
if (!name || !username || !password) {
return {
success: false,
message: 'You are missing some fields',
};
}
await Users.insert({ name, username, password });
return {
name,
username,
password,
};
},
};
export const DELETE_USER = {
type: MessageType,
args: {
id: { type: GraphQLID },
},
async resolve(parent: any, args: any) {
const { id } = args;
const theUser = await Users.findOne({ id });
if (!theUser) {
return {
success: false,
message: 'User does not exist',
};
}
if (theUser) {
await Users.delete({ id });
return {
success: true,
message: 'The user was deleted successfully',
};
}
},
};
export const UPDATE_USER_INFO = {
type: UserType,
args: {
id: { type: GraphQLID },
},
async resolve(parent: any, args: any) {},
};
export const CHANGE_PASSWORD = {
type: MessageType,
args: {
id: { type: GraphQLID },
oldPassword: { type: GraphQLString },
newPassword: { type: GraphQLString },
},
async resolve(parent: any, args: any) {
const { id, oldPassword, newPassword } = args;
// Check if User exists in the db
const user = await Users.findOne({ id });
if (!user) {
// throw new Error('No record found for this user');
return {
success: false,
message: 'No record found for this user',
};
}
if (user) {
const existingPassword = user.password;
if (oldPassword === existingPassword) {
// Proceed to change users password
        await Users.update({ id: id }, { password: newPassword });
return {
success: true,
message: 'Password updated',
};
} else {
        // throw new Error('Incorrect existing password');
return {
success: false,
          message: 'Incorrect existing password',
};
}
}
},
};
<file_sep>import { GraphQLList } from 'graphql';
import { UserType } from '../TypeDef/User';
import { Users } from '../../Entities/Users';
interface IUser {
id: number;
name: string;
username: string;
password: string;
}
export const GET_ALL_USERS = {
type: new GraphQLList(UserType),
resolve(): Promise<IUser[]> {
return Users.find();
},
};
<file_sep>import { GraphQLSchema, GraphQLObjectType } from 'graphql';
import { GET_ALL_USERS } from './Queries/User';
import { CREATE_USER, DELETE_USER, CHANGE_PASSWORD } from './Mutations/User';
const RootQuery = new GraphQLObjectType({
name: 'RootQuery',
fields: {
getAllUsers: GET_ALL_USERS,
},
});
const RootMutation = new GraphQLObjectType({
name: 'RootMutation',
fields: {
createUser: CREATE_USER,
deleteUser: DELETE_USER,
changePassword: CHANGE_PASSWORD,
},
});
export const schema = new GraphQLSchema({
query: RootQuery,
mutation: RootMutation,
});
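// Example operations against this schema (a sketch; assumes the schema is mounted on a standard
// GraphQL HTTP endpoint and that UserType/MessageType expose the fields shown):
//   query    { getAllUsers { id name username } }
//   mutation { createUser(name: "Ada", username: "ada", password: "secret") { name username } }
//   mutation { changePassword(id: 1, oldPassword: "secret", newPassword: "better") { success message } }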
|
8c079464dd7d0e40da65e311e5ed5b4d2f233a54
|
[
"TypeScript"
] | 3 |
TypeScript
|
GreatEki/React-Node-GraphQL
|
5de03b78f99b0a4e75fcadd2216d01cad0e14972
|
6ca7abc6409c9579e6e663d39db8548708b7c129
|
refs/heads/master
|
<repo_name>seongminp/SlideButton<file_sep>/app/src/main/java/rojoxpress/slideexample/MainActivity.java
package rojoxpress.slideexample;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.SwitchCompat;
import android.widget.CompoundButton;
import android.widget.TextView;
import android.widget.Toast;
import com.rojoxpress.slidebutton.SlideButton;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final TextView textView = (TextView) findViewById(R.id.progress);
final SlideButton slideButton = (SlideButton) findViewById(R.id.slide_button);
SwitchCompat switchCompat = (SwitchCompat) findViewById(R.id.switch_);
slideButton.setSlideButtonListener(new SlideButton.SlideButtonListener() {
@Override
public void onSlide() {
Toast.makeText(MainActivity.this,"UNLOCKED",Toast.LENGTH_SHORT).show();
}
});
slideButton.setOnSlideChangeListener(new SlideButton.OnSlideChangeListener() {
@Override
public void onSlideChange(float position) {
textView.setText("Progress: "+position);
}
});
switchCompat.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
slideButton.setEnabled(b);
}
});
}
}
<file_sep>/slidebutton/src/main/java/com/rojoxpress/slidebutton/SlideButton.java
package com.rojoxpress.slidebutton;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.AppCompatSeekBar;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.SeekBar;
import android.widget.TextView;
public class SlideButton extends FrameLayout {
private TextView textView;
private SlideBar slideBar;
private SlideButtonListener listener;
private OnSlideChangeListener slideChangeListener;
private int offsetThumb;
public SlideButton(Context context) {
super(context);
init(null);
}
public SlideButton(Context context, AttributeSet attrs) {
super(context, attrs);
init(attrs);
}
public SlideButton(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(attrs);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public SlideButton(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(attrs);
}
public int dpToPixels(int dp) {
return (int) (dp * getContext().getResources().getDisplayMetrics().density);
}
public void init(@Nullable AttributeSet set){
offsetThumb = dpToPixels(16);
textView = new TextView(getContext());
slideBar = new SlideBar(getContext());
LayoutParams childParams = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
childParams.gravity = Gravity.CENTER;
slideBar.setLayoutParams(childParams);
textView.setLayoutParams(childParams);
slideBar.setProgressDrawable(ContextCompat.getDrawable(getContext(),R.drawable.back_slide_layer));
textView.setGravity(Gravity.CENTER);
if(set != null){
TypedArray a = getContext().obtainStyledAttributes(set,R.styleable.slider_button,0,0);
if(a.hasValue(R.styleable.slider_button_text)){
String buttonText = a.getString(R.styleable.slider_button_text);
setText(buttonText);
}
if(a.hasValue(R.styleable.slider_button_thumb)){
Drawable thumbDrawable;
thumbDrawable = a.getDrawable(R.styleable.slider_button_thumb);
slideBar.setThumb(thumbDrawable);
} else {
slideBar.setThumb(ContextCompat.getDrawable(getContext(),R.drawable.thumb_def));
}
if(a.hasValue(R.styleable.slider_button_thumbOffset)){
int offset = a.getDimensionPixelSize(R.styleable.slider_button_thumbOffset,dpToPixels(10));
offsetThumb += offset;
}
if(a.hasValue(R.styleable.slider_button_sliderBackground)){
setBackgroundDrawable(a.getDrawable(R.styleable.slider_button_sliderBackground));
} else {
setBackgroundDrawable(ContextCompat.getDrawable(getContext(),R.drawable.back_slide_button));
}
float unitsTextSize = a.getDimensionPixelSize(R.styleable.slider_button_textSize, dpToPixels(20));
textView.setTextSize(TypedValue.COMPLEX_UNIT_PX,unitsTextSize);
int color = a.getColor(R.styleable.slider_button_textColor,Color.WHITE);
textView.setTextColor(color);
a.recycle();
}
setThumbOffset(offsetThumb);
/*post(new Runnable() {
@Override
public void run() {
Drawable drawable = slideBar.thumb;
if(drawable != null){
Drawable n = new ScaleDrawable(drawable,Gravity.CENTER,getHeight(),getHeight());
slideBar.setThumb(n);
}
}
});*/
this.addView(textView);
this.addView(slideBar);
}
public TextView getTexView(){
return textView;
}
public void setText(@StringRes int res){
textView.setText(res);
}
public void setText(CharSequence charSequence){
textView.setText(charSequence);
}
public void setThumb(Drawable drawable){
slideBar.setThumb(drawable);
}
public void setThumbOffset(int offset){
slideBar.setThumbOffset(offset);
}
public void setOnSlideChangeListener(OnSlideChangeListener slideChangeListener) {
this.slideChangeListener = slideChangeListener;
}
@Override
public void setEnabled(boolean enabled) {
super.setEnabled(enabled);
slideBar.setEnabled(enabled);
textView.setEnabled(enabled);
int color = 0;
if(!enabled) {
color = ContextCompat.getColor(getContext(), R.color.disabled_filter);
textView.setVisibility(GONE);
} else {
textView.setVisibility(VISIBLE);
}
slideBar.getThumb().setColorFilter(color, PorterDuff.Mode.XOR);
}
protected class SlideBar extends AppCompatSeekBar {
private Drawable thumb;
private OnSeekBarChangeListener seekBarChangeListener = new OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
onSlideChange(((float) i/getMax()));
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
};
public SlideBar(Context context) {
super(context);
init();
}
public SlideBar(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
public SlideBar(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
@Override
public void setThumb(Drawable thumb) {
super.setThumb(thumb);
this.thumb = thumb;
}
@Override
public Drawable getThumb() {
return thumb;
}
public void init(){
setMax(100);
setOnSeekBarChangeListener(seekBarChangeListener);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_DOWN) {
if (thumb.getBounds().contains((int) event.getX(), (int) event.getY())) {
super.onTouchEvent(event);
} else
return false;
} else if (event.getAction() == MotionEvent.ACTION_UP) {
if (getProgress() > 90)
onSlide();
setProgress(0);
} else
super.onTouchEvent(event);
return true;
}
private void onSlide() {
if(listener != null) {
listener.onSlide();
}
}
private void onSlideChange(float position) {
if(slideChangeListener != null) {
slideChangeListener.onSlideChange(position);
}
}
}
public void setSlideButtonListener(SlideButtonListener listener) {
this.listener = listener;
}
public interface SlideButtonListener {
public void onSlide();
}
public interface OnSlideChangeListener {
public void onSlideChange(float position);
}
}
|
5b7943c5ab7df9564334d1922a8544e84b2e4bce
|
[
"Java"
] | 2 |
Java
|
seongminp/SlideButton
|
44021e0263a2d0000204e6f2e1782fbca2a6db93
|
21e0f8f568e2855ff269efd69c0ed31adeee0140
|
refs/heads/master
|
<repo_name>zachwooding/Surface<file_sep>/Assets/PlayerMotor.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Networking;
[RequireComponent(typeof(Rigidbody))]
public class PlayerMotor : NetworkBehaviour {
[SerializeField]
private Camera cam;
[SerializeField]
private float jumpForce = 2.0f;
private Vector3 velocity = Vector3.zero;
private Vector3 rotation = Vector3.zero;
private Vector3 jump = new Vector3(0.0f, 2.0f, 0.0f);
private Vector3 cameraRotation = Vector3.zero;
private bool isGrounded = true;
private Rigidbody rb;
// Use this for initialization
void Start () {
rb = GetComponent<Rigidbody>();
}
public void Move(Vector3 _velocity)
{
velocity = _velocity;
}
public void Rotate(Vector3 _rotation)
{
rotation = _rotation;
}
public void RotateCamera(Vector3 _cameraRotation)
{
cameraRotation = _cameraRotation;
}
// Update is called once per frame
void FixedUpdate () {
PerformMovement();
PerformRotation();
}
void PerformMovement()
{
if(velocity != Vector3.zero)
{
rb.MovePosition(rb.position + velocity * Time.fixedDeltaTime);
}
}
void PerformRotation()
{
rb.MoveRotation(rb.rotation * Quaternion.Euler(rotation));
if(cam != null)
{
cam.transform.Rotate(-cameraRotation);
}
}
private void OnCollisionStay()
{
isGrounded = true;
}
void Update()
{
if (!isLocalPlayer)
{
return;
}
if (Input.GetKeyDown(KeyCode.Space) && isGrounded)
{
rb.AddForce(jump * jumpForce, ForceMode.Impulse);
isGrounded = false;
}
}
}
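// Usage sketch (assumed): a separate input/controller component on the same GameObject reads input
// each frame and drives this motor, e.g. motor.Move(velocity); motor.Rotate(yaw); motor.RotateCamera(pitch);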
|
aca90804944f10aab2fa8487712418c251a83aec
|
[
"C#"
] | 1 |
C#
|
zachwooding/Surface
|
adbdff187f15e8f7d84dfad62ba06844f7e917ba
|
869f652c0c58e420def99175748e32b8124e392f
|
refs/heads/master
|
<repo_name>MosZQ/SweetKit<file_sep>/SweetKit/SweetKit/Extensions/UIKit/UIImage+Extension.swift
import UIKit
extension UIImage {
func roundWithCornerRadius(_ cornerRadius: CGFloat) -> UIImage {
let rect = CGRect(origin: CGPoint(x: 0, y: 0), size: self.size)
UIGraphicsBeginImageContextWithOptions(self.size, false, 1)
UIBezierPath(roundedRect: rect, cornerRadius: cornerRadius).addClip()
draw(in: rect)
return UIGraphicsGetImageFromCurrentImageContext()!
}
/// 缩放
// public func resize(_ size: CGSize) -> UIImage {
// UIGraphicsBeginImageContextWithOptions(size, false, 0)
// draw(in: CGRect(origin: CGPoint.zero, size: size))
// let image = UIGraphicsGetImageFromCurrentImageContext()
// UIGraphicsEndImageContext()
// return image!
// }
/// Resizes the image by a given rate for a given interpolation quality.
///
/// - Parameters:
/// - rate: The resize rate. Positive to enlarge, negative to shrink. Defaults to medium.
/// - quality: The interpolation quality.
/// - Returns: The resized image.
public func resized(by rate: CGFloat, quality: CGInterpolationQuality = .medium) -> UIImage {
let width = self.size.width * rate
let height = self.size.height * rate
let size = CGSize(width: width, height: height)
UIGraphicsBeginImageContext(size)
let context = UIGraphicsGetCurrentContext()
context?.interpolationQuality = quality
self.draw(in: CGRect(origin: .zero, size: size))
let resized = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
return resized
}
// More information here: http://nshipster.com/image-resizing/
func resizeImage(newWidth: CGFloat) -> UIImage {
let scale = newWidth / self.size.width
let newHeight = self.size.height * scale
UIGraphicsBeginImageContextWithOptions(CGSize(width: newWidth, height: newHeight), false, 0)
self.draw(in: CGRect(x: 0, y: 0, width: newWidth, height: newHeight))
let newImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return newImage!
}
/// 切图
public func crop(_ rect: CGRect) -> UIImage {
UIGraphicsBeginImageContextWithOptions(rect.size, false, 0)
draw(at: CGPoint(x: -rect.origin.x, y: -rect.origin.y))
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image!
}
/// 根据颜色生成一张图片
static func withColor(_ color: UIColor, size: CGSize = CGSize(width: 1, height: 1)) -> UIImage {
let rect = CGRect(x: 0, y: 0, width: size.width, height: size.height)
UIGraphicsBeginImageContextWithOptions(size, false, 0)
color.setFill()
UIRectFill(rect)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image!
}
/// 获取指定View图片
class func imageWithView(_ view: UIView) -> UIImage {
UIGraphicsBeginImageContext(view.bounds.size)
view.layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image!
}
func imageClipOvalImage() -> UIImage {
UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
let ctx = UIGraphicsGetCurrentContext()
let rect = CGRect(x: 0, y: 0, width: size.width, height: size.height)
ctx?.addEllipse(in: rect)
ctx?.clip()
draw(in: rect)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image!
}
/// Creates a QR code from a string.
/// Resizing rate defaults to 15.0 here because the CIFilter result is 31x31 pixels in size.
///
/// - Parameter string: Text to be the QR Code content
/// - Parameter resizeRate: The resizing rate. Positive for enlarging and negative for shrinking. Defaults to 15.0.
/// - Returns: image QR Code image
public static func imageQRCode(for string: String, resizeRate: CGFloat = 15.0) -> UIImage {
let data = string.data(using: .isoLatin1, allowLossyConversion: false)
let filter = CIFilter(name: "CIQRCodeGenerator")!
filter.setDefaults()
filter.setValue(data, forKey: "inputMessage")
filter.setValue("H", forKey: "inputCorrectionLevel")
let cImage = filter.outputImage!
let qrCode = UIImage(ciImage: cImage)
let qrCodeResized = qrCode.resized(by: resizeRate, quality: .none)
return qrCodeResized
}
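    // Usage sketch:
    //   let qr = UIImage.imageQRCode(for: "https://example.com", resizeRate: 10.0)
    // The 31x31 CIFilter output is scaled by `resizeRate`, so 10.0 yields roughly a 310x310 point image.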
/// 图片高宽比
public var aspectRatio: CGFloat {
return size.width / size.height
}
var base64: String {
return UIImageJPEGRepresentation(self, 1.0)!.base64EncodedString()
}
/**
Fix the image's orientation
https://github.com/cosnovae/fixUIImageOrientation/blob/master/fixImageOrientation.swift
- parameter src: the source image
- returns: new image
*/
public func fixImageOrientation() -> UIImage {
if self.imageOrientation == UIImageOrientation.up {
return self
}
var transform: CGAffineTransform = CGAffineTransform.identity
switch self.imageOrientation {
case UIImageOrientation.down, UIImageOrientation.downMirrored:
transform = transform.translatedBy(x: self.size.width, y: self.size.height)
transform = transform.rotated(by: .pi)
break
case UIImageOrientation.left, UIImageOrientation.leftMirrored:
transform = transform.translatedBy(x: self.size.width, y: 0)
transform = transform.rotated(by: .pi / 2)
break
case UIImageOrientation.right, UIImageOrientation.rightMirrored:
transform = transform.translatedBy(x: 0, y: self.size.height)
transform = transform.rotated(by: -.pi / 2)
break
case UIImageOrientation.up, UIImageOrientation.upMirrored:
break
}
        switch self.imageOrientation {
        case UIImageOrientation.upMirrored, UIImageOrientation.downMirrored:
            transform = transform.translatedBy(x: self.size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
            break
        case UIImageOrientation.leftMirrored, UIImageOrientation.rightMirrored:
            transform = transform.translatedBy(x: self.size.height, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        case UIImageOrientation.up, UIImageOrientation.down, UIImageOrientation.left, UIImageOrientation.right:
            break
        }
let ctx:CGContext = CGContext(data: nil, width: Int(self.size.width), height: Int(self.size.height), bitsPerComponent: self.cgImage!.bitsPerComponent, bytesPerRow: 0, space: self.cgImage!.colorSpace!, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)!
ctx.concatenate(transform)
switch self.imageOrientation {
case UIImageOrientation.left, UIImageOrientation.leftMirrored, UIImageOrientation.right, UIImageOrientation.rightMirrored:
ctx.draw(self.cgImage!, in: CGRect(x: 0, y: 0, width: self.size.height, height: self.size.width))
break
default:
ctx.draw(self.cgImage!, in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
break
}
let cgimage:CGImage = ctx.makeImage()!
let image:UIImage = UIImage(cgImage: cgimage)
return image
}
//https://github.com/melvitax/AFImageHelper/blob/master/AFImageHelper%2FAFImageExtension.swift
public enum UIImageContentMode {
case scaleToFill, scaleAspectFit, scaleAspectFill
}
/**
Creates a resized copy of an image.
- Parameter size: The new size of the image.
- Parameter contentMode: The way to handle the content in the new size.
- Parameter quality: The image quality
- Returns A new image
*/
public func resize(_ size:CGSize, contentMode: UIImageContentMode = .scaleToFill, quality: CGInterpolationQuality = .medium) -> UIImage? {
let horizontalRatio = size.width / self.size.width;
let verticalRatio = size.height / self.size.height;
var ratio: CGFloat!
switch contentMode {
case .scaleToFill:
ratio = 1
case .scaleAspectFill:
ratio = max(horizontalRatio, verticalRatio)
case .scaleAspectFit:
ratio = min(horizontalRatio, verticalRatio)
}
let rect = CGRect(x: 0, y: 0, width: size.width * ratio, height: size.height * ratio)
// Fix for a colorspace / transparency issue that affects some types of
// images. See here: http://vocaro.com/trevor/blog/2009/10/12/resize-a-uiimage-the-right-way/comment-page-2/#comment-39951
let colorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedLast.rawValue)
let context = CGContext(data: nil, width: Int(rect.size.width), height: Int(rect.size.height), bitsPerComponent: 8, bytesPerRow: 0, space: colorSpace, bitmapInfo: bitmapInfo.rawValue)
let transform = CGAffineTransform.identity
// Rotate and/or flip the image if required by its orientation
context?.concatenate(transform);
// Set the quality level to use when rescaling
context!.interpolationQuality = quality
//CGContextSetInterpolationQuality(context, CGInterpolationQuality(kCGInterpolationHigh.value))
// Draw into the context; this scales the image
context?.draw(self.cgImage!, in: rect)
// Get the resized image from the context and a UIImage
let newImage = UIImage(cgImage: (context?.makeImage()!)!, scale: self.scale, orientation: self.imageOrientation)
return newImage;
}
public func j_crop(_ bounds: CGRect) -> UIImage? {
return UIImage(cgImage: (self.cgImage?.cropping(to: bounds)!)!, scale: 0.0, orientation: self.imageOrientation)
}
public func cropToSquare() -> UIImage? {
let size = CGSize(width: self.size.width * self.scale, height: self.size.height * self.scale)
let shortest = min(size.width, size.height)
let left: CGFloat = size.width > shortest ? (size.width-shortest)/2 : 0
let top: CGFloat = size.height > shortest ? (size.height-shortest)/2 : 0
let rect = CGRect(x: 0, y: 0, width: size.width, height: size.height)
let insetRect = rect.insetBy(dx: left, dy: top)
return crop(insetRect)
}
}
<file_sep>/SweetKit/SweetKit/Extensions/Foundation/Date+Extension.swift
import Foundation
extension Date {
// 获取今天日期
static func today() -> String {
let dataFormatter : DateFormatter = DateFormatter()
dataFormatter.dateFormat = "yyyy-MM-dd"
let now : Date = Date()
return dataFormatter.string(from: now)
}
// 判断是否是今天
static func isToday (dateString : String) -> Bool {
// let date : String = NSDate.formattDay(dateString)
return dateString == self.today()
}
// 判断是否是昨天
static func isLastDay (dateString : String) -> Bool {
let todayTimestamp = self.getTimestamp(dateString: today())
let lastdayTimestamp = self.getTimestamp(dateString: dateString)
return lastdayTimestamp == todayTimestamp-(24*60*60)
}
// yyyy-MM-dd格式 转 MM月dd日
static func formattDay (dataString : String) -> String {
if dataString.length <= 0 {
return "errorDate"
}
let dateFormatter : DateFormatter = DateFormatter()
dateFormatter.dateFormat = "yyyy-MM-dd"
let date: Date = dateFormatter.date(from: dataString)!
// 转换成xx月xx日格式
let newDateFormatter : DateFormatter = DateFormatter()
newDateFormatter.dateFormat = "MM月dd日"
return newDateFormatter.string(from: date)
}
static func formattYYYYMMDDHHMMSS(dateString: String) -> Date {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
return dateFormatter.date(from: dateString) ?? Date()
}
// 根据日期获取时间戳
static func getTimestamp (dateString : String) -> TimeInterval {
if dateString.length <= 0 {
return 0
}
let newDateStirng = dateString.appending(" 00:00:00")
let formatter : DateFormatter = DateFormatter()
formatter.dateStyle = DateFormatter.Style.medium
formatter.dateStyle = DateFormatter.Style.short
formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
formatter.timeZone = TimeZone(identifier: "Asia/Beijing")
let dateNow = formatter.date(from: newDateStirng)
return (dateNow?.timeIntervalSince1970)!
}
//时间戳转化时间
static func timeStampToString(timeStamp:String) -> String {
let string = NSString(string: timeStamp)
let timeSta:TimeInterval = string.doubleValue
let formatter = DateFormatter()
formatter.dateFormat="yyyy-MM-dd HH:mm:ss"
formatter.timeZone = TimeZone(identifier: "Asia/Beijing")
let date = NSDate(timeIntervalSince1970: timeSta)
return formatter.string(from: date as Date)
}
// 获取星期
static func weekWithDateString (dateString : String) -> String{
let timestamp = Date.getTimestamp(dateString: dateString)
let day = Int(timestamp/86400)
let array : Array = ["星期一","星期二","星期三","星期四","星期五","星期六","星期日"];
return array[(day-3)%7]
// return "星期\((day-3)%7))"
}
static func currentDayzero() -> Date {
let calendar = Calendar.current
let unitFlags = Set<Calendar.Component>([.year, .month, .day, .hour, .minute, .second])
var components = calendar.dateComponents(unitFlags, from: Date())
components.timeZone = TimeZone.current
components.hour = 0
components.minute = 0
components.second = 0
if let date = calendar.date(from: components) {
return date
}
return Date()
}
var YYYYMMDDDateString : String {
let dateFormatter: DateFormatter = DateFormatter();
dateFormatter.dateFormat = "yyyy-MM-dd"
return dateFormatter.string(from: self)
}
var HHMMDateString : String {
let dateFormatter: DateFormatter = DateFormatter()
dateFormatter.dateFormat = "HH:mm"
return dateFormatter.string(from: self)
}
}
<file_sep>/SweetKit/SweetKit/Extensions/UIKit/UIKit+Extension.swift
import UIKit
import CoreGraphics
import SystemConfiguration.CaptiveNetwork
// MARK: - CGRect
extension CGRect {
public init(x: CGFloat = 0, y: CGFloat = 0, width: CGFloat = 0, height: CGFloat = 0) {
self.init(origin: CGPoint(x: x, y: y), size: CGSize(width: width, height: height))
}
public init(x: CGFloat = 0, y: CGFloat = 0, size: CGSize) {
self.init(origin: CGPoint(x: x, y: y), size: size)
}
public init(origin: CGPoint, width: CGFloat = 0, height: CGFloat = 0) {
self.init(origin: origin, size: CGSize(width: width, height: height))
}
}
extension UIEdgeInsets {
public init(_ top: CGFloat = 0, left: CGFloat = 0, bottom: CGFloat = 0, right: CGFloat = 0) {
self.init(top: top, left: left, bottom: bottom, right: right)
}
}
// MARK: - CGSize
extension CGSize {
public init(_ both: CGFloat) {
self.init(width: both, height: both)
}
public init(width: CGFloat) {
self.init(width: width, height: 0)
}
public init(height: CGFloat) {
self.init(width: 0, height: height)
}
/**
Aspect fit size
- parameter boundingSize: boundingSize
- returns: CGSize
*/
func aspectFit(_ boundingSize: CGSize) -> CGSize {
let minRatio = min(boundingSize.width / width, boundingSize.height / height)
return CGSize(width: width * minRatio, height: height*minRatio)
}
/**
Pixel size
- returns: CGSize
*/
func toPixel() -> CGSize {
let scale = UIScreen.main.scale
return CGSize(width: self.width * scale, height: self.height * scale)
}
}
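// Illustrative usage sketch (not part of the original source):
// CGSize(width: 1920, height: 1080).aspectFit(CGSize(width: 320, height: 320)) // 320 x 180
// CGSize(width: 100, height: 100).toPixel() // 200 x 200 on a @2x screen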
// MARK: - Float, Integer
public extension IntegerLiteralType {
public var f: CGFloat {
return CGFloat(self)
}
}
public extension FloatLiteralType {
public var f: CGFloat {
return CGFloat(self)
}
}
extension CGFloat {
public var half: CGFloat {
return self * 0.5
}
public var double: CGFloat {
return self * 2
}
public static var max = CGFloat.greatestFiniteMagnitude
public static var min = CGFloat.leastNormalMagnitude
}
// MARK: - Int
extension Int {
public var boolValue: Bool {
return self > 0
}
}
extension Bool {
public var reverse: Bool {
return !self
}
public var intValue: Int {
return self ? 1 : 0
}
}
// MARK: - UserDefaults
extension UserDefaults {
static func save(at value: Any?, forKey key: String) {
UserDefaults.standard.set(value, forKey: key)
UserDefaults.standard.synchronize()
}
static func get(forKey key: String) -> Any? {
return UserDefaults.standard.object(forKey: key)
}
static func remove(forKey key: String) {
UserDefaults.standard.removeObject(forKey: key)
UserDefaults.standard.synchronize()
}
}
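// Illustrative usage sketch (not part of the original source); "lastLogin" is a hypothetical key.
// UserDefaults.save(at: Date().YYYYMMDDDateString, forKey: "lastLogin")
// let lastLogin = UserDefaults.get(forKey: "lastLogin") as? String
// UserDefaults.remove(forKey: "lastLogin")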
// MARK: - UIApplication
public extension UIApplication {
/// The app's marketing version (CFBundleShortVersionString)
public class func appVersion() -> String {
return Bundle.main.object(forInfoDictionaryKey: "CFBundleShortVersionString") as! String
}
/// The app's build number (CFBundleVersion)
public class func appBuild() -> String {
return Bundle.main.object(forInfoDictionaryKey: kCFBundleVersionKey as String) as! String
}
public class var iconFilePath: String {
let iconFilename = Bundle.main.object(forInfoDictionaryKey: "CFBundleIconFile")
let iconBasename = (iconFilename as! NSString).deletingPathExtension
let iconExtension = (iconFilename as! NSString).pathExtension
return Bundle.main.path(forResource: iconBasename, ofType: iconExtension)!
}
public class func iconImage() -> UIImage? {
guard let image = UIImage(contentsOfFile:self.iconFilePath) else {
return nil
}
return image
}
public class func versionDescription() -> String {
let version = appVersion()
#if DEBUG
return "Debug - \(version)"
#else
return "Release - \(version)"
#endif
}
public class func appBundleName() -> String{
return Bundle.main.object(forInfoDictionaryKey: "CFBundleName") as! String
}
}
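// Illustrative usage sketch (not part of the original source):
// let about = "\(UIApplication.appBundleName()) \(UIApplication.versionDescription())" // e.g. "SweetKit Release - 1.0"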
// MARK: - UIDevice
extension UIDevice {
/// Returns a human-readable device model name (e.g. "iPhone 7") derived from the hardware identifier
public static func phoneModel() -> String {
var systemInfo = utsname()
uname(&systemInfo)
let machineMirror = Mirror(reflecting: systemInfo.machine)
let identifier = machineMirror.children.reduce("") { identifier, element in
guard let value = element.value as? Int8 , value != 0 else { return identifier }
return identifier + String(UnicodeScalar(UInt8(value)))
}
switch identifier {
case "iPod5,1": return "iPod Touch 5"
case "iPod7,1": return "iPod Touch 6"
case "iPhone3,1", "iPhone3,2", "iPhone3,3": return "iPhone 4"
case "iPhone4,1": return "iPhone 4s"
case "iPhone5,1", "iPhone5,2": return "iPhone 5"
case "iPhone5,3", "iPhone5,4": return "iPhone 5c"
case "iPhone6,1", "iPhone6,2": return "iPhone 5s"
case "iPhone7,2": return "iPhone 6"
case "iPhone7,1": return "iPhone 6 Plus"
case "iPhone8,1": return "iPhone 6s"
case "iPhone8,2": return "iPhone 6s Plus"
case "iPhone8,4": return "iPhone 5SE"
case "iPhone9,1": return "iPhone 7"
case "iPhone9,2": return "iPhone 7 Plus"
case "iPad2,1", "iPad2,2", "iPad2,3", "iPad2,4":return "iPad 2"
case "iPad3,1", "iPad3,2", "iPad3,3": return "iPad 3"
case "iPad3,4", "iPad3,5", "iPad3,6": return "iPad 4"
case "iPad4,1", "iPad4,2", "iPad4,3": return "iPad Air"
case "iPad5,3", "iPad5,4": return "iPad Air 2"
case "iPad2,5", "iPad2,6", "iPad2,7": return "iPad Mini"
case "iPad4,4", "iPad4,5", "iPad4,6": return "iPad Mini 2"
case "iPad4,7", "iPad4,8", "iPad4,9": return "iPad Mini 3"
case "iPad5,1", "iPad5,2": return "iPad Mini 4"
case "iPad6,7", "iPad6,8": return "iPad Pro"
case "AppleTV5,3": return "Apple TV"
case "i386", "x86_64": return "Simulator"
default: return identifier
}
}
/// Whether the app is running in the Simulator
public static var isSimulator: Bool {
return UIDevice.phoneModel() == "Simulator"
}
public static var isiPad: Bool {
return UIDevice.current.userInterfaceIdiom == .pad
}
/// Returns the BSSID of the currently connected Wi-Fi access point (commonly shown as a "MAC address"), or nil if unavailable
static func getMacAddress() -> String? {
if let interfaces: NSArray = CNCopySupportedInterfaces() {
for interface in interfaces {
if let info = CNCopyCurrentNetworkInfo(interface as! CFString) as NSDictionary? {
return info["BSSID"] as? String
}
}
}
return nil
}
/// The size, in points, of one physical pixel on the current screen
public class var onePixel: CGFloat {
return CGFloat(1.0) / UIScreen.main.scale
}
/// Rounds a point value to the nearest physical pixel of the current screen
static public func roundFloatToPixel(_ value: CGFloat) -> CGFloat {
return round(value * UIScreen.main.scale) / UIScreen.main.scale
}
}
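// Illustrative usage sketch (not part of the original source):
// UIDevice.phoneModel() // e.g. "iPhone 7"
// UIDevice.onePixel // 0.5 on a @2x screen
// UIDevice.roundFloatToPixel(12.34) // nearest value that lands on a physical pixel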
// MARK: - UUID
extension UUID {
static var string: String {
get {
return Foundation.UUID().uuidString.replacingOccurrences(of: "-", with: "")
}
}
}
extension UIBezierPath {
static func midpoint(p0: CGPoint, p1: CGPoint) -> CGPoint {
return CGPoint(x: (p0.x + p1.x) / 2.0, y: (p0.y + p1.y) / 2.0)
}
}
// MARK: - UITapGestureRecognizer
extension UITapGestureRecognizer {
/// Detects whether a tap on a UILabel landed inside a given substring range (useful for tappable "link" text)
///
/// - Parameters:
///   - label: The label that should respond to the tap
///   - targetRange: The range of the tappable text within the label's text
/// step 1. policyPromptLabel.addGestureRecognizer(pan)
/// step 2 let range1 = (text as NSString).range(of: subStr1)
/// let range2 = (text as NSString).range(of: subStr2)
/// step 3 gesture.didTapAttributedTextInLabel(policyPromptLabel, targetRange: range1)
/// gesture.didTapAttributedTextInLabel(policyPromptLabel, targetRange: range2)
func didTapAttributedTextInLabel(_ label: UILabel, targetRange: NSRange) -> Bool {
//Create instances of NSLayoutManager, NSTextContainer and NSTextStorage
let layoutManager = NSLayoutManager()
let textContainer = NSTextContainer(size: CGSize.zero)
let textStorage = NSTextStorage(attributedString: label.attributedText!)
//Configure layoutManager and textStorage
layoutManager.addTextContainer(textContainer)
textStorage.addLayoutManager(layoutManager)
//Configure textContainer
textContainer.lineFragmentPadding = 0.0
textContainer.lineBreakMode = label.lineBreakMode
textContainer.maximumNumberOfLines = label.numberOfLines
let labelSize = label.bounds.size
textContainer.size = labelSize
//Find the tapped character location and compare it to the specified range
let locationOfTouchInLabel = self.location(in: label)
let textBoundingBox = layoutManager.usedRect(for: textContainer)
let textContainerOffset = CGPoint(x: (labelSize.width - textBoundingBox.size.width) * 0.5 - textBoundingBox.origin.x, y: (labelSize.height - textBoundingBox.size.height) * 0.5 - textBoundingBox.origin.y)
let locationOfTouchInTextContainer = CGPoint(x: locationOfTouchInLabel.x - textContainerOffset.x, y: locationOfTouchInLabel.y - textContainerOffset.y)
let indexOfCharacter = layoutManager.characterIndex(for: locationOfTouchInTextContainer, in: textContainer, fractionOfDistanceBetweenInsertionPoints: nil)
return NSLocationInRange(indexOfCharacter, targetRange)
}
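// Fuller illustrative sketch (not part of the original source); `termsLabel` and `didTapTerms` are hypothetical names,
// and the label's attributedText must be set before the hit test.
// let tap = UITapGestureRecognizer(target: self, action: #selector(didTapTerms(_:)))
// termsLabel.isUserInteractionEnabled = true
// termsLabel.addGestureRecognizer(tap)
// // In didTapTerms(_ gesture: UITapGestureRecognizer):
// // let range = (termsLabel.text! as NSString).range(of: "Terms of Service")
// // if gesture.didTapAttributedTextInLabel(termsLabel, targetRange: range) { /* open the terms page */ }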
}
<file_sep>/SweetKit/SweetKit/Extensions/Foundation/Foundation+Extension.swift
import Foundation
extension URL {
static func inDocumentsFolder(fileName: String) -> URL {
return URL(fileURLWithPath: NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0], isDirectory: true)
.appendingPathComponent(fileName)
}
}
extension FileManager {
/// Writes data to a file in the sandbox, replacing any existing file at the path
///
/// - Parameters:
///   - data: The data to write
///   - savePath: The destination path
/// - Returns: An error if removing the existing file or writing fails, otherwise nil
class func save(_ data: Data, savePath: String) -> Error? {
if FileManager.default.fileExists(atPath: savePath) {
do {
try FileManager.default.removeItem(atPath: savePath)
} catch let error {
return error
}
}
do {
try data.write(to: URL(fileURLWithPath: savePath))
} catch let error {
return error
}
return nil
}
/// Creates a directory (with intermediate directories) in the sandbox if it does not already exist
///
/// - Parameter path: The directory path
/// - Returns: An error if creation fails, otherwise nil
@discardableResult
class func create(at path: String) -> Error? {
if (!FileManager.default.fileExists(atPath: path)) {
do {
try FileManager.default.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil)
} catch let error {
print("error:\(error)")
return error
}
}
return nil
}
/// Deletes a file in the sandbox
///
/// - Parameter path: The path of the file to delete
/// - Returns: An error if deletion fails or the file does not exist, otherwise nil
@discardableResult
class func delete(at path: String) -> Error? {
if (FileManager.default.fileExists(atPath: path)) {
do {
try FileManager.default.removeItem(atPath: path)
} catch let error {
return error
}
return nil
}
return NSError(domain: "File does not exist", code: -1, userInfo: nil) as Error
}
class func rename(oldFileName: String, newFileName: String) -> Bool {
do {
try FileManager.default.moveItem(atPath: oldFileName, toPath: newFileName)
return true
} catch {
print("error:\(error)")
return false
}
}
class func copy(oldFileName: String, newFileName: String) -> Bool {
do {
try FileManager.default.copyItem(atPath: oldFileName, toPath: newFileName)
return true
} catch {
return false
}
}
class var document: String {
get {
return NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
}
}
class var library: String {
get {
return NSSearchPathForDirectoriesInDomains(.libraryDirectory, .userDomainMask, true)[0]
}
}
class var temp: String {
get {
return NSTemporaryDirectory()
}
}
class var caches: String {
get {
return NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true)[0]
}
}
}
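// Illustrative usage sketch (not part of the original source); `data` and "report.json" are hypothetical.
// let path = FileManager.caches + "/report.json"
// if let error = FileManager.save(data, savePath: path) { print("save failed: \(error)") }
// FileManager.delete(at: path)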
<file_sep>/SweetKit/SweetKit/Extensions/Foundation/Dictionary+Extension.swift
import Foundation
extension Dictionary {
/// Serializes the dictionary into a pretty-printed JSON string (returns an empty JSON object on failure)
func toJSONString() -> String {
guard let data = try? JSONSerialization.data(withJSONObject: self, options: .prettyPrinted),
let strJson = String(data: data, encoding: .utf8) else {
return "{}"
}
return strJson
}
}
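// Illustrative usage sketch (not part of the original source):
// ["name": "SweetKit", "version": "1.0"].toJSONString() // pretty-printed JSON string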
<file_sep>/SweetKit/SweetKit/Extensions/UIKit/String+Extension.swift
import UIKit
extension String {
/// Converts to UInt
func toUInt() -> UInt? {
return UInt(self)
}
/// Converts to UInt, returning the default value if conversion fails
func toUIntWithDefault(defaultValue: UInt) -> UInt {
return UInt(self) ?? defaultValue
}
/// Converts to Float
var float: Float? {
let numberFormatter = NumberFormatter()
return numberFormatter.number(from: self)?.floatValue
}
/// Converts to CGFloat
var cgfloat: CGFloat? {
guard let value = NumberFormatter().number(from: self)?.doubleValue else { return nil }
return CGFloat(value)
}
/// Converts to Double
var double: Double? {
let numberFormatter = NumberFormatter()
return numberFormatter.number(from: self)?.doubleValue
}
/// The number of characters in the string
var length : Int {
return characters.count
}
func hash() -> Int {
let sum = self.characters
.map { String($0).unicodeScalars.first?.value }
.flatMap { $0 }
.reduce(0, +)
return Int(sum)
}
func localized() -> String {
return NSLocalizedString(self, comment: "")
}
/// Returns the substring at the given character-offset range
///
/// - Parameter r: A half-open range of character offsets, e.g. 0..<n
subscript (r: Range<Int>) -> String {
get {
let startIndex = self.characters.index(self.startIndex, offsetBy: r.lowerBound)
let endIndex = self.characters.index(self.startIndex, offsetBy: r.upperBound)
return self[Range(startIndex..<endIndex)]
}
}
/// Trims whitespace and newline characters in place
public mutating func trim() {
self = self.trimmed()
}
/// Returns a new string with whitespace and newline characters trimmed
public func trimmed() -> String {
return self.trimmingCharacters(in: .whitespacesAndNewlines)
}
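// Illustrative usage sketch (not part of the original source):
// "  SweetKit  ".trimmed() // "SweetKit"
// "SweetKit"[0..<5] // "Sweet"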
/// Calculates the bounding size of the string for the given font size, optionally limited to a number of lines
func toSize(size: CGSize, fontSize: CGFloat, maximumNumberOfLines: Int = 0) -> CGSize {
let font = UIFont.systemFont(ofSize: fontSize)
var size = self.boundingRect(with: size, options: .usesLineFragmentOrigin, attributes:[NSFontAttributeName : font], context: nil).size
if maximumNumberOfLines > 0 {
size.height = min(size.height, CGFloat(maximumNumberOfLines) * font.lineHeight)
}
return size
}
/// Calculates the width of the string for the given font size
func toWidth(fontSize: CGFloat, maximumNumberOfLines: Int = 0) -> CGFloat {
let size = CGSize(width: CGFloat.greatestFiniteMagnitude, height: CGFloat.greatestFiniteMagnitude)
return toSize(size: size, fontSize: fontSize, maximumNumberOfLines: maximumNumberOfLines).width
}
/**
Calculate the height of string, and limit the width
- parameter width: width
- parameter font: font
- returns: height value
*/
func heightWithConstrainedWidth(_ width: CGFloat, font: UIFont) -> CGFloat {
let constraintRect = CGSize(width: width, height: .greatestFiniteMagnitude)
let boundingBox = self.boundingRect(
with: constraintRect,
options: .usesLineFragmentOrigin,
attributes: [NSFontAttributeName: font],
context: nil)
return boundingBox.height
}
/// Calculates the height of the string constrained to the given width
func toHeight(width: CGFloat, fontSize: CGFloat, maximumNumberOfLines: Int = 0) -> CGFloat {
let size = CGSize(width: width, height: CGFloat.greatestFiniteMagnitude)
return toSize(size: size, fontSize: fontSize, maximumNumberOfLines: maximumNumberOfLines).height
}
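// Illustrative usage sketch (not part of the original source); exact values depend on the system font metrics.
// let title = "Hello, SweetKit"
// let width = title.toWidth(fontSize: 15)
// let height = title.toHeight(width: 200, fontSize: 15, maximumNumberOfLines: 2)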
/// Returns an attributed string with the given substring rendered in the specified color
func makeSubstringColor(_ text: String, color: UIColor) -> NSAttributedString {
let attributedText = NSMutableAttributedString(string: self)
let range = (self as NSString).range(of: text)
if range.location != NSNotFound {
attributedText.setAttributes([NSForegroundColorAttributeName: color], range: range)
}
return attributedText
}
}